Mirror of https://github.com/KevinMidboe/linguist.git (synced 2025-10-29 17:50:22 +00:00)
Compare commits
1231 Commits
The commit table in the mirrored page carried only the SHA column; the listed commits run from 6c4c2fa0e0 at the top of the table to e0190a5a6e at the bottom, with no author, date, or message information captured.
.gitignore (vendored, 2 changed lines)
@@ -1 +1,3 @@
Gemfile.lock
.bundle/
vendor/
.travis.yml
@@ -1,8 +1,12 @@
before_install: sudo apt-get install libicu-dev -y
before_install:
  - sudo apt-get install libicu-dev -y
  - gem update --system 2.1.11
rvm:
  - 1.8.7
  - 1.9.2
  - 1.9.3
  - 2.0.0
  - 2.1.1
  - ree
notifications:
  disabled: true
Gemfile (7 changed lines)
@@ -1,2 +1,7 @@
source :rubygems
source 'https://rubygems.org'
gemspec

if RUBY_VERSION < "1.9.3"
  # escape_utils 1.0.0 requires 1.9.3 and above
  gem "escape_utils", "0.3.2"
end
LICENSE (2 changed lines)
@@ -1,4 +1,4 @@
Copyright (c) 2011 GitHub, Inc.
Copyright (c) 2011-2014 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
README.md (80 changed lines)
@@ -1,22 +1,24 @@
# Linguist

We use this library at GitHub to detect blob languages, highlight code, ignore binary files, suppress generated files in diffs and generate language breakdown graphs.
We use this library at GitHub to detect blob languages, highlight code, ignore binary files, suppress generated files in diffs, and generate language breakdown graphs.

## Features

### Language detection

Linguist defines the list of all languages known to GitHub in a [yaml file](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml). In order for a file to be highlighted, a language and lexer must be defined there.
Linguist defines a list of all languages known to GitHub in a [yaml file](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml). In order for a file to be highlighted, a language and a lexer must be defined there.

Most languages are detected by their file extension. This is the fastest and most common situation.
Most languages are detected by their file extension. For disambiguating between files with common extensions, we first apply some common-sense heuristics to pick out obvious languages. After that, we use a
[statistical
classifier](https://github.com/github/linguist/blob/master/lib/linguist/classifier.rb).
This process can help us tell the difference between, for example, `.h` files which could be either C, C++, or Obj-C.

For disambiguating between files with common extensions, we use a [bayesian classifier](https://github.com/github/linguist/blob/master/lib/linguist/classifier.rb). For an example, this helps us tell the difference between `.h` files which could be either C, C++, or Obj-C.
```ruby

In the actual GitHub app we deal with `Grit::Blob` objects. For testing, there is a simple `FileBlob` API.
Linguist::FileBlob.new("lib/linguist.rb").language.name #=> "Ruby"

Linguist::FileBlob.new("lib/linguist.rb").language.name #=> "Ruby"

Linguist::FileBlob.new("bin/linguist").language.name #=> "Ruby"
Linguist::FileBlob.new("bin/linguist").language.name #=> "Ruby"
```

See [lib/linguist/language.rb](https://github.com/github/linguist/blob/master/lib/linguist/language.rb) and [lib/linguist/languages.yml](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml).

@@ -24,39 +26,59 @@ See [lib/linguist/language.rb](https://github.com/github/linguist/blob/master/li

The actual syntax highlighting is handled by our Pygments wrapper, [pygments.rb](https://github.com/tmm1/pygments.rb). It also provides a [Lexer abstraction](https://github.com/tmm1/pygments.rb/blob/master/lib/pygments/lexer.rb) that determines which highlighter should be used on a file.

We typically run on a prerelease version of Pygments, [pygments.rb](https://github.com/tmm1/pygments.rb), to get early access to new lexers. The [lexers.yml](https://github.com/github/linguist/blob/master/lib/linguist/lexers.yml) file is a dump of the lexers we have available on our server.

### Stats

The Language Graph you see on every repository is built by aggregating the languages of all repo's blobs. The top language in the graph determines the project's primary language. Collectively, these stats make up the [Top Languages](https://github.com/languages) page.
The Language stats bar that you see on every repository is built by aggregating the languages of each file in that repository. The top language in the graph determines the project's primary language.

The repository stats API can be used on a directory:
The repository stats API, accessed through `#languages`, can be used on a directory:

    project = Linguist::Repository.from_directory(".")
    project.language.name #=> "Ruby"
    project.languages #=> { "Ruby" => 0.98,
                            "Shell" => 0.02 }
```ruby
project = Linguist::Repository.from_directory(".")
project.language.name #=> "Ruby"
project.languages #=> { "Ruby" => 0.98, "Shell" => 0.02 }
```

These stats are also printed out by the binary. Try running `linguist` on itself:
These stats are also printed out by the `linguist` binary. You can use the
`--breakdown` flag, and the binary will also output the breakdown of files by language.

    $ bundle exec linguist lib/
    100% Ruby
You can try running `linguist` on the `lib/` directory in this repository itself:

    $ bundle exec linguist lib/ --breakdown

    100.00% Ruby

    Ruby:
    linguist/blob_helper.rb
    linguist/classifier.rb
    linguist/file_blob.rb
    linguist/generated.rb
    linguist/heuristics.rb
    linguist/language.rb
    linguist/md5.rb
    linguist/repository.rb
    linguist/samples.rb
    linguist/tokenizer.rb
    linguist.rb

#### Ignore vendored files

Checking other code into your git repo is a common practice. But this often inflates your project's language stats and may even cause your project to be labeled as another language. We are able to identify some of these files and directories and exclude them.

    Linguist::FileBlob.new("vendor/plugins/foo.rb").vendored? # => true
```ruby
Linguist::FileBlob.new("vendor/plugins/foo.rb").vendored? # => true
```

See [Linguist::BlobHelper#vendored?](https://github.com/github/linguist/blob/master/lib/linguist/blob_helper.rb) and [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml).

#### Generated file detection

Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an extra bonus, these files are suppressed in Diffs.
Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an extra bonus, these files are suppressed in diffs.

    Linguist::FileBlob.new("underscore.min.js").generated? # => true
```ruby
Linguist::FileBlob.new("underscore.min.js").generated? # => true
```

See [Linguist::BlobHelper#generated?](https://github.com/github/linguist/blob/master/lib/linguist/blob_helper.rb).
See [Linguist::Generated#generated?](https://github.com/github/linguist/blob/master/lib/linguist/generated.rb).

## Installation

@@ -74,12 +96,18 @@ To run the tests:

## Contributing

The majority of patches won't need to touch any Ruby code at all. The [master language list](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml) is just a configuration file.
The majority of contributions won't need to touch any Ruby code at all. The [master language list](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml) is just a YAML configuration file.

We try to only add languages once they have some usage on GitHub, so please note in-the-wild usage examples in your pull request.

Almost all bug fixes or new language additions should come with some additional code samples. Just drop them under [`samples/`](https://github.com/github/linguist/tree/master/samples) in the correct subdirectory and our test suite will automatically test them. In most cases you shouldn't need to add any new assertions.

To update the `samples.json` after adding new files to [`samples/`](https://github.com/github/linguist/tree/master/samples):

    bundle exec rake samples

### Testing

Sometimes getting the tests running can be to much work especially if you don't have much Ruby experience. Its okay, be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.
Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.

Heres our current build status, which is hopefully green: [](http://travis-ci.org/github/linguist)
Here's our current build status, which is hopefully green: [](http://travis-ci.org/github/linguist)
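As a point of reference (not part of this diff), here is a minimal sketch that strings the README's helpers together to tally languages while skipping vendored and generated blobs. The glob pattern is illustrative, and `Linguist::Repository` already does the equivalent bookkeeping internally:

```ruby
require 'linguist'

# Illustrative sketch only: walk some source files and tally detected
# languages by blob size, skipping vendored and generated files.
stats = Hash.new(0)
Dir.glob("lib/**/*.rb").each do |path|
  blob = Linguist::FileBlob.new(path, Dir.pwd)
  next if blob.vendored? || blob.generated?
  next unless blob.language
  stats[blob.language.name] += blob.size
end

stats.sort_by { |_, size| -size }.each do |name, size|
  puts "#{name}: #{size} bytes"
end
```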
Rakefile (13 changed lines)
@@ -1,11 +1,11 @@
require 'json'
require 'rake/clean'
require 'rake/testtask'
require 'yaml'

task :default => :test

Rake::TestTask.new do |t|
  t.warning = true
end
Rake::TestTask.new

task :samples do
  require 'linguist/samples'
@@ -15,6 +15,13 @@ task :samples do
  File.open('lib/linguist/samples.json', 'w') { |io| io.write json }
end

task :build_gem do
  languages = YAML.load_file("lib/linguist/languages.yml")
  File.write("lib/linguist/languages.json", JSON.dump(languages))
  `gem build github-linguist.gemspec`
  File.delete("lib/linguist/languages.json")
end

namespace :classifier do
  LIMIT = 1_000

bin/linguist (28 changed lines)
@@ -1,15 +1,39 @@
#!/usr/bin/env ruby

# linguist — detect language type for a file, or, given a directory, determine language breakdown
# usage: linguist <path> [<--breakdown>]

require 'linguist/file_blob'
require 'linguist/repository'

path = ARGV[0] || Dir.pwd

# special case if not given a directory but still given the --breakdown option
if path == "--breakdown"
  path = Dir.pwd
  breakdown = true
end

ARGV.shift
breakdown = true if ARGV[0] == "--breakdown"

if File.directory?(path)
  repo = Linguist::Repository.from_directory(path)
  repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
    percentage = ((size / repo.size.to_f) * 100).round
    puts "%-4s %s" % ["#{percentage}%", language]
    percentage = ((size / repo.size.to_f) * 100)
    percentage = sprintf '%.2f' % percentage
    puts "%-7s %s" % ["#{percentage}%", language]
  end
  if breakdown
    puts
    file_breakdown = repo.breakdown_by_file
    file_breakdown.each do |lang, files|
      puts "#{lang}:"
      files.each do |file|
        puts file
      end
      puts
    end
  end
elsif File.file?(path)
  blob = Linguist::FileBlob.new(path, Dir.pwd)
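For context, the directory branch of the script above boils down to the following library calls (a condensed sketch, not part of the diff):

```ruby
require 'linguist/repository'

# Roughly what `bin/linguist <dir> --breakdown` does, using the same
# Repository APIs the script calls: languages, size, breakdown_by_file.
repo = Linguist::Repository.from_directory(ARGV[0] || Dir.pwd)

repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
  puts "%-7s %s" % [format("%.2f%%", size * 100.0 / repo.size), language]
end

repo.breakdown_by_file.each do |language, files|
  puts "#{language}:"
  files.each { |file| puts file }
  puts
end
```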
github-linguist.gemspec
@@ -1,19 +1,23 @@
Gem::Specification.new do |s|
  s.name = 'github-linguist'
  s.version = '2.3.4'
  s.version = '2.10.13'
  s.summary = "GitHub Language detection"
  s.description = 'We use this library at GitHub to detect blob languages, highlight code, ignore binary files, suppress generated files in diffs, and generate language breakdown graphs.'

  s.authors = "GitHub"
  s.authors = "GitHub"
  s.homepage = "https://github.com/github/linguist"
  s.license = "MIT"

  s.files = Dir['lib/**/*']
  s.executables << 'linguist'

  s.add_dependency 'charlock_holmes', '~> 0.6.6'
  s.add_dependency 'escape_utils', '~> 0.2.3'
  s.add_dependency 'escape_utils', '>= 0.3.1'
  s.add_dependency 'mime-types', '~> 1.19'
  s.add_dependency 'pygments.rb', '>= 0.2.13'
  s.add_development_dependency 'mocha'
  s.add_dependency 'pygments.rb', '~> 0.5.4'

  s.add_development_dependency 'json'
  s.add_development_dependency 'mocha'
  s.add_development_dependency 'rake'
  s.add_development_dependency 'yajl-ruby'
end
lib/linguist.rb
@@ -1,5 +1,6 @@
require 'linguist/blob_helper'
require 'linguist/generated'
require 'linguist/heuristics'
require 'linguist/language'
require 'linguist/repository'
require 'linguist/samples'
lib/linguist/blob_helper.rb
@@ -8,6 +8,12 @@ require 'pygments'
require 'yaml'

module Linguist
  # DEPRECATED Avoid mixing into Blob classes. Prefer functional interfaces
  # like `Language.detect` over `Blob#language`. Functions are much easier to
  # cache and compose.
  #
  # Avoid adding additional bloat to this module.
  #
  # BlobHelper is a mixin for Blobish classes that respond to "name",
  # "data" and "size" such as Grit::Blob.
  module BlobHelper
@@ -58,6 +64,15 @@ module Linguist
      _mime_type ? _mime_type.binary? : false
    end

    # Internal: Is the blob binary according to its mime type,
    # overriding it if we have better data from the languages.yml
    # database.
    #
    # Return true or false
    def likely_binary?
      binary_mime_type? && !Language.find_by_filename(name)
    end

    # Public: Get the Content-Type header value
    #
    # This value is used when serving raw blobs.
@@ -139,7 +154,28 @@ module Linguist
    #
    # Return true or false
    def image?
      ['.png', '.jpg', '.jpeg', '.gif'].include?(extname)
      ['.png', '.jpg', '.jpeg', '.gif'].include?(extname.downcase)
    end

    # Public: Is the blob a supported 3D model format?
    #
    # Return true or false
    def solid?
      extname.downcase == '.stl'
    end

    # Public: Is this blob a CSV file?
    #
    # Return true or false
    def csv?
      text? && extname.downcase == '.csv'
    end

    # Public: Is the blob a PDF?
    #
    # Return true or false
    def pdf?
      extname.downcase == '.pdf'
    end

    MEGABYTE = 1024 * 1024
@@ -153,10 +189,9 @@ module Linguist

    # Public: Is the blob safe to colorize?
    #
    # We use Pygments.rb for syntax highlighting blobs, which
    # has some quirks and also is essentially 'un-killable' via
    # normal timeout. To workaround this we try to
    # carefully handling Pygments.rb anything it can't handle.
    # We use Pygments for syntax highlighting blobs. Pygments
    # can be too slow for very large blobs or for certain
    # corner-case blobs.
    #
    # Return true or false
    def safe_to_colorize?
@@ -206,31 +241,12 @@ module Linguist
    def lines
      @lines ||=
        if viewable? && data
          data.split(line_split_character, -1)
          data.split(/\r\n|\r|\n/, -1)
        else
          []
        end
    end

    # Character used to split lines. This is almost always "\n" except when Mac
    # Format is detected in which case it's "\r".
    #
    # Returns a split pattern string.
    def line_split_character
      @line_split_character ||= (mac_format?? "\r" : "\n")
    end

    # Public: Is the data in ** Mac Format **. This format uses \r (0x0d) characters
    # for line ends and does not include a \n (0x0a).
    #
    # Returns true when mac format is detected.
    def mac_format?
      return if !viewable?
      if pos = data[0, 4096].index("\r")
        data[pos + 1] != ?\n
      end
    end

    # Public: Get number of lines of code
    #
    # Requires Blob#data
@@ -251,7 +267,7 @@ module Linguist

    # Public: Is the blob a generated file?
    #
    # Generated source code is supressed in diffs and is ignored by
    # Generated source code is suppressed in diffs and is ignored by
    # language statistics.
    #
    # May load Blob#data
@@ -261,36 +277,6 @@ module Linguist
      @_generated ||= Generated.generated?(name, lambda { data })
    end

    # Public: Should the blob be indexed for searching?
    #
    # Excluded:
    # - Files over 0.1MB
    # - Non-text files
    # - Langauges marked as not searchable
    # - Generated source files
    #
    # Please add additional test coverage to
    # `test/test_blob.rb#test_indexable` if you make any changes.
    #
    # Return true or false
    def indexable?
      if size > 100 * 1024
        false
      elsif binary?
        false
      elsif extname == '.txt'
        true
      elsif language.nil?
        false
      elsif !language.searchable?
        false
      elsif generated?
        false
      else
        true
      end
    end

    # Public: Detects the Language of the blob.
    #
    # May load Blob#data
@@ -326,19 +312,5 @@ module Linguist
      options[:options][:encoding] ||= encoding
      lexer.highlight(data, options)
    end

    # Public: Highlight syntax of blob without the outer highlight div
    # wrapper.
    #
    # options - A Hash of options (defaults to {})
    #
    # Returns html String
    def colorize_without_wrapper(options = {})
      if text = colorize(options)
        text[%r{<div class="highlight"><pre>(.*?)</pre>\s*</div>}m, 1]
      else
        ''
      end
    end
  end
end
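A hedged usage sketch of the BlobHelper predicates touched above, via `Linguist::FileBlob` (which mixes in `BlobHelper`); the file paths are made up:

```ruby
require 'linguist/file_blob'

# Extension-based predicates need no file contents.
img = Linguist::FileBlob.new("docs/diagram.PNG")  # hypothetical path
img.image?   # => true  (extension check is now case-insensitive)
img.pdf?     # => false
img.solid?   # => false ('.stl' files only)

# Content-based helpers read the file lazily.
src = Linguist::FileBlob.new("lib/linguist.rb")
src.lines.length   # lines are split on \r\n, \r, or \n as shown above
src.generated?     # delegates to Generated.generated?(name, lambda { data })
```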
lib/linguist/classifier.rb
@@ -14,6 +14,9 @@ module Linguist
    # Classifier.train(db, 'Ruby', "def hello; end")
    #
    # Returns nothing.
    #
    # Set LINGUIST_DEBUG=1 or =2 to see probabilities per-token or
    # per-language. See also #dump_all_tokens, below.
    def self.train!(db, language, data)
      tokens = Tokenizer.tokenize(data)
@@ -40,7 +43,7 @@ module Linguist

    # Public: Guess language of data.
    #
    # db - Hash of classifer tokens database.
    # db - Hash of classifier tokens database.
    # data - Array of tokens or String data to analyze.
    # languages - Array of language name Strings to restrict to.
    #
@@ -75,17 +78,19 @@ module Linguist
    def classify(tokens, languages)
      return [] if tokens.nil?
      tokens = Tokenizer.tokenize(tokens) if tokens.is_a?(String)

      scores = {}

      debug_dump_all_tokens(tokens, languages) if verbosity >= 2

      languages.each do |language|
        scores[language] = tokens_probability(tokens, language) +
          language_probability(language)
        scores[language] = tokens_probability(tokens, language) + language_probability(language)
        debug_dump_probabilities(tokens, language, scores[language]) if verbosity >= 1
      end

      scores.sort { |a, b| b[1] <=> a[1] }.map { |score| [score[0], score[1]] }
    end

    # Internal: Probably of set of tokens in a language occuring - P(D | C)
    # Internal: Probably of set of tokens in a language occurring - P(D | C)
    #
    # tokens - Array of String tokens.
    # language - Language to check.
@@ -97,7 +102,7 @@ module Linguist
      end
    end

    # Internal: Probably of token in language occuring - P(F | C)
    # Internal: Probably of token in language occurring - P(F | C)
    #
    # token - String token.
    # language - Language to check.
@@ -111,7 +116,7 @@ module Linguist
      end
    end

    # Internal: Probably of a language occuring - P(C)
    # Internal: Probably of a language occurring - P(C)
    #
    # language - Language to check.
    #
@@ -119,5 +124,48 @@ module Linguist
    def language_probability(language)
      Math.log(@languages[language].to_f / @languages_total.to_f)
    end

    private
    def verbosity
      @verbosity ||= (ENV['LINGUIST_DEBUG'] || 0).to_i
    end

    def debug_dump_probabilities(tokens, language, score)
      printf("%10s = %10.3f + %7.3f = %10.3f\n",
        language, tokens_probability(tokens, language), language_probability(language), score)
    end

    # Internal: show a table of probabilities for each <token,language> pair.
    #
    # The number in each table entry is the number of "points" that each
    # token contributes toward the belief that the file under test is a
    # particular language. Points are additive.
    #
    # Points are the number of times a token appears in the file, times
    # how much more likely (log of probability ratio) that token is to
    # appear in one language vs. the least-likely language. Dashes
    # indicate the least-likely language (and zero points) for each token.
    def debug_dump_all_tokens(tokens, languages)
      maxlen = tokens.map { |tok| tok.size }.max

      printf "%#{maxlen}s", ""
      puts " #" + languages.map { |lang| sprintf("%10s", lang) }.join

      token_map = Hash.new(0)
      tokens.each { |tok| token_map[tok] += 1 }

      token_map.sort.each { |tok, count|
        arr = languages.map { |lang| [lang, token_probability(tok, lang)] }
        min = arr.map { |a,b| b }.min
        minlog = Math.log(min)
        if !arr.inject(true) { |result, n| result && n[1] == arr[0][1] }
          printf "%#{maxlen}s%5d", tok, count

          puts arr.map { |ent|
            ent[1] == min ? " -" : sprintf("%10.3f", count * (Math.log(ent[1]) - minlog))
          }.join
        end
      }
    end
  end
end
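For context, a minimal sketch of how the classifier API above is driven (not part of the diff; the sample strings are made up, and `Samples::DATA` is the pre-trained token database referenced from `language.rb`):

```ruby
require 'linguist/classifier'
require 'linguist/samples'

# Work on a copy of the shipped token database before training on it.
db = Marshal.load(Marshal.dump(Linguist::Samples::DATA))
Linguist::Classifier.train!(db, 'Ruby', "def hello; puts 'hi'; end")

# `classify` returns [[language_name, score], ...] sorted best-first; the
# score is a summed log-probability. Set LINGUIST_DEBUG=1 or 2 to see the
# per-language / per-token dumps implemented above.
results = Linguist::Classifier.classify(db, "def hello; end", ['Ruby', 'Python'])
best_language, score = results.first
puts "#{best_language} (#{score})"
```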
lib/linguist/generated.rb
@@ -43,7 +43,7 @@ module Linguist

    # Internal: Is the blob a generated file?
    #
    # Generated source code is supressed in diffs and is ignored by
    # Generated source code is suppressed in diffs and is ignored by
    # language statistics.
    #
    # Please add additional test coverage to
@@ -52,11 +52,18 @@ module Linguist
    # Return true or false
    def generated?
      name == 'Gemfile.lock' ||
        minified_javascript? ||
        minified_files? ||
        compiled_coffeescript? ||
        xcode_project_file? ||
        generated_parser? ||
        generated_net_docfile? ||
        generated_parser?
        generated_net_designer_file? ||
        generated_postscript? ||
        generated_protocol_buffer? ||
        generated_jni_header? ||
        composer_lock? ||
        node_modules? ||
        vcr_cassette?
    end

    # Internal: Is the blob an XCode project file?
@@ -69,16 +76,18 @@ module Linguist
      ['.xib', '.nib', '.storyboard', '.pbxproj', '.xcworkspacedata', '.xcuserstate'].include?(extname)
    end

    # Internal: Is the blob minified JS?
    # Internal: Is the blob minified files?
    #
    # Consider JS minified if the average line length is
    # greater then 100c.
    # Consider a file minified if the average line length is
    # greater then 110c.
    #
    # Currently, only JS and CSS files are detected by this method.
    #
    # Returns true or false.
    def minified_javascript?
      return unless extname == '.js'
    def minified_files?
      return unless ['.js', '.css'].include? extname
      if lines.any?
        (lines.inject(0) { |n, l| n += l.length } / lines.length) > 100
        (lines.inject(0) { |n, l| n += l.length } / lines.length) > 110
      else
        false
      end
@@ -86,7 +95,7 @@ module Linguist

    # Internal: Is the blob of JS generated by CoffeeScript?
    #
    # CoffeScript is meant to output JS that would be difficult to
    # CoffeeScript is meant to output JS that would be difficult to
    # tell if it was generated or not. Look for a number of patterns
    # output by the CS compiler.
    #
@@ -142,6 +151,16 @@ module Linguist
      lines[-2].include?("</doc>")
    end

    # Internal: Is this a codegen file for a .NET project?
    #
    # Visual Studio often uses code generation to generate partial classes, and
    # these files can be quite unwieldy. Let's hide them.
    #
    # Returns true or false
    def generated_net_designer_file?
      name.downcase =~ /\.designer\.cs$/
    end

    # Internal: Is the blob of JS a parser generated by PEG.js?
    #
    # PEG.js-generated parsers are not meant to be consumed by humans.
@@ -158,5 +177,74 @@ module Linguist

      false
    end

    # Internal: Is the blob of PostScript generated?
    #
    # PostScript files are often generated by other programs. If they tell us so,
    # we can detect them.
    #
    # Returns true or false.
    def generated_postscript?
      return false unless ['.ps', '.eps'].include? extname

      # We analyze the "%%Creator:" comment, which contains the author/generator
      # of the file. If there is one, it should be in one of the first few lines.
      creator = lines[0..9].find {|line| line =~ /^%%Creator: /}
      return false if creator.nil?

      # Most generators write their version number, while human authors' or companies'
      # names don't contain numbers. So look if the line contains digits. Also
      # look for some special cases without version numbers.
      return creator =~ /[0-9]/ ||
        creator.include?("mpage") ||
        creator.include?("draw") ||
        creator.include?("ImageMagick")
    end

    # Internal: Is the blob a C++, Java or Python source file generated by the
    # Protocol Buffer compiler?
    #
    # Returns true of false.
    def generated_protocol_buffer?
      return false unless ['.py', '.java', '.h', '.cc', '.cpp'].include?(extname)
      return false unless lines.count > 1

      return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
    end

    # Internal: Is the blob a C/C++ header generated by the Java JNI tool javah?
    #
    # Returns true of false.
    def generated_jni_header?
      return false unless extname == '.h'
      return false unless lines.count > 2

      return lines[0].include?("/* DO NOT EDIT THIS FILE - it is machine generated */") &&
        lines[1].include?("#include <jni.h>")
    end

    # Internal: Is the blob part of node_modules/, which are not meant for humans in pull requests.
    #
    # Returns true or false.
    def node_modules?
      !!name.match(/node_modules\//)
    end

    # Internal: Is the blob a generated php composer lock file?
    #
    # Returns true or false.
    def composer_lock?
      !!name.match(/composer.lock/)
    end

    # Is the blob a VCR Cassette file?
    #
    # Returns true or false
    def vcr_cassette?
      return false unless extname == '.yml'
      return false unless lines.count > 2
      # VCR Cassettes have "recorded_with: VCR" in the second last line.
      return lines[-2].include?("recorded_with: VCR")
    end
  end
end
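A hedged sketch of how these checks are reached, mirroring the `Generated.generated?(name, lambda { data })` call in `blob_helper.rb` above; the file names and contents here are illustrative:

```ruby
require 'linguist/generated'

# The second argument is a lambda so file contents are only read when a
# check actually needs them; the first two examples match on name alone.
Linguist::Generated.generated?("Gemfile.lock", lambda { "" })   # => true
Linguist::Generated.generated?("composer.lock", lambda { "" })  # => true

# A javah-style header trips the new generated_jni_header? check.
jni = "/* DO NOT EDIT THIS FILE - it is machine generated */\n#include <jni.h>\n"
Linguist::Generated.generated?("native/jni/Foo.h", lambda { jni })  # => true
```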
lib/linguist/heuristics.rb (new file, 90 lines)
@@ -0,0 +1,90 @@
module Linguist
  # A collection of simple heuristics that can be used to better analyze languages.
  class Heuristics
    ACTIVE = false

    # Public: Given an array of String language names,
    # apply heuristics against the given data and return an array
    # of matching languages, or nil.
    #
    # data - Array of tokens or String data to analyze.
    # languages - Array of language name Strings to restrict to.
    #
    # Returns an array of Languages or []
    def self.find_by_heuristics(data, languages)
      if active?
        if languages.all? { |l| ["Objective-C", "C++"].include?(l) }
          disambiguate_c(data, languages)
        end
        if languages.all? { |l| ["Perl", "Prolog"].include?(l) }
          disambiguate_pl(data, languages)
        end
        if languages.all? { |l| ["ECL", "Prolog"].include?(l) }
          disambiguate_ecl(data, languages)
        end
        if languages.all? { |l| ["TypeScript", "XML"].include?(l) }
          disambiguate_ts(data, languages)
        end
        if languages.all? { |l| ["Common Lisp", "OpenCL"].include?(l) }
          disambiguate_cl(data, languages)
        end
        if languages.all? { |l| ["Rebol", "R"].include?(l) }
          disambiguate_r(data, languages)
        end
      end
    end

    # .h extensions are ambigious between C, C++, and Objective-C.
    # We want to shortcut look for Objective-C _and_ now C++ too!
    #
    # Returns an array of Languages or []
    def self.disambiguate_c(data, languages)
      matches = []
      matches << Language["Objective-C"] if data.include?("@interface")
      matches << Language["C++"] if data.include?("#include <cstdint>")
      matches
    end

    def self.disambiguate_pl(data, languages)
      matches = []
      matches << Language["Prolog"] if data.include?(":-")
      matches << Language["Perl"] if data.include?("use strict")
      matches
    end

    def self.disambiguate_ecl(data, languages)
      matches = []
      matches << Language["Prolog"] if data.include?(":-")
      matches << Language["ECL"] if data.include?(":=")
      matches
    end

    def self.disambiguate_ts(data, languages)
      matches = []
      if (data.include?("</translation>"))
        matches << Language["XML"]
      else
        matches << Language["TypeScript"]
      end
      matches
    end

    def self.disambiguate_cl(data, languages)
      matches = []
      matches << Language["Common Lisp"] if data.include?("(defun ")
      matches << Language["OpenCL"] if /\/\* |\/\/ |^\}/.match(data)
      matches
    end

    def self.disambiguate_r(data, languages)
      matches = []
      matches << Language["Rebol"] if /\bRebol\b/i.match(data)
      matches << Language["R"] if data.include?("<-")
      matches
    end

    def self.active?
      !!ACTIVE
    end
  end
end
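For context, a hedged sketch of exercising the heuristics above. Note that `ACTIVE` is `false` in this revision, so `find_by_heuristics` returns `nil` unless the class is switched on, while the `disambiguate_*` helpers can be called directly; the sample strings are made up:

```ruby
require 'linguist'  # loads Language, which the heuristics reference

# With ACTIVE = false, the dispatcher is a no-op:
Linguist::Heuristics.find_by_heuristics("#include <cstdint>\n",
                                        ["Objective-C", "C++"])  # => nil

# The individual heuristics still work when called directly:
Linguist::Heuristics.disambiguate_c("#include <cstdint>\n",
                                    ["Objective-C", "C++"])
# => [Language["C++"]]

Linguist::Heuristics.disambiguate_pl("use strict;\nprint 'hi';\n",
                                     ["Perl", "Prolog"])
# => [Language["Perl"]]
```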
@@ -1,8 +1,13 @@
|
||||
require 'escape_utils'
|
||||
require 'pygments'
|
||||
require 'yaml'
|
||||
begin
|
||||
require 'json'
|
||||
rescue LoadError
|
||||
end
|
||||
|
||||
require 'linguist/classifier'
|
||||
require 'linguist/heuristics'
|
||||
require 'linguist/samples'
|
||||
|
||||
module Linguist
|
||||
@@ -15,11 +20,30 @@ module Linguist
|
||||
@index = {}
|
||||
@name_index = {}
|
||||
@alias_index = {}
|
||||
@extension_index = Hash.new { |h,k| h[k] = [] }
|
||||
@filename_index = Hash.new { |h,k| h[k] = [] }
|
||||
|
||||
@extension_index = Hash.new { |h,k| h[k] = [] }
|
||||
@interpreter_index = Hash.new { |h,k| h[k] = [] }
|
||||
@filename_index = Hash.new { |h,k| h[k] = [] }
|
||||
@primary_extension_index = {}
|
||||
|
||||
# Valid Languages types
|
||||
TYPES = [:data, :markup, :programming]
|
||||
TYPES = [:data, :markup, :programming, :prose]
|
||||
|
||||
# Names of non-programming languages that we will still detect
|
||||
#
|
||||
# Returns an array
|
||||
def self.detectable_markup
|
||||
["CSS", "Less", "Sass", "SCSS", "Stylus", "TeX"]
|
||||
end
|
||||
|
||||
# Detect languages by a specific type
|
||||
#
|
||||
# type - A symbol that exists within TYPES
|
||||
#
|
||||
# Returns an array
|
||||
def self.by_type(type)
|
||||
all.select { |h| h.type == type }
|
||||
end
|
||||
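A small usage sketch for the two helpers above:

Linguist::Language.detectable_markup   # => ["CSS", "Less", "Sass", "SCSS", "Stylus", "TeX"]
Linguist::Language.by_type(:prose)     # every Language whose type is :prose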
|
||||
# Internal: Create a new Language object
|
||||
#
|
||||
@@ -56,6 +80,16 @@ module Linguist
|
||||
@extension_index[extension] << language
|
||||
end
|
||||
|
||||
if @primary_extension_index.key?(language.primary_extension)
|
||||
raise ArgumentError, "Duplicate primary extension: #{language.primary_extension}"
|
||||
end
|
||||
|
||||
@primary_extension_index[language.primary_extension] = language
|
||||
|
||||
language.interpreters.each do |interpreter|
|
||||
@interpreter_index[interpreter] << language
|
||||
end
|
||||
|
||||
language.filenames.each do |filename|
|
||||
@filename_index[filename] << language
|
||||
end
|
||||
@@ -73,23 +107,39 @@ module Linguist
|
||||
#
|
||||
# Returns Language or nil.
|
||||
def self.detect(name, data, mode = nil)
|
||||
# A bit of an elegant hack. If the file is exectable but extensionless,
|
||||
# A bit of an elegant hack. If the file is executable but extensionless,
|
||||
# append a "magic" extension so it can be classified with other
|
||||
# languages that have shebang scripts.
|
||||
if File.extname(name).empty? && mode && (mode.to_i(8) & 05) == 05
|
||||
name += ".script!"
|
||||
end
|
||||
|
||||
# First try to find languages that match based on filename.
|
||||
possible_languages = find_by_filename(name)
|
||||
|
||||
# If there is more than one possible language with that extension (or no
|
||||
# extension at all, in the case of extensionless scripts), we need to continue
|
||||
# our detection work
|
||||
if possible_languages.length > 1
|
||||
data = data.call() if data.respond_to?(:call)
|
||||
possible_language_names = possible_languages.map(&:name)
|
||||
|
||||
# Don't bother with emptiness
|
||||
if data.nil? || data == ""
|
||||
nil
|
||||
elsif result = Classifier.classify(Samples::DATA, data, possible_languages.map(&:name)).first
|
||||
Language[result[0]]
|
||||
# Check if there's a shebang line and use that as authoritative
|
||||
elsif (result = find_by_shebang(data)) && !result.empty?
|
||||
result.first
|
||||
# No shebang. Still more work to do. Try to find it with our heuristics.
|
||||
elsif (determined = Heuristics.find_by_heuristics(data, possible_language_names)) && !determined.empty?
|
||||
determined.first
|
||||
# Lastly, fall back to the probabilistic classifier.
|
||||
elsif classified = Classifier.classify(Samples::DATA, data, possible_language_names).first
|
||||
# Return the actual Language object based on the string language name (i.e., first element of `#classify`)
|
||||
Language[classified[0]]
|
||||
end
|
||||
else
|
||||
# Simplest and most common case: we can just return the one match based on extension
|
||||
possible_languages.first
|
||||
end
|
||||
end
|
||||
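A hedged sketch of driving the detection cascade above; the path and mode are invented, and the lambda keeps the file unread unless the filename alone is ambiguous.

name = "bin/deploy"                  # hypothetical extensionless executable
mode = "0755"                        # (mode.to_i(8) & 05) == 05, so ".script!" is appended
data = lambda { File.read(name) }    # only called when several languages share the lookup

language = Linguist::Language.detect(name, data, mode)
puts language.name if language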
@@ -141,7 +191,24 @@ module Linguist
|
||||
# Returns all matching Languages or [] if none were found.
|
||||
def self.find_by_filename(filename)
|
||||
basename, extname = File.basename(filename), File.extname(filename)
|
||||
@filename_index[basename] + @extension_index[extname]
|
||||
langs = [@primary_extension_index[extname]] +
|
||||
@filename_index[basename] +
|
||||
@extension_index[extname]
|
||||
langs.compact.uniq
|
||||
end
|
||||
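Hedged examples of the lookup above: the primary-extension match comes first, then filename and extension matches, with nils and duplicates dropped.

Linguist::Language.find_by_filename("foo.rb")    # => [#<Language name="Ruby">]
Linguist::Language.find_by_filename("Rakefile")  # extname is "", so only the filename index applies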
|
||||
# Public: Look up Languages by shebang line.
|
||||
#
|
||||
# data - Array of tokens or String data to analyze.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# Language.find_by_shebang("#!/bin/bash\ndate;")
|
||||
# # => [#<Language name="Bash">]
|
||||
#
|
||||
# Returns the matching Language
|
||||
def self.find_by_shebang(data)
|
||||
@interpreter_index[Linguist.interpreter_from_shebang(data)]
|
||||
end
|
||||
|
||||
# Public: Look up Language by its name or lexer.
|
||||
@@ -222,12 +289,14 @@ module Linguist
|
||||
raise(ArgumentError, "#{@name} is missing lexer")
|
||||
|
||||
@ace_mode = attributes[:ace_mode]
|
||||
@wrap = attributes[:wrap] || false
|
||||
|
||||
# Set legacy search term
|
||||
@search_term = attributes[:search_term] || default_alias_name
|
||||
|
||||
# Set extensions or default to [].
|
||||
@extensions = attributes[:extensions] || []
|
||||
@interpreters = attributes[:interpreters] || []
|
||||
@filenames = attributes[:filenames] || []
|
||||
|
||||
unless @primary_extension = attributes[:primary_extension]
|
||||
@@ -312,6 +381,11 @@ module Linguist
|
||||
# Returns a String name or nil
|
||||
attr_reader :ace_mode
|
||||
|
||||
# Public: Should language lines be wrapped
|
||||
#
|
||||
# Returns true or false
|
||||
attr_reader :wrap
|
||||
|
||||
# Public: Get extensions
|
||||
#
|
||||
# Examples
|
||||
@@ -323,7 +397,7 @@ module Linguist
|
||||
|
||||
# Deprecated: Get primary extension
|
||||
#
|
||||
# Defaults to the first extension but can be overriden
|
||||
# Defaults to the first extension but can be overridden
|
||||
# in the languages.yml.
|
||||
#
|
||||
# The primary extension can not be nil. Tests should verify this.
|
||||
@@ -335,6 +409,15 @@ module Linguist
|
||||
# Returns the extension String.
|
||||
attr_reader :primary_extension
|
||||
|
||||
# Public: Get interpreters
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# # => ['awk', 'gawk', 'mawk' ...]
|
||||
#
|
||||
# Returns the interpreters Array
|
||||
attr_reader :interpreters
|
||||
|
||||
# Public: Get filenames
|
||||
#
|
||||
# Examples
|
||||
@@ -343,6 +426,11 @@ module Linguist
|
||||
#
|
||||
# Returns the extensions Array
|
||||
attr_reader :filenames
|
||||
|
||||
# Public: Return all possible extensions for language
|
||||
def all_extensions
|
||||
(extensions + [primary_extension]).uniq
|
||||
end
|
||||
|
||||
# Public: Get URL escaped name.
|
||||
#
|
||||
@@ -402,7 +490,7 @@ module Linguist
|
||||
#
|
||||
# Returns html String
|
||||
def colorize(text, options = {})
|
||||
lexer.highlight(text, options = {})
|
||||
lexer.highlight(text, options)
|
||||
end
|
||||
|
||||
# Public: Return name as String representation
|
||||
@@ -428,19 +516,40 @@ module Linguist
|
||||
end
|
||||
|
||||
extensions = Samples::DATA['extnames']
|
||||
interpreters = Samples::DATA['interpreters']
|
||||
filenames = Samples::DATA['filenames']
|
||||
popular = YAML.load_file(File.expand_path("../popular.yml", __FILE__))
|
||||
|
||||
YAML.load_file(File.expand_path("../languages.yml", __FILE__)).each do |name, options|
|
||||
languages_yml = File.expand_path("../languages.yml", __FILE__)
|
||||
languages_json = File.expand_path("../languages.json", __FILE__)
|
||||
|
||||
if File.exist?(languages_json) && defined?(JSON)
|
||||
languages = JSON.load(File.read(languages_json))
|
||||
else
|
||||
languages = YAML.load_file(languages_yml)
|
||||
end
|
||||
|
||||
languages.each do |name, options|
|
||||
options['extensions'] ||= []
|
||||
options['interpreters'] ||= []
|
||||
options['filenames'] ||= []
|
||||
|
||||
if extnames = extensions[name]
|
||||
extnames.each do |extname|
|
||||
if !options['extensions'].include?(extname)
|
||||
options['extensions'] << extname
|
||||
else
|
||||
warn "#{name} #{extname.inspect} is already defined in samples/. Remove from languages.yml."
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if interpreters == nil
|
||||
interpreters = {}
|
||||
end
|
||||
|
||||
if interpreter_names = interpreters[name]
|
||||
interpreter_names.each do |interpreter|
|
||||
if !options['interpreters'].include?(interpreter)
|
||||
options['interpreters'] << interpreter
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -449,8 +558,6 @@ module Linguist
|
||||
fns.each do |filename|
|
||||
if !options['filenames'].include?(filename)
|
||||
options['filenames'] << filename
|
||||
else
|
||||
warn "#{name} #{filename.inspect} is already defined in samples/. Remove from languages.yml."
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -462,10 +569,12 @@ module Linguist
|
||||
:aliases => options['aliases'],
|
||||
:lexer => options['lexer'],
|
||||
:ace_mode => options['ace_mode'],
|
||||
:wrap => options['wrap'],
|
||||
:group_name => options['group'],
|
||||
:searchable => options.key?('searchable') ? options['searchable'] : true,
|
||||
:search_term => options['search_term'],
|
||||
:extensions => options['extensions'].sort,
|
||||
:interpreters => options['interpreters'].sort,
|
||||
:primary_extension => options['primary_extension'],
|
||||
:filenames => options['filenames'],
|
||||
:popular => popular.include?(name)
|
||||
|
||||
File diff suppressed because it is too large
@@ -4,7 +4,7 @@ module Linguist
|
||||
module MD5
|
||||
# Public: Create deep nested digest of value object.
|
||||
#
|
||||
# Useful for object comparsion.
|
||||
# Useful for object comparison.
|
||||
#
|
||||
# obj - Object to digest.
|
||||
#
|
||||
|
||||
@@ -8,6 +8,8 @@
|
||||
- C#
|
||||
- C++
|
||||
- CSS
|
||||
- Clojure
|
||||
- CoffeeScript
|
||||
- Common Lisp
|
||||
- Diff
|
||||
- Emacs Lisp
|
||||
@@ -25,5 +27,3 @@
|
||||
- SQL
|
||||
- Scala
|
||||
- Scheme
|
||||
- TeX
|
||||
- XML
|
||||
|
||||
@@ -29,6 +29,7 @@ module Linguist
|
||||
@computed_stats = false
|
||||
@language = @size = nil
|
||||
@sizes = Hash.new { 0 }
|
||||
@file_breakdown = Hash.new { |h,k| h[k] = Array.new }
|
||||
end
|
||||
|
||||
# Public: Returns a breakdown of language stats.
|
||||
@@ -60,6 +61,12 @@ module Linguist
|
||||
@size
|
||||
end
|
||||
|
||||
# Public: Return the language breakdown of this repository by file
|
||||
def breakdown_by_file
|
||||
compute_stats
|
||||
@file_breakdown
|
||||
end
|
||||
|
||||
# Internal: Compute language breakdown for each blob in the Repository.
|
||||
#
|
||||
# Returns nothing
|
||||
@@ -67,14 +74,18 @@ module Linguist
|
||||
return if @computed_stats
|
||||
|
||||
@enum.each do |blob|
|
||||
# Skip binary file extensions
|
||||
next if blob.binary_mime_type?
|
||||
# Skip files that are likely binary
|
||||
next if blob.likely_binary?
|
||||
|
||||
# Skip vendored or generated blobs
|
||||
next if blob.vendored? || blob.generated? || blob.language.nil?
|
||||
|
||||
# Only include programming languages
|
||||
if blob.language.type == :programming
|
||||
# Only include programming languages and acceptable markup languages
|
||||
if blob.language.type == :programming || Language.detectable_markup.include?(blob.language.name)
|
||||
|
||||
# Build up the per-file breakdown stats
|
||||
@file_breakdown[blob.language.group.name] << blob.name
|
||||
|
||||
@sizes[blob.language.group] += blob.size
|
||||
end
|
||||
end
|
||||
|
||||
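A hedged sketch of the per-file breakdown collected above; `repo` stands for any Linguist::Repository instance and the paths are invented.

repo.breakdown_by_file
# => {
#      "Ruby"       => ["lib/linguist.rb", "Rakefile"],
#      "JavaScript" => ["app/assets/app.js"]
#    }
# keys are language group names, values the blob names counted for them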
File diff suppressed because it is too large
@@ -1,4 +1,8 @@
|
||||
require 'yaml'
|
||||
begin
|
||||
require 'json'
|
||||
rescue LoadError
|
||||
require 'yaml'
|
||||
end
|
||||
|
||||
require 'linguist/md5'
|
||||
require 'linguist/classifier'
|
||||
@@ -14,7 +18,8 @@ module Linguist
|
||||
|
||||
# Hash of serialized samples object
|
||||
if File.exist?(PATH)
|
||||
DATA = YAML.load_file(PATH)
|
||||
serializer = defined?(JSON) ? JSON : YAML
|
||||
DATA = serializer.load(File.read(PATH))
|
||||
end
|
||||
|
||||
# Public: Iterate over each sample.
|
||||
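The same guarded-require and serializer-fallback pattern in isolation, as a minimal sketch (the path is hypothetical):

begin
  require 'json'
rescue LoadError
  # no JSON on this Ruby; YAML below handles the load instead
end
require 'yaml'

serializer = defined?(JSON) ? JSON : YAML
data = serializer.load(File.read("samples.json"))   # hypothetical serialized samples file
puts data.keys.inspect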
@@ -52,6 +57,7 @@ module Linguist
|
||||
yield({
|
||||
:path => File.join(dirname, filename),
|
||||
:language => category,
|
||||
:interpreter => File.exist?(filename) ? Linguist.interpreter_from_shebang(File.read(filename)) : nil,
|
||||
:extname => File.extname(filename)
|
||||
})
|
||||
end
|
||||
@@ -67,6 +73,7 @@ module Linguist
|
||||
def self.data
|
||||
db = {}
|
||||
db['extnames'] = {}
|
||||
db['interpreters'] = {}
|
||||
db['filenames'] = {}
|
||||
|
||||
each do |sample|
|
||||
@@ -80,6 +87,14 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
if sample[:interpreter]
|
||||
db['interpreters'][language_name] ||= []
|
||||
if !db['interpreters'][language_name].include?(sample[:interpreter])
|
||||
db['interpreters'][language_name] << sample[:interpreter]
|
||||
db['interpreters'][language_name].sort!
|
||||
end
|
||||
end
|
||||
|
||||
if sample[:filename]
|
||||
db['filenames'][language_name] ||= []
|
||||
db['filenames'][language_name] << sample[:filename]
|
||||
@@ -95,4 +110,40 @@ module Linguist
|
||||
db
|
||||
end
|
||||
end
|
||||
|
||||
# Used to retrieve the interpreter from the shebang line of a file's
|
||||
# data.
|
||||
def self.interpreter_from_shebang(data)
|
||||
lines = data.lines.to_a
|
||||
|
||||
if lines.any? && (match = lines[0].match(/(.+)\n?/)) && (bang = match[0]) =~ /^#!/
|
||||
bang.sub!(/^#! /, '#!')
|
||||
tokens = bang.split(' ')
|
||||
pieces = tokens.first.split('/')
|
||||
|
||||
if pieces.size > 1
|
||||
script = pieces.last
|
||||
else
|
||||
script = pieces.first.sub('#!', '')
|
||||
end
|
||||
|
||||
script = script == 'env' ? tokens[1] : script
|
||||
|
||||
# "python2.6" -> "python"
|
||||
if script =~ /((?:\d+\.?)+)/
|
||||
script.sub! $1, ''
|
||||
end
|
||||
|
||||
# Check for multiline shebang hacks that call `exec`
|
||||
if script == 'sh' &&
|
||||
lines[0...5].any? { |l| l.match(/exec (\w+).+\$0.+\$@/) }
|
||||
script = $1
|
||||
end
|
||||
|
||||
script
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
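A few illustrative calls against `Linguist.interpreter_from_shebang` as defined above (inputs are made up for the example):

Linguist.interpreter_from_shebang("#!/usr/bin/env python2.6\nprint 'hi'\n")
# => "python"    env is skipped and the trailing version is stripped

Linguist.interpreter_from_shebang("#!/bin/bash\ndate;\n")
# => "bash"

Linguist.interpreter_from_shebang("puts 'no shebang here'\n")
# => nil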
@@ -32,7 +32,8 @@ module Linguist
|
||||
['/*', '*/'], # C
|
||||
['<!--', '-->'], # XML
|
||||
['{-', '-}'], # Haskell
|
||||
['(*', '*)'] # Coq
|
||||
['(*', '*)'], # Coq
|
||||
['"""', '"""'] # Python
|
||||
]
|
||||
|
||||
START_SINGLE_LINE_COMMENT = Regexp.compile(SINGLE_LINE_COMMENTS.map { |c|
|
||||
@@ -138,7 +139,7 @@ module Linguist
|
||||
s.scan(/\s+/)
|
||||
script = s.scan(/\S+/)
|
||||
end
|
||||
script = script[/[^\d]+/, 0]
|
||||
script = script[/[^\d]+/, 0] if script
|
||||
return script
|
||||
end
|
||||
|
||||
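A minimal illustration of why the `if script` guard added above matters: indexing a String with a regexp is fine, but calling it on nil (nothing scanned) would raise NoMethodError.

script = "python2.6"
script = script[/[^\d]+/, 0] if script   # => "python"

script = nil
script = script[/[^\d]+/, 0] if script   # guard keeps this nil instead of raising NoMethodError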
|
||||
@@ -10,25 +10,55 @@
|
||||
## Vendor Conventions ##
|
||||
|
||||
# Caches
|
||||
- cache/
|
||||
- (^|/)cache/
|
||||
|
||||
# Dependencies
|
||||
- ^[Dd]ependencies/
|
||||
|
||||
# C deps
|
||||
# https://github.com/joyent/node
|
||||
- ^deps/
|
||||
- ^tools/
|
||||
- (^|/)configure$
|
||||
- (^|/)configure.ac$
|
||||
- (^|/)config.guess$
|
||||
- (^|/)config.sub$
|
||||
|
||||
# Node depedencies
|
||||
# Node dependencies
|
||||
- node_modules/
|
||||
|
||||
# Vendored depedencies
|
||||
- vendor/
|
||||
# Bower Components
|
||||
- bower_components/
|
||||
|
||||
# Erlang bundles
|
||||
- ^rebar$
|
||||
|
||||
# Bootstrap minified css and js
|
||||
- (^|/)bootstrap([^.]*)(\.min)?\.(js|css)$
|
||||
|
||||
# Foundation css
|
||||
- foundation.min.css
|
||||
- foundation.css
|
||||
|
||||
# Vendored dependencies
|
||||
- thirdparty/
|
||||
- vendors?/
|
||||
|
||||
# Debian packaging
|
||||
- ^debian/
|
||||
|
||||
# Haxelib projects often contain a neko bytecode file named run.n
|
||||
- run.n$
|
||||
|
||||
## Commonly Bundled JavaScript frameworks ##
|
||||
|
||||
# jQuery
|
||||
- (^|/)jquery([^.]*)(\.min)?\.js$
|
||||
- (^|/)jquery\-\d\.\d(\.\d)?(\.min)?\.js$
|
||||
- (^|/)jquery\-\d\.\d+(\.\d+)?(\.min)?\.js$
|
||||
|
||||
# jQuery UI
|
||||
- (^|/)jquery\-ui(\-\d\.\d+(\.\d+)?)?(\.\w+)?(\.min)?\.(js|css)$
|
||||
- (^|/)jquery\.(ui|effects)\.([^.]*)(\.min)?\.(js|css)$
|
||||
|
||||
# Prototype
|
||||
- (^|/)prototype(.*)\.js$
|
||||
@@ -36,6 +66,9 @@
|
||||
- (^|/)controls\.js$
|
||||
- (^|/)dragdrop\.js$
|
||||
|
||||
# Typescript definition files
|
||||
- (.*?)\.d\.ts$
|
||||
|
||||
# MooTools
|
||||
- (^|/)mootools([^.]*)\d+\.\d+.\d+([^.]*)\.js$
|
||||
|
||||
@@ -49,10 +82,6 @@
|
||||
- (^|/)yahoo-([^.]*)\.js$
|
||||
- (^|/)yui([^.]*)\.js$
|
||||
|
||||
# LESS css
|
||||
- (^|/)less([^.]*)(\.min)?\.js$
|
||||
- (^|/)less\-\d+\.\d+\.\d+(\.min)?\.js$
|
||||
|
||||
# WYSIWYG editors
|
||||
- (^|/)ckeditor\.js$
|
||||
- (^|/)tiny_mce([^.]*)\.js$
|
||||
@@ -61,27 +90,52 @@
|
||||
# MathJax
|
||||
- (^|/)MathJax/
|
||||
|
||||
# SyntaxHighlighter - http://alexgorbatchev.com/
|
||||
- (^|/)shBrush([^.]*)\.js$
|
||||
- (^|/)shCore\.js$
|
||||
- (^|/)shLegacy\.js$
|
||||
|
||||
# AngularJS
|
||||
- (^|/)angular([^.]*)(\.min)?\.js$
|
||||
|
||||
# React
|
||||
- (^|/)react(-[^.]*)?(\.min)?\.js$
|
||||
|
||||
## Python ##
|
||||
|
||||
# django
|
||||
- (^|/)admin_media/
|
||||
|
||||
# Fabric
|
||||
- ^fabfile\.py$
|
||||
|
||||
# WAF
|
||||
- ^waf$
|
||||
|
||||
# .osx
|
||||
- ^.osx$
|
||||
|
||||
## Obj-C ##
|
||||
|
||||
# Sparkle
|
||||
- (^|/)Sparkle/
|
||||
|
||||
## Groovy ##
|
||||
|
||||
# Gradle
|
||||
- (^|/)gradlew$
|
||||
- (^|/)gradlew\.bat$
|
||||
- (^|/)gradle/wrapper/
|
||||
|
||||
## .NET ##
|
||||
|
||||
# Visual Studio IntelliSense
|
||||
- -vsdoc\.js$
|
||||
- \.intellisense\.js$
|
||||
|
||||
# jQuery validation plugin (MS bundles this with asp.net mvc)
|
||||
- (^|/)jquery([^.]*)\.validate(\.min)?\.js$
|
||||
- (^|/)jquery([^.]*)\.validate(\.unobtrusive)?(\.min)?\.js$
|
||||
- (^|/)jquery([^.]*)\.unobtrusive\-ajax(\.min)?\.js$
|
||||
|
||||
# Microsoft Ajax
|
||||
- (^|/)[Mm]icrosoft([Mm]vc)?([Aa]jax|[Vv]alidation)(\.debug)?\.js$
|
||||
@@ -90,7 +144,48 @@
|
||||
- ^[Pp]ackages/
|
||||
|
||||
# ExtJS
|
||||
- (^|/)extjs/
|
||||
- (^|/)extjs/.*?\.js$
|
||||
- (^|/)extjs/.*?\.xml$
|
||||
- (^|/)extjs/.*?\.txt$
|
||||
- (^|/)extjs/.*?\.html$
|
||||
- (^|/)extjs/.*?\.properties$
|
||||
- (^|/)extjs/.sencha/
|
||||
- (^|/)extjs/docs/
|
||||
- (^|/)extjs/builds/
|
||||
- (^|/)extjs/cmd/
|
||||
- (^|/)extjs/examples/
|
||||
- (^|/)extjs/locale/
|
||||
- (^|/)extjs/packages/
|
||||
- (^|/)extjs/plugins/
|
||||
- (^|/)extjs/resources/
|
||||
- (^|/)extjs/src/
|
||||
- (^|/)extjs/welcome/
|
||||
|
||||
# Samples folders
|
||||
- ^[Ss]amples/
|
||||
|
||||
# LICENSE, README, git config files
|
||||
- ^COPYING$
|
||||
- LICENSE$
|
||||
- License$
|
||||
- gitattributes$
|
||||
- gitignore$
|
||||
- gitmodules$
|
||||
- ^README$
|
||||
- ^readme$
|
||||
|
||||
# Test fixtures
|
||||
- ^[Tt]est/fixtures/
|
||||
|
||||
# PhoneGap/Cordova
|
||||
- (^|/)cordova([^.]*)(\.min)?\.js$
|
||||
- (^|/)cordova\-\d\.\d(\.\d)?(\.min)?\.js$
|
||||
|
||||
# Vagrant
|
||||
- ^Vagrantfile$
|
||||
|
||||
# .DS_Store's
|
||||
- .[Dd][Ss]_[Ss]tore$
|
||||
|
||||
# Mercury --use-subdirs
|
||||
- Mercury/
|
||||
|
||||
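A hedged sketch of how one pattern from the list above behaves; the paths are invented and the regexp is copied verbatim from the jQuery entry.

pattern = Regexp.new('(^|/)jquery\-\d\.\d+(\.\d+)?(\.min)?\.js$')
!!pattern.match("vendor/jquery-1.10.2.min.js")   # => true
!!pattern.match("lib/jquery-ui.js")              # => false, handled by the separate jQuery UI rules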
samples/ABAP/cl_csv_parser.abap (new file, 219 lines)
@@ -0,0 +1,219 @@
|
||||
*/**
|
||||
* The MIT License (MIT)
|
||||
* Copyright (c) 2012 René van Mil
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
*----------------------------------------------------------------------*
|
||||
* CLASS CL_CSV_PARSER DEFINITION
|
||||
*----------------------------------------------------------------------*
|
||||
*
|
||||
*----------------------------------------------------------------------*
|
||||
class cl_csv_parser definition
|
||||
public
|
||||
inheriting from cl_object
|
||||
final
|
||||
create public .
|
||||
|
||||
public section.
|
||||
*"* public components of class CL_CSV_PARSER
|
||||
*"* do not include other source files here!!!
|
||||
|
||||
type-pools abap .
|
||||
methods constructor
|
||||
importing
|
||||
!delegate type ref to if_csv_parser_delegate
|
||||
!csvstring type string
|
||||
!separator type c
|
||||
!skip_first_line type abap_bool .
|
||||
methods parse
|
||||
raising
|
||||
cx_csv_parse_error .
|
||||
protected section.
|
||||
*"* protected components of class CL_CSV_PARSER
|
||||
*"* do not include other source files here!!!
|
||||
private section.
|
||||
*"* private components of class CL_CSV_PARSER
|
||||
*"* do not include other source files here!!!
|
||||
|
||||
constants _textindicator type c value '"'. "#EC NOTEXT
|
||||
data _delegate type ref to if_csv_parser_delegate .
|
||||
data _csvstring type string .
|
||||
data _separator type c .
|
||||
type-pools abap .
|
||||
data _skip_first_line type abap_bool .
|
||||
|
||||
methods _lines
|
||||
returning
|
||||
value(returning) type stringtab .
|
||||
methods _parse_line
|
||||
importing
|
||||
!line type string
|
||||
returning
|
||||
value(returning) type stringtab
|
||||
raising
|
||||
cx_csv_parse_error .
|
||||
endclass. "CL_CSV_PARSER DEFINITION
|
||||
|
||||
|
||||
|
||||
*----------------------------------------------------------------------*
|
||||
* CLASS CL_CSV_PARSER IMPLEMENTATION
|
||||
*----------------------------------------------------------------------*
|
||||
*
|
||||
*----------------------------------------------------------------------*
|
||||
class cl_csv_parser implementation.
|
||||
|
||||
|
||||
* <SIGNATURE>---------------------------------------------------------------------------------------+
|
||||
* | Instance Public Method CL_CSV_PARSER->CONSTRUCTOR
|
||||
* +-------------------------------------------------------------------------------------------------+
|
||||
* | [--->] DELEGATE TYPE REF TO IF_CSV_PARSER_DELEGATE
|
||||
* | [--->] CSVSTRING TYPE STRING
|
||||
* | [--->] SEPARATOR TYPE C
|
||||
* | [--->] SKIP_FIRST_LINE TYPE ABAP_BOOL
|
||||
* +--------------------------------------------------------------------------------------</SIGNATURE>
|
||||
method constructor.
|
||||
super->constructor( ).
|
||||
_delegate = delegate.
|
||||
_csvstring = csvstring.
|
||||
_separator = separator.
|
||||
_skip_first_line = skip_first_line.
|
||||
endmethod. "constructor
|
||||
|
||||
|
||||
* <SIGNATURE>---------------------------------------------------------------------------------------+
|
||||
* | Instance Public Method CL_CSV_PARSER->PARSE
|
||||
* +-------------------------------------------------------------------------------------------------+
|
||||
* | [!CX!] CX_CSV_PARSE_ERROR
|
||||
* +--------------------------------------------------------------------------------------</SIGNATURE>
|
||||
method parse.
|
||||
data msg type string.
|
||||
if _csvstring is initial.
|
||||
message e002(csv) into msg.
|
||||
raise exception type cx_csv_parse_error
|
||||
exporting
|
||||
message = msg.
|
||||
endif.
|
||||
|
||||
" Get the lines
|
||||
data is_first_line type abap_bool value abap_true.
|
||||
data lines type standard table of string.
|
||||
lines = _lines( ).
|
||||
field-symbols <line> type string.
|
||||
loop at lines assigning <line>.
|
||||
" Should we skip the first line?
|
||||
if _skip_first_line = abap_true and is_first_line = abap_true.
|
||||
is_first_line = abap_false.
|
||||
continue.
|
||||
endif.
|
||||
" Parse the line
|
||||
data values type standard table of string.
|
||||
values = _parse_line( <line> ).
|
||||
" Send values to delegate
|
||||
_delegate->values_found( values ).
|
||||
endloop.
|
||||
endmethod. "parse
|
||||
|
||||
|
||||
* <SIGNATURE>---------------------------------------------------------------------------------------+
|
||||
* | Instance Private Method CL_CSV_PARSER->_LINES
|
||||
* +-------------------------------------------------------------------------------------------------+
|
||||
* | [<-()] RETURNING TYPE STRINGTAB
|
||||
* +--------------------------------------------------------------------------------------</SIGNATURE>
|
||||
method _lines.
|
||||
split _csvstring at cl_abap_char_utilities=>cr_lf into table returning.
|
||||
endmethod. "_lines
|
||||
|
||||
|
||||
* <SIGNATURE>---------------------------------------------------------------------------------------+
|
||||
* | Instance Private Method CL_CSV_PARSER->_PARSE_LINE
|
||||
* +-------------------------------------------------------------------------------------------------+
|
||||
* | [--->] LINE TYPE STRING
|
||||
* | [<-()] RETURNING TYPE STRINGTAB
|
||||
* | [!CX!] CX_CSV_PARSE_ERROR
|
||||
* +--------------------------------------------------------------------------------------</SIGNATURE>
|
||||
method _parse_line.
|
||||
data msg type string.
|
||||
|
||||
data csvvalue type string.
|
||||
data csvvalues type standard table of string.
|
||||
|
||||
data char type c.
|
||||
data pos type i value 0.
|
||||
data len type i.
|
||||
len = strlen( line ).
|
||||
while pos < len.
|
||||
char = line+pos(1).
|
||||
if char <> _separator.
|
||||
if char = _textindicator.
|
||||
data text_ended type abap_bool.
|
||||
text_ended = abap_false.
|
||||
while text_ended = abap_false.
|
||||
pos = pos + 1.
|
||||
if pos < len.
|
||||
char = line+pos(1).
|
||||
if char = _textindicator.
|
||||
text_ended = abap_true.
|
||||
else.
|
||||
if char is initial. " Space
|
||||
concatenate csvvalue ` ` into csvvalue.
|
||||
else.
|
||||
concatenate csvvalue char into csvvalue.
|
||||
endif.
|
||||
endif.
|
||||
else.
|
||||
" Reached the end of the line while inside a text value
|
||||
" This indicates an error in the CSV formatting
|
||||
text_ended = abap_true.
|
||||
message e003(csv) into msg.
|
||||
raise exception type cx_csv_parse_error
|
||||
exporting
|
||||
message = msg.
|
||||
endif.
|
||||
endwhile.
|
||||
" Check if next character is a separator, otherwise the CSV formatting is incorrect
|
||||
data nextpos type i.
|
||||
nextpos = pos + 1.
|
||||
if nextpos < len and line+nextpos(1) <> _separator.
|
||||
message e003(csv) into msg.
|
||||
raise exception type cx_csv_parse_error
|
||||
exporting
|
||||
message = msg.
|
||||
endif.
|
||||
else.
|
||||
if char is initial. " Space
|
||||
concatenate csvvalue ` ` into csvvalue.
|
||||
else.
|
||||
concatenate csvvalue char into csvvalue.
|
||||
endif.
|
||||
endif.
|
||||
else.
|
||||
append csvvalue to csvvalues.
|
||||
clear csvvalue.
|
||||
endif.
|
||||
pos = pos + 1.
|
||||
endwhile.
|
||||
append csvvalue to csvvalues. " Don't forget the last value
|
||||
|
||||
returning = csvvalues.
|
||||
endmethod. "_parse_line
|
||||
endclass. "CL_CSV_PARSER IMPLEMENTATION
|
||||
samples/ATS/CoYonedaLemma.dats (new file, 110 lines)
@@ -0,0 +1,110 @@
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2014-01
|
||||
// CoYoneda Lemma:
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
#include
|
||||
"share/atspre_staload.hats"
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
staload
|
||||
"libats/ML/SATS/basis.sats"
|
||||
staload
|
||||
"libats/ML/SATS/list0.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload _ = "libats/ML/DATS/list0.dats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
sortdef ftype = type -> type
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
infixr (->) ->>
|
||||
typedef ->> (a:type, b:type) = a -<cloref1> b
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
typedef
|
||||
functor(F:ftype) =
|
||||
{a,b:type} (a ->> b) ->> F(a) ->> F(b)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
typedef
|
||||
list0 (a:type) = list0 (a)
|
||||
extern
|
||||
val functor_list0 : functor (list0)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
functor_list0{a,b}
|
||||
(f) = lam xs => list0_map<a><b> (xs, f)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
datatype
|
||||
CoYoneda
|
||||
(F:ftype, r:type) = {a:type} CoYoneda of (a ->> r, F(a))
|
||||
// end of [CoYoneda]
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
extern
|
||||
fun CoYoneda_phi
|
||||
: {F:ftype}functor(F) -> {r:type} (F (r) ->> CoYoneda (F, r))
|
||||
extern
|
||||
fun CoYoneda_psi
|
||||
: {F:ftype}functor(F) -> {r:type} (CoYoneda (F, r) ->> F (r))
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
CoYoneda_phi(ftor) = lam (fx) => CoYoneda (lam x => x, fx)
|
||||
implement
|
||||
CoYoneda_psi(ftor) = lam (CoYoneda(f, fx)) => ftor (f) (fx)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
datatype int0 = I of (int)
|
||||
datatype bool = True | False // boxed boolean
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun bool2string
|
||||
(x:bool): string =
|
||||
(
|
||||
case+ x of True() => "True" | False() => "False"
|
||||
)
|
||||
//
|
||||
implement
|
||||
fprint_val<bool> (out, x) = fprint (out, bool2string(x))
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
fun int2bool (i: int0): bool =
|
||||
let val+I(i) = i in if i > 0 then True else False end
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
val myintlist0 = g0ofg1($list{int0}((I)1, (I)0, (I)1, (I)0, (I)0))
|
||||
val myboolist0 = CoYoneda{list0,bool}{int0}(lam (i) => int2bool(i), myintlist0)
|
||||
val myboolist0 = CoYoneda_psi{list0}(functor_list0){bool}(myboolist0)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
val ((*void*)) = fprintln! (stdout_ref, "myboolist0 = ", myboolist0)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement main0 () = ()
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [CoYonedaLemma.dats] *)
|
||||
samples/ATS/DiningPhil2.dats (new file, 178 lines)
@@ -0,0 +1,178 @@
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2013-11
|
||||
//
|
||||
// Implementing a variant of
|
||||
// the problem of Dining Philosophers
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
#include
|
||||
"share/atspre_define.hats"
|
||||
#include
|
||||
"share/atspre_staload.hats"
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
staload
|
||||
UN = "prelude/SATS/unsafe.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload "libc/SATS/stdlib.sats"
|
||||
staload "libc/SATS/unistd.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload "{$LIBATSHWXI}/teaching/mythread/SATS/channel.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload _ = "libats/DATS/deqarray.dats"
|
||||
staload _ = "{$LIBATSHWXI}/teaching/mythread/DATS/channel.dats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload "./DiningPhil2.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement phil_left (n) = n
|
||||
implement phil_right (n) = (n+1) \nmod NPHIL
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
extern
|
||||
fun randsleep (n: intGte(1)): void
|
||||
//
|
||||
implement
|
||||
randsleep (n) =
|
||||
ignoret (sleep($UN.cast{uInt}(rand() mod n + 1)))
|
||||
// end of [randsleep]
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
phil_think (n) =
|
||||
{
|
||||
val () = println! ("phil_think(", n, ") starts")
|
||||
val () = randsleep (6)
|
||||
val () = println! ("phil_think(", n, ") finishes")
|
||||
}
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
phil_dine (n, lf, rf) =
|
||||
{
|
||||
val () = println! ("phil_dine(", n, ") starts")
|
||||
val () = randsleep (3)
|
||||
val () = println! ("phil_dine(", n, ") finishes")
|
||||
}
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
phil_loop (n) = let
|
||||
//
|
||||
val () = phil_think (n)
|
||||
//
|
||||
val nl = phil_left (n)
|
||||
val nr = phil_right (n)
|
||||
//
|
||||
val ch_lfork = fork_changet (nl)
|
||||
val ch_rfork = fork_changet (nr)
|
||||
//
|
||||
val lf = channel_takeout (ch_lfork)
|
||||
val () = println! ("phil_loop(", n, ") picks left fork")
|
||||
//
|
||||
val () = randsleep (2) // HX: try to actively induce deadlock
|
||||
//
|
||||
val rf = channel_takeout (ch_rfork)
|
||||
val () = println! ("phil_loop(", n, ") picks right fork")
|
||||
//
|
||||
val () = phil_dine (n, lf, rf)
|
||||
//
|
||||
val ch_forktray = forktray_changet ()
|
||||
val () = channel_insert (ch_forktray, lf)
|
||||
val () = channel_insert (ch_forktray, rf)
|
||||
//
|
||||
in
|
||||
phil_loop (n)
|
||||
end // end of [phil_loop]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
cleaner_wash (f) =
|
||||
{
|
||||
val f = fork_get_num (f)
|
||||
val () = println! ("cleaner_wash(", f, ") starts")
|
||||
val () = randsleep (1)
|
||||
val () = println! ("cleaner_wash(", f, ") finishes")
|
||||
}
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
cleaner_return (f) =
|
||||
{
|
||||
val n = fork_get_num (f)
|
||||
val ch = fork_changet (n)
|
||||
val () = channel_insert (ch, f)
|
||||
}
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
cleaner_loop () = let
|
||||
//
|
||||
val ch = forktray_changet ()
|
||||
val f0 = channel_takeout (ch)
|
||||
//
|
||||
val () = cleaner_wash (f0)
|
||||
val () = cleaner_return (f0)
|
||||
//
|
||||
in
|
||||
cleaner_loop ()
|
||||
end // end of [cleaner_loop]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
dynload "DiningPhil2.sats"
|
||||
dynload "DiningPhil2_fork.dats"
|
||||
dynload "DiningPhil2_thread.dats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
local
|
||||
//
|
||||
staload
|
||||
"{$LIBATSHWXI}/teaching/mythread/SATS/mythread.sats"
|
||||
//
|
||||
in (* in of [local] *)
|
||||
//
|
||||
val () = mythread_create_cloptr (llam () => phil_loop (0))
|
||||
val () = mythread_create_cloptr (llam () => phil_loop (1))
|
||||
val () = mythread_create_cloptr (llam () => phil_loop (2))
|
||||
val () = mythread_create_cloptr (llam () => phil_loop (3))
|
||||
val () = mythread_create_cloptr (llam () => phil_loop (4))
|
||||
//
|
||||
val () = mythread_create_cloptr (llam () => cleaner_loop ())
|
||||
//
|
||||
end // end of [local]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
main0 () =
|
||||
{
|
||||
//
|
||||
val () = println! ("DiningPhil2: starting")
|
||||
val ((*void*)) = while (true) ignoret (sleep(1))
|
||||
//
|
||||
} (* end of [main0] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [DiningPhil2.dats] *)
|
||||
samples/ATS/DiningPhil2.sats (new file, 71 lines)
@@ -0,0 +1,71 @@
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2013-11
|
||||
//
|
||||
// Implementing a variant of
|
||||
// the problem of Dining Philosophers
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
#include
|
||||
"share/atspre_define.hats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload "{$LIBATSHWXI}/teaching/mythread/SATS/channel.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
%{#
|
||||
#define NPHIL 5
|
||||
%} // end of [%{#]
|
||||
#define NPHIL 5
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
typedef nphil = natLt(NPHIL)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun phil_left (n: nphil): nphil
|
||||
fun phil_right (n: nphil): nphil
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun phil_loop (n: nphil): void
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
fun cleaner_loop ((*void*)): void
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
absvtype fork_vtype = ptr
|
||||
vtypedef fork = fork_vtype
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun fork_get_num (!fork): nphil
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun phil_dine
|
||||
(n: nphil, lf: !fork, rf: !fork): void
|
||||
// end of [phil_dine]
|
||||
|
||||
fun phil_think (n: nphil): void
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun cleaner_wash (f: !fork): void
|
||||
fun cleaner_return (f: fork): void
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun fork_changet (n: nphil): channel(fork)
|
||||
//
|
||||
fun forktray_changet ((*void*)): channel(fork)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [DiningPhil2.sats] *)
|
||||
samples/ATS/DiningPhil2_fork.dats (new file, 89 lines)
@@ -0,0 +1,89 @@
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2013-11
|
||||
//
|
||||
// Implementing a variant of
|
||||
// the problem of Dining Philosophers
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
#include
|
||||
"share/atspre_define.hats"
|
||||
#include
|
||||
"share/atspre_staload.hats"
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
staload
|
||||
UN = "prelude/SATS/unsafe.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload "{$LIBATSHWXI}/teaching/mythread/SATS/channel.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload _ = "libats/DATS/deqarray.dats"
|
||||
staload _ = "{$LIBATSHWXI}/teaching/mythread/DATS/channel.dats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload "./DiningPhil2.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
datavtype fork = FORK of (nphil)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
assume fork_vtype = fork
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
fork_get_num (f) = let val FORK(n) = f in n end
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
local
|
||||
|
||||
val
|
||||
the_forkarray = let
|
||||
//
|
||||
typedef t = channel(fork)
|
||||
//
|
||||
implement
|
||||
array_tabulate$fopr<t>
|
||||
(n) = ch where
|
||||
{
|
||||
val n = $UN.cast{nphil}(n)
|
||||
val ch = channel_create_exn<fork> (i2sz(2))
|
||||
val () = channel_insert (ch, FORK (n))
|
||||
}
|
||||
//
|
||||
in
|
||||
arrayref_tabulate<t> (i2sz(NPHIL))
|
||||
end // end of [val]
|
||||
|
||||
in (* in of [local] *)
|
||||
|
||||
implement fork_changet (n) = the_forkarray[n]
|
||||
|
||||
end // end of [local]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
local
|
||||
|
||||
val the_forktray =
|
||||
channel_create_exn<fork> (i2sz(NPHIL+1))
|
||||
|
||||
in (* in of [local] *)
|
||||
|
||||
implement forktray_changet () = the_forktray
|
||||
|
||||
end // end of [local]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [DiningPhil2_fork.dats] *)
|
||||
samples/ATS/DiningPhil2_thread.dats (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2013-11
|
||||
//
|
||||
// Implementing a variant of
|
||||
// the problem of Dining Philosophers
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
#include "share/atspre_define.hats"
|
||||
#include "share/atspre_staload.hats"
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
staload "{$LIBATSHWXI}/teaching/mythread/SATS/mythread.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
local
|
||||
//
|
||||
#include "{$LIBATSHWXI}/teaching/mythread/DATS/mythread.dats"
|
||||
//
|
||||
in (* in of [local] *)
|
||||
//
|
||||
// HX: it is intentionally left to be empty
|
||||
//
|
||||
end // end of [local]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
local
|
||||
//
|
||||
#include "{$LIBATSHWXI}/teaching/mythread/DATS/mythread_posix.dats"
|
||||
//
|
||||
in (* in of [local] *)
|
||||
//
|
||||
// HX: it is intentionally left to be empty
|
||||
//
|
||||
end // end of [local]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [DiningPhil2_thread.dats] *)
|
||||
samples/ATS/YonedaLemma.dats (new file, 178 lines)
@@ -0,0 +1,178 @@
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2014-01
|
||||
// Yoneda Lemma:
|
||||
// The hardest "trivial" theorem :)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
#include
|
||||
"share/atspre_staload.hats"
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
staload
|
||||
"libats/ML/SATS/basis.sats"
|
||||
staload
|
||||
"libats/ML/SATS/list0.sats"
|
||||
staload
|
||||
"libats/ML/SATS/option0.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload _ = "libats/ML/DATS/list0.dats"
|
||||
staload _ = "libats/ML/DATS/option0.dats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
sortdef ftype = type -> type
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
infixr (->) ->>
|
||||
typedef ->> (a:type, b:type) = a -<cloref1> b
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
typedef
|
||||
functor(F:ftype) =
|
||||
{a,b:type} (a ->> b) ->> F(a) ->> F(b)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
typedef
|
||||
list0 (a:type) = list0 (a)
|
||||
extern
|
||||
val functor_list0 : functor (list0)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
functor_list0{a,b}
|
||||
(f) = lam xs => list0_map<a><b> (xs, f)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
typedef
|
||||
option0 (a:type) = option0 (a)
|
||||
extern
|
||||
val functor_option0 : functor (option0)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
functor_option0{a,b}
|
||||
(f) = lam opt => option0_map<a><b> (opt, f)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
extern
|
||||
val functor_homres
|
||||
: {c:type} functor (lam(r:type) => c ->> r)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
functor_homres{c}{a,b} (f) = lam (r) => lam (x) => f (r(x))
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
extern
|
||||
fun Yoneda_phi : {F:ftype}functor(F) ->
|
||||
{a:type}F(a) ->> ({r:type}(a ->> r) ->> F(r))
|
||||
extern
|
||||
fun Yoneda_psi : {F:ftype}functor(F) ->
|
||||
{a:type}({r:type}(a ->> r) ->> F(r)) ->> F(a)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
implement
|
||||
Yoneda_phi
|
||||
(ftor) = lam(fx) => lam (m) => ftor(m)(fx)
|
||||
//
|
||||
implement
|
||||
Yoneda_psi (ftor) = lam(mf) => mf(lam x => x)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
(*
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2014-01-05:
|
||||
// Another version based on Natural Transformation
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
typedef
|
||||
natrans(F:ftype, G:ftype) = {x:type} (F(x) ->> G(x))
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
extern
|
||||
fun Yoneda_phi_nat : {F:ftype}functor(F) ->
|
||||
{a:type} F(a) ->> natrans(lam (r:type) => (a ->> r), F)
|
||||
extern
|
||||
fun Yoneda_psi_nat : {F:ftype}functor(F) ->
|
||||
{a:type} natrans(lam (r:type) => (a ->> r), F) ->> F(a)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
implement
|
||||
Yoneda_phi_nat
|
||||
(ftor) = lam(fx) => lam (m) => ftor(m)(fx)
|
||||
//
|
||||
implement
|
||||
Yoneda_psi_nat (ftor) = lam(mf) => mf(lam x => x)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
datatype bool = True | False // boxed boolean
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun bool2string
|
||||
(x:bool): string =
|
||||
(
|
||||
case+ x of True() => "True" | False() => "False"
|
||||
)
|
||||
//
|
||||
implement
|
||||
fprint_val<bool> (out, x) = fprint (out, bool2string(x))
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
val myboolist0 =
|
||||
$list_t{bool}(True, False, True, False, False)
|
||||
val myboolist0 = g0ofg1_list (myboolist0)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
extern
|
||||
val Yoneda_bool_list0 : {r:type} (bool ->> r) ->> list0(r)
|
||||
//
|
||||
implement
|
||||
Yoneda_bool_list0 =
|
||||
Yoneda_phi(functor_list0){bool}(myboolist0)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
val myboolist1 =
|
||||
Yoneda_psi(functor_list0){bool}(Yoneda_bool_list0)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
val () = fprintln! (stdout_ref, "myboolist0 = ", myboolist0)
|
||||
val () = fprintln! (stdout_ref, "myboolist1 = ", myboolist1)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement main0 () = ()
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [YonedaLemma.dats] *)
|
||||
samples/ATS/linset.hats (new file, 187 lines)
@@ -0,0 +1,187 @@
|
||||
(***********************************************************************)
|
||||
(* *)
|
||||
(* Applied Type System *)
|
||||
(* *)
|
||||
(***********************************************************************)
|
||||
|
||||
(*
|
||||
** ATS/Postiats - Unleashing the Potential of Types!
|
||||
** Copyright (C) 2011-2013 Hongwei Xi, ATS Trustful Software, Inc.
|
||||
** All rights reserved
|
||||
**
|
||||
** ATS is free software; you can redistribute it and/or modify it under
|
||||
** the terms of the GNU GENERAL PUBLIC LICENSE (GPL) as published by the
|
||||
** Free Software Foundation; either version 3, or (at your option) any
|
||||
** later version.
|
||||
**
|
||||
** ATS is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
** WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
** FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
** for more details.
|
||||
**
|
||||
** You should have received a copy of the GNU General Public License
|
||||
** along with ATS; see the file COPYING. If not, please write to the
|
||||
** Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
** 02110-1301, USA.
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* Author: Hongwei Xi *)
|
||||
(* Authoremail: hwxi AT cs DOT bu DOT edu *)
|
||||
(* Start time: December, 2012 *)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX: shared by linset_listord (* ordered list *)
|
||||
// HX: shared by linset_avltree (* AVL-tree-based *)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2013-02:
|
||||
// for sets of nonlinear elements
|
||||
//
|
||||
absvtype set_vtype (a:t@ype+) = ptr
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
vtypedef set (a:t0p) = set_vtype (a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
compare_elt_elt (x1: a, x2: a):<> int
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{} linset_nil{a:t0p} ():<> set(a)
|
||||
fun{} linset_make_nil{a:t0p} ():<> set(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p} linset_sing (x: a):<!wrt> set(a)
|
||||
fun{a:t0p} linset_make_sing (x: a):<!wrt> set(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_make_list (xs: List(INV(a))):<!wrt> set(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{}
|
||||
linset_is_nil {a:t0p} (xs: !set(INV(a))):<> bool
|
||||
fun{}
|
||||
linset_isnot_nil {a:t0p} (xs: !set(INV(a))):<> bool
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p} linset_size (!set(INV(a))): size_t
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_is_member (xs: !set(INV(a)), x0: a):<> bool
|
||||
fun{a:t0p}
|
||||
linset_isnot_member (xs: !set(INV(a)), x0: a):<> bool
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_copy (!set(INV(a))):<!wrt> set(a)
|
||||
fun{a:t0p}
|
||||
linset_free (xs: set(INV(a))):<!wrt> void
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_insert
|
||||
(xs: &set(INV(a)) >> _, x0: a):<!wrt> bool
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_takeout
|
||||
(
|
||||
&set(INV(a)) >> _, a, res: &(a?) >> opt(a, b)
|
||||
) :<!wrt> #[b:bool] bool(b) // endfun
|
||||
fun{a:t0p}
|
||||
linset_takeout_opt (&set(INV(a)) >> _, a):<!wrt> Option_vt(a)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_remove
|
||||
(xs: &set(INV(a)) >> _, x0: a):<!wrt> bool
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX: choosing an element in an unspecified manner
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_choose
|
||||
(
|
||||
xs: !set(INV(a)), x: &a? >> opt (a, b)
|
||||
) :<!wrt> #[b:bool] bool(b)
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_choose_opt (xs: !set(INV(a))):<!wrt> Option_vt(a)
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_takeoutmax
|
||||
(
|
||||
xs: &set(INV(a)) >> _, res: &a? >> opt(a, b)
|
||||
) :<!wrt> #[b:bool] bool (b)
|
||||
fun{a:t0p}
|
||||
linset_takeoutmax_opt (xs: &set(INV(a)) >> _):<!wrt> Option_vt(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_takeoutmin
|
||||
(
|
||||
xs: &set(INV(a)) >> _, res: &a? >> opt(a, b)
|
||||
) :<!wrt> #[b:bool] bool (b)
|
||||
fun{a:t0p}
|
||||
linset_takeoutmin_opt (xs: &set(INV(a)) >> _):<!wrt> Option_vt(a)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{}
|
||||
fprint_linset$sep (FILEref): void // ", "
|
||||
//
|
||||
fun{a:t0p}
|
||||
fprint_linset (out: FILEref, xs: !set(INV(a))): void
|
||||
//
|
||||
overload fprint with fprint_linset
|
||||
//
|
||||
(* ****** ****** *)
|
||||
//
|
||||
fun{
|
||||
a:t0p}{env:vt0p
|
||||
} linset_foreach$fwork
|
||||
(x: a, env: &(env) >> _): void
|
||||
//
|
||||
fun{a:t0p}
|
||||
linset_foreach (set: !set(INV(a))): void
|
||||
fun{
|
||||
a:t0p}{env:vt0p
|
||||
} linset_foreach_env
|
||||
(set: !set(INV(a)), env: &(env) >> _): void
|
||||
// end of [linset_foreach_env]
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_listize (xs: set(INV(a))): List0_vt (a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
fun{a:t0p}
|
||||
linset_listize1 (xs: !set(INV(a))): List0_vt (a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [linset.hats] *)
|
||||
samples/ATS/linset_listord.dats (new file, 504 lines)
@@ -0,0 +1,504 @@
|
||||
(***********************************************************************)
|
||||
(* *)
|
||||
(* Applied Type System *)
|
||||
(* *)
|
||||
(***********************************************************************)
|
||||
|
||||
(*
|
||||
** ATS/Postiats - Unleashing the Potential of Types!
|
||||
** Copyright (C) 2011-2013 Hongwei Xi, ATS Trustful Software, Inc.
|
||||
** All rights reserved
|
||||
**
|
||||
** ATS is free software; you can redistribute it and/or modify it under
|
||||
** the terms of the GNU GENERAL PUBLIC LICENSE (GPL) as published by the
|
||||
** Free Software Foundation; either version 3, or (at your option) any
|
||||
** later version.
|
||||
**
|
||||
** ATS is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
** WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
** FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
** for more details.
|
||||
**
|
||||
** You should have received a copy of the GNU General Public License
|
||||
** along with ATS; see the file COPYING. If not, please write to the
|
||||
** Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
** 02110-1301, USA.
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* Author: Hongwei Xi *)
|
||||
(* Authoremail: hwxi AT cs DOT bu DOT edu *)
|
||||
(* Start time: February, 2013 *)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX-2013-08:
|
||||
// a set is represented as a sorted list in descending order;
|
||||
// note that descending order is chosen to facilitate set comparison
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
staload
|
||||
UN = "prelude/SATS/unsafe.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
staload "libats/SATS/linset_listord.sats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
#include "./SHARE/linset.hats" // code reuse
|
||||
#include "./SHARE/linset_node.hats" // code reuse
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
assume
|
||||
set_vtype (elt:t@ype) = List0_vt (elt)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
linset_nil () = list_vt_nil ()
|
||||
implement{}
|
||||
linset_make_nil () = list_vt_nil ()
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_sing
|
||||
(x) = list_vt_cons{a}(x, list_vt_nil)
|
||||
// end of [linset_sing]
|
||||
implement{a}
|
||||
linset_make_sing
|
||||
(x) = list_vt_cons{a}(x, list_vt_nil)
|
||||
// end of [linset_make_sing]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{}
|
||||
linset_is_nil (xs) = list_vt_is_nil (xs)
|
||||
implement{}
|
||||
linset_isnot_nil (xs) = list_vt_is_cons (xs)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_size (xs) =
|
||||
let val n = list_vt_length(xs) in i2sz(n) end
|
||||
// end of [linset_size]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_is_member
|
||||
(xs, x0) = let
|
||||
//
|
||||
fun aux
|
||||
{n:nat} .<n>.
|
||||
(
|
||||
xs: !list_vt (a, n)
|
||||
) :<> bool = let
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| list_vt_cons (x, xs) => let
|
||||
val sgn = compare_elt_elt<a> (x0, x) in
|
||||
if sgn > 0 then false else (if sgn < 0 then aux (xs) else true)
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil ((*void*)) => false
|
||||
//
|
||||
end // end of [aux]
|
||||
//
|
||||
in
|
||||
aux (xs)
|
||||
end // end of [linset_is_member]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_copy (xs) = list_vt_copy<a> (xs)
|
||||
implement{a}
|
||||
linset_free (xs) = list_vt_free<a> (xs)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_insert
|
||||
(xs, x0) = let
|
||||
//
|
||||
fun
|
||||
mynode_cons
|
||||
{n:nat} .<>.
|
||||
(
|
||||
nx: mynode1 (a), xs: list_vt (a, n)
|
||||
) : list_vt (a, n+1) = let
|
||||
//
|
||||
val xs1 =
|
||||
$UN.castvwtp0{List1_vt(a)}(nx)
|
||||
val+@list_vt_cons (_, xs2) = xs1
|
||||
prval () = $UN.cast2void (xs2); val () = (xs2 := xs)
|
||||
//
|
||||
in
|
||||
fold@ (xs1); xs1
|
||||
end // end of [mynode_cons]
|
||||
//
|
||||
fun ins
|
||||
{n:nat} .<n>. // tail-recursive
|
||||
(
|
||||
xs: &list_vt (a, n) >> list_vt (a, n1)
|
||||
) : #[n1:nat | n <= n1; n1 <= n+1] bool =
|
||||
(
|
||||
case+ xs of
|
||||
| @list_vt_cons
|
||||
(x, xs1) => let
|
||||
val sgn =
|
||||
compare_elt_elt<a> (x0, x)
|
||||
// end of [val]
|
||||
in
|
||||
if sgn > 0 then let
|
||||
prval () = fold@ (xs)
|
||||
val nx = mynode_make_elt<a> (x0)
|
||||
val ((*void*)) = xs := mynode_cons (nx, xs)
|
||||
in
|
||||
false
|
||||
end else if sgn < 0 then let
|
||||
val ans = ins (xs1)
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
ans
|
||||
end else let // [x0] is found
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
true (* [x0] in [xs] *)
|
||||
end (* end of [if] *)
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil () => let
|
||||
val nx = mynode_make_elt<a> (x0)
|
||||
val ((*void*)) = xs := mynode_cons (nx, xs)
|
||||
in
|
||||
false
|
||||
end // end of [list_vt_nil]
|
||||
) (* end of [ins] *)
|
||||
//
|
||||
in
|
||||
$effmask_all (ins (xs))
|
||||
end // end of [linset_insert]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(*
|
||||
//
|
||||
HX-2013-08:
|
||||
[linset_remove] moved up
|
||||
//
|
||||
implement{a}
|
||||
linset_remove
|
||||
(xs, x0) = let
|
||||
//
|
||||
fun rem
|
||||
{n:nat} .<n>. // tail-recursive
|
||||
(
|
||||
xs: &list_vt (a, n) >> list_vt (a, n1)
|
||||
) : #[n1:nat | n1 <= n; n <= n1+1] bool =
|
||||
(
|
||||
case+ xs of
|
||||
| @list_vt_cons
|
||||
(x, xs1) => let
|
||||
val sgn =
|
||||
compare_elt_elt<a> (x0, x)
|
||||
// end of [val]
|
||||
in
|
||||
if sgn > 0 then let
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
false
|
||||
end else if sgn < 0 then let
|
||||
val ans = rem (xs1)
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
ans
|
||||
end else let // x0 = x
|
||||
val xs1_ = xs1
|
||||
val ((*void*)) = free@{a}{0}(xs)
|
||||
val () = xs := xs1_
|
||||
in
|
||||
true // [x0] in [xs]
|
||||
end (* end of [if] *)
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil () => false
|
||||
) (* end of [rem] *)
|
||||
//
|
||||
in
|
||||
$effmask_all (rem (xs))
|
||||
end // end of [linset_remove]
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
(*
|
||||
** By Brandon Barker
|
||||
*)
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_choose
|
||||
(xs, x0) = let
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| list_vt_cons
|
||||
(x, xs1) => let
|
||||
val () = x0 := x
|
||||
prval () = opt_some{a}(x0)
|
||||
in
|
||||
true
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil () => let
|
||||
prval () = opt_none{a}(x0)
|
||||
in
|
||||
false
|
||||
end // end of [list_vt_nil]
|
||||
//
|
||||
end // end of [linset_choose]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}{env}
|
||||
linset_foreach_env (xs, env) = let
|
||||
//
|
||||
implement
|
||||
list_vt_foreach$fwork<a><env>
|
||||
(x, env) = linset_foreach$fwork<a><env> (x, env)
|
||||
//
|
||||
in
|
||||
list_vt_foreach_env<a><env> (xs, env)
|
||||
end // end of [linset_foreach_env]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_listize (xs) = xs
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{a}
|
||||
linset_listize1 (xs) = list_vt_copy (xs)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// HX: functions for processing mynodes
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{
|
||||
} mynode_null{a} () =
|
||||
$UN.castvwtp0{mynode(a,null)}(the_null_ptr)
|
||||
// end of [mynode_null]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
mynode_make_elt
|
||||
(x) = let
|
||||
//
|
||||
val nx = list_vt_cons{a}{0}(x, _ )
|
||||
//
|
||||
in
|
||||
$UN.castvwtp0{mynode1(a)}(nx)
|
||||
end // end of [mynode_make_elt]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement{
|
||||
} mynode_free
|
||||
{a}(nx) = () where {
|
||||
val nx =
|
||||
$UN.castvwtp0{List1_vt(a)}(nx)
|
||||
//
|
||||
val+~list_vt_cons (_, nx2) = nx
|
||||
//
|
||||
prval ((*void*)) = $UN.cast2void (nx2)
|
||||
//
|
||||
} (* end of [mynode_free] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
mynode_get_elt
|
||||
(nx) = (x) where {
|
||||
//
|
||||
val nx1 =
|
||||
$UN.castvwtp1{List1_vt(a)}(nx)
|
||||
//
|
||||
val+list_vt_cons (x, _) = nx1
|
||||
//
|
||||
prval ((*void*)) = $UN.cast2void (nx1)
|
||||
//
|
||||
} (* end of [mynode_get_elt] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
mynode_set_elt
|
||||
{l} (nx, x0) =
|
||||
{
|
||||
//
|
||||
val nx1 =
|
||||
$UN.castvwtp1{List1_vt(a)}(nx)
|
||||
//
|
||||
val+@list_vt_cons (x, _) = nx1
|
||||
//
|
||||
val () = x := x0
|
||||
//
|
||||
prval () = fold@ (nx1)
|
||||
prval () = $UN.cast2void (nx1)
|
||||
//
|
||||
prval () = __assert (nx) where
|
||||
{
|
||||
extern praxi __assert (nx: !mynode(a?, l) >> mynode (a, l)): void
|
||||
} (* end of [prval] *)
|
||||
//
|
||||
} (* end of [mynode_set_elt] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
mynode_getfree_elt
|
||||
(nx) = (x) where {
|
||||
//
|
||||
val nx =
|
||||
$UN.castvwtp0{List1_vt(a)}(nx)
|
||||
//
|
||||
val+~list_vt_cons (x, nx2) = nx
|
||||
//
|
||||
prval ((*void*)) = $UN.cast2void (nx2)
|
||||
//
|
||||
} (* end of [mynode_getfree_elt] *)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(*
|
||||
fun{a:t0p}
|
||||
linset_takeout_ngc
|
||||
(set: &set(INV(a)) >> _, x0: a):<!wrt> mynode0 (a)
|
||||
// end of [linset_takeout_ngc]
|
||||
*)
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_takeout_ngc
|
||||
(set, x0) = let
|
||||
//
|
||||
fun takeout
|
||||
(
|
||||
xs: &List0_vt (a) >> _
|
||||
) : mynode0(a) = let
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| @list_vt_cons
|
||||
(x, xs1) => let
|
||||
prval pf_x = view@x
|
||||
prval pf_xs1 = view@xs1
|
||||
val sgn =
|
||||
compare_elt_elt<a> (x0, x)
|
||||
// end of [val]
|
||||
in
|
||||
if sgn > 0 then let
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
mynode_null{a}((*void*))
|
||||
end else if sgn < 0 then let
|
||||
val res = takeout (xs1)
|
||||
prval ((*void*)) = fold@ (xs)
|
||||
in
|
||||
res
|
||||
end else let // x0 = x
|
||||
val xs1_ = xs1
|
||||
val res = $UN.castvwtp0{mynode1(a)}((pf_x, pf_xs1 | xs))
|
||||
val () = xs := xs1_
|
||||
in
|
||||
res // [x0] in [xs]
|
||||
end (* end of [if] *)
|
||||
end // end of [list_vt_cons]
|
||||
| list_vt_nil () => mynode_null{a}((*void*))
|
||||
//
|
||||
end (* end of [takeout] *)
|
||||
//
|
||||
in
|
||||
$effmask_all (takeout (set))
|
||||
end // end of [linset_takeout_ngc]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_takeoutmax_ngc
|
||||
(xs) = let
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| @list_vt_cons
|
||||
(x, xs1) => let
|
||||
prval pf_x = view@x
|
||||
prval pf_xs1 = view@xs1
|
||||
val xs_ = xs
|
||||
val () = xs := xs1
|
||||
in
|
||||
$UN.castvwtp0{mynode1(a)}((pf_x, pf_xs1 | xs_))
|
||||
end // end of [list_vt_cons]
|
||||
| @list_vt_nil () => let
|
||||
prval () = fold@ (xs)
|
||||
in
|
||||
mynode_null{a}((*void*))
|
||||
end // end of [list_vt_nil]
|
||||
//
|
||||
end // end of [linset_takeoutmax_ngc]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
implement
|
||||
{a}(*tmp*)
|
||||
linset_takeoutmin_ngc
|
||||
(xs) = let
|
||||
//
|
||||
fun unsnoc
|
||||
{n:pos} .<n>.
|
||||
(
|
||||
xs: &list_vt (a, n) >> list_vt (a, n-1)
|
||||
) :<!wrt> mynode1 (a) = let
|
||||
//
|
||||
val+@list_vt_cons (x, xs1) = xs
|
||||
//
|
||||
prval pf_x = view@x and pf_xs1 = view@xs1
|
||||
//
|
||||
in
|
||||
//
|
||||
case+ xs1 of
|
||||
| list_vt_cons _ =>
|
||||
let val res = unsnoc(xs1) in fold@xs; res end
|
||||
// end of [list_vt_cons]
|
||||
| list_vt_nil () => let
|
||||
val xs_ = xs
|
||||
val () = xs := list_vt_nil{a}()
|
||||
in
|
||||
$UN.castvwtp0{mynode1(a)}((pf_x, pf_xs1 | xs_))
|
||||
end // end of [list_vt_nil]
|
||||
//
|
||||
end // end of [unsnoc]
|
||||
//
|
||||
in
|
||||
//
|
||||
case+ xs of
|
||||
| list_vt_cons _ => unsnoc (xs)
|
||||
| list_vt_nil () => mynode_null{a}((*void*))
|
||||
//
|
||||
end // end of [linset_takeoutmin_ngc]
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [linset_listord.dats] *)
|
||||
51
samples/ATS/linset_listord.sats
Normal file
@@ -0,0 +1,51 @@
|
||||
(***********************************************************************)
|
||||
(* *)
|
||||
(* Applied Type System *)
|
||||
(* *)
|
||||
(***********************************************************************)
|
||||
|
||||
(*
|
||||
** ATS/Postiats - Unleashing the Potential of Types!
|
||||
** Copyright (C) 2011-2013 Hongwei Xi, ATS Trustful Software, Inc.
|
||||
** All rights reserved
|
||||
**
|
||||
** ATS is free software; you can redistribute it and/or modify it under
|
||||
** the terms of the GNU GENERAL PUBLIC LICENSE (GPL) as published by the
|
||||
** Free Software Foundation; either version 3, or (at your option) any
|
||||
** later version.
|
||||
**
|
||||
** ATS is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
** WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
** FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
** for more details.
|
||||
**
|
||||
** You should have received a copy of the GNU General Public License
|
||||
** along with ATS; see the file COPYING. If not, please write to the
|
||||
** Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
** 02110-1301, USA.
|
||||
*)
|
||||
|
||||
(* ****** ****** *)
|
||||
//
|
||||
// Author: Hongwei Xi
|
||||
// Authoremail: hwxiATcsDOTbuDOTedu
|
||||
// Time: October, 2010
|
||||
//
|
||||
(* ****** ****** *)
|
||||
|
||||
#define ATS_PACKNAME "ATSLIB.libats.linset_listord"
|
||||
#define ATS_STALOADFLAG 0 // no static loading at run-time
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
#include "./SHARE/linset.hats"
|
||||
#include "./SHARE/linset_node.hats"
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
castfn
|
||||
linset2list {a:t0p} (xs: set (INV(a))):<> List0_vt (a)
|
||||
|
||||
(* ****** ****** *)
|
||||
|
||||
(* end of [linset_listord.sats] *)
|
||||
215
samples/ATS/main.atxt
Normal file
@@ -0,0 +1,215 @@
|
||||
%{
|
||||
#include "./../ATEXT/atextfun.hats"
|
||||
%}
|
||||
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
|
||||
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
|
||||
<head>
|
||||
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
|
||||
<title>EFFECTIVATS-DiningPhil2</title>
|
||||
#patscode_style()
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<h1>
|
||||
Effective ATS: Dining Philosophers
|
||||
</h1>
|
||||
|
||||
In this article, I present an implementation of a slight variant of the
|
||||
famous problem of 5-Dining-Philosophers by Dijkstra that makes simple but
|
||||
convincing use of linear types.
|
||||
|
||||
<h2>
|
||||
The Original Problem
|
||||
</h2>
|
||||
|
||||
There are five philosophers sitting around a table and there are also 5
|
||||
forks placed on the table such that each fork is located between the left
|
||||
hand of a philosopher and the right hand of another philosopher. Each
|
||||
philosopher does the following routine repeatedly: thinking and dining. In
|
||||
order to dine, a philosopher needs to first acquire two forks: one located
|
||||
on his left-hand side and the other on his right-hand side. After
|
||||
finishing dining, a philosopher puts the two acquired forks onto the table:
|
||||
one on his left-hand side and the other on his right-hand side.
|
||||
|
||||
<h2>
|
||||
A Variant of the Original Problem
|
||||
</h2>
|
||||
|
||||
The following twist is added to the original version:
|
||||
|
||||
<p>
|
||||
|
||||
After a fork is used, it becomes a "dirty" fork and needs to be put in a
|
||||
tray for dirty forks. There is a cleaner who cleans dirty forks and then
|
||||
puts them back on the table.
|
||||
|
||||
<h2>
|
||||
Channels for Communication
|
||||
</h2>
|
||||
|
||||
A channel is just a shared queue of fixed capacity. The following two
|
||||
functions are for inserting an element into and taking an element out of a
|
||||
given channel:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_sats("\
|
||||
fun{a:vt0p} channel_insert (channel (a), a): void
|
||||
fun{a:vt0p} channel_takeout (chan: channel (a)): (a)
|
||||
")</pre>
|
||||
|
||||
If [channel_insert] is called on a channel that is full, then the caller is
|
||||
blocked until an element is taken out of the channel. If [channel_takeout]
|
||||
is called on a channel that is empty, then the caller is blocked until an
|
||||
element is inserted into the channel.
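
For readers who only want the operational picture, the blocking behavior
just described can be mimicked with a bounded queue. The following Python
fragment is purely an illustrative sketch of these semantics (it is not the
ATS library; [channel_make] is an invented helper name):

<pre>
import queue

def channel_make(cap):
    # a channel is just a shared queue of fixed capacity
    return queue.Queue(maxsize=cap)

def channel_insert(ch, x):
    # blocks the caller while the channel is full
    ch.put(x, block=True)

def channel_takeout(ch):
    # blocks the caller while the channel is empty
    return ch.get(block=True)
</pre>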
|
||||
|
||||
<h2>
|
||||
A Channel for Each Fork
|
||||
</h2>
|
||||
|
||||
Forks are resources given a linear type. Each fork is initially stored in a
|
||||
channel, which can be obtained by calling the following function:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_sats("\
|
||||
fun fork_changet (n: nphil): channel(fork)
|
||||
")</pre>
|
||||
|
||||
where the type [nphil] is defined to be [natLt(5)] (for natural numbers
less than 5). The channels for storing forks are chosen to be of capacity
2. Although each of these channels holds at most one fork at a time,
capacity 2 guarantees that it can never become full, so no attempt is ever
made to send signals to wake callers supposedly blocked on a full channel.
|
||||
|
||||
|
||||
<h2>
|
||||
A Channel for the Fork Tray
|
||||
</h2>
|
||||
|
||||
A tray for storing "dirty" forks is also a channel, which can be obtained
|
||||
by calling the following function:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_sats("\
|
||||
fun forktray_changet ((*void*)): channel(fork)
|
||||
")</pre>
|
||||
|
||||
The capacity chosen for the channel is 6 (instead of 5) so that it can
|
||||
never become full (as there are only 5 forks in total).
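
Continuing the Python sketch from above (again only an illustration;
[NPHIL], [fork_chans] and [tray_chan] are invented names), the two capacity
choices translate into the following setup, with a fork represented by its
number:

<pre>
NPHIL = 5

# one channel of capacity 2 per fork, each pre-loaded with its fork
fork_chans = []
for n in range(NPHIL):
    ch = channel_make(2)
    channel_insert(ch, n)
    fork_chans.append(ch)

# the dirty-fork tray: capacity 6 exceeds the 5 forks, so it never fills up
tray_chan = channel_make(6)

def fork_changet(n):
    return fork_chans[n]

def forktray_changet():
    return tray_chan
</pre>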
|
||||
|
||||
<h2>
|
||||
Philosopher Loop
|
||||
</h2>
|
||||
|
||||
Each philosopher is implemented as a loop:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_dats('\
|
||||
implement
|
||||
phil_loop (n) = let
|
||||
//
|
||||
val () = phil_think (n)
|
||||
//
|
||||
val nl = phil_left (n) // = n
|
||||
val nr = phil_right (n) // = (n+1) % 5
|
||||
//
|
||||
val ch_lfork = fork_changet (nl)
|
||||
val ch_rfork = fork_changet (nr)
|
||||
//
|
||||
val lf = channel_takeout (ch_lfork)
|
||||
val () = println! ("phil_loop(", n, ") picks left fork")
|
||||
//
|
||||
val () = randsleep (2) // sleep up to 2 seconds
|
||||
//
|
||||
val rf = channel_takeout (ch_rfork)
|
||||
val () = println! ("phil_loop(", n, ") picks right fork")
|
||||
//
|
||||
val () = phil_dine (n, lf, rf)
|
||||
//
|
||||
val ch_forktray = forktray_changet ()
|
||||
val () = channel_insert (ch_forktray, lf) // left fork to dirty tray
|
||||
val () = channel_insert (ch_forktray, rf) // right fork to dirty tray
|
||||
//
|
||||
in
|
||||
phil_loop (n)
|
||||
end // end of [phil_loop]
|
||||
')</pre>
|
||||
|
||||
It should be straightforward to follow the code for [phil_loop].
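
In the same illustrative Python sketch (not the ATS code), the philosopher
loop can be written as follows; [randsleep] stands in for both thinking and
dining:

<pre>
import random, time

def randsleep(n):
    time.sleep(random.uniform(0, n))   # sleep up to n seconds

def phil_loop(n):
    while True:
        randsleep(2)                            # thinking
        nl, nr = n, (n + 1) % 5                 # left and right fork numbers
        lf = channel_takeout(fork_changet(nl))
        print(f"phil_loop({n}) picks left fork")
        randsleep(2)
        rf = channel_takeout(fork_changet(nr))
        print(f"phil_loop({n}) picks right fork")
        randsleep(2)                            # dining
        ch_forktray = forktray_changet()
        channel_insert(ch_forktray, lf)         # left fork to dirty tray
        channel_insert(ch_forktray, rf)         # right fork to dirty tray
</pre>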
|
||||
|
||||
<h2>
|
||||
Fork Cleaner Loop
|
||||
</h2>
|
||||
|
||||
A cleaner is implemented as a loop:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_dats('\
|
||||
implement
|
||||
cleaner_loop () = let
|
||||
//
|
||||
val ch = forktray_changet ()
|
||||
val f0 = channel_takeout (ch) // [f0] is dirty
|
||||
//
|
||||
val () = cleaner_wash (f0) // washes dirty [f0]
|
||||
val () = cleaner_return (f0) // puts back cleaned [f0]
|
||||
//
|
||||
in
|
||||
cleaner_loop ()
|
||||
end // end of [cleaner_loop]
|
||||
')</pre>
|
||||
|
||||
The function [cleaner_return] first finds out the number of a given fork
|
||||
and then uses the number to locate the channel for storing the fork. Its
|
||||
actual implementation is given as follows:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_dats('\
|
||||
implement
|
||||
cleaner_return (f) =
|
||||
{
|
||||
val n = fork_get_num (f)
|
||||
val ch = fork_changet (n)
|
||||
val () = channel_insert (ch, f)
|
||||
}
|
||||
')</pre>
|
||||
|
||||
It should now be straightforward to follow the code for [cleaner_loop].
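
The cleaner side of the illustrative Python sketch is equally short; since a
fork is represented there by its number, washing is modelled as a no-op and
[cleaner_return] only has to locate the right channel:

<pre>
def cleaner_return(f):
    channel_insert(fork_changet(f), f)   # put the cleaned fork back on the table

def cleaner_loop():
    while True:
        f0 = channel_takeout(forktray_changet())   # [f0] is dirty
        cleaner_return(f0)                         # return the cleaned fork
</pre>

A driver would then start five philosopher threads and one cleaner thread,
for instance with threading.Thread(target=phil_loop, args=(n,)).start() for
each n in range(5) and threading.Thread(target=cleaner_loop).start().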
|
||||
|
||||
<h2>
|
||||
Testing
|
||||
</h2>
|
||||
|
||||
The entire code of this implementation is stored in the following files:
|
||||
|
||||
<pre>
|
||||
DiningPhil2.sats
|
||||
DiningPhil2.dats
|
||||
DiningPhil2_fork.dats
|
||||
DiningPhil2_thread.dats
|
||||
</pre>
|
||||
|
||||
There is also a Makefile available for compiling the ATS source code into
|
||||
an executable for testing. One should be able to encounter a deadlock after
running the simulation for a while: sooner or later every philosopher holds
his left fork while waiting for his right one, the classic circular wait.
|
||||
|
||||
<hr size="2">
|
||||
|
||||
This article is written by <a href="http://www.cs.bu.edu/~hwxi/">Hongwei Xi</a>.
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
%{
|
||||
implement main () = fprint_filsub (stdout_ref, "main_atxt.txt")
|
||||
%}
|
||||
39
samples/Agda/NatCat.agda
Normal file
@@ -0,0 +1,39 @@
|
||||
module NatCat where
|
||||
|
||||
open import Relation.Binary.PropositionalEquality
|
||||
|
||||
-- If you can show that a relation only ever has one inhabitant
|
||||
-- you get the category laws for free
|
||||
module
|
||||
EasyCategory
|
||||
(obj : Set)
|
||||
(_⟶_ : obj → obj → Set)
|
||||
(_∘_ : ∀ {x y z} → x ⟶ y → y ⟶ z → x ⟶ z)
|
||||
(id : ∀ x → x ⟶ x)
|
||||
(single-inhabitant : (x y : obj) (r s : x ⟶ y) → r ≡ s)
|
||||
where
|
||||
|
||||
idʳ : ∀ x y (r : x ⟶ y) → r ∘ id y ≡ r
|
||||
idʳ x y r = single-inhabitant x y (r ∘ id y) r
|
||||
|
||||
idˡ : ∀ x y (r : x ⟶ y) → id x ∘ r ≡ r
|
||||
idˡ x y r = single-inhabitant x y (id x ∘ r) r
|
||||
|
||||
∘-assoc : ∀ w x y z (r : w ⟶ x) (s : x ⟶ y) (t : y ⟶ z) → (r ∘ s) ∘ t ≡ r ∘ (s ∘ t)
|
||||
∘-assoc w x y z r s t = single-inhabitant w z ((r ∘ s) ∘ t) (r ∘ (s ∘ t))
|
||||
|
||||
open import Data.Nat
|
||||
|
||||
same : (x y : ℕ) (r s : x ≤ y) → r ≡ s
|
||||
same .0 y z≤n z≤n = refl
|
||||
same .(suc m) .(suc n) (s≤s {m} {n} r) (s≤s s) = cong s≤s (same m n r s)
|
||||
|
||||
≤-trans : ∀ x y z → x ≤ y → y ≤ z → x ≤ z
|
||||
≤-trans .0 y z z≤n s = z≤n
|
||||
≤-trans .(suc m) .(suc n) .(suc n₁) (s≤s {m} {n} r) (s≤s {.n} {n₁} s) = s≤s (≤-trans m n n₁ r s)
|
||||
|
||||
≤-refl : ∀ x → x ≤ x
|
||||
≤-refl zero = z≤n
|
||||
≤-refl (suc x) = s≤s (≤-refl x)
|
||||
|
||||
module Nat-EasyCategory = EasyCategory ℕ _≤_ (λ {x}{y}{z} → ≤-trans x y z) ≤-refl same
|
||||
59
samples/Alloy/file_system.als
Normal file
@@ -0,0 +1,59 @@
|
||||
module examples/systems/file_system
|
||||
|
||||
/*
|
||||
* Model of a generic file system.
|
||||
*/
|
||||
|
||||
abstract sig Object {}
|
||||
|
||||
sig Name {}
|
||||
|
||||
sig File extends Object {} { some d: Dir | this in d.entries.contents }
|
||||
|
||||
sig Dir extends Object {
|
||||
entries: set DirEntry,
|
||||
parent: lone Dir
|
||||
} {
|
||||
parent = this.~@contents.~@entries
|
||||
all e1, e2 : entries | e1.name = e2.name => e1 = e2
|
||||
this !in this.^@parent
|
||||
this != Root => Root in this.^@parent
|
||||
}
|
||||
|
||||
one sig Root extends Dir {} { no parent }
|
||||
|
||||
lone sig Cur extends Dir {}
|
||||
|
||||
sig DirEntry {
|
||||
name: Name,
|
||||
contents: Object
|
||||
} {
|
||||
one this.~entries
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* all directories besides root have one parent
|
||||
*/
|
||||
pred OneParent_buggyVersion {
|
||||
all d: Dir - Root | one d.parent
|
||||
}
|
||||
|
||||
/**
|
||||
* all directories besides root have one parent
|
||||
*/
|
||||
pred OneParent_correctVersion {
|
||||
all d: Dir - Root | (one d.parent && one contents.d)
|
||||
}
|
||||
|
||||
/**
|
||||
* Only files may be linked (that is, have more than one entry)
|
||||
* That is, all directories are the contents of at most one directory entry
|
||||
*/
|
||||
pred NoDirAliases {
|
||||
all o: Dir | lone o.~contents
|
||||
}
|
||||
|
||||
check { OneParent_buggyVersion => NoDirAliases } for 5 expect 1
|
||||
|
||||
check { OneParent_correctVersion => NoDirAliases } for 5 expect 0
|
||||
83
samples/Alloy/marksweepgc.als
Normal file
@@ -0,0 +1,83 @@
|
||||
module examples/systems/marksweepgc
|
||||
|
||||
/*
|
||||
* Model of mark and sweep garbage collection.
|
||||
*/
|
||||
|
||||
// a node in the heap
|
||||
sig Node {}
|
||||
|
||||
sig HeapState {
|
||||
left, right : Node -> lone Node,
|
||||
marked : set Node,
|
||||
freeList : lone Node
|
||||
}
|
||||
|
||||
pred clearMarks[hs, hs' : HeapState] {
|
||||
// clear marked set
|
||||
no hs'.marked
|
||||
// left and right fields are unchanged
|
||||
hs'.left = hs.left
|
||||
hs'.right = hs.right
|
||||
}
|
||||
|
||||
/**
|
||||
* simulate the recursion of the mark() function using transitive closure
|
||||
*/
|
||||
fun reachable[hs: HeapState, n: Node] : set Node {
|
||||
n + n.^(hs.left + hs.right)
|
||||
}
|
||||
|
||||
pred mark[hs: HeapState, from : Node, hs': HeapState] {
|
||||
hs'.marked = hs.reachable[from]
|
||||
hs'.left = hs.left
|
||||
hs'.right = hs.right
|
||||
}
|
||||
|
||||
/**
|
||||
* complete hack to simulate behavior of code to set freeList
|
||||
*/
|
||||
pred setFreeList[hs, hs': HeapState] {
|
||||
// especially hackish
|
||||
hs'.freeList.*(hs'.left) in (Node - hs.marked)
|
||||
all n: Node |
|
||||
(n !in hs.marked) => {
|
||||
no hs'.right[n]
|
||||
hs'.left[n] in (hs'.freeList.*(hs'.left))
|
||||
n in hs'.freeList.*(hs'.left)
|
||||
} else {
|
||||
hs'.left[n] = hs.left[n]
|
||||
hs'.right[n] = hs.right[n]
|
||||
}
|
||||
hs'.marked = hs.marked
|
||||
}
|
||||
|
||||
pred GC[hs: HeapState, root : Node, hs': HeapState] {
|
||||
some hs1, hs2: HeapState |
|
||||
hs.clearMarks[hs1] && hs1.mark[root, hs2] && hs2.setFreeList[hs']
|
||||
}
|
||||
|
||||
assert Soundness1 {
|
||||
all h, h' : HeapState, root : Node |
|
||||
h.GC[root, h'] =>
|
||||
(all live : h.reachable[root] | {
|
||||
h'.left[live] = h.left[live]
|
||||
h'.right[live] = h.right[live]
|
||||
})
|
||||
}
|
||||
|
||||
assert Soundness2 {
|
||||
all h, h' : HeapState, root : Node |
|
||||
h.GC[root, h'] =>
|
||||
no h'.reachable[root] & h'.reachable[h'.freeList]
|
||||
}
|
||||
|
||||
assert Completeness {
|
||||
all h, h' : HeapState, root : Node |
|
||||
h.GC[root, h'] =>
|
||||
(Node - h'.reachable[root]) in h'.reachable[h'.freeList]
|
||||
}
|
||||
|
||||
check Soundness1 for 3 expect 0
|
||||
check Soundness2 for 3 expect 0
|
||||
check Completeness for 3 expect 0
|
||||
217
samples/Alloy/views.als
Normal file
@@ -0,0 +1,217 @@
|
||||
module examples/systems/views
|
||||
|
||||
/*
|
||||
* Model of views in object-oriented programming.
|
||||
*
|
||||
* Two object references, called the view and the backing,
|
||||
* are related by a view mechanism when changes to the
|
||||
* backing are automatically propagated to the view. Note
|
||||
* that the state of a view need not be a projection of the
|
||||
* state of the backing; the keySet method of Map, for
|
||||
* example, produces two view relationships, and for the
|
||||
* one in which the map is modified by changes to the key
|
||||
* set, the value of the new map cannot be determined from
|
||||
* the key set. Note that in the iterator view mechanism,
|
||||
* the iterator is by this definition the backing object,
|
||||
* since changes are propagated from iterator to collection
|
||||
* and not vice versa. Oddly, a reference may be a view of
|
||||
* more than one backing: there can be two iterators on the
|
||||
* same collection, eg. A reference cannot be a view under
|
||||
* more than one view type.
|
||||
*
|
||||
* A reference is made dirty when it is a backing for a view
|
||||
* with which it is no longer related by the view invariant.
|
||||
* This usually happens when a view is modified, either
|
||||
* directly or via another backing. For example, changing a
|
||||
* collection directly when it has an iterator invalidates
|
||||
* it, as does changing the collection through one iterator
|
||||
* when there are others.
|
||||
*
|
||||
* More work is needed if we want to model more closely the
|
||||
* failure of an iterator when its collection is invalidated.
|
||||
*
|
||||
* As a terminological convention, when there are two
|
||||
* complementary view relationships, we will give them types
|
||||
* t and t'. For example, KeySetView propagates from map to
|
||||
* set, and KeySetView' propagates from set to map.
|
||||
*
|
||||
* author: Daniel Jackson
|
||||
*/
|
||||
|
||||
open util/ordering[State] as so
|
||||
open util/relation as rel
|
||||
|
||||
sig Ref {}
|
||||
sig Object {}
|
||||
|
||||
-- t->b->v in views when v is view of type t of backing b
|
||||
-- dirty contains refs that have been invalidated
|
||||
sig State {
|
||||
refs: set Ref,
|
||||
obj: refs -> one Object,
|
||||
views: ViewType -> refs -> refs,
|
||||
dirty: set refs
|
||||
-- , anyviews: Ref -> Ref -- for visualization
|
||||
}
|
||||
-- {anyviews = ViewType.views}
|
||||
|
||||
sig Map extends Object {
|
||||
keys: set Ref,
|
||||
map: keys -> one Ref
|
||||
}{all s: State | keys + Ref.map in s.refs}
|
||||
sig MapRef extends Ref {}
|
||||
fact {State.obj[MapRef] in Map}
|
||||
|
||||
sig Iterator extends Object {
|
||||
left, done: set Ref,
|
||||
lastRef: lone done
|
||||
}{all s: State | done + left + lastRef in s.refs}
|
||||
sig IteratorRef extends Ref {}
|
||||
fact {State.obj[IteratorRef] in Iterator}
|
||||
|
||||
sig Set extends Object {
|
||||
elts: set Ref
|
||||
}{all s: State | elts in s.refs}
|
||||
sig SetRef extends Ref {}
|
||||
fact {State.obj[SetRef] in Set}
|
||||
|
||||
abstract sig ViewType {}
|
||||
one sig KeySetView, KeySetView', IteratorView extends ViewType {}
|
||||
fact ViewTypes {
|
||||
State.views[KeySetView] in MapRef -> SetRef
|
||||
State.views[KeySetView'] in SetRef -> MapRef
|
||||
State.views[IteratorView] in IteratorRef -> SetRef
|
||||
all s: State | s.views[KeySetView] = ~(s.views[KeySetView'])
|
||||
}
|
||||
|
||||
/**
|
||||
* mods is refs modified directly or by view mechanism
|
||||
* doesn't handle possibility of modifying an object and its view at once?
|
||||
* should we limit frame conds to non-dirty refs?
|
||||
*/
|
||||
pred modifies [pre, post: State, rs: set Ref] {
|
||||
let vr = pre.views[ViewType], mods = rs.*vr {
|
||||
all r: pre.refs - mods | pre.obj[r] = post.obj[r]
|
||||
all b: mods, v: pre.refs, t: ViewType |
|
||||
b->v in pre.views[t] => viewFrame [t, pre.obj[v], post.obj[v], post.obj[b]]
|
||||
post.dirty = pre.dirty +
|
||||
{b: pre.refs | some v: Ref, t: ViewType |
|
||||
b->v in pre.views[t] && !viewFrame [t, pre.obj[v], post.obj[v], post.obj[b]]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pred allocates [pre, post: State, rs: set Ref] {
|
||||
no rs & pre.refs
|
||||
post.refs = pre.refs + rs
|
||||
}
|
||||
|
||||
/**
|
||||
* models frame condition that limits change to view object from v to v' when backing object changes to b'
|
||||
*/
|
||||
pred viewFrame [t: ViewType, v, v', b': Object] {
|
||||
t in KeySetView => v'.elts = dom [b'.map]
|
||||
t in KeySetView' => b'.elts = dom [v'.map]
|
||||
t in KeySetView' => (b'.elts) <: (v.map) = (b'.elts) <: (v'.map)
|
||||
t in IteratorView => v'.elts = b'.left + b'.done
|
||||
}
|
||||
|
||||
pred MapRef.keySet [pre, post: State, setRefs: SetRef] {
|
||||
post.obj[setRefs].elts = dom [pre.obj[this].map]
|
||||
modifies [pre, post, none]
|
||||
allocates [pre, post, setRefs]
|
||||
post.views = pre.views + KeySetView->this->setRefs + KeySetView'->setRefs->this
|
||||
}
|
||||
|
||||
pred MapRef.put [pre, post: State, k, v: Ref] {
|
||||
post.obj[this].map = pre.obj[this].map ++ k->v
|
||||
modifies [pre, post, this]
|
||||
allocates [pre, post, none]
|
||||
post.views = pre.views
|
||||
}
|
||||
|
||||
pred SetRef.iterator [pre, post: State, iterRef: IteratorRef] {
|
||||
let i = post.obj[iterRef] {
|
||||
i.left = pre.obj[this].elts
|
||||
no i.done + i.lastRef
|
||||
}
|
||||
modifies [pre,post,none]
|
||||
allocates [pre, post, iterRef]
|
||||
post.views = pre.views + IteratorView->iterRef->this
|
||||
}
|
||||
|
||||
pred IteratorRef.remove [pre, post: State] {
|
||||
let i = pre.obj[this], i' = post.obj[this] {
|
||||
i'.left = i.left
|
||||
i'.done = i.done - i.lastRef
|
||||
no i'.lastRef
|
||||
}
|
||||
modifies [pre,post,this]
|
||||
allocates [pre, post, none]
|
||||
pre.views = post.views
|
||||
}
|
||||
|
||||
pred IteratorRef.next [pre, post: State, ref: Ref] {
|
||||
let i = pre.obj[this], i' = post.obj[this] {
|
||||
ref in i.left
|
||||
i'.left = i.left - ref
|
||||
i'.done = i.done + ref
|
||||
i'.lastRef = ref
|
||||
}
|
||||
modifies [pre, post, this]
|
||||
allocates [pre, post, none]
|
||||
pre.views = post.views
|
||||
}
|
||||
|
||||
pred IteratorRef.hasNext [s: State] {
|
||||
some s.obj[this].left
|
||||
}
|
||||
|
||||
assert zippishOK {
|
||||
all
|
||||
ks, vs: SetRef,
|
||||
m: MapRef,
|
||||
ki, vi: IteratorRef,
|
||||
k, v: Ref |
|
||||
let s0=so/first,
|
||||
s1=so/next[s0],
|
||||
s2=so/next[s1],
|
||||
s3=so/next[s2],
|
||||
s4=so/next[s3],
|
||||
s5=so/next[s4],
|
||||
s6=so/next[s5],
|
||||
s7=so/next[s6] |
|
||||
({
|
||||
precondition [s0, ks, vs, m]
|
||||
no s0.dirty
|
||||
ks.iterator [s0, s1, ki]
|
||||
vs.iterator [s1, s2, vi]
|
||||
ki.hasNext [s2]
|
||||
vi.hasNext [s2]
|
||||
ki.this/next [s2, s3, k]
|
||||
vi.this/next [s3, s4, v]
|
||||
m.put [s4, s5, k, v]
|
||||
ki.remove [s5, s6]
|
||||
vi.remove [s6, s7]
|
||||
} => no State.dirty)
|
||||
}
|
||||
|
||||
pred precondition [pre: State, ks, vs, m: Ref] {
|
||||
// all these conditions and other errors discovered in scope of 6 but 8,3
|
||||
// in initial state, must have view invariants hold
|
||||
(all t: ViewType, b, v: pre.refs |
|
||||
b->v in pre.views[t] => viewFrame [t, pre.obj[v], pre.obj[v], pre.obj[b]])
|
||||
// sets are not aliases
|
||||
-- ks != vs
|
||||
// sets are not views of map
|
||||
-- no (ks+vs)->m & ViewType.pre.views
|
||||
// no iterator currently on either set
|
||||
-- no Ref->(ks+vs) & ViewType.pre.views
|
||||
}
|
||||
|
||||
check zippishOK for 6 but 8 State, 3 ViewType expect 1
|
||||
|
||||
/**
|
||||
* experiment with controlling heap size
|
||||
*/
|
||||
fact {all s: State | #s.obj < 5}
|
||||
26
samples/ApacheConf/filenames/.htaccess
Normal file
@@ -0,0 +1,26 @@
|
||||
ServerSignature Off
|
||||
RewriteCond %{REQUEST_METHOD} ^(HEAD|TRACE|DELETE|TRACK) [NC,OR]
|
||||
RewriteCond %{THE_REQUEST} (\\r|\\n|%0A|%0D) [NC,OR]
|
||||
|
||||
RewriteCond %{HTTP_REFERER} (<|>|’|%0A|%0D|%27|%3C|%3E|%00) [NC,OR]
|
||||
RewriteCond %{HTTP_COOKIE} (<|>|’|%0A|%0D|%27|%3C|%3E|%00) [NC,OR]
|
||||
RewriteCond %{REQUEST_URI} ^/(,|;|:|<|>|”>|”<|/|\\\.\.\\).{0,9999} [NC,OR]
|
||||
|
||||
RewriteCond %{HTTP_USER_AGENT} ^$ [OR]
|
||||
RewriteCond %{HTTP_USER_AGENT} ^(java|curl|wget) [NC,OR]
|
||||
RewriteCond %{HTTP_USER_AGENT} (winhttp|HTTrack|clshttp|archiver|loader|email|harvest|extract|grab|miner) [NC,OR]
|
||||
RewriteCond %{HTTP_USER_AGENT} (libwww-perl|curl|wget|python|nikto|scan) [NC,OR]
|
||||
RewriteCond %{HTTP_USER_AGENT} (<|>|’|%0A|%0D|%27|%3C|%3E|%00) [NC,OR]
|
||||
|
||||
#Block mySQL injects
|
||||
RewriteCond %{QUERY_STRING} (;|<|>|’|”|\)|%0A|%0D|%22|%27|%3C|%3E|%00).*(/\*|union|select|insert|cast|set|declare|drop|update|md5|benchmark) [NC,OR]
|
||||
|
||||
RewriteCond %{QUERY_STRING} \.\./\.\. [OR]
|
||||
|
||||
RewriteCond %{QUERY_STRING} (localhost|loopback|127\.0\.0\.1) [NC,OR]
|
||||
RewriteCond %{QUERY_STRING} \.[a-z0-9] [NC,OR]
|
||||
RewriteCond %{QUERY_STRING} (<|>|’|%0A|%0D|%27|%3C|%3E|%00) [NC]
|
||||
# Note: The final RewriteCond must NOT use the [OR] flag.
|
||||
|
||||
# Return 403 Forbidden error.
|
||||
RewriteRule .* index.php [F]
|
||||
470
samples/ApacheConf/filenames/apache2.conf
Normal file
@@ -0,0 +1,470 @@
|
||||
# This is the main Apache HTTP server configuration file. It contains the
|
||||
# configuration directives that give the server its instructions.
|
||||
# See <URL:http://httpd.apache.org/docs/2.2> for detailed information.
|
||||
# In particular, see
|
||||
# <URL:http://httpd.apache.org/docs/2.2/mod/directives.html>
|
||||
# for a discussion of each configuration directive.
|
||||
#
|
||||
# Do NOT simply read the instructions in here without understanding
|
||||
# what they do. They're here only as hints or reminders. If you are unsure
|
||||
# consult the online docs. You have been warned.
|
||||
#
|
||||
# Configuration and logfile names: If the filenames you specify for many
|
||||
# of the server's control files begin with "/" (or "drive:/" for Win32), the
|
||||
# server will use that explicit path. If the filenames do *not* begin
|
||||
# with "/", the value of ServerRoot is prepended -- so "/var/log/apache2/foo.log"
|
||||
# with ServerRoot set to "" will be interpreted by the
|
||||
# server as "//var/log/apache2/foo.log".
|
||||
|
||||
#
|
||||
# ServerRoot: The top of the directory tree under which the server's
|
||||
# configuration, error, and log files are kept.
|
||||
#
|
||||
# Do not add a slash at the end of the directory path. If you point
|
||||
# ServerRoot at a non-local disk, be sure to point the LockFile directive
|
||||
# at a local disk. If you wish to share the same ServerRoot for multiple
|
||||
# httpd daemons, you will need to change at least LockFile and PidFile.
|
||||
#
|
||||
ServerRoot ""
|
||||
|
||||
#
|
||||
# Listen: Allows you to bind Apache to specific IP addresses and/or
|
||||
# ports, instead of the default. See also the <VirtualHost>
|
||||
# directive.
|
||||
#
|
||||
# Change this to Listen on specific IP addresses as shown below to
|
||||
# prevent Apache from glomming onto all bound IP addresses.
|
||||
#
|
||||
#Listen 12.34.56.78:80
|
||||
Listen 80
|
||||
|
||||
#
|
||||
# Dynamic Shared Object (DSO) Support
|
||||
#
|
||||
# To be able to use the functionality of a module which was built as a DSO you
|
||||
# have to place corresponding `LoadModule' lines at this location so the
|
||||
# directives contained in it are actually available _before_ they are used.
|
||||
# Statically compiled modules (those listed by `httpd -l') do not need
|
||||
# to be loaded here.
|
||||
#
|
||||
# Example:
|
||||
# LoadModule foo_module modules/mod_foo.so
|
||||
#
|
||||
LoadModule authn_file_module /usr/lib/apache2/modules/mod_authn_file.so
|
||||
LoadModule authn_dbm_module /usr/lib/apache2/modules/mod_authn_dbm.so
|
||||
LoadModule authn_anon_module /usr/lib/apache2/modules/mod_authn_anon.so
|
||||
LoadModule authn_dbd_module /usr/lib/apache2/modules/mod_authn_dbd.so
|
||||
LoadModule authn_default_module /usr/lib/apache2/modules/mod_authn_default.so
|
||||
LoadModule authn_alias_module /usr/lib/apache2/modules/mod_authn_alias.so
|
||||
LoadModule authz_host_module /usr/lib/apache2/modules/mod_authz_host.so
|
||||
LoadModule authz_groupfile_module /usr/lib/apache2/modules/mod_authz_groupfile.so
|
||||
LoadModule authz_user_module /usr/lib/apache2/modules/mod_authz_user.so
|
||||
LoadModule authz_dbm_module /usr/lib/apache2/modules/mod_authz_dbm.so
|
||||
LoadModule authz_owner_module /usr/lib/apache2/modules/mod_authz_owner.so
|
||||
LoadModule authnz_ldap_module /usr/lib/apache2/modules/mod_authnz_ldap.so
|
||||
LoadModule authz_default_module /usr/lib/apache2/modules/mod_authz_default.so
|
||||
LoadModule auth_basic_module /usr/lib/apache2/modules/mod_auth_basic.so
|
||||
LoadModule auth_digest_module /usr/lib/apache2/modules/mod_auth_digest.so
|
||||
LoadModule file_cache_module /usr/lib/apache2/modules/mod_file_cache.so
|
||||
LoadModule cache_module /usr/lib/apache2/modules/mod_cache.so
|
||||
LoadModule disk_cache_module /usr/lib/apache2/modules/mod_disk_cache.so
|
||||
LoadModule mem_cache_module /usr/lib/apache2/modules/mod_mem_cache.so
|
||||
LoadModule dbd_module /usr/lib/apache2/modules/mod_dbd.so
|
||||
LoadModule dumpio_module /usr/lib/apache2/modules/mod_dumpio.so
|
||||
LoadModule ext_filter_module /usr/lib/apache2/modules/mod_ext_filter.so
|
||||
LoadModule include_module /usr/lib/apache2/modules/mod_include.so
|
||||
LoadModule filter_module /usr/lib/apache2/modules/mod_filter.so
|
||||
LoadModule charset_lite_module /usr/lib/apache2/modules/mod_charset_lite.so
|
||||
LoadModule deflate_module /usr/lib/apache2/modules/mod_deflate.so
|
||||
LoadModule ldap_module /usr/lib/apache2/modules/mod_ldap.so
|
||||
LoadModule log_forensic_module /usr/lib/apache2/modules/mod_log_forensic.so
|
||||
LoadModule env_module /usr/lib/apache2/modules/mod_env.so
|
||||
LoadModule mime_magic_module /usr/lib/apache2/modules/mod_mime_magic.so
|
||||
LoadModule cern_meta_module /usr/lib/apache2/modules/mod_cern_meta.so
|
||||
LoadModule expires_module /usr/lib/apache2/modules/mod_expires.so
|
||||
LoadModule headers_module /usr/lib/apache2/modules/mod_headers.so
|
||||
LoadModule ident_module /usr/lib/apache2/modules/mod_ident.so
|
||||
LoadModule usertrack_module /usr/lib/apache2/modules/mod_usertrack.so
|
||||
LoadModule unique_id_module /usr/lib/apache2/modules/mod_unique_id.so
|
||||
LoadModule setenvif_module /usr/lib/apache2/modules/mod_setenvif.so
|
||||
LoadModule version_module /usr/lib/apache2/modules/mod_version.so
|
||||
LoadModule proxy_module /usr/lib/apache2/modules/mod_proxy.so
|
||||
LoadModule proxy_connect_module /usr/lib/apache2/modules/mod_proxy_connect.so
|
||||
LoadModule proxy_ftp_module /usr/lib/apache2/modules/mod_proxy_ftp.so
|
||||
LoadModule proxy_http_module /usr/lib/apache2/modules/mod_proxy_http.so
|
||||
LoadModule proxy_ajp_module /usr/lib/apache2/modules/mod_proxy_ajp.so
|
||||
LoadModule proxy_balancer_module /usr/lib/apache2/modules/mod_proxy_balancer.so
|
||||
LoadModule ssl_module /usr/lib/apache2/modules/mod_ssl.so
|
||||
LoadModule mime_module /usr/lib/apache2/modules/mod_mime.so
|
||||
LoadModule dav_module /usr/lib/apache2/modules/mod_dav.so
|
||||
LoadModule status_module /usr/lib/apache2/modules/mod_status.so
|
||||
LoadModule autoindex_module /usr/lib/apache2/modules/mod_autoindex.so
|
||||
LoadModule asis_module /usr/lib/apache2/modules/mod_asis.so
|
||||
LoadModule info_module /usr/lib/apache2/modules/mod_info.so
|
||||
LoadModule suexec_module /usr/lib/apache2/modules/mod_suexec.so
|
||||
LoadModule cgid_module /usr/lib/apache2/modules/mod_cgid.so
|
||||
LoadModule cgi_module /usr/lib/apache2/modules/mod_cgi.so
|
||||
LoadModule dav_fs_module /usr/lib/apache2/modules/mod_dav_fs.so
|
||||
LoadModule dav_lock_module /usr/lib/apache2/modules/mod_dav_lock.so
|
||||
LoadModule vhost_alias_module /usr/lib/apache2/modules/mod_vhost_alias.so
|
||||
LoadModule negotiation_module /usr/lib/apache2/modules/mod_negotiation.so
|
||||
LoadModule dir_module /usr/lib/apache2/modules/mod_dir.so
|
||||
LoadModule imagemap_module /usr/lib/apache2/modules/mod_imagemap.so
|
||||
LoadModule actions_module /usr/lib/apache2/modules/mod_actions.so
|
||||
LoadModule speling_module /usr/lib/apache2/modules/mod_speling.so
|
||||
LoadModule userdir_module /usr/lib/apache2/modules/mod_userdir.so
|
||||
LoadModule alias_module /usr/lib/apache2/modules/mod_alias.so
|
||||
LoadModule rewrite_module /usr/lib/apache2/modules/mod_rewrite.so
|
||||
|
||||
<IfModule !mpm_netware_module>
|
||||
#
|
||||
# If you wish httpd to run as a different user or group, you must run
|
||||
# httpd as root initially and it will switch.
|
||||
#
|
||||
# User/Group: The name (or #number) of the user/group to run httpd as.
|
||||
# It is usually good practice to create a dedicated user and group for
|
||||
# running httpd, as with most system services.
|
||||
#
|
||||
User daemon
|
||||
Group daemon
|
||||
</IfModule>
|
||||
|
||||
# 'Main' server configuration
|
||||
#
|
||||
# The directives in this section set up the values used by the 'main'
|
||||
# server, which responds to any requests that aren't handled by a
|
||||
# <VirtualHost> definition. These values also provide defaults for
|
||||
# any <VirtualHost> containers you may define later in the file.
|
||||
#
|
||||
# All of these directives may appear inside <VirtualHost> containers,
|
||||
# in which case these default settings will be overridden for the
|
||||
# virtual host being defined.
|
||||
#
|
||||
|
||||
#
|
||||
# ServerAdmin: Your address, where problems with the server should be
|
||||
# e-mailed. This address appears on some server-generated pages, such
|
||||
# as error documents. e.g. admin@your-domain.com
|
||||
#
|
||||
ServerAdmin you@example.com
|
||||
|
||||
#
|
||||
# ServerName gives the name and port that the server uses to identify itself.
|
||||
# This can often be determined automatically, but we recommend you specify
|
||||
# it explicitly to prevent problems during startup.
|
||||
#
|
||||
# If your host doesn't have a registered DNS name, enter its IP address here.
|
||||
#
|
||||
#ServerName www.example.com:80
|
||||
|
||||
#
|
||||
# DocumentRoot: The directory out of which you will serve your
|
||||
# documents. By default, all requests are taken from this directory, but
|
||||
# symbolic links and aliases may be used to point to other locations.
|
||||
#
|
||||
DocumentRoot "/usr/share/apache2/default-site/htdocs"
|
||||
|
||||
#
|
||||
# Each directory to which Apache has access can be configured with respect
|
||||
# to which services and features are allowed and/or disabled in that
|
||||
# directory (and its subdirectories).
|
||||
#
|
||||
# First, we configure the "default" to be a very restrictive set of
|
||||
# features.
|
||||
#
|
||||
<Directory />
|
||||
Options FollowSymLinks
|
||||
AllowOverride None
|
||||
Order deny,allow
|
||||
Deny from all
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# Note that from this point forward you must specifically allow
|
||||
# particular features to be enabled - so if something's not working as
|
||||
# you might expect, make sure that you have specifically enabled it
|
||||
# below.
|
||||
#
|
||||
|
||||
#
|
||||
# This should be changed to whatever you set DocumentRoot to.
|
||||
#
|
||||
<Directory "/usr/share/apache2/default-site/htdocs">
|
||||
#
|
||||
# Possible values for the Options directive are "None", "All",
|
||||
# or any combination of:
|
||||
# Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews
|
||||
#
|
||||
# Note that "MultiViews" must be named *explicitly* --- "Options All"
|
||||
# doesn't give it to you.
|
||||
#
|
||||
# The Options directive is both complicated and important. Please see
|
||||
# http://httpd.apache.org/docs/2.2/mod/core.html#options
|
||||
# for more information.
|
||||
#
|
||||
Options Indexes FollowSymLinks
|
||||
|
||||
#
|
||||
# AllowOverride controls what directives may be placed in .htaccess files.
|
||||
# It can be "All", "None", or any combination of the keywords:
|
||||
# Options FileInfo AuthConfig Limit
|
||||
#
|
||||
AllowOverride None
|
||||
|
||||
#
|
||||
# Controls who can get stuff from this server.
|
||||
#
|
||||
Order allow,deny
|
||||
Allow from all
|
||||
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# DirectoryIndex: sets the file that Apache will serve if a directory
|
||||
# is requested.
|
||||
#
|
||||
<IfModule dir_module>
|
||||
DirectoryIndex index.html
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The following lines prevent .htaccess and .htpasswd files from being
|
||||
# viewed by Web clients.
|
||||
#
|
||||
<FilesMatch "^\.ht">
|
||||
Order allow,deny
|
||||
Deny from all
|
||||
Satisfy All
|
||||
</FilesMatch>
|
||||
|
||||
#
|
||||
# ErrorLog: The location of the error log file.
|
||||
# If you do not specify an ErrorLog directive within a <VirtualHost>
|
||||
# container, error messages relating to that virtual host will be
|
||||
# logged here. If you *do* define an error logfile for a <VirtualHost>
|
||||
# container, that host's errors will be logged there and not here.
|
||||
#
|
||||
ErrorLog /var/log/apache2/error_log
|
||||
|
||||
#
|
||||
# LogLevel: Control the number of messages logged to the error_log.
|
||||
# Possible values include: debug, info, notice, warn, error, crit,
|
||||
# alert, emerg.
|
||||
#
|
||||
LogLevel warn
|
||||
|
||||
<IfModule log_config_module>
|
||||
#
|
||||
# The following directives define some format nicknames for use with
|
||||
# a CustomLog directive (see below).
|
||||
#
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b" common
|
||||
|
||||
<IfModule logio_module>
|
||||
# You need to enable mod_logio.c to use %I and %O
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The location and format of the access logfile (Common Logfile Format).
|
||||
# If you do not define any access logfiles within a <VirtualHost>
|
||||
# container, they will be logged here. Contrariwise, if you *do*
|
||||
# define per-<VirtualHost> access logfiles, transactions will be
|
||||
# logged therein and *not* in this file.
|
||||
#
|
||||
CustomLog /var/log/apache2/access_log common
|
||||
|
||||
#
|
||||
# If you prefer a logfile with access, agent, and referer information
|
||||
# (Combined Logfile Format) you can use the following directive.
|
||||
#
|
||||
#CustomLog /var/log/apache2/access_log combined
|
||||
</IfModule>
|
||||
|
||||
<IfModule alias_module>
|
||||
#
|
||||
# Redirect: Allows you to tell clients about documents that used to
|
||||
# exist in your server's namespace, but do not anymore. The client
|
||||
# will make a new request for the document at its new location.
|
||||
# Example:
|
||||
# Redirect permanent /foo http://www.example.com/bar
|
||||
|
||||
#
|
||||
# Alias: Maps web paths into filesystem paths and is used to
|
||||
# access content that does not live under the DocumentRoot.
|
||||
# Example:
|
||||
# Alias /webpath /full/filesystem/path
|
||||
#
|
||||
# If you include a trailing / on /webpath then the server will
|
||||
# require it to be present in the URL. You will also likely
|
||||
# need to provide a <Directory> section to allow access to
|
||||
# the filesystem path.
|
||||
|
||||
#
|
||||
# ScriptAlias: This controls which directories contain server scripts.
|
||||
# ScriptAliases are essentially the same as Aliases, except that
|
||||
# documents in the target directory are treated as applications and
|
||||
# run by the server when requested rather than as documents sent to the
|
||||
# client. The same rules about trailing "/" apply to ScriptAlias
|
||||
# directives as to Alias.
|
||||
#
|
||||
ScriptAlias /cgi-bin/ "/usr/lib/cgi-bin/"
|
||||
|
||||
</IfModule>
|
||||
|
||||
<IfModule cgid_module>
|
||||
#
|
||||
# ScriptSock: On threaded servers, designate the path to the UNIX
|
||||
# socket used to communicate with the CGI daemon of mod_cgid.
|
||||
#
|
||||
#Scriptsock /var/run/apache2/cgisock
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# "/usr/lib/cgi-bin" should be changed to whatever your ScriptAliased
|
||||
# CGI directory exists, if you have that configured.
|
||||
#
|
||||
<Directory "/usr/lib/cgi-bin">
|
||||
AllowOverride None
|
||||
Options None
|
||||
Order allow,deny
|
||||
Allow from all
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# DefaultType: the default MIME type the server will use for a document
|
||||
# if it cannot otherwise determine one, such as from filename extensions.
|
||||
# If your server contains mostly text or HTML documents, "text/plain" is
|
||||
# a good value. If most of your content is binary, such as applications
|
||||
# or images, you may want to use "application/octet-stream" instead to
|
||||
# keep browsers from trying to display binary files as though they are
|
||||
# text.
|
||||
#
|
||||
DefaultType text/plain
|
||||
|
||||
<IfModule mime_module>
|
||||
#
|
||||
# TypesConfig points to the file containing the list of mappings from
|
||||
# filename extension to MIME-type.
|
||||
#
|
||||
TypesConfig /etc/apache2/mime.types
|
||||
|
||||
#
|
||||
# AddType allows you to add to or override the MIME configuration
|
||||
# file specified in TypesConfig for specific file types.
|
||||
#
|
||||
#AddType application/x-gzip .tgz
|
||||
#
|
||||
# AddEncoding allows you to have certain browsers uncompress
|
||||
# information on the fly. Note: Not all browsers support this.
|
||||
#
|
||||
#AddEncoding x-compress .Z
|
||||
#AddEncoding x-gzip .gz .tgz
|
||||
#
|
||||
# If the AddEncoding directives above are commented-out, then you
|
||||
# probably should define those extensions to indicate media types:
|
||||
#
|
||||
AddType application/x-compress .Z
|
||||
AddType application/x-gzip .gz .tgz
|
||||
|
||||
#
|
||||
# AddHandler allows you to map certain file extensions to "handlers":
|
||||
# actions unrelated to filetype. These can be either built into the server
|
||||
# or added with the Action directive (see below)
|
||||
#
|
||||
# To use CGI scripts outside of ScriptAliased directories:
|
||||
# (You will also need to add "ExecCGI" to the "Options" directive.)
|
||||
#
|
||||
#AddHandler cgi-script .cgi
|
||||
|
||||
# For type maps (negotiated resources):
|
||||
#AddHandler type-map var
|
||||
|
||||
#
|
||||
# Filters allow you to process content before it is sent to the client.
|
||||
#
|
||||
# To parse .shtml files for server-side includes (SSI):
|
||||
# (You will also need to add "Includes" to the "Options" directive.)
|
||||
#
|
||||
#AddType text/html .shtml
|
||||
#AddOutputFilter INCLUDES .shtml
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The mod_mime_magic module allows the server to use various hints from the
|
||||
# contents of the file itself to determine its type. The MIMEMagicFile
|
||||
# directive tells the module where the hint definitions are located.
|
||||
#
|
||||
#MIMEMagicFile /etc/apache2/magic
|
||||
|
||||
#
|
||||
# Customizable error responses come in three flavors:
|
||||
# 1) plain text 2) local redirects 3) external redirects
|
||||
#
|
||||
# Some examples:
|
||||
#ErrorDocument 500 "The server made a boo boo."
|
||||
#ErrorDocument 404 /missing.html
|
||||
#ErrorDocument 404 "/cgi-bin/missing_handler.pl"
|
||||
#ErrorDocument 402 http://www.example.com/subscription_info.html
|
||||
#
|
||||
|
||||
#
|
||||
# EnableMMAP and EnableSendfile: On systems that support it,
|
||||
# memory-mapping or the sendfile syscall is used to deliver
|
||||
# files. This usually improves server performance, but must
|
||||
# be turned off when serving from networked-mounted
|
||||
# filesystems or if support for these functions is otherwise
|
||||
# broken on your system.
|
||||
#
|
||||
#EnableMMAP off
|
||||
#EnableSendfile off
|
||||
|
||||
# Supplemental configuration
|
||||
#
|
||||
# The configuration files in the /etc/apache2/extra/ directory can be
|
||||
# included to add extra features or to modify the default configuration of
|
||||
# the server, or you may simply copy their contents here and change as
|
||||
# necessary.
|
||||
|
||||
# Server-pool management (MPM specific)
|
||||
#Include /etc/apache2/extra/httpd-mpm.conf
|
||||
|
||||
# Multi-language error messages
|
||||
#Include /etc/apache2/extra/httpd-multilang-errordoc.conf
|
||||
|
||||
# Fancy directory listings
|
||||
#Include /etc/apache2/extra/httpd-autoindex.conf
|
||||
|
||||
# Language settings
|
||||
#Include /etc/apache2/extra/httpd-languages.conf
|
||||
|
||||
# User home directories
|
||||
#Include /etc/apache2/extra/httpd-userdir.conf
|
||||
|
||||
# Real-time info on requests and configuration
|
||||
#Include /etc/apache2/extra/httpd-info.conf
|
||||
|
||||
# Virtual hosts
|
||||
#Include /etc/apache2/extra/httpd-vhosts.conf
|
||||
|
||||
# Local access to the Apache HTTP Server Manual
|
||||
#Include /etc/apache2/extra/httpd-manual.conf
|
||||
|
||||
# Distributed authoring and versioning (WebDAV)
|
||||
#Include /etc/apache2/extra/httpd-dav.conf
|
||||
|
||||
# Various default settings
|
||||
#Include /etc/apache2/extra/httpd-default.conf
|
||||
|
||||
# Secure (SSL/TLS) connections
|
||||
#Include /etc/apache2/extra/httpd-ssl.conf
|
||||
#
|
||||
# Note: The following must be present to support
|
||||
# starting without SSL on platforms with no /dev/random equivalent
|
||||
# but a statically compiled-in mod_ssl.
|
||||
#
|
||||
<IfModule ssl_module>
|
||||
SSLRandomSeed startup builtin
|
||||
SSLRandomSeed connect builtin
|
||||
</IfModule>
|
||||
500
samples/ApacheConf/filenames/httpd.conf
Normal file
@@ -0,0 +1,500 @@
|
||||
#
|
||||
# This is the main Apache HTTP server configuration file. It contains the
|
||||
# configuration directives that give the server its instructions.
|
||||
# See <URL:http://httpd.apache.org/docs/2.2> for detailed information.
|
||||
# In particular, see
|
||||
# <URL:http://httpd.apache.org/docs/2.2/mod/directives.html>
|
||||
# for a discussion of each configuration directive.
|
||||
#
|
||||
# Do NOT simply read the instructions in here without understanding
|
||||
# what they do. They're here only as hints or reminders. If you are unsure
|
||||
# consult the online docs. You have been warned.
|
||||
#
|
||||
# Configuration and logfile names: If the filenames you specify for many
|
||||
# of the server's control files begin with "/" (or "drive:/" for Win32), the
|
||||
# server will use that explicit path. If the filenames do *not* begin
|
||||
# with "/", the value of ServerRoot is prepended -- so "log/foo_log"
|
||||
# with ServerRoot set to "/usr" will be interpreted by the
|
||||
# server as "/usr/log/foo_log".
|
||||
|
||||
#
|
||||
# ServerRoot: The top of the directory tree under which the server's
|
||||
# configuration, error, and log files are kept.
|
||||
#
|
||||
# Do not add a slash at the end of the directory path. If you point
|
||||
# ServerRoot at a non-local disk, be sure to point the LockFile directive
|
||||
# at a local disk. If you wish to share the same ServerRoot for multiple
|
||||
# httpd daemons, you will need to change at least LockFile and PidFile.
|
||||
#
|
||||
ServerRoot "/usr"
|
||||
|
||||
#
|
||||
# Listen: Allows you to bind Apache to specific IP addresses and/or
|
||||
# ports, instead of the default. See also the <VirtualHost>
|
||||
# directive.
|
||||
#
|
||||
# Change this to Listen on specific IP addresses as shown below to
|
||||
# prevent Apache from glomming onto all bound IP addresses.
|
||||
#
|
||||
#Listen 12.34.56.78:80
|
||||
Listen 80
|
||||
|
||||
#
|
||||
# Dynamic Shared Object (DSO) Support
|
||||
#
|
||||
# To be able to use the functionality of a module which was built as a DSO you
|
||||
# have to place corresponding `LoadModule' lines at this location so the
|
||||
# directives contained in it are actually available _before_ they are used.
|
||||
# Statically compiled modules (those listed by `httpd -l') do not need
|
||||
# to be loaded here.
|
||||
#
|
||||
# Example:
|
||||
# LoadModule foo_module modules/mod_foo.so
|
||||
#
|
||||
LoadModule authn_file_module libexec/apache2/mod_authn_file.so
|
||||
LoadModule authn_dbm_module libexec/apache2/mod_authn_dbm.so
|
||||
LoadModule authn_anon_module libexec/apache2/mod_authn_anon.so
|
||||
LoadModule authn_dbd_module libexec/apache2/mod_authn_dbd.so
|
||||
LoadModule authn_default_module libexec/apache2/mod_authn_default.so
|
||||
LoadModule authz_host_module libexec/apache2/mod_authz_host.so
|
||||
LoadModule authz_groupfile_module libexec/apache2/mod_authz_groupfile.so
|
||||
LoadModule authz_user_module libexec/apache2/mod_authz_user.so
|
||||
LoadModule authz_dbm_module libexec/apache2/mod_authz_dbm.so
|
||||
LoadModule authz_owner_module libexec/apache2/mod_authz_owner.so
|
||||
LoadModule authz_default_module libexec/apache2/mod_authz_default.so
|
||||
LoadModule auth_basic_module libexec/apache2/mod_auth_basic.so
|
||||
LoadModule auth_digest_module libexec/apache2/mod_auth_digest.so
|
||||
LoadModule cache_module libexec/apache2/mod_cache.so
|
||||
LoadModule disk_cache_module libexec/apache2/mod_disk_cache.so
|
||||
LoadModule mem_cache_module libexec/apache2/mod_mem_cache.so
|
||||
LoadModule dbd_module libexec/apache2/mod_dbd.so
|
||||
LoadModule dumpio_module libexec/apache2/mod_dumpio.so
|
||||
LoadModule reqtimeout_module libexec/apache2/mod_reqtimeout.so
|
||||
LoadModule ext_filter_module libexec/apache2/mod_ext_filter.so
|
||||
LoadModule include_module libexec/apache2/mod_include.so
|
||||
LoadModule filter_module libexec/apache2/mod_filter.so
|
||||
LoadModule substitute_module libexec/apache2/mod_substitute.so
|
||||
LoadModule deflate_module libexec/apache2/mod_deflate.so
|
||||
LoadModule log_config_module libexec/apache2/mod_log_config.so
|
||||
LoadModule log_forensic_module libexec/apache2/mod_log_forensic.so
|
||||
LoadModule logio_module libexec/apache2/mod_logio.so
|
||||
LoadModule env_module libexec/apache2/mod_env.so
|
||||
LoadModule mime_magic_module libexec/apache2/mod_mime_magic.so
|
||||
LoadModule cern_meta_module libexec/apache2/mod_cern_meta.so
|
||||
LoadModule expires_module libexec/apache2/mod_expires.so
|
||||
LoadModule headers_module libexec/apache2/mod_headers.so
|
||||
LoadModule ident_module libexec/apache2/mod_ident.so
|
||||
LoadModule usertrack_module libexec/apache2/mod_usertrack.so
|
||||
#LoadModule unique_id_module libexec/apache2/mod_unique_id.so
|
||||
LoadModule setenvif_module libexec/apache2/mod_setenvif.so
|
||||
LoadModule version_module libexec/apache2/mod_version.so
|
||||
LoadModule proxy_module libexec/apache2/mod_proxy.so
|
||||
LoadModule proxy_connect_module libexec/apache2/mod_proxy_connect.so
|
||||
LoadModule proxy_ftp_module libexec/apache2/mod_proxy_ftp.so
|
||||
LoadModule proxy_http_module libexec/apache2/mod_proxy_http.so
|
||||
LoadModule proxy_scgi_module libexec/apache2/mod_proxy_scgi.so
|
||||
LoadModule proxy_ajp_module libexec/apache2/mod_proxy_ajp.so
|
||||
LoadModule proxy_balancer_module libexec/apache2/mod_proxy_balancer.so
|
||||
LoadModule ssl_module libexec/apache2/mod_ssl.so
|
||||
LoadModule mime_module libexec/apache2/mod_mime.so
|
||||
LoadModule dav_module libexec/apache2/mod_dav.so
|
||||
LoadModule status_module libexec/apache2/mod_status.so
|
||||
LoadModule autoindex_module libexec/apache2/mod_autoindex.so
|
||||
LoadModule asis_module libexec/apache2/mod_asis.so
|
||||
LoadModule info_module libexec/apache2/mod_info.so
|
||||
LoadModule cgi_module libexec/apache2/mod_cgi.so
|
||||
LoadModule dav_fs_module libexec/apache2/mod_dav_fs.so
|
||||
LoadModule vhost_alias_module libexec/apache2/mod_vhost_alias.so
|
||||
LoadModule negotiation_module libexec/apache2/mod_negotiation.so
|
||||
LoadModule dir_module libexec/apache2/mod_dir.so
|
||||
LoadModule imagemap_module libexec/apache2/mod_imagemap.so
|
||||
LoadModule actions_module libexec/apache2/mod_actions.so
|
||||
LoadModule speling_module libexec/apache2/mod_speling.so
|
||||
LoadModule userdir_module libexec/apache2/mod_userdir.so
|
||||
LoadModule alias_module libexec/apache2/mod_alias.so
|
||||
LoadModule rewrite_module libexec/apache2/mod_rewrite.so
|
||||
#LoadModule perl_module libexec/apache2/mod_perl.so
|
||||
#LoadModule php5_module libexec/apache2/libphp5.so
|
||||
#LoadModule hfs_apple_module libexec/apache2/mod_hfs_apple.so
|
||||
|
||||
<IfModule !mpm_netware_module>
|
||||
<IfModule !mpm_winnt_module>
|
||||
#
|
||||
# If you wish httpd to run as a different user or group, you must run
|
||||
# httpd as root initially and it will switch.
|
||||
#
|
||||
# User/Group: The name (or #number) of the user/group to run httpd as.
|
||||
# It is usually good practice to create a dedicated user and group for
|
||||
# running httpd, as with most system services.
|
||||
#
|
||||
User _www
|
||||
Group _www
|
||||
|
||||
</IfModule>
|
||||
</IfModule>
|
||||
|
||||
# 'Main' server configuration
|
||||
#
|
||||
# The directives in this section set up the values used by the 'main'
|
||||
# server, which responds to any requests that aren't handled by a
|
||||
# <VirtualHost> definition. These values also provide defaults for
|
||||
# any <VirtualHost> containers you may define later in the file.
|
||||
#
|
||||
# All of these directives may appear inside <VirtualHost> containers,
|
||||
# in which case these default settings will be overridden for the
|
||||
# virtual host being defined.
|
||||
#
|
||||
|
||||
#
|
||||
# ServerAdmin: Your address, where problems with the server should be
|
||||
# e-mailed. This address appears on some server-generated pages, such
|
||||
# as error documents. e.g. admin@your-domain.com
|
||||
#
|
||||
ServerAdmin you@example.com
|
||||
|
||||
#
|
||||
# ServerName gives the name and port that the server uses to identify itself.
|
||||
# This can often be determined automatically, but we recommend you specify
|
||||
# it explicitly to prevent problems during startup.
|
||||
#
|
||||
# If your host doesn't have a registered DNS name, enter its IP address here.
|
||||
#
|
||||
#ServerName www.example.com:80
|
||||
|
||||
#
|
||||
# DocumentRoot: The directory out of which you will serve your
|
||||
# documents. By default, all requests are taken from this directory, but
|
||||
# symbolic links and aliases may be used to point to other locations.
|
||||
#
|
||||
DocumentRoot "/Library/WebServer/Documents"
|
||||
|
||||
#
|
||||
# Each directory to which Apache has access can be configured with respect
|
||||
# to which services and features are allowed and/or disabled in that
|
||||
# directory (and its subdirectories).
|
||||
#
|
||||
# First, we configure the "default" to be a very restrictive set of
|
||||
# features.
|
||||
#
|
||||
<Directory />
|
||||
Options FollowSymLinks
|
||||
AllowOverride None
|
||||
Order deny,allow
|
||||
Deny from all
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# Note that from this point forward you must specifically allow
|
||||
# particular features to be enabled - so if something's not working as
|
||||
# you might expect, make sure that you have specifically enabled it
|
||||
# below.
|
||||
#
|
||||
|
||||
#
|
||||
# This should be changed to whatever you set DocumentRoot to.
|
||||
#
|
||||
<Directory "/Library/WebServer/Documents">
|
||||
#
|
||||
# Possible values for the Options directive are "None", "All",
|
||||
# or any combination of:
|
||||
# Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews
|
||||
#
|
||||
# Note that "MultiViews" must be named *explicitly* --- "Options All"
|
||||
# doesn't give it to you.
|
||||
#
|
||||
# The Options directive is both complicated and important. Please see
|
||||
# http://httpd.apache.org/docs/2.2/mod/core.html#options
|
||||
# for more information.
|
||||
#
|
||||
Options Indexes FollowSymLinks MultiViews
|
||||
|
||||
#
|
||||
# AllowOverride controls what directives may be placed in .htaccess files.
|
||||
# It can be "All", "None", or any combination of the keywords:
|
||||
# Options FileInfo AuthConfig Limit
|
||||
#
|
||||
AllowOverride None
|
||||
|
||||
#
|
||||
# Controls who can get stuff from this server.
|
||||
#
|
||||
Order allow,deny
|
||||
Allow from all
|
||||
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# DirectoryIndex: sets the file that Apache will serve if a directory
|
||||
# is requested.
|
||||
#
|
||||
<IfModule dir_module>
|
||||
DirectoryIndex index.html
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The following lines prevent .htaccess and .htpasswd files from being
|
||||
# viewed by Web clients.
|
||||
#
|
||||
<FilesMatch "^\.([Hh][Tt]|[Dd][Ss]_[Ss])">
|
||||
Order allow,deny
|
||||
Deny from all
|
||||
Satisfy All
|
||||
</FilesMatch>
|
||||
|
||||
#
|
||||
# Apple specific filesystem protection.
|
||||
#
|
||||
<Files "rsrc">
|
||||
Order allow,deny
|
||||
Deny from all
|
||||
Satisfy All
|
||||
</Files>
|
||||
<DirectoryMatch ".*\.\.namedfork">
|
||||
Order allow,deny
|
||||
Deny from all
|
||||
Satisfy All
|
||||
</DirectoryMatch>
|
||||
|
||||
#
|
||||
# ErrorLog: The location of the error log file.
|
||||
# If you do not specify an ErrorLog directive within a <VirtualHost>
|
||||
# container, error messages relating to that virtual host will be
|
||||
# logged here. If you *do* define an error logfile for a <VirtualHost>
|
||||
# container, that host's errors will be logged there and not here.
|
||||
#
|
||||
ErrorLog "/private/var/log/apache2/error_log"
|
||||
|
||||
#
|
||||
# LogLevel: Control the number of messages logged to the error_log.
|
||||
# Possible values include: debug, info, notice, warn, error, crit,
|
||||
# alert, emerg.
|
||||
#
|
||||
LogLevel warn
|
||||
|
||||
<IfModule log_config_module>
|
||||
#
|
||||
# The following directives define some format nicknames for use with
|
||||
# a CustomLog directive (see below).
|
||||
#
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b" common
|
||||
|
||||
<IfModule logio_module>
|
||||
# You need to enable mod_logio.c to use %I and %O
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The location and format of the access logfile (Common Logfile Format).
|
||||
# If you do not define any access logfiles within a <VirtualHost>
|
||||
# container, they will be logged here. Contrariwise, if you *do*
|
||||
# define per-<VirtualHost> access logfiles, transactions will be
|
||||
# logged therein and *not* in this file.
|
||||
#
|
||||
CustomLog "/private/var/log/apache2/access_log" common
|
||||
|
||||
#
|
||||
# If you prefer a logfile with access, agent, and referer information
|
||||
# (Combined Logfile Format) you can use the following directive.
|
||||
#
|
||||
#CustomLog "/private/var/log/apache2/access_log" combined
|
||||
</IfModule>
|
||||
|
||||
<IfModule alias_module>
|
||||
#
|
||||
# Redirect: Allows you to tell clients about documents that used to
|
||||
# exist in your server's namespace, but do not anymore. The client
|
||||
# will make a new request for the document at its new location.
|
||||
# Example:
|
||||
# Redirect permanent /foo http://www.example.com/bar
|
||||
|
||||
#
|
||||
# Alias: Maps web paths into filesystem paths and is used to
|
||||
# access content that does not live under the DocumentRoot.
|
||||
# Example:
|
||||
# Alias /webpath /full/filesystem/path
|
||||
#
|
||||
# If you include a trailing / on /webpath then the server will
|
||||
# require it to be present in the URL. You will also likely
|
||||
# need to provide a <Directory> section to allow access to
|
||||
# the filesystem path.
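#
# A minimal sketch of that pattern (hypothetical paths, commented out,
# not part of this configuration):
#
#Alias /extra/ "/Library/WebServer/ExtraDocs/"
#<Directory "/Library/WebServer/ExtraDocs">
#    Order allow,deny
#    Allow from all
#</Directory>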
|
||||
|
||||
#
|
||||
# ScriptAlias: This controls which directories contain server scripts.
|
||||
# ScriptAliases are essentially the same as Aliases, except that
|
||||
# documents in the target directory are treated as applications and
|
||||
# run by the server when requested rather than as documents sent to the
|
||||
# client. The same rules about trailing "/" apply to ScriptAlias
|
||||
# directives as to Alias.
|
||||
#
|
||||
ScriptAliasMatch ^/cgi-bin/((?!(?i:webobjects)).*$) "/Library/WebServer/CGI-Executables/$1"
|
||||
|
||||
</IfModule>
|
||||
|
||||
<IfModule cgid_module>
|
||||
#
|
||||
# ScriptSock: On threaded servers, designate the path to the UNIX
|
||||
# socket used to communicate with the CGI daemon of mod_cgid.
|
||||
#
|
||||
#Scriptsock /private/var/run/cgisock
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# "/Library/WebServer/CGI-Executables" should be changed to whatever your ScriptAliased
|
||||
# CGI directory is, if you have one configured.
|
||||
#
|
||||
<Directory "/Library/WebServer/CGI-Executables">
|
||||
AllowOverride None
|
||||
Options None
|
||||
Order allow,deny
|
||||
Allow from all
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# DefaultType: the default MIME type the server will use for a document
|
||||
# if it cannot otherwise determine one, such as from filename extensions.
|
||||
# If your server contains mostly text or HTML documents, "text/plain" is
|
||||
# a good value. If most of your content is binary, such as applications
|
||||
# or images, you may want to use "application/octet-stream" instead to
|
||||
# keep browsers from trying to display binary files as though they are
|
||||
# text.
|
||||
#
|
||||
DefaultType text/plain
|
||||
|
||||
<IfModule mime_module>
|
||||
#
|
||||
# TypesConfig points to the file containing the list of mappings from
|
||||
# filename extension to MIME-type.
|
||||
#
|
||||
TypesConfig /private/etc/apache2/mime.types
|
||||
|
||||
#
|
||||
# AddType allows you to add to or override the MIME configuration
|
||||
# file specified in TypesConfig for specific file types.
|
||||
#
|
||||
#AddType application/x-gzip .tgz
|
||||
#
|
||||
# AddEncoding allows you to have certain browsers uncompress
|
||||
# information on the fly. Note: Not all browsers support this.
|
||||
#
|
||||
#AddEncoding x-compress .Z
|
||||
#AddEncoding x-gzip .gz .tgz
|
||||
#
|
||||
# If the AddEncoding directives above are commented-out, then you
|
||||
# probably should define those extensions to indicate media types:
|
||||
#
|
||||
AddType application/x-compress .Z
|
||||
AddType application/x-gzip .gz .tgz
|
||||
|
||||
#
|
||||
# AddHandler allows you to map certain file extensions to "handlers":
|
||||
# actions unrelated to filetype. These can be either built into the server
|
||||
# or added with the Action directive (see below)
|
||||
#
|
||||
# To use CGI scripts outside of ScriptAliased directories:
|
||||
# (You will also need to add "ExecCGI" to the "Options" directive.)
|
||||
#
|
||||
#AddHandler cgi-script .cgi
|
||||
|
||||
# For type maps (negotiated resources):
|
||||
#AddHandler type-map var
|
||||
|
||||
#
|
||||
# Filters allow you to process content before it is sent to the client.
|
||||
#
|
||||
# To parse .shtml files for server-side includes (SSI):
|
||||
# (You will also need to add "Includes" to the "Options" directive.)
|
||||
#
|
||||
#AddType text/html .shtml
|
||||
#AddOutputFilter INCLUDES .shtml
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The mod_mime_magic module allows the server to use various hints from the
|
||||
# contents of the file itself to determine its type. The MIMEMagicFile
|
||||
# directive tells the module where the hint definitions are located.
|
||||
#
|
||||
#MIMEMagicFile /private/etc/apache2/magic
|
||||
|
||||
#
|
||||
# Customizable error responses come in three flavors:
|
||||
# 1) plain text 2) local redirects 3) external redirects
|
||||
#
|
||||
# Some examples:
|
||||
#ErrorDocument 500 "The server made a boo boo."
|
||||
#ErrorDocument 404 /missing.html
|
||||
#ErrorDocument 404 "/cgi-bin/missing_handler.pl"
|
||||
#ErrorDocument 402 http://www.example.com/subscription_info.html
|
||||
#
|
||||
|
||||
#
|
||||
# MaxRanges: Maximum number of Ranges in a request before
|
||||
# returning the entire resource, or one of the special
|
||||
# values 'default', 'none' or 'unlimited'.
|
||||
# Default setting is to accept 200 Ranges.
|
||||
#MaxRanges unlimited
|
||||
|
||||
#
|
||||
# EnableMMAP and EnableSendfile: On systems that support it,
|
||||
# memory-mapping or the sendfile syscall is used to deliver
|
||||
# files. This usually improves server performance, but must
|
||||
# be turned off when serving from networked-mounted
|
||||
# filesystems or if support for these functions is otherwise
|
||||
# broken on your system.
|
||||
#
|
||||
#EnableMMAP off
|
||||
#EnableSendfile off
|
||||
|
||||
# 6894961
|
||||
TraceEnable off
|
||||
|
||||
# Supplemental configuration
|
||||
#
|
||||
# The configuration files in the /private/etc/apache2/extra/ directory can be
|
||||
# included to add extra features or to modify the default configuration of
|
||||
# the server, or you may simply copy their contents here and change as
|
||||
# necessary.
|
||||
|
||||
# Server-pool management (MPM specific)
|
||||
Include /private/etc/apache2/extra/httpd-mpm.conf
|
||||
|
||||
# Multi-language error messages
|
||||
#Include /private/etc/apache2/extra/httpd-multilang-errordoc.conf
|
||||
|
||||
# Fancy directory listings
|
||||
Include /private/etc/apache2/extra/httpd-autoindex.conf
|
||||
|
||||
# Language settings
|
||||
Include /private/etc/apache2/extra/httpd-languages.conf
|
||||
|
||||
# User home directories
|
||||
Include /private/etc/apache2/extra/httpd-userdir.conf
|
||||
|
||||
# Real-time info on requests and configuration
|
||||
#Include /private/etc/apache2/extra/httpd-info.conf
|
||||
|
||||
# Virtual hosts
|
||||
#Include /private/etc/apache2/extra/httpd-vhosts.conf
|
||||
|
||||
# Local access to the Apache HTTP Server Manual
|
||||
Include /private/etc/apache2/extra/httpd-manual.conf
|
||||
|
||||
# Distributed authoring and versioning (WebDAV)
|
||||
#Include /private/etc/apache2/extra/httpd-dav.conf
|
||||
|
||||
# Various default settings
|
||||
#Include /private/etc/apache2/extra/httpd-default.conf
|
||||
|
||||
# Secure (SSL/TLS) connections
|
||||
#Include /private/etc/apache2/extra/httpd-ssl.conf
|
||||
#
|
||||
# Note: The following must be present to support
|
||||
# starting without SSL on platforms with no /dev/random equivalent
|
||||
# but a statically compiled-in mod_ssl.
|
||||
#
|
||||
<IfModule ssl_module>
|
||||
SSLRandomSeed startup builtin
|
||||
SSLRandomSeed connect builtin
|
||||
</IfModule>
|
||||
|
||||
Include /private/etc/apache2/other/*.conf
|
||||
13
samples/AsciiDoc/encoding.asciidoc
Normal file
@@ -0,0 +1,13 @@
|
||||
Gregory Romé has written an AsciiDoc plugin for the Redmine project management application.
|
||||
|
||||
https://github.com/foo-users/foo
|
||||
へと `vicmd` キーマップを足してみている試み、
|
||||
アニメーションgifです。
|
||||
|
||||
tag::romé[]
|
||||
Gregory Romé has written an AsciiDoc plugin for the Redmine project management application.
|
||||
end::romé[]
|
||||
|
||||
== Überschrift
|
||||
|
||||
* Codierungen sind verrückt auf älteren Versionen von Ruby
|
||||
10
samples/AsciiDoc/list.asc
Normal file
@@ -0,0 +1,10 @@
|
||||
AsciiDoc Home Page
|
||||
==================
|
||||
|
||||
Example Articles
|
||||
~~~~~~~~~~~~~~~~
|
||||
- Item 1
|
||||
|
||||
- Item 2
|
||||
|
||||
- Item 3
|
||||
25
samples/AsciiDoc/sample.adoc
Normal file
@@ -0,0 +1,25 @@
|
||||
Document Title
|
||||
==============
|
||||
Doc Writer <thedoc@asciidoctor.org>
|
||||
:idprefix: id_
|
||||
|
||||
Preamble paragraph.
|
||||
|
||||
NOTE: This is a test, only a test.
|
||||
|
||||
== Section A
|
||||
|
||||
*Section A* paragraph.
|
||||
|
||||
=== Section A Subsection
|
||||
|
||||
*Section A* 'subsection' paragraph.
|
||||
|
||||
== Section B
|
||||
|
||||
*Section B* paragraph.
|
||||
|
||||
.Section B list
|
||||
* Item 1
|
||||
* Item 2
|
||||
* Item 3
|
||||
41
samples/AspectJ/CacheAspect.aj
Normal file
@@ -0,0 +1,41 @@
|
||||
package com.blogspot.miguelinlas3.aspectj.cache;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.WeakHashMap;
|
||||
|
||||
import org.aspectj.lang.JoinPoint;
|
||||
|
||||
import com.blogspot.miguelinlas3.aspectj.cache.marker.Cachable;
|
||||
|
||||
/**
|
||||
* This simple aspect simulates the behaviour of a very simple cache
|
||||
*
|
||||
* @author migue
|
||||
*
|
||||
*/
|
||||
public aspect CacheAspect {
|
||||
|
||||
public pointcut cache(Cachable cachable): execution(@Cachable * * (..)) && @annotation(cachable);
|
||||
|
||||
Object around(Cachable cachable): cache(cachable){
|
||||
|
||||
String evaluatedKey = this.evaluateKey(cachable.scriptKey(), thisJoinPoint);
|
||||
|
||||
if(cache.containsKey(evaluatedKey)){
|
||||
System.out.println("Cache hit for key " + evaluatedKey);
|
||||
return this.cache.get(evaluatedKey);
|
||||
}
|
||||
|
||||
System.out.println("Cache miss for key " + evaluatedKey);
|
||||
Object value = proceed(cachable);
|
||||
cache.put(evaluatedKey, value);
|
||||
return value;
|
||||
}
|
||||
|
||||
protected String evaluateKey(String key, JoinPoint joinPoint) {
|
||||
// TODO add some smart stuff to allow simple scripting in the @Cachable annotation
|
||||
return key;
|
||||
}
|
||||
|
||||
protected Map<String, Object> cache = new WeakHashMap<String, Object>();
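
// Usage sketch (illustrative only, not part of this sample): a method carrying
// the @Cachable marker annotation referenced above, with its scriptKey()
// attribute, would be matched by the cache() pointcut and served from the map
// on repeated calls, e.g.
//
//   @Cachable(scriptKey = "expensiveCall")
//   public Result expensiveCall(String input) { ... }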
|
||||
}
|
||||
50
samples/AspectJ/OptimizeRecursionCache.aj
Normal file
@@ -0,0 +1,50 @@
|
||||
package aspects.caching;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
 * Cache aspect for optimizing recursive functions.
|
||||
*
|
||||
* @author Migueli
|
||||
* @date 05/11/2013
|
||||
* @version 1.0
|
||||
*
|
||||
*/
|
||||
public abstract aspect OptimizeRecursionCache {
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
private Map _cache;
|
||||
|
||||
public OptimizeRecursionCache() {
|
||||
_cache = getCache();
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
abstract public Map getCache();
|
||||
|
||||
abstract public pointcut operation(Object o);
|
||||
|
||||
pointcut topLevelOperation(Object o): operation(o) && !cflowbelow(operation(Object));
|
||||
|
||||
before(Object o) : topLevelOperation(o) {
|
||||
System.out.println("Seeking value for " + o);
|
||||
}
|
||||
|
||||
Object around(Object o) : operation(o) {
|
||||
Object cachedValue = _cache.get(o);
|
||||
if (cachedValue != null) {
|
||||
System.out.println("Found cached value for " + o + ": " + cachedValue);
|
||||
return cachedValue;
|
||||
}
|
||||
return proceed(o);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
after(Object o) returning(Object result) : topLevelOperation(o) {
|
||||
_cache.put(o, result);
|
||||
}
|
||||
|
||||
after(Object o) returning(Object result) : topLevelOperation(o) {
|
||||
System.out.println("cache size: " + _cache.size());
|
||||
}
|
||||
}
|
||||
121
samples/Awk/test.awk
Normal file
@@ -0,0 +1,121 @@
|
||||
#!/bin/awk -f
|
||||
|
||||
BEGIN {
|
||||
# It is not possible to define output file names here because
|
||||
# FILENAME is not defined in the BEGIN section
|
||||
n = "";
|
||||
printf "Generating data files ...";
|
||||
network_max_bandwidth_in_byte = 10000000;
|
||||
network_max_packet_per_second = 1000000;
|
||||
last3 = 0;
|
||||
last4 = 0;
|
||||
last5 = 0;
|
||||
last6 = 0;
|
||||
}
|
||||
{
|
||||
if ($1 ~ /Average/)
|
||||
{ # Skip the Average values
|
||||
n = "";
|
||||
next;
|
||||
}
|
||||
|
||||
if ($2 ~ /all/)
|
||||
{ # This is the cpu info
|
||||
print $3 > FILENAME".cpu.user.dat";
|
||||
# print $4 > FILENAME".cpu.nice.dat";
|
||||
print $5 > FILENAME".cpu.system.dat";
|
||||
# print $6 > FILENAME".cpu.iowait.dat";
|
||||
print $7 > FILENAME".cpu.idle.dat";
|
||||
print 100-$7 > FILENAME".cpu.busy.dat";
|
||||
}
|
||||
if ($2 ~ /eth0/)
|
||||
{ # This is the eth0 network info
|
||||
if ($3 > network_max_packet_per_second)
|
||||
print last3 > FILENAME".net.rxpck.dat"; # Total number of packets received per second.
|
||||
else
|
||||
{
|
||||
last3 = $3;
|
||||
print $3 > FILENAME".net.rxpck.dat"; # Total number of packets received per second.
|
||||
}
|
||||
if ($4 > network_max_packet_per_second)
|
||||
print last4 > FILENAME".net.txpck.dat"; # Total number of packets transmitted per second.
|
||||
else
|
||||
{
|
||||
last4 = $4;
|
||||
print $4 > FILENAME".net.txpck.dat"; # Total number of packets transmitted per second.
|
||||
}
|
||||
if ($5 > network_max_bandwidth_in_byte)
|
||||
print last5 > FILENAME".net.rxbyt.dat"; # Total number of bytes received per second.
|
||||
else
|
||||
{
|
||||
last5 = $5;
|
||||
print $5 > FILENAME".net.rxbyt.dat"; # Total number of bytes received per second.
|
||||
}
|
||||
if ($6 > network_max_bandwidth_in_byte)
|
||||
print last6 > FILENAME".net.txbyt.dat"; # Total number of bytes transmitted per second.
|
||||
else
|
||||
{
|
||||
last6 = $6;
|
||||
print $6 > FILENAME".net.txbyt.dat"; # Total number of bytes transmitted per second.
|
||||
}
|
||||
# print $7 > FILENAME".net.rxcmp.dat"; # Number of compressed packets received per second (for cslip etc.).
|
||||
# print $8 > FILENAME".net.txcmp.dat"; # Number of compressed packets transmitted per second.
|
||||
# print $9 > FILENAME".net.rxmcst.dat"; # Number of multicast packets received per second.
|
||||
}
|
||||
|
||||
# Detect which is the next info to be parsed
|
||||
if ($2 ~ /proc|cswch|tps|kbmemfree|totsck/)
|
||||
{
|
||||
n = $2;
|
||||
}
|
||||
|
||||
# Only get lines with numbers (real data !)
|
||||
if ($2 ~ /[0-9]/)
|
||||
{
|
||||
if (n == "proc/s")
|
||||
{ # This is the proc/s info
|
||||
print $2 > FILENAME".proc.dat";
|
||||
# n = "";
|
||||
}
|
||||
if (n == "cswch/s")
|
||||
{ # This is the context switches per second info
|
||||
print $2 > FILENAME".ctxsw.dat";
|
||||
# n = "";
|
||||
}
|
||||
if (n == "tps")
|
||||
{ # This is the disk info
|
||||
print $2 > FILENAME".disk.tps.dat"; # total transfers per second
|
||||
print $3 > FILENAME".disk.rtps.dat"; # read requests per second
|
||||
print $4 > FILENAME".disk.wtps.dat"; # write requests per second
|
||||
print $5 > FILENAME".disk.brdps.dat"; # block reads per second
|
||||
print $6 > FILENAME".disk.bwrps.dat"; # block writes per second
|
||||
# n = "";
|
||||
}
|
||||
if (n == "kbmemfree")
|
||||
{ # This is the mem info
|
||||
print $2 > FILENAME".mem.kbmemfree.dat"; # Amount of free memory available in kilobytes.
|
||||
print $3 > FILENAME".mem.kbmemused.dat"; # Amount of used memory in kilobytes. This does not take into account memory used by the kernel itself.
|
||||
print $4 > FILENAME".mem.memused.dat"; # Percentage of used memory.
|
||||
# It appears the kbmemshrd has been removed from the sysstat output - ntolia
|
||||
# print $X > FILENAME".mem.kbmemshrd.dat"; # Amount of memory shared by the system in kilobytes. Always zero with 2.4 kernels.
|
||||
# print $5 > FILENAME".mem.kbbuffers.dat"; # Amount of memory used as buffers by the kernel in kilobytes.
|
||||
print $6 > FILENAME".mem.kbcached.dat"; # Amount of memory used to cache data by the kernel in kilobytes.
|
||||
# print $7 > FILENAME".mem.kbswpfree.dat"; # Amount of free swap space in kilobytes.
|
||||
# print $8 > FILENAME".mem.kbswpused.dat"; # Amount of used swap space in kilobytes.
|
||||
print $9 > FILENAME".mem.swpused.dat"; # Percentage of used swap space.
|
||||
# n = "";
|
||||
}
|
||||
if (n == "totsck")
|
||||
{ # This is the socket info
|
||||
print $2 > FILENAME".sock.totsck.dat"; # Total number of used sockets.
|
||||
print $3 > FILENAME".sock.tcpsck.dat"; # Number of TCP sockets currently in use.
|
||||
# print $4 > FILENAME".sock.udpsck.dat"; # Number of UDP sockets currently in use.
|
||||
# print $5 > FILENAME".sock.rawsck.dat"; # Number of RAW sockets currently in use.
|
||||
# print $6 > FILENAME".sock.ip-frag.dat"; # Number of IP fragments currently in use.
|
||||
# n = "";
|
||||
}
|
||||
}
|
||||
}
|
||||
END {
|
||||
print " '" FILENAME "' done.";
|
||||
}
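
# Usage note (an assumption, not part of the original script): the fields read
# above match sar-style text reports, so the script would typically be run as
#   awk -f test.awk sar_output.txt
# which leaves FILENAME set and lets each metric be written to its own
# FILENAME-prefixed .dat file.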
|
||||
BIN
samples/Binary/cube.stl
Normal file
Binary file not shown.
147
samples/BlitzBasic/HalfAndDouble.bb
Normal file
@@ -0,0 +1,147 @@
|
||||
|
||||
Local bk = CreateBank(8)
|
||||
PokeFloat bk, 0, -1
|
||||
Print Bin(PeekInt(bk, 0))
|
||||
Print %1000000000000000
|
||||
Print Bin(1 Shl 31)
|
||||
Print $1f
|
||||
Print $ff
|
||||
Print $1f + (127 - 15)
|
||||
Print Hex(%01111111100000000000000000000000)
|
||||
Print Hex(~%11111111100000000000000000000000)
|
||||
|
||||
Print Bin(FloatToHalf(-2.5))
|
||||
Print HalfToFloat(FloatToHalf(-200000000000.0))
|
||||
|
||||
Print Bin(FToI(-2.5))
|
||||
|
||||
WaitKey
|
||||
End
|
||||
|
||||
|
||||
; Half-precision (16-bit) arithmetic library
|
||||
;============================================
|
||||
|
||||
Global Half_CBank_
|
||||
|
||||
Function FToI(f#)
|
||||
If Half_CBank_ = 0 Then Half_CBank_ = CreateBank(4)
|
||||
PokeFloat Half_CBank_, 0, f
|
||||
Return PeekInt(Half_CBank_, 0)
|
||||
End Function
|
||||
|
||||
Function HalfToFloat#(h)
|
||||
Local signBit, exponent, fraction, fBits
|
||||
|
||||
signBit = (h And 32768) <> 0
|
||||
exponent = (h And %0111110000000000) Shr 10
|
||||
fraction = (h And %0000001111111111)
|
||||
|
||||
If exponent = $1F Then exponent = $FF : ElseIf exponent Then exponent = (exponent - 15) + 127
|
||||
fBits = (signBit Shl 31) Or (exponent Shl 23) Or (fraction Shl 13)
|
||||
|
||||
If Half_CBank_ = 0 Then Half_CBank_ = CreateBank(4)
|
||||
PokeInt Half_CBank_, 0, fBits
|
||||
Return PeekFloat(Half_CBank_, 0)
|
||||
End Function
|
||||
|
||||
Function FloatToHalf(f#)
|
||||
Local signBit, exponent, fraction, fBits
|
||||
|
||||
If Half_CBank_ = 0 Then Half_CBank_ = CreateBank(4)
|
||||
PokeFloat Half_CBank_, 0, f
|
||||
fBits = PeekInt(Half_CBank_, 0)
|
||||
|
||||
signBit = (fBits And (1 Shl 31)) <> 0
|
||||
exponent = (fBits And $7F800000) Shr 23
|
||||
fraction = fBits And $007FFFFF
|
||||
|
||||
If exponent
|
||||
exponent = exponent - 127
|
||||
If Abs(exponent) > $1F
|
||||
If exponent <> ($FF - 127) Then fraction = 0
|
||||
exponent = $1F * Sgn(exponent)
|
||||
Else
|
||||
exponent = exponent + 15
|
||||
EndIf
|
||||
exponent = exponent And %11111
|
||||
EndIf
|
||||
fraction = fraction Shr 13
|
||||
|
||||
Return (signBit Shl 15) Or (exponent Shl 10) Or fraction
|
||||
End Function
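
; Note (added for clarity): the packing above assumes the IEEE-754 half layout,
; i.e. 1 sign bit, 5 exponent bits (bias 15) and 10 fraction bits; the float's
; 8-bit exponent (bias 127) is re-biased and its 23-bit fraction truncated to
; the top 10 bits, which is what the Shl 15 / Shl 10 / Shr 13 shifts implement.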
|
||||
|
||||
Function HalfAdd(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfSub(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfMul(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfDiv(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfLT(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfGT(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
; Double-precision (64-bit) arithmetic library
|
||||
;===============================================
|
||||
|
||||
Global DoubleOut[1], Double_CBank_
|
||||
|
||||
Function DoubleToFloat#(d[1])
|
||||
|
||||
End Function
|
||||
|
||||
Function FloatToDouble(f#)
|
||||
|
||||
End Function
|
||||
|
||||
Function IntToDouble(i)
|
||||
|
||||
End Function
|
||||
|
||||
Function SefToDouble(s, e, f)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleAdd(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleSub(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleMul(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleDiv(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleLT(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleGT(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
;~IDEal Editor Parameters:
|
||||
;~F#1A#20#2F
|
||||
;~C#Blitz3D
|
||||
369
samples/BlitzBasic/LList.bb
Normal file
@@ -0,0 +1,369 @@
|
||||
|
||||
; Double-linked list container class
|
||||
;====================================
|
||||
|
||||
; with thanks to MusicianKool, for concept and issue fixes
|
||||
|
||||
|
||||
Type LList
|
||||
Field head_.ListNode
|
||||
Field tail_.ListNode
|
||||
End Type
|
||||
|
||||
Type ListNode
|
||||
Field pv_.ListNode
|
||||
Field nx_.ListNode
|
||||
Field Value
|
||||
End Type
|
||||
|
||||
Type Iterator
|
||||
Field Value
|
||||
Field l_.LList
|
||||
Field cn_.ListNode, cni_
|
||||
End Type
|
||||
|
||||
|
||||
;Create a new LList object
|
||||
Function CreateList.LList()
|
||||
Local l.LList = New LList
|
||||
|
||||
l\head_ = New ListNode
|
||||
l\tail_ = New ListNode
|
||||
|
||||
l\head_\nx_ = l\tail_ ;End caps
|
||||
l\head_\pv_ = l\head_ ;These make it more or less safe to iterate freely
|
||||
l\head_\Value = 0
|
||||
|
||||
l\tail_\nx_ = l\tail_
|
||||
l\tail_\pv_ = l\head_
|
||||
l\tail_\Value = 0
|
||||
|
||||
Return l
|
||||
End Function
|
||||
|
||||
;Free a list and all elements (not any values)
|
||||
Function FreeList(l.LList)
|
||||
ClearList l
|
||||
Delete l\head_
|
||||
Delete l\tail_
|
||||
Delete l
|
||||
End Function
|
||||
|
||||
;Remove all the elements from a list (does not free values)
|
||||
Function ClearList(l.LList)
|
||||
Local n.ListNode = l\head_\nx_
|
||||
While n <> l\tail_
|
||||
Local nx.ListNode = n\nx_
|
||||
Delete n
|
||||
n = nx
|
||||
Wend
|
||||
l\head_\nx_ = l\tail_
|
||||
l\tail_\pv_ = l\head_
|
||||
End Function
|
||||
|
||||
;Count the number of elements in a list (slow)
|
||||
Function ListLength(l.LList)
|
||||
Local i.Iterator = GetIterator(l), elems
|
||||
While EachIn(i)
|
||||
elems = elems + 1
|
||||
Wend
|
||||
Return elems
|
||||
End Function
|
||||
|
||||
;Return True if a list contains a given value
|
||||
Function ListContains(l.LList, Value)
|
||||
Return (ListFindNode(l, Value) <> Null)
|
||||
End Function
|
||||
|
||||
;Create a linked list from the intvalues in a bank (slow)
|
||||
Function ListFromBank.LList(bank)
|
||||
Local l.LList = CreateList()
|
||||
Local size = BankSize(bank), p
|
||||
|
||||
For p = 0 To size - 4 Step 4
|
||||
ListAddLast l, PeekInt(bank, p)
|
||||
Next
|
||||
|
||||
Return l
|
||||
End Function
|
||||
|
||||
;Create a bank containing all the values in a list (slow)
|
||||
Function ListToBank(l.LList)
|
||||
Local size = ListLength(l) * 4
|
||||
Local bank = CreateBank(size)
|
||||
|
||||
Local i.Iterator = GetIterator(l), p = 0
|
||||
While EachIn(i)
|
||||
PokeInt bank, p, i\Value
|
||||
p = p + 4
|
||||
Wend
|
||||
|
||||
Return bank
|
||||
End Function
|
||||
|
||||
;Swap the contents of two list objects
|
||||
Function SwapLists(l1.LList, l2.LList)
|
||||
Local tempH.ListNode = l1\head_, tempT.ListNode = l1\tail_
|
||||
l1\head_ = l2\head_
|
||||
l1\tail_ = l2\tail_
|
||||
l2\head_ = tempH
|
||||
l2\tail_ = tempT
|
||||
End Function
|
||||
|
||||
;Create a new list containing the same values as the first
|
||||
Function CopyList.LList(lo.LList)
|
||||
Local ln.LList = CreateList()
|
||||
Local i.Iterator = GetIterator(lo) : While EachIn(i)
|
||||
ListAddLast ln, i\Value
|
||||
Wend
|
||||
Return ln
|
||||
End Function
|
||||
|
||||
;Reverse the order of elements of a list
|
||||
Function ReverseList(l.LList)
|
||||
Local n1.ListNode, n2.ListNode, tmp.ListNode
|
||||
|
||||
n1 = l\head_
|
||||
n2 = l\head_\nx_
|
||||
|
||||
While n1 <> l\tail_
|
||||
n1\pv_ = n2
|
||||
tmp = n2\nx_
|
||||
n2\nx_ = n1
|
||||
n1 = n2
|
||||
n2 = tmp
|
||||
Wend
|
||||
|
||||
tmp = l\head_
|
||||
l\head_ = l\tail_
|
||||
l\tail_ = tmp
|
||||
|
||||
l\head_\pv_ = l\head_
|
||||
l\tail_\nx_ = l\tail_
|
||||
End Function
|
||||
|
||||
;Search a list to retrieve the first node with the given value
|
||||
Function ListFindNode.ListNode(l.LList, Value)
|
||||
Local n.ListNode = l\head_\nx_
|
||||
|
||||
While n <> l\tail_
|
||||
If n\Value = Value Then Return n
|
||||
n = n\nx_
|
||||
Wend
|
||||
|
||||
Return Null
|
||||
End Function
|
||||
|
||||
;Append a value to the end of a list (fast) and return the node
|
||||
Function ListAddLast.ListNode(l.LList, Value)
|
||||
Local n.ListNode = New ListNode
|
||||
|
||||
n\pv_ = l\tail_\pv_
|
||||
n\nx_ = l\tail_
|
||||
n\Value = Value
|
||||
|
||||
l\tail_\pv_ = n
|
||||
n\pv_\nx_ = n
|
||||
|
||||
Return n
|
||||
End Function
|
||||
|
||||
;Attach a value to the start of a list (fast) and return the node
|
||||
Function ListAddFirst.ListNode(l.LList, Value)
|
||||
Local n.ListNode = New ListNode
|
||||
|
||||
n\pv_ = l\head_
|
||||
n\nx_ = l\head_\nx_
|
||||
n\Value = Value
|
||||
|
||||
l\head_\nx_ = n
|
||||
n\nx_\pv_ = n
|
||||
|
||||
Return n
|
||||
End Function
|
||||
|
||||
;Remove the first occurrence of the given value from a list
|
||||
Function ListRemove(l.LList, Value)
|
||||
Local n.ListNode = ListFindNode(l, Value)
|
||||
If n <> Null Then RemoveListNode n
|
||||
End Function
|
||||
|
||||
;Remove a node from a list
|
||||
Function RemoveListNode(n.ListNode)
|
||||
n\pv_\nx_ = n\nx_
|
||||
n\nx_\pv_ = n\pv_
|
||||
Delete n
|
||||
End Function
|
||||
|
||||
;Return the value of the element at the given position from the start of the list,
|
||||
;or backwards from the end of the list for a negative index
|
||||
Function ValueAtIndex(l.LList, index)
|
||||
Local n.ListNode = ListNodeAtIndex(l, index)
|
||||
If n <> Null Then Return n\Value : Else Return 0
|
||||
End Function
|
||||
|
||||
;Return the ListNode at the given position from the start of the list, or backwards
|
||||
;from the end of the list for a negative index, or Null if invalid
|
||||
Function ListNodeAtIndex.ListNode(l.LList, index)
|
||||
Local e, n.ListNode
|
||||
|
||||
If index >= 0
|
||||
n = l\head_
|
||||
For e = 0 To index
|
||||
n = n\nx_
|
||||
Next
|
||||
If n = l\tail_ Then n = Null ;Beyond the end of the list - not valid
|
||||
|
||||
Else ;Negative index - count backward
|
||||
n = l\tail_
|
||||
For e = 0 To index Step -1
|
||||
n = n\pv_
|
||||
Next
|
||||
If n = l\head_ Then n = Null ;Before the start of the list - not valid
|
||||
|
||||
EndIf
|
||||
|
||||
Return n
|
||||
End Function
|
||||
|
||||
;Replace a value at the given position (added by MusicianKool)
|
||||
Function ReplaceValueAtIndex(l.LList,index,value)
|
||||
Local n.ListNode = ListNodeAtIndex(l,index)
|
||||
If n <> Null Then n\Value = value:Else Return 0
|
||||
End Function
|
||||
|
||||
;Remove and return a value at the given position (added by MusicianKool)
|
||||
Function RemoveNodeAtIndex(l.LList,index)
|
||||
Local n.ListNode = ListNodeAtIndex(l,index),tval
|
||||
If n <> Null Then tval = n\Value:RemoveListNode(n):Return tval:Else Return 0
|
||||
End Function
|
||||
|
||||
;Retrieve the first value from a list
|
||||
Function ListFirst(l.LList)
|
||||
If l\head_\nx_ <> l\tail_ Then Return l\head_\nx_\Value
|
||||
End Function
|
||||
|
||||
;Retrieve the last value from a list
|
||||
Function ListLast(l.LList)
|
||||
If l\tail_\pv_ <> l\head_ Then Return l\tail_\pv_\Value
|
||||
End Function
|
||||
|
||||
;Remove the first element from a list, and return its value
|
||||
Function ListRemoveFirst(l.LList)
|
||||
Local val
|
||||
If l\head_\nx_ <> l\tail_
|
||||
val = l\head_\nx_\Value
|
||||
RemoveListNode l\head_\nx_
|
||||
EndIf
|
||||
Return val
|
||||
End Function
|
||||
|
||||
;Remove the last element from a list, and return its value
|
||||
Function ListRemoveLast(l.LList)
|
||||
Local val
|
||||
If l\tail_\pv_ <> l\head_
|
||||
val = l\tail_\pv_\Value
|
||||
RemoveListNode l\tail_\pv_
|
||||
EndIf
|
||||
Return val
|
||||
End Function
|
||||
|
||||
;Insert a value into a list before the specified node, and return the new node
|
||||
Function InsertBeforeNode.ListNode(Value, n.ListNode)
|
||||
Local bef.ListNode = New ListNode
|
||||
|
||||
bef\pv_ = n\pv_
|
||||
bef\nx_ = n
|
||||
bef\Value = Value
|
||||
|
||||
n\pv_ = bef
|
||||
bef\pv_\nx_ = bef
|
||||
|
||||
Return bef
|
||||
End Function
|
||||
|
||||
;Insert a value into a list after the specified node, and return the new node
|
||||
Function InsertAfterNode.ListNode(Value, n.ListNode)
|
||||
Local aft.ListNode = New ListNode
|
||||
|
||||
aft\nx_ = n\nx_
|
||||
aft\pv_ = n
|
||||
aft\Value = Value
|
||||
|
||||
n\nx_ = aft
|
||||
aft\nx_\pv_ = aft
|
||||
|
||||
Return aft
|
||||
End Function
|
||||
|
||||
;Get an iterator object to use with a loop
|
||||
;This function means that most programs won't have to think about deleting iterators manually
|
||||
;(in general only a small, constant number will be created)
|
||||
Function GetIterator.Iterator(l.LList)
|
||||
Local i.Iterator
|
||||
|
||||
If l = Null Then RuntimeError "Cannot create Iterator for Null"
|
||||
|
||||
For i = Each Iterator ;See if there's an available iterator at the moment
|
||||
If i\l_ = Null Then Exit
|
||||
Next
|
||||
|
||||
If i = Null Then i = New Iterator ;If there wasn't, create one
|
||||
|
||||
i\l_ = l
|
||||
i\cn_ = l\head_
|
||||
i\cni_ = -1
|
||||
i\Value = 0 ;No special reason why this has to be anything, but meh
|
||||
|
||||
Return i
|
||||
End Function
|
||||
|
||||
;Use as the argument to While to iterate over the members of a list
|
||||
Function EachIn(i.Iterator)
|
||||
|
||||
i\cn_ = i\cn_\nx_
|
||||
|
||||
If i\cn_ <> i\l_\tail_ ;Still items in the list
|
||||
i\Value = i\cn_\Value
|
||||
i\cni_ = i\cni_ + 1
|
||||
Return True
|
||||
|
||||
Else
|
||||
i\l_ = Null ;Disconnect from the list, having reached the end
|
||||
i\cn_ = Null
|
||||
i\cni_ = -1
|
||||
Return False
|
||||
|
||||
EndIf
|
||||
End Function
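
;Usage sketch (illustrative only; myList stands for any LList made with CreateList):
;	Local it.Iterator = GetIterator(myList)
;	While EachIn(it)
;		Print it\Value
;	Wend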
|
||||
|
||||
;Remove from the containing list the element currently pointed to by an iterator
|
||||
Function IteratorRemove(i.Iterator)
|
||||
If (i\cn_ <> i\l_\head_) And (i\cn_ <> i\l_\tail_)
|
||||
Local temp.ListNode = i\cn_
|
||||
|
||||
i\cn_ = i\cn_\pv_
|
||||
i\cni_ = i\cni_ - 1
|
||||
i\Value = 0
|
||||
|
||||
RemoveListNode temp
|
||||
|
||||
Return True
|
||||
Else
|
||||
Return False
|
||||
EndIf
|
||||
End Function
|
||||
|
||||
;Call this before breaking out of an EachIn loop, to disconnect the iterator from the list
|
||||
Function IteratorBreak(i.Iterator)
|
||||
i\l_ = Null
|
||||
i\cn_ = Null
|
||||
i\cni_ = -1
|
||||
i\Value = 0
|
||||
End Function
|
||||
|
||||
|
||||
;~IDEal Editor Parameters:
|
||||
;~F#5#A#10#18#2A#32#3E#47#4C#58#66#6F#78#8F#9B#A9#B7#BD#C5#CC
|
||||
;~F#E3#E9#EF#F4#F9#103#10D#11B#12B#13F#152#163
|
||||
;~C#Blitz3D
|
||||
66
samples/BlitzBasic/PObj.bb
Normal file
@@ -0,0 +1,66 @@
|
||||
|
||||
Local i, start, result
|
||||
|
||||
Local s.Sum3Obj = New Sum3Obj
|
||||
|
||||
For i = 1 To 100000
|
||||
s = New Sum3Obj
|
||||
result = Handle Before s
|
||||
Delete s
|
||||
Next
|
||||
|
||||
start = MilliSecs()
|
||||
For i = 1 To 1000000
|
||||
result = Sum3_(MakeSum3Obj(i, i, i))
|
||||
Next
|
||||
start = MilliSecs() - start
|
||||
Print start
|
||||
|
||||
start = MilliSecs()
|
||||
For i = 1 To 1000000
|
||||
result = Sum3(i, i, i)
|
||||
Next
|
||||
start = MilliSecs() - start
|
||||
Print start
|
||||
|
||||
WaitKey
|
||||
End
|
||||
|
||||
|
||||
Function Sum3(a, b, c)
|
||||
Return a + b + c
|
||||
End Function
|
||||
|
||||
|
||||
Type Sum3Obj
|
||||
Field isActive
|
||||
Field a, b, c
|
||||
End Type
|
||||
|
||||
Function MakeSum3Obj(a, b, c)
|
||||
Local s.Sum3Obj = Last Sum3Obj
|
||||
If s\isActive Then s = New Sum3Obj
|
||||
s\isActive = True
|
||||
s\a = a
|
||||
s\b = b
|
||||
s\c = c
|
||||
|
||||
Restore label
|
||||
Read foo
|
||||
|
||||
Return Handle(s)
|
||||
End Function
|
||||
|
||||
.label
|
||||
Data (10 + 2), 12, 14
|
||||
:
|
||||
Function Sum3_(a_)
|
||||
Local a.Sum3Obj = Object.Sum3Obj a_
|
||||
Local return_ = a\a + a\b + a\c
|
||||
Insert a Before First Sum3Obj :: a\isActive = False
|
||||
Return return_
|
||||
End Function
|
||||
|
||||
|
||||
;~IDEal Editor Parameters:
|
||||
;~C#Blitz3D
|
||||
167
samples/Bluespec/TL.bsv
Normal file
@@ -0,0 +1,167 @@
|
||||
package TL;
|
||||
|
||||
interface TL;
|
||||
method Action ped_button_push();
|
||||
|
||||
(* always_enabled *)
|
||||
method Action set_car_state_N(Bool x);
|
||||
(* always_enabled *)
|
||||
method Action set_car_state_S(Bool x);
|
||||
(* always_enabled *)
|
||||
method Action set_car_state_E(Bool x);
|
||||
(* always_enabled *)
|
||||
method Action set_car_state_W(Bool x);
|
||||
|
||||
method Bool lampRedNS();
|
||||
method Bool lampAmberNS();
|
||||
method Bool lampGreenNS();
|
||||
|
||||
method Bool lampRedE();
|
||||
method Bool lampAmberE();
|
||||
method Bool lampGreenE();
|
||||
|
||||
method Bool lampRedW();
|
||||
method Bool lampAmberW();
|
||||
method Bool lampGreenW();
|
||||
|
||||
method Bool lampRedPed();
|
||||
method Bool lampAmberPed();
|
||||
method Bool lampGreenPed();
|
||||
endinterface: TL
|
||||
|
||||
typedef enum {
|
||||
AllRed,
|
||||
GreenNS, AmberNS,
|
||||
GreenE, AmberE,
|
||||
GreenW, AmberW,
|
||||
GreenPed, AmberPed} TLstates deriving (Eq, Bits);
|
||||
|
||||
typedef UInt#(5) Time32;
|
||||
typedef UInt#(20) CtrSize;
|
||||
|
||||
(* synthesize *)
|
||||
module sysTL(TL);
|
||||
Time32 allRedDelay = 2;
|
||||
Time32 amberDelay = 4;
|
||||
Time32 nsGreenDelay = 20;
|
||||
Time32 ewGreenDelay = 10;
|
||||
Time32 pedGreenDelay = 10;
|
||||
Time32 pedAmberDelay = 6;
|
||||
|
||||
CtrSize clocks_per_sec = 100;
|
||||
|
||||
Reg#(TLstates) state <- mkReg(AllRed);
|
||||
Reg#(TLstates) next_green <- mkReg(GreenNS);
|
||||
Reg#(Time32) secs <- mkReg(0);
|
||||
Reg#(Bool) ped_button_pushed <- mkReg(False);
|
||||
Reg#(Bool) car_present_N <- mkReg(True);
|
||||
Reg#(Bool) car_present_S <- mkReg(True);
|
||||
Reg#(Bool) car_present_E <- mkReg(True);
|
||||
Reg#(Bool) car_present_W <- mkReg(True);
|
||||
Bool car_present_NS = car_present_N || car_present_S;
|
||||
Reg#(CtrSize) cycle_ctr <- mkReg(0);
|
||||
|
||||
rule dec_cycle_ctr (cycle_ctr != 0);
|
||||
cycle_ctr <= cycle_ctr - 1;
|
||||
endrule
|
||||
|
||||
Rules low_priority_rule = (rules
|
||||
rule inc_sec (cycle_ctr == 0);
|
||||
secs <= secs + 1;
|
||||
cycle_ctr <= clocks_per_sec;
|
||||
endrule endrules);
|
||||
|
||||
function Action next_state(TLstates ns);
|
||||
action
|
||||
state <= ns;
|
||||
secs <= 0;
|
||||
endaction
|
||||
endfunction: next_state
|
||||
|
||||
function TLstates green_seq(TLstates x);
|
||||
case (x)
|
||||
GreenNS: return (GreenE);
|
||||
GreenE: return (GreenW);
|
||||
GreenW: return (GreenNS);
|
||||
endcase
|
||||
endfunction
|
||||
|
||||
function Bool car_present(TLstates x);
|
||||
case (x)
|
||||
GreenNS: return (car_present_NS);
|
||||
GreenE: return (car_present_E);
|
||||
GreenW: return (car_present_W);
|
||||
endcase
|
||||
endfunction
|
||||
|
||||
function Rules make_from_green_rule(TLstates green_state, Time32 delay, Bool car_is_present, TLstates ns);
|
||||
return (rules
|
||||
rule from_green (state == green_state && (secs >= delay || !car_is_present));
|
||||
next_state(ns);
|
||||
endrule endrules);
|
||||
endfunction: make_from_green_rule
|
||||
|
||||
function Rules make_from_amber_rule(TLstates amber_state, TLstates ng);
|
||||
return (rules
|
||||
rule from_amber (state == amber_state && secs >= amberDelay);
|
||||
next_state(AllRed);
|
||||
next_green <= ng;
|
||||
endrule endrules);
|
||||
endfunction: make_from_amber_rule
|
||||
|
||||
Rules hprs[7];
|
||||
|
||||
hprs[1] = make_from_green_rule(GreenNS, nsGreenDelay, car_present_NS, AmberNS);
|
||||
hprs[2] = make_from_amber_rule(AmberNS, GreenE);
|
||||
hprs[3] = make_from_green_rule(GreenE, ewGreenDelay, car_present_E, AmberE);
|
||||
hprs[4] = make_from_amber_rule(AmberE, GreenW);
|
||||
hprs[5] = make_from_green_rule(GreenW, ewGreenDelay, car_present_W, AmberW);
|
||||
hprs[6] = make_from_amber_rule(AmberW, GreenNS);
|
||||
|
||||
hprs[0] = (rules
|
||||
rule fromAllRed (state == AllRed && secs >= allRedDelay);
|
||||
if (ped_button_pushed) action
|
||||
ped_button_pushed <= False;
|
||||
next_state(GreenPed);
|
||||
endaction else if (car_present(next_green))
|
||||
next_state(next_green);
|
||||
else if (car_present(green_seq(next_green)))
|
||||
next_state(green_seq(next_green));
|
||||
else if (car_present(green_seq(green_seq(next_green))))
|
||||
next_state(green_seq(green_seq(next_green)));
|
||||
else
|
||||
noAction;
|
||||
endrule: fromAllRed endrules);
|
||||
|
||||
Rules high_priority_rules = hprs[0];
|
||||
for (Integer i = 1; i<7; i=i+1)
|
||||
high_priority_rules = rJoin(hprs[i], high_priority_rules);
|
||||
|
||||
addRules(preempts(high_priority_rules, low_priority_rule));
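// (Comment added for clarity, based on the usual reading of preempts(): the
//  state-machine rules above take priority over low_priority_rule, so the
//  one-second tick in inc_sec is held off on any cycle where a transition fires.)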
|
||||
|
||||
method Action ped_button_push();
|
||||
ped_button_pushed <= True;
|
||||
endmethod: ped_button_push
|
||||
|
||||
method Action set_car_state_N(b) ; car_present_N <= b; endmethod
|
||||
method Action set_car_state_S(b) ; car_present_S <= b; endmethod
|
||||
method Action set_car_state_E(b) ; car_present_E <= b; endmethod
|
||||
method Action set_car_state_W(b) ; car_present_W <= b; endmethod
|
||||
|
||||
method lampRedNS() = (!(state == GreenNS || state == AmberNS));
|
||||
method lampAmberNS() = (state == AmberNS);
|
||||
method lampGreenNS() = (state == GreenNS);
|
||||
method lampRedE() = (!(state == GreenE || state == AmberE));
|
||||
method lampAmberE() = (state == AmberE);
|
||||
method lampGreenE() = (state == GreenE);
|
||||
method lampRedW() = (!(state == GreenW || state == AmberW));
|
||||
method lampAmberW() = (state == AmberW);
|
||||
method lampGreenW() = (state == GreenW);
|
||||
|
||||
method lampRedPed() = (!(state == GreenPed || state == AmberPed));
|
||||
method lampAmberPed() = (state == AmberPed);
|
||||
method lampGreenPed() = (state == GreenPed);
|
||||
|
||||
endmodule: sysTL
|
||||
|
||||
endpackage: TL
|
||||
109
samples/Bluespec/TbTL.bsv
Normal file
@@ -0,0 +1,109 @@
|
||||
package TbTL;
|
||||
|
||||
import TL::*;
|
||||
|
||||
interface Lamp;
|
||||
method Bool changed;
|
||||
method Action show_offs;
|
||||
method Action show_ons;
|
||||
method Action reset;
|
||||
endinterface
|
||||
|
||||
module mkLamp#(String name, Bool lamp)(Lamp);
|
||||
Reg#(Bool) prev <- mkReg(False);
|
||||
|
||||
method changed = (prev != lamp);
|
||||
|
||||
method Action show_offs;
|
||||
if (prev && !lamp)
|
||||
$write (name + " off, ");
|
||||
endmethod
|
||||
|
||||
method Action show_ons;
|
||||
if (!prev && lamp)
|
||||
$write (name + " on, ");
|
||||
endmethod
|
||||
|
||||
method Action reset;
|
||||
prev <= lamp;
|
||||
endmethod
|
||||
endmodule
|
||||
|
||||
|
||||
(* synthesize *)
|
||||
module mkTest();
|
||||
let dut <- sysTL;
|
||||
|
||||
Reg#(Bit#(16)) ctr <- mkReg(0);
|
||||
|
||||
Reg#(Bool) carN <- mkReg(False);
|
||||
Reg#(Bool) carS <- mkReg(False);
|
||||
Reg#(Bool) carE <- mkReg(False);
|
||||
Reg#(Bool) carW <- mkReg(False);
|
||||
|
||||
Lamp lamps[12];
|
||||
|
||||
lamps[0] <- mkLamp("0: NS red ", dut.lampRedNS);
|
||||
lamps[1] <- mkLamp("1: NS amber", dut.lampAmberNS);
|
||||
lamps[2] <- mkLamp("2: NS green", dut.lampGreenNS);
|
||||
lamps[3] <- mkLamp("3: E red ", dut.lampRedE);
|
||||
lamps[4] <- mkLamp("4: E amber", dut.lampAmberE);
|
||||
lamps[5] <- mkLamp("5: E green", dut.lampGreenE);
|
||||
lamps[6] <- mkLamp("6: W red ", dut.lampRedW);
|
||||
lamps[7] <- mkLamp("7: W amber", dut.lampAmberW);
|
||||
lamps[8] <- mkLamp("8: W green", dut.lampGreenW);
|
||||
|
||||
lamps[9] <- mkLamp("9: Ped red ", dut.lampRedPed);
|
||||
lamps[10] <- mkLamp("10: Ped amber", dut.lampAmberPed);
|
||||
lamps[11] <- mkLamp("11: Ped green", dut.lampGreenPed);
|
||||
|
||||
rule start (ctr == 0);
|
||||
$dumpvars;
|
||||
endrule
|
||||
|
||||
rule detect_cars;
|
||||
dut.set_car_state_N(carN);
|
||||
dut.set_car_state_S(carS);
|
||||
dut.set_car_state_E(carE);
|
||||
dut.set_car_state_W(carW);
|
||||
endrule
|
||||
|
||||
rule go;
|
||||
ctr <= ctr + 1;
|
||||
if (ctr == 5000) carN <= True;
|
||||
if (ctr == 6500) carN <= False;
|
||||
if (ctr == 12_000) dut.ped_button_push;
|
||||
endrule
|
||||
|
||||
rule stop (ctr > 32768);
|
||||
$display("TESTS FINISHED");
|
||||
$finish(0);
|
||||
endrule
|
||||
|
||||
function do_offs(l) = l.show_offs;
|
||||
function do_ons(l) = l.show_ons;
|
||||
function do_reset(l) = l.reset;
|
||||
|
||||
function do_it(f);
|
||||
action
|
||||
for (Integer i=0; i<12; i=i+1)
|
||||
f(lamps[i]);
|
||||
endaction
|
||||
endfunction
|
||||
|
||||
function any_changes();
|
||||
Bool b = False;
|
||||
for (Integer i=0; i<12; i=i+1)
|
||||
b = b || lamps[i].changed;
|
||||
return b;
|
||||
endfunction
|
||||
|
||||
rule show (any_changes());
|
||||
do_it(do_offs);
|
||||
do_it(do_ons);
|
||||
do_it(do_reset);
|
||||
$display("(at time %d)", $time);
|
||||
endrule
|
||||
endmodule
|
||||
|
||||
endpackage
|
||||
305
samples/Brightscript/SimpleGrid.brs
Normal file
@@ -0,0 +1,305 @@
|
||||
' *********************************************************
|
||||
' ** Simple Grid Screen Demonstration App
|
||||
' ** Jun 2010
|
||||
' ** Copyright (c) 2010 Roku Inc. All Rights Reserved.
|
||||
' *********************************************************
|
||||
|
||||
'************************************************************
|
||||
'** Application startup
|
||||
'************************************************************
|
||||
Sub Main()
|
||||
|
||||
'initialize theme attributes like titles, logos and overhang color
|
||||
initTheme()
|
||||
|
||||
gridstyle = "Flat-Movie"
|
||||
|
||||
'set to go, time to get started
|
||||
while gridstyle <> ""
|
||||
print "starting grid style= ";gridstyle
|
||||
screen=preShowGridScreen(gridstyle)
|
||||
gridstyle = showGridScreen(screen, gridstyle)
|
||||
end while
|
||||
|
||||
End Sub
|
||||
|
||||
|
||||
'*************************************************************
|
||||
'** Set the configurable theme attributes for the application
|
||||
'**
|
||||
'** Configure the custom overhang and Logo attributes
|
||||
'** These attributes affect the branding of the application
|
||||
'** and are artwork, colors and offsets specific to the app
|
||||
'*************************************************************
|
||||
|
||||
Sub initTheme()
|
||||
app = CreateObject("roAppManager")
|
||||
app.SetTheme(CreateDefaultTheme())
|
||||
End Sub
|
||||
|
||||
'******************************************************
|
||||
'** @return The default application theme.
|
||||
'** Screens can make slight adjustments to the default
|
||||
'** theme by getting it from here and then overriding
|
||||
'** individual theme attributes.
|
||||
'******************************************************
|
||||
Function CreateDefaultTheme() as Object
|
||||
theme = CreateObject("roAssociativeArray")
|
||||
|
||||
theme.ThemeType = "generic-dark"
|
||||
|
||||
' All these are greyscales
|
||||
theme.GridScreenBackgroundColor = "#363636"
|
||||
theme.GridScreenMessageColor = "#808080"
|
||||
theme.GridScreenRetrievingColor = "#CCCCCC"
|
||||
theme.GridScreenListNameColor = "#FFFFFF"
|
||||
|
||||
' Color values work here
|
||||
theme.GridScreenDescriptionTitleColor = "#001090"
|
||||
theme.GridScreenDescriptionDateColor = "#FF005B"
|
||||
theme.GridScreenDescriptionRuntimeColor = "#5B005B"
|
||||
theme.GridScreenDescriptionSynopsisColor = "#606000"
|
||||
|
||||
'used in the Grid Screen
|
||||
theme.CounterTextLeft = "#FF0000"
|
||||
theme.CounterSeparator = "#00FF00"
|
||||
theme.CounterTextRight = "#0000FF"
|
||||
|
||||
theme.GridScreenLogoHD = "pkg:/images/Overhang_Test_HD.png"
|
||||
|
||||
theme.GridScreenLogoOffsetHD_X = "0"
|
||||
theme.GridScreenLogoOffsetHD_Y = "0"
|
||||
theme.GridScreenOverhangHeightHD = "99"
|
||||
|
||||
theme.GridScreenLogoSD = "pkg:/images/Overhang_Test_SD43.png"
|
||||
theme.GridScreenOverhangHeightSD = "66"
|
||||
theme.GridScreenLogoOffsetSD_X = "0"
|
||||
theme.GridScreenLogoOffsetSD_Y = "0"
|
||||
|
||||
' to use your own focus ring artwork
|
||||
'theme.GridScreenFocusBorderSD = "pkg:/images/GridCenter_Border_Movies_SD43.png"
|
||||
'theme.GridScreenBorderOffsetSD = "(-26,-25)"
|
||||
'theme.GridScreenFocusBorderHD = "pkg:/images/GridCenter_Border_Movies_HD.png"
|
||||
'theme.GridScreenBorderOffsetHD = "(-28,-20)"
|
||||
|
||||
' to use your own description background artwork
|
||||
'theme.GridScreenDescriptionImageSD = "pkg:/images/Grid_Description_Background_SD43.png"
|
||||
'theme.GridScreenDescriptionOffsetSD = "(125,170)"
|
||||
'theme.GridScreenDescriptionImageHD = "pkg:/images/Grid_Description_Background_HD.png"
|
||||
'theme.GridScreenDescriptionOffsetHD = "(190,255)"
|
||||
|
||||
|
||||
return theme
|
||||
End Function
|
||||
|
||||
'******************************************************
|
||||
'** Perform any startup/initialization stuff prior to
|
||||
'** initially showing the screen.
|
||||
'******************************************************
|
||||
Function preShowGridScreen(style as string) As Object
|
||||
|
||||
m.port=CreateObject("roMessagePort")
|
||||
screen = CreateObject("roGridScreen")
|
||||
screen.SetMessagePort(m.port)
|
||||
' screen.SetDisplayMode("best-fit")
|
||||
screen.SetDisplayMode("scale-to-fill")
|
||||
|
||||
screen.SetGridStyle(style)
|
||||
return screen
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
'******************************************************
|
||||
'** Display the grid screen and wait for events from
|
||||
'** the screen. The screen will show retrieving while
|
||||
'** we fetch and parse the feeds for the show posters
|
||||
'******************************************************
|
||||
Function showGridScreen(screen As Object, gridstyle as string) As string
|
||||
|
||||
print "enter showGridScreen"
|
||||
|
||||
categoryList = getCategoryList()
|
||||
categoryList[0] = "GridStyle: " + gridstyle
|
||||
screen.setupLists(categoryList.count())
|
||||
screen.SetListNames(categoryList)
|
||||
StyleButtons = getGridControlButtons()
|
||||
screen.SetContentList(0, StyleButtons)
|
||||
for i = 1 to categoryList.count()-1
|
||||
screen.SetContentList(i, getShowsForCategoryItem(categoryList[i]))
|
||||
end for
|
||||
screen.Show()
|
||||
|
||||
while true
|
||||
print "Waiting for message"
|
||||
msg = wait(0, m.port)
|
||||
'msg = wait(0, screen.GetMessagePort()) ' getmessageport does not work on gridscreen
|
||||
print "Got Message:";type(msg)
|
||||
if type(msg) = "roGridScreenEvent" then
|
||||
print "msg= "; msg.GetMessage() " , index= "; msg.GetIndex(); " data= "; msg.getData()
|
||||
if msg.isListItemFocused() then
|
||||
print"list item focused | current show = "; msg.GetIndex()
|
||||
else if msg.isListItemSelected() then
|
||||
row = msg.GetIndex()
|
||||
selection = msg.getData()
|
||||
print "list item selected row= "; row; " selection= "; selection
|
||||
|
||||
' Did we get a selection from the gridstyle selection row?
|
||||
if (row = 0)
|
||||
' yes, return so we can come back with new style
|
||||
return StyleButtons[selection].Title
|
||||
endif
|
||||
|
||||
'm.curShow = displayShowDetailScreen(showList[msg.GetIndex()])
|
||||
else if msg.isScreenClosed() then
|
||||
return ""
|
||||
end if
|
||||
end If
|
||||
end while
|
||||
|
||||
|
||||
End Function
|
||||
|
||||
'**********************************************************
|
||||
'** When a poster on the home screen is selected, we call
|
||||
'** this function passing an roAssociativeArray with the
|
||||
'** ContentMetaData for the selected show. This data should
|
||||
'** be sufficient for the springboard to display
|
||||
'**********************************************************
|
||||
Function displayShowDetailScreen(category as Object, showIndex as Integer) As Integer
|
||||
|
||||
'add code to create springboard, for now we do nothing
|
||||
return 1
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
'**************************************************************
|
||||
'** Return the list of categories to display in the filter
|
||||
'** banner. The result is an roArray containing the names of
|
||||
'** all of the categories. All just static data for the example.
|
||||
'***************************************************************
|
||||
Function getCategoryList() As Object
|
||||
|
||||
categoryList = [ "GridStyle", "Reality", "History", "News", "Comedy", "Drama"]
|
||||
return categoryList
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
'********************************************************************
|
||||
'** Given the category from the filter banner, return an array
|
||||
'** of ContentMetaData objects (roAssociativeArray's) representing
|
||||
'** the shows for the category. For this example, we just cheat and
|
||||
'** create and return a static array with just the minimal items
|
||||
'** set, but ideally, you'd go to a feed service, fetch and parse
|
||||
'** this data dynamically, so content for each category is dynamic
|
||||
'********************************************************************
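' (Note, an assumption rather than part of the original sample: a dynamic
'  version would presumably fetch a feed, e.g. with roUrlTransfer, and build
'  these ContentMetaData associative arrays from the response; the static
'  showList below stands in for that.)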
|
||||
Function getShowsForCategoryItem(category As Object) As Object
|
||||
|
||||
print "getting shows for category "; category
|
||||
|
||||
showList = [
|
||||
{
|
||||
Title: category + ": Header",
|
||||
releaseDate: "1976",
|
||||
length: 3600-600,
|
||||
Description:"This row is category " + category,
|
||||
hdBranded: true,
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/4/43/Gold_star_on_blue.gif",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/4/43/Gold_star_on_blue.gif",
|
||||
Description:"Short Synopsis #1",
|
||||
Synopsis:"Length",
|
||||
StarRating:10,
|
||||
}
|
||||
{
|
||||
Title: category + ": Beverly Hillbillies",
|
||||
releaseDate: "1969",
|
||||
rating: "PG",
|
||||
Description:"Come and listen to a story about a man named Jed: Poor mountaineer, barely kept his family fed. Then one day he was shootin at some food, and up through the ground came a bubblin crude. Oil that is, black gold, Texas tea.",
|
||||
numEpisodes:42,
|
||||
contentType:"season",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/4/4e/The_Beverly_Hillbillies.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/4/4e/The_Beverly_Hillbillies.jpg",
|
||||
StarRating:80,
|
||||
UserStarRating:40
|
||||
}
|
||||
{
|
||||
Title: category + ": Babylon 5",
|
||||
releaseDate: "1996",
|
||||
rating: "PG",
|
||||
Description:"The show centers on the Babylon 5 space station: a focal point for politics, diplomacy, and conflict during the years 2257-2262.",
|
||||
numEpisodes:102,
|
||||
contentType:"season",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/9/9d/Smb5-s4.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/9/9d/Smb5-s4.jpg",
|
||||
StarRating:80,
|
||||
UserStarRating:40
|
||||
}
|
||||
{
|
||||
Title: category + ": John F. Kennedy",
|
||||
releaseDate: "1961",
|
||||
rating: "PG",
|
||||
Description:"My fellow citizens of the world: ask not what America will do for you, but what together we can do for the freedom of man.",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/5/52/Jfk_happy_birthday_1.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/5/52/Jfk_happy_birthday_1.jpg",
|
||||
StarRating:100
|
||||
}
|
||||
{
|
||||
Title: category + ": Man on the Moon",
|
||||
releaseDate: "1969",
|
||||
rating: "PG",
|
||||
Description:"That's one small step for a man, one giant leap for mankind.",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/1/1e/Apollo_11_first_step.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/1/1e/Apollo_11_first_step.jpg",
|
||||
StarRating:100
|
||||
}
|
||||
{
|
||||
Title: category + ": I have a Dream",
|
||||
releaseDate: "1963",
|
||||
rating: "PG",
|
||||
Description:"I have a dream that my four little children will one day live in a nation where they will not be judged by the color of their skin, but by the content of their character.",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/8/81/Martin_Luther_King_-_March_on_Washington.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/8/81/Martin_Luther_King_-_March_on_Washington.jpg",
|
||||
StarRating:100
|
||||
}
|
||||
]
|
||||
|
||||
return showList
|
||||
End Function
|
||||
|
||||
function getGridControlButtons() as object
|
||||
buttons = [
|
||||
{ Title: "Flat-Movie"
|
||||
ReleaseDate: "HD:5x2 SD:5x2"
|
||||
Description: "Flat-Movie (Netflix) style"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/4/43/Gold_star_on_blue.gif"
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/4/43/Gold_star_on_blue.gif"
|
||||
}
|
||||
{ Title: "Flat-Landscape"
|
||||
ReleaseDate: "HD:5x3 SD:4x3"
|
||||
Description: "Channel Store"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/9/96/Dunkery_Hill.jpg/800px-Dunkery_Hill.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/9/96/Dunkery_Hill.jpg/800px-Dunkery_Hill.jpg",
|
||||
}
|
||||
{ Title: "Flat-Portrait"
|
||||
ReleaseDate: "HD:5x2 SD:5x2"
|
||||
Description: "3x4 style posters"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/9/9f/Kane_George_Gurnett.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/9/9f/Kane_George_Gurnett.jpg",
|
||||
}
|
||||
{ Title: "Flat-Square"
|
||||
ReleaseDate: "HD:7x3 SD:6x3"
|
||||
Description: "1x1 style posters"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/d/de/SQUARE_SHAPE.svg/536px-SQUARE_SHAPE.svg.png",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/d/de/SQUARE_SHAPE.svg/536px-SQUARE_SHAPE.svg.png",
|
||||
}
|
||||
{ Title: "Flat-16x9"
|
||||
ReleaseDate: "HD:5x3 SD:4x3"
|
||||
Description: "HD style posters"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/2/22/%C3%89cran_TV_plat.svg/200px-%C3%89cran_TV_plat.svg.png",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/2/22/%C3%89cran_TV_plat.svg/200px-%C3%89cran_TV_plat.svg.png",
|
||||
}
|
||||
]
|
||||
return buttons
|
||||
End Function
|
||||
45
samples/C#/Index.cshtml
Normal file
@@ -0,0 +1,45 @@
@{
    ViewBag.Title = "Home Page";
}
@section featured {
    <section class="featured">
        <div class="content-wrapper">
            <hgroup class="title">
                <h1>@ViewBag.Title.</h1>
                <h2>@ViewBag.Message</h2>
            </hgroup>
            <p>
                To learn more about ASP.NET MVC visit
                <a href="http://asp.net/mvc" title="ASP.NET MVC Website">http://asp.net/mvc</a>.
                The page features <mark>videos, tutorials, and samples</mark> to help you get the most from ASP.NET MVC.
                If you have any questions about ASP.NET MVC visit
                <a href="http://forums.asp.net/1146.aspx/1?MVC" title="ASP.NET MVC Forum">our forums</a>.
            </p>
        </div>
    </section>
}
<h3>We suggest the following:</h3>
<ol class="round">
    <li class="one">
        <h5>Getting Started</h5>
        ASP.NET MVC gives you a powerful, patterns-based way to build dynamic websites that
        enables a clean separation of concerns and that gives you full control over markup
        for enjoyable, agile development. ASP.NET MVC includes many features that enable
        fast, TDD-friendly development for creating sophisticated applications that use
        the latest web standards.
        <a href="http://go.microsoft.com/fwlink/?LinkId=245151">Learn more…</a>
    </li>

    <li class="two">
        <h5>Add NuGet packages and jump-start your coding</h5>
        NuGet makes it easy to install and update free libraries and tools.
        <a href="http://go.microsoft.com/fwlink/?LinkId=245153">Learn more…</a>
    </li>

    <li class="three">
        <h5>Find Web Hosting</h5>
        You can easily find a web hosting company that offers the right mix of features
        and price for your applications.
        <a href="http://go.microsoft.com/fwlink/?LinkId=245157">Learn more…</a>
    </li>
</ol>
21
samples/C#/Program.cs
Normal file
@@ -0,0 +1,21 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace LittleSampleApp
{
    /// <summary>
    /// Just what it says on the tin. A little sample application for Linguist to try out.
    ///
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            Console.WriteLine("Hello, I am a little sample application to test GitHub's Linguist module.");
            Console.WriteLine("I also include a Razor MVC file just to prove it handles cshtml files now.");
        }
    }
}
42
samples/C++/CsvStreamer.h
Normal file
@@ -0,0 +1,42 @@
#pragma once
#include <string>
#include <vector>
#include <fstream>
#include "util.h"

using namespace std;

#define DEFAULT_DELIMITER ','

class CsvStreamer
{
private:
    ofstream file;              // File output stream
    vector<string> row_buffer;  // Buffer which stores a row's data before being flushed/written
    int fields;                 // Number of fields (columns)
    long rows;                  // Number of rows (records) including header row
    char delimiter;             // Delimiter character; comma by default
    string sanitize(string);    // Returns a string ready for output into the file

public:
    CsvStreamer();              // Empty CSV streamer... be sure to open the file before writing!
    CsvStreamer(string, char);  // Same as open(string, char)...
    CsvStreamer(string);        // Opens an output CSV file given a file path/name
    ~CsvStreamer();             // Ensures the output file is closed and saved
    void open(string);          // Opens an output CSV file given a file path/name (default delimiter)
    void open(string, char);    // Opens an output CSV file given a file path/name and a delimiting character (default comma)
    void add_field(string);     // If still on first line, adds a new field to the header row
    void save_fields();         // Call this to save the header row; all new writes should be through append()
    void append(string);        // Appends the current row with this data for the next field; quoted only if needed (leading/trailing spaces are trimmed)
    void append(string, bool);  // Like append(string) but can specify whether to trim spaces at either end of the data (false to keep spaces)
    void append(float);         // Appends the current row with this number
    void append(double);        // Appends the current row with this number
    void append(long);          // Appends the current row with this number
    void append(int);           // Appends the current row with this number
    void writeln();             // Flushes what was in the row buffer into the file (writes the row)
    void close();               // Saves and closes the file
    int field_count();          // Gets the number of fields (columns)
    long row_count();           // Gets the number of records (rows) -- NOT including the header row
};
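A minimal usage sketch for the CsvStreamer interface declared above, assuming the matching implementation behaves as its comments describe; the output path and field values are made up for illustration.

#include "CsvStreamer.h"

int main() {
    CsvStreamer csv("people.csv");  // hypothetical output file
    csv.add_field("name");          // header columns go in before save_fields()
    csv.add_field("age");
    csv.save_fields();              // writes the header row
    csv.append("Ada Lovelace");     // fill the next record field by field
    csv.append(36);
    csv.writeln();                  // flush the buffered row to the file
    csv.close();                    // save and close
    return 0;
}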
32
samples/C++/Field.h
Normal file
@@ -0,0 +1,32 @@
/*****************************************************************************
 * Dwarf Mine - The 13-11 Benchmark
 *
 * Copyright (c) 2013 Bünger, Thomas; Kieschnick, Christian; Kusber,
 * Michael; Lohse, Henning; Wuttke, Nikolai; Xylander, Oliver; Yao, Gary;
 * Zimmermann, Florian
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *****************************************************************************/

#pragma once

enum Field { Free, Black, White, Illegal };

typedef Field Player;
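A small, hedged sketch of how the Field/Player types above could be used; the board row below is invented for illustration.

#include "Field.h"

int main() {
    // One row of a hypothetical board, using the enumerators declared above.
    Field row[4] = { Free, Black, White, Illegal };
    Player to_move = Black;  // Player is just an alias for Field
    return (row[1] == to_move) ? 0 : 1;
}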
530
samples/C++/Math.inl
Normal file
@@ -0,0 +1,530 @@
|
||||
/*
|
||||
===========================================================================
|
||||
The Open Game Libraries.
|
||||
Copyright (C) 2007-2010 Lusito Software
|
||||
|
||||
Author: Santo Pfingsten (TTK-Bandit)
|
||||
Purpose: Math namespace
|
||||
-----------------------------------------
|
||||
|
||||
This software is provided 'as-is', without any express or implied
|
||||
warranty. In no event will the authors be held liable for any damages
|
||||
arising from the use of this software.
|
||||
|
||||
Permission is granted to anyone to use this software for any purpose,
|
||||
including commercial applications, and to alter it and redistribute it
|
||||
freely, subject to the following restrictions:
|
||||
|
||||
1. The origin of this software must not be misrepresented; you must not
|
||||
claim that you wrote the original software. If you use this software
|
||||
in a product, an acknowledgment in the product documentation would be
|
||||
appreciated but is not required.
|
||||
|
||||
2. Altered source versions must be plainly marked as such, and must not be
|
||||
misrepresented as being the original software.
|
||||
|
||||
3. This notice may not be removed or altered from any source distribution.
|
||||
===========================================================================
|
||||
*/
|
||||
|
||||
#ifndef __OG_MATH_INL__
|
||||
#define __OG_MATH_INL__
|
||||
|
||||
namespace og {
|
||||
|
||||
/*
|
||||
==============================================================================
|
||||
|
||||
Math
|
||||
|
||||
==============================================================================
|
||||
*/
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Abs
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::Abs( int i ) {
|
||||
#if 1
|
||||
if ( i & 0x80000000 )
|
||||
return 0x80000000 - (i & MASK_SIGNED);
|
||||
return i;
|
||||
#else
|
||||
int y = x >> 31;
|
||||
return ( ( x ^ y ) - y );
|
||||
#endif
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Fabs
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Fabs( float f ) {
|
||||
#if 1
|
||||
uInt *pf = reinterpret_cast<uInt*>(&f);
|
||||
*(pf) &= MASK_SIGNED;
|
||||
return f;
|
||||
#else
|
||||
return fabsf( f );
|
||||
#endif
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Round
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Round( float f ) {
|
||||
return floorf( f + 0.5f );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Floor
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Floor( float f ) {
|
||||
return floorf( f );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Ceil
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Ceil( float f ) {
|
||||
return ceilf( f );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Ftoi
|
||||
|
||||
ok since this is SSE, why should the other ftoi be the faster one ?
|
||||
and: we might need to add a check for SSE extensions..
|
||||
because sse isn't *really* faster (I actually read that GCC does not handle
|
||||
SSE extensions perfectly. I'll find the link and send it to you when you're online)
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::Ftoi( float f ) {
|
||||
//! @todo needs testing
|
||||
// note: sse function cvttss2si
|
||||
#if OG_ASM_MSVC
|
||||
int i;
|
||||
#if defined(OG_FTOI_USE_SSE)
|
||||
if( SysInfo::cpu.general.SSE ) {
|
||||
__asm cvttss2si eax, f
|
||||
__asm mov i, eax
|
||||
return i;
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
__asm fld f
|
||||
__asm fistp i
|
||||
//__asm mov eax, i // do we need this ? O_o
|
||||
}
|
||||
return i;
|
||||
#elif OG_ASM_GNU
|
||||
int i;
|
||||
#if defined(OG_FTOI_USE_SSE)
|
||||
if( SysInfo::cpu.general.SSE ) {
|
||||
__asm__ __volatile__( "cvttss2si %1 \n\t"
|
||||
: "=m" (i)
|
||||
: "m" (f)
|
||||
);
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
__asm__ __volatile__( "flds %1 \n\t"
|
||||
"fistpl %0 \n\t"
|
||||
: "=m" (i)
|
||||
: "m" (f)
|
||||
);
|
||||
}
|
||||
return i;
|
||||
#else
|
||||
// we use c++ cast instead of c cast (not sure why id did that)
|
||||
return static_cast<int>(f);
|
||||
#endif
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::FtoiFast
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::FtoiFast( float f ) {
|
||||
#if OG_ASM_MSVC
|
||||
int i;
|
||||
__asm fld f
|
||||
__asm fistp i
|
||||
//__asm mov eax, i // do we need this ? O_o
|
||||
return i;
|
||||
#elif OG_ASM_GNU
|
||||
int i;
|
||||
__asm__ __volatile__( "flds %1 \n\t"
|
||||
"fistpl %0 \n\t"
|
||||
: "=m" (i)
|
||||
: "m" (f)
|
||||
);
|
||||
return i;
|
||||
#else
|
||||
// we use c++ cast instead of c cast (not sure why id did that)
|
||||
return static_cast<int>(f);
|
||||
#endif
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Ftol
|
||||
================
|
||||
*/
|
||||
OG_INLINE long Math::Ftol( float f ) {
|
||||
#if OG_ASM_MSVC
|
||||
long i;
|
||||
__asm fld f
|
||||
__asm fistp i
|
||||
//__asm mov eax, i // do we need this ? O_o
|
||||
return i;
|
||||
#elif OG_ASM_GNU
|
||||
long i;
|
||||
__asm__ __volatile__( "flds %1 \n\t"
|
||||
"fistpl %0 \n\t"
|
||||
: "=m" (i)
|
||||
: "m" (f)
|
||||
);
|
||||
return i;
|
||||
#else
|
||||
// we use c++ cast instead of c cast (not sure why id did that)
|
||||
return static_cast<long>(f);
|
||||
#endif
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Sign
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Sign( float f ) {
|
||||
if ( f > 0.0f )
|
||||
return 1.0f;
|
||||
if ( f < 0.0f )
|
||||
return -1.0f;
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Fmod
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Fmod( float numerator, float denominator ) {
|
||||
return fmodf( numerator, denominator );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Modf
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Modf( float f, float& i ) {
|
||||
return modff( f, &i );
|
||||
}
|
||||
OG_INLINE float Math::Modf( float f ) {
|
||||
float i;
|
||||
return modff( f, &i );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Sqrt
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Sqrt( float f ) {
|
||||
return sqrtf( f );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::InvSqrt
|
||||
|
||||
Cannot be 0.0f
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::InvSqrt( float f ) {
|
||||
OG_ASSERT( f != 0.0f );
|
||||
return 1.0f / sqrtf( f );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::RSqrt
|
||||
|
||||
Can be 0.0f
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::RSqrt( float f ) {
|
||||
float g = 0.5f * f;
|
||||
int i = *reinterpret_cast<int *>(&f);
|
||||
|
||||
// do a guess
|
||||
i = 0x5f375a86 - ( i>>1 );
|
||||
f = *reinterpret_cast<float *>(&i);
|
||||
|
||||
// Newtons calculation
|
||||
f = f * ( 1.5f - g * f * f );
|
||||
return f;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Log/Log2/Log10
|
||||
|
||||
Log of 0 is bad.
|
||||
I've also heard you're not really
|
||||
supposed to do log of negatives, yet
|
||||
they work fine.
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Log( float f ) {
|
||||
OG_ASSERT( f != 0.0f );
|
||||
return logf( f );
|
||||
}
|
||||
OG_INLINE float Math::Log2( float f ) {
|
||||
OG_ASSERT( f != 0.0f );
|
||||
return INV_LN_2 * logf( f );
|
||||
}
|
||||
OG_INLINE float Math::Log10( float f ) {
|
||||
OG_ASSERT( f != 0.0f );
|
||||
return INV_LN_10 * logf( f );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Pow
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Pow( float base, float exp ) {
|
||||
return powf( base, exp );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Exp
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Exp( float f ) {
|
||||
return expf( f );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::IsPowerOfTwo
|
||||
================
|
||||
*/
|
||||
OG_INLINE bool Math::IsPowerOfTwo( int x ) {
|
||||
// This is the faster of the two known methods
|
||||
// with the x > 0 check moved to the beginning
|
||||
return x > 0 && ( x & ( x - 1 ) ) == 0;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::HigherPowerOfTwo
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::HigherPowerOfTwo( int x ) {
|
||||
x--;
|
||||
x |= x >> 1;
|
||||
x |= x >> 2;
|
||||
x |= x >> 4;
|
||||
x |= x >> 8;
|
||||
x |= x >> 16;
|
||||
return x + 1;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::LowerPowerOfTwo
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::LowerPowerOfTwo( int x ) {
|
||||
return HigherPowerOfTwo( x ) >> 1;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::FloorPowerOfTwo
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::FloorPowerOfTwo( int x ) {
|
||||
return IsPowerOfTwo( x ) ? x : LowerPowerOfTwo( x );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::CeilPowerOfTwo
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::CeilPowerOfTwo( int x ) {
|
||||
return IsPowerOfTwo( x ) ? x : HigherPowerOfTwo( x );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::ClosestPowerOfTwo
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::ClosestPowerOfTwo( int x ) {
|
||||
if ( IsPowerOfTwo( x ) )
|
||||
return x;
|
||||
int high = HigherPowerOfTwo( x );
|
||||
int low = high >> 1;
|
||||
return ((high-x) < (x-low)) ? high : low;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Digits
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::Digits( int x ) {
|
||||
int digits = 1;
|
||||
int step = 10;
|
||||
while (step <= x) {
|
||||
digits++;
|
||||
step *= 10;
|
||||
}
|
||||
return digits;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Sin/ASin
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Sin( float f ) {
|
||||
return sinf( f );
|
||||
}
|
||||
OG_INLINE float Math::ASin( float f ) {
|
||||
if ( f <= -1.0f )
|
||||
return -HALF_PI;
|
||||
if ( f >= 1.0f )
|
||||
return HALF_PI;
|
||||
return asinf( f );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Cos/ACos
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Cos( float f ) {
|
||||
return cosf( f );
|
||||
}
|
||||
OG_INLINE float Math::ACos( float f ) {
|
||||
if ( f <= -1.0f )
|
||||
return PI;
|
||||
if ( f >= 1.0f )
|
||||
return 0.0f;
|
||||
return acosf( f );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Tan/ATan
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Tan( float f ) {
|
||||
return tanf( f );
|
||||
}
|
||||
OG_INLINE float Math::ATan( float f ) {
|
||||
return atanf( f );
|
||||
}
|
||||
OG_INLINE float Math::ATan( float f1, float f2 ) {
|
||||
return atan2f( f1, f2 );
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::SinCos
|
||||
================
|
||||
*/
|
||||
OG_INLINE void Math::SinCos( float f, float &s, float &c ) {
|
||||
#if OG_ASM_MSVC
|
||||
// sometimes assembler is just waaayy faster
|
||||
_asm {
|
||||
fld f
|
||||
fsincos
|
||||
mov ecx, c
|
||||
mov edx, s
|
||||
fstp dword ptr [ecx]
|
||||
fstp dword ptr [edx]
|
||||
}
|
||||
#elif OG_ASM_GNU
|
||||
asm ("fsincos" : "=t" (c), "=u" (s) : "0" (f));
|
||||
#else
|
||||
s = Sin(f);
|
||||
c = Sqrt( 1.0f - s * s ); // faster than calling Cos(f)
|
||||
#endif
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Deg2Rad
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Deg2Rad( float f ) {
|
||||
return f * DEG_TO_RAD;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Rad2Deg
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Rad2Deg( float f ) {
|
||||
return f * RAD_TO_DEG;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Square
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Square( float v ) {
|
||||
return v * v;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Cube
|
||||
================
|
||||
*/
|
||||
OG_INLINE float Math::Cube( float v ) {
|
||||
return v * v * v;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Sec2Ms
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::Sec2Ms( int sec ) {
|
||||
return sec * 1000;
|
||||
}
|
||||
|
||||
/*
|
||||
================
|
||||
Math::Ms2Sec
|
||||
================
|
||||
*/
|
||||
OG_INLINE int Math::Ms2Sec( int ms ) {
|
||||
return FtoiFast( ms * 0.001f );
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
32
samples/C++/Types.h
Normal file
@@ -0,0 +1,32 @@
/*****************************************************************************
 * Dwarf Mine - The 13-11 Benchmark
 *
 * Copyright (c) 2013 Bünger, Thomas; Kieschnick, Christian; Kusber,
 * Michael; Lohse, Henning; Wuttke, Nikolai; Xylander, Oliver; Yao, Gary;
 * Zimmermann, Florian
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *****************************************************************************/

#pragma once

#include <cstdint>

typedef uint32_t smallPrime_t;
1129
samples/C++/bcm2835.h
Normal file
File diff suppressed because it is too large
@@ -1,39 +0,0 @@
void foo()
{
    cudaArray* cu_array;
    texture<float, 2, cudaReadModeElementType> tex;

    // Allocate array
    cudaChannelFormatDesc description = cudaCreateChannelDesc<float>();
    cudaMallocArray(&cu_array, &description, width, height);

    // Copy image data to array
    cudaMemcpyToArray(cu_array, image, width*height*sizeof(float), cudaMemcpyHostToDevice);

    // Set texture parameters (default)
    tex.addressMode[0] = cudaAddressModeClamp;
    tex.addressMode[1] = cudaAddressModeClamp;
    tex.filterMode = cudaFilterModePoint;
    tex.normalized = false; // do not normalize coordinates

    // Bind the array to the texture
    cudaBindTextureToArray(tex, cu_array);

    // Run kernel
    dim3 blockDim(16, 16, 1);
    dim3 gridDim((width + blockDim.x - 1)/ blockDim.x, (height + blockDim.y - 1) / blockDim.y, 1);
    kernel<<< gridDim, blockDim, 0 >>>(d_data, height, width);

    // Unbind the array from the texture
    cudaUnbindTexture(tex);
} //end foo()

__global__ void kernel(float* odata, int height, int width)
{
    unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
    unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
    if (x < width && y < height) {
        float c = tex2D(tex, x, y);
        odata[y*width+x] = c;
    }
}
138
samples/C++/libcanister.h
Normal file
@@ -0,0 +1,138 @@
#ifndef LIBCANIH
#define LIBCANIH
#include <iostream>
#include <fstream>
#include <stdlib.h>
#include <cstring>

#define int64 unsigned long long
//#define DEBUG

#ifdef DEBUG
#define dout cout
#else
#define dout if (0) cerr
#endif

using namespace std;

namespace libcanister
{

    //the canmem object is a generic memory container used commonly
    //throughout the canister framework to hold memory of uncertain
    //length which may or may not contain null bytes.
    class canmem
    {
    public:
        char* data; //the raw memory block
        int size; //the absolute length of the block
        canmem(); //creates an unallocated canmem
        canmem(int allocsize); //creates an allocated, blank canmem of size
        canmem(char* strdata); //automates the creation of zero-limited canmems
        ~canmem(); //cleans up the canmem
        void zeromem(); //overwrites this canmem
        void fragmem(); //overwrites this canmem with fragment notation
        void countlen(); //counts length of zero-limited strings and stores it in size
        void trim(); //removes any nulls from the end of the string
        static canmem null(); //returns a singleton null canmem
    };

    //contains information about the canister
    class caninfo
    {
    public:
        canmem path; //physical path
        canmem internalname; //a name for the canister
        int numfiles; //the number of files in the canister
    };

    //necessary for the use of this class as a type in canfile
    class canister;

    //this object holds the definition of a 'file' within the
    //canister 'filesystem.'
    class canfile
    {
    public:
        libcanister::canister* parent; //the canister that holds this file
        canmem path; //internal path ('filename')
        canmem data; //the file's decompressed contents
        int isfrag; //0 = probably not fragment, 1 = definitely a fragment (ignore)
        int cfid; //'canfile id' -- a unique ID for this file
        int64 dsize; //ondisk size (compressed form size)
        int cachestate; //0 = not in memory, 1 = in memory, 2 = in memory and needs flush
                        //-1 = error, check the data for the message
        void cache(); //pull the file from disk and cache it in memory
        void cachedump(); //deletes the contents of this file from the memory cache after assuring the on disk copy is up to date
        void cachedumpfinal(fstream& infile); //same as cachedump, but more efficient during closing procedures
        void flush(); //updates the on disk copy, but retains the memory cache
    };

    //the primary class
    //this defines and controls a single canister
    class canister
    {
        //table of contents
        //absolutely worthless to the control code in the canister
        //but quite useful to programs using the API, as they may
        //desire to enumerate the files in a canister for a user's
        //use or for their own.
        //contains a newline-delimited list of files in the container.
        canfile TOC;
    public:
        caninfo info; //the general info about this canister

        //the raw canfiles -- recommended that programs do not modify
        //these files directly, but not enforced.
        canfile* files;
        bool readonly; //if true then no write routines will do anything

        //maximum number of files to have in memory at any given
        //time, change this to whatever suits your application.
        int cachemax;
        int cachecnt; //number of files in the cache (should not be modified)

        //both initialize the canister from a physical location
        canister (canmem fspath);
        canister (char* fspath);

        //destroys the canister (after flushing the modded buffers, of course)
        ~canister();

        //open the fspath
        //does it exist?
        // | --- yes --- opening it (return 1)
        // | --- yes --- file is corrupted, halting (return -1)
        // | --- no --- making a new one (return 0)
        int open();

        //close the canister, flush all buffers, clean up
        int close();

        //deletes the file at path inside this canister
        int delFile(canmem path);

        //pulls the contents of the file from disk or memory and returns it as a file
        canfile getFile(canmem path);

        //creates a file if it does not exist, otherwise overwrites
        //returns whether operation succeeded
        bool writeFile(canmem path, canmem data);
        bool writeFile(canfile file);

        //get the 'table of contents', a file containing a newline delimited
        //list of the file paths in the container which have contents
        canfile getTOC();

        //brings the cache back within the cachemax limit
        //important: sCFID is the safe CFID
        //(the CFID of the file we want to avoid uncaching)
        //really just used internally, but it can't do any harm.
        void cacheclean(int sCFID, bool dFlush = false);
    };

}

#endif
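A rough usage sketch against the canister API declared above, assuming the implementation honours the return codes documented in its comments; the canister name, internal path, and contents are illustrative.

#include "libcanister.h"

int main() {
    libcanister::canister can((char*)"example.can");  // hypothetical canister file
    if (can.open() == -1)                             // -1 = corrupted, per the comment above
        return 1;
    libcanister::canmem path((char*)"notes/hello.txt");
    libcanister::canmem data((char*)"Hello, canister!");
    can.writeFile(path, data);                        // create or overwrite the internal file
    libcanister::canfile f = can.getFile(path);       // pull it back from cache or disk
    cout << f.data.data << endl;                      // canmem exposes its raw block directly
    return can.close();                               // flush buffers and clean up
}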
92
samples/C++/metrics.h
Normal file
@@ -0,0 +1,92 @@
// Copyright 2011 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef NINJA_METRICS_H_
#define NINJA_METRICS_H_

#include <string>
#include <vector>
using namespace std;

#include "util.h"  // For int64_t.

/// The Metrics module is used for the debug mode that dumps timing stats of
/// various actions.  To use, see METRIC_RECORD below.

/// A single metrics we're tracking, like "depfile load time".
struct Metric {
  string name;
  /// Number of times we've hit the code path.
  int count;
  /// Total time (in micros) we've spent on the code path.
  int64_t sum;
};


/// A scoped object for recording a metric across the body of a function.
/// Used by the METRIC_RECORD macro.
struct ScopedMetric {
  explicit ScopedMetric(Metric* metric);
  ~ScopedMetric();

 private:
  Metric* metric_;
  /// Timestamp when the measurement started.
  /// Value is platform-dependent.
  int64_t start_;
};

/// The singleton that stores metrics and prints the report.
struct Metrics {
  Metric* NewMetric(const string& name);

  /// Print a summary report to stdout.
  void Report();

 private:
  vector<Metric*> metrics_;
};

/// Get the current time as relative to some epoch.
/// Epoch varies between platforms; only useful for measuring elapsed time.
int64_t GetTimeMillis();

/// A simple stopwatch which returns the time
/// in seconds since Restart() was called.
struct Stopwatch {
 public:
  Stopwatch() : started_(0) {}

  /// Seconds since Restart() call.
  double Elapsed() const {
    return 1e-6 * static_cast<double>(Now() - started_);
  }

  void Restart() { started_ = Now(); }

 private:
  uint64_t started_;
  uint64_t Now() const;
};

/// The primary interface to metrics.  Use METRIC_RECORD("foobar") at the top
/// of a function to get timing stats recorded for each call of the function.
#define METRIC_RECORD(name)                                             \
  static Metric* metrics_h_metric =                                     \
      g_metrics ? g_metrics->NewMetric(name) : NULL;                    \
  ScopedMetric metrics_h_scoped(metrics_h_metric);

extern Metrics* g_metrics;

#endif // NINJA_METRICS_H_
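A minimal sketch of how METRIC_RECORD is meant to be used, following the comment above; the function and metric name are made up, and it assumes the matching metrics.cc supplies the g_metrics definition.

#include "metrics.h"

void LoadDepfiles() {
  METRIC_RECORD("depfile load");  // ScopedMetric starts here and stops at scope exit
  // ... the work being timed goes here ...
}

int main() {
  g_metrics = new Metrics;        // leave NULL to disable collection entirely
  for (int i = 0; i < 100; ++i)
    LoadDepfiles();
  g_metrics->Report();            // print count and total time per metric
  return 0;
}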
327
samples/C++/protocol-buffer.pb.cc
Normal file
@@ -0,0 +1,327 @@
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: protocol-buffer.proto
|
||||
|
||||
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
|
||||
#include "protocol-buffer.pb.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/once.h>
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/wire_format_lite_inl.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/generated_message_reflection.h>
|
||||
#include <google/protobuf/reflection_ops.h>
|
||||
#include <google/protobuf/wire_format.h>
|
||||
// @@protoc_insertion_point(includes)
|
||||
|
||||
namespace persons {
|
||||
|
||||
namespace {
|
||||
|
||||
const ::google::protobuf::Descriptor* Person_descriptor_ = NULL;
|
||||
const ::google::protobuf::internal::GeneratedMessageReflection*
|
||||
Person_reflection_ = NULL;
|
||||
|
||||
} // namespace
|
||||
|
||||
|
||||
void protobuf_AssignDesc_protocol_2dbuffer_2eproto() {
|
||||
protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
const ::google::protobuf::FileDescriptor* file =
|
||||
::google::protobuf::DescriptorPool::generated_pool()->FindFileByName(
|
||||
"protocol-buffer.proto");
|
||||
GOOGLE_CHECK(file != NULL);
|
||||
Person_descriptor_ = file->message_type(0);
|
||||
static const int Person_offsets_[1] = {
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Person, name_),
|
||||
};
|
||||
Person_reflection_ =
|
||||
new ::google::protobuf::internal::GeneratedMessageReflection(
|
||||
Person_descriptor_,
|
||||
Person::default_instance_,
|
||||
Person_offsets_,
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Person, _has_bits_[0]),
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Person, _unknown_fields_),
|
||||
-1,
|
||||
::google::protobuf::DescriptorPool::generated_pool(),
|
||||
::google::protobuf::MessageFactory::generated_factory(),
|
||||
sizeof(Person));
|
||||
}
|
||||
|
||||
namespace {
|
||||
|
||||
GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_);
|
||||
inline void protobuf_AssignDescriptorsOnce() {
|
||||
::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_,
|
||||
&protobuf_AssignDesc_protocol_2dbuffer_2eproto);
|
||||
}
|
||||
|
||||
void protobuf_RegisterTypes(const ::std::string&) {
|
||||
protobuf_AssignDescriptorsOnce();
|
||||
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
|
||||
Person_descriptor_, &Person::default_instance());
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
void protobuf_ShutdownFile_protocol_2dbuffer_2eproto() {
|
||||
delete Person::default_instance_;
|
||||
delete Person_reflection_;
|
||||
}
|
||||
|
||||
void protobuf_AddDesc_protocol_2dbuffer_2eproto() {
|
||||
static bool already_here = false;
|
||||
if (already_here) return;
|
||||
already_here = true;
|
||||
GOOGLE_PROTOBUF_VERIFY_VERSION;
|
||||
|
||||
::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
|
||||
"\n\025protocol-buffer.proto\022\007persons\"\026\n\006Pers"
|
||||
"on\022\014\n\004name\030\001 \002(\t", 56);
|
||||
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
|
||||
"protocol-buffer.proto", &protobuf_RegisterTypes);
|
||||
Person::default_instance_ = new Person();
|
||||
Person::default_instance_->InitAsDefaultInstance();
|
||||
::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_protocol_2dbuffer_2eproto);
|
||||
}
|
||||
|
||||
// Force AddDescriptors() to be called at static initialization time.
|
||||
struct StaticDescriptorInitializer_protocol_2dbuffer_2eproto {
|
||||
StaticDescriptorInitializer_protocol_2dbuffer_2eproto() {
|
||||
protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
}
|
||||
} static_descriptor_initializer_protocol_2dbuffer_2eproto_;
|
||||
|
||||
// ===================================================================
|
||||
|
||||
#ifndef _MSC_VER
|
||||
const int Person::kNameFieldNumber;
|
||||
#endif // !_MSC_VER
|
||||
|
||||
Person::Person()
|
||||
: ::google::protobuf::Message() {
|
||||
SharedCtor();
|
||||
}
|
||||
|
||||
void Person::InitAsDefaultInstance() {
|
||||
}
|
||||
|
||||
Person::Person(const Person& from)
|
||||
: ::google::protobuf::Message() {
|
||||
SharedCtor();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
void Person::SharedCtor() {
|
||||
_cached_size_ = 0;
|
||||
name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
|
||||
::memset(_has_bits_, 0, sizeof(_has_bits_));
|
||||
}
|
||||
|
||||
Person::~Person() {
|
||||
SharedDtor();
|
||||
}
|
||||
|
||||
void Person::SharedDtor() {
|
||||
if (name_ != &::google::protobuf::internal::kEmptyString) {
|
||||
delete name_;
|
||||
}
|
||||
if (this != default_instance_) {
|
||||
}
|
||||
}
|
||||
|
||||
void Person::SetCachedSize(int size) const {
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
|
||||
_cached_size_ = size;
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_END();
|
||||
}
|
||||
const ::google::protobuf::Descriptor* Person::descriptor() {
|
||||
protobuf_AssignDescriptorsOnce();
|
||||
return Person_descriptor_;
|
||||
}
|
||||
|
||||
const Person& Person::default_instance() {
|
||||
if (default_instance_ == NULL) protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
return *default_instance_;
|
||||
}
|
||||
|
||||
Person* Person::default_instance_ = NULL;
|
||||
|
||||
Person* Person::New() const {
|
||||
return new Person;
|
||||
}
|
||||
|
||||
void Person::Clear() {
|
||||
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
|
||||
if (has_name()) {
|
||||
if (name_ != &::google::protobuf::internal::kEmptyString) {
|
||||
name_->clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
::memset(_has_bits_, 0, sizeof(_has_bits_));
|
||||
mutable_unknown_fields()->Clear();
|
||||
}
|
||||
|
||||
bool Person::MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input) {
|
||||
#define DO_(EXPRESSION) if (!(EXPRESSION)) return false
|
||||
::google::protobuf::uint32 tag;
|
||||
while ((tag = input->ReadTag()) != 0) {
|
||||
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
|
||||
// required string name = 1;
|
||||
case 1: {
|
||||
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
|
||||
::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
|
||||
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
|
||||
input, this->mutable_name()));
|
||||
::google::protobuf::internal::WireFormat::VerifyUTF8String(
|
||||
this->name().data(), this->name().length(),
|
||||
::google::protobuf::internal::WireFormat::PARSE);
|
||||
} else {
|
||||
goto handle_uninterpreted;
|
||||
}
|
||||
if (input->ExpectAtEnd()) return true;
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
handle_uninterpreted:
|
||||
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
|
||||
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
|
||||
return true;
|
||||
}
|
||||
DO_(::google::protobuf::internal::WireFormat::SkipField(
|
||||
input, tag, mutable_unknown_fields()));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
#undef DO_
|
||||
}
|
||||
|
||||
void Person::SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const {
|
||||
// required string name = 1;
|
||||
if (has_name()) {
|
||||
::google::protobuf::internal::WireFormat::VerifyUTF8String(
|
||||
this->name().data(), this->name().length(),
|
||||
::google::protobuf::internal::WireFormat::SERIALIZE);
|
||||
::google::protobuf::internal::WireFormatLite::WriteString(
|
||||
1, this->name(), output);
|
||||
}
|
||||
|
||||
if (!unknown_fields().empty()) {
|
||||
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
|
||||
unknown_fields(), output);
|
||||
}
|
||||
}
|
||||
|
||||
::google::protobuf::uint8* Person::SerializeWithCachedSizesToArray(
|
||||
::google::protobuf::uint8* target) const {
|
||||
// required string name = 1;
|
||||
if (has_name()) {
|
||||
::google::protobuf::internal::WireFormat::VerifyUTF8String(
|
||||
this->name().data(), this->name().length(),
|
||||
::google::protobuf::internal::WireFormat::SERIALIZE);
|
||||
target =
|
||||
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
|
||||
1, this->name(), target);
|
||||
}
|
||||
|
||||
if (!unknown_fields().empty()) {
|
||||
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
|
||||
unknown_fields(), target);
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
int Person::ByteSize() const {
|
||||
int total_size = 0;
|
||||
|
||||
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
|
||||
// required string name = 1;
|
||||
if (has_name()) {
|
||||
total_size += 1 +
|
||||
::google::protobuf::internal::WireFormatLite::StringSize(
|
||||
this->name());
|
||||
}
|
||||
|
||||
}
|
||||
if (!unknown_fields().empty()) {
|
||||
total_size +=
|
||||
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
|
||||
unknown_fields());
|
||||
}
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
|
||||
_cached_size_ = total_size;
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_END();
|
||||
return total_size;
|
||||
}
|
||||
|
||||
void Person::MergeFrom(const ::google::protobuf::Message& from) {
|
||||
GOOGLE_CHECK_NE(&from, this);
|
||||
const Person* source =
|
||||
::google::protobuf::internal::dynamic_cast_if_available<const Person*>(
|
||||
&from);
|
||||
if (source == NULL) {
|
||||
::google::protobuf::internal::ReflectionOps::Merge(from, this);
|
||||
} else {
|
||||
MergeFrom(*source);
|
||||
}
|
||||
}
|
||||
|
||||
void Person::MergeFrom(const Person& from) {
|
||||
GOOGLE_CHECK_NE(&from, this);
|
||||
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
|
||||
if (from.has_name()) {
|
||||
set_name(from.name());
|
||||
}
|
||||
}
|
||||
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
|
||||
}
|
||||
|
||||
void Person::CopyFrom(const ::google::protobuf::Message& from) {
|
||||
if (&from == this) return;
|
||||
Clear();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
void Person::CopyFrom(const Person& from) {
|
||||
if (&from == this) return;
|
||||
Clear();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
bool Person::IsInitialized() const {
|
||||
if ((_has_bits_[0] & 0x00000001) != 0x00000001) return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void Person::Swap(Person* other) {
|
||||
if (other != this) {
|
||||
std::swap(name_, other->name_);
|
||||
std::swap(_has_bits_[0], other->_has_bits_[0]);
|
||||
_unknown_fields_.Swap(&other->_unknown_fields_);
|
||||
std::swap(_cached_size_, other->_cached_size_);
|
||||
}
|
||||
}
|
||||
|
||||
::google::protobuf::Metadata Person::GetMetadata() const {
|
||||
protobuf_AssignDescriptorsOnce();
|
||||
::google::protobuf::Metadata metadata;
|
||||
metadata.descriptor = Person_descriptor_;
|
||||
metadata.reflection = Person_reflection_;
|
||||
return metadata;
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(namespace_scope)
|
||||
|
||||
} // namespace persons
|
||||
|
||||
// @@protoc_insertion_point(global_scope)
|
||||
218
samples/C++/protocol-buffer.pb.h
Normal file
@@ -0,0 +1,218 @@
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: protocol-buffer.proto
|
||||
|
||||
#ifndef PROTOBUF_protocol_2dbuffer_2eproto__INCLUDED
|
||||
#define PROTOBUF_protocol_2dbuffer_2eproto__INCLUDED
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
|
||||
#if GOOGLE_PROTOBUF_VERSION < 2005000
|
||||
#error This file was generated by a newer version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please update
|
||||
#error your headers.
|
||||
#endif
|
||||
#if 2005000 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
|
||||
#error This file was generated by an older version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please
|
||||
#error regenerate this file with a newer version of protoc.
|
||||
#endif
|
||||
|
||||
#include <google/protobuf/generated_message_util.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/repeated_field.h>
|
||||
#include <google/protobuf/extension_set.h>
|
||||
#include <google/protobuf/unknown_field_set.h>
|
||||
// @@protoc_insertion_point(includes)
|
||||
|
||||
namespace persons {
|
||||
|
||||
// Internal implementation detail -- do not call these.
|
||||
void protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
void protobuf_AssignDesc_protocol_2dbuffer_2eproto();
|
||||
void protobuf_ShutdownFile_protocol_2dbuffer_2eproto();
|
||||
|
||||
class Person;
|
||||
|
||||
// ===================================================================
|
||||
|
||||
class Person : public ::google::protobuf::Message {
|
||||
public:
|
||||
Person();
|
||||
virtual ~Person();
|
||||
|
||||
Person(const Person& from);
|
||||
|
||||
inline Person& operator=(const Person& from) {
|
||||
CopyFrom(from);
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const {
|
||||
return _unknown_fields_;
|
||||
}
|
||||
|
||||
inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() {
|
||||
return &_unknown_fields_;
|
||||
}
|
||||
|
||||
static const ::google::protobuf::Descriptor* descriptor();
|
||||
static const Person& default_instance();
|
||||
|
||||
void Swap(Person* other);
|
||||
|
||||
// implements Message ----------------------------------------------
|
||||
|
||||
Person* New() const;
|
||||
void CopyFrom(const ::google::protobuf::Message& from);
|
||||
void MergeFrom(const ::google::protobuf::Message& from);
|
||||
void CopyFrom(const Person& from);
|
||||
void MergeFrom(const Person& from);
|
||||
void Clear();
|
||||
bool IsInitialized() const;
|
||||
|
||||
int ByteSize() const;
|
||||
bool MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input);
|
||||
void SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const;
|
||||
::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const;
|
||||
int GetCachedSize() const { return _cached_size_; }
|
||||
private:
|
||||
void SharedCtor();
|
||||
void SharedDtor();
|
||||
void SetCachedSize(int size) const;
|
||||
public:
|
||||
|
||||
::google::protobuf::Metadata GetMetadata() const;
|
||||
|
||||
// nested types ----------------------------------------------------
|
||||
|
||||
// accessors -------------------------------------------------------
|
||||
|
||||
// required string name = 1;
|
||||
inline bool has_name() const;
|
||||
inline void clear_name();
|
||||
static const int kNameFieldNumber = 1;
|
||||
inline const ::std::string& name() const;
|
||||
inline void set_name(const ::std::string& value);
|
||||
inline void set_name(const char* value);
|
||||
inline void set_name(const char* value, size_t size);
|
||||
inline ::std::string* mutable_name();
|
||||
inline ::std::string* release_name();
|
||||
inline void set_allocated_name(::std::string* name);
|
||||
|
||||
// @@protoc_insertion_point(class_scope:persons.Person)
|
||||
private:
|
||||
inline void set_has_name();
|
||||
inline void clear_has_name();
|
||||
|
||||
::google::protobuf::UnknownFieldSet _unknown_fields_;
|
||||
|
||||
::std::string* name_;
|
||||
|
||||
mutable int _cached_size_;
|
||||
::google::protobuf::uint32 _has_bits_[(1 + 31) / 32];
|
||||
|
||||
friend void protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
friend void protobuf_AssignDesc_protocol_2dbuffer_2eproto();
|
||||
friend void protobuf_ShutdownFile_protocol_2dbuffer_2eproto();
|
||||
|
||||
void InitAsDefaultInstance();
|
||||
static Person* default_instance_;
|
||||
};
|
||||
// ===================================================================
|
||||
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Person
|
||||
|
||||
// required string name = 1;
|
||||
inline bool Person::has_name() const {
|
||||
return (_has_bits_[0] & 0x00000001u) != 0;
|
||||
}
|
||||
inline void Person::set_has_name() {
|
||||
_has_bits_[0] |= 0x00000001u;
|
||||
}
|
||||
inline void Person::clear_has_name() {
|
||||
_has_bits_[0] &= ~0x00000001u;
|
||||
}
|
||||
inline void Person::clear_name() {
|
||||
if (name_ != &::google::protobuf::internal::kEmptyString) {
|
||||
name_->clear();
|
||||
}
|
||||
clear_has_name();
|
||||
}
|
||||
inline const ::std::string& Person::name() const {
|
||||
return *name_;
|
||||
}
|
||||
inline void Person::set_name(const ::std::string& value) {
|
||||
set_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
name_ = new ::std::string;
|
||||
}
|
||||
name_->assign(value);
|
||||
}
|
||||
inline void Person::set_name(const char* value) {
|
||||
set_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
name_ = new ::std::string;
|
||||
}
|
||||
name_->assign(value);
|
||||
}
|
||||
inline void Person::set_name(const char* value, size_t size) {
|
||||
set_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
name_ = new ::std::string;
|
||||
}
|
||||
name_->assign(reinterpret_cast<const char*>(value), size);
|
||||
}
|
||||
inline ::std::string* Person::mutable_name() {
|
||||
set_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
name_ = new ::std::string;
|
||||
}
|
||||
return name_;
|
||||
}
|
||||
inline ::std::string* Person::release_name() {
|
||||
clear_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
return NULL;
|
||||
} else {
|
||||
::std::string* temp = name_;
|
||||
name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
|
||||
return temp;
|
||||
}
|
||||
}
|
||||
inline void Person::set_allocated_name(::std::string* name) {
|
||||
if (name_ != &::google::protobuf::internal::kEmptyString) {
|
||||
delete name_;
|
||||
}
|
||||
if (name) {
|
||||
set_has_name();
|
||||
name_ = name;
|
||||
} else {
|
||||
clear_has_name();
|
||||
name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(namespace_scope)
|
||||
|
||||
} // namespace persons
|
||||
|
||||
#ifndef SWIG
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
|
||||
} // namespace google
|
||||
} // namespace protobuf
|
||||
#endif // SWIG
|
||||
|
||||
// @@protoc_insertion_point(global_scope)
|
||||
|
||||
#endif // PROTOBUF_protocol_2dbuffer_2eproto__INCLUDED
|
||||
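A short round-trip sketch with the generated persons::Person message above, using the standard SerializeToString/ParseFromString calls inherited from google::protobuf::Message.

#include <iostream>
#include <string>
#include "protocol-buffer.pb.h"

int main() {
  persons::Person p;
  p.set_name("Ada");                   // required string name = 1
  std::string wire;
  p.SerializeToString(&wire);          // encode to the binary wire format
  persons::Person q;
  q.ParseFromString(wire);             // decode into a fresh message
  std::cout << q.name() << std::endl;  // prints "Ada"
  return 0;
}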
6
samples/C++/render_adapter.cpp
Normal file
@@ -0,0 +1,6 @@
#include <cstdint>

namespace Gui
{

}
26
samples/C++/rpc.h
Normal file
@@ -0,0 +1,26 @@
// Copyright (C) 2013 Simon Que
//
// This file is part of DuinoCube.
//
// DuinoCube is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// DuinoCube is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with DuinoCube.  If not, see <http://www.gnu.org/licenses/>.

// DuinoCube remote procedure call functions.

#include <stdint.h>

// Initializes RPC system.
void rpc_init();

// Runs the RPC server loop forever.
void rpc_server_loop();
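A minimal sketch of the intended call pattern for the two functions above; on an Arduino-style build these calls would normally sit in setup(), and rpc_server_loop() never returns.

#include "rpc.h"

int main() {
    rpc_init();         // initialize the RPC system once
    rpc_server_loop();  // then hand control to the server loop forever
}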
4674
samples/C++/wrapper_inner.cpp
Normal file
File diff suppressed because it is too large
102
samples/C/bootstrap.h
Normal file
@@ -0,0 +1,102 @@
|
||||
#ifndef BOOTSTRAP_H
|
||||
#define BOOTSTRAP_H
|
||||
|
||||
#include <stdio.h>
|
||||
#include "cxrs.h"
|
||||
|
||||
/* If we're not using GNU C, elide __attribute__ */
|
||||
#ifndef __GNUC__
|
||||
# define __attribute__(x) /*NOTHING*/
|
||||
#endif
|
||||
|
||||
typedef struct object object;
|
||||
|
||||
object *true;
|
||||
object *false;
|
||||
object *eof;
|
||||
object *empty_list;
|
||||
object *global_enviroment;
|
||||
|
||||
enum obj_type {
|
||||
scm_bool,
|
||||
scm_empty_list,
|
||||
scm_eof,
|
||||
scm_char,
|
||||
scm_int,
|
||||
scm_pair,
|
||||
scm_symbol,
|
||||
scm_prim_fun,
|
||||
scm_lambda,
|
||||
scm_str,
|
||||
scm_file
|
||||
};
|
||||
|
||||
typedef object *(*prim_proc)(object *args);
|
||||
|
||||
object *read(FILE *in);
|
||||
object *eval(object *code, object *env);
|
||||
void print(FILE *out, object *obj, int display);
|
||||
|
||||
int check_type(enum obj_type type, object *obj, int err_on_false);
|
||||
|
||||
static inline int is_true(object *obj)
|
||||
{
|
||||
return obj != false;
|
||||
}
|
||||
|
||||
object *make_int(int value);
|
||||
int obj2int(object *i);
|
||||
|
||||
object *make_bool(int value);
|
||||
int obj2bool(object *b);
|
||||
|
||||
object *make_char(char c);
|
||||
char obj2char(object *ch);
|
||||
|
||||
object *make_str(char *str);
|
||||
char *obj2str(object *str);
|
||||
|
||||
object *cons(object *car, object *cdr);
|
||||
object *car(object *pair);
|
||||
object *cdr(object *pair);
|
||||
void set_car(object *pair, object *new);
|
||||
void set_cdr(object *pair, object *new);
|
||||
|
||||
object *make_symbol(char *name);
|
||||
char *sym2str(object *sym);
|
||||
object *get_symbol(char *name) __attribute__((pure));
|
||||
|
||||
object *make_prim_fun(prim_proc fun);
|
||||
prim_proc obj2prim_proc(object *proc);
|
||||
|
||||
object *make_lambda(object *args, object *code, object *env);
|
||||
object *lambda_code(object *lambda);
|
||||
object *lambda_args(object *lambda);
|
||||
|
||||
object *make_port(FILE *handle, int direction);
|
||||
int port_direction(object *port);
|
||||
FILE *port_handle(object *port);
|
||||
void set_port_handle_to_null(object *port);
|
||||
|
||||
/*both of these should never be called*/
|
||||
object *apply_proc(object *);
|
||||
object *eval_proc(object *);
|
||||
|
||||
|
||||
object *maybe_add_begin(object *code);
|
||||
|
||||
void init_enviroment(object *env);
|
||||
|
||||
|
||||
void eval_err(char *msg, object *code) __attribute__((noreturn));
|
||||
|
||||
void define_var(object *var, object *val, object *env);
|
||||
void set_var(object *var, object *val, object *env);
|
||||
object *get_var(object *var, object *env);
|
||||
|
||||
object *cond2nested_if(object *cond);
|
||||
object *let2lambda(object *let);
|
||||
object *and2nested_if(object *and);
|
||||
object *or2nested_if(object *or);
|
||||
|
||||
#endif /*include guard*/
|
||||
56 samples/C/dynarray.cats Normal file
@@ -0,0 +1,56 @@
/* ******************************************************************* */
/* */
/* Applied Type System */
/* */
/* ******************************************************************* */

/*
** ATS/Postiats - Unleashing the Potential of Types!
** Copyright (C) 2011-20?? Hongwei Xi, ATS Trustful Software, Inc.
** All rights reserved
**
** ATS is free software; you can redistribute it and/or modify it under
** the terms of the GNU GENERAL PUBLIC LICENSE (GPL) as published by the
** Free Software Foundation; either version 3, or (at your option) any
** later version.
**
** ATS is distributed in the hope that it will be useful, but WITHOUT ANY
** WARRANTY; without even the implied warranty of MERCHANTABILITY or
** FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
** for more details.
**
** You should have received a copy of the GNU General Public License
** along with ATS; see the file COPYING. If not, please write to the
** Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
** 02110-1301, USA.
*/

/* ****** ****** */

/*
(* Author: Hongwei Xi *)
(* Authoremail: hwxi AT cs DOT bu DOT edu *)
(* Start time: March, 2013 *)
*/

/* ****** ****** */

#ifndef ATSHOME_LIBATS_DYNARRAY_CATS
#define ATSHOME_LIBATS_DYNARRAY_CATS

/* ****** ****** */

#include <string.h>

/* ****** ****** */

#define atslib_dynarray_memcpy memcpy
#define atslib_dynarray_memmove memmove

/* ****** ****** */

#endif // ifndef ATSHOME_LIBATS_DYNARRAY_CATS

/* ****** ****** */

/* end of [dynarray.cats] */
61 samples/C/jni_layer.h Normal file
@@ -0,0 +1,61 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class jni_JniLayer */

#ifndef _Included_jni_JniLayer
#define _Included_jni_JniLayer
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_initialize
 * Signature: ([II)J
 */
JNIEXPORT jlong JNICALL Java_jni_JniLayer_jni_1layer_1initialize
  (JNIEnv *, jobject, jintArray, jint, jint);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_mainloop
 * Signature: (J)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1mainloop
  (JNIEnv *, jobject, jlong);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_set_button
 * Signature: (JII)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1set_1button
  (JNIEnv *, jobject, jlong, jint, jint);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_set_analog
 * Signature: (JIIF)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1set_1analog
  (JNIEnv *, jobject, jlong, jint, jint, jfloat);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_report_analog_chg
 * Signature: (JI)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1report_1analog_1chg
  (JNIEnv *, jobject, jlong, jint);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_kill
 * Signature: (J)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1kill
  (JNIEnv *, jobject, jlong);

#ifdef __cplusplus
}
#endif
#endif
47 samples/C/readline.cats Normal file
@@ -0,0 +1,47 @@
/*
** API in ATS for GNU-readline
*/

/* ****** ****** */

/*
** Permission to use, copy, modify, and distribute this software for any
** purpose with or without fee is hereby granted, provided that the above
** copyright notice and this permission notice appear in all copies.
**
** THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
** WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
** MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
** ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
** WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
** ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
** OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/

/* ****** ****** */

#ifndef READLINE_READLINE_CATS
#define READLINE_READLINE_CATS

/* ****** ****** */

#include <readline/readline.h>

/* ****** ****** */
//
#define \
atscntrb_readline_rl_library_version() ((char*)rl_library_version)
//
#define atscntrb_readline_rl_readline_version() (rl_readline_version)
//
/* ****** ****** */

#define atscntrb_readline_readline readline

/* ****** ****** */

#endif // ifndef READLINE_READLINE_CATS

/* ****** ****** */

/* end of [readline.cats] */
15669 samples/C/sgd_fast.c Normal file
File diff suppressed because it is too large
5 samples/C/syscalldefs.h Normal file
@@ -0,0 +1,5 @@
static const syscalldef syscalldefs[] = {
    [SYSCALL_OR_NUM(0, SYS_restart_syscall)] = MAKE_UINT16(0, 1),
    [SYSCALL_OR_NUM(1, SYS_exit)] = MAKE_UINT16(1, 17),
    [SYSCALL_OR_NUM(2, SYS_fork)] = MAKE_UINT16(0, 22),
};
5 samples/COBOL/hello_world.cbl Normal file
@@ -0,0 +1,5 @@
       program-id. hello.
       procedure division.
       display "Hello, World!".
       stop run.

6 samples/COBOL/hello_world.ccp Normal file
@@ -0,0 +1,6 @@
       IDENTIFICATION DIVISION.
       PROGRAM-ID. hello.
       PROCEDURE DIVISION.
       DISPLAY "Hello World, yet again.".
       STOP RUN.

6 samples/COBOL/hello_world.cob Normal file
@@ -0,0 +1,6 @@
       IDENTIFICATION DIVISION.
       PROGRAM-ID. hello.
       PROCEDURE DIVISION.
       DISPLAY "Hello World!".
       STOP RUN.

7 samples/COBOL/simple.cpy Normal file
@@ -0,0 +1,7 @@
       01 COBOL-TEST-RECORD.
          05 COBOL-TEST-USAGES.
             10 COBOL-4-COMP PIC S9(4) COMP.
             10 COBOL-8-COMP PIC S9(8) COMP.
             10 COBOL-9-COMP PIC S9(9) COMP.
             10 COBOL-4-COMP2 PIC S9(4) COMP-2.
             10 COBOL-7-COMP2 PIC 9(7) COMP-2.
6307 samples/CSS/bootstrap.css vendored Normal file
File diff suppressed because it is too large
873 samples/CSS/bootstrap.min.css vendored Normal file
File diff suppressed because one or more lines are too long
12 samples/Cirru/array.cirru Normal file
@@ -0,0 +1,12 @@

print $ array
  int 1
  string 2

print $ array
  int 1
  array
    int 2
    string 3
  array
    string 4
7 samples/Cirru/block.cirru Normal file
@@ -0,0 +1,7 @@

set f $ block (a b c)
  print a b c

call f (int 1) (int 2) (int 3)

f (int 1) (int 2) (int 3)
7 samples/Cirru/bool.cirru Normal file
@@ -0,0 +1,7 @@

print $ bool true
print $ bool false
print $ bool yes
print $ bool no
print $ bool 1
print $ bool 0
14 samples/Cirru/map.cirru Normal file
@@ -0,0 +1,14 @@

print $ map
  a $ int 5
  b $ array (int 1) (int 2)
  c $ map
    int 1
    array (int 4)

set m $ map
  a $ int 1

set m b $ int 2

print m
3 samples/Cirru/number.cirru Normal file
@@ -0,0 +1,3 @@

print $ int 1
print $ float 1.2
2 samples/Cirru/require.cirru Normal file
@@ -0,0 +1,2 @@

require ./stdio.cr
23 samples/Cirru/scope.cirru Normal file
@@ -0,0 +1,23 @@

set a (int 2)

print (self)

set c (child)

under c
  under parent
    print a

print $ get c a

set c x (int 3)
print $ get c x

set just-print $ code
  print a

print just-print

eval (self) just-print
eval just-print
55 samples/Cirru/stdio.cirru Normal file
@@ -0,0 +1,55 @@

set a $ string 1
print a

print (string 1)

print nothing

print
  map
    a (int 4)
    b $ map
      a $ int 5
      b $ int 6
    c $ map
      int 7

print
  array
    int 1
    int 2
    array
      int 3
      int 4

print
  array
    int 1
    map
      a $ int 2
      b $ array
        int 3

print
  int 1
  int 2

print $ code
  set a 1
  print (get a)
  print $ array
    int a
    array
      int a

set container (map)
set container code $ code
  set a 1
  print (get a)
  print $ array
    int a
    array
      int a

print container
3 samples/Cirru/string.cirru Normal file
@@ -0,0 +1,3 @@

print $ string a
print $ string "a b"
17 samples/Clojure/for.clj Normal file
@@ -0,0 +1,17 @@
(defn prime? [n]
  (not-any? zero? (map #(rem n %) (range 2 n))))

(range 3 33 2)
'(3 5 7 9 11 13 15 17 19 21 23 25 27 29 31)

;; :when continues through the collection even if some have the
;; condition evaluate to false, like filter
(for [x (range 3 33 2) :when (prime? x)]
  x)
'(3 5 7 11 13 17 19 23 29 31)

;; :while stops at the first collection element that evaluates to
;; false, like take-while
(for [x (range 3 33 2) :while (prime? x)]
  x)
'(3 5 7)
8 samples/Clojure/hiccup.hic Normal file
@@ -0,0 +1,8 @@
[:html
 [:head
  [:meta {:charset "utf-8"}]
  [:link {:rel "stylesheet" :href "css/bootstrap.min.css"}]
  [:script {:src "app.js"}]]
 [:body
  [:div.nav
   [:p "Hello world!"]]]]
13 samples/Clojure/into-array.cljc Normal file
@@ -0,0 +1,13 @@
(defn into-array
  ([aseq]
   (into-array nil aseq))
  ([type aseq]
   (let [n (count aseq)
         a (make-array n)]
     (loop [aseq (seq aseq)
            i 0]
       (if (< i n)
         (do
           (aset a i (first aseq))
           (recur (next aseq) (inc i)))
         a)))))
15 samples/Clojure/protocol.cljs Normal file
@@ -0,0 +1,15 @@
(defprotocol ISound (sound []))

(deftype Cat []
  ISound
  (sound [_] "Meow!"))

(deftype Dog []
  ISound
  (sound [_] "Woof!"))

(extend-type default
  ISound
  (sound [_] "... silence ..."))

(sound 1) ;; => "... silence ..."
5 samples/Clojure/rand.cljscm Normal file
@@ -0,0 +1,5 @@
(defn rand
  "Returns a random floating point number between 0 (inclusive) and
  n (default 1) (exclusive)."
  ([] (scm* [n] (random-real)))
  ([n] (* (rand) n)))
20 samples/Clojure/svg.cljx Normal file
@@ -0,0 +1,20 @@
^:clj (ns c2.svg
        (:use [c2.core :only [unify]]
              [c2.maths :only [Pi Tau radians-per-degree
                               sin cos mean]]))

^:cljs (ns c2.svg
         (:use [c2.core :only [unify]]
               [c2.maths :only [Pi Tau radians-per-degree
                                sin cos mean]])
         (:require [c2.dom :as dom]))

;;Stub for float fn, which does not exist on cljs runtime
^:cljs (def float identity)

(defn ->xy
  "Convert coordinates (potentially map of `{:x :y}`) to 2-vector."
  [coordinates]
  (cond
    (and (vector? coordinates) (= 2 (count coordinates))) coordinates
    (map? coordinates) [(:x coordinates) (:y coordinates)]))
20 samples/Clojure/unit-test.cl2 Normal file
@@ -0,0 +1,20 @@
(deftest function-tests
  (is (= 3
         (count [1 2 3])))
  (is (= false
         (not true)))
  (is (= true
         (contains? {:foo 1 :bar 2} :foo)))

  (is (= {"foo" 1, "baz" 3}
         (select-keys {:foo 1 :bar 2 :baz 3} [:foo :baz])))

  (is (= [1 2 3]
         (vals {:foo 1 :bar 2 :baz 3})))

  (is (= ["foo" "bar" "baz"]
         (keys {:foo 1 :bar 2 :baz 3})))

  (is (= [2 4 6]
         (filter (fn [x] (=== (rem x 2) 0)) [1 2 3 4 5 6]))))

82 samples/Common Lisp/macros-advanced.cl Normal file
@@ -0,0 +1,82 @@
;; @file macros-advanced.cl
;;
;; @brief Advanced macro practices - defining your own macros
;;
;; Macro definition skeleton:
;; (defmacro name (parameter*)
;;   "Optional documentation string"
;;   body-form*)
;;
;; Note that a backquote expression is most often used in the `body-form`
;;

; `primep` tests a number for primality
(defun primep (n)
  "test a number for primality"
  (if (< n 2) (return-from primep))
  (do ((i 2 (1+ i)) (p t (not (zerop (mod n i)))))
      ((> i (sqrt n)) p)
    (when (not p) (return))))
; `next-prime` returns the next prime bigger than the specified number
(defun next-prime (n)
  "return the next prime bigger than the specified number"
  (do ((i (1+ n) (1+ i)))
      ((primep i) i)))
;
; The recommended procedure for writing a new macro is as follows:
; 1. Write a sample call to the macro and the code it should expand into
(do-primes (p 0 19)
  (format t "~d " p))
; Expected expanded code
(do ((p (next-prime (- 0 1)) (next-prime p)))
    ((> p 19))
  (format t "~d " p))
; 2. Write code that generates the handwritten expansion from the arguments in
;    the sample call
(defmacro do-primes (var-and-range &rest body)
  (let ((var (first var-and-range))
        (start (second var-and-range))
        (end (third var-and-range)))
    `(do ((,var (next-prime (- ,start 1)) (next-prime ,var)))
         ((> ,var ,end))
       ,@body)))
; 2-1. A more concise implementation with 'parameter list destructuring' and the
;      '&body' synonym; it also emits friendlier messages on incorrect input.
(defmacro do-primes ((var start end) &body body)
  `(do ((,var (next-prime (- ,start 1)) (next-prime ,var)))
       ((> ,var ,end))
     ,@body))
; 2-2. Test the result of macro expansion with the `macroexpand-1` function
(macroexpand-1 '(do-primes (p 0 19) (format t "~d " p)))
; 3. Make sure the macro abstraction does not "leak"
(defmacro do-primes ((var start end) &body body)
  (let ((end-value-name (gensym)))
    `(do ((,var (next-prime (- ,start 1)) (next-prime ,var))
          (,end-value-name ,end))
         ((> ,var ,end-value-name))
       ,@body)))
; 3-1. Rules to observe to avoid common and possible leaks
;   a. include any subforms in the expansion in positions that will be evaluated
;      in the same order as the subforms appear in the macro call
;   b. make sure subforms are evaluated only once by creating a variable in the
;      expansion to hold the value of evaluating the argument form, and then
;      using that variable anywhere else the value is needed in the expansion
;   c. use `gensym` at macro expansion time to create variable names used in the
;      expansion
;
; Appendix I. Macro-writing macros, 'with-gensyms', to guarantee that rule c
; gets observed.
; Example usage of `with-gensyms`
(defmacro do-primes-a ((var start end) &body body)
  "do-primes implementation with the macro-writing macro 'with-gensyms'"
  (with-gensyms (end-value-name)
    `(do ((,var (next-prime (- ,start 1)) (next-prime ,var))
          (,end-value-name ,end))
         ((> ,var ,end-value-name))
       ,@body)))
; Define the macro; note how the comma is used to interpolate the value of the loop
; expression
(defmacro with-gensyms ((&rest names) &body body)
  `(let ,(loop for n in names collect `(,n (gensym)))
     ,@body)
  )
475 samples/Common Lisp/motor-inferencia.cl Normal file
@@ -0,0 +1,475 @@
#|
ESCUELA POLITECNICA SUPERIOR - UNIVERSIDAD AUTONOMA DE MADRID
ARTIFICIAL INTELLIGENCE

Inference engine
Based in part on "Paradigms of AI Programming: Case Studies
in Common Lisp", by Peter Norvig, 1992
|#


;;;;;;;;;;;;;;;;;;;;;
;;;; Global variables
;;;;;;;;;;;;;;;;;;;;;


(defvar *hypothesis-list*)
(defvar *rule-list*)
(defvar *fact-list*)

;;;;;;;;;;;;;;;;;;;;;
;;;; Constants
;;;;;;;;;;;;;;;;;;;;;

(defconstant +fail+ nil "Indicates unification failure")

(defconstant +no-bindings+ '((nil))
  "Indicates unification success, with no variables.")

(defconstant *mundo-abierto* nil)




;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;; Functions for the user
;;;;;;;;;;;;;;;;;;;;;;;;;;;


;; Resets *fact-list* to NIL
(defun erase-facts () (setq *fact-list* nil))

(defun set-hypothesis-list (h) (setq *hypothesis-list* h))


;; Returns a list of solutions, each one satisfying all the hypotheses contained
;; in *hypothesis-list*
(defun motor-inferencia ()
  (consulta *hypothesis-list*))




;;;;;;;;;;;;;;;;;;;;;;;;
;;;; Auxiliary functions
;;;;;;;;;;;;;;;;;;;;;;;;

#|____________________________________________________________________________
FUNCTION: CONSULTA

COMMENTS:
CONSULTA receives a list of hypotheses (variable <hypotheses>), and returns
a list of binding lists (each binding list being a solution).

EXAMPLES:
hypotheses is:
((brothers ?x ?y) (neighbours juan ?x)).

That is, we are searching for the brothers of the possible neighbors of Juan.

The function can return in this case:

(((?x . sergio) (?y . javier)) ((?x . julian) (?y . mario)) ((?x . julian) (?y . pedro))).
That is, the neighbors of Juan (Sergio and Julian) have 3 brothers in total (Javier, Mario, Pedro)
____________________________________________________________________________|#

(defun consulta (hypotheses)
  (if (null hypotheses) (list +no-bindings+)
      (mapcan #'(lambda (b)
                  (mapcar #'(lambda (x) (une-bindings-con-bindings b x))
                          (consulta (subst-bindings b (rest hypotheses)))))
              (find-hypothesis-value (first hypotheses)))))



#|____________________________________________________________________________
FUNCTION: FIND-HYPOTHESIS-VALUE

COMMENTS:
This function handles a single query (only one hypothesis) given a binding list.
It tries (in the following order) to:
- Answer the query from *fact-list*
- Answer the query from the rules in *rule-list*
- Ask the user

The function returns a list of solutions (list of binding lists).

EXAMPLES:
If hypothesis is (brothers ?x ?y)
and the function returns:
(((?x . sergio) (?y . javier)) ((?x . julian) (?y . maria)) ((?x . alberto) (?y . pedro))).

It means that Sergio and Javier are brothers, Julian and Mario are brothers, and Alberto and Pedro are brothers.
____________________________________________________________________________|#

(defun find-hypothesis-value (hypothesis)
  (let (rules)
    (cond
      ((equality? hypothesis)
       (value-from-equality hypothesis))
      ((value-from-facts hypothesis))
      ((setq good-rules (find-rules hypothesis))
       (value-from-rules hypothesis good-rules))
      (t (ask-user hypothesis)))))



; une-bindings-con-bindings takes two binding lists and returns a binding list
; Assumes that b1 and b2 are not +fail+
(defun une-bindings-con-bindings (b1 b2)
  (cond
    ((equal b1 +no-bindings+) b2)
    ((equal b2 +no-bindings+) b1)
    (T (append b1 b2))))



#|____________________________________________________________________________
FUNCTION: VALUE-FROM-FACTS

COMMENTS:
Returns all the solutions of <hypothesis> obtained directly from *fact-list*

EXAMPLES:
> (setf *fact-list* '((man luis) (man pedro)(woman mart)(man daniel)(woman laura)))

> (value-from-facts '(man ?x))
returns:

(((?X . LUIS)) ((?X . PEDRO)) ((?X . DANIEL)))
____________________________________________________________________________|#

(defun value-from-facts (hypothesis)
  (mapcan #'(lambda(x) (let ((aux (unify hypothesis x)))
                         (when aux (list aux)))) *fact-list*))




#|____________________________________________________________________________
FUNCTION: FIND-RULES

COMMENTS:
Returns the rules in *rule-list* whose THENs unify with the term given in <hypothesis>
The variables in the rules that satisfy this requirement are renamed.

EXAMPLES:
> (setq *rule-list*
        '((R1 (pertenece ?E (?E . ?_)))
          (R2 (pertenece ?E (?_ . ?Xs)) :- ((pertenece ?E ?Xs)))))

Then:
> (FIND-RULES (PERTENECE 1 (2 5)))
returns:
((R2 (PERTENECE ?E.1 (?_ . ?XS.2)) :- ((PERTENECE ?E.1 ?XS.2))))
That is, only the THEN of rule R2 unifies with <hypothesis>

However,
> (FIND-RULES (PERTENECE 1 (1 6 7)))

returns:
((R1 (PERTENECE ?E.6 (?E.6 . ?_)))
 (R2 (PERTENECE ?E.7 (?_ . ?XS.8)) :- ((PERTENECE ?E.7 ?XS.8))))
So the THENs of both rules unify with <hypothesis>
____________________________________________________________________________|#

(defun find-rules (hypothesis)
  (mapcan #'(lambda(b) (let ((renamed-rule (rename-variables b)))
                         (when (in-then? hypothesis renamed-rule)
                           (list renamed-rule)))) *rule-list*))

(defun in-then? (hypothesis rule)
  (unless (null (rule-then rule))
    (not (equal +fail+ (unify hypothesis (rule-then rule))))))



#|____________________________________________________________________________
FUNCTION: VALUE-FROM-RULES

COMMENTS:
Returns all the solutions to <hypothesis> found using all the rules given in
the list <rules>. Note that a single rule can have multiple solutions.
____________________________________________________________________________|#
(defun value-from-rules (hypothesis rules)
  (mapcan #'(lambda (r) (eval-rule hypothesis r)) rules))

(defun limpia-vinculos (termino bindings)
  (unify termino (subst-bindings bindings termino)))


#|____________________________________________________________________________
FUNCTION: EVAL-RULE

COMMENTS:
Returns all the solutions found using the rule given as input argument.

EXAMPLES:
> (setq *rule-list*
        '((R1 (pertenece ?E (?E . ?_)))
          (R2 (pertenece ?E (?_ . ?Xs)) :- ((pertenece ?E ?Xs)))))
Then:
> (EVAL-RULE
   (PERTENECE 1 (1 6 7))
   (R1 (PERTENECE ?E.42 (?E.42 . ?_))))
returns:
(((NIL)))
That is, the query (PERTENECE 1 (1 6 7)) can be proven from the given rule, and
no binding in the variables in the query is necessary (in fact, the query has no variables).
On the other hand:
> (EVAL-RULE
   (PERTENECE 1 (7))
   (R2 (PERTENECE ?E.49 (?_ . ?XS.50)) :- ((PERTENECE ?E.49 ?XS.50))))
returns:
NIL
That is, the query cannot be proven from the rule R2.
____________________________________________________________________________|#

(defun eval-rule (hypothesis rule)
  (let ((bindings-then
          (unify (rule-then rule) hypothesis)))
    (unless (equal +fail+ bindings-then)
      (if (rule-ifs rule)
          (mapcar #'(lambda(b) (limpia-vinculos hypothesis (append bindings-then b)))
                  (consulta (subst-bindings bindings-then (rule-ifs rule))))
          (list (limpia-vinculos hypothesis bindings-then))))))


(defun ask-user (hypothesis)
  (let ((question hypothesis))
    (cond
      ((variables-in question) +fail+)
      ((not-in-fact-list? question) +fail+)
      (*mundo-abierto*
       (format t "~%Es cierto el hecho ~S? (T/nil)" question)
       (cond
         ((read) (add-fact question) +no-bindings+)
         (T (add-fact (list 'NOT question)) +fail+)))
      (T +fail+))))


; value-from-equality:
(defun value-from-equality (hypothesis)
  (let ((new-bindings (unify (second hypothesis) (third hypothesis))))
    (if (not (equal +fail+ new-bindings))
        (list new-bindings))))



#|____________________________________________________________________________
FUNCTION: UNIFY

COMMENTS:
Finds the most general unifier of two input expressions, taking into account the
bindings specified in the input <bindings>
In case the two expressions can unify, the function returns the total bindings necessary
for that unification. Otherwise, returns +fail+

EXAMPLES:
> (unify '1 '1)
((NIL)) ;; which is the constant +no-bindings+
> (unify 1 '2)
nil ;; which is the constant +fail+
> (unify '?x 1)
((?x . 1))
> (unify '(1 1) ?x)
((? x 1 1))
> (unify '?_ '?x)
((NIL))
> (unify '(p ?x 1 2) '(p ?y ?_ ?_))
((?x . ?y))
> (unify '(?a . ?_) '(1 2 3))
((?a . 1))
> (unify '(?_ ?_) '(1 2))
((nil))
> (unify '(?a . ?b) '(1 2 3))
((?b 2 3) (?a . 1))
> (unify '(?a . ?b) '(?v . ?d))
((?b . ?d) (?a . ?v))
> (unify '(?eval (+ 1 1)) '1)
nil
> (unify '(?eval (+ 1 1)) '2)
(nil))
____________________________________________________________________________|#

(defun unify (x y &optional (bindings +no-bindings+))
  "See if x and y match with given bindings. If they do,
  return a binding list that would make them equal [p 303]."
  (cond ((eq bindings +fail+) +fail+)
        ((eql x y) bindings)
        ((eval? x) (unify-eval x y bindings))
        ((eval? y) (unify-eval y x bindings))
        ((variable? x) (unify-var x y bindings))
        ((variable? y) (unify-var y x bindings))
        ((and (consp x) (consp y))
         (unify (rest x) (rest y)
                (unify (first x) (first y) bindings)))
        (t +fail+)))


;; rename-variables: renames ?X to ?X.1, ?Y to ?Y.2, etc., except ?_ which is not renamed
(defun rename-variables (x)
  "Replace all variables in x with new ones. Except ?_"
  (sublis (mapcar #'(lambda (var)
                      (if (anonymous-var? var)
                          (make-binding var var)
                          (make-binding var (new-variable var))))
                  (variables-in x))
          x))



;;;; Auxiliary Functions

(defun unify-var (var x bindings)
  "Unify var with x, using (and maybe extending) bindings [p 303]."
  (cond ((or (anonymous-var? var)(anonymous-var? x)) bindings)
        ((get-binding var bindings)
         (unify (lookup var bindings) x bindings))
        ((and (variable? x) (get-binding x bindings))
         (unify var (lookup x bindings) bindings))
        ((occurs-in? var x bindings)
         +fail+)
        (t (extend-bindings var x bindings))))

(defun variable? (x)
  "Is x a variable (a symbol starting with ?)?"
  (and (symbolp x) (eql (char (symbol-name x) 0) #\?)))

(defun get-binding (var bindings)
  "Find a (variable . value) pair in a binding list."
  (assoc var bindings))

(defun binding-var (binding)
  "Get the variable part of a single binding."
  (car binding))

(defun binding-val (binding)
  "Get the value part of a single binding."
  (cdr binding))

(defun make-binding (var val) (cons var val))

(defun lookup (var bindings)
  "Get the value part (for var) from a binding list."
  (binding-val (get-binding var bindings)))

(defun extend-bindings (var val bindings)
  "Add a (var . value) pair to a binding list."
  (append
   (unless (eq bindings +no-bindings+) bindings)
   (list (make-binding var val))))

(defun occurs-in? (var x bindings)
  "Does var occur anywhere inside x?"
  (cond ((eq var x) t)
        ((and (variable? x) (get-binding x bindings))
         (occurs-in? var (lookup x bindings) bindings))
        ((consp x) (or (occurs-in? var (first x) bindings)
                       (occurs-in? var (rest x) bindings)))
        (t nil)))

(defun subst-bindings (bindings x)
  "Substitute the value of variables in bindings into x,
  taking recursively bound variables into account."
  (cond ((eq bindings +fail+) +fail+)
        ((eq bindings +no-bindings+) x)
        ((and (listp x) (eq '?eval (car x)))
         (subst-bindings-quote bindings x))
        ((and (variable? x) (get-binding x bindings))
         (subst-bindings bindings (lookup x bindings)))
        ((atom x) x)
        (t (cons (subst-bindings bindings (car x)) ;; s/reuse-cons/cons
                 (subst-bindings bindings (cdr x))))))

(defun unifier (x y)
  "Return something that unifies with both x and y (or fail)."
  (subst-bindings (unify x y) x))

(defun variables-in (exp)
  "Return a list of all the variables in EXP."
  (unique-find-anywhere-if #'variable? exp))

(defun unique-find-anywhere-if (predicate tree &optional found-so-far)
  "Return a list of leaves of tree satisfying predicate,
  with duplicates removed."
  (if (atom tree)
      (if (funcall predicate tree)
          (pushnew tree found-so-far)
          found-so-far)
      (unique-find-anywhere-if
       predicate
       (first tree)
       (unique-find-anywhere-if predicate (rest tree)
                                found-so-far))))

(defun find-anywhere-if (predicate tree)
  "Does predicate apply to any atom in the tree?"
  (if (atom tree)
      (funcall predicate tree)
      (or (find-anywhere-if predicate (first tree))
          (find-anywhere-if predicate (rest tree)))))

(defun new-variable (var)
  "Create a new variable. Assumes user never types variables of form ?X.9"
  (gentemp (format nil "~S." var)))
; (gentemp "?") )
;;;

(defun anonymous-var? (x)
  (eq x '?_))

(defun subst-bindings-quote (bindings x)
  "Substitute the value of variables in bindings into x,
  taking recursively bound variables into account."
  (cond ((eq bindings +fail+) +fail+)
        ((eq bindings +no-bindings+) x)
        ((and (variable? x) (get-binding x bindings))
         (if (variable? (lookup x bindings))
             (subst-bindings-quote bindings (lookup x bindings))
             (subst-bindings-quote bindings (list 'quote (lookup x bindings)))
             )
         )
        ((atom x) x)
        (t (cons (subst-bindings-quote bindings (car x)) ;; s/reuse-cons/cons
                 (subst-bindings-quote bindings (cdr x))))))

(defun eval? (x)
  (and (consp x) (eq (first x) '?eval)))

(defun unify-eval (x y bindings)
  (let ((exp (subst-bindings-quote bindings (second x))))
    (if (variables-in exp)
        +fail+
        (unify (eval exp) y bindings))))



(defun rule-ifs (rule) (fourth rule))
(defun rule-then (rule) (second rule))


(defun equality? (term)
  (and (consp term) (eql (first term) '?=)))


(defun in-fact-list? (expresion)
  (some #'(lambda(x) (equal x expresion)) *fact-list*))

(defun not-in-fact-list? (expresion)
  (if (eq (car expresion) 'NOT)
      (in-fact-list? (second expresion))
      (in-fact-list? (list 'NOT expresion))))


;; add-fact:

(defun add-fact (fact)
  (setq *fact-list* (cons fact *fact-list*)))


(defun variable? (x)
  "Is x a variable (a symbol starting with ?) except ?eval and ?="
  (and (not (equal x '?eval)) (not (equal x '?=))
       (symbolp x) (eql (char (symbol-name x) 0) #\?)))


;; EOF
21 samples/Common Lisp/sample.lisp Normal file
@@ -0,0 +1,21 @@
;;;; -*- lisp -*-

(in-package :foo)

;;; Header comment.
(defvar *foo*)

(eval-when (:execute :compile-toplevel :load-toplevel)
  (defun add (x &optional y &key z)
    (declare (ignore z))
    ;; Inline comment.
    (+ x (or y 1))))

#|
Multi-line comment.
|#

(defmacro foo (x &body b)
  (if x
      `(1+ ,x) ;After-line comment.
      42))
47 samples/Creole/creole.creole Normal file
@@ -0,0 +1,47 @@
= Creole

Creole is a Creole-to-HTML converter for Creole, the lightweight markup
language (http://wikicreole.org/). Github uses this converter to render *.creole files.

Project page on github:

* http://github.com/minad/creole

Travis-CI:

* https://travis-ci.org/minad/creole

RDOC:

* http://rdoc.info/projects/minad/creole

== INSTALLATION

{{{
gem install creole
}}}

== SYNOPSIS

{{{
require 'creole'
html = Creole.creolize('== Creole text')
}}}

== BUGS

If you found a bug, please report it at the Creole project's tracker
on GitHub:

http://github.com/minad/creole/issues

== AUTHORS

* Lars Christensen (larsch)
* Daniel Mendler (minad)

== LICENSE

Creole is Copyright (c) 2008 - 2013 Lars Christensen, Daniel Mendler. It is free software, and
may be redistributed under the terms specified in the README file of
the Ruby distribution.
Some files were not shown because too many files have changed in this diff