Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00.

Compare commits: 845 commits
Commit list omitted: the compare spans 845 commits (listed from 8c9ba2214a through e0190a5a6e), but only abbreviated SHA1s were captured in this mirror view; the author, date, and message columns are empty.
@@ -3,6 +3,7 @@ rvm:
- 1.8.7
- 1.9.2
- 1.9.3
- 2.0.0
- ree
notifications:
  disabled: true
Gemfile (7)

@@ -1,2 +1,7 @@
source :rubygems
source 'https://rubygems.org'
gemspec

if RUBY_VERSION < "1.9.3"
  # escape_utils 1.0.0 requires 1.9.3 and above
  gem "escape_utils", "0.3.2"
end
LICENSE (2)

@@ -1,4 +1,4 @@
Copyright (c) 2011 GitHub, Inc.
Copyright (c) 2011-2013 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
README.md (51)

@@ -10,13 +10,16 @@ Linguist defines the list of all languages known to GitHub in a [yaml file](http

Most languages are detected by their file extension. This is the fastest and most common situation.

For disambiguating between files with common extensions, we use a [bayesian classifier](https://github.com/github/linguist/blob/master/lib/linguist/classifier.rb). For an example, this helps us tell the difference between `.h` files which could be either C, C++, or Obj-C.
For disambiguating between files with common extensions, we use a [Bayesian classifier](https://github.com/github/linguist/blob/master/lib/linguist/classifier.rb). For an example, this helps us tell the difference between `.h` files which could be either C, C++, or Obj-C.

In the actual GitHub app we deal with `Grit::Blob` objects. For testing, there is a simple `FileBlob` API.

Linguist::FileBlob.new("lib/linguist.rb").language.name #=> "Ruby"
```ruby
Linguist::FileBlob.new("bin/linguist").language.name #=> "Ruby"
Linguist::FileBlob.new("lib/linguist.rb").language.name #=> "Ruby"
Linguist::FileBlob.new("bin/linguist").language.name #=> "Ruby"
```

See [lib/linguist/language.rb](https://github.com/github/linguist/blob/master/lib/linguist/language.rb) and [lib/linguist/languages.yml](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml).

@@ -24,20 +27,22 @@ See [lib/linguist/language.rb](https://github.com/github/linguist/blob/master/li

The actual syntax highlighting is handled by our Pygments wrapper, [pygments.rb](https://github.com/tmm1/pygments.rb). It also provides a [Lexer abstraction](https://github.com/tmm1/pygments.rb/blob/master/lib/pygments/lexer.rb) that determines which highlighter should be used on a file.

We typically run on a prerelease version of Pygments, [pygments.rb](https://github.com/tmm1/pygments.rb), to get early access to new lexers. The [lexers.yml](https://github.com/github/linguist/blob/master/lib/linguist/lexers.yml) file is a dump of the lexers we have available on our server.
We typically run on a pre-release version of Pygments, [pygments.rb](https://github.com/tmm1/pygments.rb), to get early access to new lexers. The [languages.yml](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml) file is a dump of the lexers we have available on our server.

### Stats

The Language Graph you see on every repository is built by aggregating the languages of all repo's blobs. The top language in the graph determines the project's primary language. Collectively, these stats make up the [Top Languages](https://github.com/languages) page.
The Language Graph you see on every repository is built by aggregating the languages of each file in that repository.
The top language in the graph determines the project's primary language. Collectively, these stats make up the [Top Languages](https://github.com/languages) page.

The repository stats API can be used on a directory:
The repository stats API, accessed through `#languages`, can be used on a directory:

project = Linguist::Repository.from_directory(".")
project.language.name #=> "Ruby"
project.languages #=> { "Ruby" => 0.98,
"Shell" => 0.02 }
```ruby
project = Linguist::Repository.from_directory(".")
project.language.name #=> "Ruby"
project.languages #=> { "Ruby" => 0.98, "Shell" => 0.02 }
```

These stats are also printed out by the binary. Try running `linguist` on itself:
These stats are also printed out by the `linguist` binary. Try running `linguist` on itself:

$ bundle exec linguist lib/
100% Ruby

@@ -46,17 +51,21 @@ These stats are also printed out by the binary. Try running `linguist` on itself

Checking other code into your git repo is a common practice. But this often inflates your project's language stats and may even cause your project to be labeled as another language. We are able to identify some of these files and directories and exclude them.

Linguist::FileBlob.new("vendor/plugins/foo.rb").vendored? # => true
```ruby
Linguist::FileBlob.new("vendor/plugins/foo.rb").vendored? # => true
```

See [Linguist::BlobHelper#vendored?](https://github.com/github/linguist/blob/master/lib/linguist/blob_helper.rb) and [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml).

#### Generated file detection

Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an extra bonus, these files are suppressed in Diffs.
Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an extra bonus, these files are suppressed in diffs.

Linguist::FileBlob.new("underscore.min.js").generated? # => true
```ruby
Linguist::FileBlob.new("underscore.min.js").generated? # => true
```

See [Linguist::BlobHelper#generated?](https://github.com/github/linguist/blob/master/lib/linguist/blob_helper.rb).
See [Linguist::Generated#generated?](https://github.com/github/linguist/blob/master/lib/linguist/generated.rb).

## Installation

@@ -74,12 +83,18 @@ To run the tests:

## Contributing

The majority of patches won't need to touch any Ruby code at all. The [master language list](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml) is just a configuration file.
The majority of contributions won't need to touch any Ruby code at all. The [master language list](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml) is just a YAML configuration file.

We try to only add languages once they have some usage on GitHub, so please note in-the-wild usage examples in your pull request.

Almost all bug fixes or new language additions should come with some additional code samples. Just drop them under [`samples/`](https://github.com/github/linguist/tree/master/samples) in the correct subdirectory and our test suite will automatically test them. In most cases you shouldn't need to add any new assertions.

To update the `samples.json` after adding new files to [`samples/`](https://github.com/github/linguist/tree/master/samples):

bundle exec rake samples

### Testing

Sometimes getting the tests running can be to much work especially if you don't have much Ruby experience. Its okay, be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.
Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.

Heres our current build status, which is hopefully green: [](http://travis-ci.org/github/linguist)
Here's our current build status, which is hopefully green: [](http://travis-ci.org/github/linguist)
Rakefile (13)

@@ -1,11 +1,11 @@
require 'rake/clean'
require 'rake/testtask'
require 'yaml'
require 'json'
task :default => :test
Rake::TestTask.new do |t|
t.warning = true
end
Rake::TestTask.new
task :samples do
require 'linguist/samples'

@@ -15,6 +15,13 @@ task :samples do
File.open('lib/linguist/samples.json', 'w') { |io| io.write json }
end
task :build_gem do
languages = YAML.load_file("lib/linguist/languages.yml")
File.write("lib/linguist/languages.json", JSON.dump(languages))
`gem build github-linguist.gemspec`
File.delete("lib/linguist/languages.json")
end
namespace :classifier do
LIMIT = 1_000
bin/linguist (10)

@@ -1,5 +1,9 @@
#!/usr/bin/env ruby
# linguist — detect language type for a file, or, given a directory, determine language breakdown
#
# usage: linguist <path>
require 'linguist/file_blob'
require 'linguist/repository'

@@ -8,8 +12,9 @@ path = ARGV[0] || Dir.pwd
if File.directory?(path)
repo = Linguist::Repository.from_directory(path)
repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
percentage = ((size / repo.size.to_f) * 100).round
puts "%-4s %s" % ["#{percentage}%", language]
percentage = ((size / repo.size.to_f) * 100)
percentage = sprintf '%.2f' % percentage
puts "%-7s %s" % ["#{percentage}%", language]
end
elsif File.file?(path)
blob = Linguist::FileBlob.new(path, Dir.pwd)

@@ -23,7 +28,6 @@ elsif File.file?(path)
puts "#{blob.name}: #{blob.loc} lines (#{blob.sloc} sloc)"
puts " type: #{type}"
puts " extension: #{blob.pathname.extname}"
puts " mime type: #{blob.mime_type}"
puts " language: #{blob.language}"
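The last hunk above switches the per-language report from whole-number percentages to two-decimal ones. A minimal sketch of the difference, using made-up byte counts (the `sizes` hash below is illustrative, not taken from the diff):

```ruby
# Illustrative only: compare the old integer rounding with the new
# two-decimal formatting used by bin/linguist above.
sizes = { "Ruby" => 4100, "Shell" => 100 }   # hypothetical bytes per language
total = sizes.values.reduce(:+).to_f

sizes.sort_by { |_, size| size }.reverse.each do |language, size|
  old_style = ((size / total) * 100).round            # e.g. 98
  new_style = sprintf("%.2f", (size / total) * 100)   # e.g. "97.62"
  puts "%-4s %s" % ["#{old_style}%", language]        # old output format
  puts "%-7s %s" % ["#{new_style}%", language]        # new output format
end
```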
@@ -1,18 +1,21 @@
Gem::Specification.new do |s|
s.name = 'github-linguist'
s.version = '2.2.1'
s.version = '2.10.5'
s.summary = "GitHub Language detection"
s.authors = "GitHub"
s.authors = "GitHub"
s.homepage = "https://github.com/github/linguist"
s.files = Dir['lib/**/*']
s.executables << 'linguist'
s.add_dependency 'charlock_holmes', '~> 0.6.6'
s.add_dependency 'escape_utils', '~> 0.2.3'
s.add_dependency 'mime-types', '~> 1.18'
s.add_dependency 'pygments.rb', '>= 0.2.13'
s.add_dependency 'escape_utils', '>= 0.3.1'
s.add_dependency 'mime-types', '~> 1.19'
s.add_dependency 'pygments.rb', '~> 0.5.4'
s.add_development_dependency 'json'
s.add_development_dependency 'mocha'
s.add_development_dependency 'rake'
s.add_development_dependency 'yajl-ruby'
end
@@ -1,6 +1,5 @@
require 'linguist/blob_helper'
require 'linguist/generated'
require 'linguist/language'
require 'linguist/mime'
require 'linguist/repository'
require 'linguist/samples'
@@ -1,13 +1,19 @@
require 'linguist/generated'
require 'linguist/language'
require 'linguist/mime'
require 'charlock_holmes'
require 'escape_utils'
require 'mime/types'
require 'pygments'
require 'yaml'
module Linguist
# DEPRECATED Avoid mixing into Blob classes. Prefer functional interfaces
# like `Language.detect` over `Blob#language`. Functions are much easier to
# cache and compose.
#
# Avoid adding additional bloat to this module.
#
# BlobHelper is a mixin for Blobish classes that respond to "name",
# "data" and "size" such as Grit::Blob.
module BlobHelper

@@ -23,6 +29,22 @@ module Linguist
File.extname(name.to_s)
end
# Internal: Lookup mime type for extension.
#
# Returns a MIME::Type
def _mime_type
if defined? @_mime_type
@_mime_type
else
guesses = ::MIME::Types.type_for(extname.to_s)
# Prefer text mime types over binary
@_mime_type = guesses.detect { |type| type.ascii? } ||
# Otherwise use the first guess
guesses.first
end
end
# Public: Get the actual blob mime type
#
# Examples

@@ -32,7 +54,23 @@ module Linguist
#
# Returns a mime type String.
def mime_type
@mime_type ||= Mime.mime_for(extname.to_s)
_mime_type ? _mime_type.to_s : 'text/plain'
end
# Internal: Is the blob binary according to its mime type
#
# Return true or false
def binary_mime_type?
_mime_type ? _mime_type.binary? : false
end
# Internal: Is the blob binary according to its mime type,
# overriding it if we have better data from the languages.yml
# database.
#
# Return true or false
def likely_binary?
binary_mime_type? && !Language.find_by_filename(name)
end
# Public: Get the Content-Type header value

@@ -83,15 +121,6 @@ module Linguist
@detect_encoding ||= CharlockHolmes::EncodingDetector.new.detect(data) if data
end
# Public: Is the blob binary according to its mime type
#
# Return true or false
def binary_mime_type?
if mime_type = Mime.lookup_mime_type_for(extname)
mime_type.binary?
end
end
# Public: Is the blob binary?
#
# Return true or false

@@ -125,7 +154,28 @@ module Linguist
#
# Return true or false
def image?
['.png', '.jpg', '.jpeg', '.gif'].include?(extname)
['.png', '.jpg', '.jpeg', '.gif'].include?(extname.downcase)
end
# Public: Is the blob a supported 3D model format?
#
# Return true or false
def solid?
extname.downcase == '.stl'
end
# Public: Is this blob a CSV file?
#
# Return true or false
def csv?
text? && extname.downcase == '.csv'
end
# Public: Is the blob a PDF?
#
# Return true or false
def pdf?
extname.downcase == '.pdf'
end
MEGABYTE = 1024 * 1024

@@ -139,14 +189,13 @@ module Linguist
# Public: Is the blob safe to colorize?
#
# We use Pygments.rb for syntax highlighting blobs, which
# has some quirks and also is essentially 'un-killable' via
# normal timeout. To workaround this we try to
# carefully handling Pygments.rb anything it can't handle.
# We use Pygments for syntax highlighting blobs. Pygments
# can be too slow for very large blobs or for certain
# corner-case blobs.
#
# Return true or false
def safe_to_colorize?
text? && !large? && !high_ratio_of_long_lines?
!large? && text? && !high_ratio_of_long_lines?
end
# Internal: Does the blob have a ratio of long lines?

@@ -190,7 +239,12 @@ module Linguist
#
# Returns an Array of lines
def lines
@lines ||= (viewable? && data) ? data.split("\n", -1) : []
@lines ||=
if viewable? && data
data.split(/\r\n|\r|\n/, -1)
else
[]
end
end
# Public: Get number of lines of code

@@ -213,7 +267,7 @@ module Linguist
# Public: Is the blob a generated file?
#
# Generated source code is supressed in diffs and is ignored by
# Generated source code is suppressed in diffs and is ignored by
# language statistics.
#
# May load Blob#data

@@ -223,47 +277,21 @@ module Linguist
@_generated ||= Generated.generated?(name, lambda { data })
end
# Public: Should the blob be indexed for searching?
#
# Excluded:
# - Files over 0.1MB
# - Non-text files
# - Langauges marked as not searchable
# - Generated source files
#
# Please add additional test coverage to
# `test/test_blob.rb#test_indexable` if you make any changes.
#
# Return true or false
def indexable?
if binary?
false
elsif extname == '.txt'
true
elsif language.nil?
false
elsif !language.searchable?
false
elsif generated?
false
elsif size > 100 * 1024
false
else
true
end
end
# Public: Detects the Language of the blob.
#
# May load Blob#data
#
# Returns a Language or nil if none is detected
def language
if defined? @language
@language
elsif !binary_mime_type?
@language = Language.detect(name.to_s, lambda { data }, mode)
return @language if defined? @language
if defined?(@data) && @data.is_a?(String)
data = @data
else
data = lambda { (binary_mime_type? || binary?) ? "" : self.data }
end
@language = Language.detect(name.to_s, data, mode)
end
# Internal: Get the lexer of the blob.

@@ -284,19 +312,5 @@ module Linguist
options[:options][:encoding] ||= encoding
lexer.highlight(data, options)
end
# Public: Highlight syntax of blob without the outer highlight div
# wrapper.
#
# options - A Hash of options (defaults to {})
#
# Returns html String
def colorize_without_wrapper(options = {})
if text = colorize(options)
text[%r{<div class="highlight"><pre>(.*?)</pre>\s*</div>}m, 1]
else
''
end
end
end
end
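The `_mime_type` hunk above replaces the old `Mime.mime_for` lookup with a direct query of the `mime-types` gem, preferring a text type over a binary one when several types match an extension. A small sketch of that preference, using the same `type_for`/`ascii?` calls that appear in the hunk (the `.html` example is illustrative, and the sketch assumes the mime-types 1.x API pinned in the gemspec above):

```ruby
require 'mime/types'

# Among all registered types for an extension, prefer a text (ascii) type,
# otherwise fall back to the first guess -- mirroring _mime_type above.
guesses = MIME::Types.type_for('.html')
best    = guesses.detect { |type| type.ascii? } || guesses.first

puts best.to_s      # => "text/html"
puts best.binary?   # => false
```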
@@ -14,6 +14,9 @@ module Linguist
# Classifier.train(db, 'Ruby', "def hello; end")
#
# Returns nothing.
#
# Set LINGUIST_DEBUG=1 or =2 to see probabilities per-token or
# per-language. See also #dump_all_tokens, below.
def self.train!(db, language, data)
tokens = Tokenizer.tokenize(data)

@@ -40,7 +43,7 @@ module Linguist
# Public: Guess language of data.
#
# db - Hash of classifer tokens database.
# db - Hash of classifier tokens database.
# data - Array of tokens or String data to analyze.
# languages - Array of language name Strings to restrict to.
#

@@ -75,17 +78,19 @@ module Linguist
def classify(tokens, languages)
return [] if tokens.nil?
tokens = Tokenizer.tokenize(tokens) if tokens.is_a?(String)
scores = {}
debug_dump_all_tokens(tokens, languages) if verbosity >= 2
languages.each do |language|
scores[language] = tokens_probability(tokens, language) +
language_probability(language)
debug_dump_probabilities(tokens, language) if verbosity >= 1
scores[language] = tokens_probability(tokens, language) + language_probability(language)
end
scores.sort { |a, b| b[1] <=> a[1] }.map { |score| [score[0], score[1]] }
end
# Internal: Probably of set of tokens in a language occuring - P(D | C)
# Internal: Probably of set of tokens in a language occurring - P(D | C)
#
# tokens - Array of String tokens.
# language - Language to check.

@@ -97,7 +102,7 @@ module Linguist
end
end
# Internal: Probably of token in language occuring - P(F | C)
# Internal: Probably of token in language occurring - P(F | C)
#
# token - String token.
# language - Language to check.

@@ -111,7 +116,7 @@ module Linguist
end
end
# Internal: Probably of a language occuring - P(C)
# Internal: Probably of a language occurring - P(C)
#
# language - Language to check.
#

@@ -119,5 +124,48 @@ module Linguist
def language_probability(language)
Math.log(@languages[language].to_f / @languages_total.to_f)
end
private
def verbosity
@verbosity ||= (ENV['LINGUIST_DEBUG'] || 0).to_i
end
def debug_dump_probabilities
printf("%10s = %10.3f + %7.3f = %10.3f\n",
language, tokens_probability(tokens, language), language_probability(language), scores[language])
end
# Internal: show a table of probabilities for each <token,language> pair.
#
# The number in each table entry is the number of "points" that each
# token contributes toward the belief that the file under test is a
# particular language. Points are additive.
#
# Points are the number of times a token appears in the file, times
# how much more likely (log of probability ratio) that token is to
# appear in one language vs. the least-likely language. Dashes
# indicate the least-likely language (and zero points) for each token.
def debug_dump_all_tokens(tokens, languages)
maxlen = tokens.map { |tok| tok.size }.max
printf "%#{maxlen}s", ""
puts " #" + languages.map { |lang| sprintf("%10s", lang) }.join
token_map = Hash.new(0)
tokens.each { |tok| token_map[tok] += 1 }
token_map.sort.each { |tok, count|
arr = languages.map { |lang| [lang, token_probability(tok, lang)] }
min = arr.map { |a,b| b }.min
minlog = Math.log(min)
if !arr.inject(true) { |result, n| result && n[1] == arr[0][1] }
printf "%#{maxlen}s%5d", tok, count
puts arr.map { |ent|
ent[1] == min ? " -" : sprintf("%10.3f", count * (Math.log(ent[1]) - minlog))
}.join
end
}
end
end
end
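Spelled out, the score that `classify` sums in the hunk above (`tokens_probability` plus `language_probability`, both in log space, per the P(D | C) and P(C) comments) is the usual naive Bayes quantity; a sketch of the implied formula:

```latex
\mathrm{score}(C) \;=\; \log P(C) \;+\; \sum_{t \in \text{tokens}} \log P(t \mid C)
```

Languages are then ranked by this score, which is what the `scores.sort { |a, b| b[1] <=> a[1] }` line does.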
@@ -43,7 +43,7 @@ module Linguist
# Internal: Is the blob a generated file?
#
# Generated source code is supressed in diffs and is ignored by
# Generated source code is suppressed in diffs and is ignored by
# language statistics.
#
# Please add additional test coverage to

@@ -52,11 +52,16 @@ module Linguist
# Return true or false
def generated?
name == 'Gemfile.lock' ||
minified_javascript? ||
minified_files? ||
compiled_coffeescript? ||
xcode_project_file? ||
generated_parser? ||
generated_net_docfile? ||
generated_parser?
generated_net_designer_file? ||
generated_protocol_buffer? ||
generated_jni_header? ||
composer_lock? ||
node_modules?
end
# Internal: Is the blob an XCode project file?

@@ -69,16 +74,18 @@ module Linguist
['.xib', '.nib', '.storyboard', '.pbxproj', '.xcworkspacedata', '.xcuserstate'].include?(extname)
end
# Internal: Is the blob minified JS?
# Internal: Is the blob minified files?
#
# Consider JS minified if the average line length is
# greater then 100c.
# Consider a file minified if the average line length is
# greater then 110c.
#
# Currently, only JS and CSS files are detected by this method.
#
# Returns true or false.
def minified_javascript?
return unless extname == '.js'
def minified_files?
return unless ['.js', '.css'].include? extname
if lines.any?
(lines.inject(0) { |n, l| n += l.length } / lines.length) > 100
(lines.inject(0) { |n, l| n += l.length } / lines.length) > 110
else
false
end

@@ -86,7 +93,7 @@ module Linguist
# Internal: Is the blob of JS generated by CoffeeScript?
#
# CoffeScript is meant to output JS that would be difficult to
# CoffeeScript is meant to output JS that would be difficult to
# tell if it was generated or not. Look for a number of patterns
# output by the CS compiler.
#

@@ -142,6 +149,16 @@ module Linguist
lines[-2].include?("</doc>")
end
# Internal: Is this a codegen file for a .NET project?
#
# Visual Studio often uses code generation to generate partial classes, and
# these files can be quite unwieldy. Let's hide them.
#
# Returns true or false
def generated_net_designer_file?
name.downcase =~ /\.designer\.cs$/
end
# Internal: Is the blob of JS a parser generated by PEG.js?
#
# PEG.js-generated parsers are not meant to be consumed by humans.

@@ -158,5 +175,43 @@ module Linguist
false
end
# Internal: Is the blob a C++, Java or Python source file generated by the
# Protocol Buffer compiler?
#
# Returns true of false.
def generated_protocol_buffer?
return false unless ['.py', '.java', '.h', '.cc', '.cpp'].include?(extname)
return false unless lines.count > 1
return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
end
# Internal: Is the blob a C/C++ header generated by the Java JNI tool javah?
#
# Returns true of false.
def generated_jni_header?
return false unless extname == '.h'
return false unless lines.count > 2
return lines[0].include?("/* DO NOT EDIT THIS FILE - it is machine generated */") &&
lines[1].include?("#include <jni.h>")
end
# node_modules/ can contain large amounts of files, in general not meant
# for humans in pull requests.
#
# Returns true or false.
def node_modules?
!!name.match(/node_modules\//)
end
# the php composer tool generates a lock file to represent a specific dependency state.
# In general not meant for humans in pull requests.
#
# Returns true or false.
def composer_lock?
!!name.match(/composer.lock/)
end
end
end
@@ -1,6 +1,10 @@
require 'escape_utils'
require 'pygments'
require 'yaml'
begin
require 'json'
rescue LoadError
end
require 'linguist/classifier'
require 'linguist/samples'

@@ -15,11 +19,30 @@ module Linguist
@index = {}
@name_index = {}
@alias_index = {}
@extension_index = Hash.new { |h,k| h[k] = [] }
@filename_index = Hash.new { |h,k| h[k] = [] }
@extension_index = Hash.new { |h,k| h[k] = [] }
@interpreter_index = Hash.new { |h,k| h[k] = [] }
@filename_index = Hash.new { |h,k| h[k] = [] }
@primary_extension_index = {}
# Valid Languages types
TYPES = [:data, :markup, :programming]
TYPES = [:data, :markup, :programming, :prose]
# Names of non-programming languages that we will still detect
#
# Returns an array
def self.detectable_markup
["CSS", "Less", "Sass", "Stylus", "TeX"]
end
# Detect languages by a specific type
#
# type - A symbol that exists within TYPES
#
# Returns an array
def self.by_type(type)
all.select { |h| h.type == type }
end
# Internal: Create a new Language object
#

@@ -56,6 +79,16 @@ module Linguist
@extension_index[extension] << language
end
if @primary_extension_index.key?(language.primary_extension)
raise ArgumentError, "Duplicate primary extension: #{language.primary_extension}"
end
@primary_extension_index[language.primary_extension] = language
language.interpreters.each do |interpreter|
@interpreter_index[interpreter] << language
end
language.filenames.each do |filename|
@filename_index[filename] << language
end

@@ -73,7 +106,7 @@ module Linguist
#
# Returns Language or nil.
def self.detect(name, data, mode = nil)
# A bit of an elegant hack. If the file is exectable but extensionless,
# A bit of an elegant hack. If the file is executable but extensionless,
# append a "magic" extension so it can be classified with other
# languages that have shebang scripts.
if File.extname(name).empty? && mode && (mode.to_i(8) & 05) == 05

@@ -84,8 +117,13 @@ module Linguist
if possible_languages.length > 1
data = data.call() if data.respond_to?(:call)
if result = Classifier.classify(Samples::DATA, data, possible_languages.map(&:name)).first
Language[result[0]]
if data.nil? || data == ""
nil
elsif (result = find_by_shebang(data)) && !result.empty?
result.first
elsif classified = Classifier.classify(Samples::DATA, data, possible_languages.map(&:name)).first
Language[classified[0]]
end
else
possible_languages.first

@@ -139,7 +177,24 @@ module Linguist
# Returns all matching Languages or [] if none were found.
def self.find_by_filename(filename)
basename, extname = File.basename(filename), File.extname(filename)
@filename_index[basename] + @extension_index[extname]
langs = [@primary_extension_index[extname]] +
@filename_index[basename] +
@extension_index[extname]
langs.compact.uniq
end
# Public: Look up Languages by shebang line.
#
# data - Array of tokens or String data to analyze.
#
# Examples
#
# Language.find_by_shebang("#!/bin/bash\ndate;")
# # => [#<Language name="Bash">]
#
# Returns the matching Language
def self.find_by_shebang(data)
@interpreter_index[Linguist.interpreter_from_shebang(data)]
end
# Public: Look up Language by its name or lexer.

@@ -220,12 +275,14 @@ module Linguist
raise(ArgumentError, "#{@name} is missing lexer")
@ace_mode = attributes[:ace_mode]
@wrap = attributes[:wrap] || false
# Set legacy search term
@search_term = attributes[:search_term] || default_alias_name
# Set extensions or default to [].
@extensions = attributes[:extensions] || []
@interpreters = attributes[:interpreters] || []
@filenames = attributes[:filenames] || []
unless @primary_extension = attributes[:primary_extension]

@@ -310,6 +367,11 @@ module Linguist
# Returns a String name or nil
attr_reader :ace_mode
# Public: Should language lines be wrapped
#
# Returns true or false
attr_reader :wrap
# Public: Get extensions
#
# Examples

@@ -321,7 +383,7 @@ module Linguist
# Deprecated: Get primary extension
#
# Defaults to the first extension but can be overriden
# Defaults to the first extension but can be overridden
# in the languages.yml.
#
# The primary extension can not be nil. Tests should verify this.

@@ -333,6 +395,15 @@ module Linguist
# Returns the extension String.
attr_reader :primary_extension
# Public: Get interpreters
#
# Examples
#
# # => ['awk', 'gawk', 'mawk' ...]
#
# Returns the interpreters Array
attr_reader :interpreters
# Public: Get filenames
#
# Examples

@@ -426,19 +497,40 @@ module Linguist
end
extensions = Samples::DATA['extnames']
interpreters = Samples::DATA['interpreters']
filenames = Samples::DATA['filenames']
popular = YAML.load_file(File.expand_path("../popular.yml", __FILE__))
YAML.load_file(File.expand_path("../languages.yml", __FILE__)).each do |name, options|
languages_yml = File.expand_path("../languages.yml", __FILE__)
languages_json = File.expand_path("../languages.json", __FILE__)
if File.exist?(languages_json) && defined?(JSON)
languages = JSON.load(File.read(languages_json))
else
languages = YAML.load_file(languages_yml)
end
languages.each do |name, options|
options['extensions'] ||= []
options['interpreters'] ||= []
options['filenames'] ||= []
if extnames = extensions[name]
extnames.each do |extname|
if !options['extensions'].include?(extname)
options['extensions'] << extname
else
warn "#{name} #{extname.inspect} is already defined in samples/. Remove from languages.yml."
end
end
end
if interpreters == nil
interpreters = {}
end
if interpreter_names = interpreters[name]
interpreter_names.each do |interpreter|
if !options['interpreters'].include?(interpreter)
options['interpreters'] << interpreter
end
end
end

@@ -447,8 +539,6 @@ module Linguist
fns.each do |filename|
if !options['filenames'].include?(filename)
options['filenames'] << filename
else
warn "#{name} #{filename.inspect} is already defined in samples/. Remove from languages.yml."
end
end
end

@@ -460,10 +550,12 @@ module Linguist
:aliases => options['aliases'],
:lexer => options['lexer'],
:ace_mode => options['ace_mode'],
:wrap => options['wrap'],
:group_name => options['group'],
:searchable => options.key?('searchable') ? options['searchable'] : true,
:search_term => options['search_term'],
:extensions => options['extensions'].sort,
:interpreters => options['interpreters'].sort,
:primary_extension => options['primary_extension'],
:filenames => options['filenames'],
:popular => popular.include?(name)
File diff suppressed because it is too large
@@ -4,7 +4,7 @@ module Linguist
module MD5
# Public: Create deep nested digest of value object.
#
# Useful for object comparsion.
# Useful for object comparison.
#
# obj - Object to digest.
#
@@ -1,91 +0,0 @@
require 'mime/types'
require 'yaml'
class MIME::Type
attr_accessor :override
end
# Register additional mime type extensions
#
# Follows same format as mime-types data file
# https://github.com/halostatue/mime-types/blob/master/lib/mime/types.rb.data
File.read(File.expand_path("../mimes.yml", __FILE__)).lines.each do |line|
# Regexp was cargo culted from mime-types lib
next unless line =~ %r{^
#{MIME::Type::MEDIA_TYPE_RE}
(?:\s@([^\s]+))?
(?:\s:(#{MIME::Type::ENCODING_RE}))?
}x
mediatype = $1
subtype = $2
extensions = $3
encoding = $4
# Lookup existing mime type
mime_type = MIME::Types["#{mediatype}/#{subtype}"].first ||
# Or create a new instance
MIME::Type.new("#{mediatype}/#{subtype}")
if extensions
extensions.split(/,/).each do |extension|
mime_type.extensions << extension
end
end
if encoding
mime_type.encoding = encoding
end
mime_type.override = true
# Kind of hacky, but we need to reindex the mime type after making changes
MIME::Types.add_type_variant(mime_type)
MIME::Types.index_extensions(mime_type)
end
module Linguist
module Mime
# Internal: Look up mime type for extension.
#
# ext - The extension String. May include leading "."
#
# Examples
#
# Mime.mime_for('.html')
# # => 'text/html'
#
# Mime.mime_for('txt')
# # => 'text/plain'
#
# Return mime type String otherwise falls back to 'text/plain'.
def self.mime_for(ext)
mime_type = lookup_mime_type_for(ext)
mime_type ? mime_type.to_s : 'text/plain'
end
# Internal: Lookup mime type for extension or mime type
#
# ext_or_mime_type - A file extension ".txt" or mime type "text/plain".
#
# Returns a MIME::Type
def self.lookup_mime_type_for(ext_or_mime_type)
ext_or_mime_type ||= ''
if ext_or_mime_type =~ /\w+\/\w+/
guesses = ::MIME::Types[ext_or_mime_type]
else
guesses = ::MIME::Types.type_for(ext_or_mime_type)
end
# Use custom override first
guesses.detect { |type| type.override } ||
# Prefer text mime types over binary
guesses.detect { |type| type.ascii? } ||
# Otherwise use the first guess
guesses.first
end
end
end
@@ -1,62 +0,0 @@
# Additional types to add to MIME::Types
#
# MIME types are used to set the Content-Type of raw binary blobs. All text
# blobs are served as text/plain regardless of their type to ensure they
# open in the browser rather than downloading.
#
# The encoding helps determine whether a file should be treated as plain
# text or binary. By default, a mime type's encoding is base64 (binary).
# These types will show a "View Raw" link. To force a type to render as
# plain text, set it to 8bit for UTF-8. text/* types will be treated as
# text by default.
#
# <type> @<extensions> :<encoding>
#
# type       - mediatype/subtype
# extensions - comma seperated extension list
# encoding   - base64 (binary), 7bit (ASCII), 8bit (UTF-8), or
#              quoted-printable (Printable ASCII).
#
# Follows same format as mime-types data file
# https://github.com/halostatue/mime-types/blob/master/lib/mime/types.rb.data
#
# Any additions or modifications (even trivial) should have corresponding
# test change in `test/test_mime.rb`.

# TODO: Lookup actual types
application/octet-stream @a,blend,gem,graffle,ipa,lib,mcz,nib,o,ogv,otf,pfx,pigx,plgx,psd,sib,spl,sqlite3,swc,ucode,xpi

# Please keep this list alphabetized
application/java-archive @ear,war
application/netcdf :8bit
application/ogg @ogg
application/postscript :base64
application/vnd.adobe.air-application-installer-package+zip @air
application/vnd.mozilla.xul+xml :8bit
application/vnd.oasis.opendocument.presentation @odp
application/vnd.oasis.opendocument.spreadsheet @ods
application/vnd.oasis.opendocument.text @odt
application/vnd.openofficeorg.extension @oxt
application/vnd.openxmlformats-officedocument.presentationml.presentation @pptx
application/x-chrome-extension @crx
application/x-iwork-keynote-sffkey @key
application/x-iwork-numbers-sffnumbers @numbers
application/x-iwork-pages-sffpages @pages
application/x-ms-xbap @xbap :8bit
application/x-parrot-bytecode @pbc
application/x-shockwave-flash @swf
application/x-silverlight-app @xap
application/x-supercollider @sc :8bit
application/x-troff-ms :8bit
application/x-wais-source :8bit
application/xaml+xml @xaml :8bit
application/xslt+xml @xslt :8bit
image/x-icns @icns
text/cache-manifest @manifest
text/plain @cu,cxx
text/x-logtalk @lgt
text/x-nemerle @n
text/x-nimrod @nim
text/x-ocaml @ml,mli,mll,mly,sig,sml
text/x-rust @rs,rc
text/x-scheme @rkt,scm,sls,sps,ss
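
Each record in the deleted list follows the `<type> @<extensions> :<encoding>` shape described in its header comments. As a rough illustration of that format only (this is not the mime-types gem's actual data loader), a line can be split like this:

# Rough sketch of splitting one "<type> @<extensions> :<encoding>" record
# from the list above; illustrative only, not the gem's own loader.
def parse_mime_override(line)
  return nil if line.strip.empty? || line.start_with?('#')

  type, *rest = line.split(/\s+/)
  extensions  = rest.find { |token| token.start_with?('@') }
  encoding    = rest.find { |token| token.start_with?(':') }

  {
    :type       => type,                                            # e.g. "application/ogg"
    :extensions => extensions ? extensions[1..-1].split(',') : [],  # e.g. ["ogg"]
    :encoding   => encoding ? encoding[1..-1] : nil                 # e.g. "8bit"
  }
end

parse_mime_override('application/x-supercollider @sc :8bit')
# => {:type=>"application/x-supercollider", :extensions=>["sc"], :encoding=>"8bit"}
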
@@ -8,6 +8,8 @@
- C#
- C++
- CSS
- Clojure
- CoffeeScript
- Common Lisp
- Diff
- Emacs Lisp
@@ -25,5 +27,3 @@
- SQL
- Scala
- Scheme
- TeX
- XML

@@ -67,14 +67,14 @@ module Linguist
      return if @computed_stats

      @enum.each do |blob|
        # Skip binary file extensions
        next if blob.binary_mime_type?
        # Skip files that are likely binary
        next if blob.likely_binary?

        # Skip vendored or generated blobs
        next if blob.vendored? || blob.generated? || blob.language.nil?

        # Only include programming languages
        if blob.language.type == :programming
        # Only include programming languages and acceptable markup languages
        if blob.language.type == :programming || Language.detectable_markup.include?(blob.language.name)
          @sizes[blob.language.group] += blob.size
        end
      end

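The change above widens the stats filter from programming languages only to programming plus detectable markup languages. A condensed sketch of that predicate, with `blob` standing in for any object that exposes the same helpers Linguist blobs do:

# Condensed sketch of the filter in the hunk above; `blob` is a
# hypothetical object exposing the same helpers Linguist blobs do
# (binary_mime_type?, likely_binary?, vendored?, generated?, language).
def countable_for_stats?(blob)
  return false if blob.binary_mime_type? || blob.likely_binary?
  return false if blob.vendored? || blob.generated? || blob.language.nil?

  blob.language.type == :programming ||
    Linguist::Language.detectable_markup.include?(blob.language.name)
end

# The repository stats then accumulate sizes per language group,
# exactly as in the hunk:
#   @sizes[blob.language.group] += blob.size if countable_for_stats?(blob)
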
File diff suppressed because it is too large
@@ -1,4 +1,8 @@
require 'yaml'
begin
  require 'json'
rescue LoadError
  require 'yaml'
end

require 'linguist/md5'
require 'linguist/classifier'
@@ -14,7 +18,8 @@ module Linguist
    # Hash of serialized samples object
    if File.exist?(PATH)
      DATA = YAML.load_file(PATH)
      serializer = defined?(JSON) ? JSON : YAML
      DATA = serializer.load(File.read(PATH))
    end

    # Public: Iterate over each sample.
@@ -52,6 +57,7 @@ module Linguist
          yield({
            :path => File.join(dirname, filename),
            :language => category,
            :interpreter => File.exist?(filename) ? Linguist.interpreter_from_shebang(File.read(filename)) : nil,
            :extname => File.extname(filename)
          })
        end
@@ -67,6 +73,7 @@ module Linguist
    def self.data
      db = {}
      db['extnames'] = {}
      db['interpreters'] = {}
      db['filenames'] = {}

      each do |sample|
@@ -76,12 +83,22 @@ module Linguist
          db['extnames'][language_name] ||= []
          if !db['extnames'][language_name].include?(sample[:extname])
            db['extnames'][language_name] << sample[:extname]
            db['extnames'][language_name].sort!
          end
        end

        if sample[:interpreter]
          db['interpreters'][language_name] ||= []
          if !db['interpreters'][language_name].include?(sample[:interpreter])
            db['interpreters'][language_name] << sample[:interpreter]
            db['interpreters'][language_name].sort!
          end
        end

        if sample[:filename]
          db['filenames'][language_name] ||= []
          db['filenames'][language_name] << sample[:filename]
          db['filenames'][language_name].sort!
        end

        data = File.read(sample[:path])
@@ -93,4 +110,40 @@ module Linguist
      db
    end
  end

  # Used to retrieve the interpreter from the shebang line of a file's
  # data.
  def self.interpreter_from_shebang(data)
    lines = data.lines.to_a

    if lines.any? && (match = lines[0].match(/(.+)\n?/)) && (bang = match[0]) =~ /^#!/
      bang.sub!(/^#! /, '#!')
      tokens = bang.split(' ')
      pieces = tokens.first.split('/')

      if pieces.size > 1
        script = pieces.last
      else
        script = pieces.first.sub('#!', '')
      end

      script = script == 'env' ? tokens[1] : script

      # "python2.6" -> "python"
      if script =~ /((?:\d+\.?)+)/
        script.sub! $1, ''
      end

      # Check for multiline shebang hacks that call `exec`
      if script == 'sh' &&
        lines[0...5].any? { |l| l.match(/exec (\w+).+\$0.+\$@/) }
        script = $1
      end

      script
    else
      nil
    end
  end

end

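The new Linguist.interpreter_from_shebang above normalizes a shebang line down to a bare interpreter name. A trimmed restatement for a quick standalone check; it deliberately omits the multi-line `exec` shebang hack handling from the original:

# Trimmed restatement of the shebang handling in the hunk above.
def interpreter_from_shebang(data)
  first = data.lines.first.to_s
  return nil unless first =~ /\A#!/

  tokens = first.sub(/\A#! /, '#!').split(' ')
  script = tokens.first.split('/').last.sub('#!', '')
  script = tokens[1] if script == 'env'   # "#!/usr/bin/env python" -> "python"
  script && script.sub(/(?:\d+\.?)+/, '') # "python2.6" -> "python"
end

interpreter_from_shebang("#!/usr/bin/env python2.7\nprint 'hi'\n") # => "python"
interpreter_from_shebang("#!/bin/bash\necho hi\n")                 # => "bash"
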
@@ -16,21 +16,28 @@ module Linguist
      new.extract_tokens(data)
    end

    # Read up to 100KB
    BYTE_LIMIT = 100_000

    # Start state on token, ignore anything till the next newline
    SINGLE_LINE_COMMENTS = [
      '//', # C
      '#',  # Ruby
      '%',  # Tex
    ]

    # Start state on opening token, ignore anything until the closing
    # token is reached.
    MULTI_LINE_COMMENTS = [
      ['/*', '*/'],    # C
      ['<!--', '-->'], # XML
      ['{-', '-}'],    # Haskell
      ['(*', '*)']     # Coq
      ['(*', '*)'],    # Coq
      ['"""', '"""']   # Python
    ]

    START_SINGLE_LINE_COMMENT = Regexp.compile(SINGLE_LINE_COMMENTS.map { |c|
      "^\s*#{Regexp.escape(c)} "
      "\s*#{Regexp.escape(c)} "
    }.join("|"))

    START_MULTI_LINE_COMMENT = Regexp.compile(MULTI_LINE_COMMENTS.map { |c|
@@ -52,22 +59,24 @@ module Linguist

      tokens = []
      until s.eos?
        break if s.pos >= BYTE_LIMIT

        if token = s.scan(/^#!.+$/)
          if name = extract_shebang(token)
            tokens << "SHEBANG#!#{name}"
          end

        # Single line comment
        elsif token = s.scan(START_SINGLE_LINE_COMMENT)
          tokens << token.strip
        elsif s.beginning_of_line? && token = s.scan(START_SINGLE_LINE_COMMENT)
          # tokens << token.strip
          s.skip_until(/\n|\Z/)

        # Multiline comments
        elsif token = s.scan(START_MULTI_LINE_COMMENT)
          tokens << token
          # tokens << token
          close_token = MULTI_LINE_COMMENTS.assoc(token)[1]
          s.skip_until(Regexp.compile(Regexp.escape(close_token)))
          tokens << close_token
          # tokens << close_token

        # Skip single or double quoted strings
        elsif s.scan(/"/)
@@ -130,7 +139,7 @@ module Linguist
          s.scan(/\s+/)
          script = s.scan(/\S+/)
        end
        script = script[/[^\d]+/, 0]
        script = script[/[^\d]+/, 0] if script
        return script
      end

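The tokenizer hunk switches from emitting comment delimiters as tokens to skipping comment bodies with skip_until. A small StringScanner sketch of that skip pattern; unlike the real tokenizer it has no beginning-of-line guard, no byte limit, and no string or shebang handling:

require 'strscan'

# Minimal sketch of the skip-until behaviour the hunk above introduces:
# comment openers are recognised, but the comment body is discarded
# instead of being emitted as tokens.
OPEN_TO_CLOSE = {
  '/*' => '*/', '<!--' => '-->', '{-' => '-}', '(*' => '*)', '"""' => '"""'
}

def visible_code(src)
  s   = StringScanner.new(src)
  out = ''
  until s.eos?
    if s.scan(%r{/\*|<!--|\{-|\(\*|"""})           # multi-line comment opener
      close = OPEN_TO_CLOSE[s.matched]
      s.skip_until(Regexp.new(Regexp.escape(close))) || s.terminate
    elsif s.scan(/(\/\/|#|%) /)                    # single-line comment marker
      s.skip_until(/\n|\Z/)
    else
      out << s.getch
    end
  end
  out
end

visible_code("x = 1 # set x\ny = 2 /* temp */\n")  # => "x = 1 y = 2 \n"
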
@@ -12,23 +12,43 @@
# Caches
- cache/

# Dependencies
- ^[Dd]ependencies/

# C deps
# https://github.com/joyent/node
- ^deps/
- ^tools/
- (^|/)configure$
- (^|/)configure.ac$
- (^|/)config.guess$
- (^|/)config.sub$

# Node depedencies
# Node dependencies
- node_modules/

# Vendored depedencies
- vendor/
# Erlang bundles
- ^rebar$

# Bootstrap minified css and js
- (^|/)bootstrap([^.]*)(\.min)\.(js|css)$

# Vendored dependencies
- thirdparty/
- vendors?/

# Debian packaging
- ^debian/

## Commonly Bundled JavaScript frameworks ##

# jQuery
- (^|/)jquery([^.]*)(\.min)?\.js$
- (^|/)jquery\-\d\.\d(\.\d)?(\.min)?\.js$
- (^|/)jquery\-\d\.\d+(\.\d+)?(\.min)?\.js$

# jQuery UI
- (^|/)jquery\-ui(\-\d\.\d+(\.\d+)?)?(\.\w+)?(\.min)?\.(js|css)$
- (^|/)jquery\.(ui|effects)\.([^.]*)(\.min)?\.(js|css)$

# Prototype
- (^|/)prototype(.*)\.js$
@@ -49,10 +69,6 @@
- (^|/)yahoo-([^.]*)\.js$
- (^|/)yui([^.]*)\.js$

# LESS css
- (^|/)less([^.]*)(\.min)?\.js$
- (^|/)less\-\d+\.\d+\.\d+(\.min)?\.js$

# WYS editors
- (^|/)ckeditor\.js$
- (^|/)tiny_mce([^.]*)\.js$
@@ -61,14 +77,24 @@
# MathJax
- (^|/)MathJax/

# SyntaxHighlighter - http://alexgorbatchev.com/
- (^|/)shBrush([^.]*)\.js$
- (^|/)shCore\.js$
- (^|/)shLegacy\.js$

## Python ##

# django
- (^|/)admin_media/

# Fabric
- ^fabfile\.py$

# WAF
- ^waf$

# .osx
- ^.osx$

## Obj-C ##

@@ -81,7 +107,8 @@
- -vsdoc\.js$

# jQuery validation plugin (MS bundles this with asp.net mvc)
- (^|/)jquery([^.]*)\.validate(\.min)?\.js$
- (^|/)jquery([^.]*)\.validate(\.unobtrusive)?(\.min)?\.js$
- (^|/)jquery([^.]*)\.unobtrusive\-ajax(\.min)?\.js$

# Microsoft Ajax
- (^|/)[Mm]icrosoft([Mm]vc)?([Aa]jax|[Vv]alidation)(\.debug)?\.js$
@@ -90,7 +117,44 @@
- ^[Pp]ackages/

# ExtJS
- (^|/)extjs/
- (^|/)extjs/.*?\.js$
- (^|/)extjs/.*?\.xml$
- (^|/)extjs/.*?\.txt$
- (^|/)extjs/.*?\.html$
- (^|/)extjs/.*?\.properties$
- (^|/)extjs/.sencha/
- (^|/)extjs/docs/
- (^|/)extjs/builds/
- (^|/)extjs/cmd/
- (^|/)extjs/examples/
- (^|/)extjs/locale/
- (^|/)extjs/packages/
- (^|/)extjs/plugins/
- (^|/)extjs/resources/
- (^|/)extjs/src/
- (^|/)extjs/welcome/

# Samples folders
- ^[Ss]amples/

# LICENSE, README, git config files
- ^COPYING$
- LICENSE$
- gitattributes$
- gitignore$
- gitmodules$
- ^README$
- ^readme$

# Test fixtures
- ^[Tt]est/fixtures/

# PhoneGap/Cordova
- (^|/)cordova([^.]*)(\.min)?\.js$
- (^|/)cordova\-\d\.\d(\.\d)?(\.min)?\.js$

# Vagrant
- ^Vagrantfile$

# .DS_Store's
- .[Dd][Ss]_[Ss]tore$
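
Every entry in this file is a regular expression matched against a repository-relative path. A quick sketch of how a path can be checked against a few of the patterns added or tightened above; in Linguist these entries live in lib/linguist/vendor.yml and are typically combined into a single regexp, but an any-match check gives the same answer:

# Quick sketch: test paths against three patterns copied verbatim from
# the hunk above.
vendor_patterns = [
  'node_modules/',
  '(^|/)jquery\-\d\.\d+(\.\d+)?(\.min)?\.js$',
  '(^|/)bootstrap([^.]*)(\.min)\.(js|css)$'
].map { |source| Regexp.new(source) }

def vendored?(path, patterns)
  patterns.any? { |re| path =~ re }
end

vendored?('public/js/jquery-1.7.2.min.js', vendor_patterns) # => true
vendored?('assets/css/bootstrap.min.css', vendor_patterns)  # => true
vendored?('lib/linguist/language.rb', vendor_patterns)      # => false
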
219  samples/ABAP/cl_csv_parser.abap  Normal file
@@ -0,0 +1,219 @@
|
||||
*/**
|
||||
* The MIT License (MIT)
|
||||
* Copyright (c) 2012 René van Mil
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
|
||||
*----------------------------------------------------------------------*
|
||||
* CLASS CL_CSV_PARSER DEFINITION
|
||||
*----------------------------------------------------------------------*
|
||||
*
|
||||
*----------------------------------------------------------------------*
|
||||
class cl_csv_parser definition
|
||||
public
|
||||
inheriting from cl_object
|
||||
final
|
||||
create public .
|
||||
|
||||
public section.
|
||||
*"* public components of class CL_CSV_PARSER
|
||||
*"* do not include other source files here!!!
|
||||
|
||||
type-pools abap .
|
||||
methods constructor
|
||||
importing
|
||||
!delegate type ref to if_csv_parser_delegate
|
||||
!csvstring type string
|
||||
!separator type c
|
||||
!skip_first_line type abap_bool .
|
||||
methods parse
|
||||
raising
|
||||
cx_csv_parse_error .
|
||||
protected section.
|
||||
*"* protected components of class CL_CSV_PARSER
|
||||
*"* do not include other source files here!!!
|
||||
private section.
|
||||
*"* private components of class CL_CSV_PARSER
|
||||
*"* do not include other source files here!!!
|
||||
|
||||
constants _textindicator type c value '"'. "#EC NOTEXT
|
||||
data _delegate type ref to if_csv_parser_delegate .
|
||||
data _csvstring type string .
|
||||
data _separator type c .
|
||||
type-pools abap .
|
||||
data _skip_first_line type abap_bool .
|
||||
|
||||
methods _lines
|
||||
returning
|
||||
value(returning) type stringtab .
|
||||
methods _parse_line
|
||||
importing
|
||||
!line type string
|
||||
returning
|
||||
value(returning) type stringtab
|
||||
raising
|
||||
cx_csv_parse_error .
|
||||
endclass. "CL_CSV_PARSER DEFINITION
|
||||
|
||||
|
||||
|
||||
*----------------------------------------------------------------------*
|
||||
* CLASS CL_CSV_PARSER IMPLEMENTATION
|
||||
*----------------------------------------------------------------------*
|
||||
*
|
||||
*----------------------------------------------------------------------*
|
||||
class cl_csv_parser implementation.
|
||||
|
||||
|
||||
* <SIGNATURE>---------------------------------------------------------------------------------------+
|
||||
* | Instance Public Method CL_CSV_PARSER->CONSTRUCTOR
|
||||
* +-------------------------------------------------------------------------------------------------+
|
||||
* | [--->] DELEGATE TYPE REF TO IF_CSV_PARSER_DELEGATE
|
||||
* | [--->] CSVSTRING TYPE STRING
|
||||
* | [--->] SEPARATOR TYPE C
|
||||
* | [--->] SKIP_FIRST_LINE TYPE ABAP_BOOL
|
||||
* +--------------------------------------------------------------------------------------</SIGNATURE>
|
||||
method constructor.
|
||||
super->constructor( ).
|
||||
_delegate = delegate.
|
||||
_csvstring = csvstring.
|
||||
_separator = separator.
|
||||
_skip_first_line = skip_first_line.
|
||||
endmethod. "constructor
|
||||
|
||||
|
||||
* <SIGNATURE>---------------------------------------------------------------------------------------+
|
||||
* | Instance Public Method CL_CSV_PARSER->PARSE
|
||||
* +-------------------------------------------------------------------------------------------------+
|
||||
* | [!CX!] CX_CSV_PARSE_ERROR
|
||||
* +--------------------------------------------------------------------------------------</SIGNATURE>
|
||||
method parse.
|
||||
data msg type string.
|
||||
if _csvstring is initial.
|
||||
message e002(csv) into msg.
|
||||
raise exception type cx_csv_parse_error
|
||||
exporting
|
||||
message = msg.
|
||||
endif.
|
||||
|
||||
" Get the lines
|
||||
data is_first_line type abap_bool value abap_true.
|
||||
data lines type standard table of string.
|
||||
lines = _lines( ).
|
||||
field-symbols <line> type string.
|
||||
loop at lines assigning <line>.
|
||||
" Should we skip the first line?
|
||||
if _skip_first_line = abap_true and is_first_line = abap_true.
|
||||
is_first_line = abap_false.
|
||||
continue.
|
||||
endif.
|
||||
" Parse the line
|
||||
data values type standard table of string.
|
||||
values = _parse_line( <line> ).
|
||||
" Send values to delegate
|
||||
_delegate->values_found( values ).
|
||||
endloop.
|
||||
endmethod. "parse
|
||||
|
||||
|
||||
* <SIGNATURE>---------------------------------------------------------------------------------------+
|
||||
* | Instance Private Method CL_CSV_PARSER->_LINES
|
||||
* +-------------------------------------------------------------------------------------------------+
|
||||
* | [<-()] RETURNING TYPE STRINGTAB
|
||||
* +--------------------------------------------------------------------------------------</SIGNATURE>
|
||||
method _lines.
|
||||
split _csvstring at cl_abap_char_utilities=>cr_lf into table returning.
|
||||
endmethod. "_lines
|
||||
|
||||
|
||||
* <SIGNATURE>---------------------------------------------------------------------------------------+
|
||||
* | Instance Private Method CL_CSV_PARSER->_PARSE_LINE
|
||||
* +-------------------------------------------------------------------------------------------------+
|
||||
* | [--->] LINE TYPE STRING
|
||||
* | [<-()] RETURNING TYPE STRINGTAB
|
||||
* | [!CX!] CX_CSV_PARSE_ERROR
|
||||
* +--------------------------------------------------------------------------------------</SIGNATURE>
|
||||
method _parse_line.
|
||||
data msg type string.
|
||||
|
||||
data csvvalue type string.
|
||||
data csvvalues type standard table of string.
|
||||
|
||||
data char type c.
|
||||
data pos type i value 0.
|
||||
data len type i.
|
||||
len = strlen( line ).
|
||||
while pos < len.
|
||||
char = line+pos(1).
|
||||
if char <> _separator.
|
||||
if char = _textindicator.
|
||||
data text_ended type abap_bool.
|
||||
text_ended = abap_false.
|
||||
while text_ended = abap_false.
|
||||
pos = pos + 1.
|
||||
if pos < len.
|
||||
char = line+pos(1).
|
||||
if char = _textindicator.
|
||||
text_ended = abap_true.
|
||||
else.
|
||||
if char is initial. " Space
|
||||
concatenate csvvalue ` ` into csvvalue.
|
||||
else.
|
||||
concatenate csvvalue char into csvvalue.
|
||||
endif.
|
||||
endif.
|
||||
else.
|
||||
" Reached the end of the line while inside a text value
|
||||
" This indicates an error in the CSV formatting
|
||||
text_ended = abap_true.
|
||||
message e003(csv) into msg.
|
||||
raise exception type cx_csv_parse_error
|
||||
exporting
|
||||
message = msg.
|
||||
endif.
|
||||
endwhile.
|
||||
" Check if next character is a separator, otherwise the CSV formatting is incorrect
|
||||
data nextpos type i.
|
||||
nextpos = pos + 1.
|
||||
if nextpos < len and line+nextpos(1) <> _separator.
|
||||
message e003(csv) into msg.
|
||||
raise exception type cx_csv_parse_error
|
||||
exporting
|
||||
message = msg.
|
||||
endif.
|
||||
else.
|
||||
if char is initial. " Space
|
||||
concatenate csvvalue ` ` into csvvalue.
|
||||
else.
|
||||
concatenate csvvalue char into csvvalue.
|
||||
endif.
|
||||
endif.
|
||||
else.
|
||||
append csvvalue to csvvalues.
|
||||
clear csvvalue.
|
||||
endif.
|
||||
pos = pos + 1.
|
||||
endwhile.
|
||||
append csvvalue to csvvalues. " Don't forget the last value
|
||||
|
||||
returning = csvvalues.
|
||||
endmethod. "_parse_line
|
||||
endclass. "CL_CSV_PARSER IMPLEMENTATION
|
||||
39  samples/Agda/NatCat.agda  Normal file
@@ -0,0 +1,39 @@
module NatCat where

open import Relation.Binary.PropositionalEquality

-- If you can show that a relation only ever has one inhabitant
-- you get the category laws for free
module
  EasyCategory
    (obj : Set)
    (_⟶_ : obj → obj → Set)
    (_∘_ : ∀ {x y z} → x ⟶ y → y ⟶ z → x ⟶ z)
    (id : ∀ x → x ⟶ x)
    (single-inhabitant : (x y : obj) (r s : x ⟶ y) → r ≡ s)
  where

  idʳ : ∀ x y (r : x ⟶ y) → r ∘ id y ≡ r
  idʳ x y r = single-inhabitant x y (r ∘ id y) r

  idˡ : ∀ x y (r : x ⟶ y) → id x ∘ r ≡ r
  idˡ x y r = single-inhabitant x y (id x ∘ r) r

  ∘-assoc : ∀ w x y z (r : w ⟶ x) (s : x ⟶ y) (t : y ⟶ z) → (r ∘ s) ∘ t ≡ r ∘ (s ∘ t)
  ∘-assoc w x y z r s t = single-inhabitant w z ((r ∘ s) ∘ t) (r ∘ (s ∘ t))

open import Data.Nat

same : (x y : ℕ) (r s : x ≤ y) → r ≡ s
same .0 y z≤n z≤n = refl
same .(suc m) .(suc n) (s≤s {m} {n} r) (s≤s s) = cong s≤s (same m n r s)

≤-trans : ∀ x y z → x ≤ y → y ≤ z → x ≤ z
≤-trans .0 y z z≤n s = z≤n
≤-trans .(suc m) .(suc n) .(suc n₁) (s≤s {m} {n} r) (s≤s {.n} {n₁} s) = s≤s (≤-trans m n n₁ r s)

≤-refl : ∀ x → x ≤ x
≤-refl zero = z≤n
≤-refl (suc x) = s≤s (≤-refl x)

module Nat-EasyCategory = EasyCategory ℕ _≤_ (λ {x}{y}{z} → ≤-trans x y z) ≤-refl same
26  samples/ApacheConf/filenames/.htaccess  Normal file
@@ -0,0 +1,26 @@
ServerSignature Off
RewriteCond %{REQUEST_METHOD} ^(HEAD|TRACE|DELETE|TRACK) [NC,OR]
RewriteCond %{THE_REQUEST} (\\r|\\n|%0A|%0D) [NC,OR]

RewriteCond %{HTTP_REFERER} (<|>|’|%0A|%0D|%27|%3C|%3E|%00) [NC,OR]
RewriteCond %{HTTP_COOKIE} (<|>|’|%0A|%0D|%27|%3C|%3E|%00) [NC,OR]
RewriteCond %{REQUEST_URI} ^/(,|;|:|<|>|”>|”<|/|\\\.\.\\).{0,9999} [NC,OR]

RewriteCond %{HTTP_USER_AGENT} ^$ [OR]
RewriteCond %{HTTP_USER_AGENT} ^(java|curl|wget) [NC,OR]
RewriteCond %{HTTP_USER_AGENT} (winhttp|HTTrack|clshttp|archiver|loader|email|harvest|extract|grab|miner) [NC,OR]
RewriteCond %{HTTP_USER_AGENT} (libwww-perl|curl|wget|python|nikto|scan) [NC,OR]
RewriteCond %{HTTP_USER_AGENT} (<|>|’|%0A|%0D|%27|%3C|%3E|%00) [NC,OR]

#Block mySQL injects
RewriteCond %{QUERY_STRING} (;|<|>|’|”|\)|%0A|%0D|%22|%27|%3C|%3E|%00).*(/\*|union|select|insert|cast|set|declare|drop|update|md5|benchmark) [NC,OR]

RewriteCond %{QUERY_STRING} \.\./\.\. [OR]

RewriteCond %{QUERY_STRING} (localhost|loopback|127\.0\.0\.1) [NC,OR]
RewriteCond %{QUERY_STRING} \.[a-z0-9] [NC,OR]
RewriteCond %{QUERY_STRING} (<|>|’|%0A|%0D|%27|%3C|%3E|%00) [NC]
# Note: The final RewriteCond must NOT use the [OR] flag.

# Return 403 Forbidden error.
RewriteRule .* index.php [F]
470  samples/ApacheConf/filenames/apache2.conf  Normal file
@@ -0,0 +1,470 @@
|
||||
# This is the main Apache HTTP server configuration file. It contains the
|
||||
# configuration directives that give the server its instructions.
|
||||
# See <URL:http://httpd.apache.org/docs/2.2> for detailed information.
|
||||
# In particular, see
|
||||
# <URL:http://httpd.apache.org/docs/2.2/mod/directives.html>
|
||||
# for a discussion of each configuration directive.
|
||||
#
|
||||
# Do NOT simply read the instructions in here without understanding
|
||||
# what they do. They're here only as hints or reminders. If you are unsure
|
||||
# consult the online docs. You have been warned.
|
||||
#
|
||||
# Configuration and logfile names: If the filenames you specify for many
|
||||
# of the server's control files begin with "/" (or "drive:/" for Win32), the
|
||||
# server will use that explicit path. If the filenames do *not* begin
|
||||
# with "/", the value of ServerRoot is prepended -- so "/var/log/apache2/foo.log"
|
||||
# with ServerRoot set to "" will be interpreted by the
|
||||
# server as "//var/log/apache2/foo.log".
|
||||
|
||||
#
|
||||
# ServerRoot: The top of the directory tree under which the server's
|
||||
# configuration, error, and log files are kept.
|
||||
#
|
||||
# Do not add a slash at the end of the directory path. If you point
|
||||
# ServerRoot at a non-local disk, be sure to point the LockFile directive
|
||||
# at a local disk. If you wish to share the same ServerRoot for multiple
|
||||
# httpd daemons, you will need to change at least LockFile and PidFile.
|
||||
#
|
||||
ServerRoot ""
|
||||
|
||||
#
|
||||
# Listen: Allows you to bind Apache to specific IP addresses and/or
|
||||
# ports, instead of the default. See also the <VirtualHost>
|
||||
# directive.
|
||||
#
|
||||
# Change this to Listen on specific IP addresses as shown below to
|
||||
# prevent Apache from glomming onto all bound IP addresses.
|
||||
#
|
||||
#Listen 12.34.56.78:80
|
||||
Listen 80
|
||||
|
||||
#
|
||||
# Dynamic Shared Object (DSO) Support
|
||||
#
|
||||
# To be able to use the functionality of a module which was built as a DSO you
|
||||
# have to place corresponding `LoadModule' lines at this location so the
|
||||
# directives contained in it are actually available _before_ they are used.
|
||||
# Statically compiled modules (those listed by `httpd -l') do not need
|
||||
# to be loaded here.
|
||||
#
|
||||
# Example:
|
||||
# LoadModule foo_module modules/mod_foo.so
|
||||
#
|
||||
LoadModule authn_file_module /usr/lib/apache2/modules/mod_authn_file.so
|
||||
LoadModule authn_dbm_module /usr/lib/apache2/modules/mod_authn_dbm.so
|
||||
LoadModule authn_anon_module /usr/lib/apache2/modules/mod_authn_anon.so
|
||||
LoadModule authn_dbd_module /usr/lib/apache2/modules/mod_authn_dbd.so
|
||||
LoadModule authn_default_module /usr/lib/apache2/modules/mod_authn_default.so
|
||||
LoadModule authn_alias_module /usr/lib/apache2/modules/mod_authn_alias.so
|
||||
LoadModule authz_host_module /usr/lib/apache2/modules/mod_authz_host.so
|
||||
LoadModule authz_groupfile_module /usr/lib/apache2/modules/mod_authz_groupfile.so
|
||||
LoadModule authz_user_module /usr/lib/apache2/modules/mod_authz_user.so
|
||||
LoadModule authz_dbm_module /usr/lib/apache2/modules/mod_authz_dbm.so
|
||||
LoadModule authz_owner_module /usr/lib/apache2/modules/mod_authz_owner.so
|
||||
LoadModule authnz_ldap_module /usr/lib/apache2/modules/mod_authnz_ldap.so
|
||||
LoadModule authz_default_module /usr/lib/apache2/modules/mod_authz_default.so
|
||||
LoadModule auth_basic_module /usr/lib/apache2/modules/mod_auth_basic.so
|
||||
LoadModule auth_digest_module /usr/lib/apache2/modules/mod_auth_digest.so
|
||||
LoadModule file_cache_module /usr/lib/apache2/modules/mod_file_cache.so
|
||||
LoadModule cache_module /usr/lib/apache2/modules/mod_cache.so
|
||||
LoadModule disk_cache_module /usr/lib/apache2/modules/mod_disk_cache.so
|
||||
LoadModule mem_cache_module /usr/lib/apache2/modules/mod_mem_cache.so
|
||||
LoadModule dbd_module /usr/lib/apache2/modules/mod_dbd.so
|
||||
LoadModule dumpio_module /usr/lib/apache2/modules/mod_dumpio.so
|
||||
LoadModule ext_filter_module /usr/lib/apache2/modules/mod_ext_filter.so
|
||||
LoadModule include_module /usr/lib/apache2/modules/mod_include.so
|
||||
LoadModule filter_module /usr/lib/apache2/modules/mod_filter.so
|
||||
LoadModule charset_lite_module /usr/lib/apache2/modules/mod_charset_lite.so
|
||||
LoadModule deflate_module /usr/lib/apache2/modules/mod_deflate.so
|
||||
LoadModule ldap_module /usr/lib/apache2/modules/mod_ldap.so
|
||||
LoadModule log_forensic_module /usr/lib/apache2/modules/mod_log_forensic.so
|
||||
LoadModule env_module /usr/lib/apache2/modules/mod_env.so
|
||||
LoadModule mime_magic_module /usr/lib/apache2/modules/mod_mime_magic.so
|
||||
LoadModule cern_meta_module /usr/lib/apache2/modules/mod_cern_meta.so
|
||||
LoadModule expires_module /usr/lib/apache2/modules/mod_expires.so
|
||||
LoadModule headers_module /usr/lib/apache2/modules/mod_headers.so
|
||||
LoadModule ident_module /usr/lib/apache2/modules/mod_ident.so
|
||||
LoadModule usertrack_module /usr/lib/apache2/modules/mod_usertrack.so
|
||||
LoadModule unique_id_module /usr/lib/apache2/modules/mod_unique_id.so
|
||||
LoadModule setenvif_module /usr/lib/apache2/modules/mod_setenvif.so
|
||||
LoadModule version_module /usr/lib/apache2/modules/mod_version.so
|
||||
LoadModule proxy_module /usr/lib/apache2/modules/mod_proxy.so
|
||||
LoadModule proxy_connect_module /usr/lib/apache2/modules/mod_proxy_connect.so
|
||||
LoadModule proxy_ftp_module /usr/lib/apache2/modules/mod_proxy_ftp.so
|
||||
LoadModule proxy_http_module /usr/lib/apache2/modules/mod_proxy_http.so
|
||||
LoadModule proxy_ajp_module /usr/lib/apache2/modules/mod_proxy_ajp.so
|
||||
LoadModule proxy_balancer_module /usr/lib/apache2/modules/mod_proxy_balancer.so
|
||||
LoadModule ssl_module /usr/lib/apache2/modules/mod_ssl.so
|
||||
LoadModule mime_module /usr/lib/apache2/modules/mod_mime.so
|
||||
LoadModule dav_module /usr/lib/apache2/modules/mod_dav.so
|
||||
LoadModule status_module /usr/lib/apache2/modules/mod_status.so
|
||||
LoadModule autoindex_module /usr/lib/apache2/modules/mod_autoindex.so
|
||||
LoadModule asis_module /usr/lib/apache2/modules/mod_asis.so
|
||||
LoadModule info_module /usr/lib/apache2/modules/mod_info.so
|
||||
LoadModule suexec_module /usr/lib/apache2/modules/mod_suexec.so
|
||||
LoadModule cgid_module /usr/lib/apache2/modules/mod_cgid.so
|
||||
LoadModule cgi_module /usr/lib/apache2/modules/mod_cgi.so
|
||||
LoadModule dav_fs_module /usr/lib/apache2/modules/mod_dav_fs.so
|
||||
LoadModule dav_lock_module /usr/lib/apache2/modules/mod_dav_lock.so
|
||||
LoadModule vhost_alias_module /usr/lib/apache2/modules/mod_vhost_alias.so
|
||||
LoadModule negotiation_module /usr/lib/apache2/modules/mod_negotiation.so
|
||||
LoadModule dir_module /usr/lib/apache2/modules/mod_dir.so
|
||||
LoadModule imagemap_module /usr/lib/apache2/modules/mod_imagemap.so
|
||||
LoadModule actions_module /usr/lib/apache2/modules/mod_actions.so
|
||||
LoadModule speling_module /usr/lib/apache2/modules/mod_speling.so
|
||||
LoadModule userdir_module /usr/lib/apache2/modules/mod_userdir.so
|
||||
LoadModule alias_module /usr/lib/apache2/modules/mod_alias.so
|
||||
LoadModule rewrite_module /usr/lib/apache2/modules/mod_rewrite.so
|
||||
|
||||
<IfModule !mpm_netware_module>
|
||||
#
|
||||
# If you wish httpd to run as a different user or group, you must run
|
||||
# httpd as root initially and it will switch.
|
||||
#
|
||||
# User/Group: The name (or #number) of the user/group to run httpd as.
|
||||
# It is usually good practice to create a dedicated user and group for
|
||||
# running httpd, as with most system services.
|
||||
#
|
||||
User daemon
|
||||
Group daemon
|
||||
</IfModule>
|
||||
|
||||
# 'Main' server configuration
|
||||
#
|
||||
# The directives in this section set up the values used by the 'main'
|
||||
# server, which responds to any requests that aren't handled by a
|
||||
# <VirtualHost> definition. These values also provide defaults for
|
||||
# any <VirtualHost> containers you may define later in the file.
|
||||
#
|
||||
# All of these directives may appear inside <VirtualHost> containers,
|
||||
# in which case these default settings will be overridden for the
|
||||
# virtual host being defined.
|
||||
#
|
||||
|
||||
#
|
||||
# ServerAdmin: Your address, where problems with the server should be
|
||||
# e-mailed. This address appears on some server-generated pages, such
|
||||
# as error documents. e.g. admin@your-domain.com
|
||||
#
|
||||
ServerAdmin you@example.com
|
||||
|
||||
#
|
||||
# ServerName gives the name and port that the server uses to identify itself.
|
||||
# This can often be determined automatically, but we recommend you specify
|
||||
# it explicitly to prevent problems during startup.
|
||||
#
|
||||
# If your host doesn't have a registered DNS name, enter its IP address here.
|
||||
#
|
||||
#ServerName www.example.com:80
|
||||
|
||||
#
|
||||
# DocumentRoot: The directory out of which you will serve your
|
||||
# documents. By default, all requests are taken from this directory, but
|
||||
# symbolic links and aliases may be used to point to other locations.
|
||||
#
|
||||
DocumentRoot "/usr/share/apache2/default-site/htdocs"
|
||||
|
||||
#
|
||||
# Each directory to which Apache has access can be configured with respect
|
||||
# to which services and features are allowed and/or disabled in that
|
||||
# directory (and its subdirectories).
|
||||
#
|
||||
# First, we configure the "default" to be a very restrictive set of
|
||||
# features.
|
||||
#
|
||||
<Directory />
|
||||
Options FollowSymLinks
|
||||
AllowOverride None
|
||||
Order deny,allow
|
||||
Deny from all
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# Note that from this point forward you must specifically allow
|
||||
# particular features to be enabled - so if something's not working as
|
||||
# you might expect, make sure that you have specifically enabled it
|
||||
# below.
|
||||
#
|
||||
|
||||
#
|
||||
# This should be changed to whatever you set DocumentRoot to.
|
||||
#
|
||||
<Directory "/usr/share/apache2/default-site/htdocs">
|
||||
#
|
||||
# Possible values for the Options directive are "None", "All",
|
||||
# or any combination of:
|
||||
# Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews
|
||||
#
|
||||
# Note that "MultiViews" must be named *explicitly* --- "Options All"
|
||||
# doesn't give it to you.
|
||||
#
|
||||
# The Options directive is both complicated and important. Please see
|
||||
# http://httpd.apache.org/docs/2.2/mod/core.html#options
|
||||
# for more information.
|
||||
#
|
||||
Options Indexes FollowSymLinks
|
||||
|
||||
#
|
||||
# AllowOverride controls what directives may be placed in .htaccess files.
|
||||
# It can be "All", "None", or any combination of the keywords:
|
||||
# Options FileInfo AuthConfig Limit
|
||||
#
|
||||
AllowOverride None
|
||||
|
||||
#
|
||||
# Controls who can get stuff from this server.
|
||||
#
|
||||
Order allow,deny
|
||||
Allow from all
|
||||
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# DirectoryIndex: sets the file that Apache will serve if a directory
|
||||
# is requested.
|
||||
#
|
||||
<IfModule dir_module>
|
||||
DirectoryIndex index.html
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The following lines prevent .htaccess and .htpasswd files from being
|
||||
# viewed by Web clients.
|
||||
#
|
||||
<FilesMatch "^\.ht">
|
||||
Order allow,deny
|
||||
Deny from all
|
||||
Satisfy All
|
||||
</FilesMatch>
|
||||
|
||||
#
|
||||
# ErrorLog: The location of the error log file.
|
||||
# If you do not specify an ErrorLog directive within a <VirtualHost>
|
||||
# container, error messages relating to that virtual host will be
|
||||
# logged here. If you *do* define an error logfile for a <VirtualHost>
|
||||
# container, that host's errors will be logged there and not here.
|
||||
#
|
||||
ErrorLog /var/log/apache2/error_log
|
||||
|
||||
#
|
||||
# LogLevel: Control the number of messages logged to the error_log.
|
||||
# Possible values include: debug, info, notice, warn, error, crit,
|
||||
# alert, emerg.
|
||||
#
|
||||
LogLevel warn
|
||||
|
||||
<IfModule log_config_module>
|
||||
#
|
||||
# The following directives define some format nicknames for use with
|
||||
# a CustomLog directive (see below).
|
||||
#
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b" common
|
||||
|
||||
<IfModule logio_module>
|
||||
# You need to enable mod_logio.c to use %I and %O
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The location and format of the access logfile (Common Logfile Format).
|
||||
# If you do not define any access logfiles within a <VirtualHost>
|
||||
# container, they will be logged here. Contrariwise, if you *do*
|
||||
# define per-<VirtualHost> access logfiles, transactions will be
|
||||
# logged therein and *not* in this file.
|
||||
#
|
||||
CustomLog /var/log/apache2/access_log common
|
||||
|
||||
#
|
||||
# If you prefer a logfile with access, agent, and referer information
|
||||
# (Combined Logfile Format) you can use the following directive.
|
||||
#
|
||||
#CustomLog /var/log/apache2/access_log combined
|
||||
</IfModule>
|
||||
|
||||
<IfModule alias_module>
|
||||
#
|
||||
# Redirect: Allows you to tell clients about documents that used to
|
||||
# exist in your server's namespace, but do not anymore. The client
|
||||
# will make a new request for the document at its new location.
|
||||
# Example:
|
||||
# Redirect permanent /foo http://www.example.com/bar
|
||||
|
||||
#
|
||||
# Alias: Maps web paths into filesystem paths and is used to
|
||||
# access content that does not live under the DocumentRoot.
|
||||
# Example:
|
||||
# Alias /webpath /full/filesystem/path
|
||||
#
|
||||
# If you include a trailing / on /webpath then the server will
|
||||
# require it to be present in the URL. You will also likely
|
||||
# need to provide a <Directory> section to allow access to
|
||||
# the filesystem path.
|
||||
|
||||
#
|
||||
# ScriptAlias: This controls which directories contain server scripts.
|
||||
# ScriptAliases are essentially the same as Aliases, except that
|
||||
# documents in the target directory are treated as applications and
|
||||
# run by the server when requested rather than as documents sent to the
|
||||
# client. The same rules about trailing "/" apply to ScriptAlias
|
||||
# directives as to Alias.
|
||||
#
|
||||
ScriptAlias /cgi-bin/ "/usr/lib/cgi-bin/"
|
||||
|
||||
</IfModule>
|
||||
|
||||
<IfModule cgid_module>
|
||||
#
|
||||
# ScriptSock: On threaded servers, designate the path to the UNIX
|
||||
# socket used to communicate with the CGI daemon of mod_cgid.
|
||||
#
|
||||
#Scriptsock /var/run/apache2/cgisock
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# "/usr/lib/cgi-bin" should be changed to whatever your ScriptAliased
|
||||
# CGI directory exists, if you have that configured.
|
||||
#
|
||||
<Directory "/usr/lib/cgi-bin">
|
||||
AllowOverride None
|
||||
Options None
|
||||
Order allow,deny
|
||||
Allow from all
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# DefaultType: the default MIME type the server will use for a document
|
||||
# if it cannot otherwise determine one, such as from filename extensions.
|
||||
# If your server contains mostly text or HTML documents, "text/plain" is
|
||||
# a good value. If most of your content is binary, such as applications
|
||||
# or images, you may want to use "application/octet-stream" instead to
|
||||
# keep browsers from trying to display binary files as though they are
|
||||
# text.
|
||||
#
|
||||
DefaultType text/plain
|
||||
|
||||
<IfModule mime_module>
|
||||
#
|
||||
# TypesConfig points to the file containing the list of mappings from
|
||||
# filename extension to MIME-type.
|
||||
#
|
||||
TypesConfig /etc/apache2/mime.types
|
||||
|
||||
#
|
||||
# AddType allows you to add to or override the MIME configuration
|
||||
# file specified in TypesConfig for specific file types.
|
||||
#
|
||||
#AddType application/x-gzip .tgz
|
||||
#
|
||||
# AddEncoding allows you to have certain browsers uncompress
|
||||
# information on the fly. Note: Not all browsers support this.
|
||||
#
|
||||
#AddEncoding x-compress .Z
|
||||
#AddEncoding x-gzip .gz .tgz
|
||||
#
|
||||
# If the AddEncoding directives above are commented-out, then you
|
||||
# probably should define those extensions to indicate media types:
|
||||
#
|
||||
AddType application/x-compress .Z
|
||||
AddType application/x-gzip .gz .tgz
|
||||
|
||||
#
|
||||
# AddHandler allows you to map certain file extensions to "handlers":
|
||||
# actions unrelated to filetype. These can be either built into the server
|
||||
# or added with the Action directive (see below)
|
||||
#
|
||||
# To use CGI scripts outside of ScriptAliased directories:
|
||||
# (You will also need to add "ExecCGI" to the "Options" directive.)
|
||||
#
|
||||
#AddHandler cgi-script .cgi
|
||||
|
||||
# For type maps (negotiated resources):
|
||||
#AddHandler type-map var
|
||||
|
||||
#
|
||||
# Filters allow you to process content before it is sent to the client.
|
||||
#
|
||||
# To parse .shtml files for server-side includes (SSI):
|
||||
# (You will also need to add "Includes" to the "Options" directive.)
|
||||
#
|
||||
#AddType text/html .shtml
|
||||
#AddOutputFilter INCLUDES .shtml
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The mod_mime_magic module allows the server to use various hints from the
|
||||
# contents of the file itself to determine its type. The MIMEMagicFile
|
||||
# directive tells the module where the hint definitions are located.
|
||||
#
|
||||
#MIMEMagicFile /etc/apache2/magic
|
||||
|
||||
#
|
||||
# Customizable error responses come in three flavors:
|
||||
# 1) plain text 2) local redirects 3) external redirects
|
||||
#
|
||||
# Some examples:
|
||||
#ErrorDocument 500 "The server made a boo boo."
|
||||
#ErrorDocument 404 /missing.html
|
||||
#ErrorDocument 404 "/cgi-bin/missing_handler.pl"
|
||||
#ErrorDocument 402 http://www.example.com/subscription_info.html
|
||||
#
|
||||
|
||||
#
|
||||
# EnableMMAP and EnableSendfile: On systems that support it,
|
||||
# memory-mapping or the sendfile syscall is used to deliver
|
||||
# files. This usually improves server performance, but must
|
||||
# be turned off when serving from networked-mounted
|
||||
# filesystems or if support for these functions is otherwise
|
||||
# broken on your system.
|
||||
#
|
||||
#EnableMMAP off
|
||||
#EnableSendfile off
|
||||
|
||||
# Supplemental configuration
|
||||
#
|
||||
# The configuration files in the /etc/apache2/extra/ directory can be
|
||||
# included to add extra features or to modify the default configuration of
|
||||
# the server, or you may simply copy their contents here and change as
|
||||
# necessary.
|
||||
|
||||
# Server-pool management (MPM specific)
|
||||
#Include /etc/apache2/extra/httpd-mpm.conf
|
||||
|
||||
# Multi-language error messages
|
||||
#Include /etc/apache2/extra/httpd-multilang-errordoc.conf
|
||||
|
||||
# Fancy directory listings
|
||||
#Include /etc/apache2/extra/httpd-autoindex.conf
|
||||
|
||||
# Language settings
|
||||
#Include /etc/apache2/extra/httpd-languages.conf
|
||||
|
||||
# User home directories
|
||||
#Include /etc/apache2/extra/httpd-userdir.conf
|
||||
|
||||
# Real-time info on requests and configuration
|
||||
#Include /etc/apache2/extra/httpd-info.conf
|
||||
|
||||
# Virtual hosts
|
||||
#Include /etc/apache2/extra/httpd-vhosts.conf
|
||||
|
||||
# Local access to the Apache HTTP Server Manual
|
||||
#Include /etc/apache2/extra/httpd-manual.conf
|
||||
|
||||
# Distributed authoring and versioning (WebDAV)
|
||||
#Include /etc/apache2/extra/httpd-dav.conf
|
||||
|
||||
# Various default settings
|
||||
#Include /etc/apache2/extra/httpd-default.conf
|
||||
|
||||
# Secure (SSL/TLS) connections
|
||||
#Include /etc/apache2/extra/httpd-ssl.conf
|
||||
#
|
||||
# Note: The following must must be present to support
|
||||
# starting without SSL on platforms with no /dev/random equivalent
|
||||
# but a statically compiled-in mod_ssl.
|
||||
#
|
||||
<IfModule ssl_module>
|
||||
SSLRandomSeed startup builtin
|
||||
SSLRandomSeed connect builtin
|
||||
</IfModule>
|
||||
500  samples/ApacheConf/filenames/httpd.conf  Normal file
@@ -0,0 +1,500 @@
|
||||
#
|
||||
# This is the main Apache HTTP server configuration file. It contains the
|
||||
# configuration directives that give the server its instructions.
|
||||
# See <URL:http://httpd.apache.org/docs/2.2> for detailed information.
|
||||
# In particular, see
|
||||
# <URL:http://httpd.apache.org/docs/2.2/mod/directives.html>
|
||||
# for a discussion of each configuration directive.
|
||||
#
|
||||
# Do NOT simply read the instructions in here without understanding
|
||||
# what they do. They're here only as hints or reminders. If you are unsure
|
||||
# consult the online docs. You have been warned.
|
||||
#
|
||||
# Configuration and logfile names: If the filenames you specify for many
|
||||
# of the server's control files begin with "/" (or "drive:/" for Win32), the
|
||||
# server will use that explicit path. If the filenames do *not* begin
|
||||
# with "/", the value of ServerRoot is prepended -- so "log/foo_log"
|
||||
# with ServerRoot set to "/usr" will be interpreted by the
|
||||
# server as "/usr/log/foo_log".
|
||||
|
||||
#
|
||||
# ServerRoot: The top of the directory tree under which the server's
|
||||
# configuration, error, and log files are kept.
|
||||
#
|
||||
# Do not add a slash at the end of the directory path. If you point
|
||||
# ServerRoot at a non-local disk, be sure to point the LockFile directive
|
||||
# at a local disk. If you wish to share the same ServerRoot for multiple
|
||||
# httpd daemons, you will need to change at least LockFile and PidFile.
|
||||
#
|
||||
ServerRoot "/usr"
|
||||
|
||||
#
|
||||
# Listen: Allows you to bind Apache to specific IP addresses and/or
|
||||
# ports, instead of the default. See also the <VirtualHost>
|
||||
# directive.
|
||||
#
|
||||
# Change this to Listen on specific IP addresses as shown below to
|
||||
# prevent Apache from glomming onto all bound IP addresses.
|
||||
#
|
||||
#Listen 12.34.56.78:80
|
||||
Listen 80
|
||||
|
||||
#
|
||||
# Dynamic Shared Object (DSO) Support
|
||||
#
|
||||
# To be able to use the functionality of a module which was built as a DSO you
|
||||
# have to place corresponding `LoadModule' lines at this location so the
|
||||
# directives contained in it are actually available _before_ they are used.
|
||||
# Statically compiled modules (those listed by `httpd -l') do not need
|
||||
# to be loaded here.
|
||||
#
|
||||
# Example:
|
||||
# LoadModule foo_module modules/mod_foo.so
|
||||
#
|
||||
LoadModule authn_file_module libexec/apache2/mod_authn_file.so
|
||||
LoadModule authn_dbm_module libexec/apache2/mod_authn_dbm.so
|
||||
LoadModule authn_anon_module libexec/apache2/mod_authn_anon.so
|
||||
LoadModule authn_dbd_module libexec/apache2/mod_authn_dbd.so
|
||||
LoadModule authn_default_module libexec/apache2/mod_authn_default.so
|
||||
LoadModule authz_host_module libexec/apache2/mod_authz_host.so
|
||||
LoadModule authz_groupfile_module libexec/apache2/mod_authz_groupfile.so
|
||||
LoadModule authz_user_module libexec/apache2/mod_authz_user.so
|
||||
LoadModule authz_dbm_module libexec/apache2/mod_authz_dbm.so
|
||||
LoadModule authz_owner_module libexec/apache2/mod_authz_owner.so
|
||||
LoadModule authz_default_module libexec/apache2/mod_authz_default.so
|
||||
LoadModule auth_basic_module libexec/apache2/mod_auth_basic.so
|
||||
LoadModule auth_digest_module libexec/apache2/mod_auth_digest.so
|
||||
LoadModule cache_module libexec/apache2/mod_cache.so
|
||||
LoadModule disk_cache_module libexec/apache2/mod_disk_cache.so
|
||||
LoadModule mem_cache_module libexec/apache2/mod_mem_cache.so
|
||||
LoadModule dbd_module libexec/apache2/mod_dbd.so
|
||||
LoadModule dumpio_module libexec/apache2/mod_dumpio.so
|
||||
LoadModule reqtimeout_module libexec/apache2/mod_reqtimeout.so
|
||||
LoadModule ext_filter_module libexec/apache2/mod_ext_filter.so
|
||||
LoadModule include_module libexec/apache2/mod_include.so
|
||||
LoadModule filter_module libexec/apache2/mod_filter.so
|
||||
LoadModule substitute_module libexec/apache2/mod_substitute.so
|
||||
LoadModule deflate_module libexec/apache2/mod_deflate.so
|
||||
LoadModule log_config_module libexec/apache2/mod_log_config.so
|
||||
LoadModule log_forensic_module libexec/apache2/mod_log_forensic.so
|
||||
LoadModule logio_module libexec/apache2/mod_logio.so
|
||||
LoadModule env_module libexec/apache2/mod_env.so
|
||||
LoadModule mime_magic_module libexec/apache2/mod_mime_magic.so
|
||||
LoadModule cern_meta_module libexec/apache2/mod_cern_meta.so
|
||||
LoadModule expires_module libexec/apache2/mod_expires.so
|
||||
LoadModule headers_module libexec/apache2/mod_headers.so
|
||||
LoadModule ident_module libexec/apache2/mod_ident.so
|
||||
LoadModule usertrack_module libexec/apache2/mod_usertrack.so
|
||||
#LoadModule unique_id_module libexec/apache2/mod_unique_id.so
|
||||
LoadModule setenvif_module libexec/apache2/mod_setenvif.so
|
||||
LoadModule version_module libexec/apache2/mod_version.so
|
||||
LoadModule proxy_module libexec/apache2/mod_proxy.so
|
||||
LoadModule proxy_connect_module libexec/apache2/mod_proxy_connect.so
|
||||
LoadModule proxy_ftp_module libexec/apache2/mod_proxy_ftp.so
|
||||
LoadModule proxy_http_module libexec/apache2/mod_proxy_http.so
|
||||
LoadModule proxy_scgi_module libexec/apache2/mod_proxy_scgi.so
|
||||
LoadModule proxy_ajp_module libexec/apache2/mod_proxy_ajp.so
|
||||
LoadModule proxy_balancer_module libexec/apache2/mod_proxy_balancer.so
|
||||
LoadModule ssl_module libexec/apache2/mod_ssl.so
|
||||
LoadModule mime_module libexec/apache2/mod_mime.so
|
||||
LoadModule dav_module libexec/apache2/mod_dav.so
|
||||
LoadModule status_module libexec/apache2/mod_status.so
|
||||
LoadModule autoindex_module libexec/apache2/mod_autoindex.so
|
||||
LoadModule asis_module libexec/apache2/mod_asis.so
|
||||
LoadModule info_module libexec/apache2/mod_info.so
|
||||
LoadModule cgi_module libexec/apache2/mod_cgi.so
|
||||
LoadModule dav_fs_module libexec/apache2/mod_dav_fs.so
|
||||
LoadModule vhost_alias_module libexec/apache2/mod_vhost_alias.so
|
||||
LoadModule negotiation_module libexec/apache2/mod_negotiation.so
|
||||
LoadModule dir_module libexec/apache2/mod_dir.so
|
||||
LoadModule imagemap_module libexec/apache2/mod_imagemap.so
|
||||
LoadModule actions_module libexec/apache2/mod_actions.so
|
||||
LoadModule speling_module libexec/apache2/mod_speling.so
|
||||
LoadModule userdir_module libexec/apache2/mod_userdir.so
|
||||
LoadModule alias_module libexec/apache2/mod_alias.so
|
||||
LoadModule rewrite_module libexec/apache2/mod_rewrite.so
|
||||
#LoadModule perl_module libexec/apache2/mod_perl.so
|
||||
#LoadModule php5_module libexec/apache2/libphp5.so
|
||||
#LoadModule hfs_apple_module libexec/apache2/mod_hfs_apple.so
|
||||
|
||||
<IfModule !mpm_netware_module>
|
||||
<IfModule !mpm_winnt_module>
|
||||
#
|
||||
# If you wish httpd to run as a different user or group, you must run
|
||||
# httpd as root initially and it will switch.
|
||||
#
|
||||
# User/Group: The name (or #number) of the user/group to run httpd as.
|
||||
# It is usually good practice to create a dedicated user and group for
|
||||
# running httpd, as with most system services.
|
||||
#
|
||||
User _www
|
||||
Group _www
|
||||
|
||||
</IfModule>
|
||||
</IfModule>
|
||||
|
||||
# 'Main' server configuration
|
||||
#
|
||||
# The directives in this section set up the values used by the 'main'
|
||||
# server, which responds to any requests that aren't handled by a
|
||||
# <VirtualHost> definition. These values also provide defaults for
|
||||
# any <VirtualHost> containers you may define later in the file.
|
||||
#
|
||||
# All of these directives may appear inside <VirtualHost> containers,
|
||||
# in which case these default settings will be overridden for the
|
||||
# virtual host being defined.
|
||||
#
|
||||
|
||||
#
|
||||
# ServerAdmin: Your address, where problems with the server should be
|
||||
# e-mailed. This address appears on some server-generated pages, such
|
||||
# as error documents. e.g. admin@your-domain.com
|
||||
#
|
||||
ServerAdmin you@example.com
|
||||
|
||||
#
|
||||
# ServerName gives the name and port that the server uses to identify itself.
|
||||
# This can often be determined automatically, but we recommend you specify
|
||||
# it explicitly to prevent problems during startup.
|
||||
#
|
||||
# If your host doesn't have a registered DNS name, enter its IP address here.
|
||||
#
|
||||
#ServerName www.example.com:80
|
||||
|
||||
#
|
||||
# DocumentRoot: The directory out of which you will serve your
|
||||
# documents. By default, all requests are taken from this directory, but
|
||||
# symbolic links and aliases may be used to point to other locations.
|
||||
#
|
||||
DocumentRoot "/Library/WebServer/Documents"
|
||||
|
||||
#
|
||||
# Each directory to which Apache has access can be configured with respect
|
||||
# to which services and features are allowed and/or disabled in that
|
||||
# directory (and its subdirectories).
|
||||
#
|
||||
# First, we configure the "default" to be a very restrictive set of
|
||||
# features.
|
||||
#
|
||||
<Directory />
|
||||
Options FollowSymLinks
|
||||
AllowOverride None
|
||||
Order deny,allow
|
||||
Deny from all
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# Note that from this point forward you must specifically allow
|
||||
# particular features to be enabled - so if something's not working as
|
||||
# you might expect, make sure that you have specifically enabled it
|
||||
# below.
|
||||
#
|
||||
|
||||
#
|
||||
# This should be changed to whatever you set DocumentRoot to.
|
||||
#
|
||||
<Directory "/Library/WebServer/Documents">
|
||||
#
|
||||
# Possible values for the Options directive are "None", "All",
|
||||
# or any combination of:
|
||||
# Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews
|
||||
#
|
||||
# Note that "MultiViews" must be named *explicitly* --- "Options All"
|
||||
# doesn't give it to you.
|
||||
#
|
||||
# The Options directive is both complicated and important. Please see
|
||||
# http://httpd.apache.org/docs/2.2/mod/core.html#options
|
||||
# for more information.
|
||||
#
|
||||
Options Indexes FollowSymLinks MultiViews
|
||||
|
||||
#
|
||||
# AllowOverride controls what directives may be placed in .htaccess files.
|
||||
# It can be "All", "None", or any combination of the keywords:
|
||||
# Options FileInfo AuthConfig Limit
|
||||
#
|
||||
AllowOverride None
|
||||
|
||||
#
|
||||
# Controls who can get stuff from this server.
|
||||
#
|
||||
Order allow,deny
|
||||
Allow from all
|
||||
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# DirectoryIndex: sets the file that Apache will serve if a directory
|
||||
# is requested.
|
||||
#
|
||||
<IfModule dir_module>
|
||||
DirectoryIndex index.html
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The following lines prevent .htaccess and .htpasswd files from being
|
||||
# viewed by Web clients.
|
||||
#
|
||||
<FilesMatch "^\.([Hh][Tt]|[Dd][Ss]_[Ss])">
|
||||
Order allow,deny
|
||||
Deny from all
|
||||
Satisfy All
|
||||
</FilesMatch>
|
||||
|
||||
#
|
||||
# Apple specific filesystem protection.
|
||||
#
|
||||
<Files "rsrc">
|
||||
Order allow,deny
|
||||
Deny from all
|
||||
Satisfy All
|
||||
</Files>
|
||||
<DirectoryMatch ".*\.\.namedfork">
|
||||
Order allow,deny
|
||||
Deny from all
|
||||
Satisfy All
|
||||
</DirectoryMatch>
|
||||
|
||||
#
|
||||
# ErrorLog: The location of the error log file.
|
||||
# If you do not specify an ErrorLog directive within a <VirtualHost>
|
||||
# container, error messages relating to that virtual host will be
|
||||
# logged here. If you *do* define an error logfile for a <VirtualHost>
|
||||
# container, that host's errors will be logged there and not here.
|
||||
#
|
||||
ErrorLog "/private/var/log/apache2/error_log"
|
||||
|
||||
#
|
||||
# LogLevel: Control the number of messages logged to the error_log.
|
||||
# Possible values include: debug, info, notice, warn, error, crit,
|
||||
# alert, emerg.
|
||||
#
|
||||
LogLevel warn
|
||||
|
||||
<IfModule log_config_module>
|
||||
#
|
||||
# The following directives define some format nicknames for use with
|
||||
# a CustomLog directive (see below).
|
||||
#
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b" common
|
||||
|
||||
<IfModule logio_module>
|
||||
# You need to enable mod_logio.c to use %I and %O
|
||||
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# The location and format of the access logfile (Common Logfile Format).
|
||||
# If you do not define any access logfiles within a <VirtualHost>
|
||||
# container, they will be logged here. Contrariwise, if you *do*
|
||||
# define per-<VirtualHost> access logfiles, transactions will be
|
||||
# logged therein and *not* in this file.
|
||||
#
|
||||
CustomLog "/private/var/log/apache2/access_log" common
|
||||
|
||||
#
|
||||
# If you prefer a logfile with access, agent, and referer information
|
||||
# (Combined Logfile Format) you can use the following directive.
|
||||
#
|
||||
#CustomLog "/private/var/log/apache2/access_log" combined
|
||||
</IfModule>
|
||||
|
||||
<IfModule alias_module>
|
||||
#
|
||||
# Redirect: Allows you to tell clients about documents that used to
|
||||
# exist in your server's namespace, but do not anymore. The client
|
||||
# will make a new request for the document at its new location.
|
||||
# Example:
|
||||
# Redirect permanent /foo http://www.example.com/bar
|
||||
|
||||
#
|
||||
# Alias: Maps web paths into filesystem paths and is used to
|
||||
# access content that does not live under the DocumentRoot.
|
||||
# Example:
|
||||
# Alias /webpath /full/filesystem/path
|
||||
#
|
||||
# If you include a trailing / on /webpath then the server will
|
||||
# require it to be present in the URL. You will also likely
|
||||
# need to provide a <Directory> section to allow access to
|
||||
# the filesystem path.
|
||||
|
||||
#
|
||||
# ScriptAlias: This controls which directories contain server scripts.
|
||||
# ScriptAliases are essentially the same as Aliases, except that
|
||||
# documents in the target directory are treated as applications and
|
||||
# run by the server when requested rather than as documents sent to the
|
||||
# client. The same rules about trailing "/" apply to ScriptAlias
|
||||
# directives as to Alias.
|
||||
#
|
||||
ScriptAliasMatch ^/cgi-bin/((?!(?i:webobjects)).*$) "/Library/WebServer/CGI-Executables/$1"
|
||||
|
||||
</IfModule>
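The ScriptAliasMatch pattern above excludes request paths beginning with "webobjects" (case-insensitively) via a negative lookahead; a small Python sketch, assuming Python's re semantics are close enough to Apache's PCRE for this particular pattern, of which paths it captures:

import re

# Same pattern as the ScriptAliasMatch directive above; group 1 is what would
# be substituted for $1 in the filesystem target.
pattern = re.compile(r'^/cgi-bin/((?!(?i:webobjects)).*$)')

for path in ("/cgi-bin/test.cgi", "/cgi-bin/WebObjects/app"):
    m = pattern.match(path)
    print(path, "->", m.group(1) if m else "not aliased")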
|
||||
|
||||
<IfModule cgid_module>
|
||||
#
|
||||
# ScriptSock: On threaded servers, designate the path to the UNIX
|
||||
# socket used to communicate with the CGI daemon of mod_cgid.
|
||||
#
|
||||
#Scriptsock /private/var/run/cgisock
|
||||
</IfModule>
|
||||
|
||||
#
|
||||
# "/Library/WebServer/CGI-Executables" should be changed to whatever your ScriptAliased
|
||||
# CGI directory is located, if you have that configured.
|
||||
#
|
||||
<Directory "/Library/WebServer/CGI-Executables">
|
||||
AllowOverride None
|
||||
Options None
|
||||
Order allow,deny
|
||||
Allow from all
|
||||
</Directory>
|
||||
|
||||
#
|
||||
# DefaultType: the default MIME type the server will use for a document
|
||||
# if it cannot otherwise determine one, such as from filename extensions.
|
||||
# If your server contains mostly text or HTML documents, "text/plain" is
|
||||
# a good value. If most of your content is binary, such as applications
|
||||
# or images, you may want to use "application/octet-stream" instead to
|
||||
# keep browsers from trying to display binary files as though they are
|
||||
# text.
|
||||
#
|
||||
DefaultType text/plain
|
||||
|
||||
<IfModule mime_module>
|
||||
#
|
||||
# TypesConfig points to the file containing the list of mappings from
|
||||
# filename extension to MIME-type.
|
||||
#
|
||||
TypesConfig /private/etc/apache2/mime.types
|
||||
|
||||
#
|
||||
# AddType allows you to add to or override the MIME configuration
|
||||
# file specified in TypesConfig for specific file types.
|
||||
#
|
||||
#AddType application/x-gzip .tgz
|
||||
#
|
||||
# AddEncoding allows you to have certain browsers uncompress
|
||||
# information on the fly. Note: Not all browsers support this.
|
||||
#
|
||||
#AddEncoding x-compress .Z
|
||||
#AddEncoding x-gzip .gz .tgz
|
||||
#
|
||||
# If the AddEncoding directives above are commented-out, then you
|
||||
# probably should define those extensions to indicate media types:
|
||||
#
|
||||
AddType application/x-compress .Z
|
||||
AddType application/x-gzip .gz .tgz
|
||||
|
||||
#
|
||||
# AddHandler allows you to map certain file extensions to "handlers":
|
||||
# actions unrelated to filetype. These can be either built into the server
|
||||
# or added with the Action directive (see below)
|
||||
#
|
||||
# To use CGI scripts outside of ScriptAliased directories:
|
||||
# (You will also need to add "ExecCGI" to the "Options" directive.)
|
||||
#
|
||||
#AddHandler cgi-script .cgi
|
||||
|
||||
# For type maps (negotiated resources):
|
||||
#AddHandler type-map var
|
||||
|
||||
#
|
||||
# Filters allow you to process content before it is sent to the client.
|
||||
#
|
||||
# To parse .shtml files for server-side includes (SSI):
|
||||
# (You will also need to add "Includes" to the "Options" directive.)
|
||||
#
|
||||
#AddType text/html .shtml
|
||||
#AddOutputFilter INCLUDES .shtml
|
||||
</IfModule>
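As a loose analogue of the TypesConfig/AddType mappings above, Python's mimetypes module keeps a similar extension-to-MIME-type table; the ".adoc" mapping below is a made-up example for illustration, not something this configuration defines:

import mimetypes

# Register an extra extension at runtime, then look a file name up.
mimetypes.add_type("text/x-asciidoc", ".adoc")
print(mimetypes.guess_type("sample.adoc"))   # ('text/x-asciidoc', None)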
|
||||
|
||||
#
|
||||
# The mod_mime_magic module allows the server to use various hints from the
|
||||
# contents of the file itself to determine its type. The MIMEMagicFile
|
||||
# directive tells the module where the hint definitions are located.
|
||||
#
|
||||
#MIMEMagicFile /private/etc/apache2/magic
|
||||
|
||||
#
|
||||
# Customizable error responses come in three flavors:
|
||||
# 1) plain text 2) local redirects 3) external redirects
|
||||
#
|
||||
# Some examples:
|
||||
#ErrorDocument 500 "The server made a boo boo."
|
||||
#ErrorDocument 404 /missing.html
|
||||
#ErrorDocument 404 "/cgi-bin/missing_handler.pl"
|
||||
#ErrorDocument 402 http://www.example.com/subscription_info.html
|
||||
#
|
||||
|
||||
#
|
||||
# MaxRanges: Maximum number of Ranges in a request before
|
||||
# returning the entire resource, or one of the special
|
||||
# values 'default', 'none' or 'unlimited'.
|
||||
# Default setting is to accept 200 Ranges.
|
||||
#MaxRanges unlimited
|
||||
|
||||
#
|
||||
# EnableMMAP and EnableSendfile: On systems that support it,
|
||||
# memory-mapping or the sendfile syscall is used to deliver
|
||||
# files. This usually improves server performance, but must
|
||||
# be turned off when serving from network-mounted
|
||||
# filesystems or if support for these functions is otherwise
|
||||
# broken on your system.
|
||||
#
|
||||
#EnableMMAP off
|
||||
#EnableSendfile off
|
||||
|
||||
# 6894961
|
||||
TraceEnable off
|
||||
|
||||
# Supplemental configuration
|
||||
#
|
||||
# The configuration files in the /private/etc/apache2/extra/ directory can be
|
||||
# included to add extra features or to modify the default configuration of
|
||||
# the server, or you may simply copy their contents here and change as
|
||||
# necessary.
|
||||
|
||||
# Server-pool management (MPM specific)
|
||||
Include /private/etc/apache2/extra/httpd-mpm.conf
|
||||
|
||||
# Multi-language error messages
|
||||
#Include /private/etc/apache2/extra/httpd-multilang-errordoc.conf
|
||||
|
||||
# Fancy directory listings
|
||||
Include /private/etc/apache2/extra/httpd-autoindex.conf
|
||||
|
||||
# Language settings
|
||||
Include /private/etc/apache2/extra/httpd-languages.conf
|
||||
|
||||
# User home directories
|
||||
Include /private/etc/apache2/extra/httpd-userdir.conf
|
||||
|
||||
# Real-time info on requests and configuration
|
||||
#Include /private/etc/apache2/extra/httpd-info.conf
|
||||
|
||||
# Virtual hosts
|
||||
#Include /private/etc/apache2/extra/httpd-vhosts.conf
|
||||
|
||||
# Local access to the Apache HTTP Server Manual
|
||||
Include /private/etc/apache2/extra/httpd-manual.conf
|
||||
|
||||
# Distributed authoring and versioning (WebDAV)
|
||||
#Include /private/etc/apache2/extra/httpd-dav.conf
|
||||
|
||||
# Various default settings
|
||||
#Include /private/etc/apache2/extra/httpd-default.conf
|
||||
|
||||
# Secure (SSL/TLS) connections
|
||||
#Include /private/etc/apache2/extra/httpd-ssl.conf
|
||||
#
|
||||
# Note: The following must be present to support
|
||||
# starting without SSL on platforms with no /dev/random equivalent
|
||||
# but a statically compiled-in mod_ssl.
|
||||
#
|
||||
<IfModule ssl_module>
|
||||
SSLRandomSeed startup builtin
|
||||
SSLRandomSeed connect builtin
|
||||
</IfModule>
|
||||
|
||||
Include /private/etc/apache2/other/*.conf
|
||||
13
samples/AsciiDoc/encoding.asciidoc
Normal file
@@ -0,0 +1,13 @@
Gregory Romé has written an AsciiDoc plugin for the Redmine project management application.

https://github.com/foo-users/foo
へと `vicmd` キーマップを足してみている試み、
アニメーションgifです。

tag::romé[]
Gregory Romé has written an AsciiDoc plugin for the Redmine project management application.
end::romé[]

== Überschrift

* Codierungen sind verrückt auf älteren Versionen von Ruby
10
samples/AsciiDoc/list.asc
Normal file
@@ -0,0 +1,10 @@
AsciiDoc Home Page
==================

Example Articles
~~~~~~~~~~~~~~~~
- Item 1

- Item 2

- Item 3
25
samples/AsciiDoc/sample.adoc
Normal file
@@ -0,0 +1,25 @@
Document Title
==============
Doc Writer <thedoc@asciidoctor.org>
:idprefix: id_

Preamble paragraph.

NOTE: This is a test, only a test.

== Section A

*Section A* paragraph.

=== Section A Subsection

*Section A* 'subsection' paragraph.

== Section B

*Section B* paragraph.

.Section B list
* Item 1
* Item 2
* Item 3
121
samples/Awk/test.awk
Normal file
@@ -0,0 +1,121 @@
|
||||
#!/bin/awk -f
|
||||
|
||||
BEGIN {
|
||||
# It is not possible to define output file names here because
|
||||
# FILENAME is not defined in the BEGIN section
|
||||
n = "";
|
||||
printf "Generating data files ...";
|
||||
network_max_bandwidth_in_byte = 10000000;
|
||||
network_max_packet_per_second = 1000000;
|
||||
last3 = 0;
|
||||
last4 = 0;
|
||||
last5 = 0;
|
||||
last6 = 0;
|
||||
}
|
||||
{
|
||||
if ($1 ~ /Average/)
|
||||
{ # Skip the Average values
|
||||
n = "";
|
||||
next;
|
||||
}
|
||||
|
||||
if ($2 ~ /all/)
|
||||
{ # This is the cpu info
|
||||
print $3 > FILENAME".cpu.user.dat";
|
||||
# print $4 > FILENAME".cpu.nice.dat";
|
||||
print $5 > FILENAME".cpu.system.dat";
|
||||
# print $6 > FILENAME".cpu.iowait.dat";
|
||||
print $7 > FILENAME".cpu.idle.dat";
|
||||
print 100-$7 > FILENAME".cpu.busy.dat";
|
||||
}
|
||||
if ($2 ~ /eth0/)
|
||||
{ # This is the eth0 network info
|
||||
if ($3 > network_max_packet_per_second)
|
||||
print last3 > FILENAME".net.rxpck.dat"; # Total number of packets received per second.
|
||||
else
|
||||
{
|
||||
last3 = $3;
|
||||
print $3 > FILENAME".net.rxpck.dat"; # Total number of packets received per second.
|
||||
}
|
||||
if ($4 > network_max_packet_per_second)
|
||||
print last4 > FILENAME".net.txpck.dat"; # Total number of packets transmitted per second.
|
||||
else
|
||||
{
|
||||
last4 = $4;
|
||||
print $4 > FILENAME".net.txpck.dat"; # Total number of packets transmitted per second.
|
||||
}
|
||||
if ($5 > network_max_bandwidth_in_byte)
|
||||
print last5 > FILENAME".net.rxbyt.dat"; # Total number of bytes received per second.
|
||||
else
|
||||
{
|
||||
last5 = $5;
|
||||
print $5 > FILENAME".net.rxbyt.dat"; # Total number of bytes received per second.
|
||||
}
|
||||
if ($6 > network_max_bandwidth_in_byte)
|
||||
print last6 > FILENAME".net.txbyt.dat"; # Total number of bytes transmitted per second.
|
||||
else
|
||||
{
|
||||
last6 = $6;
|
||||
print $6 > FILENAME".net.txbyt.dat"; # Total number of bytes transmitted per second.
|
||||
}
|
||||
# print $7 > FILENAME".net.rxcmp.dat"; # Number of compressed packets received per second (for cslip etc.).
|
||||
# print $8 > FILENAME".net.txcmp.dat"; # Number of compressed packets transmitted per second.
|
||||
# print $9 > FILENAME".net.rxmcst.dat"; # Number of multicast packets received per second.
|
||||
}
|
||||
|
||||
# Detect which is the next info to be parsed
|
||||
if ($2 ~ /proc|cswch|tps|kbmemfree|totsck/)
|
||||
{
|
||||
n = $2;
|
||||
}
|
||||
|
||||
# Only get lines with numbers (real data !)
|
||||
if ($2 ~ /[0-9]/)
|
||||
{
|
||||
if (n == "proc/s")
|
||||
{ # This is the proc/s info
|
||||
print $2 > FILENAME".proc.dat";
|
||||
# n = "";
|
||||
}
|
||||
if (n == "cswch/s")
|
||||
{ # This is the context switches per second info
|
||||
print $2 > FILENAME".ctxsw.dat";
|
||||
# n = "";
|
||||
}
|
||||
if (n == "tps")
|
||||
{ # This is the disk info
|
||||
print $2 > FILENAME".disk.tps.dat"; # total transfers per second
|
||||
print $3 > FILENAME".disk.rtps.dat"; # read requests per second
|
||||
print $4 > FILENAME".disk.wtps.dat"; # write requests per second
|
||||
print $5 > FILENAME".disk.brdps.dat"; # block reads per second
|
||||
print $6 > FILENAME".disk.bwrps.dat"; # block writes per second
|
||||
# n = "";
|
||||
}
|
||||
if (n == "kbmemfree")
|
||||
{ # This is the mem info
|
||||
print $2 > FILENAME".mem.kbmemfree.dat"; # Amount of free memory available in kilobytes.
|
||||
print $3 > FILENAME".mem.kbmemused.dat"; # Amount of used memory in kilobytes. This does not take into account memory used by the kernel itself.
|
||||
print $4 > FILENAME".mem.memused.dat"; # Percentage of used memory.
|
||||
# It appears the kbmemshrd has been removed from the sysstat output - ntolia
|
||||
# print $X > FILENAME".mem.kbmemshrd.dat"; # Amount of memory shared by the system in kilobytes. Always zero with 2.4 kernels.
|
||||
# print $5 > FILENAME".mem.kbbuffers.dat"; # Amount of memory used as buffers by the kernel in kilobytes.
|
||||
print $6 > FILENAME".mem.kbcached.dat"; # Amount of memory used to cache data by the kernel in kilobytes.
|
||||
# print $7 > FILENAME".mem.kbswpfree.dat"; # Amount of free swap space in kilobytes.
|
||||
# print $8 > FILENAME".mem.kbswpused.dat"; # Amount of used swap space in kilobytes.
|
||||
print $9 > FILENAME".mem.swpused.dat"; # Percentage of used swap space.
|
||||
# n = "";
|
||||
}
|
||||
if (n == "totsck")
|
||||
{ # This is the socket info
|
||||
print $2 > FILENAME".sock.totsck.dat"; # Total number of used sockets.
|
||||
print $3 > FILENAME".sock.tcpsck.dat"; # Number of TCP sockets currently in use.
|
||||
# print $4 > FILENAME".sock.udpsck.dat"; # Number of UDP sockets currently in use.
|
||||
# print $5 > FILENAME".sock.rawsck.dat"; # Number of RAW sockets currently in use.
|
||||
# print $6 > FILENAME".sock.ip-frag.dat"; # Number of IP fragments currently in use.
|
||||
# n = "";
|
||||
}
|
||||
}
|
||||
}
|
||||
END {
|
||||
print " '" FILENAME "' done.";
|
||||
}
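The eth0 branch above replaces implausibly large readings with the last accepted value before writing them to the .dat files; a hypothetical Python equivalent of that clamping step (function name and threshold are illustrative, not from the script):

def clamp_spikes(values, limit):
    # Values above the sanity threshold are replaced by the last good value,
    # mirroring the last3/last4/last5/last6 handling in the awk script.
    last_good = 0
    for v in values:
        if v > limit:
            yield last_good
        else:
            last_good = v
            yield v

print(list(clamp_spikes([10, 20, 10**9, 30], limit=10**6)))  # [10, 20, 20, 30]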
|
||||
BIN
samples/Binary/cube.stl
Normal file
Binary file not shown.
147
samples/BlitzBasic/HalfAndDouble.bb
Normal file
@@ -0,0 +1,147 @@
|
||||
|
||||
Local bk = CreateBank(8)
|
||||
PokeFloat bk, 0, -1
|
||||
Print Bin(PeekInt(bk, 0))
|
||||
Print %1000000000000000
|
||||
Print Bin(1 Shl 31)
|
||||
Print $1f
|
||||
Print $ff
|
||||
Print $1f + (127 - 15)
|
||||
Print Hex(%01111111100000000000000000000000)
|
||||
Print Hex(~%11111111100000000000000000000000)
|
||||
|
||||
Print Bin(FloatToHalf(-2.5))
|
||||
Print HalfToFloat(FloatToHalf(-200000000000.0))
|
||||
|
||||
Print Bin(FToI(-2.5))
|
||||
|
||||
WaitKey
|
||||
End
|
||||
|
||||
|
||||
; Half-precision (16-bit) arithmetic library
|
||||
;============================================
|
||||
|
||||
Global Half_CBank_
|
||||
|
||||
Function FToI(f#)
|
||||
If Half_CBank_ = 0 Then Half_CBank_ = CreateBank(4)
|
||||
PokeFloat Half_CBank_, 0, f
|
||||
Return PeekInt(Half_CBank_, 0)
|
||||
End Function
|
||||
|
||||
Function HalfToFloat#(h)
|
||||
Local signBit, exponent, fraction, fBits
|
||||
|
||||
signBit = (h And 32768) <> 0
|
||||
exponent = (h And %0111110000000000) Shr 10
|
||||
fraction = (h And %0000001111111111)
|
||||
|
||||
If exponent = $1F Then exponent = $FF : ElseIf exponent Then exponent = (exponent - 15) + 127
|
||||
fBits = (signBit Shl 31) Or (exponent Shl 23) Or (fraction Shl 13)
|
||||
|
||||
If Half_CBank_ = 0 Then Half_CBank_ = CreateBank(4)
|
||||
PokeInt Half_CBank_, 0, fBits
|
||||
Return PeekFloat(Half_CBank_, 0)
|
||||
End Function
|
||||
|
||||
Function FloatToHalf(f#)
|
||||
Local signBit, exponent, fraction, fBits
|
||||
|
||||
If Half_CBank_ = 0 Then Half_CBank_ = CreateBank(4)
|
||||
PokeFloat Half_CBank_, 0, f
|
||||
fBits = PeekInt(Half_CBank_, 0)
|
||||
|
||||
signBit = (fBits And (1 Shl 31)) <> 0
|
||||
exponent = (fBits And $7F800000) Shr 23
|
||||
fraction = fBits And $007FFFFF
|
||||
|
||||
If exponent
|
||||
exponent = exponent - 127
|
||||
If Abs(exponent) > $1F
|
||||
If exponent <> ($FF - 127) Then fraction = 0
|
||||
exponent = $1F * Sgn(exponent)
|
||||
Else
|
||||
exponent = exponent + 15
|
||||
EndIf
|
||||
exponent = exponent And %11111
|
||||
EndIf
|
||||
fraction = fraction Shr 13
|
||||
|
||||
Return (signBit Shl 15) Or (exponent Shl 10) Or fraction
|
||||
End Function
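HalfToFloat and FloatToHalf above pack and unpack IEEE 754 half-precision bit patterns by hand; a quick Python cross-check using the struct module's native half-float codec ('e', available since Python 3.6) can be used to sanity-check the bit layout for in-range values:

import struct

def float_to_half_bits(f):
    # Encode as half precision, then read the raw 16-bit pattern back.
    return struct.unpack('<H', struct.pack('<e', f))[0]

def half_bits_to_float(h):
    # Reinterpret a 16-bit pattern as a half-precision float.
    return struct.unpack('<e', struct.pack('<H', h))[0]

print(bin(float_to_half_bits(-2.5)))               # sign / exponent / fraction
print(half_bits_to_float(float_to_half_bits(-2.5)))  # -2.5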
|
||||
|
||||
Function HalfAdd(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfSub(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfMul(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfDiv(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfLT(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function HalfGT(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
; Double-precision (64-bit) arithmetic library
|
||||
;===============================================
|
||||
|
||||
Global DoubleOut[1], Double_CBank_
|
||||
|
||||
Function DoubleToFloat#(d[1])
|
||||
|
||||
End Function
|
||||
|
||||
Function FloatToDouble(f#)
|
||||
|
||||
End Function
|
||||
|
||||
Function IntToDouble(i)
|
||||
|
||||
End Function
|
||||
|
||||
Function SefToDouble(s, e, f)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleAdd(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleSub(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleMul(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleDiv(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleLT(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
Function DoubleGT(l, r)
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
;~IDEal Editor Parameters:
|
||||
;~F#1A#20#2F
|
||||
;~C#Blitz3D
|
||||
369
samples/BlitzBasic/LList.bb
Normal file
@@ -0,0 +1,369 @@
|
||||
|
||||
; Double-linked list container class
|
||||
;====================================
|
||||
|
||||
; with thanks to MusicianKool, for concept and issue fixes
|
||||
|
||||
|
||||
Type LList
|
||||
Field head_.ListNode
|
||||
Field tail_.ListNode
|
||||
End Type
|
||||
|
||||
Type ListNode
|
||||
Field pv_.ListNode
|
||||
Field nx_.ListNode
|
||||
Field Value
|
||||
End Type
|
||||
|
||||
Type Iterator
|
||||
Field Value
|
||||
Field l_.LList
|
||||
Field cn_.ListNode, cni_
|
||||
End Type
|
||||
|
||||
|
||||
;Create a new LList object
|
||||
Function CreateList.LList()
|
||||
Local l.LList = New LList
|
||||
|
||||
l\head_ = New ListNode
|
||||
l\tail_ = New ListNode
|
||||
|
||||
l\head_\nx_ = l\tail_ ;End caps
|
||||
l\head_\pv_ = l\head_ ;These make it more or less safe to iterate freely
|
||||
l\head_\Value = 0
|
||||
|
||||
l\tail_\nx_ = l\tail_
|
||||
l\tail_\pv_ = l\head_
|
||||
l\tail_\Value = 0
|
||||
|
||||
Return l
|
||||
End Function
|
||||
|
||||
;Free a list and all elements (not any values)
|
||||
Function FreeList(l.LList)
|
||||
ClearList l
|
||||
Delete l\head_
|
||||
Delete l\tail_
|
||||
Delete l
|
||||
End Function
|
||||
|
||||
;Remove all the elements from a list (does not free values)
|
||||
Function ClearList(l.LList)
|
||||
Local n.ListNode = l\head_\nx_
|
||||
While n <> l\tail_
|
||||
Local nx.ListNode = n\nx_
|
||||
Delete n
|
||||
n = nx
|
||||
Wend
|
||||
l\head_\nx_ = l\tail_
|
||||
l\tail_\pv_ = l\head_
|
||||
End Function
|
||||
|
||||
;Count the number of elements in a list (slow)
|
||||
Function ListLength(l.LList)
|
||||
Local i.Iterator = GetIterator(l), elems
|
||||
While EachIn(i)
|
||||
elems = elems + 1
|
||||
Wend
|
||||
Return elems
|
||||
End Function
|
||||
|
||||
;Return True if a list contains a given value
|
||||
Function ListContains(l.LList, Value)
|
||||
Return (ListFindNode(l, Value) <> Null)
|
||||
End Function
|
||||
|
||||
;Create a linked list from the intvalues in a bank (slow)
|
||||
Function ListFromBank.LList(bank)
|
||||
Local l.LList = CreateList()
|
||||
Local size = BankSize(bank), p
|
||||
|
||||
For p = 0 To size - 4 Step 4
|
||||
ListAddLast l, PeekInt(bank, p)
|
||||
Next
|
||||
|
||||
Return l
|
||||
End Function
|
||||
|
||||
;Create a bank containing all the values in a list (slow)
|
||||
Function ListToBank(l.LList)
|
||||
Local size = ListLength(l) * 4
|
||||
Local bank = CreateBank(size)
|
||||
|
||||
Local i.Iterator = GetIterator(l), p = 0
|
||||
While EachIn(i)
|
||||
PokeInt bank, p, i\Value
|
||||
p = p + 4
|
||||
Wend
|
||||
|
||||
Return bank
|
||||
End Function
|
||||
|
||||
;Swap the contents of two list objects
|
||||
Function SwapLists(l1.LList, l2.LList)
|
||||
Local tempH.ListNode = l1\head_, tempT.ListNode = l1\tail_
|
||||
l1\head_ = l2\head_
|
||||
l1\tail_ = l2\tail_
|
||||
l2\head_ = tempH
|
||||
l2\tail_ = tempT
|
||||
End Function
|
||||
|
||||
;Create a new list containing the same values as the first
|
||||
Function CopyList.LList(lo.LList)
|
||||
Local ln.LList = CreateList()
|
||||
Local i.Iterator = GetIterator(lo) : While EachIn(i)
|
||||
ListAddLast ln, i\Value
|
||||
Wend
|
||||
Return ln
|
||||
End Function
|
||||
|
||||
;Reverse the order of elements of a list
|
||||
Function ReverseList(l.LList)
|
||||
Local n1.ListNode, n2.ListNode, tmp.ListNode
|
||||
|
||||
n1 = l\head_
|
||||
n2 = l\head_\nx_
|
||||
|
||||
While n1 <> l\tail_
|
||||
n1\pv_ = n2
|
||||
tmp = n2\nx_
|
||||
n2\nx_ = n1
|
||||
n1 = n2
|
||||
n2 = tmp
|
||||
Wend
|
||||
|
||||
tmp = l\head_
|
||||
l\head_ = l\tail_
|
||||
l\tail_ = tmp
|
||||
|
||||
l\head_\pv_ = l\head_
|
||||
l\tail_\nx_ = l\tail_
|
||||
End Function
|
||||
|
||||
;Search a list to retrieve the first node with the given value
|
||||
Function ListFindNode.ListNode(l.LList, Value)
|
||||
Local n.ListNode = l\head_\nx_
|
||||
|
||||
While n <> l\tail_
|
||||
If n\Value = Value Then Return n
|
||||
n = n\nx_
|
||||
Wend
|
||||
|
||||
Return Null
|
||||
End Function
|
||||
|
||||
;Append a value to the end of a list (fast) and return the node
|
||||
Function ListAddLast.ListNode(l.LList, Value)
|
||||
Local n.ListNode = New ListNode
|
||||
|
||||
n\pv_ = l\tail_\pv_
|
||||
n\nx_ = l\tail_
|
||||
n\Value = Value
|
||||
|
||||
l\tail_\pv_ = n
|
||||
n\pv_\nx_ = n
|
||||
|
||||
Return n
|
||||
End Function
|
||||
|
||||
;Attach a value to the start of a list (fast) and return the node
|
||||
Function ListAddFirst.ListNode(l.LList, Value)
|
||||
Local n.ListNode = New ListNode
|
||||
|
||||
n\pv_ = l\head_
|
||||
n\nx_ = l\head_\nx_
|
||||
n\Value = Value
|
||||
|
||||
l\head_\nx_ = n
|
||||
n\nx_\pv_ = n
|
||||
|
||||
Return n
|
||||
End Function
|
||||
|
||||
;Remove the first occurrence of the given value from a list
|
||||
Function ListRemove(l.LList, Value)
|
||||
Local n.ListNode = ListFindNode(l, Value)
|
||||
If n <> Null Then RemoveListNode n
|
||||
End Function
|
||||
|
||||
;Remove a node from a list
|
||||
Function RemoveListNode(n.ListNode)
|
||||
n\pv_\nx_ = n\nx_
|
||||
n\nx_\pv_ = n\pv_
|
||||
Delete n
|
||||
End Function
|
||||
|
||||
;Return the value of the element at the given position from the start of the list,
|
||||
;or backwards from the end of the list for a negative index
|
||||
Function ValueAtIndex(l.LList, index)
|
||||
Local n.ListNode = ListNodeAtIndex(l, index)
|
||||
If n <> Null Then Return n\Value : Else Return 0
|
||||
End Function
|
||||
|
||||
;Return the ListNode at the given position from the start of the list, or backwards
|
||||
;from the end of the list for a negative index, or Null if invalid
|
||||
Function ListNodeAtIndex.ListNode(l.LList, index)
|
||||
Local e, n.ListNode
|
||||
|
||||
If index >= 0
|
||||
n = l\head_
|
||||
For e = 0 To index
|
||||
n = n\nx_
|
||||
Next
|
||||
If n = l\tail_ Then n = Null ;Beyond the end of the list - not valid
|
||||
|
||||
Else ;Negative index - count backward
|
||||
n = l\tail_
|
||||
For e = 0 To index Step -1
|
||||
n = n\pv_
|
||||
Next
|
||||
If n = l\head_ Then n = Null ;Before the start of the list - not valid
|
||||
|
||||
EndIf
|
||||
|
||||
Return n
|
||||
End Function
|
||||
|
||||
;Replace a value at the given position (added by MusicianKool)
|
||||
Function ReplaceValueAtIndex(l.LList,index,value)
|
||||
Local n.ListNode = ListNodeAtIndex(l,index)
|
||||
If n <> Null Then n\Value = value:Else Return 0
|
||||
End Function
|
||||
|
||||
;Remove and return a value at the given position (added by MusicianKool)
|
||||
Function RemoveNodeAtIndex(l.LList,index)
|
||||
Local n.ListNode = ListNodeAtIndex(l,index),tval
|
||||
If n <> Null Then tval = n\Value:RemoveListNode(n):Return tval:Else Return 0
|
||||
End Function
|
||||
|
||||
;Retrieve the first value from a list
|
||||
Function ListFirst(l.LList)
|
||||
If l\head_\nx_ <> l\tail_ Then Return l\head_\nx_\Value
|
||||
End Function
|
||||
|
||||
;Retrieve the last value from a list
|
||||
Function ListLast(l.LList)
|
||||
If l\tail_\pv_ <> l\head_ Then Return l\tail_\pv_\Value
|
||||
End Function
|
||||
|
||||
;Remove the first element from a list, and return its value
|
||||
Function ListRemoveFirst(l.LList)
|
||||
Local val
|
||||
If l\head_\nx_ <> l\tail_
|
||||
val = l\head_\nx_\Value
|
||||
RemoveListNode l\head_\nx_
|
||||
EndIf
|
||||
Return val
|
||||
End Function
|
||||
|
||||
;Remove the last element from a list, and return its value
|
||||
Function ListRemoveLast(l.LList)
|
||||
Local val
|
||||
If l\tail_\pv_ <> l\head_
|
||||
val = l\tail_\pv_\Value
|
||||
RemoveListNode l\tail_\pv_
|
||||
EndIf
|
||||
Return val
|
||||
End Function
|
||||
|
||||
;Insert a value into a list before the specified node, and return the new node
|
||||
Function InsertBeforeNode.ListNode(Value, n.ListNode)
|
||||
Local bef.ListNode = New ListNode
|
||||
|
||||
bef\pv_ = n\pv_
|
||||
bef\nx_ = n
|
||||
bef\Value = Value
|
||||
|
||||
n\pv_ = bef
|
||||
bef\pv_\nx_ = bef
|
||||
|
||||
Return bef
|
||||
End Function
|
||||
|
||||
;Insert a value into a list after the specified node, and return the new node
|
||||
Function InsertAfterNode.ListNode(Value, n.ListNode)
|
||||
Local aft.ListNode = New ListNode
|
||||
|
||||
aft\nx_ = n\nx_
|
||||
aft\pv_ = n
|
||||
aft\Value = Value
|
||||
|
||||
n\nx_ = aft
|
||||
aft\nx_\pv_ = aft
|
||||
|
||||
Return aft
|
||||
End Function
|
||||
|
||||
;Get an iterator object to use with a loop
|
||||
;This function means that most programs won't have to think about deleting iterators manually
|
||||
;(in general only a small, constant number will be created)
|
||||
Function GetIterator.Iterator(l.LList)
|
||||
Local i.Iterator
|
||||
|
||||
If l = Null Then RuntimeError "Cannot create Iterator for Null"
|
||||
|
||||
For i = Each Iterator ;See if there's an available iterator at the moment
|
||||
If i\l_ = Null Then Exit
|
||||
Next
|
||||
|
||||
If i = Null Then i = New Iterator ;If there wasn't, create one
|
||||
|
||||
i\l_ = l
|
||||
i\cn_ = l\head_
|
||||
i\cni_ = -1
|
||||
i\Value = 0 ;No especial reason why this has to be anything, but meh
|
||||
|
||||
Return i
|
||||
End Function
|
||||
|
||||
;Use as the argument to While to iterate over the members of a list
|
||||
Function EachIn(i.Iterator)
|
||||
|
||||
i\cn_ = i\cn_\nx_
|
||||
|
||||
If i\cn_ <> i\l_\tail_ ;Still items in the list
|
||||
i\Value = i\cn_\Value
|
||||
i\cni_ = i\cni_ + 1
|
||||
Return True
|
||||
|
||||
Else
|
||||
i\l_ = Null ;Disconnect from the list, having reached the end
|
||||
i\cn_ = Null
|
||||
i\cni_ = -1
|
||||
Return False
|
||||
|
||||
EndIf
|
||||
End Function
|
||||
|
||||
;Remove from the containing list the element currently pointed to by an iterator
|
||||
Function IteratorRemove(i.Iterator)
|
||||
If (i\cn_ <> i\l_\head_) And (i\cn_ <> i\l_\tail_)
|
||||
Local temp.ListNode = i\cn_
|
||||
|
||||
i\cn_ = i\cn_\pv_
|
||||
i\cni_ = i\cni_ - 1
|
||||
i\Value = 0
|
||||
|
||||
RemoveListNode temp
|
||||
|
||||
Return True
|
||||
Else
|
||||
Return False
|
||||
EndIf
|
||||
End Function
|
||||
|
||||
;Call this before breaking out of an EachIn loop, to disconnect the iterator from the list
|
||||
Function IteratorBreak(i.Iterator)
|
||||
i\l_ = Null
|
||||
i\cn_ = Null
|
||||
i\cni_ = -1
|
||||
i\Value = 0
|
||||
End Function
|
||||
|
||||
|
||||
;~IDEal Editor Parameters:
|
||||
;~F#5#A#10#18#2A#32#3E#47#4C#58#66#6F#78#8F#9B#A9#B7#BD#C5#CC
|
||||
;~F#E3#E9#EF#F4#F9#103#10D#11B#12B#13F#152#163
|
||||
;~C#Blitz3D
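LList.bb above implements a doubly linked list with dummy head/tail end caps so that insertion, removal and iteration need no empty-list special cases; a compact Python sketch of the same idea (names are illustrative, and the pooled Iterator type is replaced by a plain generator):

class Node:
    __slots__ = ("prev", "next", "value")
    def __init__(self, value=None):
        self.prev = self.next = None
        self.value = value

class LList:
    def __init__(self):
        # Sentinel end caps, like head_ and tail_ in LList.bb.
        self.head = Node()
        self.tail = Node()
        self.head.next = self.tail
        self.tail.prev = self.head

    def add_last(self, value):
        n = Node(value)
        n.prev, n.next = self.tail.prev, self.tail
        self.tail.prev.next = n
        self.tail.prev = n
        return n

    def __iter__(self):
        n = self.head.next
        while n is not self.tail:
            yield n.value
            n = n.next

l = LList()
for v in (1, 2, 3):
    l.add_last(v)
print(list(l))   # [1, 2, 3]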
|
||||
66
samples/BlitzBasic/PObj.bb
Normal file
@@ -0,0 +1,66 @@
|
||||
|
||||
Local i, start, result
|
||||
|
||||
Local s.Sum3Obj = New Sum3Obj
|
||||
|
||||
For i = 1 To 100000
|
||||
s = New Sum3Obj
|
||||
result = Handle Before s
|
||||
Delete s
|
||||
Next
|
||||
|
||||
start = MilliSecs()
|
||||
For i = 1 To 1000000
|
||||
result = Sum3_(MakeSum3Obj(i, i, i))
|
||||
Next
|
||||
start = MilliSecs() - start
|
||||
Print start
|
||||
|
||||
start = MilliSecs()
|
||||
For i = 1 To 1000000
|
||||
result = Sum3(i, i, i)
|
||||
Next
|
||||
start = MilliSecs() - start
|
||||
Print start
|
||||
|
||||
WaitKey
|
||||
End
|
||||
|
||||
|
||||
Function Sum3(a, b, c)
|
||||
Return a + b + c
|
||||
End Function
|
||||
|
||||
|
||||
Type Sum3Obj
|
||||
Field isActive
|
||||
Field a, b, c
|
||||
End Type
|
||||
|
||||
Function MakeSum3Obj(a, b, c)
|
||||
Local s.Sum3Obj = Last Sum3Obj
|
||||
If s\isActive Then s = New Sum3Obj
|
||||
s\isActive = True
|
||||
s\a = a
|
||||
s\b = b
|
||||
s\c = c
|
||||
|
||||
Restore label
|
||||
Read foo
|
||||
|
||||
Return Handle(s)
|
||||
End Function
|
||||
|
||||
.label
|
||||
Data (10 + 2), 12, 14
|
||||
:
|
||||
Function Sum3_(a_)
|
||||
Local a.Sum3Obj = Object.Sum3Obj a_
|
||||
Local return_ = a\a + a\b + a\c
|
||||
Insert a Before First Sum3Obj :: a\isActive = False
|
||||
Return return_
|
||||
End Function
|
||||
|
||||
|
||||
;~IDEal Editor Parameters:
|
||||
;~C#Blitz3D
|
||||
167
samples/Bluespec/TL.bsv
Normal file
@@ -0,0 +1,167 @@
|
||||
package TL;
|
||||
|
||||
interface TL;
|
||||
method Action ped_button_push();
|
||||
|
||||
(* always_enabled *)
|
||||
method Action set_car_state_N(Bool x);
|
||||
(* always_enabled *)
|
||||
method Action set_car_state_S(Bool x);
|
||||
(* always_enabled *)
|
||||
method Action set_car_state_E(Bool x);
|
||||
(* always_enabled *)
|
||||
method Action set_car_state_W(Bool x);
|
||||
|
||||
method Bool lampRedNS();
|
||||
method Bool lampAmberNS();
|
||||
method Bool lampGreenNS();
|
||||
|
||||
method Bool lampRedE();
|
||||
method Bool lampAmberE();
|
||||
method Bool lampGreenE();
|
||||
|
||||
method Bool lampRedW();
|
||||
method Bool lampAmberW();
|
||||
method Bool lampGreenW();
|
||||
|
||||
method Bool lampRedPed();
|
||||
method Bool lampAmberPed();
|
||||
method Bool lampGreenPed();
|
||||
endinterface: TL
|
||||
|
||||
typedef enum {
|
||||
AllRed,
|
||||
GreenNS, AmberNS,
|
||||
GreenE, AmberE,
|
||||
GreenW, AmberW,
|
||||
GreenPed, AmberPed} TLstates deriving (Eq, Bits);
|
||||
|
||||
typedef UInt#(5) Time32;
|
||||
typedef UInt#(20) CtrSize;
|
||||
|
||||
(* synthesize *)
|
||||
module sysTL(TL);
|
||||
Time32 allRedDelay = 2;
|
||||
Time32 amberDelay = 4;
|
||||
Time32 nsGreenDelay = 20;
|
||||
Time32 ewGreenDelay = 10;
|
||||
Time32 pedGreenDelay = 10;
|
||||
Time32 pedAmberDelay = 6;
|
||||
|
||||
CtrSize clocks_per_sec = 100;
|
||||
|
||||
Reg#(TLstates) state <- mkReg(AllRed);
|
||||
Reg#(TLstates) next_green <- mkReg(GreenNS);
|
||||
Reg#(Time32) secs <- mkReg(0);
|
||||
Reg#(Bool) ped_button_pushed <- mkReg(False);
|
||||
Reg#(Bool) car_present_N <- mkReg(True);
|
||||
Reg#(Bool) car_present_S <- mkReg(True);
|
||||
Reg#(Bool) car_present_E <- mkReg(True);
|
||||
Reg#(Bool) car_present_W <- mkReg(True);
|
||||
Bool car_present_NS = car_present_N || car_present_S;
|
||||
Reg#(CtrSize) cycle_ctr <- mkReg(0);
|
||||
|
||||
rule dec_cycle_ctr (cycle_ctr != 0);
|
||||
cycle_ctr <= cycle_ctr - 1;
|
||||
endrule
|
||||
|
||||
Rules low_priority_rule = (rules
|
||||
rule inc_sec (cycle_ctr == 0);
|
||||
secs <= secs + 1;
|
||||
cycle_ctr <= clocks_per_sec;
|
||||
endrule endrules);
|
||||
|
||||
function Action next_state(TLstates ns);
|
||||
action
|
||||
state <= ns;
|
||||
secs <= 0;
|
||||
endaction
|
||||
endfunction: next_state
|
||||
|
||||
function TLstates green_seq(TLstates x);
|
||||
case (x)
|
||||
GreenNS: return (GreenE);
|
||||
GreenE: return (GreenW);
|
||||
GreenW: return (GreenNS);
|
||||
endcase
|
||||
endfunction
|
||||
|
||||
function Bool car_present(TLstates x);
|
||||
case (x)
|
||||
GreenNS: return (car_present_NS);
|
||||
GreenE: return (car_present_E);
|
||||
GreenW: return (car_present_W);
|
||||
endcase
|
||||
endfunction
|
||||
|
||||
function Rules make_from_green_rule(TLstates green_state, Time32 delay, Bool car_is_present, TLstates ns);
|
||||
return (rules
|
||||
rule from_green (state == green_state && (secs >= delay || !car_is_present));
|
||||
next_state(ns);
|
||||
endrule endrules);
|
||||
endfunction: make_from_green_rule
|
||||
|
||||
function Rules make_from_amber_rule(TLstates amber_state, TLstates ng);
|
||||
return (rules
|
||||
rule from_amber (state == amber_state && secs >= amberDelay);
|
||||
next_state(AllRed);
|
||||
next_green <= ng;
|
||||
endrule endrules);
|
||||
endfunction: make_from_amber_rule
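make_from_green_rule and make_from_amber_rule above stamp out one transition rule per traffic-light phase; a hypothetical Python closure factory showing the same parameterisation of a transition predicate (this is only an analogy for the factory pattern, not how Bluespec rules are scheduled or executed):

def make_from_green_rule(green_state, delay, car_is_present):
    # Leave the green phase once it has timed out or no car is waiting.
    def should_leave(state, secs):
        return state == green_state and (secs >= delay or not car_is_present())
    return should_leave

leave_green_ns = make_from_green_rule("GreenNS", 20, lambda: True)
print(leave_green_ns("GreenNS", 21))   # True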
|
||||
|
||||
Rules hprs[7];
|
||||
|
||||
hprs[1] = make_from_green_rule(GreenNS, nsGreenDelay, car_present_NS, AmberNS);
|
||||
hprs[2] = make_from_amber_rule(AmberNS, GreenE);
|
||||
hprs[3] = make_from_green_rule(GreenE, ewGreenDelay, car_present_E, AmberE);
|
||||
hprs[4] = make_from_amber_rule(AmberE, GreenW);
|
||||
hprs[5] = make_from_green_rule(GreenW, ewGreenDelay, car_present_W, AmberW);
|
||||
hprs[6] = make_from_amber_rule(AmberW, GreenNS);
|
||||
|
||||
hprs[0] = (rules
|
||||
rule fromAllRed (state == AllRed && secs >= allRedDelay);
|
||||
if (ped_button_pushed) action
|
||||
ped_button_pushed <= False;
|
||||
next_state(GreenPed);
|
||||
endaction else if (car_present(next_green))
|
||||
next_state(next_green);
|
||||
else if (car_present(green_seq(next_green)))
|
||||
next_state(green_seq(next_green));
|
||||
else if (car_present(green_seq(green_seq(next_green))))
|
||||
next_state(green_seq(green_seq(next_green)));
|
||||
else
|
||||
noAction;
|
||||
endrule: fromAllRed endrules);
|
||||
|
||||
Rules high_priority_rules = hprs[0];
|
||||
for (Integer i = 1; i<7; i=i+1)
|
||||
high_priority_rules = rJoin(hprs[i], high_priority_rules);
|
||||
|
||||
addRules(preempts(high_priority_rules, low_priority_rule));
|
||||
|
||||
method Action ped_button_push();
|
||||
ped_button_pushed <= True;
|
||||
endmethod: ped_button_push
|
||||
|
||||
method Action set_car_state_N(b) ; car_present_N <= b; endmethod
|
||||
method Action set_car_state_S(b) ; car_present_S <= b; endmethod
|
||||
method Action set_car_state_E(b) ; car_present_E <= b; endmethod
|
||||
method Action set_car_state_W(b) ; car_present_W <= b; endmethod
|
||||
|
||||
method lampRedNS() = (!(state == GreenNS || state == AmberNS));
|
||||
method lampAmberNS() = (state == AmberNS);
|
||||
method lampGreenNS() = (state == GreenNS);
|
||||
method lampRedE() = (!(state == GreenE || state == AmberE));
|
||||
method lampAmberE() = (state == AmberE);
|
||||
method lampGreenE() = (state == GreenE);
|
||||
method lampRedW() = (!(state == GreenW || state == AmberW));
|
||||
method lampAmberW() = (state == AmberW);
|
||||
method lampGreenW() = (state == GreenW);
|
||||
|
||||
method lampRedPed() = (!(state == GreenPed || state == AmberPed));
|
||||
method lampAmberPed() = (state == AmberPed);
|
||||
method lampGreenPed() = (state == GreenPed);
|
||||
|
||||
endmodule: sysTL
|
||||
|
||||
endpackage: TL
|
||||
109
samples/Bluespec/TbTL.bsv
Normal file
@@ -0,0 +1,109 @@
|
||||
package TbTL;
|
||||
|
||||
import TL::*;
|
||||
|
||||
interface Lamp;
|
||||
method Bool changed;
|
||||
method Action show_offs;
|
||||
method Action show_ons;
|
||||
method Action reset;
|
||||
endinterface
|
||||
|
||||
module mkLamp#(String name, Bool lamp)(Lamp);
|
||||
Reg#(Bool) prev <- mkReg(False);
|
||||
|
||||
method changed = (prev != lamp);
|
||||
|
||||
method Action show_offs;
|
||||
if (prev && !lamp)
|
||||
$write (name + " off, ");
|
||||
endmethod
|
||||
|
||||
method Action show_ons;
|
||||
if (!prev && lamp)
|
||||
$write (name + " on, ");
|
||||
endmethod
|
||||
|
||||
method Action reset;
|
||||
prev <= lamp;
|
||||
endmethod
|
||||
endmodule
|
||||
|
||||
|
||||
(* synthesize *)
|
||||
module mkTest();
|
||||
let dut <- sysTL;
|
||||
|
||||
Reg#(Bit#(16)) ctr <- mkReg(0);
|
||||
|
||||
Reg#(Bool) carN <- mkReg(False);
|
||||
Reg#(Bool) carS <- mkReg(False);
|
||||
Reg#(Bool) carE <- mkReg(False);
|
||||
Reg#(Bool) carW <- mkReg(False);
|
||||
|
||||
Lamp lamps[12];
|
||||
|
||||
lamps[0] <- mkLamp("0: NS red ", dut.lampRedNS);
|
||||
lamps[1] <- mkLamp("1: NS amber", dut.lampAmberNS);
|
||||
lamps[2] <- mkLamp("2: NS green", dut.lampGreenNS);
|
||||
lamps[3] <- mkLamp("3: E red ", dut.lampRedE);
|
||||
lamps[4] <- mkLamp("4: E amber", dut.lampAmberE);
|
||||
lamps[5] <- mkLamp("5: E green", dut.lampGreenE);
|
||||
lamps[6] <- mkLamp("6: W red ", dut.lampRedW);
|
||||
lamps[7] <- mkLamp("7: W amber", dut.lampAmberW);
|
||||
lamps[8] <- mkLamp("8: W green", dut.lampGreenW);
|
||||
|
||||
lamps[9] <- mkLamp("9: Ped red ", dut.lampRedPed);
|
||||
lamps[10] <- mkLamp("10: Ped amber", dut.lampAmberPed);
|
||||
lamps[11] <- mkLamp("11: Ped green", dut.lampGreenPed);
|
||||
|
||||
rule start (ctr == 0);
|
||||
$dumpvars;
|
||||
endrule
|
||||
|
||||
rule detect_cars;
|
||||
dut.set_car_state_N(carN);
|
||||
dut.set_car_state_S(carS);
|
||||
dut.set_car_state_E(carE);
|
||||
dut.set_car_state_W(carW);
|
||||
endrule
|
||||
|
||||
rule go;
|
||||
ctr <= ctr + 1;
|
||||
if (ctr == 5000) carN <= True;
|
||||
if (ctr == 6500) carN <= False;
|
||||
if (ctr == 12_000) dut.ped_button_push;
|
||||
endrule
|
||||
|
||||
rule stop (ctr > 32768);
|
||||
$display("TESTS FINISHED");
|
||||
$finish(0);
|
||||
endrule
|
||||
|
||||
function do_offs(l) = l.show_offs;
|
||||
function do_ons(l) = l.show_ons;
|
||||
function do_reset(l) = l.reset;
|
||||
|
||||
function do_it(f);
|
||||
action
|
||||
for (Integer i=0; i<12; i=i+1)
|
||||
f(lamps[i]);
|
||||
endaction
|
||||
endfunction
|
||||
|
||||
function any_changes();
|
||||
Bool b = False;
|
||||
for (Integer i=0; i<12; i=i+1)
|
||||
b = b || lamps[i].changed;
|
||||
return b;
|
||||
endfunction
|
||||
|
||||
rule show (any_changes());
|
||||
do_it(do_offs);
|
||||
do_it(do_ons);
|
||||
do_it(do_reset);
|
||||
$display("(at time %d)", $time);
|
||||
endrule
|
||||
endmodule
|
||||
|
||||
endpackage
|
||||
305
samples/Brightscript/SimpleGrid.brs
Normal file
@@ -0,0 +1,305 @@
|
||||
' *********************************************************
|
||||
' ** Simple Grid Screen Demonstration App
|
||||
' ** Jun 2010
|
||||
' ** Copyright (c) 2010 Roku Inc. All Rights Reserved.
|
||||
' *********************************************************
|
||||
|
||||
'************************************************************
|
||||
'** Application startup
|
||||
'************************************************************
|
||||
Sub Main()
|
||||
|
||||
'initialize theme attributes like titles, logos and overhang color
|
||||
initTheme()
|
||||
|
||||
gridstyle = "Flat-Movie"
|
||||
|
||||
'set to go, time to get started
|
||||
while gridstyle <> ""
|
||||
print "starting grid style= ";gridstyle
|
||||
screen=preShowGridScreen(gridstyle)
|
||||
gridstyle = showGridScreen(screen, gridstyle)
|
||||
end while
|
||||
|
||||
End Sub
|
||||
|
||||
|
||||
'*************************************************************
|
||||
'** Set the configurable theme attributes for the application
|
||||
'**
|
||||
'** Configure the custom overhang and Logo attributes
|
||||
'** These attributes affect the branding of the application
|
||||
'** and are artwork, colors and offsets specific to the app
|
||||
'*************************************************************
|
||||
|
||||
Sub initTheme()
|
||||
app = CreateObject("roAppManager")
|
||||
app.SetTheme(CreateDefaultTheme())
|
||||
End Sub
|
||||
|
||||
'******************************************************
|
||||
'** @return The default application theme.
|
||||
'** Screens can make slight adjustments to the default
|
||||
'** theme by getting it from here and then overriding
|
||||
'** individual theme attributes.
|
||||
'******************************************************
|
||||
Function CreateDefaultTheme() as Object
|
||||
theme = CreateObject("roAssociativeArray")
|
||||
|
||||
theme.ThemeType = "generic-dark"
|
||||
|
||||
' All these are greyscales
|
||||
theme.GridScreenBackgroundColor = "#363636"
|
||||
theme.GridScreenMessageColor = "#808080"
|
||||
theme.GridScreenRetrievingColor = "#CCCCCC"
|
||||
theme.GridScreenListNameColor = "#FFFFFF"
|
||||
|
||||
' Color values work here
|
||||
theme.GridScreenDescriptionTitleColor = "#001090"
|
||||
theme.GridScreenDescriptionDateColor = "#FF005B"
|
||||
theme.GridScreenDescriptionRuntimeColor = "#5B005B"
|
||||
theme.GridScreenDescriptionSynopsisColor = "#606000"
|
||||
|
||||
'used in the Grid Screen
|
||||
theme.CounterTextLeft = "#FF0000"
|
||||
theme.CounterSeparator = "#00FF00"
|
||||
theme.CounterTextRight = "#0000FF"
|
||||
|
||||
theme.GridScreenLogoHD = "pkg:/images/Overhang_Test_HD.png"
|
||||
|
||||
theme.GridScreenLogoOffsetHD_X = "0"
|
||||
theme.GridScreenLogoOffsetHD_Y = "0"
|
||||
theme.GridScreenOverhangHeightHD = "99"
|
||||
|
||||
theme.GridScreenLogoSD = "pkg:/images/Overhang_Test_SD43.png"
|
||||
theme.GridScreenOverhangHeightSD = "66"
|
||||
theme.GridScreenLogoOffsetSD_X = "0"
|
||||
theme.GridScreenLogoOffsetSD_Y = "0"
|
||||
|
||||
' to use your own focus ring artwork
|
||||
'theme.GridScreenFocusBorderSD = "pkg:/images/GridCenter_Border_Movies_SD43.png"
|
||||
'theme.GridScreenBorderOffsetSD = "(-26,-25)"
|
||||
'theme.GridScreenFocusBorderHD = "pkg:/images/GridCenter_Border_Movies_HD.png"
|
||||
'theme.GridScreenBorderOffsetHD = "(-28,-20)"
|
||||
|
||||
' to use your own description background artwork
|
||||
'theme.GridScreenDescriptionImageSD = "pkg:/images/Grid_Description_Background_SD43.png"
|
||||
'theme.GridScreenDescriptionOffsetSD = "(125,170)"
|
||||
'theme.GridScreenDescriptionImageHD = "pkg:/images/Grid_Description_Background_HD.png"
|
||||
'theme.GridScreenDescriptionOffsetHD = "(190,255)"
|
||||
|
||||
|
||||
return theme
|
||||
End Function
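The comment block above notes that screens start from the default theme and override individual attributes as needed; a tiny Python illustration of that pattern (the dictionary keys mirror a few of the theme attributes above, the rest is made up):

DEFAULT_THEME = {
    "ThemeType": "generic-dark",
    "GridScreenBackgroundColor": "#363636",
    "GridScreenMessageColor": "#808080",
}

def theme_for_screen(**overrides):
    # Copy the defaults, then apply this screen's adjustments on top.
    theme = dict(DEFAULT_THEME)
    theme.update(overrides)
    return theme

print(theme_for_screen(GridScreenBackgroundColor="#000000"))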
|
||||
|
||||
'******************************************************
|
||||
'** Perform any startup/initialization stuff prior to
|
||||
'** initially showing the screen.
|
||||
'******************************************************
|
||||
Function preShowGridScreen(style as string) As Object
|
||||
|
||||
m.port=CreateObject("roMessagePort")
|
||||
screen = CreateObject("roGridScreen")
|
||||
screen.SetMessagePort(m.port)
|
||||
' screen.SetDisplayMode("best-fit")
|
||||
screen.SetDisplayMode("scale-to-fill")
|
||||
|
||||
screen.SetGridStyle(style)
|
||||
return screen
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
'******************************************************
|
||||
'** Display the grid screen and wait for events from
|
||||
'** the screen. The screen will show retrieving while
|
||||
'** we fetch and parse the feeds for the show posters
|
||||
'******************************************************
|
||||
Function showGridScreen(screen As Object, gridstyle as string) As string
|
||||
|
||||
print "enter showGridScreen"
|
||||
|
||||
categoryList = getCategoryList()
|
||||
categoryList[0] = "GridStyle: " + gridstyle
|
||||
screen.setupLists(categoryList.count())
|
||||
screen.SetListNames(categoryList)
|
||||
StyleButtons = getGridControlButtons()
|
||||
screen.SetContentList(0, StyleButtons)
|
||||
for i = 1 to categoryList.count()-1
|
||||
screen.SetContentList(i, getShowsForCategoryItem(categoryList[i]))
|
||||
end for
|
||||
screen.Show()
|
||||
|
||||
while true
|
||||
print "Waiting for message"
|
||||
msg = wait(0, m.port)
|
||||
'msg = wait(0, screen.GetMessagePort()) ' getmessageport does not work on gridscreen
|
||||
print "Got Message:";type(msg)
|
||||
if type(msg) = "roGridScreenEvent" then
|
||||
print "msg= "; msg.GetMessage() " , index= "; msg.GetIndex(); " data= "; msg.getData()
|
||||
if msg.isListItemFocused() then
|
||||
print"list item focused | current show = "; msg.GetIndex()
|
||||
else if msg.isListItemSelected() then
|
||||
row = msg.GetIndex()
|
||||
selection = msg.getData()
|
||||
print "list item selected row= "; row; " selection= "; selection
|
||||
|
||||
' Did we get a selection from the gridstyle selection row?
|
||||
if (row = 0)
|
||||
' yes, return so we can come back with new style
|
||||
return StyleButtons[selection].Title
|
||||
endif
|
||||
|
||||
'm.curShow = displayShowDetailScreen(showList[msg.GetIndex()])
|
||||
else if msg.isScreenClosed() then
|
||||
return ""
|
||||
end if
|
||||
end If
|
||||
end while
|
||||
|
||||
|
||||
End Function
|
||||
|
||||
'**********************************************************
|
||||
'** When a poster on the home screen is selected, we call
|
||||
'** this function passing an roAssociativeArray with the
|
||||
'** ContentMetaData for the selected show. This data should
|
||||
'** be sufficient for the springboard to display
|
||||
'**********************************************************
|
||||
Function displayShowDetailScreen(category as Object, showIndex as Integer) As Integer
|
||||
|
||||
'add code to create springboard, for now we do nothing
|
||||
return 1
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
'**************************************************************
|
||||
'** Return the list of categories to display in the filter
|
||||
'** banner. The result is an roArray containing the names of
|
||||
'** all of the categories. All just static data for the example.
|
||||
'***************************************************************
|
||||
Function getCategoryList() As Object
|
||||
|
||||
categoryList = [ "GridStyle", "Reality", "History", "News", "Comedy", "Drama"]
|
||||
return categoryList
|
||||
|
||||
End Function
|
||||
|
||||
|
||||
'********************************************************************
|
||||
'** Given the category from the filter banner, return an array
|
||||
'** of ContentMetaData objects (roAssociativeArray's) representing
|
||||
'** the shows for the category. For this example, we just cheat and
|
||||
'** create and return a static array with just the minimal items
|
||||
'** set, but ideally, you'd go to a feed service, fetch and parse
|
||||
'** this data dynamically, so content for each category is dynamic
|
||||
'********************************************************************
|
||||
Function getShowsForCategoryItem(category As Object) As Object
|
||||
|
||||
print "getting shows for category "; category
|
||||
|
||||
showList = [
|
||||
{
|
||||
Title: category + ": Header",
|
||||
releaseDate: "1976",
|
||||
length: 3600-600,
|
||||
Description:"This row is category " + category,
|
||||
hdBranded: true,
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/4/43/Gold_star_on_blue.gif",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/4/43/Gold_star_on_blue.gif",
|
||||
Description:"Short Synopsis #1",
|
||||
Synopsis:"Length",
|
||||
StarRating:10,
|
||||
}
|
||||
{
|
||||
Title: category + ": Beverly Hillbillies",
|
||||
releaseDate: "1969",
|
||||
rating: "PG",
|
||||
Description:"Come and listen to a story about a man named Jed: Poor mountaineer, barely kept his family fed. Then one day he was shootin at some food, and up through the ground came a bubblin crude. Oil that is, black gold, Texas tea.",
|
||||
numEpisodes:42,
|
||||
contentType:"season",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/4/4e/The_Beverly_Hillbillies.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/4/4e/The_Beverly_Hillbillies.jpg",
|
||||
StarRating:80,
|
||||
UserStarRating:40
|
||||
}
|
||||
{
|
||||
Title: category + ": Babylon 5",
|
||||
releaseDate: "1996",
|
||||
rating: "PG",
|
||||
Description:"The show centers on the Babylon 5 space station: a focal point for politics, diplomacy, and conflict during the years 2257-2262.",
|
||||
numEpisodes:102,
|
||||
contentType:"season",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/9/9d/Smb5-s4.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/9/9d/Smb5-s4.jpg",
|
||||
StarRating:80,
|
||||
UserStarRating:40
|
||||
}
|
||||
{
|
||||
Title: category + ": John F. Kennedy",
|
||||
releaseDate: "1961",
|
||||
rating: "PG",
|
||||
Description:"My fellow citizens of the world: ask not what America will do for you, but what together we can do for the freedom of man.",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/5/52/Jfk_happy_birthday_1.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/en/5/52/Jfk_happy_birthday_1.jpg",
|
||||
StarRating:100
|
||||
}
|
||||
{
|
||||
Title: category + ": Man on the Moon",
|
||||
releaseDate: "1969",
|
||||
rating: "PG",
|
||||
Description:"That's one small step for a man, one giant leap for mankind.",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/1/1e/Apollo_11_first_step.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/1/1e/Apollo_11_first_step.jpg",
|
||||
StarRating:100
|
||||
}
|
||||
{
|
||||
Title: category + ": I have a Dream",
|
||||
releaseDate: "1963",
|
||||
rating: "PG",
|
||||
Description:"I have a dream that my four little children will one day live in a nation where they will not be judged by the color of their skin, but by the content of their character.",
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/8/81/Martin_Luther_King_-_March_on_Washington.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/8/81/Martin_Luther_King_-_March_on_Washington.jpg",
|
||||
StarRating:100
|
||||
}
|
||||
]
|
||||
|
||||
return showList
|
||||
End Function
|
||||
|
||||
function getGridControlButtons() as object
|
||||
buttons = [
|
||||
{ Title: "Flat-Movie"
|
||||
ReleaseDate: "HD:5x2 SD:5x2"
|
||||
Description: "Flat-Movie (Netflix) style"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/4/43/Gold_star_on_blue.gif"
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/4/43/Gold_star_on_blue.gif"
|
||||
}
|
||||
{ Title: "Flat-Landscape"
|
||||
ReleaseDate: "HD:5x3 SD:4x3"
|
||||
Description: "Channel Store"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/9/96/Dunkery_Hill.jpg/800px-Dunkery_Hill.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/9/96/Dunkery_Hill.jpg/800px-Dunkery_Hill.jpg",
|
||||
}
|
||||
{ Title: "Flat-Portrait"
|
||||
ReleaseDate: "HD:5x2 SD:5x2"
|
||||
Description: "3x4 style posters"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/9/9f/Kane_George_Gurnett.jpg",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/9/9f/Kane_George_Gurnett.jpg",
|
||||
}
|
||||
{ Title: "Flat-Square"
|
||||
ReleaseDate: "HD:7x3 SD:6x3"
|
||||
Description: "1x1 style posters"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/d/de/SQUARE_SHAPE.svg/536px-SQUARE_SHAPE.svg.png",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/d/de/SQUARE_SHAPE.svg/536px-SQUARE_SHAPE.svg.png",
|
||||
}
|
||||
{ Title: "Flat-16x9"
|
||||
ReleaseDate: "HD:5x3 SD:4x3"
|
||||
Description: "HD style posters"
|
||||
HDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/2/22/%C3%89cran_TV_plat.svg/200px-%C3%89cran_TV_plat.svg.png",
|
||||
SDPosterUrl:"http://upload.wikimedia.org/wikipedia/commons/thumb/2/22/%C3%89cran_TV_plat.svg/200px-%C3%89cran_TV_plat.svg.png",
|
||||
}
|
||||
]
|
||||
return buttons
|
||||
End Function
|
||||
@@ -1,39 +0,0 @@
|
||||
void foo()
|
||||
{
|
||||
cudaArray* cu_array;
|
||||
texture<float, 2, cudaReadModeElementType> tex;
|
||||
|
||||
// Allocate array
|
||||
cudaChannelFormatDesc description = cudaCreateChannelDesc<float>();
|
||||
cudaMallocArray(&cu_array, &description, width, height);
|
||||
|
||||
// Copy image data to array
|
||||
cudaMemcpyToArray(cu_array, image, width*height*sizeof(float), cudaMemcpyHostToDevice);
|
||||
|
||||
// Set texture parameters (default)
|
||||
tex.addressMode[0] = cudaAddressModeClamp;
|
||||
tex.addressMode[1] = cudaAddressModeClamp;
|
||||
tex.filterMode = cudaFilterModePoint;
|
||||
tex.normalized = false; // do not normalize coordinates
|
||||
|
||||
// Bind the array to the texture
|
||||
cudaBindTextureToArray(tex, cu_array);
|
||||
|
||||
// Run kernel
|
||||
dim3 blockDim(16, 16, 1);
|
||||
dim3 gridDim((width + blockDim.x - 1)/ blockDim.x, (height + blockDim.y - 1) / blockDim.y, 1);
|
||||
kernel<<< gridDim, blockDim, 0 >>>(d_data, height, width);
|
||||
|
||||
// Unbind the array from the texture
|
||||
cudaUnbindTexture(tex);
|
||||
} //end foo()
|
||||
|
||||
__global__ void kernel(float* odata, int height, int width)
|
||||
{
|
||||
unsigned int x = blockIdx.x*blockDim.x + threadIdx.x;
|
||||
unsigned int y = blockIdx.y*blockDim.y + threadIdx.y;
|
||||
if (x < width && y < height) {
|
||||
float c = tex2D(tex, x, y);
|
||||
odata[y*width+x] = c;
|
||||
}
|
||||
}
|
||||
69
samples/C++/gdsdbreader.h
Normal file
@@ -0,0 +1,69 @@
|
||||
#ifndef GDSDBREADER_H
|
||||
#define GDSDBREADER_H
|
||||
|
||||
// This file contains core structures, classes and types for the entire gds app
|
||||
// WARNING: DO NOT MODIFY UNLESS IT'S STRICTLY NECESSARY
|
||||
|
||||
#include <QDir>
|
||||
#include "diagramwidget/qgldiagramwidget.h"
|
||||
|
||||
#define GDS_DIR "gdsdata"
|
||||
|
||||
enum level {LEVEL_ONE, LEVEL_TWO, LEVEL_THREE};
|
||||
|
||||
// The internal structure of the db to store information about each node (each level)
|
||||
// this will be serialized before being written to file
|
||||
class dbDataStructure
|
||||
{
|
||||
public:
|
||||
QString label;
|
||||
quint32 depth;
|
||||
quint32 userIndex;
|
||||
QByteArray data; // This is COMPRESSED data, optimize ram and disk space, is decompressed
|
||||
// just when needed (to display the comments)
|
||||
|
||||
// The following ID is used to create second-third level files
|
||||
quint64 uniqueID;
|
||||
// All the next items linked to this one
|
||||
QVector<dbDataStructure*> nextItems;
|
||||
// Corresponding indices vector (used to store data)
|
||||
QVector<quint32> nextItemsIndices;
|
||||
// The father element (or NULL if it's root)
|
||||
dbDataStructure* father;
|
||||
// Corresponding indices vector (used to store data)
|
||||
quint32 fatherIndex;
|
||||
bool noFatherRoot; // Used to tell if this node is the root (i.e. it has no father)
|
||||
|
||||
// These fields will be useful for levels 2 and 3
|
||||
QString fileName; // Relative filename for the associated code file
|
||||
QByteArray firstLineData; // Compressed first line data, this will be used with the line number to retrieve info
|
||||
QVector<quint32> linesNumbers; // First and next lines (next are relative to the first) numbers
|
||||
|
||||
// -- Generic system data not to be stored on disk
|
||||
void *glPointer; // GL pointer
|
||||
|
||||
// These operator overrides prevent the glPointer and other non-disk-necessary data serialization
|
||||
friend QDataStream& operator<<(QDataStream& stream, const dbDataStructure& myclass)
|
||||
// Notice: this function has to be "friend" because it cannot be a member function, member functions
|
||||
// have an additional parameter "this" which isn't in the argument list of an operator overload. A friend
|
||||
// function has full access to private data of the class without having the "this" argument
|
||||
{
|
||||
// Don't write glPointer and every pointer-dependent structure
|
||||
return stream << myclass.label << myclass.depth << myclass.userIndex << qCompress(myclass.data)
|
||||
<< myclass.uniqueID << myclass.nextItemsIndices << myclass.fatherIndex << myclass.noFatherRoot
|
||||
<< myclass.fileName << qCompress(myclass.firstLineData) << myclass.linesNumbers;
|
||||
}
|
||||
friend QDataStream& operator>>(QDataStream& stream, dbDataStructure& myclass)
|
||||
{
|
||||
//Don't read it, either
|
||||
stream >> myclass.label >> myclass.depth >> myclass.userIndex >> myclass.data
|
||||
>> myclass.uniqueID >> myclass.nextItemsIndices >> myclass.fatherIndex >> myclass.noFatherRoot
|
||||
>> myclass.fileName >> myclass.firstLineData >> myclass.linesNumbers;
|
||||
myclass.data = qUncompress(myclass.data);
|
||||
myclass.firstLineData = qUncompress(myclass.firstLineData);
|
||||
return stream;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
#endif // GDSDBREADER_H
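The stream operators above compress the bulky data fields on write, decompress them on read, and deliberately skip the runtime-only glPointer; a rough Python sketch of the same serialization policy using zlib (field names are trimmed down for illustration, not taken from the header):

import zlib

def serialize(node):
    # Store the payload compressed; never persist the runtime-only pointer.
    return {
        "label": node["label"],
        "depth": node["depth"],
        "data": zlib.compress(node["data"]),
        # "glPointer" is deliberately omitted
    }

def deserialize(record):
    node = dict(record)
    node["data"] = zlib.decompress(record["data"])
    node["glPointer"] = None   # rebuilt at runtime, as in the C++ class
    return node

rec = serialize({"label": "root", "depth": 0, "data": b"comment text" * 100})
print(len(rec["data"]), "compressed bytes")
print(deserialize(rec)["data"][:12])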
|
||||
327
samples/C++/protocol-buffer.pb.cc
Normal file
@@ -0,0 +1,327 @@
|
||||
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: protocol-buffer.proto
|
||||
|
||||
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
|
||||
#include "protocol-buffer.pb.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/stubs/once.h>
|
||||
#include <google/protobuf/io/coded_stream.h>
|
||||
#include <google/protobuf/wire_format_lite_inl.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/generated_message_reflection.h>
|
||||
#include <google/protobuf/reflection_ops.h>
|
||||
#include <google/protobuf/wire_format.h>
|
||||
// @@protoc_insertion_point(includes)
|
||||
|
||||
namespace persons {
|
||||
|
||||
namespace {
|
||||
|
||||
const ::google::protobuf::Descriptor* Person_descriptor_ = NULL;
|
||||
const ::google::protobuf::internal::GeneratedMessageReflection*
|
||||
Person_reflection_ = NULL;
|
||||
|
||||
} // namespace
|
||||
|
||||
|
||||
void protobuf_AssignDesc_protocol_2dbuffer_2eproto() {
|
||||
protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
const ::google::protobuf::FileDescriptor* file =
|
||||
::google::protobuf::DescriptorPool::generated_pool()->FindFileByName(
|
||||
"protocol-buffer.proto");
|
||||
GOOGLE_CHECK(file != NULL);
|
||||
Person_descriptor_ = file->message_type(0);
|
||||
static const int Person_offsets_[1] = {
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Person, name_),
|
||||
};
|
||||
Person_reflection_ =
|
||||
new ::google::protobuf::internal::GeneratedMessageReflection(
|
||||
Person_descriptor_,
|
||||
Person::default_instance_,
|
||||
Person_offsets_,
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Person, _has_bits_[0]),
|
||||
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Person, _unknown_fields_),
|
||||
-1,
|
||||
::google::protobuf::DescriptorPool::generated_pool(),
|
||||
::google::protobuf::MessageFactory::generated_factory(),
|
||||
sizeof(Person));
|
||||
}
|
||||
|
||||
namespace {
|
||||
|
||||
GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_);
|
||||
inline void protobuf_AssignDescriptorsOnce() {
|
||||
::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_,
|
||||
&protobuf_AssignDesc_protocol_2dbuffer_2eproto);
|
||||
}
|
||||
|
||||
void protobuf_RegisterTypes(const ::std::string&) {
|
||||
protobuf_AssignDescriptorsOnce();
|
||||
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
|
||||
Person_descriptor_, &Person::default_instance());
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
void protobuf_ShutdownFile_protocol_2dbuffer_2eproto() {
|
||||
delete Person::default_instance_;
|
||||
delete Person_reflection_;
|
||||
}
|
||||
|
||||
void protobuf_AddDesc_protocol_2dbuffer_2eproto() {
|
||||
static bool already_here = false;
|
||||
if (already_here) return;
|
||||
already_here = true;
|
||||
GOOGLE_PROTOBUF_VERIFY_VERSION;
|
||||
|
||||
::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
|
||||
"\n\025protocol-buffer.proto\022\007persons\"\026\n\006Pers"
|
||||
"on\022\014\n\004name\030\001 \002(\t", 56);
|
||||
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
|
||||
"protocol-buffer.proto", &protobuf_RegisterTypes);
|
||||
Person::default_instance_ = new Person();
|
||||
Person::default_instance_->InitAsDefaultInstance();
|
||||
::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_protocol_2dbuffer_2eproto);
|
||||
}
|
||||
|
||||
// Force AddDescriptors() to be called at static initialization time.
|
||||
struct StaticDescriptorInitializer_protocol_2dbuffer_2eproto {
|
||||
StaticDescriptorInitializer_protocol_2dbuffer_2eproto() {
|
||||
protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
}
|
||||
} static_descriptor_initializer_protocol_2dbuffer_2eproto_;
|
||||
|
||||
// ===================================================================
|
||||
|
||||
#ifndef _MSC_VER
|
||||
const int Person::kNameFieldNumber;
|
||||
#endif // !_MSC_VER
|
||||
|
||||
Person::Person()
|
||||
: ::google::protobuf::Message() {
|
||||
SharedCtor();
|
||||
}
|
||||
|
||||
void Person::InitAsDefaultInstance() {
|
||||
}
|
||||
|
||||
Person::Person(const Person& from)
|
||||
: ::google::protobuf::Message() {
|
||||
SharedCtor();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
void Person::SharedCtor() {
|
||||
_cached_size_ = 0;
|
||||
name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
|
||||
::memset(_has_bits_, 0, sizeof(_has_bits_));
|
||||
}
|
||||
|
||||
Person::~Person() {
|
||||
SharedDtor();
|
||||
}
|
||||
|
||||
void Person::SharedDtor() {
|
||||
if (name_ != &::google::protobuf::internal::kEmptyString) {
|
||||
delete name_;
|
||||
}
|
||||
if (this != default_instance_) {
|
||||
}
|
||||
}
|
||||
|
||||
void Person::SetCachedSize(int size) const {
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
|
||||
_cached_size_ = size;
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_END();
|
||||
}
|
||||
const ::google::protobuf::Descriptor* Person::descriptor() {
|
||||
protobuf_AssignDescriptorsOnce();
|
||||
return Person_descriptor_;
|
||||
}
|
||||
|
||||
const Person& Person::default_instance() {
|
||||
if (default_instance_ == NULL) protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
return *default_instance_;
|
||||
}
|
||||
|
||||
Person* Person::default_instance_ = NULL;
|
||||
|
||||
Person* Person::New() const {
|
||||
return new Person;
|
||||
}
|
||||
|
||||
void Person::Clear() {
|
||||
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
|
||||
if (has_name()) {
|
||||
if (name_ != &::google::protobuf::internal::kEmptyString) {
|
||||
name_->clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
::memset(_has_bits_, 0, sizeof(_has_bits_));
|
||||
mutable_unknown_fields()->Clear();
|
||||
}
|
||||
|
||||
bool Person::MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input) {
|
||||
#define DO_(EXPRESSION) if (!(EXPRESSION)) return false
|
||||
::google::protobuf::uint32 tag;
|
||||
while ((tag = input->ReadTag()) != 0) {
|
||||
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
|
||||
// required string name = 1;
|
||||
case 1: {
|
||||
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
|
||||
::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
|
||||
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
|
||||
input, this->mutable_name()));
|
||||
::google::protobuf::internal::WireFormat::VerifyUTF8String(
|
||||
this->name().data(), this->name().length(),
|
||||
::google::protobuf::internal::WireFormat::PARSE);
|
||||
} else {
|
||||
goto handle_uninterpreted;
|
||||
}
|
||||
if (input->ExpectAtEnd()) return true;
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
handle_uninterpreted:
|
||||
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
|
||||
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
|
||||
return true;
|
||||
}
|
||||
DO_(::google::protobuf::internal::WireFormat::SkipField(
|
||||
input, tag, mutable_unknown_fields()));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
#undef DO_
|
||||
}
|
||||
|
||||
void Person::SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const {
|
||||
// required string name = 1;
|
||||
if (has_name()) {
|
||||
::google::protobuf::internal::WireFormat::VerifyUTF8String(
|
||||
this->name().data(), this->name().length(),
|
||||
::google::protobuf::internal::WireFormat::SERIALIZE);
|
||||
::google::protobuf::internal::WireFormatLite::WriteString(
|
||||
1, this->name(), output);
|
||||
}
|
||||
|
||||
if (!unknown_fields().empty()) {
|
||||
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
|
||||
unknown_fields(), output);
|
||||
}
|
||||
}
|
||||
|
||||
::google::protobuf::uint8* Person::SerializeWithCachedSizesToArray(
|
||||
::google::protobuf::uint8* target) const {
|
||||
// required string name = 1;
|
||||
if (has_name()) {
|
||||
::google::protobuf::internal::WireFormat::VerifyUTF8String(
|
||||
this->name().data(), this->name().length(),
|
||||
::google::protobuf::internal::WireFormat::SERIALIZE);
|
||||
target =
|
||||
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
|
||||
1, this->name(), target);
|
||||
}
|
||||
|
||||
if (!unknown_fields().empty()) {
|
||||
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
|
||||
unknown_fields(), target);
|
||||
}
|
||||
return target;
|
||||
}
|
||||
|
||||
int Person::ByteSize() const {
|
||||
int total_size = 0;
|
||||
|
||||
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
|
||||
// required string name = 1;
|
||||
if (has_name()) {
|
||||
total_size += 1 +
|
||||
::google::protobuf::internal::WireFormatLite::StringSize(
|
||||
this->name());
|
||||
}
|
||||
|
||||
}
|
||||
if (!unknown_fields().empty()) {
|
||||
total_size +=
|
||||
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
|
||||
unknown_fields());
|
||||
}
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
|
||||
_cached_size_ = total_size;
|
||||
GOOGLE_SAFE_CONCURRENT_WRITES_END();
|
||||
return total_size;
|
||||
}
|
||||
|
||||
void Person::MergeFrom(const ::google::protobuf::Message& from) {
|
||||
GOOGLE_CHECK_NE(&from, this);
|
||||
const Person* source =
|
||||
::google::protobuf::internal::dynamic_cast_if_available<const Person*>(
|
||||
&from);
|
||||
if (source == NULL) {
|
||||
::google::protobuf::internal::ReflectionOps::Merge(from, this);
|
||||
} else {
|
||||
MergeFrom(*source);
|
||||
}
|
||||
}
|
||||
|
||||
void Person::MergeFrom(const Person& from) {
|
||||
GOOGLE_CHECK_NE(&from, this);
|
||||
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
|
||||
if (from.has_name()) {
|
||||
set_name(from.name());
|
||||
}
|
||||
}
|
||||
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
|
||||
}
|
||||
|
||||
void Person::CopyFrom(const ::google::protobuf::Message& from) {
|
||||
if (&from == this) return;
|
||||
Clear();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
void Person::CopyFrom(const Person& from) {
|
||||
if (&from == this) return;
|
||||
Clear();
|
||||
MergeFrom(from);
|
||||
}
|
||||
|
||||
bool Person::IsInitialized() const {
|
||||
if ((_has_bits_[0] & 0x00000001) != 0x00000001) return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void Person::Swap(Person* other) {
|
||||
if (other != this) {
|
||||
std::swap(name_, other->name_);
|
||||
std::swap(_has_bits_[0], other->_has_bits_[0]);
|
||||
_unknown_fields_.Swap(&other->_unknown_fields_);
|
||||
std::swap(_cached_size_, other->_cached_size_);
|
||||
}
|
||||
}
|
||||
|
||||
::google::protobuf::Metadata Person::GetMetadata() const {
|
||||
protobuf_AssignDescriptorsOnce();
|
||||
::google::protobuf::Metadata metadata;
|
||||
metadata.descriptor = Person_descriptor_;
|
||||
metadata.reflection = Person_reflection_;
|
||||
return metadata;
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(namespace_scope)
|
||||
|
||||
} // namespace persons
|
||||
|
||||
// @@protoc_insertion_point(global_scope)
|
||||
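For orientation, a hedged sketch (not part of the generated sample): once the static initializer above has run, the descriptor built by protobuf_AssignDesc_protocol_2dbuffer_2eproto() can be inspected through the standard descriptor API:

#include <iostream>
#include "protocol-buffer.pb.h"

int main() {
    const google::protobuf::Descriptor *d = persons::Person::descriptor();
    std::cout << d->full_name() << " has " << d->field_count() << " field(s)\n";
    std::cout << "field 0: " << d->field(0)->name() << std::endl;  // "name"
    return 0;
}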
218
samples/C++/protocol-buffer.pb.h
Normal file
@@ -0,0 +1,218 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
// source: protocol-buffer.proto
|
||||
|
||||
#ifndef PROTOBUF_protocol_2dbuffer_2eproto__INCLUDED
|
||||
#define PROTOBUF_protocol_2dbuffer_2eproto__INCLUDED
|
||||
|
||||
#include <string>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
|
||||
#if GOOGLE_PROTOBUF_VERSION < 2005000
|
||||
#error This file was generated by a newer version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please update
|
||||
#error your headers.
|
||||
#endif
|
||||
#if 2005000 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
|
||||
#error This file was generated by an older version of protoc which is
|
||||
#error incompatible with your Protocol Buffer headers. Please
|
||||
#error regenerate this file with a newer version of protoc.
|
||||
#endif
|
||||
|
||||
#include <google/protobuf/generated_message_util.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/repeated_field.h>
|
||||
#include <google/protobuf/extension_set.h>
|
||||
#include <google/protobuf/unknown_field_set.h>
|
||||
// @@protoc_insertion_point(includes)
|
||||
|
||||
namespace persons {
|
||||
|
||||
// Internal implementation detail -- do not call these.
|
||||
void protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
void protobuf_AssignDesc_protocol_2dbuffer_2eproto();
|
||||
void protobuf_ShutdownFile_protocol_2dbuffer_2eproto();
|
||||
|
||||
class Person;
|
||||
|
||||
// ===================================================================
|
||||
|
||||
class Person : public ::google::protobuf::Message {
|
||||
public:
|
||||
Person();
|
||||
virtual ~Person();
|
||||
|
||||
Person(const Person& from);
|
||||
|
||||
inline Person& operator=(const Person& from) {
|
||||
CopyFrom(from);
|
||||
return *this;
|
||||
}
|
||||
|
||||
inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const {
|
||||
return _unknown_fields_;
|
||||
}
|
||||
|
||||
inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() {
|
||||
return &_unknown_fields_;
|
||||
}
|
||||
|
||||
static const ::google::protobuf::Descriptor* descriptor();
|
||||
static const Person& default_instance();
|
||||
|
||||
void Swap(Person* other);
|
||||
|
||||
// implements Message ----------------------------------------------
|
||||
|
||||
Person* New() const;
|
||||
void CopyFrom(const ::google::protobuf::Message& from);
|
||||
void MergeFrom(const ::google::protobuf::Message& from);
|
||||
void CopyFrom(const Person& from);
|
||||
void MergeFrom(const Person& from);
|
||||
void Clear();
|
||||
bool IsInitialized() const;
|
||||
|
||||
int ByteSize() const;
|
||||
bool MergePartialFromCodedStream(
|
||||
::google::protobuf::io::CodedInputStream* input);
|
||||
void SerializeWithCachedSizes(
|
||||
::google::protobuf::io::CodedOutputStream* output) const;
|
||||
::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const;
|
||||
int GetCachedSize() const { return _cached_size_; }
|
||||
private:
|
||||
void SharedCtor();
|
||||
void SharedDtor();
|
||||
void SetCachedSize(int size) const;
|
||||
public:
|
||||
|
||||
::google::protobuf::Metadata GetMetadata() const;
|
||||
|
||||
// nested types ----------------------------------------------------
|
||||
|
||||
// accessors -------------------------------------------------------
|
||||
|
||||
// required string name = 1;
|
||||
inline bool has_name() const;
|
||||
inline void clear_name();
|
||||
static const int kNameFieldNumber = 1;
|
||||
inline const ::std::string& name() const;
|
||||
inline void set_name(const ::std::string& value);
|
||||
inline void set_name(const char* value);
|
||||
inline void set_name(const char* value, size_t size);
|
||||
inline ::std::string* mutable_name();
|
||||
inline ::std::string* release_name();
|
||||
inline void set_allocated_name(::std::string* name);
|
||||
|
||||
// @@protoc_insertion_point(class_scope:persons.Person)
|
||||
private:
|
||||
inline void set_has_name();
|
||||
inline void clear_has_name();
|
||||
|
||||
::google::protobuf::UnknownFieldSet _unknown_fields_;
|
||||
|
||||
::std::string* name_;
|
||||
|
||||
mutable int _cached_size_;
|
||||
::google::protobuf::uint32 _has_bits_[(1 + 31) / 32];
|
||||
|
||||
friend void protobuf_AddDesc_protocol_2dbuffer_2eproto();
|
||||
friend void protobuf_AssignDesc_protocol_2dbuffer_2eproto();
|
||||
friend void protobuf_ShutdownFile_protocol_2dbuffer_2eproto();
|
||||
|
||||
void InitAsDefaultInstance();
|
||||
static Person* default_instance_;
|
||||
};
|
||||
// ===================================================================
|
||||
|
||||
|
||||
// ===================================================================
|
||||
|
||||
// Person
|
||||
|
||||
// required string name = 1;
|
||||
inline bool Person::has_name() const {
|
||||
return (_has_bits_[0] & 0x00000001u) != 0;
|
||||
}
|
||||
inline void Person::set_has_name() {
|
||||
_has_bits_[0] |= 0x00000001u;
|
||||
}
|
||||
inline void Person::clear_has_name() {
|
||||
_has_bits_[0] &= ~0x00000001u;
|
||||
}
|
||||
inline void Person::clear_name() {
|
||||
if (name_ != &::google::protobuf::internal::kEmptyString) {
|
||||
name_->clear();
|
||||
}
|
||||
clear_has_name();
|
||||
}
|
||||
inline const ::std::string& Person::name() const {
|
||||
return *name_;
|
||||
}
|
||||
inline void Person::set_name(const ::std::string& value) {
|
||||
set_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
name_ = new ::std::string;
|
||||
}
|
||||
name_->assign(value);
|
||||
}
|
||||
inline void Person::set_name(const char* value) {
|
||||
set_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
name_ = new ::std::string;
|
||||
}
|
||||
name_->assign(value);
|
||||
}
|
||||
inline void Person::set_name(const char* value, size_t size) {
|
||||
set_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
name_ = new ::std::string;
|
||||
}
|
||||
name_->assign(reinterpret_cast<const char*>(value), size);
|
||||
}
|
||||
inline ::std::string* Person::mutable_name() {
|
||||
set_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
name_ = new ::std::string;
|
||||
}
|
||||
return name_;
|
||||
}
|
||||
inline ::std::string* Person::release_name() {
|
||||
clear_has_name();
|
||||
if (name_ == &::google::protobuf::internal::kEmptyString) {
|
||||
return NULL;
|
||||
} else {
|
||||
::std::string* temp = name_;
|
||||
name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
|
||||
return temp;
|
||||
}
|
||||
}
|
||||
inline void Person::set_allocated_name(::std::string* name) {
|
||||
if (name_ != &::google::protobuf::internal::kEmptyString) {
|
||||
delete name_;
|
||||
}
|
||||
if (name) {
|
||||
set_has_name();
|
||||
name_ = name;
|
||||
} else {
|
||||
clear_has_name();
|
||||
name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(namespace_scope)
|
||||
|
||||
} // namespace persons
|
||||
|
||||
#ifndef SWIG
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
|
||||
} // namespace google
|
||||
} // namespace protobuf
|
||||
#endif // SWIG
|
||||
|
||||
// @@protoc_insertion_point(global_scope)
|
||||
|
||||
#endif // PROTOBUF_protocol_2dbuffer_2eproto__INCLUDED
|
||||
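A short, hedged usage sketch of the generated persons::Person message (not part of the sample); set_name() comes from the accessors declared above, while SerializeToString() and ParseFromString() are inherited from google::protobuf::Message:

#include <string>
#include "protocol-buffer.pb.h"

int main() {
    persons::Person p;
    p.set_name("Ada");                   // sets the required string field
    std::string wire;
    p.SerializeToString(&wire);          // wire-format bytes

    persons::Person q;
    if (q.ParseFromString(wire) && q.IsInitialized()) {
        // q.name() == "Ada"
    }
    return 0;
}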
415
samples/C++/qscicommand.h
Normal file
@@ -0,0 +1,415 @@
// This defines the interface to the QsciCommand class.
|
||||
//
|
||||
// Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
|
||||
//
|
||||
// This file is part of QScintilla.
|
||||
//
|
||||
// This file may be used under the terms of the GNU General Public
|
||||
// License versions 2.0 or 3.0 as published by the Free Software
|
||||
// Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
|
||||
// included in the packaging of this file. Alternatively you may (at
|
||||
// your option) use any later version of the GNU General Public
|
||||
// License if such license has been publicly approved by Riverbank
|
||||
// Computing Limited (or its successors, if any) and the KDE Free Qt
|
||||
// Foundation. In addition, as a special exception, Riverbank gives you
|
||||
// certain additional rights. These rights are described in the Riverbank
|
||||
// GPL Exception version 1.1, which can be found in the file
|
||||
// GPL_EXCEPTION.txt in this package.
|
||||
//
|
||||
// If you are unsure which license is appropriate for your use, please
|
||||
// contact the sales department at sales@riverbankcomputing.com.
|
||||
//
|
||||
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
|
||||
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
|
||||
#ifndef QSCICOMMAND_H
|
||||
#define QSCICOMMAND_H
|
||||
|
||||
#ifdef __APPLE__
|
||||
extern "C++" {
|
||||
#endif
|
||||
|
||||
#include <qstring.h>
|
||||
|
||||
#include <Qsci/qsciglobal.h>
|
||||
#include <Qsci/qsciscintillabase.h>
|
||||
|
||||
|
||||
class QsciScintilla;
|
||||
|
||||
|
||||
//! \brief The QsciCommand class represents an internal editor command that may
|
||||
//! have one or two keys bound to it.
|
||||
//!
|
||||
//! Methods are provided to change the keys bound to the command and to remove
|
||||
//! a key binding. Each command has a user friendly description of the command
|
||||
//! for use in key mapping dialogs.
|
||||
class QSCINTILLA_EXPORT QsciCommand
|
||||
{
|
||||
public:
|
||||
//! This enum defines the different commands that can be assigned to a key.
|
||||
enum Command {
|
||||
//! Move down one line.
|
||||
LineDown = QsciScintillaBase::SCI_LINEDOWN,
|
||||
|
||||
//! Extend the selection down one line.
|
||||
LineDownExtend = QsciScintillaBase::SCI_LINEDOWNEXTEND,
|
||||
|
||||
//! Extend the rectangular selection down one line.
|
||||
LineDownRectExtend = QsciScintillaBase::SCI_LINEDOWNRECTEXTEND,
|
||||
|
||||
//! Scroll the view down one line.
|
||||
LineScrollDown = QsciScintillaBase::SCI_LINESCROLLDOWN,
|
||||
|
||||
//! Move up one line.
|
||||
LineUp = QsciScintillaBase::SCI_LINEUP,
|
||||
|
||||
//! Extend the selection up one line.
|
||||
LineUpExtend = QsciScintillaBase::SCI_LINEUPEXTEND,
|
||||
|
||||
//! Extend the rectangular selection up one line.
|
||||
LineUpRectExtend = QsciScintillaBase::SCI_LINEUPRECTEXTEND,
|
||||
|
||||
//! Scroll the view up one line.
|
||||
LineScrollUp = QsciScintillaBase::SCI_LINESCROLLUP,
|
||||
|
||||
//! Scroll to the start of the document.
|
||||
ScrollToStart = QsciScintillaBase::SCI_SCROLLTOSTART,
|
||||
|
||||
//! Scroll to the end of the document.
|
||||
ScrollToEnd = QsciScintillaBase::SCI_SCROLLTOEND,
|
||||
|
||||
//! Scroll vertically to centre the current line.
|
||||
VerticalCentreCaret = QsciScintillaBase::SCI_VERTICALCENTRECARET,
|
||||
|
||||
//! Move down one paragraph.
|
||||
ParaDown = QsciScintillaBase::SCI_PARADOWN,
|
||||
|
||||
//! Extend the selection down one paragraph.
|
||||
ParaDownExtend = QsciScintillaBase::SCI_PARADOWNEXTEND,
|
||||
|
||||
//! Move up one paragraph.
|
||||
ParaUp = QsciScintillaBase::SCI_PARAUP,
|
||||
|
||||
//! Extend the selection up one paragraph.
|
||||
ParaUpExtend = QsciScintillaBase::SCI_PARAUPEXTEND,
|
||||
|
||||
//! Move left one character.
|
||||
CharLeft = QsciScintillaBase::SCI_CHARLEFT,
|
||||
|
||||
//! Extend the selection left one character.
|
||||
CharLeftExtend = QsciScintillaBase::SCI_CHARLEFTEXTEND,
|
||||
|
||||
//! Extend the rectangular selection left one character.
|
||||
CharLeftRectExtend = QsciScintillaBase::SCI_CHARLEFTRECTEXTEND,
|
||||
|
||||
//! Move right one character.
|
||||
CharRight = QsciScintillaBase::SCI_CHARRIGHT,
|
||||
|
||||
//! Extend the selection right one character.
|
||||
CharRightExtend = QsciScintillaBase::SCI_CHARRIGHTEXTEND,
|
||||
|
||||
//! Extend the rectangular selection right one character.
|
||||
CharRightRectExtend = QsciScintillaBase::SCI_CHARRIGHTRECTEXTEND,
|
||||
|
||||
//! Move left one word.
|
||||
WordLeft = QsciScintillaBase::SCI_WORDLEFT,
|
||||
|
||||
//! Extend the selection left one word.
|
||||
WordLeftExtend = QsciScintillaBase::SCI_WORDLEFTEXTEND,
|
||||
|
||||
//! Move right one word.
|
||||
WordRight = QsciScintillaBase::SCI_WORDRIGHT,
|
||||
|
||||
//! Extend the selection right one word.
|
||||
WordRightExtend = QsciScintillaBase::SCI_WORDRIGHTEXTEND,
|
||||
|
||||
//! Move to the end of the previous word.
|
||||
WordLeftEnd = QsciScintillaBase::SCI_WORDLEFTEND,
|
||||
|
||||
//! Extend the selection to the end of the previous word.
|
||||
WordLeftEndExtend = QsciScintillaBase::SCI_WORDLEFTENDEXTEND,
|
||||
|
||||
//! Move to the end of the next word.
|
||||
WordRightEnd = QsciScintillaBase::SCI_WORDRIGHTEND,
|
||||
|
||||
//! Extend the selection to the end of the next word.
|
||||
WordRightEndExtend = QsciScintillaBase::SCI_WORDRIGHTENDEXTEND,
|
||||
|
||||
//! Move left one word part.
|
||||
WordPartLeft = QsciScintillaBase::SCI_WORDPARTLEFT,
|
||||
|
||||
//! Extend the selection left one word part.
|
||||
WordPartLeftExtend = QsciScintillaBase::SCI_WORDPARTLEFTEXTEND,
|
||||
|
||||
//! Move right one word part.
|
||||
WordPartRight = QsciScintillaBase::SCI_WORDPARTRIGHT,
|
||||
|
||||
//! Extend the selection right one word part.
|
||||
WordPartRightExtend = QsciScintillaBase::SCI_WORDPARTRIGHTEXTEND,
|
||||
|
||||
//! Move to the start of the document line.
|
||||
Home = QsciScintillaBase::SCI_HOME,
|
||||
|
||||
//! Extend the selection to the start of the document line.
|
||||
HomeExtend = QsciScintillaBase::SCI_HOMEEXTEND,
|
||||
|
||||
//! Extend the rectangular selection to the start of the document line.
|
||||
HomeRectExtend = QsciScintillaBase::SCI_HOMERECTEXTEND,
|
||||
|
||||
//! Move to the start of the displayed line.
|
||||
HomeDisplay = QsciScintillaBase::SCI_HOMEDISPLAY,
|
||||
|
||||
//! Extend the selection to the start of the displayed line.
|
||||
HomeDisplayExtend = QsciScintillaBase::SCI_HOMEDISPLAYEXTEND,
|
||||
|
||||
//! Move to the start of the displayed or document line.
|
||||
HomeWrap = QsciScintillaBase::SCI_HOMEWRAP,
|
||||
|
||||
//! Extend the selection to the start of the displayed or document
|
||||
//! line.
|
||||
HomeWrapExtend = QsciScintillaBase::SCI_HOMEWRAPEXTEND,
|
||||
|
||||
//! Move to the first visible character in the document line.
|
||||
VCHome = QsciScintillaBase::SCI_VCHOME,
|
||||
|
||||
//! Extend the selection to the first visible character in the document
|
||||
//! line.
|
||||
VCHomeExtend = QsciScintillaBase::SCI_VCHOMEEXTEND,
|
||||
|
||||
//! Extend the rectangular selection to the first visible character in
|
||||
//! the document line.
|
||||
VCHomeRectExtend = QsciScintillaBase::SCI_VCHOMERECTEXTEND,
|
||||
|
||||
//! Move to the first visible character of the displayed or document
|
||||
//! line.
|
||||
VCHomeWrap = QsciScintillaBase::SCI_VCHOMEWRAP,
|
||||
|
||||
//! Extend the selection to the first visible character of the
|
||||
//! displayed or document line.
|
||||
VCHomeWrapExtend = QsciScintillaBase::SCI_VCHOMEWRAPEXTEND,
|
||||
|
||||
//! Move to the end of the document line.
|
||||
LineEnd = QsciScintillaBase::SCI_LINEEND,
|
||||
|
||||
//! Extend the selection to the end of the document line.
|
||||
LineEndExtend = QsciScintillaBase::SCI_LINEENDEXTEND,
|
||||
|
||||
//! Extend the rectangular selection to the end of the document line.
|
||||
LineEndRectExtend = QsciScintillaBase::SCI_LINEENDRECTEXTEND,
|
||||
|
||||
//! Move to the end of the displayed line.
|
||||
LineEndDisplay = QsciScintillaBase::SCI_LINEENDDISPLAY,
|
||||
|
||||
//! Extend the selection to the end of the displayed line.
|
||||
LineEndDisplayExtend = QsciScintillaBase::SCI_LINEENDDISPLAYEXTEND,
|
||||
|
||||
//! Move to the end of the displayed or document line.
|
||||
LineEndWrap = QsciScintillaBase::SCI_LINEENDWRAP,
|
||||
|
||||
//! Extend the selection to the end of the displayed or document line.
|
||||
LineEndWrapExtend = QsciScintillaBase::SCI_LINEENDWRAPEXTEND,
|
||||
|
||||
//! Move to the start of the document.
|
||||
DocumentStart = QsciScintillaBase::SCI_DOCUMENTSTART,
|
||||
|
||||
//! Extend the selection to the start of the document.
|
||||
DocumentStartExtend = QsciScintillaBase::SCI_DOCUMENTSTARTEXTEND,
|
||||
|
||||
//! Move to the end of the document.
|
||||
DocumentEnd = QsciScintillaBase::SCI_DOCUMENTEND,
|
||||
|
||||
//! Extend the selection to the end of the document.
|
||||
DocumentEndExtend = QsciScintillaBase::SCI_DOCUMENTENDEXTEND,
|
||||
|
||||
//! Move up one page.
|
||||
PageUp = QsciScintillaBase::SCI_PAGEUP,
|
||||
|
||||
//! Extend the selection up one page.
|
||||
PageUpExtend = QsciScintillaBase::SCI_PAGEUPEXTEND,
|
||||
|
||||
//! Extend the rectangular selection up one page.
|
||||
PageUpRectExtend = QsciScintillaBase::SCI_PAGEUPRECTEXTEND,
|
||||
|
||||
//! Move down one page.
|
||||
PageDown = QsciScintillaBase::SCI_PAGEDOWN,
|
||||
|
||||
//! Extend the selection down one page.
|
||||
PageDownExtend = QsciScintillaBase::SCI_PAGEDOWNEXTEND,
|
||||
|
||||
//! Extend the rectangular selection down one page.
|
||||
PageDownRectExtend = QsciScintillaBase::SCI_PAGEDOWNRECTEXTEND,
|
||||
|
||||
//! Stuttered move up one page.
|
||||
StutteredPageUp = QsciScintillaBase::SCI_STUTTEREDPAGEUP,
|
||||
|
||||
//! Stuttered extend the selection up one page.
|
||||
StutteredPageUpExtend = QsciScintillaBase::SCI_STUTTEREDPAGEUPEXTEND,
|
||||
|
||||
//! Stuttered move down one page.
|
||||
StutteredPageDown = QsciScintillaBase::SCI_STUTTEREDPAGEDOWN,
|
||||
|
||||
//! Stuttered extend the selection down one page.
|
||||
StutteredPageDownExtend = QsciScintillaBase::SCI_STUTTEREDPAGEDOWNEXTEND,
|
||||
|
||||
//! Delete the current character.
|
||||
Delete = QsciScintillaBase::SCI_CLEAR,
|
||||
|
||||
//! Delete the previous character.
|
||||
DeleteBack = QsciScintillaBase::SCI_DELETEBACK,
|
||||
|
||||
//! Delete the previous character if not at start of line.
|
||||
DeleteBackNotLine = QsciScintillaBase::SCI_DELETEBACKNOTLINE,
|
||||
|
||||
//! Delete the word to the left.
|
||||
DeleteWordLeft = QsciScintillaBase::SCI_DELWORDLEFT,
|
||||
|
||||
//! Delete the word to the right.
|
||||
DeleteWordRight = QsciScintillaBase::SCI_DELWORDRIGHT,
|
||||
|
||||
//! Delete right to the end of the next word.
|
||||
DeleteWordRightEnd = QsciScintillaBase::SCI_DELWORDRIGHTEND,
|
||||
|
||||
//! Delete the line to the left.
|
||||
DeleteLineLeft = QsciScintillaBase::SCI_DELLINELEFT,
|
||||
|
||||
//! Delete the line to the right.
|
||||
DeleteLineRight = QsciScintillaBase::SCI_DELLINERIGHT,
|
||||
|
||||
//! Delete the current line.
|
||||
LineDelete = QsciScintillaBase::SCI_LINEDELETE,
|
||||
|
||||
//! Cut the current line to the clipboard.
|
||||
LineCut = QsciScintillaBase::SCI_LINECUT,
|
||||
|
||||
//! Copy the current line to the clipboard.
|
||||
LineCopy = QsciScintillaBase::SCI_LINECOPY,
|
||||
|
||||
//! Transpose the current and previous lines.
|
||||
LineTranspose = QsciScintillaBase::SCI_LINETRANSPOSE,
|
||||
|
||||
//! Duplicate the current line.
|
||||
LineDuplicate = QsciScintillaBase::SCI_LINEDUPLICATE,
|
||||
|
||||
//! Select the whole document.
|
||||
SelectAll = QsciScintillaBase::SCI_SELECTALL,
|
||||
|
||||
//! Move the selected lines up one line.
|
||||
MoveSelectedLinesUp = QsciScintillaBase::SCI_MOVESELECTEDLINESUP,
|
||||
|
||||
//! Move the selected lines down one line.
|
||||
MoveSelectedLinesDown = QsciScintillaBase::SCI_MOVESELECTEDLINESDOWN,
|
||||
|
||||
//! Duplicate the selection.
|
||||
SelectionDuplicate = QsciScintillaBase::SCI_SELECTIONDUPLICATE,
|
||||
|
||||
//! Convert the selection to lower case.
|
||||
SelectionLowerCase = QsciScintillaBase::SCI_LOWERCASE,
|
||||
|
||||
//! Convert the selection to upper case.
|
||||
SelectionUpperCase = QsciScintillaBase::SCI_UPPERCASE,
|
||||
|
||||
//! Cut the selection to the clipboard.
|
||||
SelectionCut = QsciScintillaBase::SCI_CUT,
|
||||
|
||||
//! Copy the selection to the clipboard.
|
||||
SelectionCopy = QsciScintillaBase::SCI_COPY,
|
||||
|
||||
//! Paste from the clipboard.
|
||||
Paste = QsciScintillaBase::SCI_PASTE,
|
||||
|
||||
//! Toggle insert/overtype.
|
||||
EditToggleOvertype = QsciScintillaBase::SCI_EDITTOGGLEOVERTYPE,
|
||||
|
||||
//! Insert a platform dependent newline.
|
||||
Newline = QsciScintillaBase::SCI_NEWLINE,
|
||||
|
||||
//! Insert a formfeed.
|
||||
Formfeed = QsciScintillaBase::SCI_FORMFEED,
|
||||
|
||||
//! Indent one level.
|
||||
Tab = QsciScintillaBase::SCI_TAB,
|
||||
|
||||
//! De-indent one level.
|
||||
Backtab = QsciScintillaBase::SCI_BACKTAB,
|
||||
|
||||
//! Cancel any current operation.
|
||||
Cancel = QsciScintillaBase::SCI_CANCEL,
|
||||
|
||||
//! Undo the last command.
|
||||
Undo = QsciScintillaBase::SCI_UNDO,
|
||||
|
||||
//! Redo the last command.
|
||||
Redo = QsciScintillaBase::SCI_REDO,
|
||||
|
||||
//! Zoom in.
|
||||
ZoomIn = QsciScintillaBase::SCI_ZOOMIN,
|
||||
|
||||
//! Zoom out.
|
||||
ZoomOut = QsciScintillaBase::SCI_ZOOMOUT,
|
||||
};
|
||||
|
||||
//! Return the command that will be executed by this instance.
|
||||
Command command() const {return scicmd;}
|
||||
|
||||
//! Execute the command.
|
||||
void execute();
|
||||
|
||||
//! Binds the key \a key to the command. If \a key is 0 then the key
|
||||
//! binding is removed. If \a key is invalid then the key binding is
|
||||
//! unchanged. Valid keys are any visible or control character or any
|
||||
//! of \c Key_Down, \c Key_Up, \c Key_Left, \c Key_Right, \c Key_Home,
|
||||
//! \c Key_End, \c Key_PageUp, \c Key_PageDown, \c Key_Delete,
|
||||
//! \c Key_Insert, \c Key_Escape, \c Key_Backspace, \c Key_Tab and
|
||||
//! \c Key_Return. Keys may be modified with any combination of \c SHIFT,
|
||||
//! \c CTRL, \c ALT and \c META.
|
||||
//!
|
||||
//! \sa key(), setAlternateKey(), validKey()
|
||||
void setKey(int key);
|
||||
|
||||
//! Binds the alternate key \a altkey to the command. If \a altkey is 0
|
||||
//! then the alternate key binding is removed.
|
||||
//!
|
||||
//! \sa alternateKey(), setKey(), validKey()
|
||||
void setAlternateKey(int altkey);
|
||||
|
||||
//! The key that is currently bound to the command is returned.
|
||||
//!
|
||||
//! \sa setKey(), alternateKey()
|
||||
int key() const {return qkey;}
|
||||
|
||||
//! The alternate key that is currently bound to the command is
|
||||
//! returned.
|
||||
//!
|
||||
//! \sa setAlternateKey(), key()
|
||||
int alternateKey() const {return qaltkey;}
|
||||
|
||||
//! If the key \a key is valid then true is returned.
|
||||
static bool validKey(int key);
|
||||
|
||||
//! The user friendly description of the command is returned.
|
||||
QString description() const;
|
||||
|
||||
private:
|
||||
friend class QsciCommandSet;
|
||||
|
||||
QsciCommand(QsciScintilla *qs, Command cmd, int key, int altkey,
|
||||
const char *desc);
|
||||
|
||||
void bindKey(int key,int &qk,int &scik);
|
||||
|
||||
QsciScintilla *qsCmd;
|
||||
Command scicmd;
|
||||
int qkey, scikey, qaltkey, scialtkey;
|
||||
const char *descCmd;
|
||||
|
||||
QsciCommand(const QsciCommand &);
|
||||
QsciCommand &operator=(const QsciCommand &);
|
||||
};
|
||||
|
||||
#ifdef __APPLE__
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
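A hedged sketch of how a binding declared above can be changed at run time; it assumes QsciScintilla::standardCommands() and QsciCommandSet::commands() from the companion qscicommandset.h header, which is not part of this sample:

#include <Qsci/qsciscintilla.h>
#include <Qsci/qscicommand.h>
#include <Qsci/qscicommandset.h>

// Remove the key bound to "duplicate the current line", as an illustration.
void unbindDuplicate(QsciScintilla *editor)
{
    QList<QsciCommand *> &cmds = editor->standardCommands()->commands();
    for (int i = 0; i < cmds.size(); ++i) {
        if (cmds.at(i)->command() == QsciCommand::LineDuplicate)
            cmds.at(i)->setKey(0);   // 0 removes the binding (see setKey() above)
    }
}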
116
samples/C++/qsciprinter.h
Normal file
@@ -0,0 +1,116 @@
// This module defines interface to the QsciPrinter class.
//
// Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
//
// This file is part of QScintilla.
//
// This file may be used under the terms of the GNU General Public
// License versions 2.0 or 3.0 as published by the Free Software
// Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
// included in the packaging of this file. Alternatively you may (at
// your option) use any later version of the GNU General Public
// License if such license has been publicly approved by Riverbank
// Computing Limited (or its successors, if any) and the KDE Free Qt
// Foundation. In addition, as a special exception, Riverbank gives you
// certain additional rights. These rights are described in the Riverbank
// GPL Exception version 1.1, which can be found in the file
// GPL_EXCEPTION.txt in this package.
//
// If you are unsure which license is appropriate for your use, please
// contact the sales department at sales@riverbankcomputing.com.
//
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.


#ifndef QSCIPRINTER_H
#define QSCIPRINTER_H

#ifdef __APPLE__
extern "C++" {
#endif

#include <qprinter.h>

#include <Qsci/qsciglobal.h>
#include <Qsci/qsciscintilla.h>


QT_BEGIN_NAMESPACE
class QRect;
class QPainter;
QT_END_NAMESPACE

class QsciScintillaBase;


//! \brief The QsciPrinter class is a sub-class of the Qt QPrinter class that
//! is able to print the text of a Scintilla document.
//!
//! The class can be further sub-classed to alter the layout of the text, adding
//! headers and footers for example.
class QSCINTILLA_EXPORT QsciPrinter : public QPrinter
{
public:
    //! Constructs a printer paint device with mode \a mode.
    QsciPrinter(PrinterMode mode = ScreenResolution);

    //! Destroys the QsciPrinter instance.
    virtual ~QsciPrinter();

    //! Format a page, by adding headers and footers for example, before the
    //! document text is drawn on it. \a painter is the painter to be used to
    //! add customised text and graphics. \a drawing is true if the page is
    //! actually being drawn rather than being sized. \a painter drawing
    //! methods must only be called when \a drawing is true. \a area is the
    //! area of the page that will be used to draw the text. This should be
    //! modified if it is necessary to reserve space for any customised text or
    //! graphics. By default the area is relative to the printable area of the
    //! page. Use QPrinter::setFullPage() before calling printRange() if you
    //! want to try and print over the whole page. \a pagenr is the number of
    //! the page. The first page is numbered 1.
    virtual void formatPage(QPainter &painter, bool drawing, QRect &area,
            int pagenr);

    //! Return the number of points to add to each font when printing.
    //!
    //! \sa setMagnification()
    int magnification() const {return mag;}

    //! Sets the number of points to add to each font when printing to \a
    //! magnification.
    //!
    //! \sa magnification()
    virtual void setMagnification(int magnification);

    //! Print a range of lines from the Scintilla instance \a qsb. \a from is
    //! the first line to print and a negative value signifies the first line
    //! of text. \a to is the last line to print and a negative value
    //! signifies the last line of text. true is returned if there was no
    //! error.
    virtual int printRange(QsciScintillaBase *qsb, int from = -1, int to = -1);

    //! Return the line wrap mode used when printing. The default is
    //! QsciScintilla::WrapWord.
    //!
    //! \sa setWrapMode()
    QsciScintilla::WrapMode wrapMode() const {return wrap;}

    //! Sets the line wrap mode used when printing to \a wmode.
    //!
    //! \sa wrapMode()
    virtual void setWrapMode(QsciScintilla::WrapMode wmode);

private:
    int mag;
    QsciScintilla::WrapMode wrap;

    QsciPrinter(const QsciPrinter &);
    QsciPrinter &operator=(const QsciPrinter &);
};

#ifdef __APPLE__
}
#endif

#endif
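A minimal printing sketch built only on the interface above (not part of the sample); the resolution mode and magnification value are arbitrary choices:

#include <Qsci/qsciprinter.h>
#include <Qsci/qsciscintilla.h>

// Print an entire document with word wrapping and slightly enlarged fonts.
void printDocument(QsciScintilla *editor)
{
    QsciPrinter printer(QPrinter::HighResolution);
    printer.setWrapMode(QsciScintilla::WrapWord);
    printer.setMagnification(2);          // add 2 points to every font
    printer.printRange(editor, -1, -1);   // negative bounds mean the whole document
}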
4674
samples/C++/wrapper_inner.cpp
Normal file
File diff suppressed because it is too large
61
samples/C/jni_layer.h
Normal file
@@ -0,0 +1,61 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class jni_JniLayer */

#ifndef _Included_jni_JniLayer
#define _Included_jni_JniLayer
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_initialize
 * Signature: ([II)J
 */
JNIEXPORT jlong JNICALL Java_jni_JniLayer_jni_1layer_1initialize
  (JNIEnv *, jobject, jintArray, jint, jint);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_mainloop
 * Signature: (J)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1mainloop
  (JNIEnv *, jobject, jlong);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_set_button
 * Signature: (JII)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1set_1button
  (JNIEnv *, jobject, jlong, jint, jint);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_set_analog
 * Signature: (JIIF)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1set_1analog
  (JNIEnv *, jobject, jlong, jint, jint, jfloat);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_report_analog_chg
 * Signature: (JI)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1report_1analog_1chg
  (JNIEnv *, jobject, jlong, jint);

/*
 * Class:     jni_JniLayer
 * Method:    jni_layer_kill
 * Signature: (J)V
 */
JNIEXPORT void JNICALL Java_jni_JniLayer_jni_1layer_1kill
  (JNIEnv *, jobject, jlong);

#ifdef __cplusplus
}
#endif
#endif
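A hedged C++ implementation stub for the first declaration above (not part of the sample); the parameter names config, width and height are invented for illustration, and the returned handle is arbitrary. The _1 sequences in the exported symbols encode literal underscores of the Java method names (jni.JniLayer.jni_layer_initialize):

#include <jni.h>
#include "jni_layer.h"

JNIEXPORT jlong JNICALL Java_jni_JniLayer_jni_1layer_1initialize
  (JNIEnv *env, jobject, jintArray config, jint width, jint height)
{
    jsize n = env->GetArrayLength(config);        // inspect the Java int[]
    (void)n; (void)width; (void)height;           // real setup would go here
    return reinterpret_cast<jlong>(new int(0));   // opaque handle passed back to Java
}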
1267
samples/C/rf_io.c
Normal file
File diff suppressed because it is too large
682
samples/C/rf_io.h
Normal file
@@ -0,0 +1,682 @@
/**
|
||||
** Copyright (c) 2011-2012, Karapetsas Eleftherios
|
||||
** All rights reserved.
|
||||
**
|
||||
** Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
||||
** 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
||||
** 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in
|
||||
** the documentation and/or other materials provided with the distribution.
|
||||
** 3. Neither the name of the Original Author of Refu nor the names of its contributors may be used to endorse or promote products derived from
|
||||
**
|
||||
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
|
||||
** INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
** DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
** SERVICES;LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
** WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
**/
|
||||
|
||||
|
||||
#ifndef REFU_IO_H
|
||||
#define REFU_IO_H
|
||||
|
||||
#include <rf_setup.h>
|
||||
#include <stdio.h>
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C"
|
||||
{// opening bracket for calling from C++
|
||||
#endif
|
||||
|
||||
// New line feed
|
||||
#define RF_LF 0xA
|
||||
// Carriage Return
|
||||
#define RF_CR 0xD
|
||||
|
||||
#ifdef REFU_WIN32_VERSION
|
||||
#define i_PLUSB_WIN32 "b"
|
||||
#else
|
||||
#define i_PLUSB_WIN32 ""
|
||||
#endif
|
||||
|
||||
// This is the type that represents the file offset
|
||||
#ifdef _MSC_VER
|
||||
typedef __int64 foff_rft;
|
||||
#else
|
||||
#include <sys/types.h>
|
||||
typedef off64_t foff_rft;
|
||||
#endif
|
||||
///Fseek and Ftelll definitions
|
||||
#ifdef _MSC_VER
|
||||
#define rfFseek(i_FILE_,i_OFFSET_,i_WHENCE_) _fseeki64(i_FILE_,i_OFFSET_,i_WHENCE_)
|
||||
#define rfFtell(i_FILE_) _ftelli64(i_FILE_)
|
||||
#else
|
||||
#define rfFseek(i_FILE_,i_OFFSET_,i_WHENCE_) fseeko64(i_FILE_,i_OFFSET_,i_WHENCE_)
|
||||
#define rfFtell(i_FILE_) ftello64(i_FILE_)
|
||||
#endif
|
||||
|
||||
/**
|
||||
** @defgroup RF_IOGRP I/O
|
||||
** @addtogroup RF_IOGRP
|
||||
** @{
|
||||
**/
|
||||
|
||||
// @brief Reads a UTF-8 file descriptor until end of line or EOF is found and returns a UTF-8 byte buffer
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that if under
|
||||
// Windows make sure to call fopen with "wb", "rb" e.t.c. instead of the simple "w", "r" e.t.c. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// When the compile flag @c RF_NEWLINE_CRLF is defined (the default case at Windows) then this function
|
||||
// shall not be adding any CR character that is found in the file behind a newline character since this is
|
||||
// the Windows line ending scheme. Beware though that the returned read bytes value shall still count the CR character inside.
|
||||
//
|
||||
// @param[in] f The file descriptor to read
|
||||
// @param[out] utf8 Give here a reference to an uninitialized char* that will be allocated inside the function
// and contain the utf8 byte buffer. Needs to be freed by the caller explicitly later
// @param[out] byteLength Give an @c uint32_t here to receive the length of the @c utf8 buffer in bytes
// @param[out] bufferSize Give an @c uint32_t here to receive the capacity of the @c utf8 buffer in bytes
// @param[out] eof Pass a pointer to a char to receive a true or false value in case the end of file was reached
// while reading this line
// @return Returns either a positive number for success that represents the number of bytes read from @c f, or an error in case something goes wrong.
|
||||
// The possible errors to return are the same as rfFgets_UTF8()
|
||||
i_DECLIMEX_ int32_t rfFReadLine_UTF8(FILE* f,char** utf8,uint32_t* byteLength,uint32_t* bufferSize,char* eof);
|
||||
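A minimal consumption sketch for rfFReadLine_UTF8() (not part of the sample); it assumes, as the library's conventions suggest but this header does not state here, that the RE_* error codes are negative:

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <rf_io.h>

// Print every line of a UTF-8 text file; note the binary "rb" mode required above.
void dumpLines(const char* path)
{
    FILE* f = fopen(path, "rb");
    if (f == NULL) return;
    char eof = 0;
    while (!eof) {
        char* utf8 = NULL;
        uint32_t byteLength = 0, bufferSize = 0;
        int32_t ret = rfFReadLine_UTF8(f, &utf8, &byteLength, &bufferSize, &eof);
        if (ret < 0) break;   // assumption: RE_* error codes are negative
        printf("%s\n", utf8);
        free(utf8);           // the buffer is allocated inside the function
    }
    fclose(f);
}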
// @brief Reads a Big Endian UTF-16 file descriptor until end of line or EOF is found and returns a UTF-8 byte buffer
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that if under
|
||||
// Windows make sure to call fopen with "wb", "rb" e.t.c. instead of the simple "w", "r" e.t.c. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// When the compile flag @c RF_NEWLINE_CRLF is defined (the default case at Windows) then this function
|
||||
// shall not be adding any CR character that is found in the file behind a newline character since this is
|
||||
// the Windows line ending scheme. Beware though that the returned read bytes value shall still count the CR character inside.
|
||||
//
|
||||
// @param[in] f The file descriptor to read
|
||||
// @param[out] utf8 Give here a reference to an uninitialized char* that will be allocated inside the function
// and contain the utf8 byte buffer. Needs to be freed by the caller explicitly later
// @param[out] byteLength Give an @c uint32_t here to receive the length of the @c utf8 buffer in bytes
// @param[out] eof Pass a pointer to a char to receive a true or false value in case the end of file was reached
// while reading this line
// @return Returns either a positive number for success that represents the number of bytes read from @c f, or an error in case something goes wrong.
|
||||
// + Any error that can be returned by @ref rfFgets_UTF16BE()
|
||||
// + @c RE_UTF16_INVALID_SEQUENCE: Failed to decode the UTF-16 byte stream of the file descriptor
|
||||
// + @c RE_UTF8_ENCODING: Failed to encode the UTF-16 of the file descriptor into UTF-8
|
||||
i_DECLIMEX_ int32_t rfFReadLine_UTF16BE(FILE* f,char** utf8,uint32_t* byteLength,char* eof);
|
||||
// @brief Reads a Little Endian UTF-16 file descriptor until end of line or EOF is found and returns a UTF-8 byte buffer
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that if under
|
||||
// Windows make sure to call fopen with "wb", "rb" e.t.c. instead of the simple "w", "r" e.t.c. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// When the compile flag @c RF_NEWLINE_CRLF is defined (the default case at Windows) then this function
|
||||
// shall not be adding any CR character that is found in the file behind a newline character since this is
|
||||
// the Windows line ending scheme. Beware though that the returned read bytes value shall still count the CR character inside.
|
||||
//
|
||||
// @param[in] f The file descriptor to read
|
||||
// @param[out] utf8 Give here a reference to an uninitialized char* that will be allocated inside the function
// and contain the utf8 byte buffer. Needs to be freed by the caller explicitly later
// @param[out] byteLength Give an @c uint32_t here to receive the length of the @c utf8 buffer in bytes
// @param[out] eof Pass a pointer to a char to receive a true or false value in case the end of file was reached
// while reading this line
// @return Returns either a positive number for success that represents the number of bytes read from @c f, or an error in case something goes wrong.
|
||||
// + Any error that can be returned by @ref rfFgets_UTF16LE()
|
||||
// + @c RE_UTF16_INVALID_SEQUENCE: Failed to decode the UTF-16 byte stream of the file descriptor
|
||||
// + @c RE_UTF8_ENCODING: Failed to encode the UTF-16 of the file descriptor into UTF-8
|
||||
i_DECLIMEX_ int32_t rfFReadLine_UTF16LE(FILE* f,char** utf8,uint32_t* byteLength,char* eof);
|
||||
|
||||
// @brief Reads a Big Endian UTF-32 file descriptor until end of line or EOF is found and returns a UTF-8 byte buffer
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that if under
|
||||
// Windows make sure to call fopen with "wb", "rb" e.t.c. instead of the simple "w", "r" e.t.c. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// When the compile flag @c RF_NEWLINE_CRLF is defined (the default case at Windows) then this function
|
||||
// shall not be adding any CR character that is found in the file behind a newline character since this is
|
||||
// the Windows line ending scheme. Beware though that the returned read bytes value shall still count the CR character inside.
|
||||
//
|
||||
// @param[in] f The file descriptor to read
|
||||
// @param[out] utf8 Give here a reference to an uninitialized char* that will be allocated inside the function
// and contain the utf8 byte buffer. Needs to be freed by the caller explicitly later
// @param[out] byteLength Give an @c uint32_t here to receive the length of the @c utf8 buffer in bytes
// @param[out] eof Pass a pointer to a char to receive a true or false value in case the end of file was reached
// while reading this line
// @return Returns either a positive number for success that represents the number of bytes read from @c f, or an error in case something goes wrong.
|
||||
// + Any error that can be returned by @ref rfFgets_UTF32BE()
|
||||
// + @c RE_UTF8_ENCODING: Failed to encode the UTF-16 of the file descriptor into UTF-8
|
||||
i_DECLIMEX_ int32_t rfFReadLine_UTF32BE(FILE* f,char** utf8,uint32_t* byteLength,char* eof);
|
||||
// @brief Reads a Little Endian UTF-32 file descriptor until end of line or EOF is found and returns a UTF-8 byte buffer
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that if under
|
||||
// Windows make sure to call fopen with "wb", "rb" e.t.c. instead of the simple "w", "r" e.t.c. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// When the compile flag @c RF_NEWLINE_CRLF is defined (the default case at Windows) then this function
|
||||
// shall not be adding any CR character that is found in the file behind a newline character since this is
|
||||
// the Windows line ending scheme. Beware though that the returned read bytes value shall still count the CR character inside.
|
||||
//
|
||||
// @param[in] f The file descriptor to read
|
||||
// @param[out] utf8 Give here a reference to an uninitialized char* that will be allocated inside the function
// and contain the utf8 byte buffer. Needs to be freed by the caller explicitly later
// @param[out] byteLength Give an @c uint32_t here to receive the length of the @c utf8 buffer in bytes
// @param[out] eof Pass a pointer to a char to receive a true or false value in case the end of file was reached
// while reading this line
// @return Returns either a positive number for success that represents the number of bytes read from @c f, or an error in case something goes wrong.
|
||||
// + Any error that can be returned by @ref rfFgets_UTF32LE()
|
||||
// + @c RE_UTF8_ENCODING: Failed to encode the UTF-16 of the file descriptor into UTF-8
|
||||
i_DECLIMEX_ int32_t rfFReadLine_UTF32LE(FILE* f,char** utf8,uint32_t* byteLength,char* eof);
|
||||
|
||||
// @brief Gets a number of bytes from a BIG endian UTF-32 file descriptor
|
||||
//
|
||||
// This is a function that's similar to c library fgets but it also returns the number of bytes read. Reads in from the file until @c num bytes
|
||||
// have been read or new line or EOF character has been encountered.
|
||||
//
|
||||
// The function will read until @c num characters are read and if @c num
|
||||
// would take us to the middle of a UTF32 character then the next character shall also be read
|
||||
// and the function will return the number of bytes read.
|
||||
// Since the function null terminates the buffer the given @c buff needs to be of at least
|
||||
// @c num+7 size to cater for the worst case.
|
||||
//
|
||||
// The final bytestream stored inside @c buff is in the endianess of the system.
|
||||
//
|
||||
// If right after the last character read comes the EOF, the function
|
||||
// shall detect so and assign @c true to @c eof.
|
||||
//
|
||||
// In Windows where file endings are in the form of 2 bytes CR-LF (Carriage return - NewLine) this function
|
||||
// shall just ignore the carriage returns and not return it inside the return buffer at @c buff.
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that if under
|
||||
// Windows make sure to call fopen with "wb", "rb" e.t.c. instead of the simple "w", "r" e.t.c. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// @param[in] buff A buffer to be filled with the contents of the file. Should be of size at least @c num+7
|
||||
// @param[in] num The maximum number of bytes to read from within the file NOT including the null terminating character (which in itself is 4 bytes). Should be a multiple of 4
|
||||
// @param[in] f A valid FILE descriptor from which to read the bytes
|
||||
// @param[out] eof Pass a reference to a char to receive a true/false value for whether EOF has been reached.
|
||||
// @return Returns the actual number of bytes read or an error if there was a problem.
|
||||
// The possible errors are:
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgets_UTF32BE(char* buff,uint32_t num,FILE* f,char* eof);
|
||||
// @brief Gets a number of bytes from a Little endian UTF-32 file descriptor
|
||||
//
|
||||
// This is a function that's similar to c library fgets but it also returns the number of bytes read. Reads in from the file until @c num bytes
|
||||
// have been read or new line or EOF character has been encountered.
|
||||
//
|
||||
// The function will read until @c num characters are read and if @c num
|
||||
// would take us to the middle of a UTF32 character then the next character shall also be read
|
||||
// and the function will return the number of bytes read.
|
||||
// Since the function null terminates the buffer the given @c buff needs to be of at least
|
||||
// @c num+7 size to cater for the worst case.
|
||||
//
|
||||
// The final bytestream stored inside @c buff is in the endianness of the system.
|
||||
//
|
||||
// If right after the last character read comes the EOF, the function
|
||||
// shall detect so and assign @c true to @c eof.
|
||||
//
|
||||
// In Windows, where line endings are the 2-byte CR-LF sequence (Carriage Return - NewLine), this function
|
||||
// shall simply ignore the carriage returns and not return them inside the return buffer at @c buff.
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that under
|
||||
// Windows you should call fopen with "wb", "rb", etc. instead of the simple "w", "r", etc. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// @param[in] buff A buffer to be filled with the contents of the file. Should be of size at least @c num+7
|
||||
// @param[in] num The maximum number of bytes to read from within the file NOT including the null terminating character (which in itself is 4 bytes). Should be a multiple of 4
|
||||
// @param[in] f A valid FILE descriptor from which to read the bytes
|
||||
// @param[out] eof Pass a reference to a char to receive a true/false value for whether EOF has been reached.
|
||||
// @return Returns the actual number of bytes read or an error if there was a problem.
|
||||
// The possible errors are:
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgets_UTF32LE(char* buff,uint32_t num,FILE* f,char* eof);
|
||||
|
||||
// @brief Gets a number of bytes from a BIG endian UTF-16 file descriptor
|
||||
//
|
||||
// This is a function that's similar to c library fgets but it also returns the number of bytes read. Reads in from the file until @c num bytes
|
||||
// have been read or new line or EOF character has been encountered.
|
||||
//
|
||||
// The function will read until @c num characters are read and if @c num
|
||||
// would take us to the middle of a UTF16 character then the next character shall also be read
|
||||
// and the function will return the number of bytes read.
|
||||
// Since the function null terminates the buffer the given @c buff needs to be of at least
|
||||
// @c num+5 size to cater for the worst case.
|
||||
//
|
||||
// The final bytestream stored inside @c buff is in the endianness of the system.
|
||||
//
|
||||
// If right after the last character read comes the EOF, the function
|
||||
// shall detect so and assign @c true to @c eof.
|
||||
//
|
||||
// In Windows, where line endings are the 2-byte CR-LF sequence (Carriage Return - NewLine), this function
|
||||
// shall simply ignore the carriage returns and not return them inside the return buffer at @c buff.
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that under
|
||||
// Windows you should call fopen with "wb", "rb", etc. instead of the simple "w", "r", etc. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// @param[in] buff A buffer to be filled with the contents of the file. Should be of size at least @c num+5
|
||||
// @param[in] num The maximum number of bytes to read from within the file NOT including the null terminating character (which in itself is 2 bytes). Should be a multiple of 2
|
||||
// @param[in] f A valid FILE descriptor from which to read the bytes
|
||||
// @param[out] eof Pass a reference to a char to receive a true/false value for whether EOF has been reached.
|
||||
// @return Returns the actual number of bytes read or an error if there was a problem.
|
||||
// The possible errors are:
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgets_UTF16BE(char* buff,uint32_t num,FILE* f,char* eof);
|
||||
// @brief Gets a number of bytes from a Little endian UTF-16 file descriptor
|
||||
//
|
||||
// This is a function that's similar to c library fgets but it also returns the number of bytes read. Reads in from the file until @c num bytes
|
||||
// have been read or new line or EOF character has been encountered.
|
||||
//
|
||||
// The function will read until @c num characters are read and if @c num
|
||||
// would take us to the middle of a UTF16 character then the next character shall also be read
|
||||
// and the function will return the number of bytes read.
|
||||
// Since the function null terminates the buffer the given @c buff needs to be of at least
|
||||
// @c num+5 size to cater for the worst case.
|
||||
//
|
||||
// The final bytestream stored inside @c buff is in the endianness of the system.
|
||||
//
|
||||
// If right after the last character read comes the EOF, the function
|
||||
// shall detect so and assign @c true to @c eof.
|
||||
//
|
||||
// In Windows, where line endings are the 2-byte CR-LF sequence (Carriage Return - NewLine), this function
|
||||
// shall simply ignore the carriage returns and not return them inside the return buffer at @c buff.
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that under
|
||||
// Windows you should call fopen with "wb", "rb", etc. instead of the simple "w", "r", etc. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// @param[in] buff A buffer to be filled with the contents of the file. Should be of size at least @c num+5
|
||||
// @param[in] num The maximum number of bytes to read from within the file NOT including the null terminating character (which in itself is 2 bytes). Should be a multiple of 2
|
||||
// @param[in] f A valid FILE descriptor from which to read the bytes
|
||||
// @param[out] eof Pass a reference to a char to receive a true/false value for whether EOF has been reached.
|
||||
// @return Returns the actual number of bytes read or an error if there was a problem.
|
||||
// The possible errors are:
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgets_UTF16LE(char* buff,uint32_t num,FILE* f,char* eof);
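// Illustrative usage sketch (added for clarity; not part of the original
// header). Same call pattern as the UTF-32 variants, but with the UTF-16
// contract documented above: @c num a multiple of 2 and @c buff of at least
// @c num+5 bytes. The helper name is hypothetical.
static inline int32_t example_fgets_utf16le(FILE* f)
{
    char buff[256 + 5]; // num = 256 payload bytes, +5 for the worst case
    char eof = 0;
    int32_t bytes = rfFgets_UTF16LE(buff, 256, f, &eof);
    // A negative return value is assumed to be one of the RE_* errors above;
    // otherwise 'buff' now holds 'bytes' bytes of system-endianness UTF-16.
    return bytes;
}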
|
||||
// @brief Gets a number of bytes from a UTF-8 file descriptor
|
||||
//
|
||||
// This is a function that's similar to c library fgets but it also returns the number of bytes read. Reads in from the file until @c num characters
|
||||
// have been read or new line or EOF character has been encountered.
|
||||
//
|
||||
// The function automatically adds a null termination character at the end of
|
||||
// @c buff but this character is not included in the returned actual number of bytes.
|
||||
//
|
||||
// The function will read until @c num characters are read and if @c num
|
||||
// would take us to the middle of a UTF8 character then the next character shall also be read
|
||||
// and the function will return the number of bytes read.
|
||||
// Since the function null terminates the buffer the given @c buff needs to be of at least
|
||||
// @c num+4 size to cater for the worst case.
|
||||
//
|
||||
// If right after the last character read comes the EOF, the function
|
||||
// shall detect so and assign @c true to @c eof.
|
||||
//
|
||||
// In Windows, where line endings are the 2-byte CR-LF sequence (Carriage Return - NewLine), this function
|
||||
// shall simply ignore the carriage returns and not return them inside the return buffer at @c buff.
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that under
|
||||
// Windows you should call fopen with "wb", "rb", etc. instead of the simple "w", "r", etc. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// @param[in] buff A buffer to be filled with the contents of the file. Should be of size at least @c num+4
|
||||
// @param[in] num The maximum number of bytes to read from within the file NOT including the null terminating character (which in itself is 1 byte)
|
||||
// @param[in] f A valid FILE descriptor from which to read the bytes
|
||||
// @param[out] eof Pass a reference to a char to receive a true/false value for whether EOF has been reached.
|
||||
// @return Returns the actual number of bytes read or an error if there was a problem.
|
||||
// The possible errors are:
|
||||
// + @c RE_UTF8_INVALID_SEQUENCE_INVALID_BYTE: If an invalid UTF-8 byte has been found
|
||||
// + @c RE_UTF8_INVALID_SEQUENCE_CONBYTE: If during parsing the file we were expecting a continuation
|
||||
// byte and did not find it
|
||||
// + @c RE_UTF8_INVALID_SEQUENCE_END: If the null character is encountered in between bytes that should
|
||||
// have been continuation bytes
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgets_UTF8(char* buff,uint32_t num,FILE* f,char* eof);
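// Illustrative usage sketch (added for clarity; not part of the original
// header): reading a UTF-8 file in chunks until EOF. The helper name is
// hypothetical and the sketch assumes the RE_* error codes are negative.
static inline void example_fgets_utf8_until_eof(FILE* f)
{
    char buff[512 + 4]; // num = 512 payload bytes, +4 so the terminator fits
    char eof = 0;
    while (eof == 0)
    {
        int32_t bytes = rfFgets_UTF8(buff, 512, f, &eof);
        if (bytes < 0)
        {
            break; // one of the RE_* or RE_UTF8_* errors documented above
        }
        // 'buff' now holds 'bytes' bytes of UTF-8 (carriage returns already
        // stripped on Windows), null terminated
    }
}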
|
||||
|
||||
// @brief Gets a unicode character from a UTF-8 file descriptor
|
||||
//
|
||||
// This function attempts to assume a more modern fgetc() role for UTF-8 encoded files.
|
||||
// Reads bytes from the File descriptor @c f until a full UTF-8 unicode character has been read
|
||||
//
|
||||
// After this function the file pointer will have moved either by @c 1, @c 2, @c 3 or @c 4
|
||||
// bytes if the return value is positive. You can see how much by checking the return value.
|
||||
//
|
||||
// You shall need to provide an integer at @c c to contain either the decoded Unicode
|
||||
// codepoint or the UTF-8 encoded bytes depending on the value of the @c cp argument.
|
||||
//
|
||||
// @param f A valid FILE descriptor from which to read the bytes
|
||||
// @param c Pass an int that will receive either the unicode code point value or
|
||||
// the UTF8 bytes depending on the value of the @c cp flag
|
||||
// @param cp A boolean flag. If @c true then the int passed at @c c will contain the unicode code point
|
||||
// of the read character, so the UTF-8 will be decoded.
|
||||
// If @c false the int passed at @c c will contain the value of the read bytes in UTF-8 without any decoding
|
||||
// @return Returns the number of bytes read (either @c 1, @c 2, @c 3 or @c 4) or an error if the function
|
||||
// fails for some reason. Possible error values are:
|
||||
// + @c RE_FILE_EOF: The end of file has been found while reading. If the end of file is encountered
|
||||
// in the middle of a UTF-8 encoded character, where more bytes would be expected,
|
||||
// then an @c RE_UTF8_INVALID_SEQUENCE_END error is also logged
|
||||
// + @c RE_UTF8_INVALID_SEQUENCE_INVALID_BYTE: If an invalid UTF-8 byte has been found
|
||||
// + @c RE_UTF8_INVALID_SEQUENCE_CONBYTE: If during parsing the file we were expecting a continuation
|
||||
// byte and did not find it
|
||||
// + @c RE_UTF8_INVALID_SEQUENCE_END: If the null character is encountered in between bytes that should
|
||||
// have been continuation bytes
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgetc_UTF8(FILE* f,uint32_t *c,char cp);
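// Illustrative usage sketch (added for clarity; not part of the original
// header): decoding one code point at a time. The helper name is hypothetical
// and the sketch assumes the RE_* error codes (including RE_FILE_EOF) are
// negative, as suggested by the int32_t return type.
static inline void example_fgetc_utf8_loop(FILE* f)
{
    uint32_t c;
    int32_t bytesMoved;
    // cp == true, so 'c' receives the decoded Unicode code point
    while ((bytesMoved = rfFgetc_UTF8(f, &c, 1)) >= 0)
    {
        // the file pointer advanced by 'bytesMoved' (1 to 4) bytes and 'c'
        // holds the code point of the character that was just read
    }
    // the loop ends on RE_FILE_EOF or on one of the other errors listed above
}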
|
||||
// @brief Gets a unicode character from a UTF-16 Big Endian file descriptor
|
||||
//
|
||||
// This function attempts to assume a more modern fgetc() role for UTF-16 encoded files.
|
||||
// Reads bytes from the File descriptor @c f until a full UTF-16 unicode character has been read
|
||||
//
|
||||
// After this function the file pointer will have moved either by @c 2 or @c 4
|
||||
// bytes if the return value is positive. You can see how much by checking the return value.
|
||||
//
|
||||
// You shall need to provide an integer at @c c to contain either the decoded Unicode
|
||||
// codepoint or the big endian encoded UTF-16 bytes depending on the value of the @c cp argument.
|
||||
//
|
||||
// @param f A valid FILE descriptor from which to read the bytes
|
||||
// @param c Pass an int that will receive either the unicode code point value or
|
||||
// the UTF16 bytes depending on the value of the @c cp flag
|
||||
// @param cp A boolean flag. If @c true then the int passed at @c c will contain the unicode code point
|
||||
// of the read character, so the UTF-16 will be decoded.
|
||||
// If @c false the int passed at @c c will contain the value of the read bytes in UTF-16 without any decoding
|
||||
// @return Returns the number of bytes read (either @c 2 or @c 4) or an error if the function
|
||||
// fails for some reason. Possible error values are:
|
||||
// + @c RE_UTF16_INVALID_SEQUENCE: Either the read word or its surrogate pair if 4 bytes were read held illegal values
|
||||
// + @c RE_UTF16_NO_SURRPAIR: According to the first read word a surrogate pair was expected but none was found
|
||||
// + @c RE_FILE_EOF: The end of file has been found while reading. If the end of file is encountered
|
||||
// while we expect a UTF-16 surrogate pair an appropriate error is logged
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgetc_UTF16BE(FILE* f,uint32_t *c,char cp);
|
||||
// @brief Gets a unicode character from a UTF-16 Little Endian file descriptor
|
||||
//
|
||||
// This function attempts to assume a more modern fgetc() role for UTF-16 encoded files.
|
||||
// Reads bytes from the File descriptor @c f until a full UTF-16 unicode character has been read
|
||||
//
|
||||
// After this function the file pointer will have moved either by @c 2 or @c 4
|
||||
// bytes if the return value is positive. You can see how much by checking the return value.
|
||||
//
|
||||
// You shall need to provide an integer at @c c to contain either the decoded Unicode
|
||||
// codepoint or the little endian encoded UTF-16 bytes depending on the value of the @c cp argument.
|
||||
//
|
||||
// @param f A valid FILE descriptor from which to read the bytes
|
||||
// @param c Pass an int that will receive either the unicode code point value or
|
||||
// the UTF16 bytes depending on the value of the @c cp flag
|
||||
// @param cp A boolean flag. If @c true then the int passed at @c c will contain the unicode code point
|
||||
// of the read character, so the UTF-16 will be decoded.
|
||||
// If @c false the int passed at @c c will contain the value of the read bytes in UTF-16 without any decoding
|
||||
// @return Returns the number of bytes read (either @c 2 or @c 4) or an error if the function
|
||||
// fails for some reason. Possible error values are:
|
||||
// + @c RE_UTF16_INVALID_SEQUENCE: Either the read word or its surrogate pair if 4 bytes were read held illegal values
|
||||
// + @c RE_UTF16_NO_SURRPAIR: According to the first read word a surrogate pair was expected but none was found
|
||||
// + @c RE_FILE_EOF: The end of file has been found while reading. If the end of file is encountered
|
||||
// while we expect a UTF-16 surrogate pair an appropriate error is logged
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgetc_UTF16LE(FILE* f,uint32_t *c,char cp);
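// Illustrative usage sketch (added for clarity; not part of the original
// header). The wrapper name is hypothetical; it simply forwards to the
// declaration above with the @c cp flag set, so surrogate pairs are decoded.
static inline int32_t example_fgetc_utf16le_decoded(FILE* f, uint32_t* codepoint)
{
    // On success the return value is the number of bytes the file pointer
    // moved (2 or 4); otherwise it is one of the RE_* errors documented above.
    return rfFgetc_UTF16LE(f, codepoint, 1);
}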
|
||||
// @brief Gets a unicode character from a UTF-32 Little Endian file descriptor
|
||||
//
|
||||
// This function attempts to assume a more modern fgetc() role for UTF-32 encoded files.
|
||||
// Reads bytes from the File descriptor @c f until a full UTF-32 unicode character has been read
|
||||
//
|
||||
// After this function the file pointer will have moved by @c 4
|
||||
// bytes if the return value is positive.
|
||||
//
|
||||
// You shall need to provide an integer at @c c to contain the UTF-32 codepoint.
|
||||
//
|
||||
// @param f A valid FILE descriptor from which to read the bytes
|
||||
// @param c Pass an int that will receive the unicode code point value
|
||||
// of the read character. There is no @c cp flag here, since a UTF-32 code unit
|
||||
// already is the code point itself
|
||||
// @return Returns either @c RF_SUCCESS for successful reading or one of the following errors:
|
||||
// + @c RE_FILE_EOF: The end of file has been found while reading.
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgetc_UTF32LE(FILE* f,uint32_t *c);
|
||||
// @brief Gets a unicode character from a UTF-32 Big Endian file descriptor
|
||||
//
|
||||
// This function attempts to assume a more modern fgetc() role for UTF-32 encoded files.
|
||||
// Reads bytes from the File descriptor @c f until a full UTF-32 unicode character has been read
|
||||
//
|
||||
// After this function the file pointer will have moved by @c 4
|
||||
// bytes if the return value is positive.
|
||||
//
|
||||
// You shall need to provide an integer at @c c to contain the UTF-32 codepoint.
|
||||
//
|
||||
// @param f A valid FILE descriptor from which to read the bytes
|
||||
// @param c Pass an int that will receive the unicode code point value
|
||||
// of the read character. There is no @c cp flag here, since a UTF-32 code unit
|
||||
// already is the code point itself
|
||||
// @return Returns either @c RF_SUCCESS for successful reading or one of the following errors:
|
||||
// + @c RE_FILE_EOF: The end of file has been found while reading.
|
||||
// + @c RE_FILE_READ: If during reading the file there was an unknown read error
|
||||
// + @c RE_FILE_READ_BLOCK: If the read operation failed due to the file descriptor being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the file descriptor's mode was not correctly set for reading
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during reading, the current file position can't be represented by the system
|
||||
// + @c RE_INTERRUPT: If during reading, there was a system interrupt
|
||||
// + @c RE_FILE_IO: If there was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: If reading failed due to insufficient storage space
|
||||
i_DECLIMEX_ int32_t rfFgetc_UTF32BE(FILE* f,uint32_t *c);
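// Illustrative usage sketch (added for clarity; not part of the original
// header): reading every code point of a big endian UTF-32 file. The helper
// name is hypothetical; RF_SUCCESS is the success value documented above and
// is assumed to be defined by the library's error headers.
static inline void example_read_all_utf32be(FILE* f)
{
    uint32_t codepoint;
    while (rfFgetc_UTF32BE(f, &codepoint) == RF_SUCCESS)
    {
        // ... process 'codepoint' here ...
    }
    // the loop ends on RE_FILE_EOF or on one of the other errors listed above
}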
|
||||
|
||||
// @brief Moves a unicode character backwards in a big endian UTF-32 file stream
|
||||
//
|
||||
// @param f The file stream
|
||||
// @param c Returns the character we moved back to as a unicode codepoint
|
||||
// @return Returns either @c RF_SUCCESS for success or one of the following errors:
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during trying to read the current file's position it can't be represented by the system
|
||||
// + @c RE_FILE_BAD: If the file descriptor is corrupt/illegal
|
||||
// + @c RE_FILE_NOTFILE: If the file descriptor is not a file but something else, e.g. a socket
|
||||
// + @c RE_FILE_GETFILEPOS: If the file's position could not be retrieved for some unknown reason
|
||||
// + @c RE_FILE_WRITE_BLOCK: While attempting to move the file pointer, it was occupied by another thread, and the no block flag was set
|
||||
// + @c RE_INTERRUPT: Operating on the file failed due to a system interrupt
|
||||
// + @c RE_FILE_IO: There was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: There was no space on the device holding the file
|
||||
// + @c RE_FILE_NOTFILE: The device we attempted to manipulate is non-existent
|
||||
// + @c RE_FILE_READ: If during reading the file there was an error
|
||||
// + @c RE_FILE_READ_BLOCK: If during reading the file the read operation failed due to the file being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the underlying file descriptor's mode was not correctly set for reading
|
||||
i_DECLIMEX_ int32_t rfFback_UTF32BE(FILE* f,uint32_t *c);
|
||||
// @brief Moves a unicode character backwards in a little endian UTF-32 file stream
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that under
|
||||
// Windows you should call fopen with "wb", "rb", etc. instead of the simple "w", "r", etc. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// @param f The file stream
|
||||
// @param c Returns the character we moved back to as a unicode codepoint
|
||||
// @return Returns either @c RF_SUCCESS for success or one of the following errors:
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during trying to read the current file's position it can't be represented by the system
|
||||
// + @c RE_FILE_BAD: If the file descriptor is corrupt/illegal
|
||||
// + @c RE_FILE_NOTFILE: If the file descriptor is not a file but something else, e.g. a socket
|
||||
// + @c RE_FILE_GETFILEPOS: If the file's position could not be retrieved for some unknown reason
|
||||
// + @c RE_FILE_WRITE_BLOCK: While attempting to move the file pointer, it was occupied by another thread, and the no block flag was set
|
||||
// + @c RE_INTERRUPT: Operating on the file failed due to a system interrupt
|
||||
// + @c RE_FILE_IO: There was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: There was no space on the device holding the file
|
||||
// + @c RE_FILE_NOTFILE: The device we attempted to manipulate is non-existent
|
||||
// + @c RE_FILE_READ: If during reading the file there was an error
|
||||
// + @c RE_FILE_READ_BLOCK: If during reading the file the read operation failed due to the file being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the underlying file descriptor's mode was not correctly set for reading
|
||||
i_DECLIMEX_ int32_t rfFback_UTF32LE(FILE* f,uint32_t *c);
|
||||
// @brief Moves a unicode character backwards in a big endian UTF-16 file stream
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that under
|
||||
// Windows you should call fopen with "wb", "rb", etc. instead of the simple "w", "r", etc. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// @param f The file stream
|
||||
// @param c Returns the character we moved back to as a unicode codepoint
|
||||
// @return Returns either the number of bytes moved backwards (either @c 4 or @c 2) for success or one of the following errors:
|
||||
// + @c RE_UTF16_INVALID_SEQUENCE: Either the read word or its surrogate pair if 4 bytes were read held illegal values
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during trying to read the current file's position it can't be represented by the system
|
||||
// + @c RE_FILE_BAD: If the file descriptor is corrupt/illegal
|
||||
// + @c RE_FILE_NOTFILE: If the file descriptor is not a file but something else, e.g. a socket
|
||||
// + @c RE_FILE_GETFILEPOS: If the file's position could not be retrieved for some unknown reason
|
||||
// + @c RE_FILE_WRITE_BLOCK: While attempting to move the file pointer, it was occupied by another thread, and the no block flag was set
|
||||
// + @c RE_INTERRUPT: Operating on the file failed due to a system interrupt
|
||||
// + @c RE_FILE_IO: There was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: There was no space on the device holding the file
|
||||
// + @c RE_FILE_NOTFILE: The device we attempted to manipulate is non-existent
|
||||
// + @c RE_FILE_READ: If during reading the file there was an error
|
||||
// + @c RE_FILE_READ_BLOCK: If during reading the file the read operation failed due to the file being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the underlying file descriptor's mode was not correctly set for reading
|
||||
i_DECLIMEX_ int32_t rfFback_UTF16BE(FILE* f,uint32_t *c);
|
||||
// @brief Moves a unicode character backwards in a little endian UTF-16 file stream
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that under
|
||||
// Windows you should call fopen with "wb", "rb", etc. instead of the simple "w", "r", etc. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// @param f The file stream
|
||||
// @param c Returns the character we moved back to as a unicode codepoint
|
||||
// @return Returns either the number of bytes moved backwards (either @c 4 or @c 2) for success or one of the following errors:
|
||||
// + @c RE_UTF16_INVALID_SEQUENCE: Either the read word or its surrogate pair if 4 bytes were read held illegal values
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during trying to read the current file's position it can't be represented by the system
|
||||
// + @c RE_FILE_BAD: If the file descriptor is corrupt/illegal
|
||||
// + @c RE_FILE_NOTFILE: If the file descriptor is not a file but something else, e.g. a socket
|
||||
// + @c RE_FILE_GETFILEPOS: If the file's position could not be retrieved for some unknown reason
|
||||
// + @c RE_FILE_WRITE_BLOCK: While attempting to move the file pointer, it was occupied by another thread, and the no block flag was set
|
||||
// + @c RE_INTERRUPT: Operating on the file failed due to a system interrupt
|
||||
// + @c RE_FILE_IO: There was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: There was no space on the device holding the file
|
||||
// + @c RE_FILE_NOTFILE: The device we attempted to manipulate is non-existent
|
||||
// + @c RE_FILE_READ: If during reading the file there was an error
|
||||
// + @c RE_FILE_READ_BLOCK: If during reading the file the read operation failed due to the file being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the underlying file descriptor's mode was not correctly set for reading
|
||||
i_DECLIMEX_ int32_t rfFback_UTF16LE(FILE* f,uint32_t *c);
|
||||
// @brief Moves a unicode character backwards in a UTF-8 file stream
|
||||
//
|
||||
// The file descriptor at @c f must have been opened in <b>binary</b> and not text mode. That means that under
|
||||
// Windows you should call fopen with "wb", "rb", etc. instead of the simple "w", "r", etc. since the initial
|
||||
// default value under Windows is text mode. Alternatively you can set the initial value using _get_fmode() and
|
||||
// _set_fmode(). For more information take a look at the msdn pages here:
|
||||
// http://msdn.microsoft.com/en-us/library/ktss1a9b.aspx
|
||||
//
|
||||
// @param f The file stream
|
||||
// @param c Returns the character we moved back to as a unicode codepoint
|
||||
// @return Returns either the number of bytes moved backwards for success (either @c 4, @c 3, @c 2 or @c 1) or one of the following errors:
|
||||
// + @c RE_UTF8_INVALID_SEQUENCE: If during moving backwards in the file unexpected UTF-8 bytes were found
|
||||
// + @c RE_FILE_POS_OVERFLOW: If during trying to read the current file's position it can't be represented by the system
|
||||
// + @c RE_FILE_BAD: If the file descriptor is corrupt/illegal
|
||||
// + @c RE_FILE_NOTFILE: If the file descriptor is not a file but something else, e.g. a socket
|
||||
// + @c RE_FILE_GETFILEPOS: If the file's position could not be retrieved for some unknown reason
|
||||
// + @c RE_FILE_WRITE_BLOCK: While attempting to move the file pointer, it was occupied by another thread, and the no block flag was set
|
||||
// + @c RE_INTERRUPT: Operating on the file failed due to a system interrupt
|
||||
// + @c RE_FILE_IO: There was a physical I/O error
|
||||
// + @c RE_FILE_NOSPACE: There was no space on the device holding the file
|
||||
// + @c RE_FILE_NOTFILE: The device we attempted to manipulate is non-existent
|
||||
// + @c RE_FILE_READ: If during reading the file there was an error
|
||||
// + @c RE_FILE_READ_BLOCK: If during reading the file the read operation failed due to the file being occupied by another thread
|
||||
// + @c RE_FILE_MODE: If during reading the file the underlying file descriptor's mode was not correctly set for reading
|
||||
i_DECLIMEX_ int32_t rfFback_UTF8(FILE* f,uint32_t *c);
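// Illustrative usage sketch (added for clarity; not part of the original
// header): stepping one character back, e.g. to "un-read" a code point. The
// helper name is hypothetical and the sketch assumes the RE_* errors are
// negative, as suggested by the int32_t return type.
static inline int32_t example_unread_utf8(FILE* f, uint32_t* codepoint)
{
    // On success the return value is the number of bytes the file pointer
    // moved backwards (1 to 4) and *codepoint holds the character we are now
    // positioned at; a negative value is one of the errors documented above.
    return rfFback_UTF8(f, codepoint);
}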
|
||||
|
||||
// @brief Opens another process as a pipe
|
||||
//
|
||||
// This function is a cross-platform popen wrapper. In linux it uses popen and in Windows it uses
|
||||
// _popen.
|
||||
// @lmsFunction
|
||||
// @param command The string with the command to execute. It is basically the name of the program/process you want to spawn
|
||||
// with its full path and its parameters. @inhtype{String,StringX} @tmpSTR
|
||||
// @param mode The mode you want the pipe to work in. There are two possible values:
|
||||
// + @c "r" The calling process can read the spawned command's standard output via the returned stream.
|
||||
// + @c "w" The calling process can write to the spawned command's standard input via the returned stream.
|
||||
//
|
||||
// Anything else will result in an error
|
||||
// @return For success popen will return a FILE descriptor that can be used to either read or write from the pipe.
|
||||
// If there was an error @c 0 is returned and an error is logged.
|
||||
#ifdef RF_IAMHERE_FOR_DOXYGEN
|
||||
i_DECLIMEX_ FILE* rfPopen(void* command,const char* mode);
|
||||
#else
|
||||
i_DECLIMEX_ FILE* i_rfPopen(void* command,const char* mode);
|
||||
#define rfPopen(i_CMD_,i_MODE_) i_rfLMS_WRAP2(FILE*,i_rfPopen,i_CMD_,i_MODE_)
|
||||
#endif
|
||||
|
||||
// @brief Closes a pipe
|
||||
//
|
||||
// This function is a cross-platform wrapper for pclose. It closes a file descriptor opened with @ref rfPopen() and
|
||||
// returns the exit code of the process that was running
|
||||
// @param stream The file descriptor of the pipe returned by @ref rfPopen() that we want to close
|
||||
// @return Returns the exit code of the process or -1 if there was an error
|
||||
i_DECLIMEX_ int rfPclose(FILE* stream);
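// Illustrative usage sketch (added for clarity; not part of the original
// header). The helper name and the "ls -l" command are placeholders; according
// to the documentation above @c command is a String/StringX (or a temporary
// string), so passing a plain C literal here is an assumption of this sketch.
static inline int example_run_and_close(void)
{
    FILE* pipe = rfPopen("ls -l", "r"); // "r": read the command's stdout
    if (pipe == 0)
    {
        return -1; // rfPopen logs an error and returns 0 on failure
    }
    // ... read the spawned process' standard output from 'pipe' here ...
    return rfPclose(pipe); // exit code of the process, or -1 on error
}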
|
||||
|
||||
// @} End of I/O group
|
||||
|
||||
#ifdef __cplusplus
|
||||
}///closing bracket for calling from C++
|
||||
#endif
|
||||
|
||||
|
||||
#endif//include guards end
|
||||
2348 samples/C/rfc_string.c (new file; diff suppressed because it is too large)
1459 samples/C/rfc_string.h (new file; diff suppressed because it is too large)
15669 samples/C/sgd_fast.c (new file; diff suppressed because it is too large)
5 samples/C/syscalldefs.h (new file)
@@ -0,0 +1,5 @@
|
||||
static const syscalldef syscalldefs[] = {
|
||||
[SYSCALL_OR_NUM(0, SYS_restart_syscall)] = MAKE_UINT16(0, 1),
|
||||
[SYSCALL_OR_NUM(1, SYS_exit)] = MAKE_UINT16(1, 17),
|
||||
[SYSCALL_OR_NUM(2, SYS_fork)] = MAKE_UINT16(0, 22),
|
||||
};
|
||||
1363 samples/C/wglew.h (new file; diff suppressed because it is too large)
5 samples/COBOL/hello_world.cbl (new file)
@@ -0,0 +1,5 @@
|
||||
program-id. hello.
|
||||
procedure division.
|
||||
display "Hello, World!".
|
||||
stop run.
|
||||
|
||||
6 samples/COBOL/hello_world.ccp (new file)
@@ -0,0 +1,6 @@
|
||||
IDENTIFICATION DIVISION.
|
||||
PROGRAM-ID. hello.
|
||||
PROCEDURE DIVISION.
|
||||
DISPLAY "Hello World, yet again.".
|
||||
STOP RUN.
|
||||
|
||||
6 samples/COBOL/hello_world.cob (new file)
@@ -0,0 +1,6 @@
|
||||
IDENTIFICATION DIVISION.
|
||||
PROGRAM-ID. hello.
|
||||
PROCEDURE DIVISION.
|
||||
DISPLAY "Hello World!".
|
||||
STOP RUN.
|
||||
|
||||
7 samples/COBOL/simple.cpy (new file)
@@ -0,0 +1,7 @@
|
||||
01 COBOL-TEST-RECORD.
|
||||
05 COBOL-TEST-USAGES.
|
||||
10 COBOL-4-COMP PIC S9(4) COMP.
|
||||
10 COBOL-8-COMP PIC S9(8) COMP.
|
||||
10 COBOL-9-COMP PIC S9(9) COMP.
|
||||
10 COBOL-4-COMP2 PIC S9(4) COMP-2.
|
||||
10 COBOL-7-COMP2 PIC 9(7) COMP-2.
|
||||
6307 samples/CSS/bootstrap.css (new vendored file; diff suppressed because it is too large)
873 samples/CSS/bootstrap.min.css (new vendored file; diff suppressed because one or more lines are too long)
17 samples/Clojure/for.clj (new file)
@@ -0,0 +1,17 @@
|
||||
(defn prime? [n]
|
||||
(not-any? zero? (map #(rem n %) (range 2 n))))
|
||||
|
||||
(range 3 33 2)
|
||||
'(3 5 7 9 11 13 15 17 19 21 23 25 27 29 31)
|
||||
|
||||
;; :when continues through the collection even if some have the
|
||||
;; condition evaluate to false, like filter
|
||||
(for [x (range 3 33 2) :when (prime? x)]
|
||||
x)
|
||||
'(3 5 7 11 13 17 19 23 29 31)
|
||||
|
||||
;; :while stops at the first collection element that evaluates to
|
||||
;; false, like take-while
|
||||
(for [x (range 3 33 2) :while (prime? x)]
|
||||
x)
|
||||
'(3 5 7)
|
||||
8 samples/Clojure/hiccup.hic (new file)
@@ -0,0 +1,8 @@
|
||||
[:html
|
||||
[:head
|
||||
[:meta {:charset "utf-8"}]
|
||||
[:link {:rel "stylesheet" :href "css/bootstrap.min.css"}]
|
||||
[:script {:src "app.js"}]]
|
||||
[:body
|
||||
[:div.nav
|
||||
[:p "Hello world!"]]]]
|
||||
13 samples/Clojure/into-array.cljc (new file)
@@ -0,0 +1,13 @@
|
||||
(defn into-array
|
||||
([aseq]
|
||||
(into-array nil aseq))
|
||||
([type aseq]
|
||||
(let [n (count aseq)
|
||||
a (make-array n)]
|
||||
(loop [aseq (seq aseq)
|
||||
i 0]
|
||||
(if (< i n)
|
||||
(do
|
||||
(aset a i (first aseq))
|
||||
(recur (next aseq) (inc i)))
|
||||
a)))))
|
||||
15 samples/Clojure/protocol.cljs (new file)
@@ -0,0 +1,15 @@
|
||||
(defprotocol ISound (sound []))
|
||||
|
||||
(deftype Cat []
|
||||
ISound
|
||||
(sound [_] "Meow!"))
|
||||
|
||||
(deftype Dog []
|
||||
ISound
|
||||
(sound [_] "Woof!"))
|
||||
|
||||
(extend-type default
|
||||
ISound
|
||||
(sound [_] "... silence ..."))
|
||||
|
||||
(sound 1) ;; => "... silence ..."
|
||||
5 samples/Clojure/rand.cljscm (new file)
@@ -0,0 +1,5 @@
|
||||
(defn rand
|
||||
"Returns a random floating point number between 0 (inclusive) and
|
||||
n (default 1) (exclusive)."
|
||||
([] (scm* [n] (random-real)))
|
||||
([n] (* (rand) n)))
|
||||
20 samples/Clojure/svg.cljx (new file)
@@ -0,0 +1,20 @@
|
||||
^:clj (ns c2.svg
|
||||
(:use [c2.core :only [unify]]
|
||||
[c2.maths :only [Pi Tau radians-per-degree
|
||||
sin cos mean]]))
|
||||
|
||||
^:cljs (ns c2.svg
|
||||
(:use [c2.core :only [unify]]
|
||||
[c2.maths :only [Pi Tau radians-per-degree
|
||||
sin cos mean]])
|
||||
(:require [c2.dom :as dom]))
|
||||
|
||||
;;Stub for float fn, which does not exist on cljs runtime
|
||||
^:cljs (def float identity)
|
||||
|
||||
(defn ->xy
|
||||
"Convert coordinates (potentially map of `{:x :y}`) to 2-vector."
|
||||
[coordinates]
|
||||
(cond
|
||||
(and (vector? coordinates) (= 2 (count coordinates))) coordinates
|
||||
(map? coordinates) [(:x coordinates) (:y coordinates)]))
|
||||
20 samples/Clojure/unit-test.cl2 (new file)
@@ -0,0 +1,20 @@
|
||||
(deftest function-tests
|
||||
(is (= 3
|
||||
(count [1 2 3])))
|
||||
(is (= false
|
||||
(not true)))
|
||||
(is (= true
|
||||
(contains? {:foo 1 :bar 2} :foo)))
|
||||
|
||||
(is (= {"foo" 1, "baz" 3}
|
||||
(select-keys {:foo 1 :bar 2 :baz 3} [:foo :baz])))
|
||||
|
||||
(is (= [1 2 3]
|
||||
(vals {:foo 1 :bar 2 :baz 3})))
|
||||
|
||||
(is (= ["foo" "bar" "baz"]
|
||||
(keys {:foo 1 :bar 2 :baz 3})))
|
||||
|
||||
(is (= [2 4 6]
|
||||
(filter (fn [x] (=== (rem x 2) 0)) [1 2 3 4 5 6]))))
|
||||
|
||||
21 samples/Common Lisp/sample.lisp (new file)
@@ -0,0 +1,21 @@
|
||||
;;;; -*- lisp -*-
|
||||
|
||||
(in-package :foo)
|
||||
|
||||
;;; Header comment.
|
||||
(defvar *foo*)
|
||||
|
||||
(eval-when (:execute :compile-toplevel :load-toplevel)
|
||||
(defun add (x &optional y &key z)
|
||||
(declare (ignore z))
|
||||
;; Inline comment.
|
||||
(+ x (or y 1))))
|
||||
|
||||
#|
|
||||
Multi-line comment.
|
||||
|#
|
||||
|
||||
(defmacro foo (x &body b)
|
||||
(if x
|
||||
`(1+ ,x) ;After-line comment.
|
||||
42))
|
||||
@@ -1,13 +1,3 @@
|
||||
(************************************************************************)
|
||||
(* v * The Coq Proof Assistant / The Coq Development Team *)
|
||||
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
|
||||
(* \VV/ **************************************************************)
|
||||
(* // * This file is distributed under the terms of the *)
|
||||
(* * GNU Lesser General Public License Version 2.1 *)
|
||||
(************************************************************************)
|
||||
|
||||
(** This file is deprecated, for a tree on list, use [Mergesort.v]. *)
|
||||
|
||||
(** A development of Treesort on Heap trees. It has an average
|
||||
complexity of O(n.log n) but of O(n²) in the worst case (e.g. if
|
||||
the list is already sorted) *)
|
||||
@@ -88,9 +78,9 @@ Section defs.
|
||||
forall P:Tree -> Type,
|
||||
P Tree_Leaf ->
|
||||
(forall (a:A) (T1 T2:Tree),
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
|
||||
forall T:Tree, is_heap T -> P T.
|
||||
Proof.
|
||||
simple induction T; auto with datatypes.
|
||||
@@ -105,9 +95,9 @@ Section defs.
|
||||
forall P:Tree -> Set,
|
||||
P Tree_Leaf ->
|
||||
(forall (a:A) (T1 T2:Tree),
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
|
||||
leA_Tree a T1 ->
|
||||
leA_Tree a T2 ->
|
||||
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
|
||||
forall T:Tree, is_heap T -> P T.
|
||||
Proof.
|
||||
simple induction T; auto with datatypes.
|
||||
@@ -135,13 +125,13 @@ Section defs.
|
||||
(forall a, HdRel leA a l1 -> HdRel leA a l2 -> HdRel leA a l) ->
|
||||
merge_lem l1 l2.
|
||||
Require Import Morphisms.
|
||||
|
||||
|
||||
Instance: Equivalence (@meq A).
|
||||
Proof. constructor; auto with datatypes. red. apply meq_trans. Defined.
|
||||
|
||||
Instance: Proper (@meq A ++> @meq _ ++> @meq _) (@munion A).
|
||||
Proof. intros x y H x' y' H'. now apply meq_congr. Qed.
|
||||
|
||||
|
||||
Lemma merge :
|
||||
forall l1:list A, Sorted leA l1 ->
|
||||
forall l2:list A, Sorted leA l2 -> merge_lem l1 l2.
|
||||
@@ -150,8 +140,8 @@ Section defs.
|
||||
apply merge_exist with l2; auto with datatypes.
|
||||
rename l1 into l.
|
||||
revert l2 H0. fix 1. intros.
|
||||
destruct l2 as [|a0 l0].
|
||||
apply merge_exist with (a :: l); simpl; auto with datatypes.
|
||||
destruct l2 as [|a0 l0].
|
||||
apply merge_exist with (a :: l); simpl; auto with datatypes.
|
||||
elim (leA_dec a a0); intros.
|
||||
|
||||
(* 1 (leA a a0) *)
|
||||
@@ -159,18 +149,18 @@ Section defs.
|
||||
destruct (merge l H (a0 :: l0) H0).
|
||||
apply merge_exist with (a :: l1). clear merge merge0.
|
||||
auto using cons_sort, cons_leA with datatypes.
|
||||
simpl. rewrite m. now rewrite munion_ass.
|
||||
intros. apply cons_leA.
|
||||
simpl. rewrite m. now rewrite munion_ass.
|
||||
intros. apply cons_leA.
|
||||
apply (@HdRel_inv _ leA) with l; trivial with datatypes.
|
||||
|
||||
(* 2 (leA a0 a) *)
|
||||
apply Sorted_inv in H0. destruct H0.
|
||||
destruct (merge0 l0 H0). clear merge merge0.
|
||||
apply merge_exist with (a0 :: l1);
|
||||
destruct (merge0 l0 H0). clear merge merge0.
|
||||
apply merge_exist with (a0 :: l1);
|
||||
auto using cons_sort, cons_leA with datatypes.
|
||||
simpl; rewrite m. simpl. setoid_rewrite munion_ass at 1. rewrite munion_comm.
|
||||
repeat rewrite munion_ass. setoid_rewrite munion_comm at 3. reflexivity.
|
||||
intros. apply cons_leA.
|
||||
intros. apply cons_leA.
|
||||
apply (@HdRel_inv _ leA) with l0; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
@@ -186,7 +176,7 @@ Section defs.
|
||||
match t with
|
||||
| Tree_Leaf => emptyBag
|
||||
| Tree_Node a t1 t2 =>
|
||||
munion (contents t1) (munion (contents t2) (singletonBag a))
|
||||
munion (contents t1) (munion (contents t2) (singletonBag a))
|
||||
end.
|
||||
|
||||
|
||||
@@ -272,11 +262,11 @@ Section defs.
|
||||
apply flat_exist with (a :: l); simpl; auto with datatypes.
|
||||
apply meq_trans with
|
||||
(munion (list_contents _ eqA_dec l1)
|
||||
(munion (list_contents _ eqA_dec l2) (singletonBag a))).
|
||||
(munion (list_contents _ eqA_dec l2) (singletonBag a))).
|
||||
apply meq_congr; auto with datatypes.
|
||||
apply meq_trans with
|
||||
(munion (singletonBag a)
|
||||
(munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2))).
|
||||
(munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2))).
|
||||
apply munion_rotate.
|
||||
apply meq_right; apply meq_sym; trivial with datatypes.
|
||||
Qed.
|
||||
|
||||
@@ -1,11 +1,3 @@
|
||||
(************************************************************************)
|
||||
(* v * The Coq Proof Assistant / The Coq Development Team *)
|
||||
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
|
||||
(* \VV/ **************************************************************)
|
||||
(* // * This file is distributed under the terms of the *)
|
||||
(* * GNU Lesser General Public License Version 2.1 *)
|
||||
(************************************************************************)
|
||||
|
||||
Require Import Omega Relations Multiset SetoidList.
|
||||
|
||||
(** This file is deprecated, use [Permutation.v] instead.
|
||||
@@ -154,7 +146,7 @@ Lemma permut_add_cons_inside :
|
||||
Proof.
|
||||
intros;
|
||||
replace (a :: l) with ([] ++ a :: l); trivial;
|
||||
apply permut_add_inside; trivial.
|
||||
apply permut_add_inside; trivial.
|
||||
Qed.
|
||||
|
||||
Lemma permut_middle :
|
||||
@@ -168,8 +160,8 @@ Lemma permut_sym_app :
|
||||
Proof.
|
||||
intros l1 l2;
|
||||
unfold permutation, meq;
|
||||
intro a; do 2 rewrite list_contents_app; simpl;
|
||||
auto with arith.
|
||||
intro a; do 2 rewrite list_contents_app; simpl;
|
||||
auto with arith.
|
||||
Qed.
|
||||
|
||||
Lemma permut_rev :
|
||||
|
||||
@@ -1,17 +1,5 @@
|
||||
(************************************************************************)
|
||||
(* v * The Coq Proof Assistant / The Coq Development Team *)
|
||||
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
|
||||
(* \VV/ **************************************************************)
|
||||
(* // * This file is distributed under the terms of the *)
|
||||
(* * GNU Lesser General Public License Version 2.1 *)
|
||||
(************************************************************************)
|
||||
|
||||
(*********************************************************************)
|
||||
(** * List permutations as a composition of adjacent transpositions *)
|
||||
(*********************************************************************)
|
||||
|
||||
(* Adapted in May 2006 by Jean-Marc Notin from initial contents by
|
||||
Laurent Théry (Huffmann contribution, October 2003) *)
|
||||
Laurent Thery (Huffmann contribution, October 2003) *)
|
||||
|
||||
Require Import List Setoid Compare_dec Morphisms.
|
||||
Import ListNotations. (* For notations [] and [a;b;c] *)
|
||||
|
||||
@@ -1,10 +1,2 @@
|
||||
(************************************************************************)
|
||||
(* v * The Coq Proof Assistant / The Coq Development Team *)
|
||||
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
|
||||
(* \VV/ **************************************************************)
|
||||
(* // * This file is distributed under the terms of the *)
|
||||
(* * GNU Lesser General Public License Version 2.1 *)
|
||||
(************************************************************************)
|
||||
|
||||
Require Export Sorted.
|
||||
Require Export Mergesort.
|
||||
|
||||
47 samples/Creole/creole.creole (new file)
@@ -0,0 +1,47 @@
|
||||
= Creole
|
||||
|
||||
Creole is a Creole-to-HTML converter for Creole, the lightweight markup
|
||||
language (http://wikicreole.org/). Github uses this converter to render *.creole files.
|
||||
|
||||
Project page on github:
|
||||
|
||||
* http://github.com/minad/creole
|
||||
|
||||
Travis-CI:
|
||||
|
||||
* https://travis-ci.org/minad/creole
|
||||
|
||||
RDOC:
|
||||
|
||||
* http://rdoc.info/projects/minad/creole
|
||||
|
||||
== INSTALLATION
|
||||
|
||||
{{{
|
||||
gem install creole
|
||||
}}}
|
||||
|
||||
== SYNOPSIS
|
||||
|
||||
{{{
|
||||
require 'creole'
|
||||
html = Creole.creolize('== Creole text')
|
||||
}}}
|
||||
|
||||
== BUGS
|
||||
|
||||
If you found a bug, please report it at the Creole project's tracker
|
||||
on GitHub:
|
||||
|
||||
http://github.com/minad/creole/issues
|
||||
|
||||
== AUTHORS
|
||||
|
||||
* Lars Christensen (larsch)
|
||||
* Daniel Mendler (minad)
|
||||
|
||||
== LICENSE
|
||||
|
||||
Creole is Copyright (c) 2008 - 2013 Lars Christensen, Daniel Mendler. It is free software, and
|
||||
may be redistributed under the terms specified in the README file of
|
||||
the Ruby distribution.
|
||||
52 samples/Cuda/scalarProd_kernel.cuh (new file)
@@ -0,0 +1,52 @@
|
||||
__global__ void scalarProdGPU(
|
||||
float *d_C,
|
||||
float *d_A,
|
||||
float *d_B,
|
||||
int vectorN,
|
||||
int elementN
|
||||
)
|
||||
{
|
||||
//Accumulators cache
|
||||
__shared__ float accumResult[ACCUM_N];
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////
|
||||
// Cycle through every pair of vectors,
|
||||
// taking into account that vector counts can be different
|
||||
// from total number of thread blocks
|
||||
////////////////////////////////////////////////////////////////////////////
|
||||
for (int vec = blockIdx.x; vec < vectorN; vec += gridDim.x)
|
||||
{
|
||||
int vectorBase = IMUL(elementN, vec);
|
||||
int vectorEnd = vectorBase + elementN;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////
|
||||
// Each accumulator cycles through vectors with
|
||||
// stride equal to number of total number of accumulators ACCUM_N
|
||||
// At this stage ACCUM_N is only preferred be a multiple of warp size
|
||||
// to meet memory coalescing alignment constraints.
|
||||
////////////////////////////////////////////////////////////////////////
|
||||
for (int iAccum = threadIdx.x; iAccum < ACCUM_N; iAccum += blockDim.x)
|
||||
{
|
||||
float sum = 0;
|
||||
|
||||
for (int pos = vectorBase + iAccum; pos < vectorEnd; pos += ACCUM_N)
|
||||
sum += d_A[pos] * d_B[pos];
|
||||
|
||||
accumResult[iAccum] = sum;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////
|
||||
// Perform tree-like reduction of accumulators' results.
|
||||
// ACCUM_N has to be power of two at this stage
|
||||
////////////////////////////////////////////////////////////////////////
|
||||
for (int stride = ACCUM_N / 2; stride > 0; stride >>= 1)
|
||||
{
|
||||
__syncthreads();
|
||||
|
||||
for (int iAccum = threadIdx.x; iAccum < stride; iAccum += blockDim.x)
|
||||
accumResult[iAccum] += accumResult[stride + iAccum];
|
||||
}
|
||||
|
||||
if (threadIdx.x == 0) d_C[vec] = accumResult[0];
|
||||
}
|
||||
}
|
||||
46 samples/Cuda/vectorAdd.cu (new file)
@@ -0,0 +1,46 @@
|
||||
#include <stdio.h>
|
||||
#include <cuda_runtime.h>
|
||||
|
||||
/**
|
||||
* CUDA Kernel Device code
|
||||
*
|
||||
* Computes the vector addition of A and B into C. The 3 vectors have the same
|
||||
* number of elements numElements.
|
||||
*/
|
||||
__global__ void
|
||||
vectorAdd(const float *A, const float *B, float *C, int numElements)
|
||||
{
|
||||
int i = blockDim.x * blockIdx.x + threadIdx.x;
|
||||
|
||||
if (i < numElements)
|
||||
{
|
||||
C[i] = A[i] + B[i];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Host main routine
|
||||
*/
|
||||
int
|
||||
main(void)
|
||||
{
|
||||
// Error code to check return values for CUDA calls
|
||||
cudaError_t err = cudaSuccess;
|
||||
|
||||
// Launch the Vector Add CUDA Kernel
|
||||
int threadsPerBlock = 256;
|
||||
int blocksPerGrid =(numElements + threadsPerBlock - 1) / threadsPerBlock;
|
||||
vectorAdd<<<blocksPerGrid, threadsPerBlock>>>(d_A, d_B, d_C, numElements);
|
||||
err = cudaGetLastError();
|
||||
|
||||
if (err != cudaSuccess)
|
||||
{
|
||||
fprintf(stderr, "Failed to launch vectorAdd kernel (error code %s)!\n", cudaGetErrorString(err));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
// Reset the device and exit
|
||||
err = cudaDeviceReset();
|
||||
|
||||
return 0;
|
||||
}
|
||||
87 samples/DM/example.dm (new file)
@@ -0,0 +1,87 @@
// This is a single line comment.
/*
    This is a multi-line comment
*/

// Pre-processor keywords

#define PI 3.1415

#if PI == 4

#define G 5

#elif PI == 3

#define I 6

#else

#define K 7

#endif


var/GlobalCounter = 0
var/const/CONST_VARIABLE = 2
var/list/MyList = list("anything", 1, new /datum/entity)
var/list/EmptyList[99] // creates a list of 99 null entries
var/list/NullList = null

/*
    Entity Class
*/

/datum/entity
    var/name = "Entity"
    var/number = 0

/datum/entity/proc/myFunction()
    world.log << "Entity has called myFunction"

/datum/entity/New()
    number = GlobalCounter++

/*
    Unit Class, Extends from Entity
*/

/datum/entity/unit
    name = "Unit"

/datum/entity/unit/New()
    ..() // calls the parent's proc; equal to super() and base() in other languages
    number = rand(1, 99)

/datum/entity/unit/myFunction()
    world.log << "Unit has overridden and called myFunction"

// Global Function
/proc/ReverseList(var/list/input)
    var/list/output = list()
    for(var/i = input.len; i >= 1; i--) // IMPORTANT: List Arrays count from 1.
        output += input[i] // "+= x" is ".Add(x)"
    return output

// Bitflags
/proc/DoStuff()
    var/bitflag = 0
    bitflag |= 8
    return bitflag

/proc/DoOtherStuff()
    var/bitflag = 65535 // 16 bits is the maximum amount
    bitflag &= ~8
    return bitflag

// Logic
/proc/DoNothing()
    var/pi = PI
    if(pi == 4)
        world.log << "PI is 4"
    else if(pi == CONST_VARIABLE)
        world.log << "PI is [CONST_VARIABLE]!"
    else
        world.log << "PI is approximately [pi]"

#undef PI // Undefine PI
42
samples/ECL/sample.ecl
Normal file
@@ -0,0 +1,42 @@
/*
 * Multi-line comment
 */
#option ('slidingJoins', true);

namesRecord :=
    RECORD
        string20 surname;
        string10 forename;
        integer2 age;
        integer2 dadAge;
        integer2 mumAge;
    END;

namesRecord2 :=
    record
        string10 extra;
        namesRecord;
    end;

namesTable := dataset('x',namesRecord,FLAT);
namesTable2 := dataset('y',namesRecord2,FLAT);

integer2 aveAgeL(namesRecord l) := (l.dadAge+l.mumAge)/2;
integer2 aveAgeR(namesRecord2 r) := (r.dadAge+r.mumAge)/2;

// Standard join on a function of left and right
output(join(namesTable, namesTable2, aveAgeL(left) = aveAgeR(right)));

//Several simple examples of sliding join syntax
output(join(namesTable, namesTable2, left.age >= right.age - 10 and left.age <= right.age +10));
output(join(namesTable, namesTable2, left.age between right.age - 10 and right.age +10));
output(join(namesTable, namesTable2, left.age between right.age + 10 and right.age +30));
output(join(namesTable, namesTable2, left.age between (right.age + 20) - 10 and (right.age +20) + 10));
output(join(namesTable, namesTable2, aveAgeL(left) between aveAgeR(right)+10 and aveAgeR(right)+40));

//Same, but on strings. Also includes age to ensure sort is done by non-sliding before sliding.
output(join(namesTable, namesTable2, left.surname between right.surname[1..10]+'AAAAAAAAAA' and right.surname[1..10]+'ZZZZZZZZZZ' and left.age=right.age));
output(join(namesTable, namesTable2, left.surname between right.surname[1..10]+'AAAAAAAAAA' and right.surname[1..10]+'ZZZZZZZZZZ' and left.age=right.age,all));

//This should not generate a self join
output(join(namesTable, namesTable, left.age between right.age - 10 and right.age +10));
127
samples/Elm/Basic.elm
Normal file
@@ -0,0 +1,127 @@
|
||||
|
||||
import List (intercalate,intersperse)
|
||||
import Website.Skeleton
|
||||
import Website.ColorScheme
|
||||
|
||||
addFolder folder lst =
|
||||
let add (x,y) = (x, folder ++ y ++ ".elm") in
|
||||
let f (n,xs) = (n, map add xs) in
|
||||
map f lst
|
||||
|
||||
elements = addFolder "Elements/"
|
||||
[ ("Primitives",
|
||||
[ ("Text" , "HelloWorld")
|
||||
, ("Images", "Image")
|
||||
, ("Fitted Images", "FittedImage")
|
||||
, ("Videos", "Video")
|
||||
, ("Markdown", "Markdown")
|
||||
])
|
||||
, ("Formatting",
|
||||
[ ("Size" , "Size")
|
||||
, ("Opacity" , "Opacity")
|
||||
, ("Text" , "Text")
|
||||
, ("Typeface", "Typeface")
|
||||
])
|
||||
, ("Layout",
|
||||
[ ("Simple Flow", "FlowDown1a")
|
||||
, ("Flow Down" , "FlowDown2")
|
||||
, ("Layers" , "Layers")
|
||||
, ("Positioning", "Position")
|
||||
, ("Spacers" , "Spacer")
|
||||
])
|
||||
, ("Collage", [ ("Lines" , "Lines")
|
||||
, ("Shapes" , "Shapes")
|
||||
, ("Sprites" , "Sprite")
|
||||
, ("Elements" , "ToForm")
|
||||
, ("Colors" , "Color")
|
||||
, ("Textures" , "Texture")
|
||||
, ("Transforms", "Transforms")
|
||||
])
|
||||
]
|
||||
|
||||
|
||||
functional = addFolder "Functional/"
|
||||
[ ("Recursion",
|
||||
[ ("Factorial" , "Factorial")
|
||||
, ("List Length", "Length")
|
||||
, ("Zip" , "Zip")
|
||||
, ("Quick Sort" , "QuickSort")
|
||||
])
|
||||
, ("Functions",
|
||||
[ ("Anonymous Functions", "Anonymous")
|
||||
, ("Application" , "Application")
|
||||
, ("Composition" , "Composition")
|
||||
, ("Infix Operators" , "Infix")
|
||||
])
|
||||
, ("Higher-Order",
|
||||
[ ("Map" , "Map")
|
||||
, ("Fold" , "Sum")
|
||||
, ("Filter" , "Filter")
|
||||
, ("ZipWith", "ZipWith")
|
||||
])
|
||||
, ("Data Types",
|
||||
[ ("Maybe", "Maybe")
|
||||
, ("Boolean Expressions", "BooleanExpressions")
|
||||
, ("Tree", "Tree")
|
||||
])
|
||||
]
|
||||
|
||||
reactive = addFolder "Reactive/"
|
||||
[ ("Mouse", [ ("Position", "Position")
|
||||
, ("Presses" , "IsDown")
|
||||
, ("Clicks" , "CountClicks")
|
||||
, ("Position+Image", "ResizeYogi")
|
||||
, ("Position+Collage" , "Transforms")
|
||||
-- , ("Hover" , "IsAbove")
|
||||
])
|
||||
,("Keyboard",[ ("Keys Down" , "KeysDown")
|
||||
, ("Key Presses", "CharPressed")
|
||||
])
|
||||
, ("Window", [ ("Size", "ResizePaint")
|
||||
, ("Centering", "Centering")
|
||||
])
|
||||
, ("Time", [ ("Before and After", "Between")
|
||||
, ("Every" , "Every")
|
||||
, ("Clock" , "Clock")
|
||||
])
|
||||
, ("Input", [ ("Text Fields", "TextField")
|
||||
, ("Passwords" , "Password")
|
||||
, ("Check Boxes", "CheckBox")
|
||||
, ("String Drop Down", "StringDropDown")
|
||||
, ("Drop Down", "DropDown")
|
||||
])
|
||||
, ("Random", [ ("Randomize", "Randomize") ])
|
||||
, ("HTTP", [ ("Zip Codes", "ZipCodes") ])
|
||||
, ("Filters",[ ("Sample", "SampleOn")
|
||||
, ("Keep If", "KeepIf")
|
||||
, ("Drop Repeats", "DropRepeats")
|
||||
])
|
||||
]
|
||||
|
||||
example (name, loc) = Text.link ("/edit/examples/" ++ loc) (toText name)
|
||||
toLinks (title, links) =
|
||||
flow right [ width 130 (text $ toText " " ++ italic (toText title))
|
||||
, text (intercalate (bold . Text.color accent4 $ toText " · ") $ map example links)
|
||||
]
|
||||
|
||||
insertSpace lst = case lst of { x:xs -> x : spacer 1 5 : xs ; [] -> [] }
|
||||
|
||||
subsection w (name,info) =
|
||||
flow down . insertSpace . intersperse (spacer 1 1) . map (width w) $
|
||||
(text . bold $ toText name) : map toLinks info
|
||||
|
||||
words = [markdown|
|
||||
|
||||
### Basic Examples
|
||||
|
||||
Each example listed below focuses on a single function or concept.
|
||||
These examples demonstrate all of the basic building blocks of Elm.
|
||||
|
||||
|]
|
||||
|
||||
content w =
|
||||
words : map (subsection w) [ ("Display",elements), ("React",reactive), ("Compute",functional) ]
|
||||
|
||||
exampleSets w = flow down . map (width w) . intersperse (plainText " ") $ content w
|
||||
|
||||
main = lift (skeleton exampleSets) Window.width
|
||||
32
samples/Elm/QuickSort.elm
Normal file
@@ -0,0 +1,32 @@

main = asText (qsort [3,9,1,8,5,4,7])

qsort lst =
  case lst of
    x:xs -> qsort (filter ((>=)x) xs) ++ [x] ++ qsort (filter ((<)x) xs)
    []   -> []


{---------------------

QuickSort works as follows:
 - Choose a pivot element which will be placed in the "middle" of the sorted list.
   In our case we are choosing the first element as the pivot.
 - Gather all of the elements less than the pivot (the first filter).
   We know that these must come before our pivot element in the sorted list.
   Note: ((>=)x) === (\y -> (>=) x y) === (\y -> x >= y)
 - Gather all of the elements greater than the pivot (the second filter).
   We know that these must come after our pivot element in the sorted list.
 - Run `qsort` on the lesser elements, producing a sorted list that contains
   only elements less than the pivot. Put these before the pivot.
 - Run `qsort` on the greater elements, producing a sorted list. Put these
   after the pivot.

Note that choosing a bad pivot can have bad effects. Take a sorted list with
N elements. The pivot will always be the lowest member, meaning that it does
not divide the list very evenly. The list of lessers has 0 elements
and the list of greaters has N-1 elements. This means qsort will be called
N times, each call looking through the entire list. This means, in the worst
case, QuickSort will make N^2 comparisons.

----------------------}
91
samples/Elm/Tree.elm
Normal file
@@ -0,0 +1,91 @@
|
||||
|
||||
{-----------------------------------------------------------------
|
||||
|
||||
Overview: A "Tree" represents a binary tree. A "Node" in a binary
|
||||
tree always has two children. A tree can also be "Empty". Below
|
||||
I have defined "Tree" and a number of useful functions.
|
||||
|
||||
This example also includes some challenge problems :)
|
||||
|
||||
-----------------------------------------------------------------}
|
||||
|
||||
|
||||
data Tree a = Node a (Tree a) (Tree a) | Empty
|
||||
|
||||
empty = Empty
|
||||
singleton v = Node v Empty Empty
|
||||
|
||||
insert x tree =
|
||||
case tree of
|
||||
Empty -> singleton x
|
||||
Node y left right ->
|
||||
if x == y then tree else
|
||||
if x < y then Node y (insert x left) right
|
||||
else Node y left (insert x right)
|
||||
|
||||
fromList xs = foldl insert empty xs
|
||||
|
||||
depth tree =
|
||||
case tree of
|
||||
Node v left right -> 1 + max (depth left) (depth right)
|
||||
Empty -> 0
|
||||
|
||||
map f tree =
|
||||
case tree of
|
||||
Node v left right -> Node (f v) (map f left) (map f right)
|
||||
Empty -> Empty
|
||||
|
||||
t1 = fromList [1,2,3]
|
||||
t2 = fromList [2,1,3]
|
||||
|
||||
main = flow down [ display "depth" depth t1
|
||||
, display "depth" depth t2
|
||||
, display "map ((+)1)" (map ((+)1)) t2
|
||||
]
|
||||
|
||||
display name f v =
|
||||
text . monospace . toText $
|
||||
concat [ show (f v), " ⇐ ", name, " ", show v ]
|
||||
|
||||
{-----------------------------------------------------------------
|
||||
|
||||
Exercises:
|
||||
|
||||
(1) Sum all of the elements of a tree.
|
||||
|
||||
sum :: Tree Number -> Number
|
||||
|
||||
(2) Flatten a tree into a list.
|
||||
|
||||
flatten :: Tree a -> [a]
|
||||
|
||||
(3) Check to see if an element is in a given tree.
|
||||
|
||||
isElement :: a -> Tree a -> Bool
|
||||
|
||||
(4) Write a general fold function that acts on trees. The fold
|
||||
function does not need to guarantee a particular order of
|
||||
traversal.
|
||||
|
||||
fold :: (a -> b -> b) -> b -> Tree a -> b
|
||||
|
||||
(5) Use "fold" to do exercises 1-3 in one line each. The best
|
||||
readable versions I have come up with have the following length
|
||||
in characters including spaces and function name:
|
||||
sum: 16
|
||||
flatten: 21
|
||||
isElement: 46
|
||||
See if you can match or beat me! Don't forget about currying
|
||||
and partial application!
|
||||
|
||||
(6) Can "fold" be used to implement "map" or "depth"?
|
||||
|
||||
(7) Try experimenting with different ways to traverse a
|
||||
tree: pre-order, in-order, post-order, depth-first, etc.
|
||||
More info at: http://en.wikipedia.org/wiki/Tree_traversal
|
||||
|
||||
-----------------------------------------------------------------}
|
||||
|
||||
|
||||
|
||||
|
||||
473
samples/Emacs Lisp/ess-julia.el
Normal file
@@ -0,0 +1,473 @@
|
||||
;; ess-julia.el --- ESS julia mode and inferior interaction
|
||||
;;
|
||||
;; Copyright (C) 2012 Vitalie Spinu.
|
||||
;;
|
||||
;; Filename: ess-julia.el
|
||||
;; Author: Vitalie Spinu (based on julia-mode.el from julia-lang project)
|
||||
;; Maintainer: Vitalie Spinu
|
||||
;; Created: 02-04-2012 (ESS 12.03)
|
||||
;; Keywords: ESS, julia
|
||||
;;
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;;
|
||||
;; This file is *NOT* part of GNU Emacs.
|
||||
;; This file is part of ESS
|
||||
;;
|
||||
;; This program is free software; you can redistribute it and/or
|
||||
;; modify it under the terms of the GNU General Public License as
|
||||
;; published by the Free Software Foundation; either version 3, any later version.
|
||||
;;
|
||||
;; This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
;; FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
|
||||
;; details.
|
||||
;;
|
||||
;; You should have received a copy of the GNU General Public License along with
|
||||
;; this program; see the file COPYING. If not, write to the Free Software
|
||||
;; Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
|
||||
;; USA.
|
||||
;;
|
||||
;;
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;;
|
||||
;;; Commentary:
|
||||
;; customise inferior-julia-program-name to point to your julia-release-basic
|
||||
;; and start the inferior with M-x julia.
|
||||
;;
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
;;
|
||||
(require 'compile); for compilation-* below
|
||||
|
||||
;;; Code:
|
||||
|
||||
(defvar julia-mode-hook nil)
|
||||
|
||||
(add-to-list 'auto-mode-alist '("\\.jl\\'" . julia-mode))
|
||||
|
||||
(defvar julia-syntax-table
|
||||
(let ((table (make-syntax-table)))
|
||||
(modify-syntax-entry ?_ "_" table) ; underscores in words
|
||||
(modify-syntax-entry ?@ "_" table)
|
||||
(modify-syntax-entry ?. "_" table)
|
||||
(modify-syntax-entry ?# "<" table) ; # single-line comment start
|
||||
(modify-syntax-entry ?\n ">" table) ; \n single-line comment end
|
||||
(modify-syntax-entry ?\{ "(} " table)
|
||||
(modify-syntax-entry ?\} "){ " table)
|
||||
(modify-syntax-entry ?\[ "(] " table)
|
||||
(modify-syntax-entry ?\] ")[ " table)
|
||||
(modify-syntax-entry ?\( "() " table)
|
||||
(modify-syntax-entry ?\) ")( " table)
|
||||
;(modify-syntax-entry ?\\ "." table) ; \ is an operator outside quotes
|
||||
(modify-syntax-entry ?' "." table) ; character quote or transpose
|
||||
(modify-syntax-entry ?\" "\"" table)
|
||||
(modify-syntax-entry ?` "\"" table)
|
||||
;; (modify-syntax-entry ?\" "." table)
|
||||
(modify-syntax-entry ?? "." table)
|
||||
(modify-syntax-entry ?$ "." table)
|
||||
(modify-syntax-entry ?& "." table)
|
||||
(modify-syntax-entry ?* "." table)
|
||||
(modify-syntax-entry ?+ "." table)
|
||||
(modify-syntax-entry ?- "." table)
|
||||
(modify-syntax-entry ?< "." table)
|
||||
(modify-syntax-entry ?> "." table)
|
||||
(modify-syntax-entry ?= "." table)
|
||||
(modify-syntax-entry ?% "." table)
|
||||
table)
|
||||
"Syntax table for julia-mode")
|
||||
|
||||
;; syntax table that holds within strings
|
||||
(defvar julia-mode-string-syntax-table
|
||||
(let ((table (make-syntax-table)))
|
||||
table)
|
||||
"Syntax table for julia-mode")
|
||||
|
||||
;; disable " inside char quote
|
||||
(defvar julia-mode-char-syntax-table
|
||||
(let ((table (make-syntax-table)))
|
||||
(modify-syntax-entry ?\" "." table)
|
||||
table)
|
||||
"Syntax table for julia-mode")
|
||||
|
||||
;; not used
|
||||
;; (defconst julia-string-regex
|
||||
;; "\"[^\"]*?\\(\\(\\\\\\\\\\)*\\\\\"[^\"]*?\\)*\"")
|
||||
|
||||
(defconst julia-char-regex
|
||||
"\\(\\s(\\|\\s-\\|-\\|[,%=<>\\+*/?&|$!\\^~\\\\;:]\\|^\\)\\('\\(\\([^']*?[^\\\\]\\)\\|\\(\\\\\\\\\\)\\)'\\)")
|
||||
|
||||
(defconst julia-unquote-regex
|
||||
"\\(\\s(\\|\\s-\\|-\\|[,%=<>\\+*/?&|!\\^~\\\\;:]\\|^\\)\\($[a-zA-Z0-9_]+\\)")
|
||||
|
||||
(defconst julia-forloop-in-regex
|
||||
"for +[^
|
||||
]+ +.*\\(in\\)\\(\\s-\\|$\\)+")
|
||||
|
||||
(defconst ess-subset-regexp
|
||||
"\\[[0-9:, ]*\\]" )
|
||||
|
||||
(defconst julia-font-lock-defaults
|
||||
(list '("\\<\\(\\|Uint\\(8\\|16\\|32\\|64\\)\\|Int\\(8\\|16\\|32\\|64\\)\\|Integer\\|Float\\|Float32\\|Float64\\|Complex128\\|Complex64\\|ComplexNum\\|Bool\\|Char\\|Number\\|Scalar\\|Real\\|Int\\|Uint\\|Array\\|DArray\\|AbstractArray\\|AbstractVector\\|AbstractMatrix\\|SubArray\\|StridedArray\\|StridedVector\\|StridedMatrix\\|VecOrMat\\|StridedVecOrMat\\|Range\\|Range1\\|SparseMatrixCSC\\|Tuple\\|NTuple\\|Buffer\\|Size\\|Index\\|Symbol\\|Function\\|Vector\\|Matrix\\|Union\\|Type\\|Any\\|Complex\\|None\\|String\\|Ptr\\|Void\\|Exception\\|PtrInt\\|Long\\|Ulong\\)\\>" .
|
||||
font-lock-type-face)
|
||||
(cons
|
||||
(concat "\\<\\("
|
||||
(mapconcat
|
||||
'identity
|
||||
'("if" "else" "elseif" "while" "for" "begin" "end" "quote"
|
||||
"try" "catch" "return" "local" "abstract" "function" "macro" "ccall"
|
||||
"typealias" "break" "continue" "type" "global" "@\\w+"
|
||||
"module" "import" "export" "const" "let" "bitstype" "using")
|
||||
"\\|") "\\)\\>")
|
||||
'font-lock-keyword-face)
|
||||
'("\\<\\(true\\|false\\|C_NULL\\|Inf\\|NaN\\|Inf32\\|NaN32\\)\\>" . font-lock-constant-face)
|
||||
(list julia-unquote-regex 2 'font-lock-constant-face)
|
||||
(list julia-char-regex 2 'font-lock-string-face)
|
||||
(list julia-forloop-in-regex 1 'font-lock-keyword-face)
|
||||
;; (cons ess-subset-regexp 'font-lock-constant-face)
|
||||
(cons "\\(\\sw+\\) ?(" '(1 font-lock-function-name-face keep))
|
||||
;(list julia-string-regex 0 'font-lock-string-face)
|
||||
))
|
||||
|
||||
(defconst julia-block-start-keywords
|
||||
(list "if" "while" "for" "begin" "try" "function" "type" "let" "macro"
|
||||
"quote"))
|
||||
|
||||
(defconst julia-block-other-keywords
|
||||
(list "else" "elseif"))
|
||||
|
||||
(defconst julia-block-end-keywords
|
||||
(list "end" "else" "elseif" "catch"))
|
||||
|
||||
(defun ess-inside-brackets-p (&optional pos)
|
||||
(save-excursion
|
||||
(let* ((pos (or pos (point)))
|
||||
(beg (re-search-backward "\\[" (max (point-min) (- pos 1000)) t))
|
||||
(end (re-search-forward "\\]" (min (point-max) (+ pos 1000)) t)))
|
||||
(and beg end (> pos beg) (> end pos)))))
|
||||
|
||||
(defun julia-at-keyword (kw-list)
|
||||
; not a keyword if used as a field name, X.word, or quoted, :word
|
||||
(and (or (= (point) 1)
|
||||
(and (not (equal (char-before (point)) ?.))
|
||||
(not (equal (char-before (point)) ?:))))
|
||||
(not (ess-inside-string-or-comment-p (point)))
|
||||
(not (ess-inside-brackets-p (point)))
|
||||
(member (current-word) kw-list)))
|
||||
|
||||
; get the position of the last open block
|
||||
(defun julia-last-open-block-pos (min)
|
||||
(let ((count 0))
|
||||
(while (not (or (> count 0) (<= (point) min)))
|
||||
(backward-word 1)
|
||||
(setq count
|
||||
(cond ((julia-at-keyword julia-block-start-keywords)
|
||||
(+ count 1))
|
||||
((and (equal (current-word) "end")
|
||||
(not (ess-inside-comment-p)) (not (ess-inside-brackets-p)))
|
||||
(- count 1))
|
||||
(t count))))
|
||||
(if (> count 0)
|
||||
(point)
|
||||
nil)))
|
||||
|
||||
; get indent for last open block
|
||||
(defun julia-last-open-block (min)
|
||||
(let ((pos (julia-last-open-block-pos min)))
|
||||
(and pos
|
||||
(progn
|
||||
(goto-char pos)
|
||||
(+ julia-basic-offset (current-indentation))))))
|
||||
|
||||
; return indent implied by a special form opening on the previous line, if any
|
||||
(defun julia-form-indent ()
|
||||
(forward-line -1)
|
||||
(end-of-line)
|
||||
(backward-sexp)
|
||||
(if (julia-at-keyword julia-block-other-keywords)
|
||||
(+ julia-basic-offset (current-indentation))
|
||||
(if (char-equal (char-after (point)) ?\()
|
||||
(progn
|
||||
(backward-word 1)
|
||||
(let ((cur (current-indentation)))
|
||||
(if (julia-at-keyword julia-block-start-keywords)
|
||||
(+ julia-basic-offset cur)
|
||||
nil)))
|
||||
nil)))
|
||||
|
||||
(defun julia-paren-indent ()
|
||||
(let* ((p (parse-partial-sexp (save-excursion
|
||||
;; only indent by paren if the last open
|
||||
;; paren is closer than the last open
|
||||
;; block
|
||||
(or (julia-last-open-block-pos (point-min))
|
||||
(point-min)))
|
||||
(progn (beginning-of-line)
|
||||
(point))))
|
||||
(pos (cadr p)))
|
||||
(if (or (= 0 (car p)) (null pos))
|
||||
nil
|
||||
(progn (goto-char pos) (+ 1 (current-column))))))
|
||||
; (forward-line -1)
|
||||
; (end-of-line)
|
||||
; (let ((pos (condition-case nil
|
||||
; (scan-lists (point) -1 1)
|
||||
; (error nil))))
|
||||
; (if pos
|
||||
; (progn (goto-char pos) (+ 1 (current-column)))
|
||||
; nil)))
|
||||
|
||||
(defun julia-indent-line ()
|
||||
"Indent current line of julia code"
|
||||
(interactive)
|
||||
; (save-excursion
|
||||
(end-of-line)
|
||||
(indent-line-to
|
||||
(or (and (ess-inside-string-p (point-at-bol)) 0)
|
||||
(save-excursion (ignore-errors (julia-form-indent)))
|
||||
(save-excursion (ignore-errors (julia-paren-indent)))
|
||||
;; previous line ends in =
|
||||
(save-excursion
|
||||
(beginning-of-line)
|
||||
(skip-chars-backward " \t\n")
|
||||
(when (eql (char-before) ?=)
|
||||
(+ julia-basic-offset (current-indentation))))
|
||||
(save-excursion
|
||||
(let ((endtok (progn
|
||||
(beginning-of-line)
|
||||
(forward-to-indentation 0)
|
||||
(julia-at-keyword julia-block-end-keywords))))
|
||||
(ignore-errors (+ (julia-last-open-block (point-min))
|
||||
(if endtok (- julia-basic-offset) 0)))))
|
||||
;; take same indentation as previous line
|
||||
(save-excursion (forward-line -1)
|
||||
(current-indentation))
|
||||
0))
|
||||
(when (julia-at-keyword julia-block-end-keywords)
|
||||
(forward-word 1)))
|
||||
|
||||
(defvar julia-editing-alist
|
||||
'((paragraph-start . (concat "\\s-*$\\|" page-delimiter))
|
||||
(paragraph-separate . (concat "\\s-*$\\|" page-delimiter))
|
||||
(paragraph-ignore-fill-prefix . t)
|
||||
(require-final-newline . t)
|
||||
(comment-start . "# ")
|
||||
(comment-add . 1)
|
||||
(comment-start-skip . "#+\\s-*")
|
||||
(comment-column . 40)
|
||||
;;(comment-indent-function . 'S-comment-indent)
|
||||
;;(ess-comment-indent . 'S-comment-indent)
|
||||
;; (ess-indent-line . 'S-indent-line)
|
||||
;;(ess-calculate-indent . 'ess-calculate-indent)
|
||||
(ess-indent-line-function . 'julia-indent-line)
|
||||
(indent-line-function . 'julia-indent-line)
|
||||
(parse-sexp-ignore-comments . t)
|
||||
(ess-style . ess-default-style) ;; ignored
|
||||
(ess-local-process-name . nil)
|
||||
;;(ess-keep-dump-files . 'ask)
|
||||
(ess-mode-syntax-table . julia-syntax-table)
|
||||
;; For Changelog add, require ' ' before <- : "attr<-" is a function name :
|
||||
;; (add-log-current-defun-header-regexp . "^\\(.+\\)\\s-+=[ \t\n]*function")
|
||||
(add-log-current-defun-header-regexp . "^.*function[ \t]*\\([^ \t(]*\\)[ \t]*(")
|
||||
(font-lock-defaults . '(julia-font-lock-defaults
|
||||
nil nil ((?\_ . "w"))))
|
||||
)
|
||||
"General options for julia source files.")
|
||||
|
||||
(autoload 'inferior-ess "ess-inf" "Run an ESS process.")
|
||||
(autoload 'ess-mode "ess-mode" "Edit an ESS process.")
|
||||
|
||||
(defun julia-send-string-function (process string visibly)
|
||||
(let ((file (concat temporary-file-directory "julia_eval_region.jl")))
|
||||
(with-temp-file file
|
||||
(insert string))
|
||||
(process-send-string process (format ess-load-command file))))
|
||||
|
||||
(defun julia-get-help-topics (&optional proc)
|
||||
(ess-get-words-from-vector "ESS.all_help_topics()\n"))
|
||||
;; (ess-command com)))
|
||||
|
||||
(defvar julia-help-command "help(\"%s\")\n")
|
||||
|
||||
(defvar ess-julia-error-regexp-alist '(julia-in julia-at)
|
||||
"List of symbols which are looked up in `compilation-error-regexp-alist-alist'.")
|
||||
|
||||
(add-to-list 'compilation-error-regexp-alist-alist
|
||||
'(julia-in "^\\s-*in [^ \t\n]* \\(at \\(.*\\):\\([0-9]+\\)\\)" 2 3 nil 2 1))
|
||||
(add-to-list 'compilation-error-regexp-alist-alist
|
||||
'(julia-at "^\\S-+\\s-+\\(at \\(.*\\):\\([0-9]+\\)\\)" 2 3 nil 2 1))
|
||||
|
||||
(defvar julia-customize-alist
|
||||
'((comint-use-prompt-regexp . t)
|
||||
(ess-eldoc-function . 'ess-julia-eldoc-function)
|
||||
(inferior-ess-primary-prompt . "a> ") ;; from julia>
|
||||
(inferior-ess-secondary-prompt . nil)
|
||||
(inferior-ess-prompt . "\\w*> ")
|
||||
(ess-local-customize-alist . 'julia-customize-alist)
|
||||
(inferior-ess-program . inferior-julia-program-name)
|
||||
(inferior-ess-font-lock-defaults . julia-font-lock-defaults)
|
||||
(ess-get-help-topics-function . 'julia-get-help-topics)
|
||||
(ess-help-web-search-command . "http://docs.julialang.org/en/latest/search/?q=%s")
|
||||
(ess-load-command . "include(\"%s\")\n")
|
||||
(ess-funargs-command . "ESS.fun_args(\"%s\")\n")
|
||||
(ess-dump-error-re . "in \\w* at \\(.*\\):[0-9]+")
|
||||
(ess-error-regexp . "\\(^\\s-*at\\s-*\\(?3:.*\\):\\(?2:[0-9]+\\)\\)")
|
||||
(ess-error-regexp-alist . ess-julia-error-regexp-alist)
|
||||
(ess-send-string-function . nil);'julia-send-string-function)
|
||||
(ess-imenu-generic-expression . julia-imenu-generic-expression)
|
||||
;; (inferior-ess-objects-command . inferior-R-objects-command)
|
||||
;; (inferior-ess-search-list-command . "search()\n")
|
||||
(inferior-ess-help-command . julia-help-command)
|
||||
;; (inferior-ess-help-command . "help(\"%s\")\n")
|
||||
(ess-language . "julia")
|
||||
(ess-dialect . "julia")
|
||||
(ess-suffix . "jl")
|
||||
(ess-dump-filename-template . (ess-replace-regexp-in-string
|
||||
"S$" ess-suffix ; in the one from custom:
|
||||
ess-dump-filename-template-proto))
|
||||
(ess-mode-syntax-table . julia-syntax-table)
|
||||
(ess-mode-editing-alist . julia-editing-alist)
|
||||
(ess-change-sp-regexp . nil );ess-R-change-sp-regexp)
|
||||
(ess-help-sec-regex . ess-help-R-sec-regex)
|
||||
(ess-help-sec-keys-alist . ess-help-R-sec-keys-alist)
|
||||
(ess-loop-timeout . ess-S-loop-timeout);fixme: dialect spec.
|
||||
(ess-cmd-delay . ess-R-cmd-delay)
|
||||
(ess-function-pattern . ess-R-function-pattern)
|
||||
(ess-object-name-db-file . "ess-r-namedb.el" )
|
||||
(ess-smart-operators . ess-R-smart-operators)
|
||||
(inferior-ess-help-filetype . nil)
|
||||
(inferior-ess-exit-command . "exit()\n")
|
||||
;;harmful for shell-mode's C-a: -- but "necessary" for ESS-help?
|
||||
(inferior-ess-start-file . nil) ;; "~/.ess-R"
|
||||
(inferior-ess-start-args . "")
|
||||
(inferior-ess-language-start . nil)
|
||||
(ess-STERM . "iESS")
|
||||
(ess-editor . R-editor)
|
||||
(ess-pager . R-pager)
|
||||
)
|
||||
"Variables to customize for Julia -- set up later than emacs initialization.")
|
||||
|
||||
|
||||
(defvar ess-julia-versions '("julia")
|
||||
"List of partial strings for versions of Julia to access within ESS.
|
||||
Each string specifies the start of a filename. If a filename
|
||||
beginning with one of these strings is found on `exec-path', a M-x
|
||||
command for that version of Julia is made available. ")
|
||||
|
||||
(defcustom inferior-julia-args ""
|
||||
"String of arguments (see 'julia --help') used when starting julia."
|
||||
;; These arguments are currently not passed to other versions of julia that have
|
||||
;; been created using the variable `ess-r-versions'."
|
||||
:group 'ess-julia
|
||||
:type 'string)
|
||||
|
||||
;;;###autoload
|
||||
(defun julia-mode (&optional proc-name)
|
||||
"Major mode for editing julia source. See `ess-mode' for more help."
|
||||
(interactive "P")
|
||||
;; (setq ess-customize-alist julia-customize-alist)
|
||||
(ess-mode julia-customize-alist proc-name)
|
||||
;; for emacs < 24
|
||||
;; (add-hook 'comint-dynamic-complete-functions 'ess-complete-object-name nil 'local)
|
||||
;; for emacs >= 24
|
||||
;; (remove-hook 'completion-at-point-functions 'ess-filename-completion 'local) ;; should be first
|
||||
;; (add-hook 'completion-at-point-functions 'ess-object-completion nil 'local)
|
||||
;; (add-hook 'completion-at-point-functions 'ess-filename-completion nil 'local)
|
||||
(if (fboundp 'ess-add-toolbar) (ess-add-toolbar))
|
||||
(set (make-local-variable 'end-of-defun-function) 'ess-end-of-function)
|
||||
;; (local-set-key "\t" 'julia-indent-line) ;; temp workaround
|
||||
;; (set (make-local-variable 'indent-line-function) 'julia-indent-line)
|
||||
(set (make-local-variable 'julia-basic-offset) 4)
|
||||
(setq imenu-generic-expression julia-imenu-generic-expression)
|
||||
(imenu-add-to-menubar "Imenu-jl")
|
||||
(run-hooks 'julia-mode-hook))
|
||||
|
||||
|
||||
(defvar ess-julia-post-run-hook nil
|
||||
"Functions run in process buffer after the initialization of
|
||||
julia process.")
|
||||
|
||||
;;;###autoload
|
||||
(defun julia (&optional start-args)
|
||||
"Call 'julia',
|
||||
Optional prefix (C-u) allows to set command line arguments, such as
|
||||
--load=<file>. This should be OS agnostic.
|
||||
If you have certain command line arguments that should always be passed
|
||||
to julia, put them in the variable `inferior-julia-args'."
|
||||
(interactive "P")
|
||||
;; get settings, notably inferior-julia-program-name :
|
||||
(if (null inferior-julia-program-name)
|
||||
(error "'inferior-julia-program-name' does not point to 'julia-release-basic' executable")
|
||||
(setq ess-customize-alist julia-customize-alist)
|
||||
(ess-write-to-dribble-buffer ;; for debugging only
|
||||
(format
|
||||
"\n(julia): ess-dialect=%s, buf=%s, start-arg=%s\n current-prefix-arg=%s\n"
|
||||
ess-dialect (current-buffer) start-args current-prefix-arg))
|
||||
(let* ((jl-start-args
|
||||
(concat inferior-julia-args " " ; add space just in case
|
||||
(if start-args
|
||||
(read-string
|
||||
(concat "Starting Args"
|
||||
(if inferior-julia-args
|
||||
(concat " [other than '" inferior-julia-args "']"))
|
||||
" ? "))
|
||||
nil))))
|
||||
(inferior-ess jl-start-args) ;; -> .. (ess-multi ...) -> .. (inferior-ess-mode) ..
|
||||
(ess--tb-start)
|
||||
(set (make-local-variable 'julia-basic-offset) 4)
|
||||
;; remove ` from julia's logo
|
||||
(goto-char (point-min))
|
||||
(while (re-search-forward "`" nil t)
|
||||
(replace-match "'"))
|
||||
(goto-char (point-max))
|
||||
(ess--inject-code-from-file (format "%sess-julia.jl" ess-etc-directory))
|
||||
(with-ess-process-buffer nil
|
||||
(run-mode-hooks 'ess-julia-post-run-hook))
|
||||
)))
|
||||
|
||||
;;; ELDOC
|
||||
|
||||
(defun ess-julia-eldoc-function ()
|
||||
"Return the doc string, or nil.
|
||||
If an ESS process is not associated with the buffer, do not try
|
||||
to look up any doc strings."
|
||||
(interactive)
|
||||
(when (and (ess-process-live-p)
|
||||
(not (ess-process-get 'busy)))
|
||||
(let ((funname (or (and ess-eldoc-show-on-symbol ;; aggressive completion
|
||||
(symbol-at-point))
|
||||
(car (ess--funname.start)))))
|
||||
(when funname
|
||||
(let* ((args (copy-sequence (nth 2 (ess-function-arguments funname))))
|
||||
(W (- (window-width (minibuffer-window)) (+ 4 (length funname))))
|
||||
(doc (concat (propertize funname 'face font-lock-function-name-face) ": ")))
|
||||
(when args
|
||||
(setq args (sort args (lambda (s1 s2)
|
||||
(< (length s1) (length s2)))))
|
||||
(setq doc (concat doc (pop args)))
|
||||
(while (and args (< (length doc) W))
|
||||
(setq doc (concat doc " "
|
||||
(pop args))))
|
||||
(when (and args (< (length doc) W))
|
||||
(setq doc (concat doc " {--}"))))
|
||||
doc)))))
|
||||
|
||||
|
||||
;;; IMENU
|
||||
(defvar julia-imenu-generic-expression
|
||||
;; don't use syntax classes, screws egrep
|
||||
'(("Function (_)" "[ \t]*function[ \t]+\\(_[^ \t\n]*\\)" 1)
|
||||
("Function" "[ \t]*function[ \t]+\\([^_][^\t\n]*\\)" 1)
|
||||
("Const" "[ \t]*const \\([^ \t\n]*\\)" 1)
|
||||
("Type" "^[ \t]*[a-zA-Z0-9_]*type[a-zA-Z0-9_]* \\([^ \t\n]*\\)" 1)
|
||||
("Require" " *\\(\\brequire\\)(\\([^ \t\n)]*\\)" 2)
|
||||
("Include" " *\\(\\binclude\\)(\\([^ \t\n)]*\\)" 2)
|
||||
;; ("Classes" "^.*setClass(\\(.*\\)," 1)
|
||||
;; ("Coercions" "^.*setAs(\\([^,]+,[^,]*\\)," 1) ; show from and to
|
||||
;; ("Generics" "^.*setGeneric(\\([^,]*\\)," 1)
|
||||
;; ("Methods" "^.*set\\(Group\\|Replace\\)?Method(\"\\(.+\\)\"," 2)
|
||||
;; ;;[ ]*\\(signature=\\)?(\\(.*,?\\)*\\)," 1)
|
||||
;; ;;
|
||||
;; ;;("Other" "^\\(.+\\)\\s-*<-[ \t\n]*[^\\(function\\|read\\|.*data\.frame\\)]" 1)
|
||||
;; ("Package" "^.*\\(library\\|require\\)(\\(.*\\)," 2)
|
||||
;; ("Data" "^\\(.+\\)\\s-*<-[ \t\n]*\\(read\\|.*data\.frame\\).*(" 1)))
|
||||
))
|
||||
|
||||
21
samples/Erlang/factorial.script!
Executable file
@@ -0,0 +1,21 @@
#!/usr/bin/env escript
%% -*- erlang -*-
%%! -smp enable -sname factorial -mnesia debug verbose
main([String]) ->
    try
        N = list_to_integer(String),
        F = fac(N),
        io:format("factorial ~w = ~w\n", [N,F])
    catch
        _:_ ->
            usage()
    end;
main(_) ->
    usage().

usage() ->
    io:format("usage: factorial integer\n"),
    halt(1).

fac(0) -> 1;
fac(N) -> N * fac(N-1).
4
samples/Erlang/hello.escript
Executable file
@@ -0,0 +1,4 @@
#!/usr/bin/env escript
-export([main/1]).

main([]) -> io:format("Hello, World!~n").
136
samples/Erlang/record_helper.erl
Normal file
@@ -0,0 +1,136 @@
|
||||
%% For each header file, it scans thru all records and create helper functions
|
||||
%% Helper functions are:
|
||||
%% setters, getters, fields, fields_atom, type
|
||||
|
||||
-module(record_helper).
|
||||
|
||||
-export([make/1, make/2]).
|
||||
|
||||
make(HeaderFiles) ->
|
||||
make([ atom_to_list(X) || X <- HeaderFiles ], ".").
|
||||
|
||||
%% .hrl file, relative to current dir
|
||||
make(HeaderFiles, OutDir) ->
|
||||
ModuleName = "record_utils",
|
||||
HeaderComment = "%% This is auto generated file. Please don't edit it\n\n",
|
||||
ModuleDeclaration = "-module(" ++ ModuleName ++ ").\n"
|
||||
++ "-author(\"trung@mdkt.org\").\n"
|
||||
++ "-compile(export_all).\n"
|
||||
++ [ "-include(\"" ++ X ++ "\").\n" || X <- HeaderFiles ]
|
||||
++ "\n",
|
||||
Src = format_src(lists:sort(lists:flatten([read(X) || X <- HeaderFiles] ++ [generate_type_default_function()]))),
|
||||
file:write_file(OutDir++"/" ++ ModuleName ++ ".erl", list_to_binary([HeaderComment, ModuleDeclaration, Src])).
|
||||
|
||||
read(HeaderFile) ->
|
||||
try epp:parse_file(HeaderFile,[],[]) of
|
||||
{ok, Tree} ->
|
||||
parse(Tree);
|
||||
{error, Error} ->
|
||||
{error, {"Error parsing header file", HeaderFile, Error}}
|
||||
catch
|
||||
_:Error ->
|
||||
{catched_error, {"Error parsing header file", HeaderFile, Error}}
|
||||
end.
|
||||
|
||||
format_src([{_, _, _, Src}|T]) when length(T) == 0 ->
|
||||
Src ++ ".\n\n";
|
||||
format_src([{Type, _, _, Src}|[{Type, A, B, NSrc}|T]]) ->
|
||||
Src ++ ";\n\n" ++ format_src([{Type, A, B, NSrc}|T]);
|
||||
format_src([{_Type, _, _, Src}|[{Type1, A, B, NSrc}|T]]) ->
|
||||
Src ++ ".\n\n" ++ format_src([{Type1, A, B, NSrc}|T]);
|
||||
format_src([{_, _, _, Src}|T]) when length(T) > 0 ->
|
||||
Src ++ ";\n\n" ++ format_src(T).
|
||||
|
||||
parse(Tree) ->
|
||||
[ parse_record(X) || X <- Tree ].
|
||||
|
||||
parse_record({attribute, _, record, RecordInfo}) ->
|
||||
{RecordName, RecordFields} = RecordInfo,
|
||||
if
|
||||
length(RecordFields) == 1 ->
|
||||
lists:flatten([ generate_setter_getter_function(RecordName, X) || X <- RecordFields ]
|
||||
++ [generate_type_function(RecordName)]);
|
||||
true ->
|
||||
lists:flatten([generate_fields_function(RecordName, RecordFields)]
|
||||
++ [generate_fields_atom_function(RecordName, RecordFields)]
|
||||
++ [ generate_setter_getter_function(RecordName, X) || X <- RecordFields ]
|
||||
++ [generate_type_function(RecordName)])
|
||||
end;
|
||||
parse_record(_) -> [].
|
||||
|
||||
parse_field_name({record_field, _, {atom, _, FieldName}}) ->
|
||||
{field, "\"" ++ atom_to_list(FieldName) ++ "\""};
|
||||
parse_field_name({record_field, _, {atom, _, _FieldName}, {record, _, ParentRecordName, _}}) ->
|
||||
{parent_field, "fields(" ++ atom_to_list(ParentRecordName) ++ ")"};
|
||||
parse_field_name({record_field, _, {atom, _, FieldName}, _}) ->
|
||||
{field, "\"" ++ atom_to_list(FieldName) ++ "\""}.
|
||||
|
||||
parse_field_name_atom({record_field, _, {atom, _, FieldName}}) ->
|
||||
atom_to_list(FieldName);
|
||||
parse_field_name_atom({record_field, _, {atom, _, _FieldName}, {record, _, ParentRecordName, _}}) ->
|
||||
"fields_atom(" ++ atom_to_list(ParentRecordName) ++ ")";
|
||||
parse_field_name_atom({record_field, _, {atom, _, FieldName}, _}) ->
|
||||
atom_to_list(FieldName).
|
||||
|
||||
concat([], _S) -> [];
|
||||
concat([F|T], _S) when length(T) == 0 -> F;
|
||||
concat([F|T], S) -> F ++ S ++ concat(T, S).
|
||||
|
||||
concat_ext([], _S) -> [];
|
||||
concat_ext([F|T], S) -> F ++ S ++ concat_ext(T, S).
|
||||
|
||||
parse_field([], AccFields, AccParentFields) -> concat_ext(AccParentFields, " ++ ") ++ "[" ++ concat(AccFields, ", ") ++ "]";
|
||||
%parse_field([F|T], AccFields, AccParentFields) when length(T) == 0 -> parse_field_name(F);
|
||||
parse_field([F|T], AccFields, AccParentFields) ->
|
||||
case parse_field_name(F) of
|
||||
{field, Field} ->
|
||||
parse_field(T, AccFields ++ [Field], AccParentFields);
|
||||
{parent_field, PField} ->
|
||||
parse_field(T, AccFields, AccParentFields ++ [PField])
|
||||
end.
|
||||
|
||||
parse_field_atom([F|T]) when length(T) == 0 -> parse_field_name_atom(F);
|
||||
parse_field_atom([F|T]) ->
|
||||
parse_field_name_atom(F) ++ ", " ++ parse_field_atom(T).
|
||||
|
||||
generate_type_default_function() ->
|
||||
{type, zzz, 99, "type(_) -> undefined"}.
|
||||
|
||||
generate_type_function(RecordName) ->
|
||||
{type, RecordName, 0, "type(Obj) when is_record(Obj, " ++ atom_to_list(RecordName) ++ ") -> " ++ atom_to_list(RecordName)}.
|
||||
|
||||
generate_fields_function(RecordName, RecordFields) ->
|
||||
Fields = parse_field(RecordFields, [], []),
|
||||
{field, RecordName, 1, "fields(" ++ atom_to_list(RecordName) ++ ") -> \n\t" ++ Fields}.
|
||||
|
||||
generate_fields_atom_function(RecordName, RecordFields) ->
|
||||
Fields = parse_field_atom(RecordFields),
|
||||
{field_atom, RecordName, 1, "fields_atom(" ++ atom_to_list(RecordName) ++ ") -> \n\tlists:flatten([" ++ Fields ++ "])"}.
|
||||
|
||||
generate_setter_getter_function(RecordName, {record_field, _, {atom, _, FieldName}, {record, _, ParentRecordName, _}}) ->
|
||||
to_setter_getter_function(atom_to_list(RecordName), atom_to_list(FieldName), atom_to_list(ParentRecordName));
|
||||
generate_setter_getter_function(RecordName, {record_field, _, {atom, _, FieldName}, _}) ->
|
||||
to_setter_getter_function(atom_to_list(RecordName), atom_to_list(FieldName));
|
||||
generate_setter_getter_function(RecordName, {record_field, _, {atom, _, FieldName}}) ->
|
||||
to_setter_getter_function(atom_to_list(RecordName), atom_to_list(FieldName)).
|
||||
|
||||
to_setter_getter_function(RecordName, FieldName) ->
|
||||
[{setter, RecordName, 1, "set(Obj, " ++ FieldName ++ ", Value) when is_record(Obj, " ++ RecordName ++ ") -> \n"
|
||||
++ "\tNewObj = Obj#" ++ RecordName ++ "{" ++ FieldName ++ " = Value},\n"
|
||||
++ "\t{ok, NewObj, {" ++ FieldName ++ ", Value}}"},
|
||||
{getter, RecordName, 1, "get(Obj, " ++ FieldName ++ ") when is_record(Obj, " ++ RecordName ++ ") -> \n"
|
||||
++ "\t{ok, Obj#" ++ RecordName ++ "." ++ FieldName ++ "}"}
|
||||
].
|
||||
|
||||
to_setter_getter_function(RecordName, FieldName, ParentRecordName) ->
|
||||
[{setter, RecordName, 2, "set(Obj, " ++ FieldName ++ ", Value) when is_record(Obj, " ++ RecordName ++ ") and is_record(Value, " ++ ParentRecordName ++ ") -> \n"
|
||||
++ "\tNewObj = Obj#" ++ RecordName ++ "{" ++ FieldName ++ " = Value},\n"
|
||||
++ "\t{ok, NewObj, {" ++ FieldName ++ ", Value}};\n\n"
|
||||
++ "set(Obj, ParentProperty, Value) when is_record(Obj, " ++ RecordName ++ ") and is_atom(ParentProperty) -> \n"
|
||||
++ "\t{ok, NewParentObject, _} = set(Obj#" ++ RecordName ++ ".parent, ParentProperty, Value),\n"
|
||||
++ "\tset(Obj, parent, NewParentObject)"},
|
||||
{getter, RecordName, 2, "get(Obj, " ++ FieldName ++ ") when is_record(Obj, " ++ RecordName ++ ") -> \n"
|
||||
++ "\t{ok, Obj#" ++ RecordName ++ "." ++ FieldName ++ "};\n\n"
|
||||
++ "get(Obj, ParentProperty) when is_record(Obj, " ++ RecordName ++ ") and is_atom(ParentProperty) -> \n"
|
||||
++ "\tget(Obj#" ++ RecordName ++ ".parent, ParentProperty)"}
|
||||
].
|
||||
100
samples/Erlang/record_utils.erl
Normal file
@@ -0,0 +1,100 @@
|
||||
%% This is auto generated file. Please don't edit it
|
||||
|
||||
-module(record_utils).
|
||||
-compile(export_all).
|
||||
-include("messages.hrl").
|
||||
|
||||
fields(abstract_message) ->
|
||||
["clientId", "destination", "messageId", "timestamp", "timeToLive", "headers", "body"];
|
||||
|
||||
fields(async_message) ->
|
||||
fields(abstract_message) ++ ["correlationId", "correlationIdBytes"].
|
||||
|
||||
fields_atom(abstract_message) ->
|
||||
lists:flatten([clientId, destination, messageId, timestamp, timeToLive, headers, body]);
|
||||
|
||||
fields_atom(async_message) ->
|
||||
lists:flatten([fields_atom(abstract_message), correlationId, correlationIdBytes]).
|
||||
|
||||
get(Obj, body) when is_record(Obj, abstract_message) ->
|
||||
{ok, Obj#abstract_message.body};
|
||||
|
||||
get(Obj, clientId) when is_record(Obj, abstract_message) ->
|
||||
{ok, Obj#abstract_message.clientId};
|
||||
|
||||
get(Obj, destination) when is_record(Obj, abstract_message) ->
|
||||
{ok, Obj#abstract_message.destination};
|
||||
|
||||
get(Obj, headers) when is_record(Obj, abstract_message) ->
|
||||
{ok, Obj#abstract_message.headers};
|
||||
|
||||
get(Obj, messageId) when is_record(Obj, abstract_message) ->
|
||||
{ok, Obj#abstract_message.messageId};
|
||||
|
||||
get(Obj, timeToLive) when is_record(Obj, abstract_message) ->
|
||||
{ok, Obj#abstract_message.timeToLive};
|
||||
|
||||
get(Obj, timestamp) when is_record(Obj, abstract_message) ->
|
||||
{ok, Obj#abstract_message.timestamp};
|
||||
|
||||
get(Obj, correlationId) when is_record(Obj, async_message) ->
|
||||
{ok, Obj#async_message.correlationId};
|
||||
|
||||
get(Obj, correlationIdBytes) when is_record(Obj, async_message) ->
|
||||
{ok, Obj#async_message.correlationIdBytes};
|
||||
|
||||
get(Obj, parent) when is_record(Obj, async_message) ->
|
||||
{ok, Obj#async_message.parent};
|
||||
|
||||
get(Obj, ParentProperty) when is_record(Obj, async_message) and is_atom(ParentProperty) ->
|
||||
get(Obj#async_message.parent, ParentProperty).
|
||||
|
||||
set(Obj, body, Value) when is_record(Obj, abstract_message) ->
|
||||
NewObj = Obj#abstract_message{body = Value},
|
||||
{ok, NewObj, {body, Value}};
|
||||
|
||||
set(Obj, clientId, Value) when is_record(Obj, abstract_message) ->
|
||||
NewObj = Obj#abstract_message{clientId = Value},
|
||||
{ok, NewObj, {clientId, Value}};
|
||||
|
||||
set(Obj, destination, Value) when is_record(Obj, abstract_message) ->
|
||||
NewObj = Obj#abstract_message{destination = Value},
|
||||
{ok, NewObj, {destination, Value}};
|
||||
|
||||
set(Obj, headers, Value) when is_record(Obj, abstract_message) ->
|
||||
NewObj = Obj#abstract_message{headers = Value},
|
||||
{ok, NewObj, {headers, Value}};
|
||||
|
||||
set(Obj, messageId, Value) when is_record(Obj, abstract_message) ->
|
||||
NewObj = Obj#abstract_message{messageId = Value},
|
||||
{ok, NewObj, {messageId, Value}};
|
||||
|
||||
set(Obj, timeToLive, Value) when is_record(Obj, abstract_message) ->
|
||||
NewObj = Obj#abstract_message{timeToLive = Value},
|
||||
{ok, NewObj, {timeToLive, Value}};
|
||||
|
||||
set(Obj, timestamp, Value) when is_record(Obj, abstract_message) ->
|
||||
NewObj = Obj#abstract_message{timestamp = Value},
|
||||
{ok, NewObj, {timestamp, Value}};
|
||||
|
||||
set(Obj, correlationId, Value) when is_record(Obj, async_message) ->
|
||||
NewObj = Obj#async_message{correlationId = Value},
|
||||
{ok, NewObj, {correlationId, Value}};
|
||||
|
||||
set(Obj, correlationIdBytes, Value) when is_record(Obj, async_message) ->
|
||||
NewObj = Obj#async_message{correlationIdBytes = Value},
|
||||
{ok, NewObj, {correlationIdBytes, Value}};
|
||||
|
||||
set(Obj, parent, Value) when is_record(Obj, async_message) and is_record(Value, abstract_message) ->
|
||||
NewObj = Obj#async_message{parent = Value},
|
||||
{ok, NewObj, {parent, Value}};
|
||||
|
||||
set(Obj, ParentProperty, Value) when is_record(Obj, async_message) and is_atom(ParentProperty) ->
|
||||
{ok, NewParentObject, _} = set(Obj#async_message.parent, ParentProperty, Value),
|
||||
set(Obj, parent, NewParentObject).
|
||||
|
||||
type(Obj) when is_record(Obj, abstract_message) -> abstract_message;
|
||||
|
||||
type(Obj) when is_record(Obj, async_message) -> async_message;
|
||||
|
||||
type(_) -> undefined.
|
||||
122
samples/Erlang/release.script!
Normal file
@@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env escript
|
||||
%%!
|
||||
%-*-Mode:erlang;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
|
||||
% ex: set ft=erlang fenc=utf-8 sts=4 ts=4 sw=4 et:
|
||||
%%%
|
||||
%%%------------------------------------------------------------------------
|
||||
%%% BSD LICENSE
|
||||
%%%
|
||||
%%% Copyright (c) 2013, Michael Truog <mjtruog at gmail dot com>
|
||||
%%% All rights reserved.
|
||||
%%%
|
||||
%%% Redistribution and use in source and binary forms, with or without
|
||||
%%% modification, are permitted provided that the following conditions are met:
|
||||
%%%
|
||||
%%% * Redistributions of source code must retain the above copyright
|
||||
%%% notice, this list of conditions and the following disclaimer.
|
||||
%%% * Redistributions in binary form must reproduce the above copyright
|
||||
%%% notice, this list of conditions and the following disclaimer in
|
||||
%%% the documentation and/or other materials provided with the
|
||||
%%% distribution.
|
||||
%%% * All advertising materials mentioning features or use of this
|
||||
%%% software must display the following acknowledgment:
|
||||
%%% This product includes software developed by Michael Truog
|
||||
%%% * The name of the author may not be used to endorse or promote
|
||||
%%% products derived from this software without specific prior
|
||||
%%% written permission
|
||||
%%%
|
||||
%%% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
|
||||
%%% CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
|
||||
%%% INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
%%% OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
%%% DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
|
||||
%%% CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
%%% SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
%%% BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
%%% SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
%%% INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
%%% WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
%%% NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
%%% OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||
%%% DAMAGE.
|
||||
%%%------------------------------------------------------------------------
|
||||
|
||||
-author('mjtruog [at] gmail (dot) com').
|
||||
|
||||
-mode(compile).
|
||||
|
||||
main(_) ->
|
||||
{ok,
|
||||
[{sys, _} = RelToolConfig,
|
||||
{target_dir, TargetDir},
|
||||
{overlay, OverlayConfig}]} = file:consult("reltool.config"),
|
||||
{ok, Spec} = reltool:get_target_spec([RelToolConfig]),
|
||||
case file:make_dir(TargetDir) of
|
||||
ok ->
|
||||
ok;
|
||||
{error, eexist} ->
|
||||
io:format("release already exists? (~p)~n", [TargetDir]),
|
||||
exit_code(1)
|
||||
end,
|
||||
ok = reltool:eval_target_spec(Spec, code:root_dir(), TargetDir),
|
||||
ok = process_overlay(RelToolConfig, TargetDir, OverlayConfig),
|
||||
exit_code(0).
|
||||
|
||||
shell(Command, Arguments) ->
|
||||
CommandSuffix = " && echo 0 || echo 1",
|
||||
case lists:reverse(os:cmd(lists:flatten(
|
||||
io_lib:format(Command ++ CommandSuffix, Arguments)))) of
|
||||
[_, $0 | _] ->
|
||||
ok;
|
||||
[_, $1 | _] ->
|
||||
io:format("\"~s\" failed!~n", [io_lib:format(Command, Arguments)]),
|
||||
error
|
||||
end.
|
||||
|
||||
boot_rel_vsn({sys, Config} = _RelToolConfig) ->
|
||||
{rel, _Name, Ver, _} = proplists:lookup(rel, Config),
|
||||
Ver.
|
||||
|
||||
%% minimal parsing for handling mustache syntax
|
||||
mustache(Body, Context) ->
|
||||
mustache(Body, "", Context).
|
||||
mustache([], Result, _Context) ->
|
||||
lists:reverse(Result);
|
||||
mustache([${, ${ | KeyStr], Result, Context) ->
|
||||
mustache_key(KeyStr, "", Result, Context);
|
||||
mustache([C | Rest], Result, Context) ->
|
||||
mustache(Rest, [C | Result], Context).
|
||||
mustache_key([$}, $} | Rest], KeyStr, Result, Context) ->
|
||||
Key = erlang:list_to_existing_atom(lists:reverse(KeyStr)),
|
||||
{ok, Value} = dict:find(Key, Context),
|
||||
mustache(Rest, lists:reverse(Value) ++ Result, Context);
|
||||
mustache_key([C | Rest], KeyStr, Result, Context) ->
|
||||
mustache_key(Rest, [C | KeyStr], Result, Context).
|
||||
|
||||
%% support minimal overlay based on rebar overlays
|
||||
process_overlay(RelToolConfig, TargetDir, OverlayConfig) ->
|
||||
BootRelVsn = boot_rel_vsn(RelToolConfig),
|
||||
OverlayVars =
|
||||
dict:from_list([{erts_vsn, "erts-" ++ erlang:system_info(version)},
|
||||
{rel_vsn, BootRelVsn},
|
||||
{target_dir, TargetDir},
|
||||
{hostname, net_adm:localhost()}]),
|
||||
{ok, BaseDir} = file:get_cwd(),
|
||||
execute_overlay(OverlayConfig, OverlayVars, BaseDir, TargetDir).
|
||||
|
||||
execute_overlay([], _Vars, _BaseDir, _TargetDir) ->
|
||||
ok;
|
||||
execute_overlay([{mkdir, Out} | Rest], Vars, BaseDir, TargetDir) ->
|
||||
OutDir = mustache(filename:join(TargetDir, Out), Vars),
|
||||
ok = shell("mkdir -p ~s", [OutDir]),
|
||||
execute_overlay(Rest, Vars, BaseDir, TargetDir);
|
||||
execute_overlay([{copy, In, Out} | Rest], Vars, BaseDir, TargetDir) ->
|
||||
InFile = mustache(filename:join(BaseDir, In), Vars),
|
||||
OutFile = mustache(filename:join(TargetDir, Out), Vars),
|
||||
true = filelib:is_file(InFile),
|
||||
ok = shell("cp -R ~s ~s", [InFile, OutFile]),
|
||||
execute_overlay(Rest, Vars, BaseDir, TargetDir).
|
||||
|
||||
exit_code(ExitCode) ->
|
||||
erlang:halt(ExitCode, [{flush, true}]).
|
||||
|
||||
79
samples/Forth/KataDiversion.fth
Normal file
@@ -0,0 +1,79 @@
|
||||
\ KataDiversion in Forth
|
||||
|
||||
\ -- utils
|
||||
|
||||
\ empty the stack
|
||||
: EMPTY
|
||||
DEPTH 0 <> IF BEGIN
|
||||
DROP DEPTH 0 =
|
||||
UNTIL
|
||||
THEN ;
|
||||
|
||||
\ power
|
||||
: ** ( n1 n2 -- n1_pow_n2 ) 1 SWAP ?DUP IF 0 DO OVER * LOOP THEN NIP ;
|
||||
|
||||
\ compute the highest power of 2 below N.
|
||||
\ e.g. : 31 -> 16, 4 -> 4
|
||||
: MAXPOW2 ( n -- log2_n ) DUP 1 < IF 1 ABORT" Maxpow2 need a positive value."
|
||||
ELSE DUP 1 = IF 1
|
||||
ELSE
|
||||
1 >R
|
||||
BEGIN ( n |R: i=1)
|
||||
DUP DUP I - 2 *
|
||||
( n n 2*[n-i])
|
||||
R> 2 * >R ( … |R: i*2)
|
||||
> ( n n>2*[n-i] )
|
||||
UNTIL
|
||||
R> 2 /
|
||||
THEN
|
||||
THEN NIP ;
|
||||
|
||||
\ -- kata
|
||||
|
||||
\ test if the given N has two adjacent 1 bits
|
||||
\ e.g. : 11 -> 1011 -> -1
|
||||
\ 9 -> 1001 -> 0
|
||||
: ?NOT-TWO-ADJACENT-1-BITS ( n -- bool )
|
||||
\ the word uses the following algorithm :
|
||||
\ (stack|return stack)
|
||||
\ ( A N | X ) A: 0, X: N LOG2
|
||||
\ loop: if N-X > 0 then A++ else A=0 ; X /= 2
|
||||
\ return 0 if A=2
|
||||
\ if X=1 end loop and return -1
|
||||
0 SWAP DUP DUP 0 <> IF
|
||||
MAXPOW2 >R
|
||||
BEGIN
|
||||
DUP I - 0 >= IF
|
||||
SWAP DUP 1 = IF 1+ SWAP
|
||||
ELSE DROP 1 SWAP I -
|
||||
THEN
|
||||
ELSE NIP 0 SWAP
|
||||
THEN
|
||||
OVER
|
||||
2 =
|
||||
I 1 = OR
|
||||
R> 2 / >R
|
||||
UNTIL
|
||||
R> 2DROP
|
||||
2 <>
|
||||
ELSE 2DROP INVERT
|
||||
THEN ;
|
||||
|
||||
\ return the maximum number which can be made with N (given number) bits
|
||||
: MAX-NB ( n -- m ) DUP 1 < IF DROP 0 ( 0 )
|
||||
ELSE
|
||||
DUP IF DUP 2 SWAP ** NIP 1 - ( 2**n - 1 )
|
||||
THEN
|
||||
THEN ;
|
||||
|
||||
|
||||
\ return the number of numbers which can be made with N (given number) bits
|
||||
\ or less, and which have not two adjacent 1 bits.
|
||||
\ see http://www.codekata.com/2007/01/code_kata_fifte.html
|
||||
: HOW-MANY-NB-NOT-TWO-ADJACENT-1-BITS ( n -- m )
|
||||
DUP 1 < IF DUP 0
|
||||
ELSE
|
||||
0 SWAP
|
||||
MAX-NB 1 + 0 DO I ?NOT-TWO-ADJACENT-1-BITS - LOOP
|
||||
THEN ;
|
||||
|
||||
42
samples/Forth/block.fth
Normal file
@@ -0,0 +1,42 @@
( Block words. )

variable blk
variable current-block

: block ( n -- addr )
  current-block ! 0 ;

: buffer ( n -- addr )
  current-block ! 0 ;

\ evaluate (extended semantics)
\ flush ( -- )

: load ( ... n -- ... )
  dup current-block !
  blk !
  save-input
  0 >in !
  blk @ block ''source ! 1024 ''#source !
  ( interpret )
  restore-input ;

\ save-buffers ( -- )
\ update ( -- )

( Block extension words. )

\ empty-buffers ( -- )

variable scr

: list ( n -- )
  dup scr !
  dup current-block !
  block 1024 bounds do i @ emit loop ;

\ refill (extended semantics)

: thru ( x y -- ) +1 swap do i load loop ;

\ \ (extended semantics)
136
samples/Forth/core-ext.fth
Normal file
@@ -0,0 +1,136 @@
|
||||
\ -*- forth -*- Copyright 2004, 2013 Lars Brinkhoff
|
||||
|
||||
\ Kernel: #tib
|
||||
\ TODO: .r
|
||||
|
||||
: .( ( "<string><paren>" -- )
|
||||
[char] ) parse type ; immediate
|
||||
|
||||
: 0<> ( n -- flag ) 0 <> ;
|
||||
|
||||
: 0> ( n -- flag ) 0 > ;
|
||||
|
||||
\ Kernel: 2>r
|
||||
|
||||
: 2r> ( -- x1 x2 ) ( R: x1 x2 -- ) r> r> r> rot >r swap ;
|
||||
|
||||
: 2r@ ( -- x1 x2 ) ( R: x1 x2 -- x1 x2 ) 2r> 2dup 2>r ;
|
||||
|
||||
: :noname align here 0 c, 15 allot lastxt dup @ , !
|
||||
[ ' enter >code @ ] literal , 0 , ] lastxt @ ;
|
||||
|
||||
\ Kernel: <>
|
||||
|
||||
\ : ?do ( n1 n2 -- ) ( R: -- loop-sys ) ( C: -- do-sys )
|
||||
\ here postpone 2>r unresolved branch here ;
|
||||
|
||||
: again ( -- ) ( C: dest -- )
|
||||
postpone branch , ; immediate
|
||||
|
||||
: string+ ( caddr -- addr )
|
||||
count + aligned ;
|
||||
|
||||
: (c") ( -- caddr ) ( R: ret1 -- ret2 )
|
||||
r> dup string+ >r ;
|
||||
|
||||
: c" ( "<string><quote>" -- caddr )
|
||||
postpone (c") [char] " parse dup c, string, ; immediate
|
||||
|
||||
: case ( -- ) ( C: -- case-sys )
|
||||
0 ;
|
||||
|
||||
: compile, ( xt -- )
|
||||
, ;
|
||||
|
||||
\ TODO: convert
|
||||
|
||||
: endcase ( x -- ) ( C: case-sys -- )
|
||||
0 do postpone then loop
|
||||
postpone drop ;
|
||||
|
||||
: endof ( -- ) ( C: case-sys1 of-sys -- case-sys2 )
|
||||
postpone else swap 1+ ;
|
||||
|
||||
\ TODO: erase
|
||||
\ TODO: expect
|
||||
|
||||
: false ( -- 0 )
|
||||
0 ;
|
||||
|
||||
: hex ( -- )
|
||||
16 base ! ;
|
||||
|
||||
\ TODO: marker
|
||||
\ Kernel: nip
|
||||
|
||||
: of ( x x -- | x y -- x ) ( C: -- of-sys )
|
||||
postpone over postpone = postpone if postpone drop ;
|
||||
|
||||
\ Kernel: pad
|
||||
\ Kernel: parse
|
||||
|
||||
: pick ( xn ... x0 n -- xn ... x0 xn )
|
||||
2 + cells 'SP @ + @ ;
|
||||
|
||||
: query ( -- )
|
||||
tib ''source ! #tib ''#source ! 0 'source-id !
|
||||
refill drop ;
|
||||
|
||||
\ Kernel: refill
|
||||
\ Kernel: restore-input
|
||||
|
||||
\ TODO: roll ( xn xn-1 ... x0 n -- xn-1 ... x0 xn ) ;
|
||||
|
||||
\ Kernel: save-input
|
||||
\ Kernel: source-id
|
||||
\ TODO: span
|
||||
\ Kernel: tib
|
||||
|
||||
: to ( x "word" -- )
|
||||
' >body , ;
|
||||
|
||||
: true ( -- -1 )
|
||||
-1 ;
|
||||
|
||||
: tuck ( x y -- y x y )
|
||||
swap over ;
|
||||
|
||||
\ TODO: u.r
|
||||
|
||||
: u> ( x y -- flag )
|
||||
2dup u< if 2drop false else <> then ;
|
||||
|
||||
\ TODO: unused
|
||||
|
||||
: value ( x "word" -- )
|
||||
create ,
|
||||
does> ( -- x )
|
||||
@ ;
|
||||
|
||||
: within over - >r - r> u< ;
|
||||
|
||||
\ TODO: [compile]
|
||||
|
||||
\ Kernel: \
|
||||
|
||||
\ ----------------------------------------------------------------------
|
||||
|
||||
( Forth2012 core extension words. )
|
||||
|
||||
\ TODO: action-of
|
||||
|
||||
\ TODO: buffer:
|
||||
|
||||
: defer create ['] abort , does> @ execute ;
|
||||
|
||||
: defer! ( xt2 xt1 -- ) >body ! ;
|
||||
|
||||
: defer@ ( xt1 -- xt2 ) >body @ ;
|
||||
|
||||
\ TODO: holds
|
||||
|
||||
: is ( xt "word" -- ) ' defer! ;
|
||||
|
||||
\ TODO: parse-name
|
||||
|
||||
\ TODO: s\"
|
||||
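An aside on the WITHIN definition above (over - >r - r> u<): it relies on unsigned comparison so that a single test handles ranges that wrap around zero. A minimal Python sketch of that idea, assuming 32-bit cells (the helper name is illustrative, not part of the samples):

CELL_MASK = (1 << 32) - 1             # pretend 32-bit cells

def within(x: int, lo: int, hi: int) -> bool:
    # x is inside [lo, hi) exactly when (x - lo) is unsigned-less-than (hi - lo)
    return ((x - lo) & CELL_MASK) < ((hi - lo) & CELL_MASK)

assert within(5, 1, 10)
assert not within(10, 1, 10)          # upper bound is exclusive
assert within(0, -4 & CELL_MASK, 3)   # a range that wraps past zero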
252 samples/Forth/core.fth  Normal file
@@ -0,0 +1,252 @@
|
||||
: immediate lastxt @ dup c@ negate swap c! ;
|
||||
|
||||
: \ source nip >in ! ; immediate \ Copyright 2004, 2012 Lars Brinkhoff
|
||||
|
||||
: char \ ( "word" -- char )
|
||||
bl-word here 1+ c@ ;
|
||||
|
||||
: ahead here 0 , ;
|
||||
|
||||
: resolve here swap ! ;
|
||||
|
||||
: ' bl-word here find 0branch [ ahead ] exit [ resolve ] 0 ;
|
||||
|
||||
: postpone-nonimmediate [ ' literal , ' compile, ] literal , ;
|
||||
|
||||
: create dovariable_code header, reveal ;
|
||||
|
||||
create postponers
|
||||
' postpone-nonimmediate ,
|
||||
' abort ,
|
||||
' , ,
|
||||
|
||||
: word \ ( char "<chars>string<char>" -- caddr )
|
||||
drop bl-word here ;
|
||||
|
||||
: postpone \ ( C: "word" -- )
|
||||
bl word find 1+ cells postponers + @ execute ; immediate
|
||||
|
||||
: unresolved \ ( C: "word" -- orig )
|
||||
postpone postpone postpone ahead ; immediate
|
||||
|
||||
: chars \ ( n1 -- n2 )
|
||||
;
|
||||
|
||||
: else \ ( -- ) ( C: orig1 -- orig2 )
|
||||
unresolved branch swap resolve ; immediate
|
||||
|
||||
: if \ ( flag -- ) ( C: -- orig )
|
||||
unresolved 0branch ; immediate
|
||||
|
||||
: then \ ( -- ) ( C: orig -- )
|
||||
resolve ; immediate
|
||||
|
||||
: [char] \ ( "word" -- )
|
||||
char postpone literal ; immediate
|
||||
|
||||
: (does>) lastxt @ dodoes_code over >code ! r> swap >does ! ;
|
||||
|
||||
: does> postpone (does>) ; immediate
|
||||
|
||||
: begin \ ( -- ) ( C: -- dest )
|
||||
here ; immediate
|
||||
|
||||
: while \ ( x -- ) ( C: dest -- orig dest )
|
||||
unresolved 0branch swap ; immediate
|
||||
|
||||
: repeat \ ( -- ) ( C: orig dest -- )
|
||||
postpone branch , resolve ; immediate
|
||||
|
||||
: until \ ( x -- ) ( C: dest -- )
|
||||
postpone 0branch , ; immediate
|
||||
|
||||
: recurse lastxt @ compile, ; immediate
|
||||
|
||||
: pad \ ( -- addr )
|
||||
here 1024 + ;
|
||||
|
||||
: parse \ ( char "string<char>" -- addr n )
|
||||
pad >r begin
|
||||
source? if <source 2dup <> else 0 0 then
|
||||
while
|
||||
r@ c! r> 1+ >r
|
||||
repeat 2drop pad r> over - ;
|
||||
|
||||
: ( \ ( "string<paren>" -- )
|
||||
[ char ) ] literal parse 2drop ; immediate
|
||||
\ TODO: If necessary, refill and keep parsing.
|
||||
|
||||
: string, ( addr n -- )
|
||||
here over allot align swap cmove ;
|
||||
|
||||
: (s") ( -- addr n ) ( R: ret1 -- ret2 )
|
||||
r> dup @ swap cell+ 2dup + aligned >r swap ;
|
||||
|
||||
create squote 128 allot
|
||||
|
||||
: s" ( "string<quote>" -- addr n )
|
||||
state @ if
|
||||
postpone (s") [char] " parse dup , string,
|
||||
else
|
||||
[char] " parse >r squote r@ cmove squote r>
|
||||
then ; immediate
|
||||
|
||||
: (abort") ( ... addr n -- ) ( R: ... -- )
|
||||
cr type cr abort ;
|
||||
|
||||
: abort" ( ... x "string<quote>" -- ) ( R: ... -- )
|
||||
postpone if postpone s" postpone (abort") postpone then ; immediate
|
||||
|
||||
\ ----------------------------------------------------------------------
|
||||
|
||||
( Core words. )
|
||||
|
||||
\ TODO: #
|
||||
\ TODO: #>
|
||||
\ TODO: #s
|
||||
|
||||
: and ( x y -- x&y ) nand invert ;
|
||||
|
||||
: * 1 2>r 0 swap begin r@ while
|
||||
r> r> swap 2dup dup + 2>r and if swap over + swap then dup +
|
||||
repeat r> r> 2drop drop ;
|
||||
|
||||
\ TODO: */mod
|
||||
|
||||
: +loop ( -- ) ( C: nest-sys -- )
|
||||
postpone (+loop) postpone 0branch , postpone unloop ; immediate
|
||||
|
||||
: space bl emit ;
|
||||
|
||||
: ?.- dup 0 < if [char] - emit negate then ;
|
||||
|
||||
: digit [char] 0 + emit ;
|
||||
|
||||
: (.) base @ /mod ?dup if recurse then digit ;
|
||||
|
||||
: ." ( "string<quote>" -- ) postpone s" postpone type ; immediate
|
||||
|
||||
: . ( x -- ) ?.- (.) space ;
|
||||
|
||||
: postpone-number ( caddr -- )
|
||||
0 0 rot count >number dup 0= if
|
||||
2drop nip
|
||||
postpone (literal) postpone (literal) postpone ,
|
||||
postpone literal postpone ,
|
||||
else
|
||||
." Undefined: " type cr abort
|
||||
then ;
|
||||
|
||||
' postpone-number postponers cell+ !
|
||||
|
||||
: / ( x y -- x/y ) /mod nip ;
|
||||
|
||||
: 0< ( n -- flag ) 0 < ;
|
||||
|
||||
: 1- ( n -- n-1 ) -1 + ;
|
||||
|
||||
: 2! ( x1 x2 addr -- ) swap over ! cell+ ! ;
|
||||
|
||||
: 2* ( n -- 2n ) dup + ;
|
||||
|
||||
\ Kernel: 2/
|
||||
|
||||
: 2@ ( addr -- x1 x2 ) dup cell+ @ swap @ ;
|
||||
|
||||
\ Kernel: 2drop
|
||||
\ Kernel: 2dup
|
||||
|
||||
\ TODO: 2over ( x1 x2 x3 x4 -- x1 x2 x3 x4 x1 x2 )
|
||||
\ 3 pick 3 pick ;
|
||||
|
||||
\ TODO: 2swap
|
||||
|
||||
\ TODO: <#
|
||||
|
||||
: abs ( n -- |n| )
|
||||
dup 0< if negate then ;
|
||||
|
||||
\ TODO: accept
|
||||
|
||||
: c, ( n -- )
|
||||
here c! 1 chars allot ;
|
||||
|
||||
: char+ ( n1 -- n2 )
|
||||
1+ ;
|
||||
|
||||
: constant create , does> @ ;
|
||||
|
||||
: decimal ( -- )
|
||||
10 base ! ;
|
||||
|
||||
: depth ( -- n )
|
||||
data_stack 100 cells + 'SP @ - /cell / 2 - ;
|
||||
|
||||
: do ( n1 n2 -- ) ( R: -- loop-sys ) ( C: -- do-sys )
|
||||
postpone 2>r here ; immediate
|
||||
|
||||
\ TODO: environment?
|
||||
\ TODO: evaluate
|
||||
\ TODO: fill
|
||||
\ TODO: fm/mod )
|
||||
\ TODO: hold
|
||||
|
||||
: j ( -- x1 ) ( R: x1 x2 x3 -- x1 x2 x3 )
|
||||
'RP @ 3 cells + @ ;
|
||||
|
||||
\ TODO: leave
|
||||
|
||||
: loop ( -- ) ( C: nest-sys -- )
|
||||
postpone 1 postpone (+loop)
|
||||
postpone 0branch ,
|
||||
postpone unloop ; immediate
|
||||
|
||||
: lshift begin ?dup while 1- swap dup + swap repeat ;
|
||||
|
||||
: rshift 1 begin over while dup + swap 1- swap repeat nip
|
||||
2>r 0 1 begin r@ while
|
||||
r> r> 2dup swap dup + 2>r and if swap over + swap then dup +
|
||||
repeat r> r> 2drop drop ;
|
||||
|
||||
: max ( x y -- max[x,y] )
|
||||
2dup > if drop else nip then ;
|
||||
|
||||
\ Kernel: min
|
||||
\ TODO: mod
|
||||
\ TODO: move
|
||||
|
||||
: (quit) ( R: ... -- )
|
||||
return_stack 100 cells + 'RP !
|
||||
0 'source-id ! tib ''source ! #tib ''#source !
|
||||
postpone [
|
||||
begin
|
||||
refill
|
||||
while
|
||||
interpret state @ 0= if ." ok" cr then
|
||||
repeat
|
||||
bye ;
|
||||
|
||||
' (quit) ' quit >body cell+ !
|
||||
|
||||
\ TODO: s>d
|
||||
\ TODO: sign
|
||||
\ TODO: sm/rem
|
||||
|
||||
: spaces ( n -- )
|
||||
0 do space loop ;
|
||||
|
||||
\ TODO: u.
|
||||
|
||||
: signbit ( -- n ) -1 1 rshift invert ;
|
||||
|
||||
: xor ( x y -- x^y ) 2dup nand >r r@ nand swap r> nand nand ;
|
||||
|
||||
: u< ( x y -- flag ) signbit xor swap signbit xor > ;
|
||||
|
||||
\ TODO: um/mod
|
||||
|
||||
: variable ( "word" -- )
|
||||
create /cell allot ;
|
||||
|
||||
: ['] \ ( C: "word" -- )
|
||||
' postpone literal ; immediate
|
||||
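The * definition in core.fth above builds multiplication out of doubling and conditional adds. A minimal Python sketch of that shift-add idea, assuming fixed-width cells (the function name is illustrative, not part of the samples):

def shift_add_multiply(x: int, y: int, bits: int = 32) -> int:
    """Multiply two unsigned integers using only shifts and adds."""
    mask = (1 << bits) - 1
    result = 0
    for _ in range(bits):
        if y & 1:                  # low bit of y set: add the current partial product
            result = (result + x) & mask
        x = (x << 1) & mask        # double the partial product (shift left)
        y >>= 1                    # move on to the next bit of y
    return result

assert shift_add_multiply(7, 6) == 42
assert shift_add_multiply(123, 0) == 0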
5 samples/Forth/hello-forth.forth  Normal file
@@ -0,0 +1,5 @@
|
||||
: HELLO ( -- )
|
||||
." Hello Forth (forth)!" ;
|
||||
|
||||
HELLO
|
||||
|
||||
5 samples/Forth/hello-forth.fth  Normal file
@@ -0,0 +1,5 @@
|
||||
: HELLO ( -- )
|
||||
." Hello Forth (fth)!" ;
|
||||
|
||||
HELLO
|
||||
|
||||
133 samples/Forth/tools.fth  Normal file
@@ -0,0 +1,133 @@
|
||||
\ -*- forth -*- Copyright 2004, 2013 Lars Brinkhoff
|
||||
|
||||
( Tools words. )
|
||||
|
||||
: .s ( -- )
|
||||
[char] < emit depth (.) ." > "
|
||||
'SP @ >r r@ depth 1- cells +
|
||||
begin
|
||||
dup r@ <>
|
||||
while
|
||||
dup @ .
|
||||
/cell -
|
||||
repeat r> 2drop ;
|
||||
|
||||
: ? @ . ;
|
||||
|
||||
: c? c@ . ;
|
||||
|
||||
: dump bounds do i ? /cell +loop cr ;
|
||||
|
||||
: cdump bounds do i c? loop cr ;
|
||||
|
||||
: again postpone branch , ; immediate
|
||||
|
||||
: see-find ( caddr -- end xt )
|
||||
>r here lastxt @
|
||||
begin
|
||||
dup 0= abort" Undefined word"
|
||||
dup r@ word= if r> drop exit then
|
||||
nip dup >nextxt
|
||||
again ;
|
||||
|
||||
: cabs ( char -- |char| ) dup 127 > if 256 swap - then ;
|
||||
|
||||
: xt. ( xt -- )
|
||||
( >name ) count cabs type ;
|
||||
|
||||
: xt? ( xt -- flag )
|
||||
>r lastxt @ begin
|
||||
?dup
|
||||
while
|
||||
dup r@ = if r> 2drop -1 exit then
|
||||
>nextxt
|
||||
repeat r> drop 0 ;
|
||||
|
||||
: disassemble ( x -- )
|
||||
dup xt? if
|
||||
( >name ) count
|
||||
dup 127 > if ." postpone " then
|
||||
cabs type
|
||||
else
|
||||
.
|
||||
then ;
|
||||
|
||||
: .addr dup . ;
|
||||
|
||||
: see-line ( addr -- )
|
||||
cr ." ( " .addr ." ) " @ disassemble ;
|
||||
|
||||
: see-word ( end xt -- )
|
||||
>r ." : " r@ xt.
|
||||
r@ >body do i see-line /cell +loop
|
||||
." ;" r> c@ 127 > if ." immediate" then ;
|
||||
|
||||
: see bl word see-find see-word cr ;
|
||||
|
||||
: #body bl word see-find >body - ;
|
||||
|
||||
: type-word ( end xt -- flag )
|
||||
xt. space drop 0 ;
|
||||
|
||||
: traverse-dictionary ( in.. xt -- out.. )
|
||||
\ xt execution: ( in.. end xt2 -- in.. 0 | in.. end xt2 -- out.. true )
|
||||
>r here lastxt @ begin
|
||||
?dup
|
||||
while
|
||||
r> 2dup >r >r execute
|
||||
if r> r> 2drop exit then
|
||||
r> dup >nextxt
|
||||
repeat r> 2drop ;
|
||||
|
||||
: words ( -- )
|
||||
['] type-word traverse-dictionary cr ;
|
||||
|
||||
\ ----------------------------------------------------------------------
|
||||
|
||||
( Tools extension words. )
|
||||
|
||||
\ ;code
|
||||
|
||||
\ assembler
|
||||
|
||||
\ in kernel: bye
|
||||
|
||||
\ code
|
||||
|
||||
\ cs-pick
|
||||
|
||||
\ cs-roll
|
||||
|
||||
\ editor
|
||||
|
||||
: forget ' dup >nextxt lastxt ! 'here ! reveal ;
|
||||
|
||||
\ Kernel: state
|
||||
|
||||
\ [else]
|
||||
|
||||
\ [if]
|
||||
|
||||
\ [then]
|
||||
|
||||
\ ----------------------------------------------------------------------
|
||||
|
||||
( Forth2012 tools extension words. )
|
||||
|
||||
\ TODO: n>r
|
||||
|
||||
\ TODO: nr>
|
||||
|
||||
\ TODO: synonym
|
||||
|
||||
: [undefined] bl-word find nip 0= ; immediate
|
||||
|
||||
: [defined] postpone [undefined] invert ; immediate
|
||||
|
||||
\ ----------------------------------------------------------------------
|
||||
|
||||
: @+ ( addr -- addr+/cell x ) dup cell+ swap @ ;
|
||||
|
||||
: !+ ( x addr -- addr+/cell ) tuck ! cell+ ;
|
||||
|
||||
: -rot swap >r swap r> ;
|
||||
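Words like see-find and traverse-dictionary in tools.fth walk the dictionary as a linked list from the newest definition back to the oldest. A rough Python model of that walk, with a hypothetical Entry record standing in for lbForth's in-memory headers:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Entry:
    name: str
    prev: Optional["Entry"]        # the ">nextxt" link to the previously defined word

def find(latest: Optional[Entry], name: str) -> Entry:
    """Walk from the newest definition back to the oldest, as see-find does."""
    xt = latest
    while xt is not None:
        if xt.name == name:
            return xt
        xt = xt.prev
    raise LookupError("Undefined word")   # mirrors abort" Undefined word"

older = Entry("dup", None)
newest = Entry("swap", older)
assert find(newest, "dup") is older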
161 samples/GLSL/SyLens.glsl  Normal file
@@ -0,0 +1,161 @@
|
||||
#version 120
|
||||
|
||||
/*
|
||||
Original Lens Distortion Algorithm from SSontech (Syntheyes)
|
||||
http://www.ssontech.com/content/lensalg.htm
|
||||
|
||||
r2 is radius squared.
|
||||
|
||||
r2 = image_aspect*image_aspect*u*u + v*v
|
||||
f = 1 + r2*(k + kcube*sqrt(r2))
|
||||
u' = f*u
|
||||
v' = f*v
|
||||
|
||||
*/
|
||||
|
||||
// Controls
|
||||
uniform float kCoeff, kCube, uShift, vShift;
|
||||
uniform float chroma_red, chroma_green, chroma_blue;
|
||||
uniform bool apply_disto;
|
||||
|
||||
// Uniform inputs
|
||||
uniform sampler2D input1;
|
||||
uniform float adsk_input1_w, adsk_input1_h, adsk_input1_aspect, adsk_input1_frameratio;
|
||||
uniform float adsk_result_w, adsk_result_h;
|
||||
|
||||
float distortion_f(float r) {
|
||||
float f = 1 + (r*r)*(kCoeff + kCube * r);
|
||||
return f;
|
||||
}
|
||||
|
||||
|
||||
float inverse_f(float r)
|
||||
{
|
||||
|
||||
// Build a lookup table on the radius, as a fixed-size table.
|
||||
// We will use a vec3 since we will store the multiplied number in the Z coordinate.
|
||||
// So to recap: x will be the radius, y will be the f(x) distortion, and Z will be x * y;
|
||||
vec3[48] lut;
|
||||
|
||||
// Since our LUT is shader-global, check if it's been computed already
|
||||
// Flame has no overflow bbox so we can safely max out at the image edge, plus some cushion
|
||||
float max_r = sqrt((adsk_input1_frameratio * adsk_input1_frameratio) + 1) + 0.1;
|
||||
float incr = max_r / 48;
|
||||
float lut_r = 0;
|
||||
float f;
|
||||
for(int i=0; i < 48; i++) {
|
||||
f = distortion_f(lut_r);
|
||||
lut[i] = vec3(lut_r, f, lut_r * f);
|
||||
lut_r += incr;
|
||||
}
|
||||
|
||||
float t;
|
||||
// Now find the neighbouring elements
|
||||
// only iterate to 46 since we will need
|
||||
// 47 as i+1
|
||||
for(int i=0; i < 47; i++) {
|
||||
if(lut[i].z < r && lut[i+1].z > r) {
|
||||
// BAM! our value is between these two segments
|
||||
// get the T interpolant and mix
|
||||
t = (r - lut[i].z) / (lut[i+1].z - lut[i].z);
|
||||
return mix(lut[i].y, lut[i+1].y, t );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
float aberrate(float f, float chroma)
|
||||
{
|
||||
return f + (f * chroma);
|
||||
}
|
||||
|
||||
vec3 chromaticize_and_invert(float f)
|
||||
{
|
||||
vec3 rgb_f = vec3(aberrate(f, chroma_red), aberrate(f, chroma_green), aberrate(f, chroma_blue));
|
||||
// We need to DIVIDE by F when we redistort, and x / y == x * (1 / y)
|
||||
if(apply_disto) {
|
||||
rgb_f = 1 / rgb_f;
|
||||
}
|
||||
return rgb_f;
|
||||
}
|
||||
|
||||
void main(void)
|
||||
{
|
||||
vec2 px, uv;
|
||||
float f = 1;
|
||||
float r = 1;
|
||||
|
||||
px = gl_FragCoord.xy;
|
||||
|
||||
// Make sure we are still centered
|
||||
px.x -= (adsk_result_w - adsk_input1_w) / 2;
|
||||
px.y -= (adsk_result_h - adsk_input1_h) / 2;
|
||||
|
||||
// Push the destination coordinates into the [0..1] range
|
||||
uv.x = px.x / adsk_input1_w;
|
||||
uv.y = px.y / adsk_input1_h;
|
||||
|
||||
|
||||
// And to Syntheyes UV which are [1..-1] on both X and Y
|
||||
uv.x = (uv.x *2 ) - 1;
|
||||
uv.y = (uv.y *2 ) - 1;
|
||||
|
||||
// Add UV shifts
|
||||
uv.x += uShift;
|
||||
uv.y += vShift;
|
||||
|
||||
// Make the X value the aspect value, so that the X coordinates go to [-aspect..aspect]
|
||||
uv.x = uv.x * adsk_input1_frameratio;
|
||||
|
||||
// Compute the radius
|
||||
r = sqrt(uv.x*uv.x + uv.y*uv.y);
|
||||
|
||||
// If we are redistorting, account for the oversize plate in the input, assume that
|
||||
// the input aspect is the same
|
||||
if(apply_disto) {
|
||||
r = r / (float(adsk_input1_w) / float(adsk_result_w));
|
||||
}
|
||||
|
||||
// Apply or remove disto, per channel honoring chromatic aberration
|
||||
if(apply_disto) {
|
||||
f = inverse_f(r);
|
||||
} else {
|
||||
f = distortion_f(r);
|
||||
}
|
||||
|
||||
vec2[3] rgb_uvs = vec2[](uv, uv, uv);
|
||||
|
||||
// Compute distortions per component
|
||||
vec3 rgb_f = chromaticize_and_invert(f);
|
||||
|
||||
// Apply the disto coefficients, per component
|
||||
rgb_uvs[0] = rgb_uvs[0] * rgb_f.rr;
|
||||
rgb_uvs[1] = rgb_uvs[1] * rgb_f.gg;
|
||||
rgb_uvs[2] = rgb_uvs[2] * rgb_f.bb;
|
||||
|
||||
// Convert all the UVs back to the texture space, per color component
|
||||
for(int i=0; i < 3; i++) {
|
||||
uv = rgb_uvs[i];
|
||||
|
||||
// Back from [-aspect..aspect] to [-1..1]
|
||||
uv.x = uv.x / adsk_input1_frameratio;
|
||||
|
||||
// Remove UV shifts
|
||||
uv.x -= uShift;
|
||||
uv.y -= vShift;
|
||||
|
||||
// Back to OGL UV
|
||||
uv.x = (uv.x + 1) / 2;
|
||||
uv.y = (uv.y + 1) / 2;
|
||||
|
||||
rgb_uvs[i] = uv;
|
||||
}
|
||||
|
||||
// Sample the input plate, per component
|
||||
vec4 sampled;
|
||||
sampled.r = texture2D(input1, rgb_uvs[0]).r;
|
||||
sampled.g = texture2D(input1, rgb_uvs[1]).g;
|
||||
sampled.b = texture2D(input1, rgb_uvs[2]).b;
|
||||
|
||||
// and assign to the output
|
||||
gl_FragColor.rgba = vec4(sampled.rgb, 1.0 );
|
||||
}
|
||||
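SyLens.glsl documents the SSontech distortion model (f = 1 + r^2*(k + kcube*r), applied to the normalized radius) and inverts it with a small lookup table. A minimal Python sketch of both steps, reusing the shader's parameter names k and kcube (the function bodies are illustrative, not the shader's exact code):

def distortion_f(r: float, k: float, kcube: float) -> float:
    # f = 1 + r^2 * (k + kcube * r), as documented in the shader header
    return 1.0 + (r * r) * (k + kcube * r)

def inverse_f(rd: float, k: float, kcube: float, max_r: float, n: int = 48) -> float:
    """Invert rd = r * f(r) by tabulating (r, f, r*f) and interpolating, LUT-style."""
    lut = [(x, distortion_f(x, k, kcube), x * distortion_f(x, k, kcube))
           for x in (max_r * i / (n - 1) for i in range(n))]
    for (x0, f0, z0), (x1, f1, z1) in zip(lut, lut[1:]):
        if z0 <= rd <= z1:
            t = (rd - z0) / (z1 - z0)
            return f0 + t * (f1 - f0)       # mix(f0, f1, t)
    return lut[-1][1]                       # clamp outside the table

r = 0.8
f = distortion_f(r, k=0.1, kcube=0.02)
assert abs(inverse_f(r * f, 0.1, 0.02, max_r=2.0) - f) < 1e-3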
630 samples/GLSL/islandScene.glsl  Normal file
@@ -0,0 +1,630 @@
|
||||
//// High quality (Some browsers may freeze or crash)
|
||||
//#define HIGHQUALITY
|
||||
|
||||
//// Medium quality (Should be fine on all systems, works on Intel HD2000 on Win7 but quite slow)
|
||||
//#define MEDIUMQUALITY
|
||||
|
||||
//// Defaults
|
||||
//#define REFLECTIONS
|
||||
#define SHADOWS
|
||||
//#define GRASS
|
||||
//#define SMALL_WAVES
|
||||
#define RAGGED_LEAVES
|
||||
//#define DETAILED_NOISE
|
||||
//#define LIGHT_AA // 2 sample SSAA
|
||||
//#define HEAVY_AA // 2x2 RG SSAA
|
||||
//#define TONEMAP
|
||||
|
||||
//// Configurations
|
||||
#ifdef MEDIUMQUALITY
|
||||
#define SHADOWS
|
||||
#define SMALL_WAVES
|
||||
#define RAGGED_LEAVES
|
||||
#define TONEMAP
|
||||
#endif
|
||||
|
||||
#ifdef HIGHQUALITY
|
||||
#define REFLECTIONS
|
||||
#define SHADOWS
|
||||
//#define GRASS
|
||||
#define SMALL_WAVES
|
||||
#define RAGGED_LEAVES
|
||||
#define DETAILED_NOISE
|
||||
#define LIGHT_AA
|
||||
#define TONEMAP
|
||||
#endif
|
||||
|
||||
// Constants
|
||||
const float eps = 1e-5;
|
||||
const float PI = 3.14159265359;
|
||||
|
||||
const vec3 sunDir = vec3(0.79057,-0.47434, 0.0);
|
||||
const vec3 skyCol = vec3(0.3, 0.5, 0.8);
|
||||
const vec3 sandCol = vec3(0.9, 0.8, 0.5);
|
||||
const vec3 treeCol = vec3(0.8, 0.65, 0.3);
|
||||
const vec3 grassCol = vec3(0.4, 0.5, 0.18);
|
||||
const vec3 leavesCol = vec3(0.3, 0.6, 0.2);
|
||||
const vec3 leavesPos = vec3(-5.1,13.4, 0.0);
|
||||
|
||||
#ifdef TONEMAP
|
||||
const vec3 sunCol = vec3(1.8, 1.7, 1.6);
|
||||
#else
|
||||
const vec3 sunCol = vec3(0.9, 0.85, 0.8);
|
||||
#endif
|
||||
|
||||
const float exposure = 1.1; // Only used when tonemapping
|
||||
|
||||
// Description : Array and textureless GLSL 2D/3D/4D simplex
|
||||
// noise functions.
|
||||
// Author : Ian McEwan, Ashima Arts.
|
||||
// License : Copyright (C) 2011 Ashima Arts. All rights reserved.
|
||||
// Distributed under the MIT License. See LICENSE file.
|
||||
// https://github.com/ashima/webgl-noise
|
||||
vec3 mod289(vec3 x) {
|
||||
return x - floor(x * (1.0 / 289.0)) * 289.0;
|
||||
}
|
||||
|
||||
vec4 mod289(vec4 x) {
|
||||
return x - floor(x * (1.0 / 289.0)) * 289.0;
|
||||
}
|
||||
|
||||
vec4 permute(vec4 x) {
|
||||
return mod289(((x*34.0)+1.0)*x);
|
||||
}
|
||||
|
||||
vec4 taylorInvSqrt(vec4 r) {
|
||||
return 1.79284291400159 - 0.85373472095314 * r;
|
||||
}
|
||||
|
||||
float snoise(vec3 v) {
|
||||
const vec2 C = vec2(1.0/6.0, 1.0/3.0) ;
|
||||
const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);
|
||||
|
||||
// First corner
|
||||
vec3 i = floor(v + dot(v, C.yyy) );
|
||||
vec3 x0 = v - i + dot(i, C.xxx) ;
|
||||
|
||||
// Other corners
|
||||
vec3 g = step(x0.yzx, x0.xyz);
|
||||
vec3 l = 1.0 - g;
|
||||
vec3 i1 = min( g.xyz, l.zxy );
|
||||
vec3 i2 = max( g.xyz, l.zxy );
|
||||
|
||||
// x0 = x0 - 0.0 + 0.0 * C.xxx;
|
||||
// x1 = x0 - i1 + 1.0 * C.xxx;
|
||||
// x2 = x0 - i2 + 2.0 * C.xxx;
|
||||
// x3 = x0 - 1.0 + 3.0 * C.xxx;
|
||||
vec3 x1 = x0 - i1 + C.xxx;
|
||||
vec3 x2 = x0 - i2 + C.yyy; // 2.0*C.x = 1/3 = C.y
|
||||
vec3 x3 = x0 - D.yyy; // -1.0+3.0*C.x = -0.5 = -D.y
|
||||
|
||||
// Permutations
|
||||
i = mod289(i);
|
||||
vec4 p = permute( permute( permute(
|
||||
i.z + vec4(0.0, i1.z, i2.z, 1.0 ))
|
||||
+ i.y + vec4(0.0, i1.y, i2.y, 1.0 ))
|
||||
+ i.x + vec4(0.0, i1.x, i2.x, 1.0 ));
|
||||
|
||||
// Gradients: 7x7 points over a square, mapped onto an octahedron.
|
||||
// The ring size 17*17 = 289 is close to a multiple of 49 (49*6 = 294)
|
||||
float n_ = 0.142857142857; // 1.0/7.0
|
||||
vec3 ns = n_ * D.wyz - D.xzx;
|
||||
|
||||
vec4 j = p - 49.0 * floor(p * ns.z * ns.z); // mod(p,7*7)
|
||||
|
||||
vec4 x_ = floor(j * ns.z);
|
||||
vec4 y_ = floor(j - 7.0 * x_ ); // mod(j,N)
|
||||
|
||||
vec4 x = x_ *ns.x + ns.yyyy;
|
||||
vec4 y = y_ *ns.x + ns.yyyy;
|
||||
vec4 h = 1.0 - abs(x) - abs(y);
|
||||
|
||||
vec4 b0 = vec4( x.xy, y.xy );
|
||||
vec4 b1 = vec4( x.zw, y.zw );
|
||||
|
||||
//vec4 s0 = vec4(lessThan(b0,0.0))*2.0 - 1.0;
|
||||
//vec4 s1 = vec4(lessThan(b1,0.0))*2.0 - 1.0;
|
||||
vec4 s0 = floor(b0)*2.0 + 1.0;
|
||||
vec4 s1 = floor(b1)*2.0 + 1.0;
|
||||
vec4 sh = -step(h, vec4(0.0));
|
||||
|
||||
vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ;
|
||||
vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ;
|
||||
|
||||
vec3 p0 = vec3(a0.xy,h.x);
|
||||
vec3 p1 = vec3(a0.zw,h.y);
|
||||
vec3 p2 = vec3(a1.xy,h.z);
|
||||
vec3 p3 = vec3(a1.zw,h.w);
|
||||
|
||||
//Normalise gradients
|
||||
vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));
|
||||
p0 *= norm.x;
|
||||
p1 *= norm.y;
|
||||
p2 *= norm.z;
|
||||
p3 *= norm.w;
|
||||
|
||||
// Mix final noise value
|
||||
vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
|
||||
m = m * m;
|
||||
return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1),
|
||||
dot(p2,x2), dot(p3,x3) ) );
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Main
|
||||
float fbm(vec3 p)
|
||||
{
|
||||
float final = snoise(p);
|
||||
p *= 1.94; final += snoise(p) * 0.5;
|
||||
#ifdef DETAILED_NOISE
|
||||
p *= 3.75; final += snoise(p) * 0.25;
|
||||
return final / 1.75;
|
||||
#else
|
||||
return final / 1.5;
|
||||
#endif
|
||||
}
|
||||
|
||||
float waterHeight(vec3 p)
|
||||
{
|
||||
float d = length(p.xz);
|
||||
float h = sin(d * 1.5 + iGlobalTime * 3.0) * 12.0 / d; // Island waves
|
||||
#ifdef SMALL_WAVES
|
||||
h += fbm(p*0.5); // Other waves
|
||||
#endif
|
||||
return h;
|
||||
}
|
||||
|
||||
vec3 bump(vec3 pos, vec3 rayDir)
|
||||
{
|
||||
float s = 2.0;
|
||||
|
||||
// Fade out waves to reduce aliasing
|
||||
float dist = dot(pos, rayDir);
|
||||
s *= dist < 2.0 ? 1.0 : 1.4142 / sqrt(dist);
|
||||
|
||||
// Calculate normal from heightmap
|
||||
vec2 e = vec2(1e-2, 0.0);
|
||||
vec3 p = vec3(pos.x, iGlobalTime*0.5, pos.z)*0.7;
|
||||
float m = waterHeight(p)*s;
|
||||
return normalize(vec3(
|
||||
waterHeight(p+e.xyy)*s-m,
|
||||
1.0,
|
||||
waterHeight(p+e.yxy)*s-m
|
||||
));
|
||||
}
|
||||
|
||||
// Ray intersections
|
||||
vec4 intersectSphere(vec3 rpos, vec3 rdir, vec3 pos, float rad)
|
||||
{
|
||||
vec3 op = pos - rpos;
|
||||
float b = dot(op, rdir);
|
||||
float det = b*b - dot(op, op) + rad*rad;
|
||||
|
||||
if (det > 0.0)
|
||||
{
|
||||
det = sqrt(det);
|
||||
float t = b - det;
|
||||
if (t > eps)
|
||||
return vec4(-normalize(rpos+rdir*t-pos), t);
|
||||
}
|
||||
|
||||
return vec4(0.0);
|
||||
}
|
||||
|
||||
vec4 intersectCylinder(vec3 rpos, vec3 rdir, vec3 pos, float rad)
|
||||
{
|
||||
vec3 op = pos - rpos;
|
||||
vec2 rdir2 = normalize(rdir.yz);
|
||||
float b = dot(op.yz, rdir2);
|
||||
float det = b*b - dot(op.yz, op.yz) + rad*rad;
|
||||
|
||||
if (det > 0.0)
|
||||
{
|
||||
det = sqrt(det);
|
||||
float t = b - det;
|
||||
if (t > eps)
|
||||
return vec4(-normalize(rpos.yz+rdir2*t-pos.yz), 0.0, t);
|
||||
t = b + det;
|
||||
if (t > eps)
|
||||
return vec4(-normalize(rpos.yz+rdir2*t-pos.yz), 0.0, t);
|
||||
}
|
||||
|
||||
return vec4(0.0);
|
||||
}
|
||||
|
||||
vec4 intersectPlane(vec3 rayPos, vec3 rayDir, vec3 n, float d)
|
||||
{
|
||||
float t = -(dot(rayPos, n) + d) / dot(rayDir, n);
|
||||
return vec4(n * sign(dot(rayDir, n)), t);
|
||||
}
|
||||
|
||||
// Helper functions
|
||||
vec3 rotate(vec3 p, float theta)
|
||||
{
|
||||
float c = cos(theta), s = sin(theta);
|
||||
return vec3(p.x * c + p.z * s, p.y,
|
||||
p.z * c - p.x * s);
|
||||
}
|
||||
|
||||
float impulse(float k, float x) // by iq
|
||||
{
|
||||
float h = k*x;
|
||||
return h * exp(1.0 - h);
|
||||
}
|
||||
|
||||
// Raymarched parts of scene
|
||||
float grass(vec3 pos)
|
||||
{
|
||||
float h = length(pos - vec3(0.0, -7.0, 0.0)) - 8.0;
|
||||
|
||||
if (h > 2.0) return h; // Optimization (Avoid noise if too far away)
|
||||
|
||||
return h + snoise(pos * 3.0) * 0.1 + pos.y * 0.9;
|
||||
}
|
||||
|
||||
float tree(vec3 pos)
|
||||
{
|
||||
pos.y -= 0.5;
|
||||
float s = sin(pos.y*0.03);
|
||||
float c = cos(pos.y*0.03);
|
||||
mat2 m = mat2(c, -s, s, c);
|
||||
vec3 p = vec3(m*pos.xy, pos.z);
|
||||
|
||||
float width = 1.0 - pos.y * 0.02 - clamp(sin(pos.y * 8.0) * 0.1, 0.05, 0.1);
|
||||
|
||||
return max(length(p.xz) - width, pos.y - 12.5);
|
||||
}
|
||||
|
||||
vec2 scene(vec3 pos)
|
||||
{
|
||||
float vtree = tree(pos);
|
||||
#ifdef GRASS
|
||||
float vgrass = grass(pos);
|
||||
float v = min(vtree, vgrass);
|
||||
#else
|
||||
float v = vtree;
|
||||
#endif
|
||||
return vec2(v, v == vtree ? 2.0 : 1.0);
|
||||
}
|
||||
|
||||
vec3 normal(vec3 pos)
|
||||
{
|
||||
vec2 eps = vec2(1e-3, 0.0);
|
||||
float h = scene(pos).x;
|
||||
return normalize(vec3(
|
||||
scene(pos-eps.xyy).x-h,
|
||||
scene(pos-eps.yxy).x-h,
|
||||
scene(pos-eps.yyx).x-h
|
||||
));
|
||||
}
|
||||
|
||||
float plantsShadow(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
// Soft shadow taken from iq
|
||||
float k = 6.0;
|
||||
float t = 0.0;
|
||||
float s = 1.0;
|
||||
for (int i = 0; i < 30; i++)
|
||||
{
|
||||
vec3 pos = rayPos+rayDir*t;
|
||||
vec2 res = scene(pos);
|
||||
if (res.x < 0.001) return 0.0;
|
||||
s = min(s, k*res.x/t);
|
||||
t += max(res.x, 0.01);
|
||||
}
|
||||
|
||||
return s*s*(3.0 - 2.0*s);
|
||||
}
|
||||
|
||||
// Ray-traced parts of scene
|
||||
vec4 intersectWater(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
float h = sin(20.5 + iGlobalTime * 2.0) * 0.03;
|
||||
float t = -(rayPos.y + 2.5 + h) / rayDir.y;
|
||||
return vec4(0.0, 1.0, 0.0, t);
|
||||
}
|
||||
|
||||
vec4 intersectSand(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
return intersectSphere(rayPos, rayDir, vec3(0.0,-24.1,0.0), 24.1);
|
||||
}
|
||||
|
||||
vec4 intersectTreasure(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
return vec4(0.0);
|
||||
}
|
||||
|
||||
vec4 intersectLeaf(vec3 rayPos, vec3 rayDir, float openAmount)
|
||||
{
|
||||
vec3 dir = normalize(vec3(0.0, 1.0, openAmount));
|
||||
float offset = 0.0;
|
||||
|
||||
vec4 res = intersectPlane(rayPos, rayDir, dir, 0.0);
|
||||
vec3 pos = rayPos+rayDir*res.w;
|
||||
#ifdef RAGGED_LEAVES
|
||||
offset = snoise(pos*0.8) * 0.3;
|
||||
#endif
|
||||
if (pos.y > 0.0 || length(pos * vec3(0.9, 2.0, 1.0)) > 4.0 - offset) res.w = 0.0;
|
||||
|
||||
vec4 res2 = intersectPlane(rayPos, rayDir, vec3(dir.xy, -dir.z), 0.0);
|
||||
pos = rayPos+rayDir*res2.w;
|
||||
#ifdef RAGGED_LEAVES
|
||||
offset = snoise(pos*0.8) * 0.3;
|
||||
#endif
|
||||
if (pos.y > 0.0 || length(pos * vec3(0.9, 2.0, 1.0)) > 4.0 - offset) res2.w = 0.0;
|
||||
|
||||
if (res2.w > 0.0 && res2.w < res.w || res.w <= 0.0)
|
||||
res = res2;
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
vec4 leaves(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
float t = 1e20;
|
||||
vec3 n = vec3(0.0);
|
||||
|
||||
rayPos -= leavesPos;
|
||||
|
||||
float sway = impulse(15.0, fract(iGlobalTime / PI * 0.125));
|
||||
float upDownSway = sway * -sin(iGlobalTime) * 0.06;
|
||||
float openAmount = sway * max(-cos(iGlobalTime) * 0.4, 0.0);
|
||||
|
||||
float angleOffset = -0.1;
|
||||
for (float k = 0.0; k < 6.2; k += 0.75)
|
||||
{
|
||||
// Left-right
|
||||
float alpha = k + (k - PI) * sway * 0.015;
|
||||
vec3 p = rotate(rayPos, alpha);
|
||||
vec3 d = rotate(rayDir, alpha);
|
||||
|
||||
// Up-down
|
||||
angleOffset *= -1.0;
|
||||
float theta = -0.4 +
|
||||
angleOffset +
|
||||
cos(k) * 0.35 +
|
||||
upDownSway +
|
||||
sin(iGlobalTime+k*10.0) * 0.03 * (sway + 0.2);
|
||||
|
||||
p = rotate(p.xzy, theta).xzy;
|
||||
d = rotate(d.xzy, theta).xzy;
|
||||
|
||||
// Shift
|
||||
p -= vec3(5.4, 0.0, 0.0);
|
||||
|
||||
// Intersect individual leaf
|
||||
vec4 res = intersectLeaf(p, d, 1.0+openAmount);
|
||||
if (res.w > 0.0 && res.w < t)
|
||||
{
|
||||
t = res.w;
|
||||
n = res.xyz;
|
||||
}
|
||||
}
|
||||
|
||||
return vec4(n, t);
|
||||
}
|
||||
|
||||
// Lighting
|
||||
float shadow(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
float s = 1.0;
|
||||
|
||||
// Intersect sand
|
||||
//vec4 resSand = intersectSand(rayPos, rayDir);
|
||||
//if (resSand.w > 0.0) return 0.0;
|
||||
|
||||
// Intersect plants
|
||||
s = min(s, plantsShadow(rayPos, rayDir));
|
||||
if (s < 0.0001) return 0.0;
|
||||
|
||||
// Intersect leaves
|
||||
vec4 resLeaves = leaves(rayPos, rayDir);
|
||||
if (resLeaves.w > 0.0 && resLeaves.w < 1e7) return 0.0;
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
vec3 light(vec3 p, vec3 n)
|
||||
{
|
||||
float s = 1.0;
|
||||
|
||||
#ifdef SHADOWS
|
||||
s = shadow(p-sunDir*0.01, -sunDir);
|
||||
#endif
|
||||
|
||||
vec3 col = sunCol * min(max(dot(n, sunDir), 0.0), s);
|
||||
col += skyCol * (-n.y * 0.5 + 0.5) * 0.3;
|
||||
return col;
|
||||
}
|
||||
|
||||
vec3 lightLeaves(vec3 p, vec3 n)
|
||||
{
|
||||
float s = 1.0;
|
||||
|
||||
#ifdef SHADOWS
|
||||
s = shadow(p-sunDir*0.01, -sunDir);
|
||||
#endif
|
||||
|
||||
float ao = min(length(p - leavesPos) * 0.1, 1.0);
|
||||
|
||||
float ns = dot(n, sunDir);
|
||||
float d = sqrt(max(ns, 0.0));
|
||||
vec3 col = sunCol * min(d, s);
|
||||
col += sunCol * max(-ns, 0.0) * vec3(0.3, 0.3, 0.1) * ao;
|
||||
col += skyCol * (-n.y * 0.5 + 0.5) * 0.3 * ao;
|
||||
return col;
|
||||
}
|
||||
|
||||
vec3 sky(vec3 n)
|
||||
{
|
||||
return skyCol * (1.0 - n.y * 0.8);
|
||||
}
|
||||
|
||||
// Ray-marching
|
||||
vec4 plants(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
float t = 0.0;
|
||||
|
||||
for (int i = 0; i < 40; i++)
|
||||
{
|
||||
vec3 pos = rayPos+rayDir*t;
|
||||
vec2 res = scene(pos);
|
||||
float h = res.x;
|
||||
|
||||
if (h < 0.001)
|
||||
{
|
||||
vec3 col = res.y == 2.0 ? treeCol : grassCol;
|
||||
float uvFact = res.y == 2.0 ? 1.0 : 10.0;
|
||||
|
||||
vec3 n = normal(pos);
|
||||
vec2 uv = vec2(n.x, pos.y * 0.5) * 0.2 * uvFact;
|
||||
vec3 tex = texture2D(iChannel0, uv).rgb * 0.6 + 0.4;
|
||||
float ao = min(length(pos - leavesPos) * 0.1, 1.0);
|
||||
return vec4(col * light(pos, n) * ao * tex, t);
|
||||
}
|
||||
|
||||
t += h;
|
||||
}
|
||||
|
||||
return vec4(sky(rayDir), 1e8);
|
||||
}
|
||||
|
||||
// Final combination
|
||||
vec3 traceReflection(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
vec3 col = vec3(0.0);
|
||||
float t = 1e20;
|
||||
|
||||
// Intersect plants
|
||||
vec4 resPlants = plants(rayPos, rayDir);
|
||||
if (resPlants.w > 0.0 && resPlants.w < t)
|
||||
{
|
||||
t = resPlants.w;
|
||||
col = resPlants.xyz;
|
||||
}
|
||||
|
||||
// Intersect leaves
|
||||
vec4 resLeaves = leaves(rayPos, rayDir);
|
||||
if (resLeaves.w > 0.0 && resLeaves.w < t)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resLeaves.w;
|
||||
vec2 uv = (pos.xz - leavesPos.xz) * 0.3;
|
||||
float tex = texture2D(iChannel0, uv).r * 0.6 + 0.5;
|
||||
|
||||
t = resLeaves.w;
|
||||
col = leavesCol * lightLeaves(pos, resLeaves.xyz) * tex;
|
||||
}
|
||||
|
||||
if (t > 1e7) return sky(rayDir);
|
||||
|
||||
return col;
|
||||
}
|
||||
|
||||
vec3 trace(vec3 rayPos, vec3 rayDir)
|
||||
{
|
||||
vec3 col = vec3(0.0);
|
||||
float t = 1e20;
|
||||
|
||||
// Intersect sand
|
||||
vec4 resSand = intersectSand(rayPos, rayDir);
|
||||
if (resSand.w > 0.0)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resSand.w;
|
||||
t = resSand.w;
|
||||
|
||||
col = sandCol * light(pos, resSand.xyz);
|
||||
}
|
||||
|
||||
// Intersect treasure chest
|
||||
vec4 resTreasure = intersectTreasure(rayPos, rayDir);
|
||||
if (resTreasure.w > 0.0 && resTreasure.w < t)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resTreasure.w;
|
||||
t = resTreasure.w;
|
||||
col = leavesCol * light(pos, resTreasure.xyz);
|
||||
}
|
||||
|
||||
// Intersect leaves
|
||||
vec4 resLeaves = leaves(rayPos, rayDir);
|
||||
if (resLeaves.w > 0.0 && resLeaves.w < t)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resLeaves.w;
|
||||
vec2 uv = (pos.xz - leavesPos.xz) * 0.3;
|
||||
float tex = texture2D(iChannel0, uv).r * 0.6 + 0.5;
|
||||
|
||||
t = resLeaves.w;
|
||||
col = leavesCol * lightLeaves(pos, resLeaves.xyz) * tex;
|
||||
}
|
||||
|
||||
// Intersect plants
|
||||
vec4 resPlants = plants(rayPos, rayDir);
|
||||
if (resPlants.w > 0.0 && resPlants.w < t)
|
||||
{
|
||||
t = resPlants.w;
|
||||
col = resPlants.xyz;
|
||||
}
|
||||
|
||||
// Intersect water
|
||||
vec4 resWater = intersectWater(rayPos, rayDir);
|
||||
if (resWater.w > 0.0 && resWater.w < t)
|
||||
{
|
||||
vec3 pos = rayPos + rayDir * resWater.w;
|
||||
float dist = t - resWater.w;
|
||||
vec3 n = bump(pos, rayDir);
|
||||
|
||||
float ct = -min(dot(n,rayDir), 0.0);
|
||||
float fresnel = 0.9 - 0.9 * pow(1.0 - ct, 5.0);
|
||||
|
||||
vec3 trans = col * exp(-dist * vec3(1.0, 0.7, 0.4) * 3.0);
|
||||
vec3 reflDir = normalize(reflect(rayDir, n));
|
||||
vec3 refl = sky(reflDir);
|
||||
|
||||
#ifdef REFLECTIONS
|
||||
if (dot(pos, rayDir) < -2.0)
|
||||
refl = traceReflection(pos, reflDir).rgb;
|
||||
#endif
|
||||
|
||||
t = resWater.w;
|
||||
col = mix(refl, trans, fresnel);
|
||||
}
|
||||
|
||||
if (t > 1e7) return sky(rayDir);
|
||||
|
||||
return col;
|
||||
}
|
||||
|
||||
// Ray-generation
|
||||
vec3 camera(vec2 px)
|
||||
{
|
||||
vec2 rd = (px / iResolution.yy - vec2(iResolution.x/iResolution.y*0.5-0.5, 0.0)) * 2.0 - 1.0;
|
||||
float t = sin(iGlobalTime * 0.1) * 0.2;
|
||||
vec3 rayDir = normalize(vec3(rd.x, rd.y, 1.0));
|
||||
vec3 rayPos = vec3(0.0, 3.0, -18.0);
|
||||
return trace(rayPos, rayDir);
|
||||
}
|
||||
|
||||
void main(void)
|
||||
{
|
||||
#ifdef HEAVY_AA
|
||||
vec3 col = camera(gl_FragCoord.xy+vec2(0.0,0.5))*0.25;
|
||||
col += camera(gl_FragCoord.xy+vec2(0.25,0.0))*0.25;
|
||||
col += camera(gl_FragCoord.xy+vec2(0.5,0.75))*0.25;
|
||||
col += camera(gl_FragCoord.xy+vec2(0.75,0.25))*0.25;
|
||||
#else
|
||||
vec3 col = camera(gl_FragCoord.xy);
|
||||
#ifdef LIGHT_AA
|
||||
col = col * 0.5 + camera(gl_FragCoord.xy+vec2(0.5,0.5))*0.5;
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifdef TONEMAP
|
||||
// Optimized Haarm-Peter Duiker’s curve
|
||||
vec3 x = max(vec3(0.0),col*exposure-0.004);
|
||||
col = (x*(6.2*x+.5))/(x*(6.2*x+1.7)+0.06);
|
||||
#else
|
||||
col = pow(col, vec3(0.4545));
|
||||
#endif
|
||||
|
||||
gl_FragColor = vec4(col, 1.0);
|
||||
}
|
||||
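islandScene.glsl ends by either applying a filmic tone-mapping curve (when TONEMAP is defined) or a plain 1/2.2 gamma. A single-channel Python sketch of those two output transforms, using the same constants as the shader (illustrative only):

def tonemap_filmic(c: float, exposure: float = 1.1) -> float:
    # Optimized filmic curve from the TONEMAP branch; the gamma is baked in
    x = max(0.0, c * exposure - 0.004)
    return (x * (6.2 * x + 0.5)) / (x * (6.2 * x + 1.7) + 0.06)

def tonemap_gamma(c: float) -> float:
    # The #else branch: pow(col, 0.4545), i.e. roughly 1/2.2 gamma
    return max(0.0, c) ** 0.4545

for c in (0.0, 0.18, 1.0, 1.8):
    print(f"{c:4.2f} -> filmic {tonemap_filmic(c):.3f}  gamma {tonemap_gamma(c):.3f}")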
68 samples/GLSL/shader.fp  Normal file
@@ -0,0 +1,68 @@
|
||||
/*
|
||||
* Copyright (C) 2010 Josh A. Beam
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions
|
||||
* are met:
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
|
||||
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
||||
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
const int NUM_LIGHTS = 3;
|
||||
const vec3 AMBIENT = vec3(0.1, 0.1, 0.1);
|
||||
const float MAX_DIST = 2.5;
|
||||
const float MAX_DIST_SQUARED = MAX_DIST * MAX_DIST;
|
||||
|
||||
uniform vec3 lightColor[NUM_LIGHTS];
|
||||
|
||||
varying vec3 fragmentNormal;
|
||||
varying vec3 cameraVector;
|
||||
varying vec3 lightVector[NUM_LIGHTS];
|
||||
|
||||
void
|
||||
main()
|
||||
{
|
||||
// initialize diffuse/specular lighting
|
||||
vec3 diffuse = vec3(0.0, 0.0, 0.0);
|
||||
vec3 specular = vec3(0.0, 0.0, 0.0);
|
||||
|
||||
// normalize the fragment normal and camera direction
|
||||
vec3 normal = normalize(fragmentNormal);
|
||||
vec3 cameraDir = normalize(cameraVector);
|
||||
|
||||
// loop through each light
|
||||
for(int i = 0; i < NUM_LIGHTS; ++i) {
|
||||
// calculate distance between 0.0 and 1.0
|
||||
float dist = min(dot(lightVector[i], lightVector[i]), MAX_DIST_SQUARED) / MAX_DIST_SQUARED;
|
||||
float distFactor = 1.0 - dist;
|
||||
|
||||
// diffuse
|
||||
vec3 lightDir = normalize(lightVector[i]);
|
||||
float diffuseDot = dot(normal, lightDir);
|
||||
diffuse += lightColor[i] * clamp(diffuseDot, 0.0, 1.0) * distFactor;
|
||||
|
||||
// specular
|
||||
vec3 halfAngle = normalize(cameraDir + lightDir);
|
||||
vec3 specularColor = min(lightColor[i] + 0.5, 1.0);
|
||||
float specularDot = dot(normal, halfAngle);
|
||||
specular += specularColor * pow(clamp(specularDot, 0.0, 1.0), 16.0) * distFactor;
|
||||
}
|
||||
|
||||
vec4 sample = vec4(1.0, 1.0, 1.0, 1.0);
|
||||
gl_FragColor = vec4(clamp(sample.rgb * (diffuse + AMBIENT) + specular, 0.0, 1.0), sample.a);
|
||||
}
|
||||
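shader.fp accumulates, per light, a distance falloff, a Lambert diffuse term and a Blinn-Phong half-angle specular term. A rough single-light Python sketch of that per-fragment math, with plain tuples standing in for vec3 (helper names are illustrative, and the ambient term added across all lights is left out):

import math

def normalize(v):
    n = math.sqrt(sum(c * c for c in v)) or 1.0
    return tuple(c / n for c in v)

def dot(a, b):
    return sum(x * y for x, y in zip(a, b))

def shade_one_light(normal, camera_vec, light_vec, light_col, max_dist=2.5):
    # distance term between 0.0 and 1.0, clamped at MAX_DIST
    dist = min(dot(light_vec, light_vec), max_dist ** 2) / max_dist ** 2
    dist_factor = 1.0 - dist
    n, cam, l = normalize(normal), normalize(camera_vec), normalize(light_vec)
    # Lambert diffuse
    diffuse = max(0.0, min(1.0, dot(n, l))) * dist_factor
    # Blinn-Phong: specular from the half-angle vector, exponent 16
    half = normalize(tuple(c + d for c, d in zip(cam, l)))
    specular = max(0.0, min(1.0, dot(n, half))) ** 16.0 * dist_factor
    spec_col = tuple(min(1.0, lc + 0.5) for lc in light_col)   # shader brightens the specular tint
    return tuple(min(1.0, lc * diffuse + sc * specular)
                 for lc, sc in zip(light_col, spec_col))

print(shade_one_light((0, 0, 1), (0, 0, 1), (0.3, 0.3, 1.0), (1.0, 0.9, 0.8)))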
6 samples/Handlebars/basic.handlebars  Normal file
@@ -0,0 +1,6 @@
|
||||
<div class="entry">
|
||||
<h1>{{title}}</h1>
|
||||
<div class="body">
|
||||
{{body}}
|
||||
</div>
|
||||
</div>
|
||||
11 samples/Handlebars/each.hbs  Normal file
@@ -0,0 +1,11 @@
|
||||
<div class="post">
|
||||
<h1>By {{fullName author}}</h1>
|
||||
<div class="body">{{body}}</div>
|
||||
|
||||
<h1>Comments</h1>
|
||||
|
||||
{{#each comments}}
|
||||
<h2>By {{fullName author}}</h2>
|
||||
<div class="body">{{body}}</div>
|
||||
{{/each}}
|
||||
</div>
|
||||
10 samples/INI/filenames/.editorconfig  Normal file
@@ -0,0 +1,10 @@
|
||||
; editorconfig.org
|
||||
root = true
|
||||
|
||||
[*]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
42 samples/Idris/Chars.idr  Normal file
@@ -0,0 +1,42 @@
|
||||
module Prelude.Char
|
||||
|
||||
import Builtins
|
||||
|
||||
isUpper : Char -> Bool
|
||||
isUpper x = x >= 'A' && x <= 'Z'
|
||||
|
||||
isLower : Char -> Bool
|
||||
isLower x = x >= 'a' && x <= 'z'
|
||||
|
||||
isAlpha : Char -> Bool
|
||||
isAlpha x = isUpper x || isLower x
|
||||
|
||||
isDigit : Char -> Bool
|
||||
isDigit x = (x >= '0' && x <= '9')
|
||||
|
||||
isAlphaNum : Char -> Bool
|
||||
isAlphaNum x = isDigit x || isAlpha x
|
||||
|
||||
isSpace : Char -> Bool
|
||||
isSpace x = x == ' ' || x == '\t' || x == '\r' ||
|
||||
x == '\n' || x == '\f' || x == '\v' ||
|
||||
x == '\xa0'
|
||||
|
||||
isNL : Char -> Bool
|
||||
isNL x = x == '\r' || x == '\n'
|
||||
|
||||
toUpper : Char -> Char
|
||||
toUpper x = if (isLower x)
|
||||
then (prim__intToChar (prim__charToInt x - 32))
|
||||
else x
|
||||
|
||||
toLower : Char -> Char
|
||||
toLower x = if (isUpper x)
|
||||
then (prim__intToChar (prim__charToInt x + 32))
|
||||
else x
|
||||
|
||||
isHexDigit : Char -> Bool
|
||||
isHexDigit x = elem (toUpper x) hexChars where
|
||||
hexChars : List Char
|
||||
hexChars = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
|
||||
'A', 'B', 'C', 'D', 'E', 'F']
|
||||
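toUpper and toLower in Chars.idr shift letters by 32 code points, which works because ASCII places 'A'..'Z' exactly 32 below 'a'..'z' ('A' is 65, 'a' is 97). A tiny Python sketch of the same arithmetic (illustrative only):

def to_upper(c: str) -> str:
    return chr(ord(c) - 32) if 'a' <= c <= 'z' else c

def to_lower(c: str) -> str:
    return chr(ord(c) + 32) if 'A' <= c <= 'Z' else c

assert to_upper('q') == 'Q' and to_lower('Q') == 'q'
assert to_upper('9') == '9'        # non-letters pass through unchanged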
267 samples/JSON/composer.lock  generated  Normal file
@@ -0,0 +1,267 @@
|
||||
{
|
||||
"_readme": [
|
||||
"This file locks the dependencies of your project to a known state",
|
||||
"Read more about it at http://getcomposer.org/doc/01-basic-usage.md#composer-lock-the-lock-file"
|
||||
],
|
||||
"hash": "d8ff8fcb71824f5199f3499bf71862f1",
|
||||
"packages": [
|
||||
{
|
||||
"name": "arbit/system-process",
|
||||
"version": "1.0",
|
||||
"source": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Arbitracker/system-process.git",
|
||||
"reference": "1.0"
|
||||
},
|
||||
"dist": {
|
||||
"type": "zip",
|
||||
"url": "https://api.github.com/repos/Arbitracker/system-process/zipball/1.0",
|
||||
"reference": "1.0",
|
||||
"shasum": ""
|
||||
},
|
||||
"type": "library",
|
||||
"autoload": {
|
||||
"psr-0": {
|
||||
"SystemProcess": "src/main/php/"
|
||||
}
|
||||
},
|
||||
"notification-url": "http://packagist.org/downloads/",
|
||||
"description": "System process execution library",
|
||||
"time": "2013-03-31 12:42:56"
|
||||
},
|
||||
{
|
||||
"name": "pdepend/staticReflection",
|
||||
"version": "0.1",
|
||||
"source": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/manuelpichler/staticReflection.git",
|
||||
"reference": "origin/master"
|
||||
},
|
||||
"type": "library"
|
||||
},
|
||||
{
|
||||
"name": "qafoo/rmf",
|
||||
"version": "dev-master",
|
||||
"source": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Qafoo/REST-Micro-Framework.git",
|
||||
"reference": "5f43983f15a8aa12be42ad6068675d4008bfb9ed"
|
||||
},
|
||||
"dist": {
|
||||
"type": "zip",
|
||||
"url": "https://api.github.com/repos/Qafoo/REST-Micro-Framework/zipball/5f43983f15a8aa12be42ad6068675d4008bfb9ed",
|
||||
"reference": "5f43983f15a8aa12be42ad6068675d4008bfb9ed",
|
||||
"shasum": ""
|
||||
},
|
||||
"type": "library",
|
||||
"autoload": {
|
||||
"psr-0": {
|
||||
"Qafoo\\RMF": "src/main/"
|
||||
}
|
||||
},
|
||||
"description": "Very simple VC framework which makes it easy to build HTTP applications / REST webservices",
|
||||
"support": {
|
||||
"source": "https://github.com/Qafoo/REST-Micro-Framework/tree/master",
|
||||
"issues": "https://github.com/Qafoo/REST-Micro-Framework/issues"
|
||||
},
|
||||
"time": "2012-12-07 13:33:01"
|
||||
},
|
||||
{
|
||||
"name": "twig/twig",
|
||||
"version": "1.6.0",
|
||||
"source": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/fabpot/Twig.git",
|
||||
"reference": "v1.6.0"
|
||||
},
|
||||
"dist": {
|
||||
"type": "zip",
|
||||
"url": "https://github.com/fabpot/Twig/zipball/v1.6.0",
|
||||
"reference": "v1.6.0",
|
||||
"shasum": ""
|
||||
},
|
||||
"require": {
|
||||
"php": ">=5.2.4"
|
||||
},
|
||||
"type": "library",
|
||||
"autoload": {
|
||||
"psr-0": {
|
||||
"Twig_": "lib/"
|
||||
}
|
||||
},
|
||||
"license": [
|
||||
"BSD"
|
||||
],
|
||||
"authors": [
|
||||
{
|
||||
"name": "Fabien Potencier",
|
||||
"email": "fabien@symfony.com"
|
||||
},
|
||||
{
|
||||
"name": "Armin Ronacher",
|
||||
"email": "armin.ronacher@active-4.com"
|
||||
}
|
||||
],
|
||||
"description": "Twig, the flexible, fast, and secure template language for PHP",
|
||||
"homepage": "http://twig.sensiolabs.org",
|
||||
"keywords": [
|
||||
"templating"
|
||||
],
|
||||
"time": "2012-02-03 23:34:52"
|
||||
},
|
||||
{
|
||||
"name": "twitter/bootstrap",
|
||||
"version": "0.1",
|
||||
"source": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/twitter/bootstrap/",
|
||||
"reference": "origin/master"
|
||||
},
|
||||
"type": "library"
|
||||
},
|
||||
{
|
||||
"name": "zetacomponents/base",
|
||||
"version": "1.8",
|
||||
"source": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zetacomponents/Base.git",
|
||||
"reference": "1.8"
|
||||
},
|
||||
"dist": {
|
||||
"type": "zip",
|
||||
"url": "https://github.com/zetacomponents/Base/zipball/1.8",
|
||||
"reference": "1.8",
|
||||
"shasum": ""
|
||||
},
|
||||
"type": "library",
|
||||
"autoload": {
|
||||
"classmap": [
|
||||
"src"
|
||||
]
|
||||
},
|
||||
"license": [
|
||||
"apache2"
|
||||
],
|
||||
"authors": [
|
||||
{
|
||||
"name": "Sergey Alexeev"
|
||||
},
|
||||
{
|
||||
"name": "Sebastian Bergmann"
|
||||
},
|
||||
{
|
||||
"name": "Jan Borsodi"
|
||||
},
|
||||
{
|
||||
"name": "Raymond Bosman"
|
||||
},
|
||||
{
|
||||
"name": "Frederik Holljen"
|
||||
},
|
||||
{
|
||||
"name": "Kore Nordmann"
|
||||
},
|
||||
{
|
||||
"name": "Derick Rethans"
|
||||
},
|
||||
{
|
||||
"name": "Vadym Savchuk"
|
||||
},
|
||||
{
|
||||
"name": "Tobias Schlitt"
|
||||
},
|
||||
{
|
||||
"name": "Alexandru Stanoi"
|
||||
}
|
||||
],
|
||||
"description": "The Base package provides the basic infrastructure that all packages rely on. Therefore every component relies on this package.",
|
||||
"homepage": "https://github.com/zetacomponents",
|
||||
"time": "2009-12-21 04:14:16"
|
||||
},
|
||||
{
|
||||
"name": "zetacomponents/graph",
|
||||
"version": "1.5",
|
||||
"source": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zetacomponents/Graph.git",
|
||||
"reference": "1.5"
|
||||
},
|
||||
"dist": {
|
||||
"type": "zip",
|
||||
"url": "https://github.com/zetacomponents/Graph/zipball/1.5",
|
||||
"reference": "1.5",
|
||||
"shasum": ""
|
||||
},
|
||||
"type": "library",
|
||||
"autoload": {
|
||||
"classmap": [
|
||||
"src"
|
||||
]
|
||||
},
|
||||
"license": [
|
||||
"apache2"
|
||||
],
|
||||
"authors": [
|
||||
{
|
||||
"name": "Sergey Alexeev"
|
||||
},
|
||||
{
|
||||
"name": "Sebastian Bergmann"
|
||||
},
|
||||
{
|
||||
"name": "Jan Borsodi"
|
||||
},
|
||||
{
|
||||
"name": "Raymond Bosman"
|
||||
},
|
||||
{
|
||||
"name": "Frederik Holljen"
|
||||
},
|
||||
{
|
||||
"name": "Kore Nordmann"
|
||||
},
|
||||
{
|
||||
"name": "Derick Rethans"
|
||||
},
|
||||
{
|
||||
"name": "Vadym Savchuk"
|
||||
},
|
||||
{
|
||||
"name": "Tobias Schlitt"
|
||||
},
|
||||
{
|
||||
"name": "Alexandru Stanoi"
|
||||
},
|
||||
{
|
||||
"name": "Lars Jankowski"
|
||||
},
|
||||
{
|
||||
"name": "Elger Thiele"
|
||||
},
|
||||
{
|
||||
"name": "Michael Maclean"
|
||||
}
|
||||
],
|
||||
"description": "A component for creating pie charts, line graphs and other kinds of diagrams.",
|
||||
"homepage": "https://github.com/zetacomponents",
|
||||
"time": "2009-12-21 04:26:17"
|
||||
}
|
||||
],
|
||||
"packages-dev": [
|
||||
|
||||
],
|
||||
"aliases": [
|
||||
|
||||
],
|
||||
"minimum-stability": "stable",
|
||||
"stability-flags": {
|
||||
"qafoo/rmf": 20,
|
||||
"arbit/system-process": 0
|
||||
},
|
||||
"platform": [
|
||||
|
||||
],
|
||||
"platform-dev": [
|
||||
|
||||
]
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.