Compare commits

...

230 Commits

Author SHA1 Message Date
Arfon Smith
8c8434ed64 Merge pull request #1335 from github/1318-local
1318 local
2014-07-01 11:48:24 -05:00
Arfon Smith
9281bd043a Version 2014-07-01 11:19:05 -05:00
Arfon Smith
6771f7c272 Merge branch 'master' into 1318-local 2014-07-01 11:12:44 -05:00
Vicent Marti
df09a746a0 b3 2014-06-27 16:57:58 +02:00
Vicent Marti
d9be472ccb Skip submodules when diffing 2014-06-27 16:41:23 +02:00
Vicent Marti
32828a9af5 b2 2014-06-27 13:51:56 +02:00
Vicent Marti
d206131df0 Hardcode OIDs for test 2014-06-27 13:51:37 +02:00
Vicent Marti
65eaf98d0b docs 2014-06-26 21:26:26 +02:00
Vicent Marti
12429b90fe Bring back missing test 2014-06-26 21:24:30 +02:00
Vicent Marti
621042e639 Remove whitespace 2014-06-26 18:42:43 +02:00
Arfon Smith
526244be11 Samples 2014-06-26 17:38:39 +01:00
Arfon Smith
bc53d0b55e Merge pull request #1311 from maximusvladimir/master
Added 3 character glsl extensions.
2014-06-26 17:37:42 +01:00
Vicent Marti
907d3c5a36 b1 2014-06-26 18:17:51 +02:00
Max K.
898f1e215e Added sample files for glsl. 2014-06-26 09:25:40 -05:00
Vicent Marti
324ac83489 Use the new Rugged release 2014-06-26 14:12:00 +02:00
Vicent Marti
00a873dcc7 Bump 3.0.0b0 2014-06-26 13:03:41 +02:00
Vicent Marti
bc34345a56 Fix the linguist binary 2014-06-26 13:03:30 +02:00
Vicent Marti
659d27cae5 DOCS 2014-06-26 12:54:08 +02:00
Vicent Marti
29072d6eae Fix travis build 2014-06-26 12:27:02 +02:00
Vicent Marti
1fd59361b5 Proper incremental diffing 2014-06-25 20:26:44 +02:00
Vicent Marti
5896bb8fa3 Missing file. Duh. 2014-06-24 17:52:43 +02:00
Vicent Marti
ea1fc90cf5 Handle nil blob names 2014-06-24 17:43:01 +02:00
Vicent Marti
463f48f04f Mode must always be a String 2014-06-24 17:41:16 +02:00
Vicent Marti
cd58a30c7c Only cache strings, thanks 2014-06-24 17:41:16 +02:00
Vicent Marti
c4260ae681 Use Rugged when computing Repository stats 2014-06-24 17:41:16 +02:00
Arfon Smith
d40b4a33de Sorted samples 2014-06-24 10:43:20 +01:00
Arfon Smith
87498679bd Merge pull request #1313 from pchaigno/samples-order
Set a sort order for the samples.json file's content
2014-06-24 10:42:25 +01:00
Paul Chaignon
f4e254202b Set a sort order for the samples.json file's content 2014-06-24 10:50:03 +02:00
Arfon Smith
e91d225e7d Merge pull request #1305 from neersighted/patch-1
Also ignore extern(al)
2014-06-23 23:12:34 +01:00
neersighted
b90d940aef Add tests for extern(al) being vendored 2014-06-23 14:50:31 -07:00
Max K.
b83a364b0e Added 3 character glsl extensions. 2014-06-23 15:46:28 -05:00
Vicent Marti
dbff196b08 Merge pull request #1309 from github/gameover-0x10c
Gameover 0x10c
2014-06-23 17:54:53 +02:00
Joshua Peek
5b7316fb2a Remove DCPU-16 ASM language 2014-06-23 10:48:09 -05:00
Arfon Smith
fa4dfe39ba Merge pull request #1306 from github/1100-local
1100 local
2014-06-23 10:33:01 +01:00
Arfon Smith
89999e60bf Merge branch 'master' into 1100-local
Conflicts:
	lib/linguist/languages.yml
2014-06-23 10:28:41 +01:00
neersighted
4819fb12a3 Also ignore extern(al)
...because some of us don't like 'vendor'
2014-06-22 17:18:10 -07:00
Arfon Smith
27a4eeb206 Samples update 2014-06-22 16:19:04 +01:00
Arfon Smith
bacf4d5780 Merge pull request #1303 from geekflyer/master
Add .xsjs and .xsjslib as JavaScript extension (SAP HANA XS)
2014-06-22 16:18:04 +01:00
Christian Theilemann
f92fed60f8 Add .xsjs and .xsjslib as JavaScript file extensions
.xsjs and .xsjslib are used to denote server-side JavaScript files in SAP
HANA XS
2014-06-22 15:24:21 +02:00
Arfon Smith
5e797b548c Merge pull request #1273 from k2b6s9j/maven
Add Mavenfile and Jarfile as Ruby files.
2014-06-22 10:17:54 +01:00
Arfon Smith
700e2f1b2b Merge pull request #1299 from pchaigno/idl-lexer
Lexer for IDL
2014-06-21 17:45:16 +01:00
Paul Chaignon
861656978b Lexer for IDL 2014-06-21 17:31:49 +02:00
Arfon Smith
9c05bdac85 Samples 2014-06-21 13:19:38 +01:00
Arfon Smith
bd34c16c8f Merge pull request #1297 from github/map-pryrc-to-ruby
Add .pryrc support
2014-06-21 13:13:10 +01:00
Kevin Sawicki
13109bb9b8 Sort filenames 2014-06-20 11:28:35 -07:00
Kevin Sawicki
84f3b3720b Move .pryrc to filenames 2014-06-20 11:27:28 -07:00
Kevin Sawicki
858a66ccc8 Add .pryrc support 2014-06-20 11:18:08 -07:00
Arfon Smith
abb05eace6 Merge pull request #1295 from github/820-local
820 local
2014-06-20 12:42:21 +01:00
Arfon Smith
62bd96a778 Merge branch 'master' into 820-local
Conflicts:
	lib/linguist/samples.json
2014-06-20 12:37:20 +01:00
Arfon Smith
8cb736adfa Merge pull request #1294 from github/675-local
675 local
2014-06-20 12:29:20 +01:00
Arfon Smith
0758c05186 Merge branch 'master' into 675-local
Conflicts:
	.gitignore
	lib/linguist/languages.yml
	lib/linguist/samples.json
2014-06-20 12:22:58 +01:00
Arfon Smith
62bc6f0457 Merge pull request #1293 from github/1232-local
1232 local
2014-06-20 12:17:09 +01:00
Arfon Smith
ea7e894139 Explicit lexer 2014-06-20 12:13:04 +01:00
Arfon Smith
21f0ac99e6 Merge branch 'master' into 1232-local
Conflicts:
	lib/linguist/samples.json
2014-06-20 12:11:43 +01:00
Arfon Smith
b251866a29 Merge pull request #1292 from github/1261-local
1261 local
2014-06-20 11:02:17 +01:00
Arfon Smith
bf3db20a9d Samples 2014-06-20 10:58:44 +01:00
Arfon Smith
bd55147847 Merge branch 'master' into 1261-local 2014-06-20 10:58:21 +01:00
Arfon Smith
f4d64af39b Merge pull request #1291 from github/997-local
997 local
2014-06-20 10:49:46 +01:00
Arfon Smith
b7bda34645 Samples update 2014-06-20 10:46:38 +01:00
Arfon Smith
b13dea6df0 Merge branch 'master' into 997-local 2014-06-20 10:46:04 +01:00
Arfon Smith
28a64c9318 Samples 2014-06-20 10:27:47 +01:00
Arfon Smith
74be618fff Merge pull request #1290 from github/1258-local
1258 local
2014-06-20 10:26:59 +01:00
Arfon Smith
8bbe10bf50 Reordering 2014-06-20 10:22:14 +01:00
Arfon Smith
d275911624 Merge pull request #1289 from github/1132-local
1132 local
2014-06-20 10:17:44 +01:00
Arfon Smith
c26382301c Merge branch 'master' into 1132-local
Conflicts:
	lib/linguist/samples.json
2014-06-20 10:12:04 +01:00
Arfon Smith
e12bc07041 Samples 2014-06-19 16:03:05 +01:00
Arfon Smith
20416369ac Merge pull request #1282 from github/map-nuspec-to-xml
Add .nuspec extension to XML
2014-06-19 16:02:36 +01:00
Kevin Sawicki
2be91e9b2e Add .nuspec sample 2014-06-19 07:53:52 -07:00
Arfon Smith
dc1b8d9e80 Samples 2014-06-19 15:03:30 +01:00
Arfon Smith
bf0a814514 Merge pull request #1283 from LyricsMaster/add-xojo
Add Xojo language support
2014-06-19 15:03:04 +01:00
Kenichi Maehashi
b14267d40f add more samples for Xojo language 2014-06-19 22:59:12 +09:00
Arfon Smith
195a4115d8 Samples 2014-06-19 14:50:41 +01:00
Arfon Smith
e1da8eb841 Merge pull request #1280 from diekmann/patch-1
Added language Isabelle
2014-06-19 14:50:14 +01:00
Kenichi Maehashi
14738f037f remove non-source file extensions of Xojo language 2014-06-19 07:27:54 +09:00
Kenichi Maehashi
a437943516 Add Xojo language and example 2014-06-19 01:29:54 +09:00
Kevin Sawicki
900ee57de8 Add .nuspec extension to XML 2014-06-18 08:58:18 -07:00
diekmann
947f4e1c57 alphabetic sorting 2014-06-18 09:34:26 +02:00
diekmann
d9f17a65dd Isabelle language - fixed lexer and added sample
Also, Isabelle is very popular in academia.
See for example http://scholar.google.de/scholar?q=isabelle%2FHOL

In around 40 days, the seL4 microkernel [1] with its
Isabelle proofs will (probably) be released on GitHub [2].
[1] http://sel4.systems/
[2] https://lists.cam.ac.uk/mailman/htdig/cl-isabelle-users/2014-June/msg00011.html
2014-06-18 09:16:31 +02:00
Arfon Smith
f71def19ae Merge pull request #1279 from github/jdennes-vendored-octicon-styles
Vendor Octicon styles
2014-06-17 21:42:11 -05:00
Charles Strahan
e452e85cae add nix support 2014-06-17 19:32:22 -04:00
diekmann
5059fe90b0 Added language Isabelle
Isabelle is a generic proof assistant. It is comparable (to some degree) to Coq.

Used in
* diekmann/topoS
* 3of8/sturm
* formare/auctions
* larsrh/hol-falso
* dpthayer/MetaProof

Hello World example (file must be named HelloWorld.thy):
  theory HelloWorld
  imports Main
  begin
  (*put content here*)
  end
2014-06-17 21:27:03 +02:00
Arfon Smith
b90da731d6 Samples 2014-06-16 16:28:21 -05:00
Arfon Smith
d2012519ba Merge pull request #1268 from metopa/patch-1
Support of the .inc extension in Assembly group.
2014-06-16 16:27:35 -05:00
Gusakov Nikita
1b7f26091c Added generated rule for Zephir language 2014-06-16 19:20:43 +04:00
James Dennes
548e4f1845 Add Octicons entries to vendor.yml 2014-06-15 16:22:18 +02:00
James Dennes
625bed8fca Add failing test for vendored Octicons 2014-06-15 16:19:59 +02:00
Viacheslav Kroilov
db15367775 Rename X86_64.INC to X86_64.inc 2014-06-14 20:13:38 +04:00
Viacheslav Kroilov
309d14a955 Rename SYSTEM.INC to SYSTEM.inc 2014-06-14 20:13:19 +04:00
Viacheslav Kroilov
5ff16e1195 Rename FASM.ASM to FASM.asm 2014-06-14 20:12:50 +04:00
Viacheslav Kroilov
cf43aa9111 Rename ASSEMBLE.INC to ASSEMBLE.inc 2014-06-14 20:11:58 +04:00
metopa
138c1e6024 Added examples for Assembly
From FASM source under BSD
2014-06-14 19:21:02 +04:00
Kepler Sticka-Jones
382870a881 Add Mavenfile and Jarfile as Ruby files. 2014-06-12 13:11:56 -06:00
Andy Lindeman
31921838cd Merge pull request #1272 from github/cut-release-v2.12.0
Bumps to 2.12.0
2014-06-11 17:02:17 -04:00
Andy Lindeman
a707587182 Bumps to 2.12.0 2014-06-11 14:00:46 -04:00
Andy Lindeman
bc482af999 Merge pull request #1269 from github/pygments-bump
Bumps pygments.rb to 0.6.0
2014-06-11 13:58:37 -04:00
Andy Lindeman
6818744dae Merge remote-tracking branch 'origin/master' into pygments-bump 2014-06-11 13:56:53 -04:00
Andy Lindeman
607185ac61 Be explicit about lexer 2014-06-11 13:56:40 -04:00
Brian Lopez
81b7a412c3 Merge pull request #1270 from github/cut-release-v2.11.5
Bump version for 2.11.5 release
2014-06-10 15:28:27 -07:00
Brian Lopez
09b9a8b441 bump version for 2.11.5 release 2014-06-10 16:00:08 -05:00
Andy Lindeman
85479cc2de Swift has a lexer now 2014-06-10 15:54:56 -04:00
Andy Lindeman
3ad4eb2b59 Adds supports for Slim 2014-06-10 15:54:56 -04:00
Andy Lindeman
878fe95ec3 Upgrades to pygments.rb 0.6.0 2014-06-10 15:54:55 -04:00
Viacheslav Kroilov
3d23d1be69 Added .inc extension in Assembly group.
It's an include file for assembler source that helps structure code. It usually contains normal assembly source.
2014-06-10 21:25:27 +04:00
Arfon Smith
701e720ab8 Merge pull request #1259 from github/bump-charlock-again
Bump charlock to 0.7.3
2014-06-09 11:10:30 -05:00
Arfon Smith
e709ce7d56 Samples 2014-06-09 06:27:26 -05:00
Arfon Smith
32c89a5405 Merge pull request #1260 from dalehenrich/patch-1
.ston extension for "Smalltalk Object Notation"
2014-06-09 06:26:34 -05:00
Dale Henrichs
1735982a73 Merge branch 'ston_test' of github.com:dalehenrich/linguist into ston_test 2014-06-08 21:21:56 -07:00
Dale Henrichs
625e0aa1af add sample files 2014-06-08 21:21:29 -07:00
Dale Henrichs
00e1a3f8fd ahhh, caps are sorted before lower case ... 2014-06-08 20:56:09 -07:00
Dale Henrichs
539256b08e send to travis 2014-06-08 20:49:45 -07:00
Dale Henrichs
ff791f5a39 Looks like I should have used JSON lexer
- let's see what travis has to say
2014-06-08 20:41:41 -07:00
William Woodruff
de4d48b0fe added two notebook samples 2014-06-08 23:11:19 -04:00
William Woodruff
b5c49f6d1c added a sample package 2014-06-08 22:57:25 -04:00
William Woodruff
03cb7d6ffb Merge remote-tracking branch 'upstream/master' 2014-06-08 22:54:13 -04:00
Arfon Smith
304fc344a1 Merge pull request #1257 from simonwistow/master
Add support for the Varnish VCL edge scripting language
2014-06-08 21:46:32 -05:00
William Woodruff
33c42638e9 added two more common mathematica suffixes 2014-06-07 21:21:47 -04:00
Dale Henrichs
9d940755e7 .ston extension for "Smalltalk Object Notation"
See https://github.com/svenvc/ston
2014-06-07 14:49:39 -07:00
Brian Lopez
bc04232f87 add the fixture 2014-06-07 15:32:29 -05:00
Brian Lopez
e17ebec098 Bump charlock to 0.7.3
This version includes a fix for the encoding lookup table for some
encoding aliases in the ICU detection API
2014-06-07 15:25:44 -05:00
Niklas Rosenstein
a7cba23526 added .pyp suffix and an example source file. closes issue #1 2014-06-07 01:29:30 +02:00
Simon Wistow
7cd23036a7 Add support for the Varnish VCL edge scripting language 2014-06-06 12:59:50 -07:00
Arfon Smith
44c5413abf Merge pull request #1256 from github/1042-update
1042 update
2014-06-06 12:38:45 -05:00
Arfon Smith
50ab58e91f Merge commit 'refs/pull/1042/head' of github.com:github/linguist into 1042
Conflicts:
	lib/linguist/vendor.yml
2014-06-06 12:32:30 -05:00
Arfon Smith
1fd0732390 Merge branch 'master' into 1042
Conflicts:
	lib/linguist/vendor.yml
2014-06-06 12:31:56 -05:00
Arfon Smith
ed1b9ee899 Merge pull request #1255 from github/680-update
680 update
2014-06-06 12:12:09 -05:00
Arfon Smith
d3c04d6310 nesC 2014-06-06 12:08:38 -05:00
Julian Gehring
f66ffe305f Change R package ignores to absolute paths 2014-06-06 08:57:11 -07:00
Arfon Smith
2a9ff0083c Merge branch 'master' into 814
Conflicts:
	lib/linguist/languages.yml
2014-06-06 09:55:08 -05:00
Brian Lopez
c1cf7ea825 Merge pull request #1254 from github/cut-release-v2.11.4
v2.11.4 release
2014-06-05 10:03:43 -07:00
Brian Lopez
67f7268a55 bump version for v2.11.4 release 2014-06-05 10:55:32 -05:00
Brian Lopez
a55ee7eb09 Merge pull request #1253 from github/newer-charlock
Use the :ruby_encoding value from charlock 0.7.2
2014-06-05 08:51:46 -07:00
Brian Lopez
203f6d1944 forgot to add the test fixture 2014-06-04 17:15:33 -05:00
Brian Lopez
42c68f21d1 test ruby_encoding 2014-06-04 15:59:42 -05:00
Brian Lopez
7e8be1293e Use the :ruby_encoding value from charlock 0.7.2 2014-06-04 15:51:33 -05:00
Arfon Smith
09c234ec26 Merge pull request #1190 from Madsn/patch-1
Add html5shiv to vendor.yml
2014-06-04 10:30:03 -05:00
Arfon Smith
65a26c3e73 Merge pull request #1248 from github/1035-update
1035 update
2014-06-03 22:06:54 -05:00
Arfon Smith
98f35aefdc Merge branch 'master' into 1035
Conflicts:
	lib/linguist/languages.yml
	lib/linguist/samples.json
2014-06-03 22:03:59 -05:00
Arfon Smith
38a3714514 Samples update 2014-06-03 21:37:25 -05:00
Arfon Smith
491700f925 Merge pull request #1198 from Spirit-of-Oberon/master
Support of the Component Pascal language
2014-06-03 21:36:52 -05:00
Andy Lindeman
4d033e7e83 Merge pull request #1246 from github/cut-release-v2.11.3
Bumps to 2.11.3
2014-06-03 15:15:14 -04:00
Andy Lindeman
efc3638065 Freshens up the release docs 2014-06-03 14:50:27 -04:00
Andy Lindeman
b7685ab317 Bumps to 2.11.3 2014-06-03 14:50:17 -04:00
Andy Lindeman
83c5f6a004 Merge pull request #1245 from alindeman/binarylike_data
Handle case where newline chars don't transcode to detected encoding
2014-06-03 12:55:33 -04:00
Andy Lindeman
aa5a94cc3e Handle case where newline chars don't transcode to detected encoding
We've seen cases where binary files are detected as encodings such as
ISO-8859-8-I. This usually happens when the binary files are short; the
detector is mistaken, but there is also not very much data for the
detection algorithm to work with in the first place, so it's
understandable that it gets this wrong.

In these cases, the code to convert ASCII newline characters to
encodings such as ISO-8859-8-I fails because there is no conversion
between them.

We now simply assume that the data is all one line in those cases. In
reality the data is binary, but this is obviously difficult to detect
reliably.
2014-06-03 12:26:23 -04:00
Arfon Smith
a5b6331ab5 Merge pull request #1244 from akashivskyy/master
Add orange color to Apple Swift language
2014-06-03 10:53:55 -05:00
Adrian Kashivskyy
2164b28c64 Update Swift color 2014-06-03 16:03:49 +02:00
Adrian Kashivskyy
0fb824b345 Add orange color to Swift 2014-06-03 15:54:15 +02:00
Arfon Smith
29ee094d66 Merge pull request #1241 from github/1239-update
1239 update
2014-06-02 21:11:40 -05:00
Arfon Smith
4a7ae50ec8 Dammit 2014-06-02 21:07:11 -05:00
Arfon Smith
398439a937 Pedantic 2014-06-02 21:04:39 -05:00
Arfon Smith
a3bc3a7615 Merge branch 'master' into 1239-update
Conflicts:
	lib/linguist/samples.json
2014-06-02 21:01:00 -05:00
Arfon Smith
7989fbd613 Samples 2014-06-02 21:00:03 -05:00
john howard
c389c79be9 fixed zimpl declaration position collation order problem 2014-06-02 18:30:45 -07:00
john howard
1fd2f921fd added yet another zimpl extension that is in use 2014-06-02 18:28:00 -07:00
john howard
ed851849db added extra extension for zimpl 2014-06-02 18:23:07 -07:00
john howard
cfb9f6f0a4 smaller code sample 2014-06-02 18:14:57 -07:00
John Howard
3d5a0da62e rename sample directory 2014-06-02 18:08:54 -07:00
john howard
4e15369f9a added missing lexer for zimpl 2014-06-02 17:27:24 -07:00
john howard
5b3152d99d Create sample.zmpl 2014-06-02 15:16:00 -07:00
john howard
a6955f4edb added zmpl language declaration 2014-06-02 15:09:41 -07:00
Arfon Smith
280ef7d1bd Merge pull request #1238 from github/cut-release-2.11.2
Bumping to 2.11.2
2014-06-02 15:51:32 -05:00
Arfon Smith
8d2ea90a5b Bumping to 2.11.2 2014-06-02 14:59:12 -05:00
Arfon Smith
4bf7abd73d Merge pull request #1237 from alindeman/swift
Adds basic support for the Swift programming language
2014-06-02 14:57:23 -05:00
Andy Lindeman
8f251e6756 Adds basic support for the Swift programming language
Text only lexer for now until Pygments catches up
2014-06-02 15:54:05 -04:00
Arfon Smith
4cd35c1f33 Samples update 2014-05-31 09:20:20 -05:00
Arfon Smith
78fda33707 Merge pull request #1209 from jkeirstead/GAMS-language
Added the General Algebraic Modeling System (GAMS) to languages with example
2014-05-31 09:19:57 -05:00
Arfon Smith
5c6a98f479 Merge pull request #1230 from github/1206-update
1206 update
2014-05-31 09:15:47 -05:00
Arfon Smith
efbcb942c3 Merge branch 'master' into 1206
Conflicts:
	lib/linguist/samples.json
2014-05-31 09:13:42 -05:00
Arfon Smith
f3da1bc3b1 Merge pull request #1228 from christianbundy/add-ox
Add Ox
2014-05-31 09:10:10 -05:00
Christian Bundy
72a6186f08 Fix Ox implementation
Remove .h from Ox, fix `lex` typo, and add samples for Ox.
2014-05-30 15:47:42 -07:00
Christian Bundy
8cde6d2e8f Merge branch 'master' of https://github.com/github/linguist into add-ox 2014-05-30 15:33:51 -07:00
Arfon Smith
4f2c7fdc3c Merge pull request #1227 from github/1178-update
1178 update
2014-05-30 16:25:47 -05:00
Arfon Smith
5a830504a4 Merge branch 'master' into 1178
Conflicts:
	lib/linguist/samples.json
2014-05-30 16:15:28 -05:00
Arfon Smith
086fb09038 Merge pull request #1226 from christianbundy/patch-2
Add Cheat Engine's .ct as an XML extension
2014-05-30 16:11:54 -05:00
Arfon Smith
5544a041ce Samples update 2014-05-30 16:11:01 -05:00
Arfon Smith
6447333368 Merge pull request #1208 from jkeirstead/R-documentation
R documentation
2014-05-30 16:08:41 -05:00
Christian Bundy
1d6a42f0eb Add Cheat Engine's .ct as an XML extension
Add .ct as an XML extension instead of its own language, as recommended by @arfon in #1199
2014-05-30 14:04:59 -07:00
Arfon Smith
de14b75517 Samples update 2014-05-30 15:59:30 -05:00
Arfon Smith
0f302713da Merge pull request #1188 from kaendfinger/master
Groovy: Add .gvy, .grt, and .gtpl to the list of extensions
2014-05-30 15:58:59 -05:00
Arfon Smith
a66d064d4a Merge pull request #1088 from github/815-update
815 update
2014-05-30 15:46:08 -05:00
Arfon Smith
4fefe2020f Merge branch 'master' into 815-update
Conflicts:
	lib/linguist/samples.json
2014-05-30 15:38:55 -05:00
Arfon Smith
72fab07a14 Text only 2014-05-30 15:37:38 -05:00
Arfon Smith
adbf4f6b17 Samples update 2014-05-30 15:33:36 -05:00
Arfon Smith
cfcf4ca915 Merge pull request #1203 from kostko/master
Add .ipp extension for C++
2014-05-30 15:21:16 -05:00
Arfon Smith
c427fba87f Merge pull request #1200 from andyli/patch-1
Haxe: Use haxe logo color.
2014-05-30 15:20:38 -05:00
Arfon Smith
ab14bcab03 Merge pull request #1215 from felixphew/patch-1
Add .mkdn as a Markdown extension
2014-05-30 14:59:12 -05:00
ferrall
78de3fb959 Update languages.yml
added explicit lexer
2014-05-28 14:03:27 -04:00
ferrall
b9eda90ddd Update languages.yml
Adding Ox to the list http://www.doornik.com/ox/
2014-05-27 14:47:20 -04:00
Arfon Smith
66b346c8fb Merge pull request #1219 from github/sql-data
SQL -> data
2014-05-27 05:59:17 -05:00
Arfon Smith
8215b225d9 Searchable 2014-05-27 05:54:44 -05:00
Arfon Smith
41da8c6352 SQL -> data 2014-05-27 05:50:40 -05:00
felixphew
b7dad4df5e Add .mkdn as a Markdown extension 2014-05-26 06:55:37 +10:00
James Keirstead
1a98ccbf5f Added an example Rd file from the scholar package 2014-05-23 18:12:36 +01:00
James Keirstead
8d16a3365e Added documentation format to R language 2014-05-23 18:12:23 +01:00
Arfon Smith
67bf48fafc Merge pull request #1212 from christianbundy/patch-1
Javascript and LESS being labeled as 100% "shell"
2014-05-22 16:49:39 -05:00
Christian Bundy
f5895216a8 Update NuGet regex to be more specific
Change NuGet regex to look for packages that end with a period and 1+ digits, as NuGet always appends a version number to the end of packages.
2014-05-21 13:43:29 -07:00
James Keirstead
e96096f786 Added the General Algebraic Modeling System (GAMS) to languages, with example 2014-05-21 14:36:11 +01:00
James Adams
0a850eeddd Add support for Pan Language
As found in repositories related to @quattor, e.g. https://github.com/quattor/template-library-core
The test file provided matches the one I submitted to Pygments.

At some point in the future when the Pygments patches land at GitHub the lexer should be updated from "Text only" to "pan".
2014-05-20 16:54:07 +01:00
Jernej Kos
42658ffd61 Added .ipp extension for C++. 2014-05-19 19:12:49 +02:00
Andy Li
24fc2842d2 Haxe: Use haxe logo color. 2014-05-17 16:38:44 +08:00
ilovb
ac2723abe3 example 2 for Component Pascal 2014-05-17 00:02:34 +04:00
ilovb
0d0e219532 add example for Component Pascal 2014-05-16 23:47:27 +04:00
ilovb
cf35807709 add lexer for Component Pascal 2014-05-16 23:29:52 +04:00
ilovb
c2b53db96d remove Component Pascal examples 2014-05-16 23:08:28 +04:00
ilovb
8e6efc3a7d remove Component Pascal Document 2014-05-16 22:39:18 +04:00
ilovb
4b6f05b4d1 remove ace mode 2014-05-16 22:17:05 +04:00
ilovb
7aad5f93e4 Support of the Component Pascal language
http://en.wikipedia.org/wiki/Component_Pascal
2014-05-16 21:36:23 +04:00
Kenneth Endfinger
9b6a7622d2 Groovy: Remove .tpl from list 2014-05-16 10:37:49 -04:00
Kenneth Endfinger
6c666075b5 Groovy: Add '.tpl' to the list of extensions. 2014-05-16 10:32:41 -04:00
Kenneth Endfinger
6d26bf5c82 Groovy: .groovy is now first in the array 2014-05-16 10:20:57 -04:00
Daniël W. Crompton
6d5da4c9ec update 2014-05-14 01:38:55 +02:00
Kenneth Endfinger
51dde1f6a4 Created Sample for .gvy 2014-05-13 16:34:20 -04:00
Kenneth Endfinger
13c9259d23 Created Sample for .grt 2014-05-13 16:33:43 -04:00
Kenneth Endfinger
a22c2d678b Created a Sample for .gtpl 2014-05-13 16:32:21 -04:00
Kenneth Endfinger
5c36f8df85 Groovy: Sorted Extensions 2014-05-13 15:37:20 -04:00
Mikkel Madsen
dcc598442b Handle minified html5shiv 2014-05-13 09:01:37 +02:00
Mikkel Madsen
91877056fb Extend vendor test for html5shiv 2014-05-13 09:00:43 +02:00
Mikkel Madsen
868e9df434 Add html5shiv to vendor.yml 2014-05-13 08:56:08 +02:00
Kenneth Endfinger
c3642ba7ed Groovy: Add .gvy, .grt, and .gtpl to the list of extensions 2014-05-12 20:51:20 -04:00
Paul Chaignon
a148d52aed .frag file extension added for JavaScript with some new samples 2014-05-07 13:42:46 +02:00
Ricky Elrod
9d569c8bd5 Idris is upstream in Pygments now: https://bitbucket.org/birkenfeld/pygments-main/pull-request/210 2014-04-22 02:54:27 -04:00
Arfon Smith
26fbc45baf Merge branch 'master' into 815
Conflicts:
	lib/linguist/samples.json
2014-04-21 11:37:49 -05:00
Julian Gehring
9ae0bdbb43 Add R package ignores to vendor.yml
Ignore vignette and external data directories which contain no R source code
2014-04-03 21:29:18 +02:00
Aleks Kissinger
a3aaa1ec4d included sample and extension .ML extension for Standard ML files 2014-04-02 12:41:54 +01:00
Michael Hendricks
ee3c9bcdbd Add misclassified Prolog samples
These two files are incorrectly classified as Perl.  They should be
classified as Prolog.  There are many distinctive tokens in each file
which clearly differentiate between Perl and Prolog.
2014-03-17 08:56:45 -06:00
waddlesplash
aa78060e41 Adding QMake (Make-like) language.
Mostly because the file extension conflicts with that of Prolog.
2013-12-10 10:23:13 -05:00
elofgren
89795ebd1f bundle fix
Let's see if this fixes the failing tests
2013-12-05 17:33:20 -05:00
Pat Pannuto
5fb6f34d8a Add missing lexer entry for nesC to languages.yml
The nesC entry in the languages.yml file was missing a lexer entry
and thus wasn't getting picked up. This adds the required lexer line.
2013-12-05 14:55:00 -05:00
Eric Lofgren
3ecc1f883c Basic SAS
Just an entry for SAS with the basic .sas file extension and two
examples.
2013-12-03 14:48:55 -05:00
Daniël W. Crompton
7c1716aa1e This pull request solves issue #674, see it for details. 2013-09-06 01:47:14 +02:00
126 changed files with 29937 additions and 6777 deletions


@@ -1,4 +1,6 @@
before_install:
- git fetch origin master:master
- git fetch origin v2.0.0:v2.0.0
- sudo apt-get install libicu-dev -y
- gem update --system 2.1.11
rvm:


@@ -143,8 +143,8 @@ If you are the current maintainer of this gem:
0. Make sure your local dependencies are up to date: `bundle install`
0. Ensure that samples are updated: `bundle exec rake samples`
0. Ensure that tests are green: `bundle exec rake test`
0. Bump gem version in github-linguist.gemspec. For example, [like this](https://github.com/github/linguist/commit/97908204a385940e47251af9ecb689e8f6515c48).
0. Make a PR to github/linguist. For example, [#1075](https://github.com/github/linguist/pull/1075).
0. Bump gem version in `lib/linguist/version.rb`. For example, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
0. Make a PR to github/linguist. For example, [#1238](https://github.com/github/linguist/pull/1238).
0. Build a local gem: `gem build github-linguist.gemspec`
0. Testing:
0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem


@@ -5,6 +5,7 @@
require 'linguist/file_blob'
require 'linguist/repository'
require 'rugged'
path = ARGV[0] || Dir.pwd
@@ -18,7 +19,8 @@ ARGV.shift
breakdown = true if ARGV[0] == "--breakdown"
if File.directory?(path)
repo = Linguist::Repository.from_directory(path)
rugged = Rugged::Repository.new(path)
repo = Linguist::Repository.new(rugged, rugged.head.target_id)
repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
percentage = ((size / repo.size.to_f) * 100)
percentage = sprintf '%.2f' % percentage


@@ -13,10 +13,11 @@ Gem::Specification.new do |s|
s.files = Dir['lib/**/*']
s.executables << 'linguist'
s.add_dependency 'charlock_holmes', '~> 0.7.1'
s.add_dependency 'charlock_holmes', '~> 0.7.3'
s.add_dependency 'escape_utils', '~> 1.0.1'
s.add_dependency 'mime-types', '~> 1.19'
s.add_dependency 'pygments.rb', '~> 0.5.4'
s.add_dependency 'pygments.rb', '~> 0.6.0'
s.add_dependency 'rugged', '~> 0.21.0'
s.add_development_dependency 'json'
s.add_development_dependency 'mocha'


@@ -112,6 +112,12 @@ module Linguist
end
end
def ruby_encoding
if hash = detect_encoding
hash[:ruby_encoding]
end
end
# Try to guess the encoding
#
# Returns: a Hash, with :encoding, :confidence, :type
@@ -256,10 +262,16 @@ module Linguist
# without changing the encoding of `data`, and
# also--importantly--without having to duplicate many (potentially
# large) strings.
encoded_newlines = ["\r\n", "\r", "\n"].
map { |nl| nl.encode(encoding).force_encoding(data.encoding) }
begin
encoded_newlines = ["\r\n", "\r", "\n"].
map { |nl| nl.encode(ruby_encoding, "ASCII-8BIT").force_encoding(data.encoding) }
data.split(Regexp.union(encoded_newlines), -1)
data.split(Regexp.union(encoded_newlines), -1)
rescue Encoding::ConverterNotFoundError
# The data is not splittable in the detected encoding. Assume it's
# one big line.
[data]
end
else
[]
end
@@ -301,15 +313,7 @@ module Linguist
#
# Returns a Language or nil if none is detected
def language
return @language if defined? @language
if defined?(@data) && @data.is_a?(String)
data = @data
else
data = lambda { (binary_mime_type? || binary?) ? "" : self.data }
end
@language = Language.detect(name.to_s, data, mode)
@language ||= Language.detect(self)
end
# Internal: Get the lexer of the blob.
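
The begin/rescue introduced above matches the reasoning in commit aa5a94cc3e: when the detected charset has no Ruby converter for plain ASCII newlines, treat the whole buffer as a single line instead of raising. A minimal standalone sketch of that fallback, not Linguist's actual method (the helper name is made up):

  # Split data on newlines encoded in the detected charset; if Ruby has no
  # converter from ASCII-8BIT to that charset, assume it's one big line.
  def split_lines(data, detected_ruby_encoding)
    newlines = ["\r\n", "\r", "\n"].map do |nl|
      nl.encode(detected_ruby_encoding, "ASCII-8BIT").force_encoding(data.encoding)
    end
    data.split(Regexp.union(newlines), -1)
  rescue Encoding::ConverterNotFoundError
    [data]   # likely binary data mis-detected as text; don't try to split it
  end

  puts split_lines("a\nb", "UTF-8").inspect   # => ["a", "b"]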


@@ -63,7 +63,8 @@ module Linguist
generated_jni_header? ||
composer_lock? ||
node_modules? ||
vcr_cassette?
vcr_cassette? ||
generated_by_zephir?
end
# Internal: Is the blob an XCode project file?
@@ -237,6 +238,13 @@ module Linguist
!!name.match(/composer.lock/)
end
# Internal: Is the blob a generated by Zephir
#
# Returns true or false.
def generated_by_zephir?
!!name.match(/.\.zep\.(?:c|h|php)$/)
end
# Is the blob a VCR Cassette file?
#
# Returns true or false
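
A quick illustrative check of the generated_by_zephir? pattern added above; the file names are made up, but Zephir builds emit .zep.c / .zep.h / .zep.php artifacts next to the .zep sources, which is what the rule targets:

  pattern = /.\.zep\.(?:c|h|php)$/
  %w[router.zep.c kernel/main.zep.h utils.zep.php router.zep].each do |name|
    puts "#{name} generated? #{!!name.match(pattern)}"
  end
  # router.zep.c, kernel/main.zep.h and utils.zep.php match; router.zep does not,
  # so hand-written Zephir sources still count toward language stats.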


@@ -92,18 +92,17 @@ module Linguist
# Public: Detects the Language of the blob.
#
# name - String filename
# data - String blob data. A block also maybe passed in for lazy
# loading. This behavior is deprecated and you should always
# pass in a String.
# mode - Optional String mode (defaults to nil)
# blob - an object that implements the Linguist `Blob` interface;
# see Linguist::LazyBlob and Linguist::FileBlob for examples
#
# Returns Language or nil.
def self.detect(name, data, mode = nil)
def self.detect(blob)
name = blob.name.to_s
# A bit of an elegant hack. If the file is executable but extensionless,
# append a "magic" extension so it can be classified with other
# languages that have shebang scripts.
if File.extname(name).empty? && mode && (mode.to_i(8) & 05) == 05
if File.extname(name).empty? && blob.mode && (blob.mode.to_i(8) & 05) == 05
name += ".script!"
end
@@ -114,7 +113,7 @@ module Linguist
# extension at all, in the case of extensionless scripts), we need to continue
# our detection work
if possible_languages.length > 1
data = data.call() if data.respond_to?(:call)
data = blob.data
possible_language_names = possible_languages.map(&:name)
# Don't bother with emptiness
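
The hunk above changes Language.detect from a (name, data, mode) signature to a single blob argument. A hedged sketch of the new call style, assuming it is run from a checkout of this repository (the path is only an example):

  require 'linguist/file_blob'
  require 'linguist/language'

  blob = Linguist::FileBlob.new("lib/linguist/language.rb", Dir.pwd)

  # old: Linguist::Language.detect(blob.name, blob.data, blob.mode)
  # new: the blob itself carries name, data and mode
  language = Linguist::Language.detect(blob)
  puts language && language.name   # => "Ruby"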


@@ -157,6 +157,7 @@ Assembly:
- nasm
extensions:
- .asm
- .inc
Augeas:
type: programming
@@ -293,6 +294,7 @@ C++:
- .inl
- .tcc
- .tpp
- .ipp
C-ObjDump:
type: data
@@ -429,6 +431,14 @@ Common Lisp:
- clisp
- ecl
Component Pascal:
type: programming
lexer: Delphi
color: "#b0ce4e"
extensions:
- .cp
- .cps
Coq:
type: programming
extensions:
@@ -519,15 +529,6 @@ Dart:
extensions:
- .dart
DCPU-16 ASM:
type: programming
lexer: dasm16
extensions:
- .dasm16
- .dasm
aliases:
- dasm16
Diff:
extensions:
- .diff
@@ -705,6 +706,12 @@ Game Maker Language:
extensions:
- .gml
GAMS:
type: programming
lexer: Text only
extensions:
- .gms
GAP:
type: programming
lexer: Text only
@@ -728,12 +735,14 @@ GLSL:
- .glsl
- .fp
- .frag
- .frg
- .fshader
- .geom
- .glslv
- .gshader
- .shader
- .vert
- .vrx
- .vshader
Genshi:
@@ -819,6 +828,9 @@ Groovy:
color: "#e69f56"
extensions:
- .groovy
- .grt
- .gtpl
- .gvy
interpreters:
- groovy
@@ -907,7 +919,7 @@ Haskell:
Haxe:
type: programming
ace_mode: haxe
color: "#346d51"
color: "#f7941e"
extensions:
- .hx
- .hxsl
@@ -922,7 +934,7 @@ Hy:
IDL:
type: programming
lexer: Text only
lexer: IDL
color: "#e3592c"
extensions:
- .pro
@@ -941,7 +953,7 @@ Inno Setup:
Idris:
type: programming
lexer: Text only
lexer: Idris
extensions:
- .idr
- .lidr
@@ -980,6 +992,13 @@ Ioke:
extensions:
- .ik
Isabelle:
type: programming
lexer: Text only
color: "#fdcd00"
extensions:
- .thy
J:
type: programming
lexer: Text only
@@ -1059,6 +1078,7 @@ JavaScript:
- ._js
- .bones
- .es6
- .frag
- .jake
- .jsfl
- .jsm
@@ -1068,6 +1088,8 @@ JavaScript:
- .pac
- .sjs
- .ssjs
- .xsjs
- .xsjslib
filenames:
- Jakefile
interpreters:
@@ -1250,6 +1272,7 @@ Markdown:
- .md
- .markdown
- .mkd
- .mkdn
- .mkdown
- .ron
@@ -1265,6 +1288,8 @@ Mathematica:
type: programming
extensions:
- .mathematica
- .m
- .nb
lexer: Text only
Matlab:
@@ -1372,6 +1397,12 @@ Nimrod:
- .nim
- .nimrod
Nix:
type: programming
lexer: Nix
extensions:
- .nix
Nu:
type: programming
lexer: Scheme
@@ -1471,6 +1502,14 @@ Org:
extensions:
- .org
Ox:
type: programming
lexer: Text only
extensions:
- .ox
- .oxh
- .oxo
Oxygene:
type: programming
lexer: Text only
@@ -1500,6 +1539,13 @@ PHP:
filenames:
- Phakefile
Pan:
type: programming
lexer: Text only
color: '#cc0000'
extensions:
- .pan
Parrot:
type: programming
color: "#f3ca0a"
@@ -1566,7 +1612,7 @@ Perl6:
Pike:
type: programming
color: "#066ab2"
lexer: C
lexer: Pike
extensions:
- .pike
- .pmod
@@ -1663,6 +1709,7 @@ Python:
- .gyp
- .lmi
- .pyde
- .pyp
- .pyt
- .pyw
- .wsgi
@@ -1688,6 +1735,12 @@ QML:
extensions:
- .qml
QMake:
lexer: Text only
extensions:
- .pro
- .pri
R:
type: programming
color: "#198ce7"
@@ -1698,6 +1751,8 @@ R:
extensions:
- .r
- .R
- .Rd
- .rd
- .rsx
filenames:
- .Rprofile
@@ -1779,7 +1834,7 @@ Red:
extensions:
- .red
- .reds
Redcode:
extensions:
- .cw
@@ -1825,12 +1880,15 @@ Ruby:
interpreters:
- ruby
filenames:
- .pryrc
- Appraisals
- Berksfile
- Buildfile
- Gemfile
- Gemfile.lock
- Guardfile
- Jarfile
- Mavenfile
- Podfile
- Thorfile
- Vagrantfile
@@ -1842,6 +1900,13 @@ Rust:
extensions:
- .rs
SAS:
type: programming
color: "#1E90FF"
lexer: Text only
extensions:
- .sas
SCSS:
type: markup
group: CSS
@@ -1850,7 +1915,7 @@ SCSS:
- .scss
SQL:
type: programming
type: data
ace_mode: sql
extensions:
- .sql
@@ -1859,6 +1924,13 @@ SQL:
- .udf
- .viw
STON:
type: data
group: Smalltalk
lexer: JSON
extensions:
- .ston
Sage:
type: programming
lexer: Python
@@ -1951,6 +2023,14 @@ Slash:
extensions:
- .sl
Slim:
group: HTML
type: markup
lexer: Slim
color: "#ff8877"
extensions:
- .slim
Smalltalk:
type: programming
color: "#596706"
@@ -1981,8 +2061,9 @@ Standard ML:
aliases:
- sml
extensions:
- .sml
- .ML
- .fun
- .sml
Stata:
type: programming
@@ -2010,6 +2091,13 @@ SuperCollider:
extensions:
- .scd
Swift:
type: programming
lexer: Swift
color: "#ffac45"
extensions:
- .swift
SystemVerilog:
type: programming
color: "#343761"
@@ -2118,6 +2206,14 @@ UnrealScript:
extensions:
- .uc
VCL:
type: programming
lexer: Perl
ace_mode: perl
color: "#0298c3"
extensions:
- .vcl
VHDL:
type: programming
lexer: vhdl
@@ -2200,6 +2296,7 @@ XML:
- .clixml
- .cproject
- .csproj
- .ct
- .dita
- .ditamap
- .ditaval
@@ -2212,6 +2309,7 @@ XML:
- .launch
- .mxml
- .nproj
- .nuspec
- .osm
- .plist
- .pluginspec
@@ -2283,6 +2381,17 @@ XSLT:
- .xslt
- .xsl
Xojo:
type: programming
lexer: VB.net
extensions:
- .xojo_code
- .xojo_menu
- .xojo_report
- .xojo_script
- .xojo_toolbar
- .xojo_window
Xtend:
type: programming
extensions:
@@ -2305,6 +2414,14 @@ Zephir:
extensions:
- .zep
Zimpl:
type: programming
lexer: Text only
extensions:
- .zimpl
- .zmpl
- .zpl
eC:
type: programming
search_term: ec
@@ -2335,6 +2452,7 @@ mupad:
nesC:
type: programming
color: "#ffce3b"
lexer: nesC
extensions:
- .nc
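
The languages.yml hunk above adds or retouches a number of entries (Component Pascal, GAMS, Isabelle, Nix, Ox, Pan, QMake, SAS, STON, Slim, Swift, VCL, Xojo and Zimpl, among others). A small sanity check against this gem revision, illustrative only:

  require 'linguist'

  %w[Swift Isabelle Xojo Ox Pan GAMS STON Slim VCL Zimpl].each do |name|
    lang = Linguist::Language[name]
    puts "#{name}: #{lang ? lang.type : 'missing from languages.yml'}"
  end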

lib/linguist/lazy_blob.rb (new file, 37 lines)

@@ -0,0 +1,37 @@
require 'linguist/blob_helper'
require 'rugged'
module Linguist
class LazyBlob
include BlobHelper
MAX_SIZE = 128 * 1024
attr_reader :repository
attr_reader :oid
attr_reader :name
attr_reader :mode
def initialize(repo, oid, name, mode = nil)
@repository = repo
@oid = oid
@name = name
@mode = mode
end
def data
load_blob!
@data
end
def size
load_blob!
@size
end
protected
def load_blob!
@data, @size = Rugged::Blob.to_buffer(repository, oid, MAX_SIZE) if @data.nil?
end
end
end
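
The new LazyBlob wraps a Rugged object id and only reads the blob's contents (capped at MAX_SIZE) when data or size is first called. A hedged usage sketch mirroring how repository.rb below builds it from a diff delta, assuming the rugged ~> 0.21 API from the gemspec bump above (the path is just an example):

  require 'rugged'
  require 'linguist/lazy_blob'

  rugged = Rugged::Repository.new(".")
  entry  = rugged.head.target.tree.path("lib/linguist.rb")   # example path

  blob = Linguist::LazyBlob.new(rugged, entry[:oid], "lib/linguist.rb",
                                entry[:filemode].to_s(8))
  puts blob.language && blob.language.name   # blob contents are fetched lazily here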


@@ -1,4 +1,5 @@
require 'linguist/file_blob'
require 'linguist/lazy_blob'
require 'rugged'
module Linguist
# A Repository is an abstraction of a Grit::Repo or a basic file
@@ -7,100 +8,146 @@ module Linguist
# Its primary purpose is for gathering language statistics across
# the entire project.
class Repository
# Public: Initialize a new Repository from a File directory
#
# base_path - A path String
#
# Returns a Repository
def self.from_directory(base_path)
new Dir["#{base_path}/**/*"].
select { |f| File.file?(f) }.
map { |path| FileBlob.new(path, base_path) }
attr_reader :repository
# Public: Create a new Repository based on the stats of
# an existing one
def self.incremental(repo, commit_oid, old_commit_oid, old_stats)
repo = self.new(repo, commit_oid)
repo.load_existing_stats(old_commit_oid, old_stats)
repo
end
# Public: Initialize a new Repository
# Public: Initialize a new Repository to be analyzed for language
# data
#
# enum - Enumerator that responds to `each` and
# yields Blob objects
# repo - a Rugged::Repository object
# commit_oid - the sha1 of the commit that will be analyzed;
# this is usually the master branch
#
# Returns a Repository
def initialize(enum)
@enum = enum
@computed_stats = false
@language = @size = nil
@sizes = Hash.new { 0 }
@file_breakdown = Hash.new { |h,k| h[k] = Array.new }
def initialize(repo, commit_oid)
@repository = repo
@commit_oid = commit_oid
raise TypeError, 'commit_oid must be a commit SHA1' unless commit_oid.is_a?(String)
end
# Public: Load the results of a previous analysis on this repository
# to speed up the new scan.
#
# The new analysis will be performed incrementally as to only take
# into account the file changes since the last time the repository
# was scanned
#
# old_commit_oid - the sha1 of the commit that was previously analyzed
# old_stats - the result of the previous analysis, obtained by calling
# Repository#cache on the old repository
#
# Returns nothing
def load_existing_stats(old_commit_oid, old_stats)
@old_commit_oid = old_commit_oid
@old_stats = old_stats
nil
end
# Public: Returns a breakdown of language stats.
#
# Examples
#
# # => { Language['Ruby'] => 46319,
# Language['JavaScript'] => 258 }
# # => { 'Ruby' => 46319,
# 'JavaScript' => 258 }
#
# Returns a Hash of Language keys and Integer size values.
# Returns a Hash of language names and Integer size values.
def languages
compute_stats
@sizes
@sizes ||= begin
sizes = Hash.new { 0 }
cache.each do |_, (language, size)|
sizes[language] += size
end
sizes
end
end
# Public: Get primary Language of repository.
#
# Returns a Language
# Returns a language name
def language
compute_stats
@language
@language ||= begin
primary = languages.max_by { |(_, size)| size }
primary && primary[0]
end
end
# Public: Get the total size of the repository.
#
# Returns a byte size Integer
def size
compute_stats
@size
@size ||= languages.inject(0) { |s,(_,v)| s + v }
end
# Public: Return the language breakdown of this repository by file
#
# Returns a map of language names => [filenames...]
def breakdown_by_file
compute_stats
@file_breakdown
@file_breakdown ||= begin
breakdown = Hash.new { |h,k| h[k] = Array.new }
cache.each do |filename, (language, _)|
breakdown[language] << filename
end
breakdown
end
end
# Internal: Compute language breakdown for each blob in the Repository.
# Public: Return the cached results of the analysis
#
# Returns nothing
def compute_stats
return if @computed_stats
# This is a per-file breakdown that can be passed to other instances
# of Linguist::Repository to perform incremental scans
#
# Returns a map of filename => [language, size]
def cache
@cache ||= begin
if @old_commit_oid == @commit_oid
@old_stats
else
compute_stats(@old_commit_oid, @commit_oid, @old_stats)
end
end
end
@enum.each do |blob|
# Skip files that are likely binary
next if blob.likely_binary?
protected
def compute_stats(old_commit_oid, commit_oid, cache = nil)
file_map = cache ? cache.dup : {}
old_tree = old_commit_oid && Rugged::Commit.lookup(repository, old_commit_oid).tree
new_tree = Rugged::Commit.lookup(repository, commit_oid).tree
# Skip vendored or generated blobs
next if blob.vendored? || blob.generated? || blob.language.nil?
diff = Rugged::Tree.diff(repository, old_tree, new_tree)
# Only include programming languages and acceptable markup languages
if blob.language.type == :programming || Language.detectable_markup.include?(blob.language.name)
diff.each_delta do |delta|
old = delta.old_file[:path]
new = delta.new_file[:path]
# Build up the per-file breakdown stats
@file_breakdown[blob.language.group.name] << blob.name
file_map.delete(old)
next if delta.binary
@sizes[blob.language.group] += blob.size
if [:added, :modified].include? delta.status
# Skip submodules
mode = delta.new_file[:mode]
next if (mode & 040000) != 0
blob = Linguist::LazyBlob.new(repository, delta.new_file[:oid], new, mode.to_s(8))
# Skip vendored or generated blobs
next if blob.vendored? || blob.generated? || blob.language.nil?
# Only include programming languages and acceptable markup languages
if blob.language.type == :programming || Language.detectable_markup.include?(blob.language.name)
file_map[new] = [blob.language.group.name, blob.size]
end
end
end
# Compute total size
@size = @sizes.inject(0) { |s,(_,v)| s + v }
# Get primary language
if primary = @sizes.max_by { |(_, size)| size }
@language = primary[0]
end
@computed_stats = true
nil
file_map
end
end
end
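
Taken together, the rewrite above replaces the blob-enumerator API with a Rugged-backed one and adds Repository.incremental so a previous scan's cache can seed the next one. A usage sketch under those assumptions; persistence of the cache between runs is left out and the values shown are illustrative:

  require 'rugged'
  require 'linguist/repository'

  rugged = Rugged::Repository.new(".")

  first   = Linguist::Repository.new(rugged, rugged.head.target_id)
  old_oid = rugged.head.target_id
  stats   = first.cache            # per-file {filename => [language, size]} map

  # ...new commits land; later, reuse the cached stats for an incremental scan...
  second = Linguist::Repository.incremental(rugged, rugged.head.target_id,
                                            old_oid, stats)
  puts second.languages.inspect    # e.g. {"Ruby" => 46319, "JavaScript" => 258}

Because the cache holds only plain strings and integers, it can be serialized and handed back to a later Repository instance, which is what lets compute_stats above walk just the tree diff instead of every blob.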

File diff suppressed because it is too large


@@ -28,7 +28,7 @@ module Linguist
#
# Returns nothing.
def self.each(&block)
Dir.entries(ROOT).each do |category|
Dir.entries(ROOT).sort!.each do |category|
next if category == '.' || category == '..'
# Skip text and binary for now
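
The .sort! added above pairs with "Set a sort order for the samples.json file's content": Dir.entries returns names in filesystem order, which differs between machines, so the regenerated samples.json could churn from run to run. Illustrative only:

  Dir.entries("samples")        # e.g. ["..", "Ruby", ".", "ABAP"] -- order not guaranteed
  Dir.entries("samples").sort   # => [".", "..", "ABAP", "Ruby"]   -- deterministic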


@@ -43,6 +43,7 @@
# Vendored dependencies
- thirdparty/
- vendors?/
- extern(al)?/
# Debian packaging
- ^debian/
@@ -148,7 +149,7 @@
- (^|/)[Mm]icrosoft([Mm]vc)?([Aa]jax|[Vv]alidation)(\.debug)?\.js$
# NuGet
- ^[Pp]ackages/
- ^[Pp]ackages\/.+\.\d+\/
# ExtJS
- (^|/)extjs/.*?\.js$
@@ -168,6 +169,9 @@
- (^|/)extjs/src/
- (^|/)extjs/welcome/
# Html5shiv
- (^|/)html5shiv(\.min)?\.js$
# Samples folders
- ^[Ss]amples/
@@ -196,3 +200,12 @@
# Mercury --use-subdirs
- Mercury/
# R packages
- ^vignettes/
- ^inst/extdata/
# Octicons
- octicons.css
- octicons.min.css
- sprockets-octicons.scss
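
The NuGet rule above was tightened (commit f5895216a8): instead of ignoring everything under packages/, only directories whose names end in a dotted version number are treated as vendored, since NuGet appends the version to restored packages. A quick illustrative check with made-up paths:

  nuget = /^[Pp]ackages\/.+\.\d+\//
  puts !!("packages/Newtonsoft.Json.6.0.3/lib/net45/Newtonsoft.Json.dll" =~ nuget)   # true
  puts !!("packages/config/app_settings.rb" =~ nuget)                                # false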


@@ -1,3 +1,3 @@
module Linguist
VERSION = "2.11.1"
VERSION = "3.0.0"
end

File diff suppressed because it is too large

samples/Assembly/FASM.asm (new file, 350 lines)

@@ -0,0 +1,350 @@
; flat assembler interface for Win32
; Copyright (c) 1999-2014, Tomasz Grysztar.
; All rights reserved.
format PE console
section '.text' code readable executable
start:
mov [con_handle],STD_OUTPUT_HANDLE
mov esi,_logo
call display_string
call get_params
jc information
call init_memory
mov esi,_memory_prefix
call display_string
mov eax,[memory_end]
sub eax,[memory_start]
add eax,[additional_memory_end]
sub eax,[additional_memory]
shr eax,10
call display_number
mov esi,_memory_suffix
call display_string
call [GetTickCount]
mov [start_time],eax
call preprocessor
call parser
call assembler
call formatter
call display_user_messages
movzx eax,[current_pass]
inc eax
call display_number
mov esi,_passes_suffix
call display_string
call [GetTickCount]
sub eax,[start_time]
xor edx,edx
mov ebx,100
div ebx
or eax,eax
jz display_bytes_count
xor edx,edx
mov ebx,10
div ebx
push edx
call display_number
mov dl,'.'
call display_character
pop eax
call display_number
mov esi,_seconds_suffix
call display_string
display_bytes_count:
mov eax,[written_size]
call display_number
mov esi,_bytes_suffix
call display_string
xor al,al
jmp exit_program
information:
mov esi,_usage
call display_string
mov al,1
jmp exit_program
get_params:
mov [input_file],0
mov [output_file],0
mov [symbols_file],0
mov [memory_setting],0
mov [passes_limit],100
call [GetCommandLine]
mov esi,eax
mov edi,params
find_command_start:
lodsb
cmp al,20h
je find_command_start
cmp al,22h
je skip_quoted_name
skip_name:
lodsb
cmp al,20h
je find_param
or al,al
jz all_params
jmp skip_name
skip_quoted_name:
lodsb
cmp al,22h
je find_param
or al,al
jz all_params
jmp skip_quoted_name
find_param:
lodsb
cmp al,20h
je find_param
cmp al,'-'
je option_param
cmp al,0Dh
je all_params
or al,al
jz all_params
cmp [input_file],0
jne get_output_file
mov [input_file],edi
jmp process_param
get_output_file:
cmp [output_file],0
jne bad_params
mov [output_file],edi
process_param:
cmp al,22h
je string_param
copy_param:
stosb
lodsb
cmp al,20h
je param_end
cmp al,0Dh
je param_end
or al,al
jz param_end
jmp copy_param
string_param:
lodsb
cmp al,22h
je string_param_end
cmp al,0Dh
je param_end
or al,al
jz param_end
stosb
jmp string_param
option_param:
lodsb
cmp al,'m'
je memory_option
cmp al,'M'
je memory_option
cmp al,'p'
je passes_option
cmp al,'P'
je passes_option
cmp al,'s'
je symbols_option
cmp al,'S'
je symbols_option
bad_params:
stc
ret
get_option_value:
xor eax,eax
mov edx,eax
get_option_digit:
lodsb
cmp al,20h
je option_value_ok
cmp al,0Dh
je option_value_ok
or al,al
jz option_value_ok
sub al,30h
jc invalid_option_value
cmp al,9
ja invalid_option_value
imul edx,10
jo invalid_option_value
add edx,eax
jc invalid_option_value
jmp get_option_digit
option_value_ok:
dec esi
clc
ret
invalid_option_value:
stc
ret
memory_option:
lodsb
cmp al,20h
je memory_option
cmp al,0Dh
je bad_params
or al,al
jz bad_params
dec esi
call get_option_value
or edx,edx
jz bad_params
cmp edx,1 shl (32-10)
jae bad_params
mov [memory_setting],edx
jmp find_param
passes_option:
lodsb
cmp al,20h
je passes_option
cmp al,0Dh
je bad_params
or al,al
jz bad_params
dec esi
call get_option_value
or edx,edx
jz bad_params
cmp edx,10000h
ja bad_params
mov [passes_limit],dx
jmp find_param
symbols_option:
mov [symbols_file],edi
find_symbols_file_name:
lodsb
cmp al,20h
jne process_param
jmp find_symbols_file_name
param_end:
dec esi
string_param_end:
xor al,al
stosb
jmp find_param
all_params:
cmp [input_file],0
je bad_params
clc
ret
include 'system.inc'
include '..\errors.inc'
include '..\symbdump.inc'
include '..\preproce.inc'
include '..\parser.inc'
include '..\exprpars.inc'
include '..\assemble.inc'
include '..\exprcalc.inc'
include '..\formats.inc'
include '..\x86_64.inc'
include '..\avx.inc'
include '..\tables.inc'
include '..\messages.inc'
section '.data' data readable writeable
include '..\version.inc'
_copyright db 'Copyright (c) 1999-2014, Tomasz Grysztar',0Dh,0Ah,0
_logo db 'flat assembler version ',VERSION_STRING,0
_usage db 0Dh,0Ah
db 'usage: fasm <source> [output]',0Dh,0Ah
db 'optional settings:',0Dh,0Ah
db ' -m <limit> set the limit in kilobytes for the available memory',0Dh,0Ah
db ' -p <limit> set the maximum allowed number of passes',0Dh,0Ah
db ' -s <file> dump symbolic information for debugging',0Dh,0Ah
db 0
_memory_prefix db ' (',0
_memory_suffix db ' kilobytes memory)',0Dh,0Ah,0
_passes_suffix db ' passes, ',0
_seconds_suffix db ' seconds, ',0
_bytes_suffix db ' bytes.',0Dh,0Ah,0
align 4
include '..\variable.inc'
con_handle dd ?
memory_setting dd ?
start_time dd ?
bytes_count dd ?
displayed_count dd ?
character db ?
last_displayed rb 2
params rb 1000h
options rb 1000h
buffer rb 4000h
stack 10000h
section '.idata' import data readable writeable
dd 0,0,0,rva kernel_name,rva kernel_table
dd 0,0,0,0,0
kernel_table:
ExitProcess dd rva _ExitProcess
CreateFile dd rva _CreateFileA
ReadFile dd rva _ReadFile
WriteFile dd rva _WriteFile
CloseHandle dd rva _CloseHandle
SetFilePointer dd rva _SetFilePointer
GetCommandLine dd rva _GetCommandLineA
GetEnvironmentVariable dd rva _GetEnvironmentVariable
GetStdHandle dd rva _GetStdHandle
VirtualAlloc dd rva _VirtualAlloc
VirtualFree dd rva _VirtualFree
GetTickCount dd rva _GetTickCount
GetSystemTime dd rva _GetSystemTime
GlobalMemoryStatus dd rva _GlobalMemoryStatus
dd 0
kernel_name db 'KERNEL32.DLL',0
_ExitProcess dw 0
db 'ExitProcess',0
_CreateFileA dw 0
db 'CreateFileA',0
_ReadFile dw 0
db 'ReadFile',0
_WriteFile dw 0
db 'WriteFile',0
_CloseHandle dw 0
db 'CloseHandle',0
_SetFilePointer dw 0
db 'SetFilePointer',0
_GetCommandLineA dw 0
db 'GetCommandLineA',0
_GetEnvironmentVariable dw 0
db 'GetEnvironmentVariableA',0
_GetStdHandle dw 0
db 'GetStdHandle',0
_VirtualAlloc dw 0
db 'VirtualAlloc',0
_VirtualFree dw 0
db 'VirtualFree',0
_GetTickCount dw 0
db 'GetTickCount',0
_GetSystemTime dw 0
db 'GetSystemTime',0
_GlobalMemoryStatus dw 0
db 'GlobalMemoryStatus',0
section '.reloc' fixups data readable discardable

samples/Assembly/SYSTEM.inc (new file, 503 lines)

@@ -0,0 +1,503 @@
; flat assembler interface for Win32
; Copyright (c) 1999-2014, Tomasz Grysztar.
; All rights reserved.
CREATE_NEW = 1
CREATE_ALWAYS = 2
OPEN_EXISTING = 3
OPEN_ALWAYS = 4
TRUNCATE_EXISTING = 5
FILE_SHARE_READ = 1
FILE_SHARE_WRITE = 2
FILE_SHARE_DELETE = 4
GENERIC_READ = 80000000h
GENERIC_WRITE = 40000000h
STD_INPUT_HANDLE = 0FFFFFFF6h
STD_OUTPUT_HANDLE = 0FFFFFFF5h
STD_ERROR_HANDLE = 0FFFFFFF4h
MEM_COMMIT = 1000h
MEM_RESERVE = 2000h
MEM_DECOMMIT = 4000h
MEM_RELEASE = 8000h
MEM_FREE = 10000h
MEM_PRIVATE = 20000h
MEM_MAPPED = 40000h
MEM_RESET = 80000h
MEM_TOP_DOWN = 100000h
PAGE_NOACCESS = 1
PAGE_READONLY = 2
PAGE_READWRITE = 4
PAGE_WRITECOPY = 8
PAGE_EXECUTE = 10h
PAGE_EXECUTE_READ = 20h
PAGE_EXECUTE_READWRITE = 40h
PAGE_EXECUTE_WRITECOPY = 80h
PAGE_GUARD = 100h
PAGE_NOCACHE = 200h
init_memory:
xor eax,eax
mov [memory_start],eax
mov eax,esp
and eax,not 0FFFh
add eax,1000h-10000h
mov [stack_limit],eax
mov eax,[memory_setting]
shl eax,10
jnz allocate_memory
push buffer
call [GlobalMemoryStatus]
mov eax,dword [buffer+20]
mov edx,dword [buffer+12]
cmp eax,0
jl large_memory
cmp edx,0
jl large_memory
shr eax,2
add eax,edx
jmp allocate_memory
large_memory:
mov eax,80000000h
allocate_memory:
mov edx,eax
shr edx,2
mov ecx,eax
sub ecx,edx
mov [memory_end],ecx
mov [additional_memory_end],edx
push PAGE_READWRITE
push MEM_COMMIT
push eax
push 0
call [VirtualAlloc]
or eax,eax
jz not_enough_memory
mov [memory_start],eax
add eax,[memory_end]
mov [memory_end],eax
mov [additional_memory],eax
add [additional_memory_end],eax
ret
not_enough_memory:
mov eax,[additional_memory_end]
shl eax,1
cmp eax,4000h
jb out_of_memory
jmp allocate_memory
exit_program:
movzx eax,al
push eax
mov eax,[memory_start]
test eax,eax
jz do_exit
push MEM_RELEASE
push 0
push eax
call [VirtualFree]
do_exit:
call [ExitProcess]
get_environment_variable:
mov ecx,[memory_end]
sub ecx,edi
cmp ecx,4000h
jbe buffer_for_variable_ok
mov ecx,4000h
buffer_for_variable_ok:
push ecx
push edi
push esi
call [GetEnvironmentVariable]
add edi,eax
cmp edi,[memory_end]
jae out_of_memory
ret
open:
push 0
push 0
push OPEN_EXISTING
push 0
push FILE_SHARE_READ
push GENERIC_READ
push edx
call [CreateFile]
cmp eax,-1
je file_error
mov ebx,eax
clc
ret
file_error:
stc
ret
create:
push 0
push 0
push CREATE_ALWAYS
push 0
push FILE_SHARE_READ
push GENERIC_WRITE
push edx
call [CreateFile]
cmp eax,-1
je file_error
mov ebx,eax
clc
ret
write:
push 0
push bytes_count
push ecx
push edx
push ebx
call [WriteFile]
or eax,eax
jz file_error
clc
ret
read:
mov ebp,ecx
push 0
push bytes_count
push ecx
push edx
push ebx
call [ReadFile]
or eax,eax
jz file_error
cmp ebp,[bytes_count]
jne file_error
clc
ret
close:
push ebx
call [CloseHandle]
ret
lseek:
movzx eax,al
push eax
push 0
push edx
push ebx
call [SetFilePointer]
ret
display_string:
push [con_handle]
call [GetStdHandle]
mov ebp,eax
mov edi,esi
or ecx,-1
xor al,al
repne scasb
neg ecx
sub ecx,2
push 0
push bytes_count
push ecx
push esi
push ebp
call [WriteFile]
ret
display_character:
push ebx
mov [character],dl
push [con_handle]
call [GetStdHandle]
mov ebx,eax
push 0
push bytes_count
push 1
push character
push ebx
call [WriteFile]
pop ebx
ret
display_number:
push ebx
mov ecx,1000000000
xor edx,edx
xor bl,bl
display_loop:
div ecx
push edx
cmp ecx,1
je display_digit
or bl,bl
jnz display_digit
or al,al
jz digit_ok
not bl
display_digit:
mov dl,al
add dl,30h
push ecx
call display_character
pop ecx
digit_ok:
mov eax,ecx
xor edx,edx
mov ecx,10
div ecx
mov ecx,eax
pop eax
or ecx,ecx
jnz display_loop
pop ebx
ret
display_user_messages:
mov [displayed_count],0
call show_display_buffer
cmp [displayed_count],1
jb line_break_ok
je make_line_break
mov ax,word [last_displayed]
cmp ax,0A0Dh
je line_break_ok
cmp ax,0D0Ah
je line_break_ok
make_line_break:
mov word [buffer],0A0Dh
push [con_handle]
call [GetStdHandle]
push 0
push bytes_count
push 2
push buffer
push eax
call [WriteFile]
line_break_ok:
ret
display_block:
add [displayed_count],ecx
cmp ecx,1
ja take_last_two_characters
jb block_displayed
mov al,[last_displayed+1]
mov ah,[esi]
mov word [last_displayed],ax
jmp block_ok
take_last_two_characters:
mov ax,[esi+ecx-2]
mov word [last_displayed],ax
block_ok:
push ecx
push [con_handle]
call [GetStdHandle]
pop ecx
push 0
push bytes_count
push ecx
push esi
push eax
call [WriteFile]
block_displayed:
ret
fatal_error:
mov [con_handle],STD_ERROR_HANDLE
mov esi,error_prefix
call display_string
pop esi
call display_string
mov esi,error_suffix
call display_string
mov al,0FFh
jmp exit_program
assembler_error:
mov [con_handle],STD_ERROR_HANDLE
call display_user_messages
push dword 0
mov ebx,[current_line]
get_error_lines:
mov eax,[ebx]
cmp byte [eax],0
je get_next_error_line
push ebx
test byte [ebx+7],80h
jz display_error_line
mov edx,ebx
find_definition_origin:
mov edx,[edx+12]
test byte [edx+7],80h
jnz find_definition_origin
push edx
get_next_error_line:
mov ebx,[ebx+8]
jmp get_error_lines
display_error_line:
mov esi,[ebx]
call display_string
mov esi,line_number_start
call display_string
mov eax,[ebx+4]
and eax,7FFFFFFFh
call display_number
mov dl,']'
call display_character
pop esi
cmp ebx,esi
je line_number_ok
mov dl,20h
call display_character
push esi
mov esi,[esi]
movzx ecx,byte [esi]
inc esi
call display_block
mov esi,line_number_start
call display_string
pop esi
mov eax,[esi+4]
and eax,7FFFFFFFh
call display_number
mov dl,']'
call display_character
line_number_ok:
mov esi,line_data_start
call display_string
mov esi,ebx
mov edx,[esi]
call open
mov al,2
xor edx,edx
call lseek
mov edx,[esi+8]
sub eax,edx
jz line_data_displayed
push eax
xor al,al
call lseek
mov ecx,[esp]
mov edx,[additional_memory]
lea eax,[edx+ecx]
cmp eax,[additional_memory_end]
ja out_of_memory
call read
call close
pop ecx
mov esi,[additional_memory]
get_line_data:
mov al,[esi]
cmp al,0Ah
je display_line_data
cmp al,0Dh
je display_line_data
cmp al,1Ah
je display_line_data
or al,al
jz display_line_data
inc esi
loop get_line_data
display_line_data:
mov ecx,esi
mov esi,[additional_memory]
sub ecx,esi
call display_block
line_data_displayed:
mov esi,cr_lf
call display_string
pop ebx
or ebx,ebx
jnz display_error_line
mov esi,error_prefix
call display_string
pop esi
call display_string
mov esi,error_suffix
call display_string
mov al,2
jmp exit_program
make_timestamp:
push buffer
call [GetSystemTime]
movzx ecx,word [buffer]
mov eax,ecx
sub eax,1970
mov ebx,365
mul ebx
mov ebp,eax
mov eax,ecx
sub eax,1969
shr eax,2
add ebp,eax
mov eax,ecx
sub eax,1901
mov ebx,100
div ebx
sub ebp,eax
mov eax,ecx
xor edx,edx
sub eax,1601
mov ebx,400
div ebx
add ebp,eax
movzx ecx,word [buffer+2]
mov eax,ecx
dec eax
mov ebx,30
mul ebx
add ebp,eax
cmp ecx,8
jbe months_correction
mov eax,ecx
sub eax,7
shr eax,1
add ebp,eax
mov ecx,8
months_correction:
mov eax,ecx
shr eax,1
add ebp,eax
cmp ecx,2
jbe day_correction_ok
sub ebp,2
movzx ecx,word [buffer]
test ecx,11b
jnz day_correction_ok
xor edx,edx
mov eax,ecx
mov ebx,100
div ebx
or edx,edx
jnz day_correction
mov eax,ecx
mov ebx,400
div ebx
or edx,edx
jnz day_correction_ok
day_correction:
inc ebp
day_correction_ok:
movzx eax,word [buffer+6]
dec eax
add eax,ebp
mov ebx,24
mul ebx
movzx ecx,word [buffer+8]
add eax,ecx
mov ebx,60
mul ebx
movzx ecx,word [buffer+10]
add eax,ecx
mov ebx,60
mul ebx
movzx ecx,word [buffer+12]
add eax,ecx
adc edx,0
ret
error_prefix db 'error: ',0
error_suffix db '.'
cr_lf db 0Dh,0Ah,0
line_number_start db ' [',0
line_data_start db ':',0Dh,0Ah,0

samples/Assembly/X86_64.inc (new file, 7060 lines)

File diff suppressed because it is too large


@@ -0,0 +1,664 @@
//
// detail/impl/epoll_reactor.ipp
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2013 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef BOOST_ASIO_DETAIL_IMPL_EPOLL_REACTOR_IPP
#define BOOST_ASIO_DETAIL_IMPL_EPOLL_REACTOR_IPP
#if defined(_MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)
#include <boost/asio/detail/config.hpp>
#if defined(BOOST_ASIO_HAS_EPOLL)
#include <cstddef>
#include <sys/epoll.h>
#include <boost/asio/detail/epoll_reactor.hpp>
#include <boost/asio/detail/throw_error.hpp>
#include <boost/asio/error.hpp>
#if defined(BOOST_ASIO_HAS_TIMERFD)
# include <sys/timerfd.h>
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
#include <boost/asio/detail/push_options.hpp>
namespace boost {
namespace asio {
namespace detail {
epoll_reactor::epoll_reactor(boost::asio::io_service& io_service)
: boost::asio::detail::service_base<epoll_reactor>(io_service),
io_service_(use_service<io_service_impl>(io_service)),
mutex_(),
interrupter_(),
epoll_fd_(do_epoll_create()),
timer_fd_(do_timerfd_create()),
shutdown_(false)
{
// Add the interrupter's descriptor to epoll.
epoll_event ev = { 0, { 0 } };
ev.events = EPOLLIN | EPOLLERR | EPOLLET;
ev.data.ptr = &interrupter_;
epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, interrupter_.read_descriptor(), &ev);
interrupter_.interrupt();
// Add the timer descriptor to epoll.
if (timer_fd_ != -1)
{
ev.events = EPOLLIN | EPOLLERR;
ev.data.ptr = &timer_fd_;
epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, timer_fd_, &ev);
}
}
epoll_reactor::~epoll_reactor()
{
if (epoll_fd_ != -1)
close(epoll_fd_);
if (timer_fd_ != -1)
close(timer_fd_);
}
void epoll_reactor::shutdown_service()
{
mutex::scoped_lock lock(mutex_);
shutdown_ = true;
lock.unlock();
op_queue<operation> ops;
while (descriptor_state* state = registered_descriptors_.first())
{
for (int i = 0; i < max_ops; ++i)
ops.push(state->op_queue_[i]);
state->shutdown_ = true;
registered_descriptors_.free(state);
}
timer_queues_.get_all_timers(ops);
io_service_.abandon_operations(ops);
}
void epoll_reactor::fork_service(boost::asio::io_service::fork_event fork_ev)
{
if (fork_ev == boost::asio::io_service::fork_child)
{
if (epoll_fd_ != -1)
::close(epoll_fd_);
epoll_fd_ = -1;
epoll_fd_ = do_epoll_create();
if (timer_fd_ != -1)
::close(timer_fd_);
timer_fd_ = -1;
timer_fd_ = do_timerfd_create();
interrupter_.recreate();
// Add the interrupter's descriptor to epoll.
epoll_event ev = { 0, { 0 } };
ev.events = EPOLLIN | EPOLLERR | EPOLLET;
ev.data.ptr = &interrupter_;
epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, interrupter_.read_descriptor(), &ev);
interrupter_.interrupt();
// Add the timer descriptor to epoll.
if (timer_fd_ != -1)
{
ev.events = EPOLLIN | EPOLLERR;
ev.data.ptr = &timer_fd_;
epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, timer_fd_, &ev);
}
update_timeout();
// Re-register all descriptors with epoll.
mutex::scoped_lock descriptors_lock(registered_descriptors_mutex_);
for (descriptor_state* state = registered_descriptors_.first();
state != 0; state = state->next_)
{
ev.events = state->registered_events_;
ev.data.ptr = state;
int result = epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, state->descriptor_, &ev);
if (result != 0)
{
boost::system::error_code ec(errno,
boost::asio::error::get_system_category());
boost::asio::detail::throw_error(ec, "epoll re-registration");
}
}
}
}
void epoll_reactor::init_task()
{
io_service_.init_task();
}
int epoll_reactor::register_descriptor(socket_type descriptor,
epoll_reactor::per_descriptor_data& descriptor_data)
{
descriptor_data = allocate_descriptor_state();
{
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
descriptor_data->reactor_ = this;
descriptor_data->descriptor_ = descriptor;
descriptor_data->shutdown_ = false;
}
epoll_event ev = { 0, { 0 } };
ev.events = EPOLLIN | EPOLLERR | EPOLLHUP | EPOLLPRI | EPOLLET;
descriptor_data->registered_events_ = ev.events;
ev.data.ptr = descriptor_data;
int result = epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, descriptor, &ev);
if (result != 0)
return errno;
return 0;
}
int epoll_reactor::register_internal_descriptor(
int op_type, socket_type descriptor,
epoll_reactor::per_descriptor_data& descriptor_data, reactor_op* op)
{
descriptor_data = allocate_descriptor_state();
{
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
descriptor_data->reactor_ = this;
descriptor_data->descriptor_ = descriptor;
descriptor_data->shutdown_ = false;
descriptor_data->op_queue_[op_type].push(op);
}
epoll_event ev = { 0, { 0 } };
ev.events = EPOLLIN | EPOLLERR | EPOLLHUP | EPOLLPRI | EPOLLET;
descriptor_data->registered_events_ = ev.events;
ev.data.ptr = descriptor_data;
int result = epoll_ctl(epoll_fd_, EPOLL_CTL_ADD, descriptor, &ev);
if (result != 0)
return errno;
return 0;
}
void epoll_reactor::move_descriptor(socket_type,
epoll_reactor::per_descriptor_data& target_descriptor_data,
epoll_reactor::per_descriptor_data& source_descriptor_data)
{
target_descriptor_data = source_descriptor_data;
source_descriptor_data = 0;
}
void epoll_reactor::start_op(int op_type, socket_type descriptor,
epoll_reactor::per_descriptor_data& descriptor_data, reactor_op* op,
bool is_continuation, bool allow_speculative)
{
if (!descriptor_data)
{
op->ec_ = boost::asio::error::bad_descriptor;
post_immediate_completion(op, is_continuation);
return;
}
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
if (descriptor_data->shutdown_)
{
post_immediate_completion(op, is_continuation);
return;
}
if (descriptor_data->op_queue_[op_type].empty())
{
if (allow_speculative
&& (op_type != read_op
|| descriptor_data->op_queue_[except_op].empty()))
{
if (op->perform())
{
descriptor_lock.unlock();
io_service_.post_immediate_completion(op, is_continuation);
return;
}
if (op_type == write_op)
{
if ((descriptor_data->registered_events_ & EPOLLOUT) == 0)
{
epoll_event ev = { 0, { 0 } };
ev.events = descriptor_data->registered_events_ | EPOLLOUT;
ev.data.ptr = descriptor_data;
if (epoll_ctl(epoll_fd_, EPOLL_CTL_MOD, descriptor, &ev) == 0)
{
descriptor_data->registered_events_ |= ev.events;
}
else
{
op->ec_ = boost::system::error_code(errno,
boost::asio::error::get_system_category());
io_service_.post_immediate_completion(op, is_continuation);
return;
}
}
}
}
else
{
if (op_type == write_op)
{
descriptor_data->registered_events_ |= EPOLLOUT;
}
epoll_event ev = { 0, { 0 } };
ev.events = descriptor_data->registered_events_;
ev.data.ptr = descriptor_data;
epoll_ctl(epoll_fd_, EPOLL_CTL_MOD, descriptor, &ev);
}
}
descriptor_data->op_queue_[op_type].push(op);
io_service_.work_started();
}
void epoll_reactor::cancel_ops(socket_type,
epoll_reactor::per_descriptor_data& descriptor_data)
{
if (!descriptor_data)
return;
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
op_queue<operation> ops;
for (int i = 0; i < max_ops; ++i)
{
while (reactor_op* op = descriptor_data->op_queue_[i].front())
{
op->ec_ = boost::asio::error::operation_aborted;
descriptor_data->op_queue_[i].pop();
ops.push(op);
}
}
descriptor_lock.unlock();
io_service_.post_deferred_completions(ops);
}
void epoll_reactor::deregister_descriptor(socket_type descriptor,
epoll_reactor::per_descriptor_data& descriptor_data, bool closing)
{
if (!descriptor_data)
return;
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
if (!descriptor_data->shutdown_)
{
if (closing)
{
// The descriptor will be automatically removed from the epoll set when
// it is closed.
}
else
{
epoll_event ev = { 0, { 0 } };
epoll_ctl(epoll_fd_, EPOLL_CTL_DEL, descriptor, &ev);
}
op_queue<operation> ops;
for (int i = 0; i < max_ops; ++i)
{
while (reactor_op* op = descriptor_data->op_queue_[i].front())
{
op->ec_ = boost::asio::error::operation_aborted;
descriptor_data->op_queue_[i].pop();
ops.push(op);
}
}
descriptor_data->descriptor_ = -1;
descriptor_data->shutdown_ = true;
descriptor_lock.unlock();
free_descriptor_state(descriptor_data);
descriptor_data = 0;
io_service_.post_deferred_completions(ops);
}
}
void epoll_reactor::deregister_internal_descriptor(socket_type descriptor,
epoll_reactor::per_descriptor_data& descriptor_data)
{
if (!descriptor_data)
return;
mutex::scoped_lock descriptor_lock(descriptor_data->mutex_);
if (!descriptor_data->shutdown_)
{
epoll_event ev = { 0, { 0 } };
epoll_ctl(epoll_fd_, EPOLL_CTL_DEL, descriptor, &ev);
op_queue<operation> ops;
for (int i = 0; i < max_ops; ++i)
ops.push(descriptor_data->op_queue_[i]);
descriptor_data->descriptor_ = -1;
descriptor_data->shutdown_ = true;
descriptor_lock.unlock();
free_descriptor_state(descriptor_data);
descriptor_data = 0;
}
}
void epoll_reactor::run(bool block, op_queue<operation>& ops)
{
// This code relies on the fact that the task_io_service queues the reactor
// task behind all descriptor operations generated by this function. This
// means that, by the time we reach this point, any previously returned
// descriptor operations have already been dequeued. Therefore it is now safe
// for us to reuse and return them for the task_io_service to queue again.
// Calculate a timeout only if timerfd is not used.
int timeout;
if (timer_fd_ != -1)
timeout = block ? -1 : 0;
else
{
mutex::scoped_lock lock(mutex_);
timeout = block ? get_timeout() : 0;
}
// Block on the epoll descriptor.
epoll_event events[128];
int num_events = epoll_wait(epoll_fd_, events, 128, timeout);
#if defined(BOOST_ASIO_HAS_TIMERFD)
bool check_timers = (timer_fd_ == -1);
#else // defined(BOOST_ASIO_HAS_TIMERFD)
bool check_timers = true;
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
// Dispatch the waiting events.
for (int i = 0; i < num_events; ++i)
{
void* ptr = events[i].data.ptr;
if (ptr == &interrupter_)
{
// No need to reset the interrupter since we're leaving the descriptor
// in a ready-to-read state and relying on edge-triggered notifications
// to make it so that we only get woken up when the descriptor's epoll
// registration is updated.
#if defined(BOOST_ASIO_HAS_TIMERFD)
if (timer_fd_ == -1)
check_timers = true;
#else // defined(BOOST_ASIO_HAS_TIMERFD)
check_timers = true;
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
}
#if defined(BOOST_ASIO_HAS_TIMERFD)
else if (ptr == &timer_fd_)
{
check_timers = true;
}
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
else
{
// The descriptor operation doesn't count as work in and of itself, so we
// don't call work_started() here. This still allows the io_service to
// stop if the only remaining operations are descriptor operations.
descriptor_state* descriptor_data = static_cast<descriptor_state*>(ptr);
descriptor_data->set_ready_events(events[i].events);
ops.push(descriptor_data);
}
}
if (check_timers)
{
mutex::scoped_lock common_lock(mutex_);
timer_queues_.get_ready_timers(ops);
#if defined(BOOST_ASIO_HAS_TIMERFD)
if (timer_fd_ != -1)
{
itimerspec new_timeout;
itimerspec old_timeout;
int flags = get_timeout(new_timeout);
timerfd_settime(timer_fd_, flags, &new_timeout, &old_timeout);
}
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
}
}
void epoll_reactor::interrupt()
{
epoll_event ev = { 0, { 0 } };
ev.events = EPOLLIN | EPOLLERR | EPOLLET;
ev.data.ptr = &interrupter_;
epoll_ctl(epoll_fd_, EPOLL_CTL_MOD, interrupter_.read_descriptor(), &ev);
}
int epoll_reactor::do_epoll_create()
{
#if defined(EPOLL_CLOEXEC)
int fd = epoll_create1(EPOLL_CLOEXEC);
#else // defined(EPOLL_CLOEXEC)
int fd = -1;
errno = EINVAL;
#endif // defined(EPOLL_CLOEXEC)
if (fd == -1 && (errno == EINVAL || errno == ENOSYS))
{
fd = epoll_create(epoll_size);
if (fd != -1)
::fcntl(fd, F_SETFD, FD_CLOEXEC);
}
if (fd == -1)
{
boost::system::error_code ec(errno,
boost::asio::error::get_system_category());
boost::asio::detail::throw_error(ec, "epoll");
}
return fd;
}
int epoll_reactor::do_timerfd_create()
{
#if defined(BOOST_ASIO_HAS_TIMERFD)
# if defined(TFD_CLOEXEC)
int fd = timerfd_create(CLOCK_MONOTONIC, TFD_CLOEXEC);
# else // defined(TFD_CLOEXEC)
int fd = -1;
errno = EINVAL;
# endif // defined(TFD_CLOEXEC)
if (fd == -1 && errno == EINVAL)
{
fd = timerfd_create(CLOCK_MONOTONIC, 0);
if (fd != -1)
::fcntl(fd, F_SETFD, FD_CLOEXEC);
}
return fd;
#else // defined(BOOST_ASIO_HAS_TIMERFD)
return -1;
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
}
epoll_reactor::descriptor_state* epoll_reactor::allocate_descriptor_state()
{
mutex::scoped_lock descriptors_lock(registered_descriptors_mutex_);
return registered_descriptors_.alloc();
}
void epoll_reactor::free_descriptor_state(epoll_reactor::descriptor_state* s)
{
mutex::scoped_lock descriptors_lock(registered_descriptors_mutex_);
registered_descriptors_.free(s);
}
void epoll_reactor::do_add_timer_queue(timer_queue_base& queue)
{
mutex::scoped_lock lock(mutex_);
timer_queues_.insert(&queue);
}
void epoll_reactor::do_remove_timer_queue(timer_queue_base& queue)
{
mutex::scoped_lock lock(mutex_);
timer_queues_.erase(&queue);
}
void epoll_reactor::update_timeout()
{
#if defined(BOOST_ASIO_HAS_TIMERFD)
if (timer_fd_ != -1)
{
itimerspec new_timeout;
itimerspec old_timeout;
int flags = get_timeout(new_timeout);
timerfd_settime(timer_fd_, flags, &new_timeout, &old_timeout);
return;
}
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
interrupt();
}
int epoll_reactor::get_timeout()
{
// By default we will wait no longer than 5 minutes. This will ensure that
// any changes to the system clock are detected after no longer than this.
return timer_queues_.wait_duration_msec(5 * 60 * 1000);
}
#if defined(BOOST_ASIO_HAS_TIMERFD)
int epoll_reactor::get_timeout(itimerspec& ts)
{
ts.it_interval.tv_sec = 0;
ts.it_interval.tv_nsec = 0;
long usec = timer_queues_.wait_duration_usec(5 * 60 * 1000 * 1000);
ts.it_value.tv_sec = usec / 1000000;
ts.it_value.tv_nsec = usec ? (usec % 1000000) * 1000 : 1;
return usec ? 0 : TFD_TIMER_ABSTIME;
}
#endif // defined(BOOST_ASIO_HAS_TIMERFD)
struct epoll_reactor::perform_io_cleanup_on_block_exit
{
explicit perform_io_cleanup_on_block_exit(epoll_reactor* r)
: reactor_(r), first_op_(0)
{
}
~perform_io_cleanup_on_block_exit()
{
if (first_op_)
{
// Post the remaining completed operations for invocation.
if (!ops_.empty())
reactor_->io_service_.post_deferred_completions(ops_);
// A user-initiated operation has completed, but there's no need to
// explicitly call work_finished() here. Instead, we'll take advantage of
// the fact that the task_io_service will call work_finished() once we
// return.
}
else
{
// No user-initiated operations have completed, so we need to compensate
// for the work_finished() call that the task_io_service will make once
// this operation returns.
reactor_->io_service_.work_started();
}
}
epoll_reactor* reactor_;
op_queue<operation> ops_;
operation* first_op_;
};
epoll_reactor::descriptor_state::descriptor_state()
: operation(&epoll_reactor::descriptor_state::do_complete)
{
}
operation* epoll_reactor::descriptor_state::perform_io(uint32_t events)
{
mutex_.lock();
perform_io_cleanup_on_block_exit io_cleanup(reactor_);
mutex::scoped_lock descriptor_lock(mutex_, mutex::scoped_lock::adopt_lock);
// Exception operations must be processed first to ensure that any
// out-of-band data is read before normal data.
static const int flag[max_ops] = { EPOLLIN, EPOLLOUT, EPOLLPRI };
for (int j = max_ops - 1; j >= 0; --j)
{
if (events & (flag[j] | EPOLLERR | EPOLLHUP))
{
while (reactor_op* op = op_queue_[j].front())
{
if (op->perform())
{
op_queue_[j].pop();
io_cleanup.ops_.push(op);
}
else
break;
}
}
}
// The first operation will be returned for completion now. The others will
// be posted for later by the io_cleanup object's destructor.
io_cleanup.first_op_ = io_cleanup.ops_.front();
io_cleanup.ops_.pop();
return io_cleanup.first_op_;
}
void epoll_reactor::descriptor_state::do_complete(
io_service_impl* owner, operation* base,
const boost::system::error_code& ec, std::size_t bytes_transferred)
{
if (owner)
{
descriptor_state* descriptor_data = static_cast<descriptor_state*>(base);
uint32_t events = static_cast<uint32_t>(bytes_transferred);
if (operation* op = descriptor_data->perform_io(events))
{
op->complete(*owner, ec, 0);
}
}
}
} // namespace detail
} // namespace asio
} // namespace boost
#include <boost/asio/detail/pop_options.hpp>
#endif // defined(BOOST_ASIO_HAS_EPOLL)
#endif // BOOST_ASIO_DETAIL_IMPL_EPOLL_REACTOR_IPP
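The file above is Boost.Asio's epoll-based reactor; on Linux (when BOOST_ASIO_HAS_EPOLL is defined) io_service::run() drives it internally, so user code never calls these functions directly. A minimal illustrative sketch, not part of the sample set and assuming Boost plus a C++11 compiler, of the public API that ends up exercising this reactor:

#include <boost/asio.hpp>
#include <iostream>

int main()
{
  boost::asio::io_service io;
  // A one-second timer; its async_wait goes through the reactor's timer queue.
  boost::asio::deadline_timer timer(io, boost::posix_time::seconds(1));
  timer.async_wait([](const boost::system::error_code& ec)
  {
    if (!ec)
      std::cout << "timer fired\n";
  });
  io.run();  // on Linux this loops in epoll_reactor::run() via epoll_wait
  return 0;
}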


@@ -0,0 +1,130 @@
MODULE ObxControls;
(**
project = "BlackBox"
organization = "www.oberon.ch"
contributors = "Oberon microsystems"
version = "System/Rsrc/About"
copyright = "System/Rsrc/About"
license = "Docu/BB-License"
changes = ""
issues = ""
**)
IMPORT Dialog, Ports, Properties, Views;
CONST beginner = 0; advanced = 1; expert = 2; guru = 3; (* user classes *)
TYPE
View = POINTER TO RECORD (Views.View)
size: INTEGER (* border size in mm *)
END;
VAR
data*: RECORD
class*: INTEGER; (* current user class *)
list*: Dialog.List; (* list of currently available sizes, derived from class *)
width*: INTEGER (* width of next view to be opened. Derived from
class, or entered through a text entry field *)
END;
predef: ARRAY 6 OF INTEGER; (* table of predefined sizes *)
PROCEDURE SetList;
BEGIN
IF data.class = beginner THEN
data.list.SetLen(1);
data.list.SetItem(0, "default")
ELSIF data.class = advanced THEN
data.list.SetLen(4);
data.list.SetItem(0, "default");
data.list.SetItem(1, "small");
data.list.SetItem(2, "medium");
data.list.SetItem(3, "large");
ELSE
data.list.SetLen(6);
data.list.SetItem(0, "default");
data.list.SetItem(1, "small");
data.list.SetItem(2, "medium");
data.list.SetItem(3, "large");
data.list.SetItem(4, "tiny");
data.list.SetItem(5, "huge");
END
END SetList;
(* View *)
PROCEDURE (v: View) CopyFromSimpleView (source: Views.View);
BEGIN
v.size := source(View).size
END CopyFromSimpleView;
PROCEDURE (v: View) Restore (f: Views.Frame; l, t, r, b: INTEGER);
BEGIN (* fill view with a red square of size v.size *)
IF v.size = 0 THEN v.size := predef[0] END; (* lazy initialization of size *)
f.DrawRect(0, 0, v.size, v.size, Ports.fill, Ports.red)
END Restore;
PROCEDURE (v: View) HandlePropMsg (VAR msg: Views.PropMessage);
BEGIN
WITH msg: Properties.SizePref DO
IF v.size = 0 THEN v.size := predef[0] END; (* lazy initialization of size *)
msg.w := v.size; msg.h := v.size (* tell environment about desired width and height *)
ELSE (* ignore other messages *)
END
END HandlePropMsg;
(* notifiers *)
PROCEDURE ClassNotify* (op, from, to: INTEGER);
BEGIN (* react to change in data.class *)
IF op = Dialog.changed THEN
IF (to = beginner) OR (to = advanced) & (data.list.index > 3) THEN
(* if class is reduced, make sure that selection contains legal elements *)
data.list.index := 0; data.width := predef[0]; (* modify interactor *)
Dialog.Update(data) (* redraw controls where necessary *)
END;
SetList;
Dialog.UpdateList(data.list) (* reconstruct list box contents *)
END
END ClassNotify;
PROCEDURE ListNotify* (op, from, to: INTEGER);
BEGIN (* react to a change in data.list; the selected index is passed in `to` *)
IF op = Dialog.changed THEN
data.width := predef[to]; (* modify interactor *)
Dialog.Update(data) (* redraw controls where necessary *)
END
END ListNotify;
(* guards *)
PROCEDURE ListGuard* (VAR par: Dialog.Par);
BEGIN (* disable list box for a beginner *)
par.disabled := data.class = beginner
END ListGuard;
PROCEDURE WidthGuard* (VAR par: Dialog.Par);
BEGIN (* make text entry field read-only if user is not guru *)
par.readOnly := data.class # guru
END WidthGuard;
(* commands *)
PROCEDURE Open*;
VAR v: View;
BEGIN
NEW(v); (* create and initialize a new view *)
v.size := data.width * Ports.mm; (* define view's size in function of class *)
Views.OpenAux(v, "Example") (* open the view in a window *)
END Open;
BEGIN (* initialization of global variables *)
predef[0] := 40; predef[1] := 30; predef[2] := 50; (* predefined sizes *)
predef[3] := 70; predef[4] := 20; predef[5] := 100;
data.class := beginner; (* default values *)
data.list.index := 0;
data.width := predef[0];
SetList
END ObxControls.


@@ -0,0 +1,71 @@
MODULE ObxFact;
(**
project = "BlackBox"
organization = "www.oberon.ch"
contributors = "Oberon microsystems"
version = "System/Rsrc/About"
copyright = "System/Rsrc/About"
license = "Docu/BB-License"
changes = ""
issues = ""
**)
IMPORT
Stores, Models, TextModels, TextControllers, Integers;
PROCEDURE Read(r: TextModels.Reader; VAR x: Integers.Integer);
VAR i, len, beg: INTEGER; ch: CHAR; buf: POINTER TO ARRAY OF CHAR;
BEGIN
r.ReadChar(ch);
WHILE ~r.eot & (ch <= " ") DO r.ReadChar(ch) END;
ASSERT(~r.eot & (((ch >= "0") & (ch <= "9")) OR (ch = "-")));
beg := r.Pos() - 1; len := 0;
REPEAT INC(len); r.ReadChar(ch) UNTIL r.eot OR (ch < "0") OR (ch > "9");
NEW(buf, len + 1);
i := 0; r.SetPos(beg);
REPEAT r.ReadChar(buf[i]); INC(i) UNTIL i = len;
buf[i] := 0X;
Integers.ConvertFromString(buf^, x)
END Read;
PROCEDURE Write(w: TextModels.Writer; x: Integers.Integer);
VAR i: INTEGER;
BEGIN
IF Integers.Sign(x) < 0 THEN w.WriteChar("-") END;
i := Integers.Digits10Of(x);
IF i # 0 THEN
REPEAT DEC(i); w.WriteChar(Integers.ThisDigit10(x, i)) UNTIL i = 0
ELSE w.WriteChar("0")
END
END Write;
PROCEDURE Compute*;
VAR beg, end, i, n: INTEGER; ch: CHAR;
s: Stores.Operation;
r: TextModels.Reader; w: TextModels.Writer; attr: TextModels.Attributes;
c: TextControllers.Controller;
x: Integers.Integer;
BEGIN
c := TextControllers.Focus();
IF (c # NIL) & c.HasSelection() THEN
c.GetSelection(beg, end);
r := c.text.NewReader(NIL); r.SetPos(beg); r.ReadChar(ch);
WHILE ~r.eot & (beg < end) & (ch <= " ") DO r.ReadChar(ch); INC(beg) END;
IF ~r.eot & (beg < end) THEN
r.ReadPrev; Read(r, x);
end := r.Pos(); r.ReadPrev; attr :=r.attr;
IF (Integers.Sign(x) > 0) & (Integers.Compare(x, Integers.Long(MAX(LONGINT))) <= 0) THEN
n := SHORT(Integers.Short(x)); i := 2; x := Integers.Long(1);
WHILE i <= n DO x := Integers.Product(x, Integers.Long(i)); INC(i) END;
Models.BeginScript(c.text, "computation", s);
c.text.Delete(beg, end);
w := c.text.NewWriter(NIL); w.SetPos(beg); w.SetAttr(attr);
Write(w, x);
Models.EndScript(c.text, s)
END
END
END
END Compute;
END ObxFact.


@@ -0,0 +1,76 @@
*Basic example of transport model from GAMS model library
$Title A Transportation Problem (TRNSPORT,SEQ=1)
$Ontext
This problem finds a least cost shipping schedule that meets
requirements at markets and supplies at factories.
Dantzig, G B, Chapter 3.3. In Linear Programming and Extensions.
Princeton University Press, Princeton, New Jersey, 1963.
This formulation is described in detail in:
Rosenthal, R E, Chapter 2: A GAMS Tutorial. In GAMS: A User's Guide.
The Scientific Press, Redwood City, California, 1988.
The line numbers will not match those in the book because of these
comments.
$Offtext
Sets
i canning plants / seattle, san-diego /
j markets / new-york, chicago, topeka / ;
Parameters
a(i) capacity of plant i in cases
/ seattle 350
san-diego 600 /
b(j) demand at market j in cases
/ new-york 325
chicago 300
topeka 275 / ;
Table d(i,j) distance in thousands of miles
new-york chicago topeka
seattle 2.5 1.7 1.8
san-diego 2.5 1.8 1.4 ;
Scalar f freight in dollars per case per thousand miles /90/ ;
Parameter c(i,j) transport cost in thousands of dollars per case ;
c(i,j) = f * d(i,j) / 1000 ;
Variables
x(i,j) shipment quantities in cases
z total transportation costs in thousands of dollars ;
Positive Variable x ;
Equations
cost define objective function
supply(i) observe supply limit at plant i
demand(j) satisfy demand at market j ;
cost .. z =e= sum((i,j), c(i,j)*x(i,j)) ;
supply(i) .. sum(j, x(i,j)) =l= a(i) ;
demand(j) .. sum(i, x(i,j)) =g= b(j) ;
Model transport /all/ ;
Solve transport using lp minimizing z ;
Display x.l, x.m ;
$ontext
#user model library stuff
Main topic Basic GAMS
Featured item 1 Trnsport model
Featured item 2
Featured item 3
Featured item 4
Description
Basic example of transport model from GAMS model library
$offtext


@@ -0,0 +1,9 @@
static const char* SimpleFragmentShader = STRINGIFY(
varying vec4 FrontColor;
void main(void)
{
gl_FragColor = FrontColor;
}
);


@@ -0,0 +1,6 @@
varying vec4 v_color;
void main()
{
gl_FragColor = v_color;
}

samples/GLSL/myvertex.vrx (new file, 12 lines)

@@ -0,0 +1,12 @@
uniform mat4 u_MVPMatrix;
attribute vec4 a_position;
attribute vec4 a_color;
varying vec4 v_color;
void main()
{
v_color = a_color;
gl_Position = u_MVPMatrix * a_position;
}


@@ -0,0 +1,48 @@
#version 330 core
// cross-unit recursion
void main() {}
// two-level recursion
float cbar(int);
void cfoo(float)
{
cbar(2);
}
// four-level, out of order
void CB();
void CD();
void CA() { CB(); }
void CC() { CD(); }
// high degree
void CBT();
void CDT();
void CAT() { CBT(); CBT(); CBT(); }
void CCT() { CDT(); CDT(); CBT(); }
// not recursive
void norA() {}
void norB() { norA(); }
void norC() { norA(); }
void norD() { norA(); }
void norE() { norB(); }
void norF() { norB(); }
void norG() { norE(); }
void norH() { norE(); }
void norI() { norE(); }
// not recursive, but with a call leading into a cycle if ignoring direction
void norcA() { }
void norcB() { norcA(); }
void norcC() { norcB(); }
void norcD() { norcC(); norcB(); } // head of cycle
void norcE() { norcD(); } // lead into cycle


@@ -0,0 +1,2 @@
#!/usr/bin/env groovy
println "Hello World"


@@ -0,0 +1,9 @@
html {
head {
component "bootstrap"
title "Bootstrap Template"
}
html {
}
}


@@ -0,0 +1,9 @@
html {
head {
title "Example Template"
}
body {
p "This is a quick template example"
}
}


@@ -0,0 +1,46 @@
theory HelloWorld
imports Main
begin
section{*Playing around with Isabelle*}
text{* creating a lemma with the name hello_world*}
lemma hello_world: "True" by simp
(*inspecting it*)
thm hello_world
text{* defining a string constant HelloWorld *}
definition HelloWorld :: "string" where
"HelloWorld \<equiv> ''Hello World!''"
(*reversing HelloWorld twice yields HelloWorld again*)
theorem "rev (rev HelloWorld) = HelloWorld"
by (fact List.rev_rev_ident)
text{*now we delete the already proven List.rev_rev_ident lemma and show it by hand*}
declare List.rev_rev_ident[simp del]
hide_fact List.rev_rev_ident
(*It's trivial since we can just 'execute' it*)
corollary "rev (rev HelloWorld) = HelloWorld"
apply(simp add: HelloWorld_def)
done
text{*does it hold in general?*}
theorem rev_rev_ident:"rev (rev l) = l"
proof(induction l)
case Nil thus ?case by simp
next
case (Cons l ls)
assume IH: "rev (rev ls) = ls"
have "rev (l#ls) = (rev ls) @ [l]" by simp
hence "rev (rev (l#ls)) = rev ((rev ls) @ [l])" by simp
also have "\<dots> = [l] @ rev (rev ls)" by simp
finally show "rev (rev (l#ls)) = l#ls" using IH by simp
qed
corollary "\<forall>(l::string). rev (rev l) = l" by(fastforce intro: rev_rev_ident)
end


@@ -0,0 +1,24 @@
/*
invoke the endpoint by calling it in a browser:
http://<hanaserveraddress>:<xsengineport(usually 8000)>/<path>/<to>/<endpoint>/helloHanaMath.xsjslib?x=4&y=2
e.g.:
http://192.168.178.20:8000/geekflyer/linguist/helloHanaEndpoint.xsjs?x=4&y=2
*/
var hanaMath = $.import("./helloHanaMath.xsjslib");
var x = parseFloat($.request.parameters.get("x"));
var y = parseFloat($.request.parameters.get("y"));
var result = hanaMath.multiply(x, y);
var output = {
title: "Hello HANA XS - do some simple math",
input: {x: x, y: y},
result: result
};
$.response.contentType = "application/json";
$.response.statusCode = $.net.http.OK;
$.response.setBody(JSON.stringify(output));


@@ -0,0 +1,9 @@
/* simple hana xs demo library, which can be used by multiple endpoints */
function multiply(x, y) {
return x * y;
}
function add(x, y) {
return x + y;
}


@@ -0,0 +1,7 @@
(function(window, angular) {
Array.prototype.last = function() {
return this[this.length-1];
};
var app = angular.module('ConwayGameOfLife', []);


@@ -0,0 +1,3 @@
})(window, window.angular);


@@ -0,0 +1,232 @@
(* Content-type: application/vnd.wolfram.mathematica *)
(*** Wolfram Notebook File ***)
(* http://www.wolfram.com/nb *)
(* CreatedBy='Mathematica 9.0' *)
(*CacheID: 234*)
(* Internal cache information:
NotebookFileLineBreakTest
NotebookFileLineBreakTest
NotebookDataPosition[ 157, 7]
NotebookDataLength[ 7164, 223]
NotebookOptionsPosition[ 6163, 182]
NotebookOutlinePosition[ 6508, 197]
CellTagsIndexPosition[ 6465, 194]
WindowFrame->Normal*)
(* Beginning of Notebook Content *)
Notebook[{
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{
RowBox[{"Solve", "[",
RowBox[{
RowBox[{"y", "'"}], "\[Equal]", " ", "xy"}], "]"}],
"\[IndentingNewLine]"}]], "Input",
CellChangeTimes->{{3.6112716342092056`*^9, 3.6112716549793935`*^9}}],
Cell[BoxData[
RowBox[{"{",
RowBox[{"{",
RowBox[{"xy", "\[Rule]",
SuperscriptBox["y", "\[Prime]",
MultilineFunction->None]}], "}"}], "}"}]], "Output",
CellChangeTimes->{3.6112716579295626`*^9}]
}, Open ]],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{"Log", "[",
RowBox[{"Sin", "[", "38", "]"}], "]"}]], "Input",
CellChangeTimes->{{3.611271663920905*^9, 3.6112716759275913`*^9}}],
Cell[BoxData[
RowBox[{"Log", "[",
RowBox[{"Sin", "[", "38", "]"}], "]"}]], "Output",
CellChangeTimes->{3.611271678256725*^9}]
}, Open ]],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{"N", "[",
RowBox[{"Log", "[",
RowBox[{"Sin", "[", "38", "]"}], "]"}], "]"}]], "Input",
NumberMarks->False],
Cell[BoxData[
RowBox[{"-", "1.2161514009320473`"}]], "Output",
CellChangeTimes->{3.611271682061942*^9}]
}, Open ]],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{"Abs", "[",
RowBox[{"-", "1.2161514009320473`"}], "]"}]], "Input",
NumberMarks->False],
Cell[BoxData["1.2161514009320473`"], "Output",
CellChangeTimes->{3.6112716842780695`*^9}]
}, Open ]],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{"RealDigits", "[", "1.2161514009320473`", "]"}]], "Input",
NumberMarks->False],
Cell[BoxData[
RowBox[{"{",
RowBox[{
RowBox[{"{",
RowBox[{
"1", ",", "2", ",", "1", ",", "6", ",", "1", ",", "5", ",", "1", ",", "4",
",", "0", ",", "0", ",", "9", ",", "3", ",", "2", ",", "0", ",", "4",
",", "7"}], "}"}], ",", "1"}], "}"}]], "Output",
CellChangeTimes->{3.611271685319129*^9}]
}, Open ]],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{
RowBox[{"Graph", "[",
RowBox[{"Log", "[", "x", "]"}], "]"}], "\[IndentingNewLine]"}]], "Input",
CellChangeTimes->{{3.611271689258354*^9, 3.611271702038085*^9}}],
Cell[BoxData[
RowBox[{"Graph", "[",
RowBox[{"Log", "[", "x", "]"}], "]"}]], "Output",
CellChangeTimes->{3.611271704295214*^9}]
}, Open ]],
Cell[BoxData[""], "Input",
CellChangeTimes->{{3.611271712769699*^9, 3.6112717423153887`*^9}}],
Cell[CellGroupData[{
Cell[BoxData[
RowBox[{
RowBox[{"Plot", "[",
RowBox[{
RowBox[{"Log", "[", "x", "]"}], ",", " ",
RowBox[{"{",
RowBox[{"x", ",", " ", "0", ",", " ", "10"}], "}"}]}], "]"}],
"\[IndentingNewLine]"}]], "Input",
CellChangeTimes->{{3.6112717573482485`*^9, 3.6112717747822456`*^9}}],
Cell[BoxData[
GraphicsBox[{{}, {},
{Hue[0.67, 0.6, 0.6], LineBox[CompressedData["
1:eJwVzXs81Pkex/GZH7XlsutSQprwqxTSZVfJGp9P6UYqlyxHUhTaLrq4JpVK
0SHRisGWjYiEbHSvb+Q27rllmYwaY6JpwxgZTI7zx/vxejz/eht4H3PyoRgM
Rsj0/t+1MEPjP1Zc8O6L0tCYkJERTokxP5YLLR+MQy2qZWSzX62gWcaFn9s7
5sVFyohY4ZvLs5Ya6AheLQxnyIgFe4fllag6yH4zayhMcYw0FU5SRl8bweS/
wyVFa0aJBsz2VDVrAl8V299DGKPk1yWJllEHmqD42vuI4RopiRvJlYS9bYLZ
a2c4j3pJyS8JbT7eeW/By6ht44vkEXKuxtRu1d4WOB5QmStjSUhO0eMleTda
4EZtHmU5PEyaORsUFte1QFHRg6WjFcNkkZ/bC+11rVC0s8n9nf8wqVGINGNo
tkFRzD3HsYohosXu0misbAdxXml1VdQgKSi80nXErBNo/oP47aliMqAxEGvn
1QlVgoRvezzExCjYznppYifkn+K6CVli8peV8m2BrBNM20LljlmfyXVurK97
RRfcVCpPCXg8QIIF14a2eLyHn6Y4909//UTSlWsvqm/qge1fVjduzhISa/Zp
jwjPHvCM6ZD7BQgJz9/E/GtIDyRsSj3Svl5ItJtj+uru9cBdE2PXZH4vSeDY
20arfYAT6Z3e8axecnFxw49TXR/gU5X5vDu5H4kfvE0RnxSAsqvDMcduPmFk
jD7rihGA7RmZ5qlYPuEo6vFq7gigR67QPetXPqnm+rJy2wUA0hVVHindZOmu
yQwfy17Y4OU185n7e/LpoNH9bqYQPPrPvwn+2kkOXT/zqim+DzJ72WEzdrcT
SprBJ7l9UD/Fag2c005SXasZhWV9kH51Z/aqhjZSo6dpc3WkD4L1tqolbGgj
JndzqmzdRPD67PLxVrNWIn7e0lS28BMs6Ba9FM1pJv7CZYLign6IeWFYmrqk
jvR4/jOrlNsPoqNsieZftcS5I9qsvrcf8tnmIzq6tcSiVnRKqDsALqbKTVU/
1RCFoiw1ragBULG3LYphVhNOuIF1yN7PkFMpYVXI35BSTZ2UdWpfgMls07e/
84QoGUQa8S0GgVn/55MIdixUWyWsOLtpEAIiTazYlglw2e3W2gVOg5BMOVFO
zolAxT/ZsvvwIJAvj7SczqbC+Hex37ubgxD8udJ0tkcmfOa55DRSQ8DwsFzc
6lkIdRyjZa/rhsAywLBSze45xKnVGt/eJwFLB1UN7sVq8O7aRRTqRsFbq7Mr
JqcdTlREeh8zGoeOsKZ1bgF8KDqu4qxtK4c/T0q26boJ4PbpwwMrXRn4N9vd
qamzDy6kTzqOiJmo6OOuteZtPzBaevBFmALy6nNqfwkTw5JA39BdxjPwSH3B
vlWGX6FXmvyb8suZeCtkhRV5NAh2wkNnrp+YhaOXrkQMdg/Bjt54ExZLCdti
v+y2+XcYBt54R1TnKyOH4R+txpOAmXr7Apu9quiaByGbG0dACaRePMmPmLmw
vX84Swpbvrh/M3RRQziRFnP5wih0lB1gupuqY0FCbZyewzcoiS731JeqY4Zj
3+qZP4yB74ygnoYGDcz5GOJ8uXwM9p88XaKSqonn9R26+EdlsMLPpMHeaw4K
rc1neaqOQ6OGqXLQurmYKexKyno4Ds8LLqSZKmhhhvxW6cjWCTjNNHaoe6+F
pidKHHi9E6DEC9vqXzwPGaH7eO6hkyDMNkhMD9fGsUD+Knv5JCQu1VF86qKD
h3vll15HyyE+1bfKS18XbTje/KqZ38E9cU+DikgXNYxUk++f/Q5jG7Nk6a/m
49yHih6fJ7+DQLghtCxKD9We/pFtf2wKMtir5td7LcDHFdUyrmgK8i8Fqfst
Z2H5rdC2ZGMGRrns36YgZWHfc/sj7Z4MNOfdzo2qX4jaWiITpSQGcpal5ddv
08c4nrYPVjPw3OurnG1P9ZGdfship5yB2+e7ZNUsMsAzD/MLtFcycb1/1W71
Kwb4qn7LsIcnE9P1vBfVSQ1QUbd5z75rTFz05m7Sjt2GeHJ9UIrOCybGLy8z
bn5liLETFcsURUz0lSi+5RrTGL/GlX1jDoXeRcP6V67R6DRvQNHcmsIjF5wn
7RJoPPVD0ph42kHOxe9U/qDR/97LrjtAYbQ0KC4+iUa6N+b4nPUUFqyTTSTf
pDFTFtw6bEOhrHSqPTuPRo1786Pv21IY36xytbyKxo0v5z7UdKEwNfPowctc
GuUeojTutDMDG2y21tIYpHQ98NxvFD7Sih+vbaBRfeZZ6YArhTx3zYMtbTRC
CmNNqTuFRgIdm48CGveGmxUf2kfhyuIw1h0hjasPiNIWelFoealL5iOiMZKf
HdA6bXujmw/6B2gk7zZK2PspPHlYnzU0RGN40raf1XwpDLc6L/tbMv0vikor
n/Yl1Y+tgVIayzZ/kIT6UcgpzIwZG6Px0d7RwA8HKcyIUPR7Nk7j8sLHN2/8
TmGeo8+G8Ekab1ncfmR7iMJiw8oF1t9pnF9RQuTTfiVZIpuaonFCb+xJ0WEK
/wc13qzo
"]]}},
AspectRatio->NCache[GoldenRatio^(-1), 0.6180339887498948],
Axes->True,
AxesLabel->{None, None},
AxesOrigin->{0, 0},
Method->{},
PlotRange->{{0, 10}, {-1.623796532045525, 2.3025850725858823`}},
PlotRangeClipping->True,
PlotRangePadding->{
Scaled[0.02],
Scaled[0.02]}]], "Output",
CellChangeTimes->{3.6112717778594217`*^9}]
}, Open ]]
},
WindowSize->{716, 833},
WindowMargins->{{Automatic, 214}, {Automatic, 26}},
FrontEndVersion->"9.0 for Microsoft Windows (64-bit) (January 25, 2013)",
StyleDefinitions->"Default.nb"
]
(* End of Notebook Content *)
(* Internal cache information *)
(*CellTagsOutline
CellTagsIndex->{}
*)
(*CellTagsIndex
CellTagsIndex->{}
*)
(*NotebookFileOutline
Notebook[{
Cell[CellGroupData[{
Cell[579, 22, 224, 6, 52, "Input"],
Cell[806, 30, 211, 6, 31, "Output"]
}, Open ]],
Cell[CellGroupData[{
Cell[1054, 41, 155, 3, 31, "Input"],
Cell[1212, 46, 130, 3, 31, "Output"]
}, Open ]],
Cell[CellGroupData[{
Cell[1379, 54, 137, 4, 31, "Input"],
Cell[1519, 60, 105, 2, 31, "Output"]
}, Open ]],
Cell[CellGroupData[{
Cell[1661, 67, 113, 3, 31, "Input"],
Cell[1777, 72, 90, 1, 31, "Output"]
}, Open ]],
Cell[CellGroupData[{
Cell[1904, 78, 102, 2, 31, "Input"],
Cell[2009, 82, 321, 8, 31, "Output"]
}, Open ]],
Cell[CellGroupData[{
Cell[2367, 95, 191, 4, 52, "Input"],
Cell[2561, 101, 131, 3, 31, "Output"]
}, Open ]],
Cell[2707, 107, 94, 1, 31, "Input"],
Cell[CellGroupData[{
Cell[2826, 112, 299, 8, 52, "Input"],
Cell[3128, 122, 3019, 57, 265, "Output"]
}, Open ]]
}
]
*)
(* End of internal cache information *)

File diff suppressed because it is too large


@@ -0,0 +1,8 @@
(* ::Package:: *)
(* Problem12.m *)
(* Author: William Woodruff *)
(* Problem: What is the value of the first triangle number to have over five hundred divisors? *)
Do[If[Length[Divisors[Binomial[i + 1, 2]]] > 500,
Print[Binomial[i + 1, 2]]; Break[]], {i, 1000000}]

samples/Nix/nginx.nix (new file, 80 lines)

@@ -0,0 +1,80 @@
{ stdenv, fetchurl, fetchgit, openssl, zlib, pcre, libxml2, libxslt, expat
, rtmp ? false
, fullWebDAV ? false
, syslog ? false
, moreheaders ? false, ...}:
let
version = "1.4.4";
mainSrc = fetchurl {
url = "http://nginx.org/download/nginx-${version}.tar.gz";
sha256 = "1f82845mpgmhvm151fhn2cnqjggw9w7cvsqbva9rb320wmc9m63w";
};
rtmp-ext = fetchgit {
url = git://github.com/arut/nginx-rtmp-module.git;
rev = "1cfb7aeb582789f3b15a03da5b662d1811e2a3f1";
sha256 = "03ikfd2l8mzsjwx896l07rdrw5jn7jjfdiyl572yb9jfrnk48fwi";
};
dav-ext = fetchgit {
url = git://github.com/arut/nginx-dav-ext-module.git;
rev = "54cebc1f21fc13391aae692c6cce672fa7986f9d";
sha256 = "1dvpq1fg5rslnl05z8jc39sgnvh3akam9qxfl033akpczq1bh8nq";
};
syslog-ext = fetchgit {
url = https://github.com/yaoweibin/nginx_syslog_patch.git;
rev = "165affd9741f0e30c4c8225da5e487d33832aca3";
sha256 = "14dkkafjnbapp6jnvrjg9ip46j00cr8pqc2g7374z9aj7hrvdvhs";
};
moreheaders-ext = fetchgit {
url = https://github.com/agentzh/headers-more-nginx-module.git;
rev = "refs/tags/v0.23";
sha256 = "12pbjgsxnvcf2ff2i2qdn39q4cm5czlgrng96j8ml4cgxvnbdh39";
};
in
stdenv.mkDerivation rec {
name = "nginx-${version}";
src = mainSrc;
buildInputs = [ openssl zlib pcre libxml2 libxslt
] ++ stdenv.lib.optional fullWebDAV expat;
patches = if syslog then [ "${syslog-ext}/syslog_1.4.0.patch" ] else [];
configureFlags = [
"--with-http_ssl_module"
"--with-http_spdy_module"
"--with-http_xslt_module"
"--with-http_sub_module"
"--with-http_dav_module"
"--with-http_gzip_static_module"
"--with-http_secure_link_module"
"--with-ipv6"
# Install destination problems
# "--with-http_perl_module"
] ++ stdenv.lib.optional rtmp "--add-module=${rtmp-ext}"
++ stdenv.lib.optional fullWebDAV "--add-module=${dav-ext}"
++ stdenv.lib.optional syslog "--add-module=${syslog-ext}"
++ stdenv.lib.optional moreheaders "--add-module=${moreheaders-ext}";
preConfigure = ''
export NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -I${libxml2 }/include/libxml2"
'';
# escape example
postInstall = ''
mv $out/sbin $out/bin ''' ''${
${ if true then ${ "" } else false }
'';
meta = {
description = "A reverse proxy and lightweight webserver";
maintainers = [ stdenv.lib.maintainers.raskin];
platforms = stdenv.lib.platforms.all;
inherit version;
};
}
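# Illustrative instantiation (not part of the original sample; assumes a
# nixpkgs-style callPackage that supplies stdenv, fetchurl and the other inputs):
#   nginxWithRtmp = pkgs.callPackage ./nginx.nix { rtmp = true; };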


@@ -0,0 +1,72 @@
/** Replicate Imai, Jain and Ching Econometrica 2009 (incomplete).
**/
#include "IJCEmet2009.h"
Kapital::Kapital(L,const N,const entrant,const exit,const KP){
StateVariable(L,N);
this.entrant = entrant;
this.exit = exit;
this.KP = KP;
actual = Kbar*vals/(N-1);
upper = log(actual~.Inf);
}
Kapital::Transit(FeasA) {
decl ent =CV(entrant), stayout = FeasA[][exit.pos], tprob, sigu = CV(KP[SigU]);
if (!v && !ent) return { <0>, ones(stayout) };
tprob = ent ? probn( (upper-CV(KP[Kbe]))/sigu )
: probn( (upper-(CV(KP[Kb0])+CV(KP[Kb2])*upper[v])) / sigu );
tprob = tprob[1:] - tprob[:N-1];
return { vals, tprob.*(1-stayout)+(1.0~zeros(1,N-1)).*stayout };
}
FirmEntry::Run() {
Initialize();
GenerateSample();
BDP->BayesianDP();
}
FirmEntry::Initialize() {
Rust::Initialize(Reachable,0);
sige = new StDeviations("sige",<0.3,0.3>,0);
entrant = new LaggedAction("entrant",d);
KP = new array[Kparams];
KP[Kbe] = new Positive("be",0.5);
KP[Kb0] = new Free("b0",0.0);
KP[Kb1] = new Determined("b1",0.0);
KP[Kb2] = new Positive("b2",0.4);
KP[SigU] = new Positive("sigu",0.4);
EndogenousStates(K = new Kapital("K",KN,entrant,d,KP),entrant);
SetDelta(new Probability("delta",0.85));
kcoef = new Positive("kcoef",0.1);
ecost = new Negative("ec",-0.4);
CreateSpaces();
}
FirmEntry::GenerateSample() {
Volume = LOUD;
EM = new ValueIteration(0);
// EM -> Solve(0,0);
data = new DataSet(0,EM);
data->Simulate(DataN,DataT,0,FALSE);
data->Print("firmentry.xls");
BDP = new ImaiJainChing("FMH",data,EM,ecost,sige,kcoef,KP,delta);
}
/** Capital stock can be positive only for incumbents.
**/
FirmEntry::Reachable() { return CV(entrant)*CV(K) ? 0 : new FirmEntry() ; }
/** The one period return.
<DD>
<pre>U = </pre>
</DD>
**/
FirmEntry::Utility() {
decl ent = CV(entrant),
u =
ent*CV(ecost)+(1-ent)*CV(kcoef)*AV(K)
| 0.0;
return u;
}


@@ -0,0 +1,63 @@
/** Client and Server classes for parallel optimization using CFMPI.**/
#include "ParallelObjective.h"
/** Set up MPI Client-Server support for objective optimization.
@param obj `Objective' to parallelize
@param DONOTUSECLIENT TRUE (default): the client node does no object evaluation<br>FALSE: after putting the servers to work, the client node does one evaluation.
**/
ParallelObjective(obj,DONOTUSECLIENT) {
if (isclass(obj.p2p)) {oxwarning("P2P object already exists for "+obj.L+". Nothing changed"); return;}
obj.p2p = new P2P(DONOTUSECLIENT,new ObjClient(obj),new ObjServer(obj));
}
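/** Illustrative call (not part of the original sample; `myObj` stands for a
hypothetical `Objective` created elsewhere):
ParallelObjective(myObj, TRUE);  // TRUE: the client node only coordinates; servers evaluate
**/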
ObjClient::ObjClient(obj) { this.obj = obj; }
ObjClient::Execute() { }
ObjServer::ObjServer(obj) {
this.obj = obj;
basetag = P2P::STOP_TAG+1;
iml = obj.NvfuncTerms;
Nparams = obj.nstruct;
}
/** Wait on the objective client.
**/
ObjServer::Loop(nxtmsgsz) {
Nparams = nxtmsgsz; //free param length is no greater than Nparams
if (Volume>QUIET) println("ObjServer server ",ID," Nparams ",Nparams);
Server::Loop(Nparams);
Recv(ANY_TAG); //receive the ending parameter vector
obj->Encode(Buffer[:Nparams-1]); //encode it.
}
/** Do the objective evaluation.
Receive structural parameter vector and `Objective::Encode`() it.
Call `Objective::vfunc`().
@return Nparams (max. length of next expected message);
**/
ObjServer::Execute() {
obj->Decode(Buffer[:obj.nfree-1]);
Buffer = obj.cur.V[] = obj->vfunc();
if (Volume>QUIET) println("Server Executive: ",ID," vfunc[0]= ",Buffer[0]);
return obj.nstruct;
}
CstrServer::CstrServer(obj) { ObjServer(obj); }
SepServer::SepServer(obj) { ObjServer(obj); }
CstrServer::Execute() {
obj->Encode(Buffer);
obj->Lagrangian(0);
return rows(Buffer = obj.cur->Vec());
}
/** Separable objective evaluations.
**/
SepServer::Execute() {
obj.Kvar.v = imod(Tag-basetag,obj.K);
obj->Encode(Buffer,TRUE);
Buffer = obj.Kvar->PDF() * obj->vfunc();
return obj.NvfuncTerms;
}

samples/Ox/particle.oxo (new file, 38 lines)

@@ -0,0 +1,38 @@
nldge::ParticleLogLikeli()
{ decl it, ip,
mss, mbas, ms, my, mx, vw, vwi, dws,
mhi, mhdet, loglikeli, mData,
vxm, vxs, mxm=<>, mxsu=<>, mxsl=<>,
time, timeall, timeran=0, timelik=0, timefun=0, timeint=0, timeres=0;
mData = GetData(m_asY);
mhdet = sqrt((2*M_PI)^m_cY * determinant(m_mMSbE.^2)); // covariance determinant
mhi = invert(m_mMSbE.^2); // invert covariance of measurement shocks
ms = m_vSss + zeros(m_cPar, m_cS); // start particles
mx = m_vXss + zeros(m_cPar, m_cX); // steady state of state and policy
loglikeli = 0; // init likelihood
//timeall=timer();
for(it = 0; it < sizer(mData); it++)
{
mss = rann(m_cPar, m_cSS) * m_mSSbE; // state noise
fg(&ms, ms, mx, mss); // transition prior as proposal
mx = m_oApprox.FastInterpolate(ms); // interpolate
fy(&my, ms, mx, zeros(m_cPar, m_cMS)); // evaluate importance weights
my -= mData[it][]; // observation error
vw = exp(-0.5 * outer(my,mhi,'d')' )/mhdet; // vw = exp(-0.5 * sumr(my*mhi .*my ) )/mhdet;
vw = vw .== .NaN .? 0 .: vw; // no policy can happen for extreme particles
dws = sumc(vw);
if(dws==0) return -.Inf; // or extremely wrong parameters
loglikeli += log(dws/m_cPar) ; // loglikelihood contribution
//timelik += (timer()-time)/100;
//time=timer();
vwi = resample(vw/dws)-1; // selection step in c++
ms = ms[vwi][]; // on normalized weights
mx = mx[vwi][];
}
return loglikeli;
}

samples/Pan/test.pan (new file, 54 lines)

@@ -0,0 +1,54 @@
object template pantest;
# Very simple pan test file
"/long/decimal" = 123;
"/long/octal" = 0755;
"/long/hexadecimal" = 0xFF;
"/double/simple" = 0.01;
"/double/pi" = 3.14159;
"/double/exponent" = 1e-8;
"/double/scientific" = 1.3E10;
"/string/single" = 'Faster, but escapes like \t, \n and \x3d don''t work, but '' should work.';
"/string/double" = "Slower, but escapes like \t, \n and \x3d do work";
variable TEST = 2;
"/x2" = to_string(TEST);
"/x2" ?= 'Default value';
"/x3" = 1 + 2 + value("/long/decimal");
"/x4" = undef;
"/x5" = null;
variable e ?= error("Test error message");
# include gmond config for services-monitoring
include { 'site/ganglia/gmond/services-monitoring' };
"/software/packages"=pkg_repl("httpd","2.2.3-43.sl5.3",PKG_ARCH_DEFAULT);
"/software/packages"=pkg_repl("php");
# Example function
function show_things_view_for_stuff = {
thing = ARGV[0];
foreach( i; mything; STUFF ) {
if ( thing == mything ) {
return( true );
} else {
return SELF;
};
};
false;
};
variable HERE = <<EOF;
; This example demonstrates an in-line heredoc style config file
[main]
awesome = true
EOF
variable small = false;#This should be highlighted normally again.

samples/Pike/Error.pmod (new file, 38 lines)

@@ -0,0 +1,38 @@
#pike __REAL_VERSION__
constant Generic = __builtin.GenericError;
constant Index = __builtin.IndexError;
constant BadArgument = __builtin.BadArgumentError;
constant Math = __builtin.MathError;
constant Resource = __builtin.ResourceError;
constant Permission = __builtin.PermissionError;
constant Decode = __builtin.DecodeError;
constant Cpp = __builtin.CppError;
constant Compilation = __builtin.CompilationError;
constant MasterLoad = __builtin.MasterLoadError;
constant ModuleLoad = __builtin.ModuleLoadError;
//! Returns an Error object for any argument it receives. If the
//! argument already is an Error object or is empty, it does nothing.
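//! Illustrative behaviour (not part of the original sample; follows from
//! the function body below):
//!   mkerror("out of memory")   // returns Error.Generic("out of memory")
//!   mkerror(UNDEFINED)         // returns UNDEFINED unchanged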
object mkerror(mixed error)
{
if (error == UNDEFINED)
return error;
if (objectp(error) && error->is_generic_error)
return error;
if (arrayp(error))
return Error.Generic(@error);
if (stringp(error))
return Error.Generic(error);
return Error.Generic(sprintf("%O", error));
}

samples/Pike/FakeFile.pike (new file, 360 lines)

@@ -0,0 +1,360 @@
#pike __REAL_VERSION__
//! A string wrapper that pretends to be a @[Stdio.File] object
//! in addition to some features of a @[Stdio.FILE] object.
//! This constant can be used to distinguish a FakeFile object
//! from a real @[Stdio.File] object.
constant is_fake_file = 1;
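//! Illustrative use (not part of the original sample; assumes the program
//! is available as Stdio.FakeFile, as in Pike's standard library):
//!   object f = Stdio.FakeFile("hello\nworld\n");
//!   f->gets();    // -> "hello"
//!   f->read(5);   // -> "world"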
protected string data;
protected int ptr;
protected int(0..1) r;
protected int(0..1) w;
protected int mtime;
protected function read_cb;
protected function read_oob_cb;
protected function write_cb;
protected function write_oob_cb;
protected function close_cb;
//! @seealso
//! @[Stdio.File()->close()]
int close(void|string direction) {
direction = lower_case(direction||"rw");
int cr = has_value(direction, "r");
int cw = has_value(direction, "w");
if(cr) {
r = 0;
}
if(cw) {
w = 0;
}
// FIXME: Close callback
return 1;
}
//! @decl void create(string data, void|string type, void|int pointer)
//! @seealso
//! @[Stdio.File()->create()]
void create(string _data, void|string type, int|void _ptr) {
if(!_data) error("No data string given to FakeFile.\n");
data = _data;
ptr = _ptr;
mtime = time();
if(type) {
type = lower_case(type);
if(has_value(type, "r"))
r = 1;
if(has_value(type, "w"))
w = 1;
}
else
r = w = 1;
}
protected string make_type_str() {
string type = "";
if(r) type += "r";
if(w) type += "w";
return type;
}
//! @seealso
//! @[Stdio.File()->dup()]
this_program dup() {
return this_program(data, make_type_str(), ptr);
}
//! Always returns 0.
//! @seealso
//! @[Stdio.File()->errno()]
int errno() { return 0; }
//! Returns size and the creation time of the string.
Stdio.Stat stat() {
Stdio.Stat st = Stdio.Stat();
st->size = sizeof(data);
st->mtime=st->ctime=mtime;
st->atime=time();
return st;
}
//! @seealso
//! @[Stdio.File()->line_iterator()]
String.SplitIterator line_iterator(int|void trim) {
if(trim)
return String.SplitIterator( data-"\r", '\n' );
return String.SplitIterator( data, '\n' );
}
protected mixed id;
//! @seealso
//! @[Stdio.File()->query_id()]
mixed query_id() { return id; }
//! @seealso
//! @[Stdio.File()->set_id()]
void set_id(mixed _id) { id = _id; }
//! @seealso
//! @[Stdio.File()->read_function()]
function(:string) read_function(int nbytes) {
return lambda() { return read(nbytes); };
}
//! @seealso
//! @[Stdio.File()->peek()]
int(-1..1) peek(int|float|void timeout) {
if(!r) return -1;
if(ptr >= sizeof(data)) return 0;
return 1;
}
//! Always returns 0.
//! @seealso
//! @[Stdio.File()->query_address()]
string query_address(void|int(0..1) is_local) { return 0; }
//! @seealso
//! @[Stdio.File()->read()]
string read(void|int(0..) len, void|int(0..1) not_all) {
if(!r) return 0;
if (len < 0) error("Cannot read negative number of characters.\n");
int start=ptr;
ptr += len;
if(zero_type(len) || ptr>sizeof(data))
ptr = sizeof(data);
// FIXME: read callback
return data[start..ptr-1];
}
//! @seealso
//! @[Stdio.FILE()->gets()]
string gets() {
if(!r) return 0;
string ret;
sscanf(data,"%*"+(string)ptr+"s%[^\n]",ret);
if(ret)
{
ptr+=sizeof(ret)+1;
if(ptr>sizeof(data))
{
ptr=sizeof(data);
if(!sizeof(ret))
ret = 0;
}
}
// FIXME: read callback
return ret;
}
//! @seealso
//! @[Stdio.FILE()->getchar()]
int getchar() {
if(!r) return 0;
int c;
if(catch(c=data[ptr]))
c=-1;
else
ptr++;
// FIXME: read callback
return c;
}
//! @seealso
//! @[Stdio.FILE()->unread()]
void unread(string s) {
if(!r) return;
if(data[ptr-sizeof(s)..ptr-1]==s)
ptr-=sizeof(s);
else
{
data=s+data[ptr..];
ptr=0;
}
}
//! @seealso
//! @[Stdio.File()->seek()]
int seek(int pos, void|int mult, void|int add) {
if(mult)
pos = pos*mult+add;
if(pos<0)
{
pos = sizeof(data)+pos;
if( pos < 0 )
pos = 0;
}
ptr = pos;
if( ptr > strlen( data ) )
ptr = strlen(data);
return ptr;
}
//! Always returns 1.
//! @seealso
//! @[Stdio.File()->sync()]
int(1..1) sync() { return 1; }
//! @seealso
//! @[Stdio.File()->tell()]
int tell() { return ptr; }
//! @seealso
//! @[Stdio.File()->truncate()]
int(0..1) truncate(int length) {
data = data[..length-1];
return sizeof(data)==length;
}
//! @seealso
//! @[Stdio.File()->write()]
int(-1..) write(string|array(string) str, mixed ... extra) {
if(!w) return -1;
if(arrayp(str)) str=str*"";
if(sizeof(extra)) str=sprintf(str, @extra);
if(ptr==sizeof(data)) {
data += str;
ptr = sizeof(data);
}
else if(sizeof(str)==1)
data[ptr++] = str[0];
else {
data = data[..ptr-1] + str + data[ptr+sizeof(str)..];
ptr += sizeof(str);
}
// FIXME: write callback
return sizeof(str);
}
//! @seealso
//! @[Stdio.File()->set_blocking]
void set_blocking() {
close_cb = 0;
read_cb = 0;
read_oob_cb = 0;
write_cb = 0;
write_oob_cb = 0;
}
//! @seealso
//! @[Stdio.File()->set_blocking_keep_callbacks]
void set_blocking_keep_callbacks() { }
//! @seealso
//! @[Stdio.File()->set_blocking]
void set_nonblocking(function rcb, function wcb, function ccb,
function rocb, function wocb) {
read_cb = rcb;
write_cb = wcb;
close_cb = ccb;
read_oob_cb = rocb;
write_oob_cb = wocb;
}
//! @seealso
//! @[Stdio.File()->set_blocking_keep_callbacks]
void set_nonblocking_keep_callbacks() { }
//! @seealso
//! @[Stdio.File()->set_close_callback]
void set_close_callback(function cb) { close_cb = cb; }
//! @seealso
//! @[Stdio.File()->set_read_callback]
void set_read_callback(function cb) { read_cb = cb; }
//! @seealso
//! @[Stdio.File()->set_read_oob_callback]
void set_read_oob_callback(function cb) { read_oob_cb = cb; }
//! @seealso
//! @[Stdio.File()->set_write_callback]
void set_write_callback(function cb) { write_cb = cb; }
//! @seealso
//! @[Stdio.File()->set_write_oob_callback]
void set_write_oob_callback(function cb) { write_oob_cb = cb; }
//! @seealso
//! @[Stdio.File()->query_close_callback]
function query_close_callback() { return close_cb; }
//! @seealso
//! @[Stdio.File()->query_read_callback]
function query_read_callback() { return read_cb; }
//! @seealso
//! @[Stdio.File()->query_read_oob_callback]
function query_read_oob_callback() { return read_oob_cb; }
//! @seealso
//! @[Stdio.File()->query_write_callback]
function query_write_callback() { return write_cb; }
//! @seealso
//! @[Stdio.File()->query_write_oob_callback]
function query_write_oob_callback() { return write_oob_cb; }
string _sprintf(int t) {
return t=='O' && sprintf("%O(%d,%O)", this_program, sizeof(data),
make_type_str());
}
// FakeFile specials.
//! A FakeFile can be cast to a string.
mixed cast(string to) {
switch(to) {
case "string": return data;
case "object": return this;
}
error("Can not cast object to %O.\n", to);
}
//! Sizeof on a FakeFile returns the size of its contents.
int(0..) _sizeof() {
return sizeof(data);
}
//! @ignore
#define NOPE(X) mixed X (mixed ... args) { error("This is a FakeFile. %s is not available.\n", #X); }
NOPE(assign);
NOPE(async_connect);
NOPE(connect);
NOPE(connect_unix);
NOPE(open);
NOPE(open_socket);
NOPE(pipe);
NOPE(tcgetattr);
NOPE(tcsetattr);
// Stdio.Fd
NOPE(dup2);
NOPE(lock); // We could implement this
NOPE(mode); // We could implement this
NOPE(proxy); // We could implement this
NOPE(query_fd);
NOPE(read_oob);
NOPE(set_close_on_exec);
NOPE(set_keepalive);
NOPE(trylock); // We could implement this
NOPE(write_oob);
//! @endignore


@@ -0,0 +1,260 @@
:- module(format_spec, [ format_error/2
, format_spec/2
, format_spec//1
, spec_arity/2
, spec_types/2
]).
:- use_module(library(dcg/basics), [eos//0, integer//1, string_without//2]).
:- use_module(library(error)).
:- use_module(library(when), [when/2]).
% TODO loading this module is optional
% TODO it's for my own convenience during development
%:- use_module(library(mavis)).
%% format_error(+Goal, -Error:string) is nondet.
%
% True if Goal exhibits an Error in its format string. The
% Error string describes what is wrong with Goal. Iterates each
% error on backtracking.
%
% Goal may be one of the following predicates:
%
% * format/2
% * format/3
% * debug/3
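%
% An illustrative query (not part of the original sample; the message text
% follows from the clauses below):
%
% ?- format_error(format("~d", []), E).
% E = "Wrong argument count. Expected 1, got 0".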
format_error(format(Format,Args), Error) :-
format_error_(Format, Args,Error).
format_error(format(_,Format,Args), Error) :-
format_error_(Format,Args,Error).
format_error(debug(_,Format,Args), Error) :-
format_error_(Format,Args,Error).
format_error_(Format,Args,Error) :-
format_spec(Format, Spec),
!,
is_list(Args),
spec_types(Spec, Types),
types_error(Args, Types, Error).
format_error_(Format,_,Error) :-
% \+ format_spec(Format, _),
format(string(Error), "Invalid format string: ~q", [Format]).
types_error(Args, Types, Error) :-
length(Types, TypesLen),
length(Args, ArgsLen),
TypesLen =\= ArgsLen,
!,
format( string(Error)
, "Wrong argument count. Expected ~d, got ~d"
, [TypesLen, ArgsLen]
).
types_error(Args, Types, Error) :-
types_error_(Args, Types, Error).
types_error_([Arg|_],[Type|_],Error) :-
ground(Arg),
\+ is_of_type(Type,Arg),
message_to_string(error(type_error(Type,Arg),_Location),Error).
types_error_([_|Args],[_|Types],Error) :-
types_error_(Args, Types, Error).
% check/0 augmentation
:- multifile check:checker/2.
:- dynamic check:checker/2.
check:checker(format_spec:checker, "format/2 strings and arguments").
:- dynamic format_fail/3.
checker :-
prolog_walk_code([ module_class([user])
, infer_meta_predicates(false)
, autoload(false) % format/{2,3} are always loaded
, undefined(ignore)
, trace_reference(_)
, on_trace(check_format)
]),
retract(format_fail(Goal,Location,Error)),
print_message(warning, format_error(Goal,Location,Error)),
fail. % iterate all errors
checker. % succeed even if no errors are found
check_format(Module:Goal, _Caller, Location) :-
predicate_property(Module:Goal, imported_from(Source)),
memberchk(Source, [system,prolog_debug]),
can_check(Goal),
format_error(Goal, Error),
assert(format_fail(Goal, Location, Error)),
fail.
check_format(_,_,_). % succeed to avoid printing goals
% true if format_error/2 can check this goal
can_check(Goal) :-
once(clause(format_error(Goal,_),_)).
prolog:message(format_error(Goal,Location,Error)) -->
prolog:message_location(Location),
['~n In goal: ~q~n ~s'-[Goal,Error]].
%% format_spec(-Spec)//
%
% DCG for parsing format strings. It doesn't yet generate format
% strings from a spec. See format_spec/2 for details.
format_spec([]) -->
eos.
format_spec([escape(Numeric,Modifier,Action)|Rest]) -->
"~",
numeric_argument(Numeric),
modifier_argument(Modifier),
action(Action),
format_spec(Rest).
format_spec([text(String)|Rest]) -->
{ when((ground(String);ground(Codes)),string_codes(String, Codes)) },
string_without("~", Codes),
{ Codes \= [] },
format_spec(Rest).
%% format_spec(+Format, -Spec:list) is semidet.
%
% Parse a format string. Each element of Spec is one of the following:
%
% * `text(Text)` - text sent to the output as is
% * `escape(Num,Colon,Action)` - a format escape
%
% `Num` represents the optional numeric portion of an escape. `Colon`
% represents the optional colon in an escape. `Action` is an atom
% representing the action to be taken by this escape.
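%
% An illustrative parse (not part of the original sample), following the
% grammar above:
%
% ?- format_spec("~w and ~d~n", Spec).
% Spec = [escape(nothing,no_colon,w), text(" and "),
%         escape(nothing,no_colon,d), escape(nothing,no_colon,n)].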
format_spec(Format, Spec) :-
when((ground(Format);ground(Codes)),text_codes(Format, Codes)),
once(phrase(format_spec(Spec), Codes, [])).
%% spec_arity(+FormatSpec, -Arity:positive_integer) is det.
%
% True if FormatSpec requires format/2 to have Arity arguments.
spec_arity(Spec, Arity) :-
spec_types(Spec, Types),
length(Types, Arity).
%% spec_types(+FormatSpec, -Types:list(type)) is det.
%
% True if FormatSpec requires format/2 to have arguments of Types. Each
% value of Types is a type as described by error:has_type/2. This
% notion of types is compatible with library(mavis).
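%
% For example (illustrative, not part of the original sample):
%
% ?- format_spec("~a: ~d~n", Spec), spec_types(Spec, Types).
% Types = [atom, integer].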
spec_types(Spec, Types) :-
phrase(spec_types(Spec), Types).
spec_types([]) -->
[].
spec_types([Item|Items]) -->
item_types(Item),
spec_types(Items).
item_types(text(_)) -->
[].
item_types(escape(Numeric,_,Action)) -->
numeric_types(Numeric),
action_types(Action).
numeric_types(number(_)) -->
[].
numeric_types(character(_)) -->
[].
numeric_types(star) -->
[number].
numeric_types(nothing) -->
[].
action_types(Action) -->
{ atom_codes(Action, [Code]) },
{ action_types(Code, Types) },
phrase(Types).
%% text_codes(Text:text, Codes:codes).
text_codes(Var, Codes) :-
var(Var),
!,
string_codes(Var, Codes).
text_codes(Atom, Codes) :-
atom(Atom),
!,
atom_codes(Atom, Codes).
text_codes(String, Codes) :-
string(String),
!,
string_codes(String, Codes).
text_codes(Codes, Codes) :-
is_of_type(codes, Codes).
numeric_argument(number(N)) -->
integer(N).
numeric_argument(character(C)) -->
"`",
[C].
numeric_argument(star) -->
"*".
numeric_argument(nothing) -->
"".
modifier_argument(colon) -->
":".
modifier_argument(no_colon) -->
\+ ":".
action(Action) -->
[C],
{ is_action(C) },
{ atom_codes(Action, [C]) }.
%% is_action(+Action:integer) is semidet.
%% is_action(-Action:integer) is multi.
%
% True if Action is a valid format/2 action character. Iterates all
% acceptable action characters, if Action is unbound.
is_action(Action) :-
action_types(Action, _).
%% action_types(?Action:integer, ?Types:list(type))
%
% True if Action consumes arguments matching Types. An action (like
% `~`), which consumes no arguments, has `Types=[]`. For example,
%
% ?- action_types(0'~, Types).
% Types = [].
% ?- action_types(0'a, Types).
% Types = [atom].
action_types(0'~, []).
action_types(0'a, [atom]).
action_types(0'c, [integer]). % specifically, a code
action_types(0'd, [integer]).
action_types(0'D, [integer]).
action_types(0'e, [float]).
action_types(0'E, [float]).
action_types(0'f, [float]).
action_types(0'g, [float]).
action_types(0'G, [float]).
action_types(0'i, [any]).
action_types(0'I, [integer]).
action_types(0'k, [any]).
action_types(0'n, []).
action_types(0'N, []).
action_types(0'p, [any]).
action_types(0'q, [any]).
action_types(0'r, [integer]).
action_types(0'R, [integer]).
action_types(0's, [text]).
action_types(0'@, [callable]).
action_types(0't, []).
action_types(0'|, []).
action_types(0'+, []).
action_types(0'w, [any]).
action_types(0'W, [any, list]).

samples/Prolog/func.pl Normal file

@@ -0,0 +1,194 @@
:- module(func, [ op(675, xfy, ($))
, op(650, xfy, (of))
, ($)/2
, (of)/2
]).
:- use_module(library(list_util), [xfy_list/3]).
:- use_module(library(function_expansion)).
:- use_module(library(arithmetic)).
:- use_module(library(error)).
% true if the module whose terms are being read has specifically
% imported library(func).
wants_func :-
prolog_load_context(module, Module),
Module \== func, % we don't want func sugar ourselves
predicate_property(Module:of(_,_),imported_from(func)).
%% compile_function(+Term, -In, -Out, -Goal) is semidet.
%
% True if Term represents a function from In to Out
% implemented by calling Goal. This multifile hook is
% called by $/2 and of/2 to convert a term into a goal.
% It's used at compile time for macro expansion.
% It's used at run time to handle functions which aren't
% known at compile time.
% When called as a hook, Term is guaranteed to be =nonvar=.
%
% For example, to treat library(assoc) terms as functions which
% map a key to a value, one might define:
%
% :- multifile compile_function/4.
% compile_function(Assoc, Key, Value, Goal) :-
% is_assoc(Assoc),
% Goal = get_assoc(Key, Assoc, Value).
%
% Then one could write:
%
% list_to_assoc([a-1, b-2, c-3], Assoc),
% Two = Assoc $ b,
:- multifile compile_function/4.
compile_function(Var, _, _, _) :-
% variables storing functions must be evaluated at run time
% and can't be compiled, a priori, into a goal
var(Var),
!,
fail.
compile_function(Expr, In, Out, Out is Expr) :-
% arithmetic expressions of one variable are simply evaluated
\+ string(Expr), % evaluable/1 throws exception with strings
arithmetic:evaluable(Expr),
term_variables(Expr, [In]).
compile_function(F, In, Out, func:Goal) :-
% composed functions
function_composition_term(F),
user:function_expansion(F, func:Functor, true),
Goal =.. [Functor,In,Out].
compile_function(F, In, Out, Goal) :-
% string interpolation via format templates
format_template(F),
( atom(F) ->
Goal = format(atom(Out), F, In)
; string(F) ->
Goal = format(string(Out), F, In)
; error:has_type(codes, F) ->
Goal = format(codes(Out), F, In)
; fail % to be explicit
).
compile_function(Dict, In, Out, Goal) :-
is_dict(Dict),
Goal = get_dict(In, Dict, Out).
%% $(+Function, +Argument) is det.
%
% Apply Function to an Argument. A Function is any predicate
% whose final argument generates output and whose penultimate argument
% accepts input.
%
% This is realized by expanding function application to chained
% predicate calls at compile time. Function application itself can
% be chained.
%
% ==
% Reversed = reverse $ sort $ [c,d,b].
% ==
:- meta_predicate $(2,+).
$(_,_) :-
throw(error(permission_error(call, predicate, ($)/2),
context(_, '$/2 must be subject to goal expansion'))).
user:function_expansion($(F,X), Y, Goal) :-
wants_func,
( func:compile_function(F, X, Y, Goal) ->
true
; var(F) -> Goal = % defer until run time
( func:compile_function(F, X, Y, P) ->
call(P)
; call(F, X, Y)
)
; Goal = call(F, X, Y)
).
%% of(+F, +G) is det.
%
% Creates a new function by composing F and G. The functions are
% composed at compile time to create a new, compiled predicate which
% behaves like a function. Function composition can be chained.
% Composed functions can also be applied with $/2.
%
% ==
% Reversed = reverse of sort $ [c,d,b].
% ==
:- meta_predicate of(2,2).
of(_,_).
%% format_template(Format) is semidet.
%
% True if Format is a template string suitable for format/3.
% The current check is very naive and should be improved.
format_template(Format) :-
atom(Format), !,
atom_codes(Format, Codes),
format_template(Codes).
format_template(Format) :-
string(Format),
!,
string_codes(Format, Codes),
format_template(Codes).
format_template(Format) :-
error:has_type(codes, Format),
memberchk(0'~, Format). % ' fix syntax highlighting
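% Sketch of the intended behaviour (added for illustration, not in the
% original file): any text containing a ~ escape counts as a template,
% while plain text does not,
%
%     ?- format_template('~w items').
%     true.
%     ?- format_template(foo).
%     false.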
% True if the argument is a function composition term
function_composition_term(of(_,_)).
% Converts a function composition term into a list of functions to compose
functions_to_compose(Term, Funcs) :-
functor(Term, Op, 2),
Op = (of),
xfy_list(Op, Term, Funcs).
% Thread a state variable through a list of functions. This is similar
% to a DCG expansion, but much simpler.
thread_state([], [], Out, Out).
thread_state([F|Funcs], [Goal|Goals], In, Out) :-
( compile_function(F, In, Tmp, Goal) ->
true
; var(F) ->
instantiation_error(F)
; F =.. [Functor|Args],
append(Args, [In, Tmp], NewArgs),
Goal =.. [Functor|NewArgs]
),
thread_state(Funcs, Goals, Tmp, Out).
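% Worked sketch (an assumption for illustration, not in the original
% file): threading the state through Funcs = [plus(1), succ] builds the
% conjunction that the composed predicate will eventually run, roughly
%
%     ?- thread_state([plus(1), succ], Goals, In, Out).
%     Goals = [plus(1, In, T), succ(T, Out)].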
user:function_expansion(Term, func:Functor, true) :-
wants_func,
functions_to_compose(Term, Funcs),
debug(func, 'building composed function for: ~w', [Term]),
variant_sha1(Funcs, Sha),
format(atom(Functor), 'composed_function_~w', [Sha]),
debug(func, ' name: ~s', [Functor]),
( func:current_predicate(Functor/2) ->
debug(func, ' composed predicate already exists', [])
; true ->
reverse(Funcs, RevFuncs),
thread_state(RevFuncs, Threaded, In, Out),
xfy_list(',', Body, Threaded),
Head =.. [Functor, In, Out],
func:assert(Head :- Body),
func:compile_predicates([Functor/2])
).
% support foo(x,~,y) evaluation
user:function_expansion(Term, Output, Goal) :-
wants_func,
compound(Term),
% has a single ~ argument
setof( X
, ( arg(X,Term,Arg), Arg == '~' )
, [N]
),
% replace ~ with a variable
Term =.. [Name|Args0],
nth1(N, Args0, ~, Rest),
nth1(N, Args, Output, Rest),
Goal =.. [Name|Args].


@@ -0,0 +1,241 @@
#
# Cinema 4D Python Plugin Source file
# https://github.com/nr-plugins/nr-xpresso-alignment-tools
#
# coding: utf-8
#
# Copyright (C) 2012, Niklas Rosenstein
# Licensed under the GNU General Public License
#
# XPAT - XPresso Alignment Tools
# ==============================
#
# The XPAT plugin provides tools for aligning nodes in the Cinema 4D
# XPresso Editor, improving readability of complex XPresso set-ups
# immensely.
#
# Requirements:
# - MAXON Cinema 4D R13+
# - Python `c4dtools` library. Get it from
# http://github.com/NiklasRosenstein/c4dtools
#
# Author: Niklas Rosenstein <rosensteinniklas@gmail.com>
# Version: 1.1 (01/06/2012)
import os
import sys
import json
import c4d
import c4dtools
import itertools
from c4d.modules import graphview as gv
from c4dtools.misc import graphnode
res, importer = c4dtools.prepare(__file__, __res__)
settings = c4dtools.helpers.Attributor({
'options_filename': res.file('config.json'),
})
def align_nodes(nodes, mode, spacing):
r"""
Aligns the passed nodes horizontally or vertically and applies the
minimum spacing between them.
"""
modes = ['horizontal', 'vertical']
if not nodes:
return
if mode not in modes:
raise ValueError('invalid mode, choices are: ' + ', '.join(modes))
get_0 = lambda x: x.x
get_1 = lambda x: x.y
set_0 = lambda x, v: setattr(x, 'x', v)
set_1 = lambda x, v: setattr(x, 'y', v)
if mode == 'vertical':
get_0, get_1 = get_1, get_0
set_0, set_1 = set_1, set_0
nodes = [graphnode.GraphNode(n) for n in nodes]
nodes.sort(key=lambda n: get_0(n.position))
midpoint = graphnode.find_nodes_mid(nodes)
# Apply the spacing between the nodes relative to the coordinate system's
# origin. We can offset them later because we already know the nodes'
# midpoint.
first_position = nodes[0].position
new_positions = []
prev_offset = 0
for node in nodes:
# Compute the relative position of the node.
position = node.position
set_0(position, get_0(position) - get_0(first_position))
# Obtain its size and check if the node needs to be re-placed.
size = node.size
if get_0(position) < prev_offset:
set_0(position, prev_offset)
prev_offset += spacing + get_0(size)
else:
prev_offset = get_0(position) + get_0(size) + spacing
set_1(position, get_1(midpoint))
new_positions.append(position)
# Center the nodes again.
bbox_size = prev_offset - spacing
bbox_size_2 = bbox_size * 0.5
for node, position in itertools.izip(nodes, new_positions):
# TODO: There is an issue with offsetting the nodes: some value
# dependent on the spacing must be added here so that the nodes do not
# move horizontally/vertically even though they have already been
# aligned.
set_0(position, get_0(midpoint) + get_0(position) - bbox_size_2 + spacing)
node.position = position
def align_nodes_shortcut(mode, spacing):
master = gv.GetMaster(0)
if not master:
return
root = master.GetRoot()
if not root:
return
nodes = graphnode.find_selected_nodes(root)
if nodes:
master.AddUndo()
align_nodes(nodes, mode, spacing)
c4d.EventAdd()
return True
class XPAT_Options(c4dtools.helpers.Attributor):
r"""
This class organizes the options for the XPAT plugin, i.e.
validating, loading and saving.
"""
defaults = {
'hspace': 50,
'vspace': 20,
}
def __init__(self, filename=None):
super(XPAT_Options, self).__init__()
self.load(filename)
def load(self, filename=None):
r"""
Load the options from the file pointed to by filename. If filename
is None, it defaults to the filename defined in the global settings.
"""
if filename is None:
filename = settings.options_filename
if os.path.isfile(filename):
self.dict_ = self.defaults.copy()
with open(filename, 'rb') as fp:
self.dict_.update(json.load(fp))
else:
self.dict_ = self.defaults.copy()
self.save()
def save(self, filename=None):
r"""
Save the options defined in this XPAT_Options instance to disk.
"""
if filename is None:
filename = settings.options_filename
values = dict((k, v) for k, v in self.dict_.iteritems()
if k in self.defaults)
with open(filename, 'wb') as fp:
json.dump(values, fp)
class XPAT_OptionsDialog(c4d.gui.GeDialog):
r"""
This class implements the behavior of the XPAT options dialog,
taking care of storing the options on disk and loading them
again on startup.
"""
# c4d.gui.GeDialog
def CreateLayout(self):
return self.LoadDialogResource(res.DLG_OPTIONS)
def InitValues(self):
self.SetLong(res.EDT_HSPACE, options.hspace)
self.SetLong(res.EDT_VSPACE, options.vspace)
return True
def Command(self, id, msg):
if id == res.BTN_SAVE:
options.hspace = self.GetLong(res.EDT_HSPACE)
options.vspace = self.GetLong(res.EDT_VSPACE)
options.save()
self.Close()
return True
class XPAT_Command_OpenOptionsDialog(c4dtools.plugins.Command):
r"""
This Cinema 4D CommandData plugin opens the XPAT options dialog
when being executed.
"""
def __init__(self):
super(XPAT_Command_OpenOptionsDialog, self).__init__()
self._dialog = None
@property
def dialog(self):
if not self._dialog:
self._dialog = XPAT_OptionsDialog()
return self._dialog
# c4dtools.plugins.Command
PLUGIN_ID = 1029621
PLUGIN_NAME = res.string.XPAT_COMMAND_OPENOPTIONSDIALOG()
PLUGIN_HELP = res.string.XPAT_COMMAND_OPENOPTIONSDIALOG_HELP()
# c4d.gui.CommandData
def Execute(self, doc):
return self.dialog.Open(c4d.DLG_TYPE_MODAL)
class XPAT_Command_AlignHorizontal(c4dtools.plugins.Command):
PLUGIN_ID = 1029538
PLUGIN_NAME = res.string.XPAT_COMMAND_ALIGNHORIZONTAL()
PLUGIN_ICON = res.file('xpresso-align-h.png')
PLUGIN_HELP = res.string.XPAT_COMMAND_ALIGNHORIZONTAL_HELP()
def Execute(self, doc):
align_nodes_shortcut('horizontal', options.hspace)
return True
class XPAT_Command_AlignVertical(c4dtools.plugins.Command):
PLUGIN_ID = 1029539
PLUGIN_NAME = res.string.XPAT_COMMAND_ALIGNVERTICAL()
PLUGIN_ICON = res.file('xpresso-align-v.png')
PLUGIN_HELP = res.string.XPAT_COMMAND_ALIGNVERTICAL_HELP()
def Execute(self, doc):
align_nodes_shortcut('vertical', options.vspace)
return True
options = XPAT_Options()
if __name__ == '__main__':
c4dtools.plugins.main()

samples/QMake/complex.pro Normal file

@@ -0,0 +1,30 @@
# This QMake file is complex, as it uses
# boolean operators and function calls
QT += core gui
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
# We could use some OpenGL right now
contains(QT_CONFIG, opengl) | contains(QT_CONFIG, opengles2) {
QT += opengl
} else {
DEFINES += QT_NO_OPENGL
}
TEMPLATE = app
win32 {
TARGET = BlahApp
RC_FILE = Resources/winres.rc
}
!win32 { TARGET = blahapp }
# Let's add a PRI file!
include(functions.pri)
SOURCES += file.cpp
HEADERS += file.h
FORMS += file.ui
RESOURCES += res.qrc


@@ -0,0 +1,8 @@
# QMake include file that calls some functions
# and does nothing else...
exists(.git/HEAD) {
system(git rev-parse HEAD >rev.txt)
} else {
system(echo ThisIsNotAGitRepo >rev.txt)
}


@@ -0,0 +1,2 @@
#!/usr/bin/qmake
message(This is QMake.)

samples/QMake/simple.pro Normal file

@@ -0,0 +1,17 @@
# Simple QMake file
CONFIG += qt
QT += core gui
TEMPLATE = app
TARGET = simpleapp
SOURCES += file.cpp \
file2.c \
This/Is/Folder/file3.cpp
HEADERS += file.h \
file2.h \
This/Is/Folder/file3.h
FORMS += This/Is/Folder/file3.ui \
Test.ui

samples/R/scholar.Rd Normal file

@@ -0,0 +1,25 @@
\docType{package}
\name{scholar}
\alias{scholar}
\alias{scholar-package}
\title{scholar}
\source{
The package reads data from
\url{http://scholar.google.com}. Dates and citation
counts are estimated and are determined automatically by
a computer program. Use at your own risk.
}
\description{
The \code{scholar} package provides functions to extract
citation data from Google Scholar. There are also
convenience functions for comparing multiple scholars and
predicting h-index scores based on past publication
records.
}
\note{
A complementary set of Google Scholar functions can be
found at
\url{http://biostat.jhsph.edu/~jleek/code/googleCite.r}.
The \code{scholar} package was developed independently.
}


@@ -0,0 +1,19 @@
Pry.config.commands.import Pry::ExtendedCommands::Experimental
Pry.config.pager = false
Pry.config.color = false
Pry.config.commands.alias_command "lM", "ls -M"
Pry.config.commands.command "add", "Add a list of numbers together" do |*args|
output.puts "Result is: #{args.map(&:to_i).inject(&:+)}"
end
Pry.config.history.should_save = false
Pry.config.prompt = [proc { "input> " },
proc { " | " }]
# Disable pry-buggy-plug:
Pry.plugins["buggy-plug"].disable!

samples/SAS/data.sas Normal file

@@ -0,0 +1,17 @@
/* Example DATA step code for linguist */
libname source 'C:\path\to\file';
data work.working_copy;
set source.original_file;
run;
data work.working_copy;
set work.working_copy;
if Purge = 1 then delete;
run;
data work.working_copy;
set work.working_copy;
if ImportantVariable = . then MissingFlag = 1;
run;

samples/SAS/proc.sas Normal file

@@ -0,0 +1,15 @@
/* PROC examples for Linguist */
proc surveyselect data=work.data out=work.boot method=urs reps=20000 seed=2156 sampsize=28 outhits;
samplingunit Site;
run;
PROC MI data=work.boot out=work.bootmi nimpute=30 seed=5686 round = 1;
By Replicate;
VAR Variable1 Variable2;
run;
proc logistic data=work.bootmi descending;
By Replicate _Imputation_;
model Outcome = Variable1 Variable2 / risklimits;
run;

samples/STON/Array.ston Normal file

@@ -0,0 +1 @@
[1, 2, 3]


@@ -0,0 +1 @@
{#a : 1, #b : 2}


@@ -0,0 +1,4 @@
Rectangle {
#origin : Point [ -40, -15 ],
#corner : Point [ 60, 35 ]
}


@@ -0,0 +1,15 @@
TestDomainObject {
#created : DateAndTime [ '2012-02-14T16:40:15+01:00' ],
#modified : DateAndTime [ '2012-02-14T16:40:18+01:00' ],
#integer : 39581,
#float : 73.84789359463944,
#description : 'This is a test',
#color : #green,
#tags : [
#two,
#beta,
#medium
],
#bytes : ByteArray [ 'afabfdf61d030f43eb67960c0ae9f39f' ],
#boolean : false
}


@@ -0,0 +1,30 @@
ZnResponse {
#headers : ZnHeaders {
#headers : ZnMultiValueDictionary {
'Date' : 'Fri, 04 May 2012 20:09:23 GMT',
'Modification-Date' : 'Thu, 10 Feb 2011 08:32:30 GMT',
'Content-Length' : '113',
'Server' : 'Zinc HTTP Components 1.0',
'Vary' : 'Accept-Encoding',
'Connection' : 'close',
'Content-Type' : 'text/html;charset=utf-8'
}
},
#entity : ZnStringEntity {
#contentType : ZnMimeType {
#main : 'text',
#sub : 'html',
#parameters : {
'charset' : 'utf-8'
}
},
#contentLength : 113,
#string : '<html>\n<head><title>Small</title></head>\n<body><h1>Small</h1><p>This is a small HTML document</p></body>\n</html>\n',
#encoder : ZnUTF8Encoder { }
},
#statusLine : ZnStatusLine {
#version : 'HTTP/1.1',
#code : 200,
#reason : 'OK'
}
}


@@ -0,0 +1,24 @@
{
"class" : {
},
"instance" : {
"clientList:listElement:" : "dkh 03/20/2014 16:27",
"copyObjectMenuAction:selectionIndex:" : "dkh 10/13/2013 10:20",
"definitionForSelection:" : "dkh 10/13/2013 10:15",
"editMenuActionSpec" : "dkh 10/13/2013 10:19",
"itemSelected:listElement:selectedIndex:shiftPressed:" : "dkh 10/20/2013 11:06",
"menuActionSpec:" : "dkh 10/19/2013 17:12",
"repository:" : "dkh 10/19/2013 17:36",
"theList" : "dkh 10/12/2013 15:51",
"versionInfoBlock:" : "dkh 10/19/2013 17:08",
"versionInfoDiffVsSelection:selectedIndex:" : "dkh 10/19/2013 17:48",
"versionInfoDiffVsWorkingCopy:selectedIndex:" : "dkh 10/20/2013 12:36",
"versionInfoSelect:selectedIndex:" : "dkh 10/12/2013 17:04",
"versionInfos" : "dkh 10/19/2013 17:13",
"versionSummaryIsClosing" : "dkh 10/20/2013 10:19",
"windowIsClosing:" : "dkh 10/20/2013 10:39",
"windowLabel" : "dkh 05/20/2014 11:00",
"windowLocation" : "dkh 05/23/2014 10:17",
"windowName" : "dkh 10/12/2013 16:00",
"workingCopy" : "dkh 10/12/2013 16:16",
"workingCopy:" : "dkh 10/12/2013 16:17" } }


@@ -0,0 +1,19 @@
{
"category" : "Topez-Server-Core",
"classinstvars" : [
],
"classvars" : [
],
"commentStamp" : "",
"instvars" : [
"workingCopy",
"repository",
"versionInfos",
"versionInfoBlock",
"selectedVersionInfo",
"versionInfoSummaryWindowId" ],
"name" : "TDVersionInfoBrowser",
"pools" : [
],
"super" : "TDAbstractMonticelloToolBuilder",
"type" : "normal" }

samples/Slim/sample.slim Normal file

@@ -0,0 +1,31 @@
doctype html
html
head
title Slim Examples
meta name="keywords" content="template language"
meta name="author" content=author
javascript:
alert('Slim supports embedded javascript!')
body
h1 Markup examples
#content
p This example shows you how a basic Slim file looks like.
== yield
- unless items.empty?
table
- for item in items do
tr
td.name = item.name
td.price = item.price
- else
p
| No items found. Please add some inventory.
Thank you!
div id="footer"
= render 'footer'
| Copyright © #{year} #{author}


@@ -0,0 +1,75 @@
structure LazyBase:> LAZY_BASE =
struct
type 'a lazy = unit -> 'a
exception Undefined
fun delay f = f
fun force f = f()
val undefined = fn () => raise Undefined
end
structure LazyMemoBase:> LAZY_BASE =
struct
datatype 'a susp = NotYet of unit -> 'a
| Done of 'a
type 'a lazy = unit -> 'a susp ref
exception Undefined
fun delay f =
let
val r = ref (NotYet f)
in
fn () => r
end
fun force f =
case f() of
ref (Done x) => x
| r as ref (NotYet f') =>
let
val a = f'()
in
r := Done a
; a
end
val undefined = fn () => raise Undefined
end
functor LazyFn(B: LAZY_BASE): LAZY' =
struct
open B
fun inject x = delay (fn () => x)
fun isUndefined x =
(ignore (force x)
; false)
handle Undefined => true
fun toString f x = if isUndefined x then "_|_" else f (force x)
fun eqBy p (x,y) = p(force x,force y)
fun eq (x,y) = eqBy op= (x,y)
fun compare p (x,y) = p(force x,force y)
structure Ops =
struct
val ! = force
val ? = inject
end
fun map f x = delay (fn () => f (force x))
end
structure Lazy' = LazyFn(LazyBase)
structure LazyMemo = LazyFn(LazyMemoBase)


@@ -0,0 +1,4 @@
let apples = 3
let oranges = 5
let appleSummary = "I have \(apples) apples."
let fruitSummary = "I have \(apples + oranges) pieces of fruit."


@@ -0,0 +1,8 @@
var shoppingList = ["catfish", "water", "tulips", "blue paint"]
shoppingList[1] = "bottle of water"
var occupations = [
"Malcolm": "Captain",
"Kaylee": "Mechanic",
]
occupations["Jayne"] = "Public Relations"


@@ -0,0 +1,2 @@
let emptyArray = String[]()
let emptyDictionary = Dictionary<String, Float>()


@@ -0,0 +1 @@
shoppingList = [] // Went shopping and bought everything.


@@ -0,0 +1,10 @@
let individualScores = [75, 43, 103, 87, 12]
var teamScore = 0
for score in individualScores {
if score > 50 {
teamScore += 3
} else {
teamScore += 1
}
}
teamScore


@@ -0,0 +1,8 @@
var optionalString: String? = "Hello"
optionalString == nil
var optionalName: String? = "John Appleseed"
var greeting = "Hello!"
if let name = optionalName {
greeting = "Hello, \(name)"
}


@@ -0,0 +1,11 @@
let vegetable = "red pepper"
switch vegetable {
case "celery":
let vegetableComment = "Add some raisins and make ants on a log."
case "cucumber", "watercress":
let vegetableComment = "That would make a good tea sandwich."
case let x where x.hasSuffix("pepper"):
let vegetableComment = "Is it a spicy \(x)?"
default:
let vegetableComment = "Everything tastes good in soup."
}


@@ -0,0 +1,14 @@
let interestingNumbers = [
"Prime": [2, 3, 5, 7, 11, 13],
"Fibonacci": [1, 1, 2, 3, 5, 8],
"Square": [1, 4, 9, 16, 25],
]
var largest = 0
for (kind, numbers) in interestingNumbers {
for number in numbers {
if number > largest {
largest = number
}
}
}
largest


@@ -0,0 +1,11 @@
var n = 2
while n < 100 {
n = n * 2
}
n
var m = 2
do {
m = m * 2
} while m < 100
m


@@ -0,0 +1,11 @@
var firstForLoop = 0
for i in 0..3 {
firstForLoop += i
}
firstForLoop
var secondForLoop = 0
for var i = 0; i < 3; ++i {
secondForLoop += 1
}
secondForLoop


@@ -0,0 +1 @@
println("Hello, world")


@@ -0,0 +1,4 @@
func greet(name: String, day: String) -> String {
return "Hello \(name), today is \(day)."
}
greet("Bob", "Tuesday")


@@ -0,0 +1,4 @@
func getGasPrices() -> (Double, Double, Double) {
return (3.59, 3.69, 3.79)
}
getGasPrices()


@@ -0,0 +1,9 @@
func sumOf(numbers: Int...) -> Int {
var sum = 0
for number in numbers {
sum += number
}
return sum
}
sumOf()
sumOf(42, 597, 12)


@@ -0,0 +1,9 @@
func returnFifteen() -> Int {
var y = 10
func add() {
y += 5
}
add()
return y
}
returnFifteen()


@@ -0,0 +1,8 @@
func makeIncrementer() -> (Int -> Int) {
func addOne(number: Int) -> Int {
return 1 + number
}
return addOne
}
var increment = makeIncrementer()
increment(7)


@@ -0,0 +1,13 @@
func hasAnyMatches(list: Int[], condition: Int -> Bool) -> Bool {
for item in list {
if condition(item) {
return true
}
}
return false
}
func lessThanTen(number: Int) -> Bool {
return number < 10
}
var numbers = [20, 19, 7, 12]
hasAnyMatches(numbers, lessThanTen)


@@ -0,0 +1,5 @@
numbers.map({
(number: Int) -> Int in
let result = 3 * number
return result
})


@@ -0,0 +1 @@
numbers.map({ number in 3 * number })


@@ -0,0 +1 @@
sort([1, 5, 3, 12, 2]) { $0 > $1 }


@@ -0,0 +1,6 @@
class Shape {
var numberOfSides = 0
func simpleDescription() -> String {
return "A shape with \(numberOfSides) sides."
}
}


@@ -0,0 +1,3 @@
var myVariable = 42
myVariable = 50
let myConstant = 42


@@ -0,0 +1,3 @@
var shape = Shape()
shape.numberOfSides = 7
var shapeDescription = shape.simpleDescription()


@@ -0,0 +1,12 @@
class NamedShape {
var numberOfSides: Int = 0
var name: String
init(name: String) {
self.name = name
}
func simpleDescription() -> String {
return "A shape with \(numberOfSides) sides."
}
}


@@ -0,0 +1,20 @@
class Square: NamedShape {
var sideLength: Double
init(sideLength: Double, name: String) {
self.sideLength = sideLength
super.init(name: name)
numberOfSides = 4
}
func area() -> Double {
return sideLength * sideLength
}
override func simpleDescription() -> String {
return "A square with sides of length \(sideLength)."
}
}
let test = Square(sideLength: 5.2, name: "my test square")
test.area()
test.simpleDescription()


@@ -0,0 +1,26 @@
class EquilateralTriangle: NamedShape {
var sideLength: Double = 0.0
init(sideLength: Double, name: String) {
self.sideLength = sideLength
super.init(name: name)
numberOfSides = 3
}
var perimeter: Double {
get {
return 3.0 * sideLength
}
set {
sideLength = newValue / 3.0
}
}
override func simpleDescription() -> String {
return "An equilateral triagle with sides of length \(sideLength)."
}
}
var triangle = EquilateralTriangle(sideLength: 3.1, name: "a triangle")
triangle.perimeter
triangle.perimeter = 9.9
triangle.sideLength


@@ -0,0 +1,21 @@
class TriangleAndSquare {
var triangle: EquilateralTriangle {
willSet {
square.sideLength = newValue.sideLength
}
}
var square: Square {
willSet {
triangle.sideLength = newValue.sideLength
}
}
init(size: Double, name: String) {
square = Square(sideLength: size, name: name)
triangle = EquilateralTriangle(sideLength: size, name: name)
}
}
var triangleAndSquare = TriangleAndSquare(size: 10, name: "another test shape")
triangleAndSquare.square.sideLength
triangleAndSquare.triangle.sideLength
triangleAndSquare.square = Square(sideLength: 50, name: "larger square")
triangleAndSquare.triangle.sideLength


@@ -0,0 +1,8 @@
class Counter {
var count: Int = 0
func incrementBy(amount: Int, numberOfTimes times: Int) {
count += amount * times
}
}
var counter = Counter()
counter.incrementBy(2, numberOfTimes: 7)


@@ -0,0 +1,2 @@
let optionalSquare: Square? = Square(sideLength: 2.5, name: "optional square")
let sideLength = optionalSquare?.sideLength


@@ -0,0 +1,21 @@
enum Rank: Int {
case Ace = 1
case Two, Three, Four, Five, Six, Seven, Eight, Nine, Ten
case Jack, Queen, King
func simpleDescription() -> String {
switch self {
case .Ace:
return "ace"
case .Jack:
return "jack"
case .Queen:
return "queen"
case .King:
return "king"
default:
return String(self.toRaw())
}
}
}
let ace = Rank.Ace
let aceRawValue = ace.toRaw()


@@ -0,0 +1,3 @@
if let convertedRank = Rank.fromRaw(3) {
let threeDescription = convertedRank.simpleDescription()
}


@@ -0,0 +1,17 @@
enum Suit {
case Spades, Hearts, Diamonds, Clubs
func simpleDescription() -> String {
switch self {
case .Spades:
return "spades"
case .Hearts:
return "hearts"
case .Diamonds:
return "diamonds"
case .Clubs:
return "clubs"
}
}
}
let hearts = Suit.Hearts
let heartsDescription = hearts.simpleDescription()


@@ -0,0 +1,3 @@
let implicitInteger = 70
let implicitDouble = 70.0
let explicitDouble: Double = 70


@@ -0,0 +1,9 @@
struct Card {
var rank: Rank
var suit: Suit
func simpleDescription() -> String {
return "The \(rank.simpleDescription()) of \(suit.simpleDescription())"
}
}
let threeOfSpades = Card(rank: .Three, suit: .Spades)
let threeOfSpadesDescription = threeOfSpades.simpleDescription()


@@ -0,0 +1,14 @@
enum ServerResponse {
case Result(String, String)
case Error(String)
}
let success = ServerResponse.Result("6:00 am", "8:09 pm")
let failure = ServerResponse.Error("Out of cheese.")
switch success {
case let .Result(sunrise, sunset):
let serverResponse = "Sunrise is at \(sunrise) and sunset is at \(sunset)."
case let .Error(error):
let serverResponse = "Failure... \(error)"
}


@@ -0,0 +1,4 @@
protocol ExampleProtocol {
var simpleDescription: String { get }
mutating func adjust()
}

Some files were not shown because too many files have changed in this diff.