Compare commits

...

374 Commits

Author SHA1 Message Date
Joshua Peek
84dc918729 Linguist 2.7.0 2013-06-10 11:08:49 -05:00
Joshua Peek
032125b114 Axe indexable? 2013-06-10 11:06:18 -05:00
Joshua Peek
b1a137135e Axe colorize_without_wrapper 2013-06-10 10:58:33 -05:00
Joshua Peek
1a53d1973a ws 2013-06-10 10:39:59 -05:00
Joshua Peek
490afdddd1 some air 2013-06-10 10:37:55 -05:00
Joshua Peek
9822b153eb ws 2013-06-10 10:36:56 -05:00
Ted Nyman
bf4596c26d Merge pull request #530 from github/not-really-mac
Less clever newline detection
2013-06-09 21:48:11 -07:00
Joshua Peek
3e3fb0cdfe Say why 2013-06-09 21:02:55 -05:00
Joshua Peek
d907ab9940 Kill mac_format check, buggy 2013-06-09 21:02:11 -05:00
Joshua Peek
9c1d6e154c Always split lines on \n or \r 2013-06-09 21:01:03 -05:00
Joshua Peek
b5681ca559 Correct count 2013-06-09 21:00:20 -05:00
Joshua Peek
4b8f362eb7 Merge test cases 2013-06-09 20:53:48 -05:00
Joshua Peek
2e39d1d582 Rebuild samples 2013-06-09 20:53:33 -05:00
Joshua Peek
fa797df0c7 Note that BlobHelper is a turd 2013-06-09 20:51:26 -05:00
Joshua Peek
c7100be139 Make mac_format? private 2013-06-09 20:48:45 -05:00
Joshua Peek
91284e5530 Add failing test bad mac format 2013-06-09 20:45:59 -05:00
Patrick Reynolds
e5cf7ac764 bump version to include the new sample files 2013-06-06 22:46:11 -05:00
Patrick Reynolds
3ae785605e Merge pull request #529 from github/more-samples
More samples
2013-06-06 20:40:12 -07:00
Patrick Reynolds
e7ac4e0a29 helpful comments 2013-06-06 17:04:28 -05:00
Patrick Reynolds
b275e53b08 use LINGUIST_DEBUG to debug the Bayesian filter 2013-06-06 16:54:18 -05:00
Patrick Reynolds
f363b198e1 more and better samples for Nu, Racket, Scala
- 99 bottles of beer is more substantial than hello world
 - also fixed chmod 755 on several .script! files
2013-06-06 16:53:16 -05:00
Ted Nyman
37c5570cec Merge pull request #528 from github/erlang-samples
Erlang samples
2013-06-06 13:46:53 -07:00
Patrick Reynolds
2db2f5a46d add erlang, more-complex shell examples
- some Erlang and escript files
 - .escript extension
 - .erlang extension
 - shell script with %, ##, name tokens
2013-06-06 15:41:44 -05:00
Patrick Reynolds
e33f4ca96e remove redundant OCaml extensions entry 2013-06-06 15:21:49 -05:00
Ted Nyman
246580fb43 Update README.md 2013-05-31 15:27:00 -06:00
Ted Nyman
912f635d2a Merge pull request #515 from Turbo87/jinja
Added .jinja extension to HTML+Django language
2013-05-27 13:38:13 -07:00
Tobias Bieniek
4ae5dd360f Added .jinja extension to HTML+Django language 2013-05-27 22:05:17 +02:00
Ted Nyman
329f9a0fc8 Merge pull request #503 from Drup/patch-1
Add .eliom to ocaml extensions
2013-05-21 23:24:30 -07:00
Ted Nyman
d62257b149 Merge pull request #504 from wjlroe/riemann-configs-are-clojure
Recognise riemann.config files as Clojure files
2013-05-21 23:22:56 -07:00
Ted Nyman
19539404a4 Merge pull request #510 from Gozala/wisp
Add wisp language support.
2013-05-21 23:20:37 -07:00
Irakli Gozalishvili
9ee0523cad Add wisp language support. 2013-05-21 14:03:16 -07:00
William Roe
846e84fc8c Recognise riemann.config files as Clojure files 2013-05-13 18:25:27 +01:00
Drup
cd006487b3 Add .eliom to ocaml extensions 2013-05-13 17:10:07 +02:00
Ted Nyman
597ce9adc3 Add Clojure and just use the existing Bash record 2013-05-11 00:26:13 -06:00
Ted Nyman
61040402df Actually remove the languages 2013-05-11 00:23:10 -06:00
Ted Nyman
8013cd081a Based on current stats, add Shell, Coffeescript to popular; drop TeX, XML 2013-05-11 00:22:27 -06:00
Ted Nyman
99c296264a Merge pull request #483 from KevinT/master
Added scriptcs language detection
2013-05-06 13:16:14 -07:00
Ted Nyman
ba51461604 Merge pull request #493 from josegonzalez/patch-1
Consider .reek files as yaml
2013-05-03 01:16:01 -07:00
Ted Nyman
6610d0dd46 Merge pull request #494 from josegonzalez/patch-2
Consider .factor-rc and .factor-boot-rc factor files. Closes #492
2013-05-03 01:15:12 -07:00
Jose Diaz-Gonzalez
3adc0e1b16 Reorder extensions in order to pass tests 2013-04-30 15:54:41 -03:00
Jose Diaz-Gonzalez
0a47b4865a Consider .factor-rc and .factor-boot-rc factor files. Closes #492 2013-04-30 15:50:51 -03:00
Jose Diaz-Gonzalez
13f1a1fc74 Consider .reek files as yaml 2013-04-30 15:49:00 -03:00
Ted Nyman
3ad129e6e6 Update samples.json 2013-04-28 22:38:07 -07:00
Kevin Trethewey
475e865809 Added scriptcs file extention to C# section 2013-04-28 07:40:12 +03:00
Ted Nyman
0c3dcb0a9b Update color for UPC 2013-04-27 21:01:33 +08:00
Ted Nyman
3138fa79a0 Merge pull request #484 from waltherg/patch-1
Added support for Unified Parallel C
2013-04-27 06:00:17 -07:00
waltherg
c88170b6f6 Added support for Unified Parallel C
http://upc.gwu.edu/
2013-04-27 13:12:03 +02:00
Ted Nyman
f3ee7072a6 Merge pull request #479 from CodeBlock/gemfile-https
Make Gemfile use https://rubygems.org
2013-04-24 09:09:25 -07:00
Ted Nyman
5b5d9da33c Merge pull request #477 from liluo/patch-1
added multi line comment flag for python
2013-04-24 09:07:28 -07:00
Ricky Elrod
dc1d17a051 Make Gemfile use https://rubygems.org 2013-04-21 00:35:13 -04:00
0bc28d9424 added multi line comment flag for python 2013-04-19 15:33:02 +08:00
Ted Nyman
8b5b8a9760 Merge pull request #471 from mihaip/master
Detect source files generated by the Protocol Buffer compiler
2013-04-16 23:20:24 -07:00
Mihai Parparita
6c98bbf02c Detect source files generated by the Protocol Buffer compiler 2013-04-16 22:14:50 -07:00
Ted Nyman
9f0964cd7d Merge pull request #461 from github/detect-csv
Add `csv?` BlobHelper
2013-04-04 14:36:08 -07:00
Yaroslav Shirokov
b68732f0c7 Add detection for CSV 2013-04-04 14:01:09 -07:00
Ted Nyman
b99abba27f Merge pull request #455 from github/axml
Add axml extension to xml
2013-04-01 19:57:47 -07:00
Ted Nyman
9c12823d38 Add axml extension to xml 2013-04-01 19:56:38 -07:00
Ted Nyman
28bee50e6a Merge pull request #451 from github/pdfs
Add PDF detection
2013-03-25 21:14:16 -07:00
Garen Torikian
4148ff1c29 Add PDF detection 2013-03-25 15:45:58 -07:00
Ted Nyman
009bff6cc2 Merge pull request #448 from github/update-db
Update samples
2013-03-22 21:37:22 -07:00
Ted Nyman
c918c5b742 Update samples 2013-03-22 21:35:02 -07:00
Ted Nyman
4a33b7ae8e Merge pull request #150 from lparenteau/master
Add detection for the M programming language (aka MUMPS).
2013-03-22 21:32:50 -07:00
Ted Nyman
777952adcb Merge pull request #446 from github/ceylon-as-ceylon-not-textonly
Render Ceylon as Ceylon since it is now in Pygments
2013-03-18 17:40:17 -07:00
Matthew McCullough
ef4c47347d Render Ceylon as Ceylon since it is now in Pygments 2013-03-18 15:34:37 -07:00
Ted Nyman
5e34315bb3 Merge pull request #349 from tucnak/master
Support of Qt Designer .ui files
2013-03-18 12:50:19 -07:00
Illya Kovalevskyy
4f5624cd5f Order is fixed 2013-03-18 01:40:40 +02:00
Illya Kovalevskyy
f76d64f9aa Merge branch 'master' of github.com:github/linguist
Conflicts:
	lib/linguist/languages.yml
2013-03-18 01:36:19 +02:00
Ted Nyman
4444b6daa1 Merge pull request #441 from rdeltour/xml-group
Remove XProc and XSLT from the group XML
2013-03-17 16:14:59 -07:00
Romain Deltour
7ca58f8dd9 Remove XProc and XSLT from the group XML 2013-03-15 12:40:59 +01:00
Laurent Parenteau
58420f62d9 Merged with upstream. Updated M (aka MUMPS) detection to use the new bayesian / samples method. 2013-03-14 11:33:09 -04:00
Ted Nyman
a20631af04 Merge pull request #373 from vincentwoo/patch-1
Add extension support for Iced Coffeescript
2013-03-13 23:10:33 -07:00
Ted Nyman
44995d6f62 Merge pull request #438 from richo/bugs/sample_db
Bugs/sample db
2013-03-12 23:32:31 -07:00
richo
2d7dea2d97 Don't emit the diff if samples db is out of date
There's a warning message emitted with instructions, a 2000 line diff
does nothing to help the user track down the issue.
2013-03-13 17:29:05 +11:00
richo
2cdbe64b66 Update samples db 2013-03-13 15:09:51 +11:00
Ted Nyman
030ad89a14 Bump to 2.6.8 2013-03-12 01:09:28 -07:00
Ted Nyman
a34ee513c0 Merge pull request #436 from github/ignore-test-fixtures
Vendor test/fixtures
2013-03-12 01:07:50 -07:00
Ted Nyman
96d29b7662 Vendor test/fixtures 2013-03-12 01:06:26 -07:00
Ted Nyman
3f077ea71e Merge pull request #383 from REAS/master
Update to include Processing as a new language
2013-03-11 18:39:03 -07:00
Ted Nyman
de94b85c0d Merge pull request #295 from yandy/patch-1
downcase extname when we determin whether it's a image
2013-03-10 15:39:55 -07:00
Ted Nyman
1c771cc27d Remove sample for now until test structure changes 2013-03-10 15:36:49 -07:00
Ted Nyman
a41ec3a801 Merge pull request #321 from mndrix/patch-1
Add a misclassified Prolog file
2013-03-10 15:34:34 -07:00
Ted Nyman
d9d9e01242 Update samples database 2013-03-10 15:26:46 -07:00
Ted Nyman
04abb5310a Add .pluginspec sample 2013-03-10 15:25:02 -07:00
Ted Nyman
c7ed9bd7b3 Better regex 2013-03-10 15:23:14 -07:00
Ted Nyman
8aadb5eeaa Merge pull request #312 from HerbertKoelman/master
Added to vendor.yml dependencies related to automake and autoconf
2013-03-10 15:22:17 -07:00
Casey Reas
e4b5593728 Add Processing to languages.yml, includes lexer: Java 2013-03-08 16:10:34 -08:00
Ted Nyman
f8389f0d93 Bump to 2.6.7 2013-03-07 20:18:44 -08:00
Ted Nyman
af12db9276 Update samples database 2013-03-07 20:18:07 -08:00
Ted Nyman
688a6bb581 Don't include .inc.
Format is used by too many other non lasso repos
2013-03-07 20:15:17 -08:00
Ted Nyman
5d5935965a Merge pull request #423 from gentoo90/nsis-lexer
Add NSIS installer scripting language
2013-03-07 17:26:14 -08:00
Ted Nyman
f795b20582 Merge pull request #391 from bfontaine/forth-samples
More Forth samples
2013-03-07 17:07:07 -08:00
Ted Nyman
c2023d33b9 Merge pull request #363 from dveeden/master
Add DOT language
2013-03-07 14:12:07 -08:00
gentoo90
d9c375b74a Add .nsh extension 2013-03-07 22:39:16 +02:00
gentoo90
7179ec56ef Add NSIS installer scripting language 2013-03-07 21:39:37 +02:00
Ted Nyman
26c850c37f Update samples.json to latest data 2013-03-06 19:59:33 -08:00
Ted Nyman
2023f35af7 Merge pull request #396 from elehcim/master
Added Matlab code samples
2013-03-06 19:58:42 -08:00
Ted Nyman
c0a57dbd1b Merge pull request #386 from rdeltour/xproc
New language: XProc - an XML Pipeline language (W3C)
2013-03-06 19:57:45 -08:00
Ted Nyman
78f072b46a 2.6.6 2013-03-06 15:29:25 -08:00
Ted Nyman
da51510597 Nix this generated check for now 2013-03-06 15:28:55 -08:00
Ted Nyman
47389cc827 Update samples and bump to 2.6.5 2013-03-06 14:50:50 -08:00
Ted Nyman
f035203e1c Bump to 2.6.4 2013-03-06 14:49:30 -08:00
Ted Nyman
083f6fc3b4 Merge pull request #421 from rvanmil/master
Add ABAP
2013-03-06 14:47:44 -08:00
Ted Nyman
d5bfe40f37 Fix deprecation warning 2013-03-06 14:47:01 -08:00
Ted Nyman
0b350defb5 Merge pull request #422 from brson/rust
Turn on Rust lexing. Add a bigger sample
2013-03-06 14:44:02 -08:00
Ted Nyman
88d0408875 Merge pull request #294 from DHowett/master
Add support for the Logos language.
2013-03-06 14:42:46 -08:00
Brian Anderson
c7a155efef Turn on Rust lexing. Add a bigger sample 2013-03-06 12:40:31 -08:00
Dustin L. Howett
9187fffc48 Update samples.json to include Logos. 2013-03-06 12:34:42 -08:00
Dustin L. Howett
7d2603ceb7 Add support for the Logos language. 2013-03-06 12:30:06 -08:00
René
c5bb287c74 Add ABAP 2013-03-06 09:24:42 +01:00
Ted Nyman
6b6f5eaaff Remove out of date notes 2013-03-04 13:31:05 -08:00
Ted Nyman
f3fa2317a6 Update samples.json, bump to 2.6.3 2013-03-04 13:19:40 -08:00
Ted Nyman
d096187196 Remove extra Forth extension 2013-03-04 12:40:10 -08:00
Ted Nyman
c5a3b34546 Merge pull request #419 from pborreli/typos
Fixed typos
2013-03-04 12:17:42 -08:00
Pascal Borreli
70eafb2ffc Fixed typos 2013-03-03 21:26:31 +00:00
Ted Nyman
983a3e6073 Minor README fixes 2013-03-02 23:19:10 -08:00
Ted Nyman
cf6eeec22a Merge pull request #408 from soimort/master
Add support for Literate CoffeeScript
2013-02-26 22:29:46 -08:00
Mort Yao
583e6fe2e8 Add sample file for Literate CoffeeScript 2013-02-27 05:32:51 +01:00
Brian Lopez
500f8cd869 bump version to 2.6.2 2013-02-26 17:43:24 -08:00
Brian Lopez
2e5866e6d8 Merge pull request #413 from github/bump-escape-utils
Bump escape_utils
2013-02-26 17:42:33 -08:00
Brian Lopez
600648c8af bump escape_utils 2013-02-26 17:41:04 -08:00
Ted Nyman
1ac51d2261 Merge pull request #410 from skalnik/remove-obj
Remove OBJ from supported solids
2013-02-26 14:20:45 -08:00
Mike Skalnik
1766123448 Fix typo in comment 2013-02-26 14:00:42 -08:00
Mike Skalnik
5ea039a74e Remove OBJ files as support solids 2013-02-26 14:00:29 -08:00
Michele Mastropietro
0af1a49cbd Added one more file 2013-02-26 09:23:19 +01:00
Mort Yao
151b7d53b0 Add support for Literate CoffeeScript 2013-02-26 02:51:42 +01:00
Ted Nyman
6e82d2a689 Merge pull request #354 from mrorii/master
Detect Cython-generated C/C++ files
2013-02-25 17:11:17 -08:00
Ted Nyman
b02c6c1e54 Bump to 2.6.1 2013-02-25 15:47:48 -08:00
Ted Nyman
cd406cc6b9 Remove extra extensions.
This are covered by samples so we do not
need to mention them here
2013-02-25 15:46:18 -08:00
Ted Nyman
52d46ddc8c Merge pull request #385 from rdeltour/xslt
XSLT as a programming language
2013-02-25 15:03:27 -08:00
Ted Nyman
188fad1814 Update samples database 2013-02-25 15:01:13 -08:00
Ted Nyman
a86ff11084 Merge pull request #405 from github/new-pygments
New pygments
2013-02-25 00:54:59 -08:00
Ted Nyman
6630f3bc4a Just name 2013-02-25 00:53:56 -08:00
Ted Nyman
2164f285f5 Bump version, add toml 2013-02-25 00:52:58 -08:00
Ted Nyman
086855fcce Merge pull request #404 from github/new-pygments
Bump to latest pygments.rb
2013-02-25 00:20:51 -08:00
Ted Nyman
33b421ff0b Bump pygments 2013-02-25 00:19:02 -08:00
Ted Nyman
36e8fe1b25 Begin 2.6.0 series 2013-02-25 00:13:57 -08:00
Ted Nyman
9696ee589e Bump to pygments.rb 0.4.0 2013-02-25 00:13:21 -08:00
Romain Deltour
f66da93e64 Remove extension from the XML (it is declared in XSLT) 2013-02-25 09:12:31 +01:00
Daniël van Eeden
d766c14305 Update lib/linguist/languages.yml
Set lexer to Text only for DOT. This hopefully fixed the failure on Travis.
2013-02-25 08:15:37 +01:00
Daniël van Eeden
5b749060a4 Update lib/linguist/languages.yml
Change sort order
2013-02-25 08:08:06 +01:00
Ted Nyman
9c76078b4f Remove extra extension list 2013-02-24 22:53:49 -08:00
Ted Nyman
c54ffa78f4 Alphabetize Pike 2013-02-24 22:53:06 -08:00
Ted Nyman
dde1addced Merge pull request #170 from johan/detect-pike-language
Added detection for the Pike language.
2013-02-24 22:50:43 -08:00
Ted Nyman
6108d53eb2 Merge pull request #400 from kevinjalbert/add-txl
Add TXL language
2013-02-24 22:49:39 -08:00
Casey Reas
7ae475a811 Put Processing language into alphabetical order, re: #383 2013-02-23 19:27:05 -08:00
Ted Nyman
c3c2c9c7fe Merge pull request #402 from PulsarBlow/language-typescript
TypeScript language support
2013-02-23 15:22:29 -08:00
Ted Nyman
f8955e919b Merge pull request #401 from jdutil/patch-2
Add deface extension support.
2013-02-23 15:21:44 -08:00
PulsarBlow
dc9ad22ec4 TypeScript language support
Signed-off-by: PulsarBlow <pulsarblow@gmail.com>
2013-02-23 23:40:40 +01:00
Jeff Dutil
e33cf5f933 Add deface extension support. 2013-02-23 16:03:51 -05:00
Kevin Jalbert
4c7b432090 Rename sample file's extension to match languages.yml 2013-02-23 13:32:36 -05:00
Ted Nyman
8afd6a1bd8 Merge pull request #342 from svenefftinge/master
languages.yml: add Xtend
2013-02-23 10:21:01 -08:00
Kevin Jalbert
7725bbb36b Add TXL language
Add:
 * TXL language
 * Sample TXL file
2013-02-23 13:19:10 -05:00
Ted Nyman
333d9cfffb Merge pull request #399 from BPScott/add-editorconfig
Add .editorconfig as an INI file
2013-02-23 10:18:40 -08:00
Ben Scott
495b50cbda Add .editorconfig as an INI file
See http://editorconfig.org
2013-02-23 16:27:24 +00:00
Sven Efftinge
fe8dbd662b Update lib/linguist/languages.yml
added primary_extension: .xtend
2013-02-23 13:50:04 +01:00
Illya
cdde73f5ee The extension list is alphabetized 2013-02-23 12:51:59 +02:00
Ted Nyman
05c49245b0 Fix whitespace 2013-02-23 02:39:44 -08:00
Ted Nyman
0955dd2ef0 Merge pull request #278 from DrItanium/master
Add support for the CLIPS programming language
2013-02-23 02:38:50 -08:00
Ted Nyman
6c5a9e97fe Merge pull request #376 from evanmiller/detect-opencl
Treat .opencl files as OpenCL
2013-02-23 02:37:44 -08:00
Ted Nyman
e5d2795ec0 Alphabetize 2013-02-23 02:29:17 -08:00
Ted Nyman
61aa378c45 Remove extra lexer 2013-02-23 02:26:10 -08:00
Ted Nyman
db296bee80 Merge pull request #318 from stuarthalloway/master
Datomic DTM files
2013-02-23 02:25:27 -08:00
Ted Nyman
3e091eacc2 Merge pull request #397 from unnali/rouge
Rouge
2013-02-22 19:47:40 -08:00
Arlen Christian Mart Cuss
b2303eac1e Add Rouge. 2013-02-23 14:13:12 +11:00
Arlen Christian Mart Cuss
c01e347bc0 Correct documentation, README grammar. 2013-02-23 14:13:12 +11:00
Ted Nyman
6d8583a0b4 Merge pull request #395 from featurist/master
add PogoScript language (no samples.json!)
2013-02-22 11:30:36 -08:00
Michele Mastropietro
c85255c5af Added matlab code samples.
All of these code samples currently are mis-identified in my repositories. I'm
donating them to the cause.
2013-02-22 10:57:51 +01:00
Tim Macfarlane
5fac67cea5 add PogoScript detection 2013-02-22 09:31:06 +00:00
Johan Sundström
7b9e0afef9 Reverted pike tests until such time as we have a pike lexer here. 2013-02-21 23:23:00 -08:00
Ted Nyman
b45c4f5379 Merge pull request #335 from rofl0r/dpryml
languages.yml: add .dpr and .dfm extension to Delphi
2013-02-21 22:58:44 -08:00
Ted Nyman
1fa4ed6bc2 Merge pull request #255 from seanupton/master
Syntax highlighting (XML) for Zope .zcml and .pt files
2013-02-21 22:49:18 -08:00
Ted Nyman
2d16f863f7 Revert "Merge pull request #171 from ianmjones/patch-1"
This reverts commit f5ebbd42d3, reversing
changes made to b998a5c282.
2013-02-21 22:09:59 -08:00
Ted Nyman
f5ebbd42d3 Merge pull request #171 from ianmjones/patch-1
Added REALbasic language.
2013-02-21 22:04:53 -08:00
Ted Nyman
b998a5c282 Merge pull request #239 from db0company/master
Add .eliom extension for Ocsigen (OCaml web framework)
2013-02-21 22:01:03 -08:00
Ted Nyman
58a9b56f4d Merge pull request #253 from Tass/master
Binary mime type override if languages.yml says so
2013-02-21 21:49:09 -08:00
Ted Nyman
3ceae6b5c1 Merge pull request #164 from michaelmior/master
Add Awk lexer
2013-02-21 21:41:56 -08:00
Ted Nyman
2612ea35bc Merge pull request #259 from afronski/master
Adding vendor files for django (admin_media) and SyntaxHightlighter JavaScript library
2013-02-21 21:28:37 -08:00
Ted Nyman
5bf2299461 Alphabetize python extensions 2013-02-20 16:38:55 -08:00
Kevin Sawicki
b26e4a7556 Add .gyp to Python extensions 2013-02-20 16:36:10 -08:00
Ted Nyman
c9bd6096b9 Merge pull request #364 from zacstewart/ragel-ruby
Add Ragel Ruby to languages
2013-02-20 16:27:32 -08:00
Ted Nyman
7d50697701 Merge pull request #390 from boredomist/patch-1
Add ASDF files to Common Lisp
2013-02-17 21:02:31 -08:00
Erik Price
e2314b57fe Alphabetize Common Lisp extensions. 2013-02-17 22:59:44 -06:00
Baptiste Fontaine
055743f886 More Forth samples. 2013-02-18 00:21:46 +01:00
Erik Price
152151bd44 Add ASDF files to Common Lisp 2013-02-17 13:50:48 -06:00
Ted Nyman
2431f2120c Merge pull request #388 from tinnet/master
Added Monkey Language
2013-02-16 18:08:29 -08:00
Tinnet Coronam
6a8e14dcf3 added monkey language (new in pygments 1.6) 2013-02-16 18:01:47 +01:00
Ted Nyman
a07d6f82ee Bump to 2.5.1 2013-02-15 18:48:32 -08:00
Ted Nyman
116d158336 Update samples.json 2013-02-15 18:48:05 -08:00
Ted Nyman
4863d16657 Bump to 2.5.0 2013-02-15 17:35:03 -08:00
Romain Deltour
da97f1af28 added XML lexer 2013-02-15 11:27:59 +01:00
Romain Deltour
6a03ea048b New language: XProc - an XML Pipeline language (W3C) 2013-02-15 11:22:35 +01:00
Romain Deltour
7924d0d8f8 XSLT as a programming language 2013-02-15 11:05:45 +01:00
Ted Nyman
781cd4069c Merge pull request #384 from ruv/more-forth-extenstions
Add .4th as alternate Forth file extension
2013-02-14 15:54:50 -08:00
ruv
505a361d98 '.4th' is also often used for the Forth language 2013-02-15 01:51:37 +04:00
Kevin Sawicki
c493c436da Register TextMate extensions as XML 2013-02-13 10:32:25 -08:00
Casey Reas
fb7c97c83f Samples for Processing language, changes to languages.yml 2013-02-13 09:12:30 -08:00
Sven Efftinge
b13001c5cc Added samples for Xtend 2013-02-13 08:33:22 +01:00
Ted Nyman
4e916ce94b Merge pull request #380 from github/tml
Add tapesty (.tml) to XML
2013-02-11 16:11:56 -08:00
Ted Nyman
1fad3be12a Add tapesty (.tml) to XML 2013-02-11 16:10:31 -08:00
Ted Nyman
6b688ba696 Merge pull request #251 from ptrv/add-scd-supercollider-extension
Add .scd extension to SuperCollider.
2013-02-11 16:05:53 -08:00
Ted Nyman
48d8919043 Merge pull request #359 from ntkme/master
Add fish support (.fish)
2013-02-11 16:03:46 -08:00
Michael Mior
1877c8c383 Add Awk lexer and sample 2013-02-08 14:19:26 -05:00
Evan Miller
5f6d74d849 Treat .opencl files as OpenCL 2013-02-07 18:24:36 -06:00
なつき
72ae6cd8ca Add fish support 2013-02-04 02:08:50 +08:00
Vincent Woo
8457f6397d Add extension support for Iced Coffeescript 2013-02-03 04:23:37 -08:00
Ted Nyman
24820ed935 Merge pull request #372 from github/more-shell-extensions
Add .bash and .tmux as alternate shell extensions
2013-02-01 15:00:41 -08:00
Ted Nyman
ad6947eeb4 Add .bash and .tmux as alternate shell extensions 2013-02-01 22:58:58 +00:00
Ted Nyman
9c27ec0313 Alphabetize verilog extension list 2013-02-01 22:30:01 +00:00
Ted Nyman
7a21d66877 Merge pull request #360 from skalnik/add-solid-support
Add Blob#solid? helper
2013-02-01 14:24:36 -08:00
Ted Nyman
7c1265cd2d Merge pull request #368 from cjdrake/master
Add Verilog (.vh) and SystemVerilog (.sv, .svh) filename extensions
2013-02-01 14:23:17 -08:00
Ted Nyman
6d73ae58b6 Regenerate samples.json 2013-02-01 22:17:44 +00:00
Ted Nyman
2d9d6f5669 Merge pull request #367 from moorepants/matlab-samples
Added matlab code samples.
2013-02-01 14:13:01 -08:00
Chris Drake
0a49062a02 Add Verilog/SystemVerilog filename extensions
Most Verilog files use the *.vh extension for header files.

Since the IEEE 1800-2009 SystemVerilog standard, it is common for
hardware and verification files written using the newer language
constructs to use the *.sv extension for design elements, and *.svh for
headers.
2013-01-30 22:02:31 -08:00
Jason Moore
04bab94c89 Removed copyrighted file. 2013-01-30 13:36:33 -08:00
Jason Moore
9bb230d7c8 Added matlab code samples.
All of these code samples currently are mis-identified in my repositories. I'm
donating them to the cause.
2013-01-30 13:12:45 -08:00
Ted Nyman
121f096173 Merge pull request #357 from uo-hrsys/patch-2
Add dita file extention to the XML type
2013-01-27 22:39:17 -08:00
Ted Nyman
c06f3fbc57 Merge pull request #358 from nicolasdanet/maxmsp
Added Max/MSP extensions in languages.yml
2013-01-27 22:31:21 -08:00
Ted Nyman
831f8a1f1f Merge pull request #361 from mattdbridges/patch-1
Adding homepage to gemspec
2013-01-27 22:19:34 -08:00
Zac Stewart
5e4623a44a Rename ragel ruby samples to match language name 2013-01-22 17:43:08 -05:00
Zac Stewart
1a60a00d3e Add Ragel Ruby to languages 2013-01-21 21:38:40 -05:00
Daniël van Eeden
08eef5f110 Update lib/linguist/languages.yml
Add .gv (GraphViz) file extension to DOT language.
2013-01-21 12:18:16 +01:00
Daniël van Eeden
0e2d3a2ac1 Update lib/linguist/languages.yml
Add DOT language: http://www.graphviz.org/content/dot-language
2013-01-20 14:06:36 +01:00
Matt Bridges
f852df397b Adding homepage to gemspec 2013-01-18 12:58:44 -06:00
Mike Skalnik
041ab041ae Add binary & ascii STLs and OBJs 2013-01-17 14:15:01 -08:00
nicolasdanet
ad9a57f8f9 Added Max/MSP extensions in languages.yml 2013-01-17 08:08:10 +01:00
Human Resources
b2bf4b0bd9 Add dita file extention to the XML type
2nd try. Add dita file extention to the XML markup.
DITA is the OASIS Darwin Information Typing Architecture used for technical documentation.
@see https://www.oasis-open.org/committees/tc_home.php?wg_abbrev=dita
2013-01-16 10:21:09 -05:00
Kevin Sawicki
c625642845 Add .cson to CoffeeScript extensions 2013-01-15 09:44:04 -08:00
Naoki Orii
35e077ce86 Detect cython-generated files 2013-01-12 23:48:04 -05:00
herbertkoelman
7839459607 Merge branch 'master' of https://github.com/github/linguist 2013-01-11 00:43:24 +01:00
Illya
212be40710 .ui file extension added for XML language
Qt uses .ui files to store qtdesinger ui in xml
2013-01-10 02:06:40 +02:00
Stuart Halloway
dc8685f918 remove redundant specification 2013-01-09 08:18:10 -05:00
Ted Nyman
75072ae5cc README code fencing 2013-01-08 17:11:16 -08:00
Ted Nyman
3edd765076 Merge pull request #232 from strangewarp/patch-1
Add .pd_lua extension for Lua
2013-01-08 16:25:55 -08:00
Ted Nyman
1d66e593e2 Merge pull request #346 from github/remove-extra-extensions
Remove extra extensions
2013-01-08 16:14:49 -08:00
C.D. Madsen
b8bafd246e Add examples of .pd_lua files
Added examples of .pd_lua files, which create Lua objects that are
interpreted by PureData.
2013-01-08 15:50:31 -07:00
Ted Nyman
95c822457a Merge pull request #231 from bfontaine/master
Detection added for Forth & Omgrofl
2013-01-08 04:37:18 -08:00
Ted Nyman
26df1034ec Merge pull request #221 from fkg/master
Add new extensions to lib/linguist/languages.yml
2013-01-08 04:31:44 -08:00
Ted Nyman
c495d19540 Merge pull request #222 from tiwe-de/master
ignore Debian packaging
2013-01-08 04:23:17 -08:00
Ted Nyman
b405847573 Merge pull request #261 from justinclift/typofixes
Trivial typo fixes.
2013-01-08 04:21:28 -08:00
Ted Nyman
1abcb2edb7 Merge pull request #246 from leafo/master
Add MoonScript
2013-01-08 04:15:37 -08:00
Ted Nyman
e3669d2bb6 Keep bash alias 2013-01-07 19:08:29 -08:00
Ted Nyman
1b9a49e226 Add field for Ada 2013-01-07 19:04:56 -08:00
Ted Nyman
0ee716b1e9 Fix up batchfile extension 2013-01-07 19:03:40 -08:00
Ted Nyman
9469f481f3 Keep cmake extensions field 2013-01-07 19:00:44 -08:00
Ted Nyman
acc190bb04 Remove extensions if we already have the primary_extension 2013-01-07 18:59:18 -08:00
leaf corcoran
5953e22efb drop extra extension information for MoonScript 2013-01-07 18:57:36 -08:00
Ted Nyman
2c26486588 Merge pull request #324 from paulmillr/topics/livescript
Add LiveScript support.
2013-01-07 18:50:01 -08:00
leaf corcoran
e9d2c0cf28 add MoonScript sample 2013-01-07 18:49:02 -08:00
Paul Miller
a35c3ca739 Change LiveScript colour. 2013-01-08 04:43:54 +02:00
Ted Nyman
0ee2f17a61 Merge pull request #344 from BPScott/add-less
Add LESS support (.less)
2013-01-07 18:39:16 -08:00
Ben Scott
83ce189a82 Add LESS support (.less)
Cheating slightly as it uses the CSS lexer, as pygments currently does
not have a dedicated less lexer. But I figure language recognition and
90% percent correct syntax highlighting is better than neither.
2013-01-07 15:52:09 +00:00
Sven Efftinge
c97e112c72 Added Xtend (xtend-lang.org) to languages.yml 2013-01-06 19:39:56 +01:00
Paul Miller
eee124f6c6 Add LiveScript support. 2013-01-03 22:45:08 +02:00
Ted Nyman
adc9246f66 Merge branch 'lasso' 2013-01-02 14:12:58 -08:00
Steve Piercy
560555bcd8 sorted extensions for Lasso in lib/linguist/languages.yml 2013-01-02 14:09:09 -08:00
Steve Piercy
900a6bc2b8 add extensions for Lasso in lib/linguist/languages.yml 2013-01-02 14:09:09 -08:00
Steve Piercy
3613d09c38 add Ecl to lib/linguist/languages.yml 2013-01-02 14:09:08 -08:00
Ted Nyman
02749dd5cf Merge pull request #331 from github/latest-pygments
Pessimistic versioning for pygments.rb, and bump to latest
2013-01-02 13:56:31 -08:00
Ted Nyman
abda879d5a Merge pull request #325 from greghendershott/racket-lexer
Use new Racket lexer from pygments.rb 0.3.3
2013-01-02 13:48:49 -08:00
rofl0r
d2e909677b languages.yml: rearrange .dpr and .dfm 2013-01-02 15:27:28 +01:00
rofl0r
baa42daae8 languages.yml: add .dpr and .dfm extension to Delphi
.dfm is Delphi formulars
.dpr is the main source file, before any .pas.

if your Delphi app does not use any formulars or units
(e.g. console app), there is basically only one .dpr file.
2013-01-02 15:03:24 +01:00
Greg Hendershott
0b2465482a Update test: Racket language uses Racket lexer.
This is https://github.com/greghendershott/linguist/pull/1 from @tnm.
That pull request is onto my master branch, not my `racket-lexer`
topic branch. If there is a way to accept the pull request onto my
topic branch, I don't have time to figure it out right now. As a
result I'm making my own commit.
2013-01-02 07:46:58 -05:00
Ted Nyman
453a097c22 Pessimistic versioning for pygments, and bump to latest 2013-01-02 02:42:12 -08:00
Steve Piercy
4b26a56e64 Merge remote branch 'upstream/master' into lasso 2013-01-02 02:33:35 -08:00
Steve Piercy
c1d54db2cc One more try to pass Travis build. Crossing fingers... 2013-01-02 02:20:09 -08:00
Ted Nyman
bcaeb5d464 Fix readme link 2013-01-02 02:12:17 -08:00
Ted Nyman
d65bbfbe8d Update README.md 2013-01-02 01:28:37 -08:00
Steve Piercy
4c9b16aa08 Forcing another Travis build, now that GitHub's pygments.rb is at v 0.3.5. See https://github.com/github/linguist/pull/325#issuecomment-11802593 2013-01-02 01:23:31 -08:00
Greg Hendershott
8355f5031a Use new Racket lexer from pygments.rb 0.3.3
Racket files had been using the Scheme lexer.
2012-12-28 22:28:22 -05:00
Michael Hendricks
c794c6e24b Add a misclassified Prolog file
This Prolog file was misclassified as Perl.  I assume linguist
was confused because the file has many comments.  Nevertheless,
there are plenty of Prolog-distinguishing tokens such as `:-`,
`module`, `%%`, capitalized variables names, `foo/2`, etc.
2012-12-22 13:08:06 -08:00
Herbert Koelman
611b790a2c Merge remote-tracking branch 'upstream/master' 2012-12-20 22:21:20 +01:00
Stuart Halloway
78708df79d better: edn is generic 2012-12-18 09:16:59 -05:00
Stuart Halloway
54a4af75b5 (BFDD) build-system failure driven development 2012-12-18 08:59:39 -05:00
Stuart Halloway
72d698ebaa Datomic dtm files 2012-12-18 08:11:44 -05:00
Steve Piercy
209f9f0072 Force Travis run 2012-12-18 00:50:48 -08:00
Steve Piercy
93457746ac Merge remote branch 'upstream/master' into lasso 2012-12-18 00:49:00 -08:00
Joshua Peek
2696a9c5e7 Linguist 2.4.0 2012-12-10 09:47:42 -06:00
Joshua Peek
7c170972a0 Add shell samples 2012-12-10 09:45:54 -06:00
Joshua Peek
d00dfd82c1 Add samples for apache and nginx confs 2012-12-10 09:37:42 -06:00
Joshua Peek
9003139119 Can't have 2 same primary extensions 2012-12-10 09:30:55 -06:00
Joshua Peek
36e867ec76 Require newer pygments 2012-12-10 09:18:35 -06:00
Joshua Peek
cf4813979c Remove already defined extensions 2012-12-10 09:14:19 -06:00
Joshua Peek
7e12c3eff1 Update samples 2012-12-10 09:13:14 -06:00
Joshua Peek
281cc985bf Merge pull request #288 from wagenet/handlebars
Add Handlebars
2012-12-10 07:06:59 -08:00
Joshua Peek
dcc2be0781 Merge branch 'master' into dont-explode-on-invalid-shebang
Conflicts:
	lib/linguist/samples.json
	test/test_tokenizer.rb
2012-12-10 09:02:24 -06:00
Joshua Peek
161d076bfd Remove duplicate extension 2012-12-10 09:00:17 -06:00
Joshua Peek
09fbcc9a72 Merge pull request #298 from johanatan/master
Adds Elm.
2012-12-10 06:58:32 -08:00
Joshua Peek
ee2b92cf82 Merge pull request #307 from mislav/aliases
A couple of useful language aliases
2012-12-10 06:55:09 -08:00
Herbert Koelman
3511380c72 Added to vendor.yml the following dependencies related to automake and autoconf:
- (^|/)configure
- (^|/)configure.ac
- (^|/)config.guess
- (^|/)config.sub

Before changing:
[herbert@vps11071 linguist]$ bundle exec linguist ../atmi++/
75%  Shell
15%  C++
10%  C
0%   Perl

After changing:
54%  C++
37%  C
9%   Shell
0%   Perl
2012-12-10 00:07:20 +01:00
Steve Piercy
38736a2db9 force travis update 2012-12-07 02:33:23 -08:00
Mislav Marohnić
720914b290 add filename tests for shell config files 2012-12-06 23:54:22 +01:00
Daniel Micay
16f8e54ed7 detect common shell config files 2012-12-06 23:53:55 +01:00
Andy Li
50ecb63058 haXe is now "Haxe"
According to https://groups.google.com/forum/#!topic/haxelang/O7PB-ZrX4i4/discussion

The lexer in Pygments is not renamed yet, so just stay as is at the moment.
2012-12-06 23:42:04 +01:00
Tobin Fricke
586650f01c add .C and .H as file extensions for C++
"C" and "H" are two file extensions recognized by gcc as indicating C++
source code. The full list may be found here:
http://gcc.gnu.org/onlinedocs/gcc-4.4.1/gcc/Overall-Options.html#index-file-name-suffix-71
2012-12-06 23:28:32 +01:00
Mislav Marohnić
ae753e6e88 add Nginx language 2012-12-06 23:25:54 +01:00
Mislav Marohnić
04a2845e91 add ApacheConf language
Recognizes httpd/apache2.conf and .htaccess files
2012-12-06 23:25:29 +01:00
Mislav Marohnić
acb20d95ca "coffee-script" ☞ CoffeeScript 2012-12-06 23:04:53 +01:00
Steve Piercy
5a9ef5eac2 Merge remote branch 'upstream/master' into lasso
Conflicts:
	lib/linguist/languages.yml
2012-12-05 12:55:30 -08:00
Steve Piercy
287e1b855d Forcing travis check 2012-12-05 12:30:06 -08:00
Mislav Marohnić
d3ebe1844d add HTTP language
Useful for `curl -i` dumps. Had to add primary_extension although this
data is usually not saved in files, but shown as code blocks.
2012-12-04 16:26:11 +01:00
Mislav Marohnić
fc8492e8f7 "yml" ☞ YAML 2012-12-04 16:11:52 +01:00
Mislav Marohnić
ff5ffd0482 "rss/xsd/xsl/wsdl" ☞ XML 2012-12-04 16:11:52 +01:00
Mislav Marohnić
50db6d0150 "latex" ☞ TeX 2012-12-04 16:11:52 +01:00
Mislav Marohnić
2e0b854428 "obj-j" ☞ Objective-J 2012-12-04 16:11:52 +01:00
Mislav Marohnić
1dfb44cff7 "obj-c/objc" ☞ Objective-C 2012-12-04 16:11:51 +01:00
Mislav Marohnić
0a8fad2040 "make" ☞ Makefile 2012-12-04 16:11:51 +01:00
Mislav Marohnić
9b97d3ac8a "erb" ☞ RHTML 2012-12-04 16:11:51 +01:00
Mislav Marohnić
26e78c0c1b "xhtml" ☞ HTML 2012-12-04 16:11:51 +01:00
Joshua Peek
b036e8d3c2 Merge pull request #305 from DominikTo/php-cli
Fixed detection of PHP CLI scripts (added samples)
2012-12-02 07:54:14 -08:00
Dominik Tobschall
f84a904ad8 fixed typo 2012-12-02 14:11:04 +01:00
Dominik Tobschall
b1684037d6 added php cli samples 2012-12-02 14:05:52 +01:00
Jonathan Leonard
1c85d0b38a Added Elm. 2012-11-25 20:39:58 -08:00
Michael Ding
97c998946b determine image with downcase extname 2012-11-22 20:30:59 +08:00
Michael Ding
8529c90a4d use downcase string for extname 2012-11-22 17:14:45 +08:00
Ben Lavender
ec3434cf1d Don't explode on invalid shebang 2012-11-18 20:56:06 -06:00
Peter Wagenet
0e20f6d454 Added Handlebars language 2012-11-12 17:16:18 -08:00
Joshua Scoggins
696573b14c Fixed an issue where the lexer was not explicitly stated for CLIPS 2012-10-22 00:00:08 -07:00
Joshua Scoggins
fbb31f018c Added support for the CLIPS programming language
CLIPS or C language integrated production system is a tool for writing expert
systems.
2012-10-21 23:46:09 -07:00
Joshua Peek
d92d208a45 Fix tests for pygments.rb 0.3.x 2012-10-07 15:39:02 -05:00
Joshua Peek
b798e28bfb No warnings 2012-10-07 15:37:09 -05:00
Joshua Peek
ebd6077cd7 Add wrap flag to text languages 2012-10-07 15:34:13 -05:00
Joshua Peek
9e9500dfa9 Linguist 2.3.4 2012-09-24 10:54:17 -05:00
Joshua Peek
04cc100fba Rebuild samples db 2012-09-24 10:52:05 -05:00
Joshua Peek
31e33f99f2 Ensure lang is skipped on any binary file 2012-09-24 10:51:39 -05:00
Joshua Peek
7c51b90586 Skip empty sample 2012-09-24 10:50:49 -05:00
Joshua Peek
2b36f73da6 Some comments are triggering charlock binary 2012-09-24 10:48:22 -05:00
Joshua Peek
d96dd473b8 Rebuild samples db 2012-09-24 10:12:18 -05:00
Joshua Peek
f9066ffb7b Sort exts and filenames 2012-09-24 10:12:05 -05:00
Joshua Peek
945941d529 Update samples db 2012-09-24 10:07:58 -05:00
Joshua Peek
10e875e899 Print out samples db diffs 2012-09-24 10:07:08 -05:00
Justin Clift
7f87d22d78 Trivial typo fixes. 2012-09-22 20:32:56 +10:00
Wojciech Gawroński
d890b73c2f Adding vendor files for SyntaxHighlighter and django (admin_media directory). 2012-09-21 13:57:08 +02:00
Justin Palmer
d24e5c938e sample directory needs uppercase E 2012-09-20 15:23:58 -07:00
Justin Palmer
aa069a336f add color to ecl language 2012-09-20 15:16:06 -07:00
Justin Palmer
662fc2ee9d Merge remote-tracking branch 'rengolin/ecl' 2012-09-20 15:07:41 -07:00
Sean Upton
eca1f61dab Merge branch 'master' of github.com:seanupton/linguist 2012-09-18 14:28:01 -06:00
Sean Upton
4126d0e445 Added extensions to languages.yml for XML highlighting of Zope Page Templates (.pt) and Zope Configuration Markup Language (.zcml). 2012-09-18 14:27:36 -06:00
Sean Upton
1d3cffc6dd Added extensions to languages.xml for XML highlighting of Zope Page Templates (.pt) and Zope Configuration Markup Language (.zcml). 2012-09-18 14:24:35 -06:00
Simon Hafner
675d0865da fixed typo 2012-09-13 14:56:44 -05:00
Simon Hafner
b954d22eba Override for binary mime type based on languages.yml
If the extension already exists in languages.yml, it's probably not a
binary, but code.
2012-09-13 14:55:31 -05:00
Ryan Tomayko
567cd6ef68 Merge pull request #250 from github/mac-format
Handle Mac Format when splitting lines
2012-09-11 14:17:21 -07:00
ptrv
01981c310d Add .scd extension to SuperCollider. 2012-09-11 00:26:54 +02:00
Ryan Tomayko
887a050db9 Only search the first 4K chars for \r 2012-09-10 01:56:08 -07:00
Ryan Tomayko
bda895eaae Test Mac Format detection and line splitting 2012-09-10 01:52:30 -07:00
Ryan Tomayko
2e49c06f47 Handle Mac Format when splitting lines 2012-09-10 01:05:48 -07:00
Joshua Peek
ae137847b4 Linguist 2.3.3 2012-09-04 09:32:21 -05:00
Scott J. Goldman
5443dc50a3 Merge pull request #247 from github/check-size-first
When testing if a blob is indexable or safe to colorize, check size first
2012-09-02 00:09:51 -07:00
Scott J. Goldman
fc435a2541 Linguist 2.3.2 2012-09-02 00:08:37 -07:00
Scott J. Goldman
04394750e7 When testing if a blob is safe to colorize, check size first
Similar to e415a13
2012-09-02 00:08:37 -07:00
Scott J. Goldman
e415a1351b When testing if a blob is indexable, check size first
Otherwise, charlock_holmes will allocate another large binary
buffer for testing the encoding, which is a problem if the binary
blob is many hundreds of MB large. It'll just fail and crash ruby.
2012-08-31 22:47:19 -07:00
leaf corcoran
0ff50a6b02 add MoonScript (again) 2012-08-29 21:18:50 -07:00
Joshua Peek
6ec907a915 Merge pull request #245 from jcazevedo/master
Add Shell sample
2012-08-28 10:55:11 -07:00
Joao Azevedo
1f55f01fa9 Add Shell sample 2012-08-28 18:01:46 +01:00
db0
e857b23429 .eliom extension in OCaml extensions properly sorted 2012-08-27 12:16:47 +02:00
db0
09c76246f6 Add .eliom extension for Ocsigen (OCaml web framework) 2012-08-27 11:41:43 +02:00
Steve Piercy
31d6b110d2 Add more samples with listed extensions. Remove extension specification. Clarify comments at top of languages.yml. 2012-08-19 16:49:20 -07:00
Steve Piercy
29a0db402c Lasso lexer name added 2012-08-19 06:47:02 -07:00
Steve Piercy
21a7fe9f12 Lasso extentions sorted 2012-08-19 06:40:15 -07:00
Steve Piercy
3b558db518 adding Lasso language and sample files 2012-08-19 06:29:16 -07:00
C.D. Madsen
44066fbb0b Add .pd_lua extension for Lua
.pd_lua is the required extension for any Lua files written to directly communicate with Puredata, via the pdlua library.
2012-08-18 06:14:41 -06:00
Baptiste Fontaine
0c2794e9de Forth extensions sorted 2012-08-17 18:09:06 +02:00
Baptiste Fontaine
69a9ac9366 Forth & Omgrofl lexers set to Text Only 2012-08-17 18:03:09 +02:00
Baptiste Fontaine
59e199d0c3 Detection added for Forth & Omgrofl 2012-08-17 16:52:00 +02:00
Timo Weingärtner
a572b467b4 testcase for 90f1ba9 2012-08-15 02:11:15 +03:00
Timo Weingärtner
90f1ba95a4 lib/linguist/vendor.yml: ignore Debian packaging
This should prevent files like debian/$package.cron.d from being recognized as D source.
2012-08-15 02:07:53 +03:00
fkg
286c8a1b4a Added .ccxml, .grxml, .scxml, .vxml to the XML syntax group 2012-08-14 12:00:07 -07:00
Renato Golin
da6cf8dbb4 Add ECL programming language and test 2012-07-12 09:09:32 +01:00
Ian M. Jones
a41631d9fa Added REALbasic language. 2012-06-06 23:37:47 +02:00
Johan Sundström
645f4d6194 Added detection for the Pike language:
http://pike.ida.liu.se/
2012-06-06 00:02:47 -07:00
Laurent Parenteau
46cde87c09 Fixed M lexer name. Merged with upstream's latest changes. 2012-05-22 13:43:47 -04:00
Laurent Parenteau
91364a9769 Improved comment. 2012-05-14 09:56:00 -04:00
Laurent Parenteau
23b6b4c499 Use Common Lisp lexer for M syntax highlighting, which gives pretty good results. 2012-04-27 10:09:37 -04:00
Laurent Parenteau
1e34faa920 Improved M detection to be more specific. 2012-03-28 20:30:24 -04:00
Laurent Parenteau
e0190a5a6e Added detection for the new M (aka MUMPS) language. 2012-03-27 11:47:52 -04:00
188 changed files with 71981 additions and 1165 deletions

View File

@@ -1,2 +1,2 @@
-source :rubygems
+source 'https://rubygems.org'
 gemspec

View File

@@ -10,13 +10,16 @@ Linguist defines the list of all languages known to GitHub in a [yaml file](http
Most languages are detected by their file extension. This is the fastest and most common situation.
For disambiguating between files with common extensions, we use a [bayesian classifier](https://github.com/github/linguist/blob/master/lib/linguist/classifier.rb). For an example, this helps us tell the difference between `.h` files which could be either C, C++, or Obj-C.
For disambiguating between files with common extensions, we use a [Bayesian classifier](https://github.com/github/linguist/blob/master/lib/linguist/classifier.rb). For an example, this helps us tell the difference between `.h` files which could be either C, C++, or Obj-C.
In the actual GitHub app we deal with `Grit::Blob` objects. For testing, there is a simple `FileBlob` API.
Linguist::FileBlob.new("lib/linguist.rb").language.name #=> "Ruby"
```ruby
Linguist::FileBlob.new("bin/linguist").language.name #=> "Ruby"
Linguist::FileBlob.new("lib/linguist.rb").language.name #=> "Ruby"
Linguist::FileBlob.new("bin/linguist").language.name #=> "Ruby"
```
See [lib/linguist/language.rb](https://github.com/github/linguist/blob/master/lib/linguist/language.rb) and [lib/linguist/languages.yml](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml).
@@ -24,7 +27,7 @@ See [lib/linguist/language.rb](https://github.com/github/linguist/blob/master/li
The actual syntax highlighting is handled by our Pygments wrapper, [pygments.rb](https://github.com/tmm1/pygments.rb). It also provides a [Lexer abstraction](https://github.com/tmm1/pygments.rb/blob/master/lib/pygments/lexer.rb) that determines which highlighter should be used on a file.
We typically run on a prerelease version of Pygments, [pygments.rb](https://github.com/tmm1/pygments.rb), to get early access to new lexers. The [lexers.yml](https://github.com/github/linguist/blob/master/lib/linguist/lexers.yml) file is a dump of the lexers we have available on our server.
We typically run on a pre-release version of Pygments, [pygments.rb](https://github.com/tmm1/pygments.rb), to get early access to new lexers. The [languages.yml](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml) file is a dump of the lexers we have available on our server.
### Stats
@@ -32,10 +35,11 @@ The Language Graph you see on every repository is built by aggregating the langu
The repository stats API can be used on a directory:
project = Linguist::Repository.from_directory(".")
project.language.name #=> "Ruby"
project.languages #=> { "Ruby" => 0.98,
"Shell" => 0.02 }
```ruby
project = Linguist::Repository.from_directory(".")
project.language.name #=> "Ruby"
project.languages #=> { "Ruby" => 0.98, "Shell" => 0.02 }
```
These stats are also printed out by the binary. Try running `linguist` on itself:
@@ -46,17 +50,21 @@ These stats are also printed out by the binary. Try running `linguist` on itself
Checking other code into your git repo is a common practice. But this often inflates your project's language stats and may even cause your project to be labeled as another language. We are able to identify some of these files and directories and exclude them.
Linguist::FileBlob.new("vendor/plugins/foo.rb").vendored? # => true
```ruby
Linguist::FileBlob.new("vendor/plugins/foo.rb").vendored? # => true
```
See [Linguist::BlobHelper#vendored?](https://github.com/github/linguist/blob/master/lib/linguist/blob_helper.rb) and [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml).
#### Generated file detection
Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an extra bonus, these files are suppressed in Diffs.
Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an extra bonus, these files are suppressed in diffs.
Linguist::FileBlob.new("underscore.min.js").generated? # => true
```ruby
Linguist::FileBlob.new("underscore.min.js").generated? # => true
```
See [Linguist::BlobHelper#generated?](https://github.com/github/linguist/blob/master/lib/linguist/blob_helper.rb).
See [Linguist::Generated#generated?](https://github.com/github/linguist/blob/master/lib/linguist/generated.rb).
## Installation
@@ -76,10 +84,12 @@ To run the tests:
The majority of patches won't need to touch any Ruby code at all. The [master language list](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml) is just a configuration file.
We try to only add languages once they have a some usage on GitHub, so please note in-the-wild usage examples in your pull request.
Almost all bug fixes or new language additions should come with some additional code samples. Just drop them under [`samples/`](https://github.com/github/linguist/tree/master/samples) in the correct subdirectory and our test suite will automatically test them. In most cases you shouldn't need to add any new assertions.
### Testing
Sometimes getting the tests running can be to much work especially if you don't have much Ruby experience. Its okay, be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.
Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay, be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.
Heres our current build status, which is hopefully green: [![Build Status](https://secure.travis-ci.org/github/linguist.png?branch=master)](http://travis-ci.org/github/linguist)
Here's our current build status, which is hopefully green: [![Build Status](https://secure.travis-ci.org/github/linguist.png?branch=master)](http://travis-ci.org/github/linguist)

View File

@@ -3,9 +3,7 @@ require 'rake/testtask'
task :default => :test
Rake::TestTask.new do |t|
t.warning = true
end
Rake::TestTask.new
task :samples do
require 'linguist/samples'

View File

@@ -1,17 +1,19 @@
Gem::Specification.new do |s|
s.name = 'github-linguist'
s.version = '2.3.1'
s.version = '2.7.0'
s.summary = "GitHub Language detection"
s.authors = "GitHub"
s.authors = "GitHub"
s.homepage = "https://github.com/github/linguist"
s.files = Dir['lib/**/*']
s.executables << 'linguist'
s.add_dependency 'charlock_holmes', '~> 0.6.6'
s.add_dependency 'escape_utils', '~> 0.2.3'
s.add_dependency 'escape_utils', '~> 0.3.1'
s.add_dependency 'mime-types', '~> 1.19'
s.add_dependency 'pygments.rb', '>= 0.2.13'
s.add_dependency 'pygments.rb', '~> 0.4.2'
s.add_development_dependency 'mocha'
s.add_development_dependency 'json'
s.add_development_dependency 'rake'
s.add_development_dependency 'yajl-ruby'

View File

@@ -8,6 +8,12 @@ require 'pygments'
require 'yaml'
module Linguist
# DEPRECATED Avoid mixing into Blob classes. Prefer functional interfaces
# like `Language.detect` over `Blob#language`. Functions are much easier to
# cache and compose.
#
# Avoid adding additional bloat to this module.
#
# BlobHelper is a mixin for Blobish classes that respond to "name",
# "data" and "size" such as Grit::Blob.
module BlobHelper
@@ -58,6 +64,15 @@ module Linguist
_mime_type ? _mime_type.binary? : false
end
# Internal: Is the blob binary according to its mime type,
# overriding it if we have better data from the languages.yml
# database.
#
# Return true or false
def likely_binary?
binary_mime_type? && !Language.find_by_filename(name)
end
# Public: Get the Content-Type header value
#
# This value is used when serving raw blobs.
@@ -139,7 +154,28 @@ module Linguist
#
# Return true or false
def image?
['.png', '.jpg', '.jpeg', '.gif'].include?(extname)
['.png', '.jpg', '.jpeg', '.gif'].include?(extname.downcase)
end
# Public: Is the blob a supported 3D model format?
#
# Return true or false
def solid?
extname.downcase == '.stl'
end
# Public: Is this blob a CSV file?
#
# Return true or false
def csv?
text? && extname.downcase == '.csv'
end
# Public: Is the blob a PDF?
#
# Return true or false
def pdf?
extname.downcase == '.pdf'
end
MEGABYTE = 1024 * 1024
@@ -160,7 +196,7 @@ module Linguist
#
# Return true or false
def safe_to_colorize?
text? && !large? && !high_ratio_of_long_lines?
!large? && text? && !high_ratio_of_long_lines?
end
# Internal: Does the blob have a ratio of long lines?
@@ -204,7 +240,12 @@ module Linguist
#
# Returns an Array of lines
def lines
@lines ||= (viewable? && data) ? data.split("\n", -1) : []
@lines ||=
if viewable? && data
data.split(/\r\n|\r|\n/, -1)
else
[]
end
end
# Public: Get number of lines of code
@@ -227,7 +268,7 @@ module Linguist
# Public: Is the blob a generated file?
#
# Generated source code is supressed in diffs and is ignored by
# Generated source code is suppressed in diffs and is ignored by
# language statistics.
#
# May load Blob#data
@@ -237,36 +278,6 @@ module Linguist
@_generated ||= Generated.generated?(name, lambda { data })
end
# Public: Should the blob be indexed for searching?
#
# Excluded:
# - Files over 0.1MB
# - Non-text files
# - Langauges marked as not searchable
# - Generated source files
#
# Please add additional test coverage to
# `test/test_blob.rb#test_indexable` if you make any changes.
#
# Return true or false
def indexable?
if binary?
false
elsif extname == '.txt'
true
elsif language.nil?
false
elsif !language.searchable?
false
elsif generated?
false
elsif size > 100 * 1024
false
else
true
end
end
# Public: Detects the Language of the blob.
#
# May load Blob#data
@@ -278,7 +289,7 @@ module Linguist
if defined?(@data) && @data.is_a?(String)
data = @data
else
data = lambda { binary_mime_type? ? "" : self.data }
data = lambda { (binary_mime_type? || binary?) ? "" : self.data }
end
@language = Language.detect(name.to_s, data, mode)
@@ -302,19 +313,5 @@ module Linguist
options[:options][:encoding] ||= encoding
lexer.highlight(data, options)
end
# Public: Highlight syntax of blob without the outer highlight div
# wrapper.
#
# options - A Hash of options (defaults to {})
#
# Returns html String
def colorize_without_wrapper(options = {})
if text = colorize(options)
text[%r{<div class="highlight"><pre>(.*?)</pre>\s*</div>}m, 1]
else
''
end
end
end
end
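
As a quick illustration of the helpers this hunk adds (not taken from the diff itself): a minimal sketch using the `FileBlob` API shown in the README examples. The file names are hypothetical, and `pdf?`/`solid?` only inspect the downcased extension, so no real files are needed.

```ruby
# Illustrative sketch only -- not part of the diff above.
# Assumes FileBlob mixes in the BlobHelper methods added in this hunk;
# the file names are hypothetical.
require 'linguist/file_blob'

Linguist::FileBlob.new("report.PDF").pdf?   # => true (extname is downcased first)
Linguist::FileBlob.new("part.stl").solid?   # => true
Linguist::FileBlob.new("notes.txt").pdf?    # => false
```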

View File

@@ -14,6 +14,9 @@ module Linguist
# Classifier.train(db, 'Ruby', "def hello; end")
#
# Returns nothing.
#
# Set LINGUIST_DEBUG=1 or =2 to see probabilities per-token,
# per-language. See also dump_all_tokens, below.
def self.train!(db, language, data)
tokens = Tokenizer.tokenize(data)
@@ -40,7 +43,7 @@ module Linguist
# Public: Guess language of data.
#
# db - Hash of classifer tokens database.
# db - Hash of classifier tokens database.
# data - Array of tokens or String data to analyze.
# languages - Array of language name Strings to restrict to.
#
@@ -77,15 +80,22 @@ module Linguist
tokens = Tokenizer.tokenize(tokens) if tokens.is_a?(String)
scores = {}
if verbosity >= 2
dump_all_tokens(tokens, languages)
end
languages.each do |language|
scores[language] = tokens_probability(tokens, language) +
language_probability(language)
if verbosity >= 1
printf "%10s = %10.3f + %7.3f = %10.3f\n",
language, tokens_probability(tokens, language), language_probability(language), scores[language]
end
end
scores.sort { |a, b| b[1] <=> a[1] }.map { |score| [score[0], score[1]] }
end
# Internal: Probably of set of tokens in a language occuring - P(D | C)
# Internal: Probably of set of tokens in a language occurring - P(D | C)
#
# tokens - Array of String tokens.
# language - Language to check.
@@ -97,7 +107,7 @@ module Linguist
end
end
# Internal: Probably of token in language occuring - P(F | C)
# Internal: Probably of token in language occurring - P(F | C)
#
# token - String token.
# language - Language to check.
@@ -111,7 +121,7 @@ module Linguist
end
end
# Internal: Probably of a language occuring - P(C)
# Internal: Probably of a language occurring - P(C)
#
# language - Language to check.
#
@@ -119,5 +129,39 @@ module Linguist
def language_probability(language)
Math.log(@languages[language].to_f / @languages_total.to_f)
end
private
def verbosity
@verbosity ||= (ENV['LINGUIST_DEBUG'] || 0).to_i
end
# Internal: show a table of probabilities for each <token,language> pair.
#
# The number in each table entry is the number of "points" that each
# token contributes toward the belief that the file under test is a
# particular language. Points are additive.
#
# Points are the number of times a token appears in the file, times
# how much more likely (log of probability ratio) that token is to
# appear in one language vs. the least-likely language. Dashes
# indicate the least-likely language (and zero points) for each token.
def dump_all_tokens(tokens, languages)
maxlen = tokens.map { |tok| tok.size }.max
printf "%#{maxlen}s", ""
puts " #" + languages.map { |lang| sprintf("%10s", lang) }.join
tokmap = Hash.new(0)
tokens.each { |tok| tokmap[tok] += 1 }
tokmap.sort.each { |tok, count|
arr = languages.map { |lang| [lang, token_probability(tok, lang)] }
min = arr.map { |a,b| b }.min
minlog = Math.log(min)
if !arr.inject(true) { |result, n| result && n[1] == arr[0][1] }
printf "%#{maxlen}s%5d", tok, count
puts arr.map { |ent|
ent[1] == min ? " -" : sprintf("%10.3f", count * (Math.log(ent[1]) - minlog))
}.join
end
}
end
end
end
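
For context on the debug hook added above, a hedged usage sketch (not part of the diff): `LINGUIST_DEBUG`, `Classifier.classify`, and `Samples::DATA` all appear in the hunks on this page; the snippet being classified and the candidate language list are made up for illustration.

```ruby
# Illustrative sketch only -- not part of the diff above.
# The snippet and candidate languages are hypothetical.
require 'linguist/samples'
require 'linguist/classifier'

ENV['LINGUIST_DEBUG'] = '2'   # 1 = per-language scores, 2 = per-token table as well

results = Linguist::Classifier.classify(
  Linguist::Samples::DATA,    # pre-built token database
  "def hello; end",           # data to classify
  ["Ruby", "Python"]          # restrict to these candidates
)
results.first  # => ["Ruby", <log-probability score>] -- best guess sorted first
```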

View File

@@ -43,7 +43,7 @@ module Linguist
# Internal: Is the blob a generated file?
#
# Generated source code is supressed in diffs and is ignored by
# Generated source code is suppressed in diffs and is ignored by
# language statistics.
#
# Please add additional test coverage to
@@ -56,7 +56,8 @@ module Linguist
compiled_coffeescript? ||
xcode_project_file? ||
generated_net_docfile? ||
generated_parser?
generated_parser? ||
generated_protocol_buffer?
end
# Internal: Is the blob an XCode project file?
@@ -86,7 +87,7 @@ module Linguist
# Internal: Is the blob of JS generated by CoffeeScript?
#
# CoffeScript is meant to output JS that would be difficult to
# CoffeeScript is meant to output JS that would be difficult to
# tell if it was generated or not. Look for a number of patterns
# output by the CS compiler.
#
@@ -158,5 +159,16 @@ module Linguist
false
end
# Internal: Is the blob a C++, Java or Python source file generated by the
# Protocol Buffer compiler?
#
# Returns true of false.
def generated_protocol_buffer?
return false unless ['.py', '.java', '.h', '.cc', '.cpp'].include?(extname)
return false unless lines.count > 1
return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
end
end
end

View File

@@ -73,7 +73,7 @@ module Linguist
#
# Returns Language or nil.
def self.detect(name, data, mode = nil)
# A bit of an elegant hack. If the file is exectable but extensionless,
# A bit of an elegant hack. If the file is executable but extensionless,
# append a "magic" extension so it can be classified with other
# languages that have shebang scripts.
if File.extname(name).empty? && mode && (mode.to_i(8) & 05) == 05
@@ -84,7 +84,9 @@ module Linguist
if possible_languages.length > 1
data = data.call() if data.respond_to?(:call)
if result = Classifier.classify(Samples::DATA, data, possible_languages.map(&:name)).first
if data.nil? || data == ""
nil
elsif result = Classifier.classify(Samples::DATA, data, possible_languages.map(&:name)).first
Language[result[0]]
end
else
@@ -220,6 +222,7 @@ module Linguist
raise(ArgumentError, "#{@name} is missing lexer")
@ace_mode = attributes[:ace_mode]
@wrap = attributes[:wrap] || false
# Set legacy search term
@search_term = attributes[:search_term] || default_alias_name
@@ -310,6 +313,11 @@ module Linguist
# Returns a String name or nil
attr_reader :ace_mode
# Public: Should language lines be wrapped
#
# Returns true or false
attr_reader :wrap
# Public: Get extensions
#
# Examples
@@ -321,7 +329,7 @@ module Linguist
# Deprecated: Get primary extension
#
# Defaults to the first extension but can be overriden
# Defaults to the first extension but can be overridden
# in the languages.yml.
#
# The primary extension can not be nil. Tests should verify this.
@@ -460,6 +468,7 @@ module Linguist
:aliases => options['aliases'],
:lexer => options['lexer'],
:ace_mode => options['ace_mode'],
:wrap => options['wrap'],
:group_name => options['group'],
:searchable => options.key?('searchable') ? options['searchable'] : true,
:search_term => options['search_term'],
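
Elsewhere in this file, the Language.detect hunk makes ambiguous extensions fail soft: the data block is only invoked when more than one language matches, and a nil or empty blob now skips the Bayesian classifier instead of feeding it nothing. A hedged sketch of that control flow (method name and classify helper are simplified stand-ins, not the real API):

def pick_language(possible_languages, data)
  return possible_languages.first if possible_languages.length <= 1
  data = data.call() if data.respond_to?(:call)  # blob contents are read lazily, as in the diff
  return nil if data.nil? || data == ""          # nothing to classify, so give up early
  best = classify(data, possible_languages.map(&:name))  # stand-in for Classifier.classify(...).first
  best && possible_languages.find { |lang| lang.name == best }
end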

File diff suppressed because it is too large

View File

@@ -4,7 +4,7 @@ module Linguist
module MD5
# Public: Create deep nested digest of value object.
#
# Useful for object comparsion.
# Useful for object comparison.
#
# obj - Object to digest.
#

View File

@@ -8,6 +8,8 @@
- C#
- C++
- CSS
- Clojure
- CoffeeScript
- Common Lisp
- Diff
- Emacs Lisp
@@ -25,5 +27,3 @@
- SQL
- Scala
- Scheme
- TeX
- XML

View File

@@ -67,8 +67,8 @@ module Linguist
return if @computed_stats
@enum.each do |blob|
# Skip binary file extensions
next if blob.binary_mime_type?
# Skip files that are likely binary
next if blob.likely_binary?
# Skip vendored or generated blobs
next if blob.vendored? || blob.generated? || blob.language.nil?

File diff suppressed because it is too large

View File

@@ -76,12 +76,14 @@ module Linguist
db['extnames'][language_name] ||= []
if !db['extnames'][language_name].include?(sample[:extname])
db['extnames'][language_name] << sample[:extname]
db['extnames'][language_name].sort!
end
end
if sample[:filename]
db['filenames'][language_name] ||= []
db['filenames'][language_name] << sample[:filename]
db['filenames'][language_name].sort!
end
data = File.read(sample[:path])
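
Sorting the accumulated extnames and filenames keeps the regenerated samples database in a deterministic order, so rebuilding it from the same sample files produces no spurious churn in the serialized output. For instance:

db = { 'extnames' => { 'Ruby' => ['.rb'] } }
sample = { :extname => '.rake' }   # hypothetical sample being indexed
unless db['extnames']['Ruby'].include?(sample[:extname])
  db['extnames']['Ruby'] << sample[:extname]
  db['extnames']['Ruby'].sort!     # stable order regardless of scan order
end
p db['extnames']['Ruby']           # => [".rake", ".rb"]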

View File

@@ -32,7 +32,8 @@ module Linguist
['/*', '*/'], # C
['<!--', '-->'], # XML
['{-', '-}'], # Haskell
['(*', '*)'] # Coq
['(*', '*)'], # Coq
['"""', '"""'] # Python
]
START_SINGLE_LINE_COMMENT = Regexp.compile(SINGLE_LINE_COMMENTS.map { |c|
@@ -138,7 +139,7 @@ module Linguist
s.scan(/\s+/)
script = s.scan(/\S+/)
end
script = script[/[^\d]+/, 0]
script = script[/[^\d]+/, 0] if script
return script
end
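
Two small hardening changes above: Python's triple-quoted blocks are now treated like the other multi-line comment pairs, and the shebang parser no longer blows up when no interpreter token could be scanned. The guard matters because indexing nil raises; a tiny illustration (input is hypothetical):

script = nil                              # e.g. a shebang line the scanner produced nothing for
script = script[/[^\d]+/, 0] if script    # without "if script" this would raise NoMethodError
p script                                  # => nil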

View File

@@ -16,13 +16,19 @@
# https://github.com/joyent/node
- ^deps/
- ^tools/
- (^|/)configure$
- (^|/)configure.ac$
- (^|/)config.guess$
- (^|/)config.sub$
# Node depedencies
# Node dependencies
- node_modules/
# Vendored depedencies
# Vendored dependencies
- vendor/
# Debian packaging
- ^debian/
## Commonly Bundled JavaScript frameworks ##
@@ -61,8 +67,16 @@
# MathJax
- (^|/)MathJax/
# SyntaxHighlighter - http://alexgorbatchev.com/
- (^|/)shBrush([^.]*)\.js$
- (^|/)shCore\.js$
- (^|/)shLegacy\.js$
## Python ##
# django
- (^|/)admin_media/
# Fabric
- ^fabfile\.py$
@@ -94,3 +108,6 @@
# Samples folders
- ^[Ss]amples/
# Test fixtures
- ^[Tt]est/fixtures/
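
Each vendor.yml entry is a regular expression matched against a blob's path; the entries added here include autotools output (configure, config.guess, config.sub), bundled SyntaxHighlighter brushes, and test fixture directories. A quick check of one added pattern against a couple of hypothetical paths:

pattern = Regexp.new('(^|/)shBrush([^.]*)\.js$')   # one of the SyntaxHighlighter rules above
p !!(pattern =~ "public/js/shBrushRuby.js")        # => true
p !!(pattern =~ "lib/highlighter.rb")              # => false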

View File

@@ -0,0 +1,219 @@
*/**
* The MIT License (MIT)
* Copyright (c) 2012 René van Mil
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
*----------------------------------------------------------------------*
* CLASS CL_CSV_PARSER DEFINITION
*----------------------------------------------------------------------*
*
*----------------------------------------------------------------------*
class cl_csv_parser definition
public
inheriting from cl_object
final
create public .
public section.
*"* public components of class CL_CSV_PARSER
*"* do not include other source files here!!!
type-pools abap .
methods constructor
importing
!delegate type ref to if_csv_parser_delegate
!csvstring type string
!separator type c
!skip_first_line type abap_bool .
methods parse
raising
cx_csv_parse_error .
protected section.
*"* protected components of class CL_CSV_PARSER
*"* do not include other source files here!!!
private section.
*"* private components of class CL_CSV_PARSER
*"* do not include other source files here!!!
constants _textindicator type c value '"'. "#EC NOTEXT
data _delegate type ref to if_csv_parser_delegate .
data _csvstring type string .
data _separator type c .
type-pools abap .
data _skip_first_line type abap_bool .
methods _lines
returning
value(returning) type stringtab .
methods _parse_line
importing
!line type string
returning
value(returning) type stringtab
raising
cx_csv_parse_error .
endclass. "CL_CSV_PARSER DEFINITION
*----------------------------------------------------------------------*
* CLASS CL_CSV_PARSER IMPLEMENTATION
*----------------------------------------------------------------------*
*
*----------------------------------------------------------------------*
class cl_csv_parser implementation.
* <SIGNATURE>---------------------------------------------------------------------------------------+
* | Instance Public Method CL_CSV_PARSER->CONSTRUCTOR
* +-------------------------------------------------------------------------------------------------+
* | [--->] DELEGATE TYPE REF TO IF_CSV_PARSER_DELEGATE
* | [--->] CSVSTRING TYPE STRING
* | [--->] SEPARATOR TYPE C
* | [--->] SKIP_FIRST_LINE TYPE ABAP_BOOL
* +--------------------------------------------------------------------------------------</SIGNATURE>
method constructor.
super->constructor( ).
_delegate = delegate.
_csvstring = csvstring.
_separator = separator.
_skip_first_line = skip_first_line.
endmethod. "constructor
* <SIGNATURE>---------------------------------------------------------------------------------------+
* | Instance Public Method CL_CSV_PARSER->PARSE
* +-------------------------------------------------------------------------------------------------+
* | [!CX!] CX_CSV_PARSE_ERROR
* +--------------------------------------------------------------------------------------</SIGNATURE>
method parse.
data msg type string.
if _csvstring is initial.
message e002(csv) into msg.
raise exception type cx_csv_parse_error
exporting
message = msg.
endif.
" Get the lines
data is_first_line type abap_bool value abap_true.
data lines type standard table of string.
lines = _lines( ).
field-symbols <line> type string.
loop at lines assigning <line>.
" Should we skip the first line?
if _skip_first_line = abap_true and is_first_line = abap_true.
is_first_line = abap_false.
continue.
endif.
" Parse the line
data values type standard table of string.
values = _parse_line( <line> ).
" Send values to delegate
_delegate->values_found( values ).
endloop.
endmethod. "parse
* <SIGNATURE>---------------------------------------------------------------------------------------+
* | Instance Private Method CL_CSV_PARSER->_LINES
* +-------------------------------------------------------------------------------------------------+
* | [<-()] RETURNING TYPE STRINGTAB
* +--------------------------------------------------------------------------------------</SIGNATURE>
method _lines.
split _csvstring at cl_abap_char_utilities=>cr_lf into table returning.
endmethod. "_lines
* <SIGNATURE>---------------------------------------------------------------------------------------+
* | Instance Private Method CL_CSV_PARSER->_PARSE_LINE
* +-------------------------------------------------------------------------------------------------+
* | [--->] LINE TYPE STRING
* | [<-()] RETURNING TYPE STRINGTAB
* | [!CX!] CX_CSV_PARSE_ERROR
* +--------------------------------------------------------------------------------------</SIGNATURE>
method _parse_line.
data msg type string.
data csvvalue type string.
data csvvalues type standard table of string.
data char type c.
data pos type i value 0.
data len type i.
len = strlen( line ).
while pos < len.
char = line+pos(1).
if char <> _separator.
if char = _textindicator.
data text_ended type abap_bool.
text_ended = abap_false.
while text_ended = abap_false.
pos = pos + 1.
if pos < len.
char = line+pos(1).
if char = _textindicator.
text_ended = abap_true.
else.
if char is initial. " Space
concatenate csvvalue ` ` into csvvalue.
else.
concatenate csvvalue char into csvvalue.
endif.
endif.
else.
" Reached the end of the line while inside a text value
" This indicates an error in the CSV formatting
text_ended = abap_true.
message e003(csv) into msg.
raise exception type cx_csv_parse_error
exporting
message = msg.
endif.
endwhile.
" Check if next character is a separator, otherwise the CSV formatting is incorrect
data nextpos type i.
nextpos = pos + 1.
if nextpos < len and line+nextpos(1) <> _separator.
message e003(csv) into msg.
raise exception type cx_csv_parse_error
exporting
message = msg.
endif.
else.
if char is initial. " Space
concatenate csvvalue ` ` into csvvalue.
else.
concatenate csvvalue char into csvvalue.
endif.
endif.
else.
append csvvalue to csvvalues.
clear csvvalue.
endif.
pos = pos + 1.
endwhile.
append csvvalue to csvvalues. " Don't forget the last value
returning = csvvalues.
endmethod. "_parse_line
endclass. "CL_CSV_PARSER IMPLEMENTATION

View File

@@ -0,0 +1,26 @@
ServerSignature Off
RewriteCond %{REQUEST_METHOD} ^(HEAD|TRACE|DELETE|TRACK) [NC,OR]
RewriteCond %{THE_REQUEST} (\\r|\\n|%0A|%0D) [NC,OR]
RewriteCond %{HTTP_REFERER} (<|>||%0A|%0D|%27|%3C|%3E|%00) [NC,OR]
RewriteCond %{HTTP_COOKIE} (<|>||%0A|%0D|%27|%3C|%3E|%00) [NC,OR]
RewriteCond %{REQUEST_URI} ^/(,|;|:|<|>|”>|”<|/|\\\.\.\\).{0,9999} [NC,OR]
RewriteCond %{HTTP_USER_AGENT} ^$ [OR]
RewriteCond %{HTTP_USER_AGENT} ^(java|curl|wget) [NC,OR]
RewriteCond %{HTTP_USER_AGENT} (winhttp|HTTrack|clshttp|archiver|loader|email|harvest|extract|grab|miner) [NC,OR]
RewriteCond %{HTTP_USER_AGENT} (libwww-perl|curl|wget|python|nikto|scan) [NC,OR]
RewriteCond %{HTTP_USER_AGENT} (<|>||%0A|%0D|%27|%3C|%3E|%00) [NC,OR]
#Block mySQL injects
RewriteCond %{QUERY_STRING} (;|<|>||”|\)|%0A|%0D|%22|%27|%3C|%3E|%00).*(/\*|union|select|insert|cast|set|declare|drop|update|md5|benchmark) [NC,OR]
RewriteCond %{QUERY_STRING} \.\./\.\. [OR]
RewriteCond %{QUERY_STRING} (localhost|loopback|127\.0\.0\.1) [NC,OR]
RewriteCond %{QUERY_STRING} \.[a-z0-9] [NC,OR]
RewriteCond %{QUERY_STRING} (<|>||%0A|%0D|%27|%3C|%3E|%00) [NC]
# Note: The final RewriteCond must NOT use the [OR] flag.
# Return 403 Forbidden error.
RewriteRule .* index.php [F]

View File

@@ -0,0 +1,470 @@
# This is the main Apache HTTP server configuration file. It contains the
# configuration directives that give the server its instructions.
# See <URL:http://httpd.apache.org/docs/2.2> for detailed information.
# In particular, see
# <URL:http://httpd.apache.org/docs/2.2/mod/directives.html>
# for a discussion of each configuration directive.
#
# Do NOT simply read the instructions in here without understanding
# what they do. They're here only as hints or reminders. If you are unsure
# consult the online docs. You have been warned.
#
# Configuration and logfile names: If the filenames you specify for many
# of the server's control files begin with "/" (or "drive:/" for Win32), the
# server will use that explicit path. If the filenames do *not* begin
# with "/", the value of ServerRoot is prepended -- so "/var/log/apache2/foo.log"
# with ServerRoot set to "" will be interpreted by the
# server as "//var/log/apache2/foo.log".
#
# ServerRoot: The top of the directory tree under which the server's
# configuration, error, and log files are kept.
#
# Do not add a slash at the end of the directory path. If you point
# ServerRoot at a non-local disk, be sure to point the LockFile directive
# at a local disk. If you wish to share the same ServerRoot for multiple
# httpd daemons, you will need to change at least LockFile and PidFile.
#
ServerRoot ""
#
# Listen: Allows you to bind Apache to specific IP addresses and/or
# ports, instead of the default. See also the <VirtualHost>
# directive.
#
# Change this to Listen on specific IP addresses as shown below to
# prevent Apache from glomming onto all bound IP addresses.
#
#Listen 12.34.56.78:80
Listen 80
#
# Dynamic Shared Object (DSO) Support
#
# To be able to use the functionality of a module which was built as a DSO you
# have to place corresponding `LoadModule' lines at this location so the
# directives contained in it are actually available _before_ they are used.
# Statically compiled modules (those listed by `httpd -l') do not need
# to be loaded here.
#
# Example:
# LoadModule foo_module modules/mod_foo.so
#
LoadModule authn_file_module /usr/lib/apache2/modules/mod_authn_file.so
LoadModule authn_dbm_module /usr/lib/apache2/modules/mod_authn_dbm.so
LoadModule authn_anon_module /usr/lib/apache2/modules/mod_authn_anon.so
LoadModule authn_dbd_module /usr/lib/apache2/modules/mod_authn_dbd.so
LoadModule authn_default_module /usr/lib/apache2/modules/mod_authn_default.so
LoadModule authn_alias_module /usr/lib/apache2/modules/mod_authn_alias.so
LoadModule authz_host_module /usr/lib/apache2/modules/mod_authz_host.so
LoadModule authz_groupfile_module /usr/lib/apache2/modules/mod_authz_groupfile.so
LoadModule authz_user_module /usr/lib/apache2/modules/mod_authz_user.so
LoadModule authz_dbm_module /usr/lib/apache2/modules/mod_authz_dbm.so
LoadModule authz_owner_module /usr/lib/apache2/modules/mod_authz_owner.so
LoadModule authnz_ldap_module /usr/lib/apache2/modules/mod_authnz_ldap.so
LoadModule authz_default_module /usr/lib/apache2/modules/mod_authz_default.so
LoadModule auth_basic_module /usr/lib/apache2/modules/mod_auth_basic.so
LoadModule auth_digest_module /usr/lib/apache2/modules/mod_auth_digest.so
LoadModule file_cache_module /usr/lib/apache2/modules/mod_file_cache.so
LoadModule cache_module /usr/lib/apache2/modules/mod_cache.so
LoadModule disk_cache_module /usr/lib/apache2/modules/mod_disk_cache.so
LoadModule mem_cache_module /usr/lib/apache2/modules/mod_mem_cache.so
LoadModule dbd_module /usr/lib/apache2/modules/mod_dbd.so
LoadModule dumpio_module /usr/lib/apache2/modules/mod_dumpio.so
LoadModule ext_filter_module /usr/lib/apache2/modules/mod_ext_filter.so
LoadModule include_module /usr/lib/apache2/modules/mod_include.so
LoadModule filter_module /usr/lib/apache2/modules/mod_filter.so
LoadModule charset_lite_module /usr/lib/apache2/modules/mod_charset_lite.so
LoadModule deflate_module /usr/lib/apache2/modules/mod_deflate.so
LoadModule ldap_module /usr/lib/apache2/modules/mod_ldap.so
LoadModule log_forensic_module /usr/lib/apache2/modules/mod_log_forensic.so
LoadModule env_module /usr/lib/apache2/modules/mod_env.so
LoadModule mime_magic_module /usr/lib/apache2/modules/mod_mime_magic.so
LoadModule cern_meta_module /usr/lib/apache2/modules/mod_cern_meta.so
LoadModule expires_module /usr/lib/apache2/modules/mod_expires.so
LoadModule headers_module /usr/lib/apache2/modules/mod_headers.so
LoadModule ident_module /usr/lib/apache2/modules/mod_ident.so
LoadModule usertrack_module /usr/lib/apache2/modules/mod_usertrack.so
LoadModule unique_id_module /usr/lib/apache2/modules/mod_unique_id.so
LoadModule setenvif_module /usr/lib/apache2/modules/mod_setenvif.so
LoadModule version_module /usr/lib/apache2/modules/mod_version.so
LoadModule proxy_module /usr/lib/apache2/modules/mod_proxy.so
LoadModule proxy_connect_module /usr/lib/apache2/modules/mod_proxy_connect.so
LoadModule proxy_ftp_module /usr/lib/apache2/modules/mod_proxy_ftp.so
LoadModule proxy_http_module /usr/lib/apache2/modules/mod_proxy_http.so
LoadModule proxy_ajp_module /usr/lib/apache2/modules/mod_proxy_ajp.so
LoadModule proxy_balancer_module /usr/lib/apache2/modules/mod_proxy_balancer.so
LoadModule ssl_module /usr/lib/apache2/modules/mod_ssl.so
LoadModule mime_module /usr/lib/apache2/modules/mod_mime.so
LoadModule dav_module /usr/lib/apache2/modules/mod_dav.so
LoadModule status_module /usr/lib/apache2/modules/mod_status.so
LoadModule autoindex_module /usr/lib/apache2/modules/mod_autoindex.so
LoadModule asis_module /usr/lib/apache2/modules/mod_asis.so
LoadModule info_module /usr/lib/apache2/modules/mod_info.so
LoadModule suexec_module /usr/lib/apache2/modules/mod_suexec.so
LoadModule cgid_module /usr/lib/apache2/modules/mod_cgid.so
LoadModule cgi_module /usr/lib/apache2/modules/mod_cgi.so
LoadModule dav_fs_module /usr/lib/apache2/modules/mod_dav_fs.so
LoadModule dav_lock_module /usr/lib/apache2/modules/mod_dav_lock.so
LoadModule vhost_alias_module /usr/lib/apache2/modules/mod_vhost_alias.so
LoadModule negotiation_module /usr/lib/apache2/modules/mod_negotiation.so
LoadModule dir_module /usr/lib/apache2/modules/mod_dir.so
LoadModule imagemap_module /usr/lib/apache2/modules/mod_imagemap.so
LoadModule actions_module /usr/lib/apache2/modules/mod_actions.so
LoadModule speling_module /usr/lib/apache2/modules/mod_speling.so
LoadModule userdir_module /usr/lib/apache2/modules/mod_userdir.so
LoadModule alias_module /usr/lib/apache2/modules/mod_alias.so
LoadModule rewrite_module /usr/lib/apache2/modules/mod_rewrite.so
<IfModule !mpm_netware_module>
#
# If you wish httpd to run as a different user or group, you must run
# httpd as root initially and it will switch.
#
# User/Group: The name (or #number) of the user/group to run httpd as.
# It is usually good practice to create a dedicated user and group for
# running httpd, as with most system services.
#
User daemon
Group daemon
</IfModule>
# 'Main' server configuration
#
# The directives in this section set up the values used by the 'main'
# server, which responds to any requests that aren't handled by a
# <VirtualHost> definition. These values also provide defaults for
# any <VirtualHost> containers you may define later in the file.
#
# All of these directives may appear inside <VirtualHost> containers,
# in which case these default settings will be overridden for the
# virtual host being defined.
#
#
# ServerAdmin: Your address, where problems with the server should be
# e-mailed. This address appears on some server-generated pages, such
# as error documents. e.g. admin@your-domain.com
#
ServerAdmin you@example.com
#
# ServerName gives the name and port that the server uses to identify itself.
# This can often be determined automatically, but we recommend you specify
# it explicitly to prevent problems during startup.
#
# If your host doesn't have a registered DNS name, enter its IP address here.
#
#ServerName www.example.com:80
#
# DocumentRoot: The directory out of which you will serve your
# documents. By default, all requests are taken from this directory, but
# symbolic links and aliases may be used to point to other locations.
#
DocumentRoot "/usr/share/apache2/default-site/htdocs"
#
# Each directory to which Apache has access can be configured with respect
# to which services and features are allowed and/or disabled in that
# directory (and its subdirectories).
#
# First, we configure the "default" to be a very restrictive set of
# features.
#
<Directory />
Options FollowSymLinks
AllowOverride None
Order deny,allow
Deny from all
</Directory>
#
# Note that from this point forward you must specifically allow
# particular features to be enabled - so if something's not working as
# you might expect, make sure that you have specifically enabled it
# below.
#
#
# This should be changed to whatever you set DocumentRoot to.
#
<Directory "/usr/share/apache2/default-site/htdocs">
#
# Possible values for the Options directive are "None", "All",
# or any combination of:
# Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews
#
# Note that "MultiViews" must be named *explicitly* --- "Options All"
# doesn't give it to you.
#
# The Options directive is both complicated and important. Please see
# http://httpd.apache.org/docs/2.2/mod/core.html#options
# for more information.
#
Options Indexes FollowSymLinks
#
# AllowOverride controls what directives may be placed in .htaccess files.
# It can be "All", "None", or any combination of the keywords:
# Options FileInfo AuthConfig Limit
#
AllowOverride None
#
# Controls who can get stuff from this server.
#
Order allow,deny
Allow from all
</Directory>
#
# DirectoryIndex: sets the file that Apache will serve if a directory
# is requested.
#
<IfModule dir_module>
DirectoryIndex index.html
</IfModule>
#
# The following lines prevent .htaccess and .htpasswd files from being
# viewed by Web clients.
#
<FilesMatch "^\.ht">
Order allow,deny
Deny from all
Satisfy All
</FilesMatch>
#
# ErrorLog: The location of the error log file.
# If you do not specify an ErrorLog directive within a <VirtualHost>
# container, error messages relating to that virtual host will be
# logged here. If you *do* define an error logfile for a <VirtualHost>
# container, that host's errors will be logged there and not here.
#
ErrorLog /var/log/apache2/error_log
#
# LogLevel: Control the number of messages logged to the error_log.
# Possible values include: debug, info, notice, warn, error, crit,
# alert, emerg.
#
LogLevel warn
<IfModule log_config_module>
#
# The following directives define some format nicknames for use with
# a CustomLog directive (see below).
#
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
LogFormat "%h %l %u %t \"%r\" %>s %b" common
<IfModule logio_module>
# You need to enable mod_logio.c to use %I and %O
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio
</IfModule>
#
# The location and format of the access logfile (Common Logfile Format).
# If you do not define any access logfiles within a <VirtualHost>
# container, they will be logged here. Contrariwise, if you *do*
# define per-<VirtualHost> access logfiles, transactions will be
# logged therein and *not* in this file.
#
CustomLog /var/log/apache2/access_log common
#
# If you prefer a logfile with access, agent, and referer information
# (Combined Logfile Format) you can use the following directive.
#
#CustomLog /var/log/apache2/access_log combined
</IfModule>
<IfModule alias_module>
#
# Redirect: Allows you to tell clients about documents that used to
# exist in your server's namespace, but do not anymore. The client
# will make a new request for the document at its new location.
# Example:
# Redirect permanent /foo http://www.example.com/bar
#
# Alias: Maps web paths into filesystem paths and is used to
# access content that does not live under the DocumentRoot.
# Example:
# Alias /webpath /full/filesystem/path
#
# If you include a trailing / on /webpath then the server will
# require it to be present in the URL. You will also likely
# need to provide a <Directory> section to allow access to
# the filesystem path.
#
# ScriptAlias: This controls which directories contain server scripts.
# ScriptAliases are essentially the same as Aliases, except that
# documents in the target directory are treated as applications and
# run by the server when requested rather than as documents sent to the
# client. The same rules about trailing "/" apply to ScriptAlias
# directives as to Alias.
#
ScriptAlias /cgi-bin/ "/usr/lib/cgi-bin/"
</IfModule>
<IfModule cgid_module>
#
# ScriptSock: On threaded servers, designate the path to the UNIX
# socket used to communicate with the CGI daemon of mod_cgid.
#
#Scriptsock /var/run/apache2/cgisock
</IfModule>
#
# "/usr/lib/cgi-bin" should be changed to whatever your ScriptAliased
# CGI directory exists, if you have that configured.
#
<Directory "/usr/lib/cgi-bin">
AllowOverride None
Options None
Order allow,deny
Allow from all
</Directory>
#
# DefaultType: the default MIME type the server will use for a document
# if it cannot otherwise determine one, such as from filename extensions.
# If your server contains mostly text or HTML documents, "text/plain" is
# a good value. If most of your content is binary, such as applications
# or images, you may want to use "application/octet-stream" instead to
# keep browsers from trying to display binary files as though they are
# text.
#
DefaultType text/plain
<IfModule mime_module>
#
# TypesConfig points to the file containing the list of mappings from
# filename extension to MIME-type.
#
TypesConfig /etc/apache2/mime.types
#
# AddType allows you to add to or override the MIME configuration
# file specified in TypesConfig for specific file types.
#
#AddType application/x-gzip .tgz
#
# AddEncoding allows you to have certain browsers uncompress
# information on the fly. Note: Not all browsers support this.
#
#AddEncoding x-compress .Z
#AddEncoding x-gzip .gz .tgz
#
# If the AddEncoding directives above are commented-out, then you
# probably should define those extensions to indicate media types:
#
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
#
# AddHandler allows you to map certain file extensions to "handlers":
# actions unrelated to filetype. These can be either built into the server
# or added with the Action directive (see below)
#
# To use CGI scripts outside of ScriptAliased directories:
# (You will also need to add "ExecCGI" to the "Options" directive.)
#
#AddHandler cgi-script .cgi
# For type maps (negotiated resources):
#AddHandler type-map var
#
# Filters allow you to process content before it is sent to the client.
#
# To parse .shtml files for server-side includes (SSI):
# (You will also need to add "Includes" to the "Options" directive.)
#
#AddType text/html .shtml
#AddOutputFilter INCLUDES .shtml
</IfModule>
#
# The mod_mime_magic module allows the server to use various hints from the
# contents of the file itself to determine its type. The MIMEMagicFile
# directive tells the module where the hint definitions are located.
#
#MIMEMagicFile /etc/apache2/magic
#
# Customizable error responses come in three flavors:
# 1) plain text 2) local redirects 3) external redirects
#
# Some examples:
#ErrorDocument 500 "The server made a boo boo."
#ErrorDocument 404 /missing.html
#ErrorDocument 404 "/cgi-bin/missing_handler.pl"
#ErrorDocument 402 http://www.example.com/subscription_info.html
#
#
# EnableMMAP and EnableSendfile: On systems that support it,
# memory-mapping or the sendfile syscall is used to deliver
# files. This usually improves server performance, but must
# be turned off when serving from networked-mounted
# filesystems or if support for these functions is otherwise
# broken on your system.
#
#EnableMMAP off
#EnableSendfile off
# Supplemental configuration
#
# The configuration files in the /etc/apache2/extra/ directory can be
# included to add extra features or to modify the default configuration of
# the server, or you may simply copy their contents here and change as
# necessary.
# Server-pool management (MPM specific)
#Include /etc/apache2/extra/httpd-mpm.conf
# Multi-language error messages
#Include /etc/apache2/extra/httpd-multilang-errordoc.conf
# Fancy directory listings
#Include /etc/apache2/extra/httpd-autoindex.conf
# Language settings
#Include /etc/apache2/extra/httpd-languages.conf
# User home directories
#Include /etc/apache2/extra/httpd-userdir.conf
# Real-time info on requests and configuration
#Include /etc/apache2/extra/httpd-info.conf
# Virtual hosts
#Include /etc/apache2/extra/httpd-vhosts.conf
# Local access to the Apache HTTP Server Manual
#Include /etc/apache2/extra/httpd-manual.conf
# Distributed authoring and versioning (WebDAV)
#Include /etc/apache2/extra/httpd-dav.conf
# Various default settings
#Include /etc/apache2/extra/httpd-default.conf
# Secure (SSL/TLS) connections
#Include /etc/apache2/extra/httpd-ssl.conf
#
# Note: The following must must be present to support
# starting without SSL on platforms with no /dev/random equivalent
# but a statically compiled-in mod_ssl.
#
<IfModule ssl_module>
SSLRandomSeed startup builtin
SSLRandomSeed connect builtin
</IfModule>

View File

@@ -0,0 +1,500 @@
#
# This is the main Apache HTTP server configuration file. It contains the
# configuration directives that give the server its instructions.
# See <URL:http://httpd.apache.org/docs/2.2> for detailed information.
# In particular, see
# <URL:http://httpd.apache.org/docs/2.2/mod/directives.html>
# for a discussion of each configuration directive.
#
# Do NOT simply read the instructions in here without understanding
# what they do. They're here only as hints or reminders. If you are unsure
# consult the online docs. You have been warned.
#
# Configuration and logfile names: If the filenames you specify for many
# of the server's control files begin with "/" (or "drive:/" for Win32), the
# server will use that explicit path. If the filenames do *not* begin
# with "/", the value of ServerRoot is prepended -- so "log/foo_log"
# with ServerRoot set to "/usr" will be interpreted by the
# server as "/usr/log/foo_log".
#
# ServerRoot: The top of the directory tree under which the server's
# configuration, error, and log files are kept.
#
# Do not add a slash at the end of the directory path. If you point
# ServerRoot at a non-local disk, be sure to point the LockFile directive
# at a local disk. If you wish to share the same ServerRoot for multiple
# httpd daemons, you will need to change at least LockFile and PidFile.
#
ServerRoot "/usr"
#
# Listen: Allows you to bind Apache to specific IP addresses and/or
# ports, instead of the default. See also the <VirtualHost>
# directive.
#
# Change this to Listen on specific IP addresses as shown below to
# prevent Apache from glomming onto all bound IP addresses.
#
#Listen 12.34.56.78:80
Listen 80
#
# Dynamic Shared Object (DSO) Support
#
# To be able to use the functionality of a module which was built as a DSO you
# have to place corresponding `LoadModule' lines at this location so the
# directives contained in it are actually available _before_ they are used.
# Statically compiled modules (those listed by `httpd -l') do not need
# to be loaded here.
#
# Example:
# LoadModule foo_module modules/mod_foo.so
#
LoadModule authn_file_module libexec/apache2/mod_authn_file.so
LoadModule authn_dbm_module libexec/apache2/mod_authn_dbm.so
LoadModule authn_anon_module libexec/apache2/mod_authn_anon.so
LoadModule authn_dbd_module libexec/apache2/mod_authn_dbd.so
LoadModule authn_default_module libexec/apache2/mod_authn_default.so
LoadModule authz_host_module libexec/apache2/mod_authz_host.so
LoadModule authz_groupfile_module libexec/apache2/mod_authz_groupfile.so
LoadModule authz_user_module libexec/apache2/mod_authz_user.so
LoadModule authz_dbm_module libexec/apache2/mod_authz_dbm.so
LoadModule authz_owner_module libexec/apache2/mod_authz_owner.so
LoadModule authz_default_module libexec/apache2/mod_authz_default.so
LoadModule auth_basic_module libexec/apache2/mod_auth_basic.so
LoadModule auth_digest_module libexec/apache2/mod_auth_digest.so
LoadModule cache_module libexec/apache2/mod_cache.so
LoadModule disk_cache_module libexec/apache2/mod_disk_cache.so
LoadModule mem_cache_module libexec/apache2/mod_mem_cache.so
LoadModule dbd_module libexec/apache2/mod_dbd.so
LoadModule dumpio_module libexec/apache2/mod_dumpio.so
LoadModule reqtimeout_module libexec/apache2/mod_reqtimeout.so
LoadModule ext_filter_module libexec/apache2/mod_ext_filter.so
LoadModule include_module libexec/apache2/mod_include.so
LoadModule filter_module libexec/apache2/mod_filter.so
LoadModule substitute_module libexec/apache2/mod_substitute.so
LoadModule deflate_module libexec/apache2/mod_deflate.so
LoadModule log_config_module libexec/apache2/mod_log_config.so
LoadModule log_forensic_module libexec/apache2/mod_log_forensic.so
LoadModule logio_module libexec/apache2/mod_logio.so
LoadModule env_module libexec/apache2/mod_env.so
LoadModule mime_magic_module libexec/apache2/mod_mime_magic.so
LoadModule cern_meta_module libexec/apache2/mod_cern_meta.so
LoadModule expires_module libexec/apache2/mod_expires.so
LoadModule headers_module libexec/apache2/mod_headers.so
LoadModule ident_module libexec/apache2/mod_ident.so
LoadModule usertrack_module libexec/apache2/mod_usertrack.so
#LoadModule unique_id_module libexec/apache2/mod_unique_id.so
LoadModule setenvif_module libexec/apache2/mod_setenvif.so
LoadModule version_module libexec/apache2/mod_version.so
LoadModule proxy_module libexec/apache2/mod_proxy.so
LoadModule proxy_connect_module libexec/apache2/mod_proxy_connect.so
LoadModule proxy_ftp_module libexec/apache2/mod_proxy_ftp.so
LoadModule proxy_http_module libexec/apache2/mod_proxy_http.so
LoadModule proxy_scgi_module libexec/apache2/mod_proxy_scgi.so
LoadModule proxy_ajp_module libexec/apache2/mod_proxy_ajp.so
LoadModule proxy_balancer_module libexec/apache2/mod_proxy_balancer.so
LoadModule ssl_module libexec/apache2/mod_ssl.so
LoadModule mime_module libexec/apache2/mod_mime.so
LoadModule dav_module libexec/apache2/mod_dav.so
LoadModule status_module libexec/apache2/mod_status.so
LoadModule autoindex_module libexec/apache2/mod_autoindex.so
LoadModule asis_module libexec/apache2/mod_asis.so
LoadModule info_module libexec/apache2/mod_info.so
LoadModule cgi_module libexec/apache2/mod_cgi.so
LoadModule dav_fs_module libexec/apache2/mod_dav_fs.so
LoadModule vhost_alias_module libexec/apache2/mod_vhost_alias.so
LoadModule negotiation_module libexec/apache2/mod_negotiation.so
LoadModule dir_module libexec/apache2/mod_dir.so
LoadModule imagemap_module libexec/apache2/mod_imagemap.so
LoadModule actions_module libexec/apache2/mod_actions.so
LoadModule speling_module libexec/apache2/mod_speling.so
LoadModule userdir_module libexec/apache2/mod_userdir.so
LoadModule alias_module libexec/apache2/mod_alias.so
LoadModule rewrite_module libexec/apache2/mod_rewrite.so
#LoadModule perl_module libexec/apache2/mod_perl.so
#LoadModule php5_module libexec/apache2/libphp5.so
#LoadModule hfs_apple_module libexec/apache2/mod_hfs_apple.so
<IfModule !mpm_netware_module>
<IfModule !mpm_winnt_module>
#
# If you wish httpd to run as a different user or group, you must run
# httpd as root initially and it will switch.
#
# User/Group: The name (or #number) of the user/group to run httpd as.
# It is usually good practice to create a dedicated user and group for
# running httpd, as with most system services.
#
User _www
Group _www
</IfModule>
</IfModule>
# 'Main' server configuration
#
# The directives in this section set up the values used by the 'main'
# server, which responds to any requests that aren't handled by a
# <VirtualHost> definition. These values also provide defaults for
# any <VirtualHost> containers you may define later in the file.
#
# All of these directives may appear inside <VirtualHost> containers,
# in which case these default settings will be overridden for the
# virtual host being defined.
#
#
# ServerAdmin: Your address, where problems with the server should be
# e-mailed. This address appears on some server-generated pages, such
# as error documents. e.g. admin@your-domain.com
#
ServerAdmin you@example.com
#
# ServerName gives the name and port that the server uses to identify itself.
# This can often be determined automatically, but we recommend you specify
# it explicitly to prevent problems during startup.
#
# If your host doesn't have a registered DNS name, enter its IP address here.
#
#ServerName www.example.com:80
#
# DocumentRoot: The directory out of which you will serve your
# documents. By default, all requests are taken from this directory, but
# symbolic links and aliases may be used to point to other locations.
#
DocumentRoot "/Library/WebServer/Documents"
#
# Each directory to which Apache has access can be configured with respect
# to which services and features are allowed and/or disabled in that
# directory (and its subdirectories).
#
# First, we configure the "default" to be a very restrictive set of
# features.
#
<Directory />
Options FollowSymLinks
AllowOverride None
Order deny,allow
Deny from all
</Directory>
#
# Note that from this point forward you must specifically allow
# particular features to be enabled - so if something's not working as
# you might expect, make sure that you have specifically enabled it
# below.
#
#
# This should be changed to whatever you set DocumentRoot to.
#
<Directory "/Library/WebServer/Documents">
#
# Possible values for the Options directive are "None", "All",
# or any combination of:
# Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews
#
# Note that "MultiViews" must be named *explicitly* --- "Options All"
# doesn't give it to you.
#
# The Options directive is both complicated and important. Please see
# http://httpd.apache.org/docs/2.2/mod/core.html#options
# for more information.
#
Options Indexes FollowSymLinks MultiViews
#
# AllowOverride controls what directives may be placed in .htaccess files.
# It can be "All", "None", or any combination of the keywords:
# Options FileInfo AuthConfig Limit
#
AllowOverride None
#
# Controls who can get stuff from this server.
#
Order allow,deny
Allow from all
</Directory>
#
# DirectoryIndex: sets the file that Apache will serve if a directory
# is requested.
#
<IfModule dir_module>
DirectoryIndex index.html
</IfModule>
#
# The following lines prevent .htaccess and .htpasswd files from being
# viewed by Web clients.
#
<FilesMatch "^\.([Hh][Tt]|[Dd][Ss]_[Ss])">
Order allow,deny
Deny from all
Satisfy All
</FilesMatch>
#
# Apple specific filesystem protection.
#
<Files "rsrc">
Order allow,deny
Deny from all
Satisfy All
</Files>
<DirectoryMatch ".*\.\.namedfork">
Order allow,deny
Deny from all
Satisfy All
</DirectoryMatch>
#
# ErrorLog: The location of the error log file.
# If you do not specify an ErrorLog directive within a <VirtualHost>
# container, error messages relating to that virtual host will be
# logged here. If you *do* define an error logfile for a <VirtualHost>
# container, that host's errors will be logged there and not here.
#
ErrorLog "/private/var/log/apache2/error_log"
#
# LogLevel: Control the number of messages logged to the error_log.
# Possible values include: debug, info, notice, warn, error, crit,
# alert, emerg.
#
LogLevel warn
<IfModule log_config_module>
#
# The following directives define some format nicknames for use with
# a CustomLog directive (see below).
#
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
LogFormat "%h %l %u %t \"%r\" %>s %b" common
<IfModule logio_module>
# You need to enable mod_logio.c to use %I and %O
LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio
</IfModule>
#
# The location and format of the access logfile (Common Logfile Format).
# If you do not define any access logfiles within a <VirtualHost>
# container, they will be logged here. Contrariwise, if you *do*
# define per-<VirtualHost> access logfiles, transactions will be
# logged therein and *not* in this file.
#
CustomLog "/private/var/log/apache2/access_log" common
#
# If you prefer a logfile with access, agent, and referer information
# (Combined Logfile Format) you can use the following directive.
#
#CustomLog "/private/var/log/apache2/access_log" combined
</IfModule>
<IfModule alias_module>
#
# Redirect: Allows you to tell clients about documents that used to
# exist in your server's namespace, but do not anymore. The client
# will make a new request for the document at its new location.
# Example:
# Redirect permanent /foo http://www.example.com/bar
#
# Alias: Maps web paths into filesystem paths and is used to
# access content that does not live under the DocumentRoot.
# Example:
# Alias /webpath /full/filesystem/path
#
# If you include a trailing / on /webpath then the server will
# require it to be present in the URL. You will also likely
# need to provide a <Directory> section to allow access to
# the filesystem path.
#
# ScriptAlias: This controls which directories contain server scripts.
# ScriptAliases are essentially the same as Aliases, except that
# documents in the target directory are treated as applications and
# run by the server when requested rather than as documents sent to the
# client. The same rules about trailing "/" apply to ScriptAlias
# directives as to Alias.
#
ScriptAliasMatch ^/cgi-bin/((?!(?i:webobjects)).*$) "/Library/WebServer/CGI-Executables/$1"
</IfModule>
<IfModule cgid_module>
#
# ScriptSock: On threaded servers, designate the path to the UNIX
# socket used to communicate with the CGI daemon of mod_cgid.
#
#Scriptsock /private/var/run/cgisock
</IfModule>
#
# "/Library/WebServer/CGI-Executables" should be changed to whatever your ScriptAliased
# CGI directory exists, if you have that configured.
#
<Directory "/Library/WebServer/CGI-Executables">
AllowOverride None
Options None
Order allow,deny
Allow from all
</Directory>
#
# DefaultType: the default MIME type the server will use for a document
# if it cannot otherwise determine one, such as from filename extensions.
# If your server contains mostly text or HTML documents, "text/plain" is
# a good value. If most of your content is binary, such as applications
# or images, you may want to use "application/octet-stream" instead to
# keep browsers from trying to display binary files as though they are
# text.
#
DefaultType text/plain
<IfModule mime_module>
#
# TypesConfig points to the file containing the list of mappings from
# filename extension to MIME-type.
#
TypesConfig /private/etc/apache2/mime.types
#
# AddType allows you to add to or override the MIME configuration
# file specified in TypesConfig for specific file types.
#
#AddType application/x-gzip .tgz
#
# AddEncoding allows you to have certain browsers uncompress
# information on the fly. Note: Not all browsers support this.
#
#AddEncoding x-compress .Z
#AddEncoding x-gzip .gz .tgz
#
# If the AddEncoding directives above are commented-out, then you
# probably should define those extensions to indicate media types:
#
AddType application/x-compress .Z
AddType application/x-gzip .gz .tgz
#
# AddHandler allows you to map certain file extensions to "handlers":
# actions unrelated to filetype. These can be either built into the server
# or added with the Action directive (see below)
#
# To use CGI scripts outside of ScriptAliased directories:
# (You will also need to add "ExecCGI" to the "Options" directive.)
#
#AddHandler cgi-script .cgi
# For type maps (negotiated resources):
#AddHandler type-map var
#
# Filters allow you to process content before it is sent to the client.
#
# To parse .shtml files for server-side includes (SSI):
# (You will also need to add "Includes" to the "Options" directive.)
#
#AddType text/html .shtml
#AddOutputFilter INCLUDES .shtml
</IfModule>
#
# The mod_mime_magic module allows the server to use various hints from the
# contents of the file itself to determine its type. The MIMEMagicFile
# directive tells the module where the hint definitions are located.
#
#MIMEMagicFile /private/etc/apache2/magic
#
# Customizable error responses come in three flavors:
# 1) plain text 2) local redirects 3) external redirects
#
# Some examples:
#ErrorDocument 500 "The server made a boo boo."
#ErrorDocument 404 /missing.html
#ErrorDocument 404 "/cgi-bin/missing_handler.pl"
#ErrorDocument 402 http://www.example.com/subscription_info.html
#
#
# MaxRanges: Maximum number of Ranges in a request before
# returning the entire resource, or one of the special
# values 'default', 'none' or 'unlimited'.
# Default setting is to accept 200 Ranges.
#MaxRanges unlimited
#
# EnableMMAP and EnableSendfile: On systems that support it,
# memory-mapping or the sendfile syscall is used to deliver
# files. This usually improves server performance, but must
# be turned off when serving from networked-mounted
# filesystems or if support for these functions is otherwise
# broken on your system.
#
#EnableMMAP off
#EnableSendfile off
# 6894961
TraceEnable off
# Supplemental configuration
#
# The configuration files in the /private/etc/apache2/extra/ directory can be
# included to add extra features or to modify the default configuration of
# the server, or you may simply copy their contents here and change as
# necessary.
# Server-pool management (MPM specific)
Include /private/etc/apache2/extra/httpd-mpm.conf
# Multi-language error messages
#Include /private/etc/apache2/extra/httpd-multilang-errordoc.conf
# Fancy directory listings
Include /private/etc/apache2/extra/httpd-autoindex.conf
# Language settings
Include /private/etc/apache2/extra/httpd-languages.conf
# User home directories
Include /private/etc/apache2/extra/httpd-userdir.conf
# Real-time info on requests and configuration
#Include /private/etc/apache2/extra/httpd-info.conf
# Virtual hosts
#Include /private/etc/apache2/extra/httpd-vhosts.conf
# Local access to the Apache HTTP Server Manual
Include /private/etc/apache2/extra/httpd-manual.conf
# Distributed authoring and versioning (WebDAV)
#Include /private/etc/apache2/extra/httpd-dav.conf
# Various default settings
#Include /private/etc/apache2/extra/httpd-default.conf
# Secure (SSL/TLS) connections
#Include /private/etc/apache2/extra/httpd-ssl.conf
#
# Note: The following must must be present to support
# starting without SSL on platforms with no /dev/random equivalent
# but a statically compiled-in mod_ssl.
#
<IfModule ssl_module>
SSLRandomSeed startup builtin
SSLRandomSeed connect builtin
</IfModule>
Include /private/etc/apache2/other/*.conf

samples/Awk/test.awk (new file, 121 lines)
View File

@@ -0,0 +1,121 @@
#!/bin/awk -f
BEGIN {
# It is not possible to define output file names here because
# FILENAME is not define in the BEGIN section
n = "";
printf "Generating data files ...";
network_max_bandwidth_in_byte = 10000000;
network_max_packet_per_second = 1000000;
last3 = 0;
last4 = 0;
last5 = 0;
last6 = 0;
}
{
if ($1 ~ /Average/)
{ # Skip the Average values
n = "";
next;
}
if ($2 ~ /all/)
{ # This is the cpu info
print $3 > FILENAME".cpu.user.dat";
# print $4 > FILENAME".cpu.nice.dat";
print $5 > FILENAME".cpu.system.dat";
# print $6 > FILENAME".cpu.iowait.dat";
print $7 > FILENAME".cpu.idle.dat";
print 100-$7 > FILENAME".cpu.busy.dat";
}
if ($2 ~ /eth0/)
{ # This is the eth0 network info
if ($3 > network_max_packet_per_second)
print last3 > FILENAME".net.rxpck.dat"; # Total number of packets received per second.
else
{
last3 = $3;
print $3 > FILENAME".net.rxpck.dat"; # Total number of packets received per second.
}
if ($4 > network_max_packet_per_second)
print last4 > FILENAME".net.txpck.dat"; # Total number of packets transmitted per second.
else
{
last4 = $4;
print $4 > FILENAME".net.txpck.dat"; # Total number of packets transmitted per second.
}
if ($5 > network_max_bandwidth_in_byte)
print last5 > FILENAME".net.rxbyt.dat"; # Total number of bytes received per second.
else
{
last5 = $5;
print $5 > FILENAME".net.rxbyt.dat"; # Total number of bytes received per second.
}
if ($6 > network_max_bandwidth_in_byte)
print last6 > FILENAME".net.txbyt.dat"; # Total number of bytes transmitted per second.
else
{
last6 = $6;
print $6 > FILENAME".net.txbyt.dat"; # Total number of bytes transmitted per second.
}
# print $7 > FILENAME".net.rxcmp.dat"; # Number of compressed packets received per second (for cslip etc.).
# print $8 > FILENAME".net.txcmp.dat"; # Number of compressed packets transmitted per second.
# print $9 > FILENAME".net.rxmcst.dat"; # Number of multicast packets received per second.
}
# Detect which is the next info to be parsed
if ($2 ~ /proc|cswch|tps|kbmemfree|totsck/)
{
n = $2;
}
# Only get lines with numbers (real data !)
if ($2 ~ /[0-9]/)
{
if (n == "proc/s")
{ # This is the proc/s info
print $2 > FILENAME".proc.dat";
# n = "";
}
if (n == "cswch/s")
{ # This is the context switches per second info
print $2 > FILENAME".ctxsw.dat";
# n = "";
}
if (n == "tps")
{ # This is the disk info
print $2 > FILENAME".disk.tps.dat"; # total transfers per second
print $3 > FILENAME".disk.rtps.dat"; # read requests per second
print $4 > FILENAME".disk.wtps.dat"; # write requests per second
print $5 > FILENAME".disk.brdps.dat"; # block reads per second
print $6 > FILENAME".disk.bwrps.dat"; # block writes per second
# n = "";
}
if (n == "kbmemfree")
{ # This is the mem info
print $2 > FILENAME".mem.kbmemfree.dat"; # Amount of free memory available in kilobytes.
print $3 > FILENAME".mem.kbmemused.dat"; # Amount of used memory in kilobytes. This does not take into account memory used by the kernel itself.
print $4 > FILENAME".mem.memused.dat"; # Percentage of used memory.
# It appears the kbmemshrd has been removed from the sysstat output - ntolia
# print $X > FILENAME".mem.kbmemshrd.dat"; # Amount of memory shared by the system in kilobytes. Always zero with 2.4 kernels.
# print $5 > FILENAME".mem.kbbuffers.dat"; # Amount of memory used as buffers by the kernel in kilobytes.
print $6 > FILENAME".mem.kbcached.dat"; # Amount of memory used to cache data by the kernel in kilobytes.
# print $7 > FILENAME".mem.kbswpfree.dat"; # Amount of free swap space in kilobytes.
# print $8 > FILENAME".mem.kbswpused.dat"; # Amount of used swap space in kilobytes.
print $9 > FILENAME".mem.swpused.dat"; # Percentage of used swap space.
# n = "";
}
if (n == "totsck")
{ # This is the socket info
print $2 > FILENAME".sock.totsck.dat"; # Total number of used sockets.
print $3 > FILENAME".sock.tcpsck.dat"; # Number of TCP sockets currently in use.
# print $4 > FILENAME".sock.udpsck.dat"; # Number of UDP sockets currently in use.
# print $5 > FILENAME".sock.rawsck.dat"; # Number of RAW sockets currently in use.
# print $6 > FILENAME".sock.ip-frag.dat"; # Number of IP fragments currently in use.
# n = "";
}
}
}
END {
print " '" FILENAME "' done.";
}

samples/Binary/cube.stl (new binary file)

Binary file not shown.

View File

@@ -0,0 +1,327 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: protocol-buffer.proto
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
#include "protocol-buffer.pb.h"
#include <algorithm>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/generated_message_reflection.h>
#include <google/protobuf/reflection_ops.h>
#include <google/protobuf/wire_format.h>
// @@protoc_insertion_point(includes)
namespace persons {
namespace {
const ::google::protobuf::Descriptor* Person_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
Person_reflection_ = NULL;
} // namespace
void protobuf_AssignDesc_protocol_2dbuffer_2eproto() {
protobuf_AddDesc_protocol_2dbuffer_2eproto();
const ::google::protobuf::FileDescriptor* file =
::google::protobuf::DescriptorPool::generated_pool()->FindFileByName(
"protocol-buffer.proto");
GOOGLE_CHECK(file != NULL);
Person_descriptor_ = file->message_type(0);
static const int Person_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Person, name_),
};
Person_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
Person_descriptor_,
Person::default_instance_,
Person_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Person, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(Person, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(Person));
}
namespace {
GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_);
inline void protobuf_AssignDescriptorsOnce() {
::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_,
&protobuf_AssignDesc_protocol_2dbuffer_2eproto);
}
void protobuf_RegisterTypes(const ::std::string&) {
protobuf_AssignDescriptorsOnce();
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
Person_descriptor_, &Person::default_instance());
}
} // namespace
void protobuf_ShutdownFile_protocol_2dbuffer_2eproto() {
delete Person::default_instance_;
delete Person_reflection_;
}
void protobuf_AddDesc_protocol_2dbuffer_2eproto() {
static bool already_here = false;
if (already_here) return;
already_here = true;
GOOGLE_PROTOBUF_VERIFY_VERSION;
::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
"\n\025protocol-buffer.proto\022\007persons\"\026\n\006Pers"
"on\022\014\n\004name\030\001 \002(\t", 56);
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
"protocol-buffer.proto", &protobuf_RegisterTypes);
Person::default_instance_ = new Person();
Person::default_instance_->InitAsDefaultInstance();
::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_protocol_2dbuffer_2eproto);
}
// Force AddDescriptors() to be called at static initialization time.
struct StaticDescriptorInitializer_protocol_2dbuffer_2eproto {
StaticDescriptorInitializer_protocol_2dbuffer_2eproto() {
protobuf_AddDesc_protocol_2dbuffer_2eproto();
}
} static_descriptor_initializer_protocol_2dbuffer_2eproto_;
// ===================================================================
#ifndef _MSC_VER
const int Person::kNameFieldNumber;
#endif // !_MSC_VER
Person::Person()
: ::google::protobuf::Message() {
SharedCtor();
}
void Person::InitAsDefaultInstance() {
}
Person::Person(const Person& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
}
void Person::SharedCtor() {
_cached_size_ = 0;
name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
Person::~Person() {
SharedDtor();
}
void Person::SharedDtor() {
if (name_ != &::google::protobuf::internal::kEmptyString) {
delete name_;
}
if (this != default_instance_) {
}
}
void Person::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* Person::descriptor() {
protobuf_AssignDescriptorsOnce();
return Person_descriptor_;
}
const Person& Person::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_protocol_2dbuffer_2eproto();
return *default_instance_;
}
Person* Person::default_instance_ = NULL;
Person* Person::New() const {
return new Person;
}
void Person::Clear() {
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (has_name()) {
if (name_ != &::google::protobuf::internal::kEmptyString) {
name_->clear();
}
}
}
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
bool Person::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) return false
::google::protobuf::uint32 tag;
while ((tag = input->ReadTag()) != 0) {
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// required string name = 1;
case 1: {
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_name()));
::google::protobuf::internal::WireFormat::VerifyUTF8String(
this->name().data(), this->name().length(),
::google::protobuf::internal::WireFormat::PARSE);
} else {
goto handle_uninterpreted;
}
if (input->ExpectAtEnd()) return true;
break;
}
default: {
handle_uninterpreted:
if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
return true;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
return true;
#undef DO_
}
void Person::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// required string name = 1;
if (has_name()) {
::google::protobuf::internal::WireFormat::VerifyUTF8String(
this->name().data(), this->name().length(),
::google::protobuf::internal::WireFormat::SERIALIZE);
::google::protobuf::internal::WireFormatLite::WriteString(
1, this->name(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
}
::google::protobuf::uint8* Person::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// required string name = 1;
if (has_name()) {
::google::protobuf::internal::WireFormat::VerifyUTF8String(
this->name().data(), this->name().length(),
::google::protobuf::internal::WireFormat::SERIALIZE);
target =
::google::protobuf::internal::WireFormatLite::WriteStringToArray(
1, this->name(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
return target;
}
int Person::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// required string name = 1;
if (has_name()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->name());
}
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void Person::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const Person* source =
::google::protobuf::internal::dynamic_cast_if_available<const Person*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void Person::MergeFrom(const Person& from) {
GOOGLE_CHECK_NE(&from, this);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_name()) {
set_name(from.name());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void Person::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void Person::CopyFrom(const Person& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool Person::IsInitialized() const {
if ((_has_bits_[0] & 0x00000001) != 0x00000001) return false;
return true;
}
void Person::Swap(Person* other) {
if (other != this) {
std::swap(name_, other->name_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata Person::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = Person_descriptor_;
metadata.reflection = Person_reflection_;
return metadata;
}
// @@protoc_insertion_point(namespace_scope)
} // namespace persons
// @@protoc_insertion_point(global_scope)


@@ -0,0 +1,218 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: protocol-buffer.proto
#ifndef PROTOBUF_protocol_2dbuffer_2eproto__INCLUDED
#define PROTOBUF_protocol_2dbuffer_2eproto__INCLUDED
#include <string>
#include <google/protobuf/stubs/common.h>
#if GOOGLE_PROTOBUF_VERSION < 2005000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 2005000 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/message.h>
#include <google/protobuf/repeated_field.h>
#include <google/protobuf/extension_set.h>
#include <google/protobuf/unknown_field_set.h>
// @@protoc_insertion_point(includes)
namespace persons {
// Internal implementation detail -- do not call these.
void protobuf_AddDesc_protocol_2dbuffer_2eproto();
void protobuf_AssignDesc_protocol_2dbuffer_2eproto();
void protobuf_ShutdownFile_protocol_2dbuffer_2eproto();
class Person;
// ===================================================================
class Person : public ::google::protobuf::Message {
public:
Person();
virtual ~Person();
Person(const Person& from);
inline Person& operator=(const Person& from) {
CopyFrom(from);
return *this;
}
inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const {
return _unknown_fields_;
}
inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() {
return &_unknown_fields_;
}
static const ::google::protobuf::Descriptor* descriptor();
static const Person& default_instance();
void Swap(Person* other);
// implements Message ----------------------------------------------
Person* New() const;
void CopyFrom(const ::google::protobuf::Message& from);
void MergeFrom(const ::google::protobuf::Message& from);
void CopyFrom(const Person& from);
void MergeFrom(const Person& from);
void Clear();
bool IsInitialized() const;
int ByteSize() const;
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input);
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const;
::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const;
int GetCachedSize() const { return _cached_size_; }
private:
void SharedCtor();
void SharedDtor();
void SetCachedSize(int size) const;
public:
::google::protobuf::Metadata GetMetadata() const;
// nested types ----------------------------------------------------
// accessors -------------------------------------------------------
// required string name = 1;
inline bool has_name() const;
inline void clear_name();
static const int kNameFieldNumber = 1;
inline const ::std::string& name() const;
inline void set_name(const ::std::string& value);
inline void set_name(const char* value);
inline void set_name(const char* value, size_t size);
inline ::std::string* mutable_name();
inline ::std::string* release_name();
inline void set_allocated_name(::std::string* name);
// @@protoc_insertion_point(class_scope:persons.Person)
private:
inline void set_has_name();
inline void clear_has_name();
::google::protobuf::UnknownFieldSet _unknown_fields_;
::std::string* name_;
mutable int _cached_size_;
::google::protobuf::uint32 _has_bits_[(1 + 31) / 32];
friend void protobuf_AddDesc_protocol_2dbuffer_2eproto();
friend void protobuf_AssignDesc_protocol_2dbuffer_2eproto();
friend void protobuf_ShutdownFile_protocol_2dbuffer_2eproto();
void InitAsDefaultInstance();
static Person* default_instance_;
};
// ===================================================================
// ===================================================================
// Person
// required string name = 1;
inline bool Person::has_name() const {
return (_has_bits_[0] & 0x00000001u) != 0;
}
inline void Person::set_has_name() {
_has_bits_[0] |= 0x00000001u;
}
inline void Person::clear_has_name() {
_has_bits_[0] &= ~0x00000001u;
}
inline void Person::clear_name() {
if (name_ != &::google::protobuf::internal::kEmptyString) {
name_->clear();
}
clear_has_name();
}
inline const ::std::string& Person::name() const {
return *name_;
}
inline void Person::set_name(const ::std::string& value) {
set_has_name();
if (name_ == &::google::protobuf::internal::kEmptyString) {
name_ = new ::std::string;
}
name_->assign(value);
}
inline void Person::set_name(const char* value) {
set_has_name();
if (name_ == &::google::protobuf::internal::kEmptyString) {
name_ = new ::std::string;
}
name_->assign(value);
}
inline void Person::set_name(const char* value, size_t size) {
set_has_name();
if (name_ == &::google::protobuf::internal::kEmptyString) {
name_ = new ::std::string;
}
name_->assign(reinterpret_cast<const char*>(value), size);
}
inline ::std::string* Person::mutable_name() {
set_has_name();
if (name_ == &::google::protobuf::internal::kEmptyString) {
name_ = new ::std::string;
}
return name_;
}
inline ::std::string* Person::release_name() {
clear_has_name();
if (name_ == &::google::protobuf::internal::kEmptyString) {
return NULL;
} else {
::std::string* temp = name_;
name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
return temp;
}
}
inline void Person::set_allocated_name(::std::string* name) {
if (name_ != &::google::protobuf::internal::kEmptyString) {
delete name_;
}
if (name) {
set_has_name();
name_ = name;
} else {
clear_has_name();
name_ = const_cast< ::std::string*>(&::google::protobuf::internal::kEmptyString);
}
}
// @@protoc_insertion_point(namespace_scope)
} // namespace persons
#ifndef SWIG
namespace google {
namespace protobuf {
} // namespace google
} // namespace protobuf
#endif // SWIG
// @@protoc_insertion_point(global_scope)
#endif // PROTOBUF_protocol_2dbuffer_2eproto__INCLUDED

File diff suppressed because it is too large

15669
samples/C/sgd_fast.c Normal file

File diff suppressed because it is too large


@@ -1,13 +1,3 @@
(************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
(* \VV/ **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(** This file is deprecated, for a tree on list, use [Mergesort.v]. *)
(** A development of Treesort on Heap trees. It has an average
complexity of O(n.log n) but of O(n²) in the worst case (e.g. if
the list is already sorted) *)
@@ -88,9 +78,9 @@ Section defs.
forall P:Tree -> Type,
P Tree_Leaf ->
(forall (a:A) (T1 T2:Tree),
leA_Tree a T1 ->
leA_Tree a T2 ->
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
leA_Tree a T1 ->
leA_Tree a T2 ->
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
forall T:Tree, is_heap T -> P T.
Proof.
simple induction T; auto with datatypes.
@@ -105,9 +95,9 @@ Section defs.
forall P:Tree -> Set,
P Tree_Leaf ->
(forall (a:A) (T1 T2:Tree),
leA_Tree a T1 ->
leA_Tree a T2 ->
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
leA_Tree a T1 ->
leA_Tree a T2 ->
is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
forall T:Tree, is_heap T -> P T.
Proof.
simple induction T; auto with datatypes.
@@ -135,13 +125,13 @@ Section defs.
(forall a, HdRel leA a l1 -> HdRel leA a l2 -> HdRel leA a l) ->
merge_lem l1 l2.
Require Import Morphisms.
Instance: Equivalence (@meq A).
Proof. constructor; auto with datatypes. red. apply meq_trans. Defined.
Instance: Proper (@meq A ++> @meq _ ++> @meq _) (@munion A).
Proof. intros x y H x' y' H'. now apply meq_congr. Qed.
Lemma merge :
forall l1:list A, Sorted leA l1 ->
forall l2:list A, Sorted leA l2 -> merge_lem l1 l2.
@@ -150,8 +140,8 @@ Section defs.
apply merge_exist with l2; auto with datatypes.
rename l1 into l.
revert l2 H0. fix 1. intros.
destruct l2 as [|a0 l0].
apply merge_exist with (a :: l); simpl; auto with datatypes.
destruct l2 as [|a0 l0].
apply merge_exist with (a :: l); simpl; auto with datatypes.
elim (leA_dec a a0); intros.
(* 1 (leA a a0) *)
@@ -159,18 +149,18 @@ Section defs.
destruct (merge l H (a0 :: l0) H0).
apply merge_exist with (a :: l1). clear merge merge0.
auto using cons_sort, cons_leA with datatypes.
simpl. rewrite m. now rewrite munion_ass.
intros. apply cons_leA.
simpl. rewrite m. now rewrite munion_ass.
intros. apply cons_leA.
apply (@HdRel_inv _ leA) with l; trivial with datatypes.
(* 2 (leA a0 a) *)
apply Sorted_inv in H0. destruct H0.
destruct (merge0 l0 H0). clear merge merge0.
apply merge_exist with (a0 :: l1);
destruct (merge0 l0 H0). clear merge merge0.
apply merge_exist with (a0 :: l1);
auto using cons_sort, cons_leA with datatypes.
simpl; rewrite m. simpl. setoid_rewrite munion_ass at 1. rewrite munion_comm.
repeat rewrite munion_ass. setoid_rewrite munion_comm at 3. reflexivity.
intros. apply cons_leA.
intros. apply cons_leA.
apply (@HdRel_inv _ leA) with l0; trivial with datatypes.
Qed.
@@ -186,7 +176,7 @@ Section defs.
match t with
| Tree_Leaf => emptyBag
| Tree_Node a t1 t2 =>
munion (contents t1) (munion (contents t2) (singletonBag a))
munion (contents t1) (munion (contents t2) (singletonBag a))
end.
@@ -272,11 +262,11 @@ Section defs.
apply flat_exist with (a :: l); simpl; auto with datatypes.
apply meq_trans with
(munion (list_contents _ eqA_dec l1)
(munion (list_contents _ eqA_dec l2) (singletonBag a))).
(munion (list_contents _ eqA_dec l2) (singletonBag a))).
apply meq_congr; auto with datatypes.
apply meq_trans with
(munion (singletonBag a)
(munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2))).
(munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2))).
apply munion_rotate.
apply meq_right; apply meq_sym; trivial with datatypes.
Qed.


@@ -1,11 +1,3 @@
(************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
(* \VV/ **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
Require Import Omega Relations Multiset SetoidList.
(** This file is deprecated, use [Permutation.v] instead.
@@ -154,7 +146,7 @@ Lemma permut_add_cons_inside :
Proof.
intros;
replace (a :: l) with ([] ++ a :: l); trivial;
apply permut_add_inside; trivial.
apply permut_add_inside; trivial.
Qed.
Lemma permut_middle :
@@ -168,8 +160,8 @@ Lemma permut_sym_app :
Proof.
intros l1 l2;
unfold permutation, meq;
intro a; do 2 rewrite list_contents_app; simpl;
auto with arith.
intro a; do 2 rewrite list_contents_app; simpl;
auto with arith.
Qed.
Lemma permut_rev :


@@ -1,17 +1,5 @@
(************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
(* \VV/ **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*********************************************************************)
(** * List permutations as a composition of adjacent transpositions *)
(*********************************************************************)
(* Adapted in May 2006 by Jean-Marc Notin from initial contents by
Laurent Théry (Huffmann contribution, October 2003) *)
Laurent Thery (Huffmann contribution, October 2003) *)
Require Import List Setoid Compare_dec Morphisms.
Import ListNotations. (* For notations [] and [a;b;c] *)


@@ -1,10 +1,2 @@
(************************************************************************)
(* v * The Coq Proof Assistant / The Coq Development Team *)
(* <O___,, * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2010 *)
(* \VV/ **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
Require Export Sorted.
Require Export Mergesort.

42
samples/Ecl/sample.ecl Normal file

@@ -0,0 +1,42 @@
/*
* Multi-line comment
*/
#option ('slidingJoins', true);
namesRecord :=
RECORD
string20 surname;
string10 forename;
integer2 age;
integer2 dadAge;
integer2 mumAge;
END;
namesRecord2 :=
record
string10 extra;
namesRecord;
end;
namesTable := dataset('x',namesRecord,FLAT);
namesTable2 := dataset('y',namesRecord2,FLAT);
integer2 aveAgeL(namesRecord l) := (l.dadAge+l.mumAge)/2;
integer2 aveAgeR(namesRecord2 r) := (r.dadAge+r.mumAge)/2;
// Standard join on a function of left and right
output(join(namesTable, namesTable2, aveAgeL(left) = aveAgeR(right)));
//Several simple examples of sliding join syntax
output(join(namesTable, namesTable2, left.age >= right.age - 10 and left.age <= right.age +10));
output(join(namesTable, namesTable2, left.age between right.age - 10 and right.age +10));
output(join(namesTable, namesTable2, left.age between right.age + 10 and right.age +30));
output(join(namesTable, namesTable2, left.age between (right.age + 20) - 10 and (right.age +20) + 10));
output(join(namesTable, namesTable2, aveAgeL(left) between aveAgeR(right)+10 and aveAgeR(right)+40));
//Same, but on strings. Also includes age to ensure sort is done by non-sliding before sliding.
output(join(namesTable, namesTable2, left.surname between right.surname[1..10]+'AAAAAAAAAA' and right.surname[1..10]+'ZZZZZZZZZZ' and left.age=right.age));
output(join(namesTable, namesTable2, left.surname between right.surname[1..10]+'AAAAAAAAAA' and right.surname[1..10]+'ZZZZZZZZZZ' and left.age=right.age,all));
//This should not generate a self join
output(join(namesTable, namesTable, left.age between right.age - 10 and right.age +10));

127
samples/Elm/Basic.elm Normal file

@@ -0,0 +1,127 @@
import List (intercalate,intersperse)
import Website.Skeleton
import Website.ColorScheme
addFolder folder lst =
let add (x,y) = (x, folder ++ y ++ ".elm") in
let f (n,xs) = (n, map add xs) in
map f lst
elements = addFolder "Elements/"
[ ("Primitives",
[ ("Text" , "HelloWorld")
, ("Images", "Image")
, ("Fitted Images", "FittedImage")
, ("Videos", "Video")
, ("Markdown", "Markdown")
])
, ("Formatting",
[ ("Size" , "Size")
, ("Opacity" , "Opacity")
, ("Text" , "Text")
, ("Typeface", "Typeface")
])
, ("Layout",
[ ("Simple Flow", "FlowDown1a")
, ("Flow Down" , "FlowDown2")
, ("Layers" , "Layers")
, ("Positioning", "Position")
, ("Spacers" , "Spacer")
])
, ("Collage", [ ("Lines" , "Lines")
, ("Shapes" , "Shapes")
, ("Sprites" , "Sprite")
, ("Elements" , "ToForm")
, ("Colors" , "Color")
, ("Textures" , "Texture")
, ("Transforms", "Transforms")
])
]
functional = addFolder "Functional/"
[ ("Recursion",
[ ("Factorial" , "Factorial")
, ("List Length", "Length")
, ("Zip" , "Zip")
, ("Quick Sort" , "QuickSort")
])
, ("Functions",
[ ("Anonymous Functions", "Anonymous")
, ("Application" , "Application")
, ("Composition" , "Composition")
, ("Infix Operators" , "Infix")
])
, ("Higher-Order",
[ ("Map" , "Map")
, ("Fold" , "Sum")
, ("Filter" , "Filter")
, ("ZipWith", "ZipWith")
])
, ("Data Types",
[ ("Maybe", "Maybe")
, ("Boolean Expressions", "BooleanExpressions")
, ("Tree", "Tree")
])
]
reactive = addFolder "Reactive/"
[ ("Mouse", [ ("Position", "Position")
, ("Presses" , "IsDown")
, ("Clicks" , "CountClicks")
, ("Position+Image", "ResizeYogi")
, ("Position+Collage" , "Transforms")
-- , ("Hover" , "IsAbove")
])
,("Keyboard",[ ("Keys Down" , "KeysDown")
, ("Key Presses", "CharPressed")
])
, ("Window", [ ("Size", "ResizePaint")
, ("Centering", "Centering")
])
, ("Time", [ ("Before and After", "Between")
, ("Every" , "Every")
, ("Clock" , "Clock")
])
, ("Input", [ ("Text Fields", "TextField")
, ("Passwords" , "Password")
, ("Check Boxes", "CheckBox")
, ("String Drop Down", "StringDropDown")
, ("Drop Down", "DropDown")
])
, ("Random", [ ("Randomize", "Randomize") ])
, ("HTTP", [ ("Zip Codes", "ZipCodes") ])
, ("Filters",[ ("Sample", "SampleOn")
, ("Keep If", "KeepIf")
, ("Drop Repeats", "DropRepeats")
])
]
example (name, loc) = Text.link ("/edit/examples/" ++ loc) (toText name)
toLinks (title, links) =
flow right [ width 130 (text $ toText " " ++ italic (toText title))
, text (intercalate (bold . Text.color accent4 $ toText " &middot; ") $ map example links)
]
insertSpace lst = case lst of { x:xs -> x : spacer 1 5 : xs ; [] -> [] }
subsection w (name,info) =
flow down . insertSpace . intersperse (spacer 1 1) . map (width w) $
(text . bold $ toText name) : map toLinks info
words = [markdown|
### Basic Examples
Each example listed below focuses on a single function or concept.
These examples demonstrate all of the basic building blocks of Elm.
|]
content w =
words : map (subsection w) [ ("Display",elements), ("React",reactive), ("Compute",functional) ]
exampleSets w = flow down . map (width w) . intersperse (plainText " ") $ content w
main = lift (skeleton exampleSets) Window.width

32
samples/Elm/QuickSort.elm Normal file

@@ -0,0 +1,32 @@
main = asText (qsort [3,9,1,8,5,4,7])
qsort lst =
  case lst of
    x:xs -> qsort (filter ((>=)x) xs) ++ [x] ++ qsort (filter ((<)x) xs)
    []   -> []
{---------------------
QuickSort works as follows:
- Choose a pivot element which will be placed in the "middle" of the sorted list.
In our case we are choosing the first element as the pivot.
- Gather all of the elements less than the pivot (the first filter).
We know that these must come before our pivot element in the sorted list.
Note: ((>=)x) === (\y -> (>=) x y) === (\y -> x >= y)
- Gather all of the elements greater than the pivot (the second filter).
We know that these must come after our pivot element in the sorted list.
- Run `qsort` on the lesser elements, producing a sorted list that contains
only elements less than the pivot. Put these before the pivot.
- Run `qsort` on the greater elements, producing a sorted list. Put these
after the pivot.
Note that choosing a bad pivot can have bad effects. Take a sorted list with
N elements. The pivot will always be the lowest member, meaning that it does
not divide the list very evenly. The list of lessers has 0 elements
and the list of greaters has N-1 elements. This means qsort will be called
N times, each call looking through the entire list. This means, in the worst
case, QuickSort will make N^2 comparisons.
----------------------}

91
samples/Elm/Tree.elm Normal file

@@ -0,0 +1,91 @@
{-----------------------------------------------------------------
Overview: A "Tree" represents a binary tree. A "Node" in a binary
tree always has two children. A tree can also be "Empty". Below
I have defined "Tree" and a number of useful functions.
This example also includes some challenge problems :)
-----------------------------------------------------------------}
data Tree a = Node a (Tree a) (Tree a) | Empty
empty = Empty
singleton v = Node v Empty Empty
insert x tree =
case tree of
Empty -> singleton x
Node y left right ->
if x == y then tree else
if x < y then Node y (insert x left) right
else Node y left (insert x right)
fromList xs = foldl insert empty xs
depth tree =
case tree of
Node v left right -> 1 + max (depth left) (depth right)
Empty -> 0
map f tree =
case tree of
Node v left right -> Node (f v) (map f left) (map f right)
Empty -> Empty
t1 = fromList [1,2,3]
t2 = fromList [2,1,3]
main = flow down [ display "depth" depth t1
, display "depth" depth t2
, display "map ((+)1)" (map ((+)1)) t2
]
display name f v =
text . monospace . toText $
concat [ show (f v), " &lArr; ", name, " ", show v ]
{-----------------------------------------------------------------
Exercises:
(1) Sum all of the elements of a tree.
sum :: Tree Number -> Number
(2) Flatten a tree into a list.
flatten :: Tree a -> [a]
(3) Check to see if an element is in a given tree.
isElement :: a -> Tree a -> Bool
(4) Write a general fold function that acts on trees. The fold
function does not need to guarantee a particular order of
traversal.
fold :: (a -> b -> b) -> b -> Tree a -> b
(5) Use "fold" to do exercises 1-3 in one line each. The best
readable versions I have come up with have the following length
in characters including spaces and function name:
sum: 16
flatten: 21
isElement: 46
See if you can match or beat me! Don't forget about currying
and partial application!
(6) Can "fold" be used to implement "map" or "depth"?
(7) Try experimenting with different ways to traverse a
tree: pre-order, in-order, post-order, depth-first, etc.
More info at: http://en.wikipedia.org/wiki/Tree_traversal
-----------------------------------------------------------------}


@@ -0,0 +1,473 @@
;; ess-julia.el --- ESS julia mode and inferior interaction
;;
;; Copyright (C) 2012 Vitalie Spinu.
;;
;; Filename: ess-julia.el
;; Author: Vitalie Spinu (based on julia-mode.el from julia-lang project)
;; Maintainer: Vitalie Spinu
;; Created: 02-04-2012 (ESS 12.03)
;; Keywords: ESS, julia
;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; This file is *NOT* part of GNU Emacs.
;; This file is part of ESS
;;
;; This program is free software; you can redistribute it and/or
;; modify it under the terms of the GNU General Public License as
;; published by the Free Software Foundation; either version 3, any later version.
;;
;; This program is distributed in the hope that it will be useful, but WITHOUT
;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
;; FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
;; details.
;;
;; You should have received a copy of the GNU General Public License along with
;; this program; see the file COPYING. If not, write to the Free Software
;; Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
;; USA.
;;
;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;;; Commentary:
;; customise inferior-julia-program-name to point to your julia-release-basic
;; and start the inferior with M-x julia.
;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
(require 'compile); for compilation-* below
;;; Code:
(defvar julia-mode-hook nil)
(add-to-list 'auto-mode-alist '("\\.jl\\'" . julia-mode))
(defvar julia-syntax-table
(let ((table (make-syntax-table)))
(modify-syntax-entry ?_ "_" table) ; underscores in words
(modify-syntax-entry ?@ "_" table)
(modify-syntax-entry ?. "_" table)
(modify-syntax-entry ?# "<" table) ; # single-line comment start
(modify-syntax-entry ?\n ">" table) ; \n single-line comment end
(modify-syntax-entry ?\{ "(} " table)
(modify-syntax-entry ?\} "){ " table)
(modify-syntax-entry ?\[ "(] " table)
(modify-syntax-entry ?\] ")[ " table)
(modify-syntax-entry ?\( "() " table)
(modify-syntax-entry ?\) ")( " table)
;(modify-syntax-entry ?\\ "." table) ; \ is an operator outside quotes
(modify-syntax-entry ?' "." table) ; character quote or transpose
(modify-syntax-entry ?\" "\"" table)
(modify-syntax-entry ?` "\"" table)
;; (modify-syntax-entry ?\" "." table)
(modify-syntax-entry ?? "." table)
(modify-syntax-entry ?$ "." table)
(modify-syntax-entry ?& "." table)
(modify-syntax-entry ?* "." table)
(modify-syntax-entry ?+ "." table)
(modify-syntax-entry ?- "." table)
(modify-syntax-entry ?< "." table)
(modify-syntax-entry ?> "." table)
(modify-syntax-entry ?= "." table)
(modify-syntax-entry ?% "." table)
table)
"Syntax table for julia-mode")
;; syntax table that holds within strings
(defvar julia-mode-string-syntax-table
(let ((table (make-syntax-table)))
table)
"Syntax table for julia-mode")
;; disable " inside char quote
(defvar julia-mode-char-syntax-table
(let ((table (make-syntax-table)))
(modify-syntax-entry ?\" "." table)
table)
"Syntax table for julia-mode")
;; not used
;; (defconst julia-string-regex
;; "\"[^\"]*?\\(\\(\\\\\\\\\\)*\\\\\"[^\"]*?\\)*\"")
(defconst julia-char-regex
"\\(\\s(\\|\\s-\\|-\\|[,%=<>\\+*/?&|$!\\^~\\\\;:]\\|^\\)\\('\\(\\([^']*?[^\\\\]\\)\\|\\(\\\\\\\\\\)\\)'\\)")
(defconst julia-unquote-regex
"\\(\\s(\\|\\s-\\|-\\|[,%=<>\\+*/?&|!\\^~\\\\;:]\\|^\\)\\($[a-zA-Z0-9_]+\\)")
(defconst julia-forloop-in-regex
"for +[^
]+ +.*\\(in\\)\\(\\s-\\|$\\)+")
(defconst ess-subset-regexp
"\\[[0-9:, ]*\\]" )
(defconst julia-font-lock-defaults
(list '("\\<\\(\\|Uint\\(8\\|16\\|32\\|64\\)\\|Int\\(8\\|16\\|32\\|64\\)\\|Integer\\|Float\\|Float32\\|Float64\\|Complex128\\|Complex64\\|ComplexNum\\|Bool\\|Char\\|Number\\|Scalar\\|Real\\|Int\\|Uint\\|Array\\|DArray\\|AbstractArray\\|AbstractVector\\|AbstractMatrix\\|SubArray\\|StridedArray\\|StridedVector\\|StridedMatrix\\|VecOrMat\\|StridedVecOrMat\\|Range\\|Range1\\|SparseMatrixCSC\\|Tuple\\|NTuple\\|Buffer\\|Size\\|Index\\|Symbol\\|Function\\|Vector\\|Matrix\\|Union\\|Type\\|Any\\|Complex\\|None\\|String\\|Ptr\\|Void\\|Exception\\|PtrInt\\|Long\\|Ulong\\)\\>" .
font-lock-type-face)
(cons
(concat "\\<\\("
(mapconcat
'identity
'("if" "else" "elseif" "while" "for" "begin" "end" "quote"
"try" "catch" "return" "local" "abstract" "function" "macro" "ccall"
"typealias" "break" "continue" "type" "global" "@\\w+"
"module" "import" "export" "const" "let" "bitstype" "using")
"\\|") "\\)\\>")
'font-lock-keyword-face)
'("\\<\\(true\\|false\\|C_NULL\\|Inf\\|NaN\\|Inf32\\|NaN32\\)\\>" . font-lock-constant-face)
(list julia-unquote-regex 2 'font-lock-constant-face)
(list julia-char-regex 2 'font-lock-string-face)
(list julia-forloop-in-regex 1 'font-lock-keyword-face)
;; (cons ess-subset-regexp 'font-lock-constant-face)
(cons "\\(\\sw+\\) ?(" '(1 font-lock-function-name-face keep))
;(list julia-string-regex 0 'font-lock-string-face)
))
(defconst julia-block-start-keywords
(list "if" "while" "for" "begin" "try" "function" "type" "let" "macro"
"quote"))
(defconst julia-block-other-keywords
(list "else" "elseif"))
(defconst julia-block-end-keywords
(list "end" "else" "elseif" "catch"))
(defun ess-inside-brackets-p (&optional pos)
(save-excursion
(let* ((pos (or pos (point)))
(beg (re-search-backward "\\[" (max (point-min) (- pos 1000)) t))
(end (re-search-forward "\\]" (min (point-max) (+ pos 1000)) t)))
(and beg end (> pos beg) (> end pos)))))
(defun julia-at-keyword (kw-list)
; not a keyword if used as a field name, X.word, or quoted, :word
(and (or (= (point) 1)
(and (not (equal (char-before (point)) ?.))
(not (equal (char-before (point)) ?:))))
(not (ess-inside-string-or-comment-p (point)))
(not (ess-inside-brackets-p (point)))
(member (current-word) kw-list)))
; get the position of the last open block
(defun julia-last-open-block-pos (min)
(let ((count 0))
(while (not (or (> count 0) (<= (point) min)))
(backward-word 1)
(setq count
(cond ((julia-at-keyword julia-block-start-keywords)
(+ count 1))
((and (equal (current-word) "end")
(not (ess-inside-comment-p)) (not (ess-inside-brackets-p)))
(- count 1))
(t count))))
(if (> count 0)
(point)
nil)))
; get indent for last open block
(defun julia-last-open-block (min)
(let ((pos (julia-last-open-block-pos min)))
(and pos
(progn
(goto-char pos)
(+ julia-basic-offset (current-indentation))))))
; return indent implied by a special form opening on the previous line, if any
(defun julia-form-indent ()
(forward-line -1)
(end-of-line)
(backward-sexp)
(if (julia-at-keyword julia-block-other-keywords)
(+ julia-basic-offset (current-indentation))
(if (char-equal (char-after (point)) ?\()
(progn
(backward-word 1)
(let ((cur (current-indentation)))
(if (julia-at-keyword julia-block-start-keywords)
(+ julia-basic-offset cur)
nil)))
nil)))
(defun julia-paren-indent ()
(let* ((p (parse-partial-sexp (save-excursion
;; only indent by paren if the last open
;; paren is closer than the last open
;; block
(or (julia-last-open-block-pos (point-min))
(point-min)))
(progn (beginning-of-line)
(point))))
(pos (cadr p)))
(if (or (= 0 (car p)) (null pos))
nil
(progn (goto-char pos) (+ 1 (current-column))))))
; (forward-line -1)
; (end-of-line)
; (let ((pos (condition-case nil
; (scan-lists (point) -1 1)
; (error nil))))
; (if pos
; (progn (goto-char pos) (+ 1 (current-column)))
; nil)))
(defun julia-indent-line ()
"Indent current line of julia code"
(interactive)
; (save-excursion
(end-of-line)
(indent-line-to
(or (and (ess-inside-string-p (point-at-bol)) 0)
(save-excursion (ignore-errors (julia-form-indent)))
(save-excursion (ignore-errors (julia-paren-indent)))
;; previous line ends in =
(save-excursion
(beginning-of-line)
(skip-chars-backward " \t\n")
(when (eql (char-before) ?=)
(+ julia-basic-offset (current-indentation))))
(save-excursion
(let ((endtok (progn
(beginning-of-line)
(forward-to-indentation 0)
(julia-at-keyword julia-block-end-keywords))))
(ignore-errors (+ (julia-last-open-block (point-min))
(if endtok (- julia-basic-offset) 0)))))
;; take same indentation as previous line
(save-excursion (forward-line -1)
(current-indentation))
0))
(when (julia-at-keyword julia-block-end-keywords)
(forward-word 1)))
(defvar julia-editing-alist
'((paragraph-start . (concat "\\s-*$\\|" page-delimiter))
(paragraph-separate . (concat "\\s-*$\\|" page-delimiter))
(paragraph-ignore-fill-prefix . t)
(require-final-newline . t)
(comment-start . "# ")
(comment-add . 1)
(comment-start-skip . "#+\\s-*")
(comment-column . 40)
;;(comment-indent-function . 'S-comment-indent)
;;(ess-comment-indent . 'S-comment-indent)
;; (ess-indent-line . 'S-indent-line)
;;(ess-calculate-indent . 'ess-calculate-indent)
(ess-indent-line-function . 'julia-indent-line)
(indent-line-function . 'julia-indent-line)
(parse-sexp-ignore-comments . t)
(ess-style . ess-default-style) ;; ignored
(ess-local-process-name . nil)
;;(ess-keep-dump-files . 'ask)
(ess-mode-syntax-table . julia-syntax-table)
;; For Changelog add, require ' ' before <- : "attr<-" is a function name :
;; (add-log-current-defun-header-regexp . "^\\(.+\\)\\s-+=[ \t\n]*function")
(add-log-current-defun-header-regexp . "^.*function[ \t]*\\([^ \t(]*\\)[ \t]*(")
(font-lock-defaults . '(julia-font-lock-defaults
nil nil ((?\_ . "w"))))
)
"General options for julia source files.")
(autoload 'inferior-ess "ess-inf" "Run an ESS process.")
(autoload 'ess-mode "ess-mode" "Edit an ESS process.")
(defun julia-send-string-function (process string visibly)
(let ((file (concat temporary-file-directory "julia_eval_region.jl")))
(with-temp-file file
(insert string))
(process-send-string process (format ess-load-command file))))
(defun julia-get-help-topics (&optional proc)
(ess-get-words-from-vector "ESS.all_help_topics()\n"))
;; (ess-command com)))
(defvar julia-help-command "help(\"%s\")\n")
(defvar ess-julia-error-regexp-alist '(julia-in julia-at)
"List of symbols which are looked up in `compilation-error-regexp-alist-alist'.")
(add-to-list 'compilation-error-regexp-alist-alist
'(julia-in "^\\s-*in [^ \t\n]* \\(at \\(.*\\):\\([0-9]+\\)\\)" 2 3 nil 2 1))
(add-to-list 'compilation-error-regexp-alist-alist
'(julia-at "^\\S-+\\s-+\\(at \\(.*\\):\\([0-9]+\\)\\)" 2 3 nil 2 1))
(defvar julia-customize-alist
'((comint-use-prompt-regexp . t)
(ess-eldoc-function . 'ess-julia-eldoc-function)
(inferior-ess-primary-prompt . "a> ") ;; from julia>
(inferior-ess-secondary-prompt . nil)
(inferior-ess-prompt . "\\w*> ")
(ess-local-customize-alist . 'julia-customize-alist)
(inferior-ess-program . inferior-julia-program-name)
(inferior-ess-font-lock-defaults . julia-font-lock-defaults)
(ess-get-help-topics-function . 'julia-get-help-topics)
(ess-help-web-search-command . "http://docs.julialang.org/en/latest/search/?q=%s")
(ess-load-command . "include(\"%s\")\n")
(ess-funargs-command . "ESS.fun_args(\"%s\")\n")
(ess-dump-error-re . "in \\w* at \\(.*\\):[0-9]+")
(ess-error-regexp . "\\(^\\s-*at\\s-*\\(?3:.*\\):\\(?2:[0-9]+\\)\\)")
(ess-error-regexp-alist . ess-julia-error-regexp-alist)
(ess-send-string-function . nil);'julia-send-string-function)
(ess-imenu-generic-expression . julia-imenu-generic-expression)
;; (inferior-ess-objects-command . inferior-R-objects-command)
;; (inferior-ess-search-list-command . "search()\n")
(inferior-ess-help-command . julia-help-command)
;; (inferior-ess-help-command . "help(\"%s\")\n")
(ess-language . "julia")
(ess-dialect . "julia")
(ess-suffix . "jl")
(ess-dump-filename-template . (ess-replace-regexp-in-string
"S$" ess-suffix ; in the one from custom:
ess-dump-filename-template-proto))
(ess-mode-syntax-table . julia-syntax-table)
(ess-mode-editing-alist . julia-editing-alist)
(ess-change-sp-regexp . nil );ess-R-change-sp-regexp)
(ess-help-sec-regex . ess-help-R-sec-regex)
(ess-help-sec-keys-alist . ess-help-R-sec-keys-alist)
(ess-loop-timeout . ess-S-loop-timeout);fixme: dialect spec.
(ess-cmd-delay . ess-R-cmd-delay)
(ess-function-pattern . ess-R-function-pattern)
(ess-object-name-db-file . "ess-r-namedb.el" )
(ess-smart-operators . ess-R-smart-operators)
(inferior-ess-help-filetype . nil)
(inferior-ess-exit-command . "exit()\n")
;;harmful for shell-mode's C-a: -- but "necessary" for ESS-help?
(inferior-ess-start-file . nil) ;; "~/.ess-R"
(inferior-ess-start-args . "")
(inferior-ess-language-start . nil)
(ess-STERM . "iESS")
(ess-editor . R-editor)
(ess-pager . R-pager)
)
"Variables to customize for Julia -- set up later than emacs initialization.")
(defvar ess-julia-versions '("julia")
"List of partial strings for versions of Julia to access within ESS.
Each string specifies the start of a filename. If a filename
beginning with one of these strings is found on `exec-path', a M-x
command for that version of Julia is made available. ")
(defcustom inferior-julia-args ""
"String of arguments (see 'julia --help') used when starting julia."
;; These arguments are currently not passed to other versions of julia that have
;; been created using the variable `ess-r-versions'."
:group 'ess-julia
:type 'string)
;;;###autoload
(defun julia-mode (&optional proc-name)
"Major mode for editing julia source. See `ess-mode' for more help."
(interactive "P")
;; (setq ess-customize-alist julia-customize-alist)
(ess-mode julia-customize-alist proc-name)
;; for emacs < 24
;; (add-hook 'comint-dynamic-complete-functions 'ess-complete-object-name nil 'local)
;; for emacs >= 24
;; (remove-hook 'completion-at-point-functions 'ess-filename-completion 'local) ;; should be first
;; (add-hook 'completion-at-point-functions 'ess-object-completion nil 'local)
;; (add-hook 'completion-at-point-functions 'ess-filename-completion nil 'local)
(if (fboundp 'ess-add-toolbar) (ess-add-toolbar))
(set (make-local-variable 'end-of-defun-function) 'ess-end-of-function)
;; (local-set-key "\t" 'julia-indent-line) ;; temp workaround
;; (set (make-local-variable 'indent-line-function) 'julia-indent-line)
(set (make-local-variable 'julia-basic-offset) 4)
(setq imenu-generic-expression julia-imenu-generic-expression)
(imenu-add-to-menubar "Imenu-jl")
(run-hooks 'julia-mode-hook))
(defvar ess-julia-post-run-hook nil
"Functions run in process buffer after the initialization of
julia process.")
;;;###autoload
(defun julia (&optional start-args)
"Call 'julia',
Optional prefix (C-u) allows to set command line arguments, such as
--load=<file>. This should be OS agnostic.
If you have certain command line arguments that should always be passed
to julia, put them in the variable `inferior-julia-args'."
(interactive "P")
;; get settings, notably inferior-julia-program-name :
(if (null inferior-julia-program-name)
(error "'inferior-julia-program-name' does not point to 'julia-release-basic' executable")
(setq ess-customize-alist julia-customize-alist)
(ess-write-to-dribble-buffer ;; for debugging only
(format
"\n(julia): ess-dialect=%s, buf=%s, start-arg=%s\n current-prefix-arg=%s\n"
ess-dialect (current-buffer) start-args current-prefix-arg))
(let* ((jl-start-args
(concat inferior-julia-args " " ; add space just in case
(if start-args
(read-string
(concat "Starting Args"
(if inferior-julia-args
(concat " [other than '" inferior-julia-args "']"))
" ? "))
nil))))
(inferior-ess jl-start-args) ;; -> .. (ess-multi ...) -> .. (inferior-ess-mode) ..
(ess--tb-start)
(set (make-local-variable 'julia-basic-offset) 4)
;; remove ` from julia's logo
(goto-char (point-min))
(while (re-search-forward "`" nil t)
(replace-match "'"))
(goto-char (point-max))
(ess--inject-code-from-file (format "%sess-julia.jl" ess-etc-directory))
(with-ess-process-buffer nil
(run-mode-hooks 'ess-julia-post-run-hook))
)))
;;; ELDOC
(defun ess-julia-eldoc-function ()
"Return the doc string, or nil.
If an ESS process is not associated with the buffer, do not try
to look up any doc strings."
(interactive)
(when (and (ess-process-live-p)
(not (ess-process-get 'busy)))
(let ((funname (or (and ess-eldoc-show-on-symbol ;; aggressive completion
(symbol-at-point))
(car (ess--funname.start)))))
(when funname
(let* ((args (copy-sequence (nth 2 (ess-function-arguments funname))))
(W (- (window-width (minibuffer-window)) (+ 4 (length funname))))
(doc (concat (propertize funname 'face font-lock-function-name-face) ": ")))
(when args
(setq args (sort args (lambda (s1 s2)
(< (length s1) (length s2)))))
(setq doc (concat doc (pop args)))
(while (and args (< (length doc) W))
(setq doc (concat doc " "
(pop args))))
(when (and args (< (length doc) W))
(setq doc (concat doc " {--}"))))
doc)))))
;;; IMENU
(defvar julia-imenu-generic-expression
;; don't use syntax classes, screws egrep
'(("Function (_)" "[ \t]*function[ \t]+\\(_[^ \t\n]*\\)" 1)
("Function" "[ \t]*function[ \t]+\\([^_][^\t\n]*\\)" 1)
("Const" "[ \t]*const \\([^ \t\n]*\\)" 1)
("Type" "^[ \t]*[a-zA-Z0-9_]*type[a-zA-Z0-9_]* \\([^ \t\n]*\\)" 1)
("Require" " *\\(\\brequire\\)(\\([^ \t\n)]*\\)" 2)
("Include" " *\\(\\binclude\\)(\\([^ \t\n)]*\\)" 2)
;; ("Classes" "^.*setClass(\\(.*\\)," 1)
;; ("Coercions" "^.*setAs(\\([^,]+,[^,]*\\)," 1) ; show from and to
;; ("Generics" "^.*setGeneric(\\([^,]*\\)," 1)
;; ("Methods" "^.*set\\(Group\\|Replace\\)?Method(\"\\(.+\\)\"," 2)
;; ;;[ ]*\\(signature=\\)?(\\(.*,?\\)*\\)," 1)
;; ;;
;; ;;("Other" "^\\(.+\\)\\s-*<-[ \t\n]*[^\\(function\\|read\\|.*data\.frame\\)]" 1)
;; ("Package" "^.*\\(library\\|require\\)(\\(.*\\)," 2)
;; ("Data" "^\\(.+\\)\\s-*<-[ \t\n]*\\(read\\|.*data\.frame\\).*(" 1)))
))


@@ -0,0 +1,21 @@
#!/usr/bin/env escript
%% -*- erlang -*-
%%! -smp enable -sname factorial -mnesia debug verbose
main([String]) ->
try
N = list_to_integer(String),
F = fac(N),
io:format("factorial ~w = ~w\n", [N,F])
catch
_:_ ->
usage()
end;
main(_) ->
usage().
usage() ->
io:format("usage: factorial integer\n"),
halt(1).
fac(0) -> 1;
fac(N) -> N * fac(N-1).

4
samples/Erlang/hello.escript Executable file

@@ -0,0 +1,4 @@
#!/usr/bin/env escript
-export([main/1]).
main([]) -> io:format("Hello, World!~n").


@@ -0,0 +1,136 @@
%% For each header file, it scans through all records and creates helper functions
%% Helper functions are:
%% setters, getters, fields, fields_atom, type
-module(record_helper).
-export([make/1, make/2]).
make(HeaderFiles) ->
make([ atom_to_list(X) || X <- HeaderFiles ], ".").
%% .hrl file, relative to current dir
make(HeaderFiles, OutDir) ->
ModuleName = "record_utils",
HeaderComment = "%% This is auto generated file. Please don't edit it\n\n",
ModuleDeclaration = "-module(" ++ ModuleName ++ ").\n"
++ "-author(\"trung@mdkt.org\").\n"
++ "-compile(export_all).\n"
++ [ "-include(\"" ++ X ++ "\").\n" || X <- HeaderFiles ]
++ "\n",
Src = format_src(lists:sort(lists:flatten([read(X) || X <- HeaderFiles] ++ [generate_type_default_function()]))),
file:write_file(OutDir++"/" ++ ModuleName ++ ".erl", list_to_binary([HeaderComment, ModuleDeclaration, Src])).
read(HeaderFile) ->
try epp:parse_file(HeaderFile,[],[]) of
{ok, Tree} ->
parse(Tree);
{error, Error} ->
{error, {"Error parsing header file", HeaderFile, Error}}
catch
_:Error ->
{catched_error, {"Error parsing header file", HeaderFile, Error}}
end.
format_src([{_, _, _, Src}|T]) when length(T) == 0 ->
Src ++ ".\n\n";
format_src([{Type, _, _, Src}|[{Type, A, B, NSrc}|T]]) ->
Src ++ ";\n\n" ++ format_src([{Type, A, B, NSrc}|T]);
format_src([{_Type, _, _, Src}|[{Type1, A, B, NSrc}|T]]) ->
Src ++ ".\n\n" ++ format_src([{Type1, A, B, NSrc}|T]);
format_src([{_, _, _, Src}|T]) when length(T) > 0 ->
Src ++ ";\n\n" ++ format_src(T).
parse(Tree) ->
[ parse_record(X) || X <- Tree ].
parse_record({attribute, _, record, RecordInfo}) ->
{RecordName, RecordFields} = RecordInfo,
if
length(RecordFields) == 1 ->
lists:flatten([ generate_setter_getter_function(RecordName, X) || X <- RecordFields ]
++ [generate_type_function(RecordName)]);
true ->
lists:flatten([generate_fields_function(RecordName, RecordFields)]
++ [generate_fields_atom_function(RecordName, RecordFields)]
++ [ generate_setter_getter_function(RecordName, X) || X <- RecordFields ]
++ [generate_type_function(RecordName)])
end;
parse_record(_) -> [].
parse_field_name({record_field, _, {atom, _, FieldName}}) ->
{field, "\"" ++ atom_to_list(FieldName) ++ "\""};
parse_field_name({record_field, _, {atom, _, _FieldName}, {record, _, ParentRecordName, _}}) ->
{parent_field, "fields(" ++ atom_to_list(ParentRecordName) ++ ")"};
parse_field_name({record_field, _, {atom, _, FieldName}, _}) ->
{field, "\"" ++ atom_to_list(FieldName) ++ "\""}.
parse_field_name_atom({record_field, _, {atom, _, FieldName}}) ->
atom_to_list(FieldName);
parse_field_name_atom({record_field, _, {atom, _, _FieldName}, {record, _, ParentRecordName, _}}) ->
"fields_atom(" ++ atom_to_list(ParentRecordName) ++ ")";
parse_field_name_atom({record_field, _, {atom, _, FieldName}, _}) ->
atom_to_list(FieldName).
concat([], _S) -> [];
concat([F|T], _S) when length(T) == 0 -> F;
concat([F|T], S) -> F ++ S ++ concat(T, S).
concat_ext([], _S) -> [];
concat_ext([F|T], S) -> F ++ S ++ concat_ext(T, S).
parse_field([], AccFields, AccParentFields) -> concat_ext(AccParentFields, " ++ ") ++ "[" ++ concat(AccFields, ", ") ++ "]";
%parse_field([F|T], AccFields, AccParentFields) when length(T) == 0 -> parse_field_name(F);
parse_field([F|T], AccFields, AccParentFields) ->
case parse_field_name(F) of
{field, Field} ->
parse_field(T, AccFields ++ [Field], AccParentFields);
{parent_field, PField} ->
parse_field(T, AccFields, AccParentFields ++ [PField])
end.
parse_field_atom([F|T]) when length(T) == 0 -> parse_field_name_atom(F);
parse_field_atom([F|T]) ->
parse_field_name_atom(F) ++ ", " ++ parse_field_atom(T).
generate_type_default_function() ->
{type, zzz, 99, "type(_) -> undefined"}.
generate_type_function(RecordName) ->
{type, RecordName, 0, "type(Obj) when is_record(Obj, " ++ atom_to_list(RecordName) ++ ") -> " ++ atom_to_list(RecordName)}.
generate_fields_function(RecordName, RecordFields) ->
Fields = parse_field(RecordFields, [], []),
{field, RecordName, 1, "fields(" ++ atom_to_list(RecordName) ++ ") -> \n\t" ++ Fields}.
generate_fields_atom_function(RecordName, RecordFields) ->
Fields = parse_field_atom(RecordFields),
{field_atom, RecordName, 1, "fields_atom(" ++ atom_to_list(RecordName) ++ ") -> \n\tlists:flatten([" ++ Fields ++ "])"}.
generate_setter_getter_function(RecordName, {record_field, _, {atom, _, FieldName}, {record, _, ParentRecordName, _}}) ->
to_setter_getter_function(atom_to_list(RecordName), atom_to_list(FieldName), atom_to_list(ParentRecordName));
generate_setter_getter_function(RecordName, {record_field, _, {atom, _, FieldName}, _}) ->
to_setter_getter_function(atom_to_list(RecordName), atom_to_list(FieldName));
generate_setter_getter_function(RecordName, {record_field, _, {atom, _, FieldName}}) ->
to_setter_getter_function(atom_to_list(RecordName), atom_to_list(FieldName)).
to_setter_getter_function(RecordName, FieldName) ->
[{setter, RecordName, 1, "set(Obj, " ++ FieldName ++ ", Value) when is_record(Obj, " ++ RecordName ++ ") -> \n"
++ "\tNewObj = Obj#" ++ RecordName ++ "{" ++ FieldName ++ " = Value},\n"
++ "\t{ok, NewObj, {" ++ FieldName ++ ", Value}}"},
{getter, RecordName, 1, "get(Obj, " ++ FieldName ++ ") when is_record(Obj, " ++ RecordName ++ ") -> \n"
++ "\t{ok, Obj#" ++ RecordName ++ "." ++ FieldName ++ "}"}
].
to_setter_getter_function(RecordName, FieldName, ParentRecordName) ->
[{setter, RecordName, 2, "set(Obj, " ++ FieldName ++ ", Value) when is_record(Obj, " ++ RecordName ++ ") and is_record(Value, " ++ ParentRecordName ++ ") -> \n"
++ "\tNewObj = Obj#" ++ RecordName ++ "{" ++ FieldName ++ " = Value},\n"
++ "\t{ok, NewObj, {" ++ FieldName ++ ", Value}};\n\n"
++ "set(Obj, ParentProperty, Value) when is_record(Obj, " ++ RecordName ++ ") and is_atom(ParentProperty) -> \n"
++ "\t{ok, NewParentObject, _} = set(Obj#" ++ RecordName ++ ".parent, ParentProperty, Value),\n"
++ "\tset(Obj, parent, NewParentObject)"},
{getter, RecordName, 2, "get(Obj, " ++ FieldName ++ ") when is_record(Obj, " ++ RecordName ++ ") -> \n"
++ "\t{ok, Obj#" ++ RecordName ++ "." ++ FieldName ++ "};\n\n"
++ "get(Obj, ParentProperty) when is_record(Obj, " ++ RecordName ++ ") and is_atom(ParentProperty) -> \n"
++ "\tget(Obj#" ++ RecordName ++ ".parent, ParentProperty)"}
].


@@ -0,0 +1,100 @@
%% This is auto generated file. Please don't edit it
-module(record_utils).
-compile(export_all).
-include("messages.hrl").
fields(abstract_message) ->
["clientId", "destination", "messageId", "timestamp", "timeToLive", "headers", "body"];
fields(async_message) ->
fields(abstract_message) ++ ["correlationId", "correlationIdBytes"].
fields_atom(abstract_message) ->
lists:flatten([clientId, destination, messageId, timestamp, timeToLive, headers, body]);
fields_atom(async_message) ->
lists:flatten([fields_atom(abstract_message), correlationId, correlationIdBytes]).
get(Obj, body) when is_record(Obj, abstract_message) ->
{ok, Obj#abstract_message.body};
get(Obj, clientId) when is_record(Obj, abstract_message) ->
{ok, Obj#abstract_message.clientId};
get(Obj, destination) when is_record(Obj, abstract_message) ->
{ok, Obj#abstract_message.destination};
get(Obj, headers) when is_record(Obj, abstract_message) ->
{ok, Obj#abstract_message.headers};
get(Obj, messageId) when is_record(Obj, abstract_message) ->
{ok, Obj#abstract_message.messageId};
get(Obj, timeToLive) when is_record(Obj, abstract_message) ->
{ok, Obj#abstract_message.timeToLive};
get(Obj, timestamp) when is_record(Obj, abstract_message) ->
{ok, Obj#abstract_message.timestamp};
get(Obj, correlationId) when is_record(Obj, async_message) ->
{ok, Obj#async_message.correlationId};
get(Obj, correlationIdBytes) when is_record(Obj, async_message) ->
{ok, Obj#async_message.correlationIdBytes};
get(Obj, parent) when is_record(Obj, async_message) ->
{ok, Obj#async_message.parent};
get(Obj, ParentProperty) when is_record(Obj, async_message) and is_atom(ParentProperty) ->
get(Obj#async_message.parent, ParentProperty).
set(Obj, body, Value) when is_record(Obj, abstract_message) ->
NewObj = Obj#abstract_message{body = Value},
{ok, NewObj, {body, Value}};
set(Obj, clientId, Value) when is_record(Obj, abstract_message) ->
NewObj = Obj#abstract_message{clientId = Value},
{ok, NewObj, {clientId, Value}};
set(Obj, destination, Value) when is_record(Obj, abstract_message) ->
NewObj = Obj#abstract_message{destination = Value},
{ok, NewObj, {destination, Value}};
set(Obj, headers, Value) when is_record(Obj, abstract_message) ->
NewObj = Obj#abstract_message{headers = Value},
{ok, NewObj, {headers, Value}};
set(Obj, messageId, Value) when is_record(Obj, abstract_message) ->
NewObj = Obj#abstract_message{messageId = Value},
{ok, NewObj, {messageId, Value}};
set(Obj, timeToLive, Value) when is_record(Obj, abstract_message) ->
NewObj = Obj#abstract_message{timeToLive = Value},
{ok, NewObj, {timeToLive, Value}};
set(Obj, timestamp, Value) when is_record(Obj, abstract_message) ->
NewObj = Obj#abstract_message{timestamp = Value},
{ok, NewObj, {timestamp, Value}};
set(Obj, correlationId, Value) when is_record(Obj, async_message) ->
NewObj = Obj#async_message{correlationId = Value},
{ok, NewObj, {correlationId, Value}};
set(Obj, correlationIdBytes, Value) when is_record(Obj, async_message) ->
NewObj = Obj#async_message{correlationIdBytes = Value},
{ok, NewObj, {correlationIdBytes, Value}};
set(Obj, parent, Value) when is_record(Obj, async_message) and is_record(Value, abstract_message) ->
NewObj = Obj#async_message{parent = Value},
{ok, NewObj, {parent, Value}};
set(Obj, ParentProperty, Value) when is_record(Obj, async_message) and is_atom(ParentProperty) ->
{ok, NewParentObject, _} = set(Obj#async_message.parent, ParentProperty, Value),
set(Obj, parent, NewParentObject).
type(Obj) when is_record(Obj, abstract_message) -> abstract_message;
type(Obj) when is_record(Obj, async_message) -> async_message;
type(_) -> undefined.


@@ -0,0 +1,79 @@
\ KataDiversion in Forth
\ -- utils
\ empty the stack
: EMPTY
DEPTH 0 <> IF BEGIN
DROP DEPTH 0 =
UNTIL
THEN ;
\ power
: ** ( n1 n2 -- n1_pow_n2 ) 1 SWAP ?DUP IF 0 DO OVER * LOOP THEN NIP ;
\ compute the highest power of 2 below N.
\ e.g. : 31 -> 16, 4 -> 4
: MAXPOW2 ( n -- log2_n ) DUP 1 < IF 1 ABORT" Maxpow2 need a positive value."
ELSE DUP 1 = IF 1
ELSE
1 >R
BEGIN ( n |R: i=1)
DUP DUP I - 2 *
( n n 2*[n-i])
R> 2 * >R ( … |R: i*2)
> ( n n>2*[n-i] )
UNTIL
R> 2 /
THEN
THEN NIP ;
\ -- kata
\ test if the given N has two adjacent 1 bits
\ e.g. : 11 -> 1011 -> -1
\ 9 -> 1001 -> 0
: ?NOT-TWO-ADJACENT-1-BITS ( n -- bool )
\ the word uses the following algorithm :
\ (stack|return stack)
\ ( A N | X ) A: 0, X: N LOG2
\ loop: if N-X > 0 then A++ else A=0 ; X /= 2
\ return 0 if A=2
\ if X=1 end loop and return -1
0 SWAP DUP DUP 0 <> IF
MAXPOW2 >R
BEGIN
DUP I - 0 >= IF
SWAP DUP 1 = IF 1+ SWAP
ELSE DROP 1 SWAP I -
THEN
ELSE NIP 0 SWAP
THEN
OVER
2 =
I 1 = OR
R> 2 / >R
UNTIL
R> 2DROP
2 <>
ELSE 2DROP INVERT
THEN ;
\ return the maximum number which can be made with N (given number) bits
: MAX-NB ( n -- m ) DUP 1 < IF DROP 0 ( 0 )
ELSE
DUP IF DUP 2 SWAP ** NIP 1 - ( 2**n - 1 )
THEN
THEN ;
\ return the number of numbers which can be made with N (given number) bits
\ or less, and which have not two adjacent 1 bits.
\ see http://www.codekata.com/2007/01/code_kata_fifte.html
: HOW-MANY-NB-NOT-TWO-ADJACENT-1-BITS ( n -- m )
DUP 1 < IF DUP 0
ELSE
0 SWAP
MAX-NB 1 + 0 DO I ?NOT-TWO-ADJACENT-1-BITS - LOOP
THEN ;

42
samples/Forth/block.fth Normal file

@@ -0,0 +1,42 @@
( Block words. )
variable blk
variable current-block
: block ( n -- addr )
current-block ! 0 ;
: buffer ( n -- addr )
current-block ! 0 ;
\ evaluate (extended semantics)
\ flush ( -- )
: load ( ... n -- ... )
dup current-block !
blk !
save-input
0 >in !
blk @ block ''source ! 1024 ''#source !
( interpret )
restore-input ;
\ save-buffers ( -- )
\ update ( -- )
( Block extension words. )
\ empty-buffers ( -- )
variable scr
: list ( n -- )
dup scr !
dup current-block !
block 1024 bounds do i @ emit loop ;
\ refill (extended semantics)
: thru ( x y -- ) +1 swap do i load loop ;
\ \ (extended semantics)

136
samples/Forth/core-ext.fth Normal file

@@ -0,0 +1,136 @@
\ -*- forth -*- Copyright 2004, 2013 Lars Brinkhoff
\ Kernel: #tib
\ TODO: .r
: .( ( "<string><paren>" -- )
[char] ) parse type ; immediate
: 0<> ( n -- flag ) 0 <> ;
: 0> ( n -- flag ) 0 > ;
\ Kernel: 2>r
: 2r> ( -- x1 x2 ) ( R: x1 x2 -- ) r> r> r> rot >r swap ;
: 2r@ ( -- x1 x2 ) ( R: x1 x2 -- x1 x2 ) 2r> 2dup 2>r ;
: :noname align here 0 c, 15 allot lastxt dup @ , !
[ ' enter >code @ ] literal , 0 , ] lastxt @ ;
\ Kernel: <>
\ : ?do ( n1 n2 -- ) ( R: -- loop-sys ) ( C: -- do-sys )
\ here postpone 2>r unresolved branch here ;
: again ( -- ) ( C: dest -- )
postpone branch , ; immediate
: string+ ( caddr -- addr )
count + aligned ;
: (c") ( -- caddr ) ( R: ret1 -- ret2 )
r> dup string+ >r ;
: c" ( "<string><quote>" -- caddr )
postpone (c") [char] " parse dup c, string, ; immediate
: case ( -- ) ( C: -- case-sys )
0 ;
: compile, ( xt -- )
, ;
\ TODO: convert
: endcase ( x -- ) ( C: case-sys -- )
0 do postpone then loop
postpone drop ;
: endof ( -- ) ( C: case-sys1 of-sys -- case-sys2 )
postpone else swap 1+ ;
\ TODO: erase
\ TODO: expect
: false ( -- 0 )
0 ;
: hex ( -- )
16 base ! ;
\ TODO: marker
\ Kernel: nip
: of ( x x -- | x y -- x ) ( C: -- of-sys )
postpone over postpone = postpone if postpone drop ;
\ Kernel: pad
\ Kernel: parse
: pick ( xn ... x0 n -- xn ... x0 xn )
2 + cells 'SP @ + @ ;
: query ( -- )
tib ''source ! #tib ''#source ! 0 'source-id !
refill drop ;
\ Kernel: refill
\ Kernel: restore-input
\ TODO: roll ( xn xn-1 ... x0 n -- xn-1 ... x0 xn ) ;
\ Kernel: save-input
\ Kernel: source-id
\ TODO: span
\ Kernel: tib
: to ( x "word" -- )
' >body , ;
: true ( -- -1 )
-1 ;
: tuck ( x y -- y x y )
swap over ;
\ TODO: u.r
: u> ( x y -- flag )
2dup u< if 2drop false else <> then ;
\ TODO: unused
: value ( x "word" -- )
create ,
does> ( -- x )
@ ;
: within over - >r - r> u< ;
\ TODO: [compile]
\ Kernel: \
\ ----------------------------------------------------------------------
( Forth2012 core extension words. )
\ TODO: action-of
\ TODO: buffer:
: defer create ['] abort , does> @ execute ;
: defer! ( xt2 xt1 -- ) >body ! ;
: defer@ ( xt1 -- xt2 ) >body @ ;
\ TODO: holds
: is ( xt "word" -- ) ' defer! ;
\ TODO: parse-name
\ TODO: s\"

252
samples/Forth/core.fth Normal file

@@ -0,0 +1,252 @@
: immediate lastxt @ dup c@ negate swap c! ;
: \ source nip >in ! ; immediate \ Copyright 2004, 2012 Lars Brinkhoff
: char \ ( "word" -- char )
bl-word here 1+ c@ ;
: ahead here 0 , ;
: resolve here swap ! ;
: ' bl-word here find 0branch [ ahead ] exit [ resolve ] 0 ;
: postpone-nonimmediate [ ' literal , ' compile, ] literal , ;
: create dovariable_code header, reveal ;
create postponers
' postpone-nonimmediate ,
' abort ,
' , ,
: word \ ( char "<chars>string<char>" -- caddr )
drop bl-word here ;
: postpone \ ( C: "word" -- )
bl word find 1+ cells postponers + @ execute ; immediate
: unresolved \ ( C: "word" -- orig )
postpone postpone postpone ahead ; immediate
: chars \ ( n1 -- n2 )
;
: else \ ( -- ) ( C: orig1 -- orig2 )
unresolved branch swap resolve ; immediate
: if \ ( flag -- ) ( C: -- orig )
unresolved 0branch ; immediate
: then \ ( -- ) ( C: orig -- )
resolve ; immediate
: [char] \ ( "word" -- )
char postpone literal ; immediate
: (does>) lastxt @ dodoes_code over >code ! r> swap >does ! ;
: does> postpone (does>) ; immediate
: begin \ ( -- ) ( C: -- dest )
here ; immediate
: while \ ( x -- ) ( C: dest -- orig dest )
unresolved 0branch swap ; immediate
: repeat \ ( -- ) ( C: orig dest -- )
postpone branch , resolve ; immediate
: until \ ( x -- ) ( C: dest -- )
postpone 0branch , ; immediate
: recurse lastxt @ compile, ; immediate
: pad \ ( -- addr )
here 1024 + ;
: parse \ ( char "string<char>" -- addr n )
pad >r begin
source? if <source 2dup <> else 0 0 then
while
r@ c! r> 1+ >r
repeat 2drop pad r> over - ;
: ( \ ( "string<paren>" -- )
[ char ) ] literal parse 2drop ; immediate
\ TODO: If necessary, refill and keep parsing.
: string, ( addr n -- )
here over allot align swap cmove ;
: (s") ( -- addr n ) ( R: ret1 -- ret2 )
r> dup @ swap cell+ 2dup + aligned >r swap ;
create squote 128 allot
: s" ( "string<quote>" -- addr n )
state @ if
postpone (s") [char] " parse dup , string,
else
[char] " parse >r squote r@ cmove squote r>
then ; immediate
: (abort") ( ... addr n -- ) ( R: ... -- )
cr type cr abort ;
: abort" ( ... x "string<quote>" -- ) ( R: ... -- )
postpone if postpone s" postpone (abort") postpone then ; immediate
\ ----------------------------------------------------------------------
( Core words. )
\ TODO: #
\ TODO: #>
\ TODO: #s
: and ( x y -- x&y ) nand invert ;
: * 1 2>r 0 swap begin r@ while
r> r> swap 2dup dup + 2>r and if swap over + swap then dup +
repeat r> r> 2drop drop ;
\ TODO: */mod
: +loop ( -- ) ( C: nest-sys -- )
postpone (+loop) postpone 0branch , postpone unloop ; immediate
: space bl emit ;
: ?.- dup 0 < if [char] - emit negate then ;
: digit [char] 0 + emit ;
: (.) base @ /mod ?dup if recurse then digit ;
: ." ( "string<quote>" -- ) postpone s" postpone type ; immediate
: . ( x -- ) ?.- (.) space ;
: postpone-number ( caddr -- )
0 0 rot count >number dup 0= if
2drop nip
postpone (literal) postpone (literal) postpone ,
postpone literal postpone ,
else
." Undefined: " type cr abort
then ;
' postpone-number postponers cell+ !
: / ( x y -- x/y ) /mod nip ;
: 0< ( n -- flag ) 0 < ;
: 1- ( n -- n-1 ) -1 + ;
: 2! ( x1 x2 addr -- ) swap over ! cell+ ! ;
: 2* ( n -- 2n ) dup + ;
\ Kernel: 2/
: 2@ ( addr -- x1 x2 ) dup cell+ @ swap @ ;
\ Kernel: 2drop
\ Kernel: 2dup
\ TODO: 2over ( x1 x2 x3 x4 -- x1 x2 x3 x4 x1 x2 )
\ 3 pick 3 pick ;
\ TODO: 2swap
\ TODO: <#
: abs ( n -- |n| )
dup 0< if negate then ;
\ TODO: accept
: c, ( n -- )
here c! 1 chars allot ;
: char+ ( n1 -- n2 )
1+ ;
: constant create , does> @ ;
: decimal ( -- )
10 base ! ;
: depth ( -- n )
data_stack 100 cells + 'SP @ - /cell / 2 - ;
: do ( n1 n2 -- ) ( R: -- loop-sys ) ( C: -- do-sys )
postpone 2>r here ; immediate
\ TODO: environment?
\ TODO: evaluate
\ TODO: fill
\ TODO: fm/mod )
\ TODO: hold
: j ( -- x1 ) ( R: x1 x2 x3 -- x1 x2 x3 )
'RP @ 3 cells + @ ;
\ TODO: leave
: loop ( -- ) ( C: nest-sys -- )
postpone 1 postpone (+loop)
postpone 0branch ,
postpone unloop ; immediate
: lshift begin ?dup while 1- swap dup + swap repeat ;
: rshift 1 begin over while dup + swap 1- swap repeat nip
2>r 0 1 begin r@ while
r> r> 2dup swap dup + 2>r and if swap over + swap then dup +
repeat r> r> 2drop drop ;
: max ( x y -- max[x,y] )
2dup > if drop else nip then ;
\ Kernel: min
\ TODO: mod
\ TODO: move
: (quit) ( R: ... -- )
return_stack 100 cells + 'RP !
0 'source-id ! tib ''source ! #tib ''#source !
postpone [
begin
refill
while
interpret state @ 0= if ." ok" cr then
repeat
bye ;
' (quit) ' quit >body cell+ !
\ TODO: s>d
\ TODO: sign
\ TODO: sm/rem
: spaces ( n -- )
0 do space loop ;
\ TODO: u.
: signbit ( -- n ) -1 1 rshift invert ;
: xor ( x y -- x^y ) 2dup nand >r r@ nand swap r> nand nand ;
: u< ( x y -- flag ) signbit xor swap signbit xor > ;
\ TODO: um/mod
: variable ( "word" -- )
create /cell allot ;
: ['] \ ( C: "word" -- )
' postpone literal ; immediate


@@ -0,0 +1,5 @@
: HELLO ( -- )
." Hello Forth (forth)!" ;
HELLO


@@ -0,0 +1,5 @@
: HELLO ( -- )
." Hello Forth (fth)!" ;
HELLO

133
samples/Forth/tools.fth Normal file

@@ -0,0 +1,133 @@
\ -*- forth -*- Copyright 2004, 2013 Lars Brinkhoff
( Tools words. )
: .s ( -- )
[char] < emit depth (.) ." > "
'SP @ >r r@ depth 1- cells +
begin
dup r@ <>
while
dup @ .
/cell -
repeat r> 2drop ;
: ? @ . ;
: c? c@ . ;
: dump bounds do i ? /cell +loop cr ;
: cdump bounds do i c? loop cr ;
: again postpone branch , ; immediate
: see-find ( caddr -- end xt )
>r here lastxt @
begin
dup 0= abort" Undefined word"
dup r@ word= if r> drop exit then
nip dup >nextxt
again ;
: cabs ( char -- |char| ) dup 127 > if 256 swap - then ;
: xt. ( xt -- )
( >name ) count cabs type ;
: xt? ( xt -- flag )
>r lastxt @ begin
?dup
while
dup r@ = if r> 2drop -1 exit then
>nextxt
repeat r> drop 0 ;
: disassemble ( x -- )
dup xt? if
( >name ) count
dup 127 > if ." postpone " then
cabs type
else
.
then ;
: .addr dup . ;
: see-line ( addr -- )
cr ." ( " .addr ." ) " @ disassemble ;
: see-word ( end xt -- )
>r ." : " r@ xt.
r@ >body do i see-line /cell +loop
." ;" r> c@ 127 > if ." immediate" then ;
: see bl word see-find see-word cr ;
: #body bl word see-find >body - ;
: type-word ( end xt -- flag )
xt. space drop 0 ;
: traverse-dictionary ( in.. xt -- out.. )
\ xt execution: ( in.. end xt2 -- in.. 0 | in.. end xt2 -- out.. true )
>r here lastxt @ begin
?dup
while
r> 2dup >r >r execute
if r> r> 2drop exit then
r> dup >nextxt
repeat r> 2drop ;
: words ( -- )
['] type-word traverse-dictionary cr ;
\ ----------------------------------------------------------------------
( Tools extension words. )
\ ;code
\ assembler
\ in kernel: bye
\ code
\ cs-pick
\ cs-roll
\ editor
: forget ' dup >nextxt lastxt ! 'here ! reveal ;
\ Kernel: state
\ [else]
\ [if]
\ [then]
\ ----------------------------------------------------------------------
( Forth2012 tools extension words. )
\ TODO: n>r
\ TODO: nr>
\ TODO: synonym
: [undefined] bl-word find nip 0= ; immediate
: [defined] postpone [undefined] invert ; immediate
\ ----------------------------------------------------------------------
: @+ ( addr -- addr+/cell x ) dup cell+ swap @ ;
: !+ ( x addr -- addr+/cell ) tuck ! cell+ ;
: -rot swap >r swap r> ;


@@ -0,0 +1,6 @@
<div class="entry">
<h1>{{title}}</h1>
<div class="body">
{{body}}
</div>
</div>


@@ -0,0 +1,11 @@
<div class="post">
<h1>By {{fullName author}}</h1>
<div class="body">{{body}}</div>
<h1>Comments</h1>
{{#each comments}}
<h2>By {{fullName author}}</h2>
<div class="body">{{body}}</div>
{{/each}}
</div>


@@ -0,0 +1,10 @@
; editorconfig.org
root = true
[*]
indent_style = space
indent_size = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true


@@ -0,0 +1,528 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: protocol-buffer.proto
package persons;
public final class ProtocolBuffer {
private ProtocolBuffer() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public interface PersonOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string name = 1;
/**
* <code>required string name = 1;</code>
*/
boolean hasName();
/**
* <code>required string name = 1;</code>
*/
java.lang.String getName();
/**
* <code>required string name = 1;</code>
*/
com.google.protobuf.ByteString
getNameBytes();
}
/**
* Protobuf type {@code persons.Person}
*/
public static final class Person extends
com.google.protobuf.GeneratedMessage
implements PersonOrBuilder {
// Use Person.newBuilder() to construct.
private Person(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private Person(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Person defaultInstance;
public static Person getDefaultInstance() {
return defaultInstance;
}
public Person getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Person(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
name_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return persons.ProtocolBuffer.internal_static_persons_Person_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return persons.ProtocolBuffer.internal_static_persons_Person_fieldAccessorTable
.ensureFieldAccessorsInitialized(
persons.ProtocolBuffer.Person.class, persons.ProtocolBuffer.Person.Builder.class);
}
public static com.google.protobuf.Parser<Person> PARSER =
new com.google.protobuf.AbstractParser<Person>() {
public Person parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Person(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Person> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string name = 1;
public static final int NAME_FIELD_NUMBER = 1;
private java.lang.Object name_;
/**
* <code>required string name = 1;</code>
*/
public boolean hasName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
}
return s;
}
}
/**
* <code>required string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
name_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getNameBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static persons.ProtocolBuffer.Person parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static persons.ProtocolBuffer.Person parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static persons.ProtocolBuffer.Person parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static persons.ProtocolBuffer.Person parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static persons.ProtocolBuffer.Person parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static persons.ProtocolBuffer.Person parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static persons.ProtocolBuffer.Person parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static persons.ProtocolBuffer.Person parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static persons.ProtocolBuffer.Person parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static persons.ProtocolBuffer.Person parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(persons.ProtocolBuffer.Person prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code persons.Person}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements persons.ProtocolBuffer.PersonOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return persons.ProtocolBuffer.internal_static_persons_Person_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return persons.ProtocolBuffer.internal_static_persons_Person_fieldAccessorTable
.ensureFieldAccessorsInitialized(
persons.ProtocolBuffer.Person.class, persons.ProtocolBuffer.Person.Builder.class);
}
// Construct using persons.ProtocolBuffer.Person.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
name_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return persons.ProtocolBuffer.internal_static_persons_Person_descriptor;
}
public persons.ProtocolBuffer.Person getDefaultInstanceForType() {
return persons.ProtocolBuffer.Person.getDefaultInstance();
}
public persons.ProtocolBuffer.Person build() {
persons.ProtocolBuffer.Person result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public persons.ProtocolBuffer.Person buildPartial() {
persons.ProtocolBuffer.Person result = new persons.ProtocolBuffer.Person(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.name_ = name_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof persons.ProtocolBuffer.Person) {
return mergeFrom((persons.ProtocolBuffer.Person)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(persons.ProtocolBuffer.Person other) {
if (other == persons.ProtocolBuffer.Person.getDefaultInstance()) return this;
if (other.hasName()) {
bitField0_ |= 0x00000001;
name_ = other.name_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasName()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
persons.ProtocolBuffer.Person parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (persons.ProtocolBuffer.Person) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string name = 1;
private java.lang.Object name_ = "";
/**
* <code>required string name = 1;</code>
*/
public boolean hasName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string name = 1;</code>
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
/**
* <code>required string name = 1;</code>
*/
public Builder clearName() {
bitField0_ = (bitField0_ & ~0x00000001);
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* <code>required string name = 1;</code>
*/
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:persons.Person)
}
static {
defaultInstance = new Person(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:persons.Person)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_persons_Person_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_persons_Person_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\025protocol-buffer.proto\022\007persons\"\026\n\006Pers" +
"on\022\014\n\004name\030\001 \002(\t"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_persons_Person_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_persons_Person_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_persons_Person_descriptor,
new java.lang.String[] { "Name", });
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}

0
samples/JavaScript/js2.script! Normal file → Executable file

301
samples/Lasso/json.lasso Normal file

@@ -0,0 +1,301 @@
<?LassoScript
//
// JSON Encoding and Decoding
//
// Copyright 2007-2012 LassoSoft Inc.
//
// <http://json.org/>
// <http://json-rpc.org/>
// <http://www.ietf.org/rfc/rfc4627.txt?number=4627>
// This tag is now incorporated in Lasso 8.6.0.1
//
If: (Lasso_TagExists: 'Encode_JSON') == False;
Define_Tag: 'JSON', -Namespace='Encode_', -Required='value', -Optional='options';
Local: 'escapes' = Map('\\' = '\\', '"' = '"', '\r' = 'r', '\n' = 'n', '\t' = 't', '\f' = 'f', '\b' = 'b');
Local: 'output' = '';
Local: 'newoptions' = (Array: -Internal);
If: !(Local_Defined: 'options') || (#options->(IsA: 'array') == False);
Local: 'options' = (Array);
/If;
If: (#options >> -UseNative) || (Params >> -UseNative);
#newoptions->(Insert: -UseNative);
/If;
If: (#options >> -NoNative) || (Params >> -NoNative);
#newoptions->(Insert: -NoNative);
/If;
If: (#options !>> -UseNative) && ((#value->(IsA: 'set')) || (#value->(IsA: 'list')) || (#value->(IsA: 'queue')) || (#value->(IsA: 'priorityqueue')) || (#value->(IsA: 'stack')));
#output += (Encode_JSON: Array->(insertfrom: #value->iterator) &, -Options=#newoptions);
Else: (#options !>> -UseNative) && (#value->(IsA: 'pair'));
#output += (Encode_JSON: (Array: #value->First, #value->Second));
Else: (#options !>> -Internal) && (#value->(Isa: 'array') == False) && (#value->(IsA: 'map') == False);
#output += '[' + (Encode_JSON: #value, -Options=#newoptions) + ']';
Else: (#value->(IsA: 'literal'));
#output += #value;
Else: (#value->(IsA: 'string'));
#output += '"';
Loop: (#value->Length);
Local('character' = #value->(Get: Loop_Count));
#output->(Append:
(Match_RegExp('[\\x{0020}-\\x{21}\\x{23}-\\x{5b}\\x{5d}-\\x{10fff}]') == #character) ? #character |
'\\' + (#escapes->(Contains: #character) ? #escapes->(Find: #character) | 'u' + String(Encode_Hex(#character))->PadLeading(4, '0')&)
);
/Loop;
#output += '"';
Else: (#value->(IsA: 'integer')) || (#value->(IsA: 'decimal')) || (#value->(IsA: 'boolean'));
#output += (String: #value);
Else: (#value->(IsA: 'null'));
#output += 'null';
Else: (#value->(IsA: 'date'));
If: #value->gmt;
#output += '"' + #value->(format: '%QT%TZ') + '"';
Else;
#output += '"' + #value->(format: '%QT%T') + '"';
/If;
Else: (#value->(IsA: 'array'));
#output += '[';
Iterate: #value, (Local: 'temp');
#output += (Encode_JSON: #temp, -Options=#newoptions);
If: #value->Size != Loop_Count;
#output += ', ';
/If;
/Iterate;
#output += ']';
Else: (#value->(IsA: 'object'));
#output += '{';
Iterate: #value, (Local: 'temp');
#output += #temp->First + ': ' + (Encode_JSON: #temp->Second, -Options=#newoptions);
If: (#value->Size != Loop_Count);
#output += ', ';
/If;
/Iterate;
#output += '}';
Else: (#value->(IsA: 'map'));
#output += '{';
Iterate: #value, (Local: 'temp');
#output += (Encode_JSON: #temp->First, -Options=#newoptions) + ': ' + (Encode_JSON: #temp->Second, -Options=#newoptions);
If: (#value->Size != Loop_Count);
#output += ', ';
/If;
/Iterate;
#output += '}';
Else: (#value->(IsA: 'client_ip')) || (#value->(IsA: 'client_address'));
#output += (Encode_JSON: (String: #value), -Options=#newoptions);
Else: (#options !>> -UseNative) && (#value->(IsA: 'set')) || (#value->(IsA: 'list')) || (#value->(IsA: 'queue')) || (#value->(IsA: 'priorityqueue')) || (#value->(IsA: 'stack'));
#output += (Encode_JSON: Array->(insertfrom: #value->iterator) &, -Options=#newoptions);
Else: (#options !>> -NoNative);
#output += (Encode_JSON: (Map: '__jsonclass__'=(Array:'deserialize',(Array:'<LassoNativeType>' + #value->Serialize + '</LassoNativeType>'))));
/If;
Return: @#output;
/Define_Tag;
/If;
If: (Lasso_TagExists: 'Decode_JSON') == False;
Define_Tag: 'JSON', -Namespace='Decode_', -Required='value';
(#value == '') ? Return: Null;
Define_Tag: 'consume_string', -Required='ibytes';
Local: 'unescapes' = (map: 34 = '"', 92 = '\\', 98 = '\b', 102 = '\f', 110 = '\n', 114 = '\r', 116 = '\t');
Local: 'temp' = 0, 'obytes' = Bytes;
While: ((#temp := #ibytes->export8bits) != 34); // '"'
If: (#temp === 92); // '\'
#temp = #ibytes->export8bits;
If: (#temp === 117); // 'u'
#obytes->(ImportString: (Decode_Hex: (String: #ibytes->(GetRange: #ibytes->Position + 1, 4)))->(ExportString: 'UTF-16'), 'UTF-8');
#ibytes->(SetPosition: #ibytes->Position + 4);
Else;
If: (#unescapes->(Contains: #temp));
#obytes->(ImportString: #unescapes->(Find: #temp), 'UTF-8');
Else;
#obytes->(Import8Bits: #temp);
/If;
/If;
Else;
#obytes->(Import8Bits: #temp);
/If;
/While;
Local('output' = #obytes->(ExportString: 'UTF-8'));
If: #output->(BeginsWith: '<LassoNativeType>') && #output->(EndsWith: '</LassoNativeType>');
Local: 'temp' = #output - '<LassoNativeType>' - '</LassoNativeType>';
Local: 'output' = null;
Protect;
#output->(Deserialize: #temp);
/Protect;
Else: (Valid_Date: #output, -Format='%QT%TZ');
Local: 'output' = (Date: #output, -Format='%QT%TZ');
Else: (Valid_Date: #output, -Format='%QT%T');
Local: 'output' = (Date: #output, -Format='%QT%T');
/If;
Return: @#output;
/Define_Tag;
Define_Tag: 'consume_token', -Required='ibytes', -required='temp';
Local: 'obytes' = bytes->(import8bits: #temp) &;
local: 'delimit' = (array: 9, 10, 13, 32, 44, 58, 93, 125); // \t\r\n ,:]}
While: (#delimit !>> (#temp := #ibytes->export8bits));
#obytes->(import8bits: #temp);
/While;
Local: 'output' = (String: #obytes);
If: (#output == 'true') || (#output == 'false');
Return: (Boolean: #output);
Else: (#output == 'null');
Return: Null;
Else: (String_IsNumeric: #output);
Return: (#output >> '.') ? (Decimal: #output) | (Integer: #output);
/If;
Return: @#output;
/Define_Tag;
Define_Tag: 'consume_array', -Required='ibytes';
Local: 'output' = array;
local: 'delimit' = (array: 9, 10, 13, 32, 44); // \t\r\n ,
local: 'temp' = 0;
While: ((#temp := #ibytes->export8bits) != 93); // ]
If: (#delimit >> #temp);
// Discard whitespace
Else: (#temp == 34); // "
#output->(insert: (consume_string: @#ibytes));
Else: (#temp == 91); // [
#output->(insert: (consume_array: @#ibytes));
Else: (#temp == 123); // {
#output->(insert: (consume_object: @#ibytes));
Else;
#output->(insert: (consume_token: @#ibytes, @#temp));
(#temp == 93) ? Loop_Abort;
/If;
/While;
Return: @#output;
/Define_Tag;
Define_Tag: 'consume_object', -Required='ibytes';
Local: 'output' = map;
local: 'delimit' = (array: 9, 10, 13, 32, 44); // \t\r\n ,
local: 'temp' = 0;
local: 'key' = null;
local: 'val' = null;
While: ((#temp := #ibytes->export8bits) != 125); // }
If: (#delimit >> #temp);
// Discard whitespace
Else: (#key !== null) && (#temp == 34); // "
#output->(insert: #key = (consume_string: @#ibytes));
#key = null;
Else: (#key !== null) && (#temp == 91); // [
#output->(insert: #key = (consume_array: @#ibytes));
#key = null;
Else: (#key !== null) && (#temp == 123); // {
#output->(insert: #key = (consume_object: @#ibytes));
#key = null;
Else: (#key !== null);
#output->(insert: #key = (consume_token: @#ibytes, @#temp));
(#temp == 125) ? Loop_abort;
#key = null;
Else;
#key = (consume_string: @#ibytes);
while(#delimit >> (#temp := #ibytes->export8bits));
/while;
#temp != 58 ? Loop_Abort;
/If;
/While;
If: (#output >> '__jsonclass__') && (#output->(Find: '__jsonclass__')->(isa: 'array')) && (#output->(Find: '__jsonclass__')->size >= 2) && (#output->(Find: '__jsonclass__')->First == 'deserialize');
Return: #output->(find: '__jsonclass__')->Second->First;
Else: (#output >> 'native') && (#output >> 'comment') && (#output->(find: 'comment') == 'http://www.lassosoft.com/json');
Return: #output->(find: 'native');
/If;
Return: @#output;
/Define_Tag;
Local: 'ibytes' = (bytes: #value);
Local: 'start' = 1;
#ibytes->removeLeading(BOM_UTF8);
Local: 'temp' = #ibytes->export8bits;
If: (#temp == 91); // [
Local: 'output' = (consume_array: @#ibytes);
Return: @#output;
Else: (#temp == 123); // {
Local: 'output' = (consume_object: @#ibytes);
Return: @#output;
/If;
/Define_Tag;
/If;
If: (Lasso_TagExists: 'Literal') == False;
Define_Type: 'Literal', 'String';
/Define_Type;
/If;
If: (Lasso_TagExists: 'Object') == False;
Define_Type: 'Object', 'Map';
/Define_Type;
/If;
If: (Lasso_TagExists: 'JSON_RPCCall') == False;
Define_Tag: 'RPCCall', -Namespace='JSON_',
-Required='method',
-Optional='params',
-Optional='id',
-Optional='host';
!(Local_Defined: 'host') ? Local: 'host' = 'http://localhost/lassoapps.8/rpc/rpc.lasso';
!(Local_Defined: 'id') ? Local: 'id' = Lasso_UniqueID;
Local: 'request' = (Map: 'method' = #method, 'params' = #params, 'id' = #id);
Local: 'request' = (Encode_JSON: #request);
Local: 'result' = (Include_URL: #host, -PostParams=#request);
Local: 'result' = (Decode_JSON: #result);
Return: @#result;
/Define_Tag;
/If;
If: (Lasso_TagExists: 'JSON_Records') == False;
Define_Tag: 'JSON_Records',
-Optional='KeyField',
-Optional='ReturnField',
-Optional='ExcludeField',
-Optional='Fields';
Local: '_fields' = (Local_Defined: 'fields') && #fields->(IsA: 'array') ? #fields | Field_Names;
Fail_If: #_fields->size == 0, -1, 'No fields found for [JSON_Records]';
Local: '_keyfield' = (Local: 'keyfield');
If: #_fields !>> #_keyfield;
Local: '_keyfield' = (KeyField_Name);
If: #_fields !>> #_keyfield;
Local: '_keyfield' = 'ID';
If: #_fields !>> #_keyfield;
Local: '_keyfield' = #_fields->First;
/If;
/If;
/If;
Local: '_index' = #_fields->(FindPosition: #_keyfield)->First;
Local: '_return' = (Local_Defined: 'returnfield') ? (Params->(Find: -ReturnField)->(ForEach: {Params->First = Params->First->Second; Return: True}) &) | @#_fields;
Local: '_exclude' = (Local_Defined: 'excludefield') ? (Params->(Find: -ExcludeField)->(ForEach: {Params->First = Params->First->Second; Return: True}) &) | Array;
Local: '_records' = Array;
Iterate: Records_Array, (Local: '_record');
Local: '_temp' = Map;
Iterate: #_fields, (Local: '_field');
((#_return >> #_field) && (#_exclude !>> #_field)) ? #_temp->Insert(#_field = #_record->(Get: Loop_Count));
/Iterate;
#_records->Insert(#_temp);
/Iterate;
Local: '_output' = (Encode_JSON: (Object: 'error_msg'=Error_Msg, 'error_code'=Error_Code, 'found_count'=Found_Count, 'keyfield'=#_keyfield, 'rows'=#_records));
Return: @#_output;
/Define_Tag;
/If;
?>

213
samples/Lasso/json.lasso9 Normal file

@@ -0,0 +1,213 @@
/**
trait_json_serialize
Objects with this trait will be assumed to convert to JSON data
when their ->asString method is called
*/
define trait_json_serialize => trait {
require asString()
}
define json_serialize(e::bytes)::string => ('"' + (string(#e)->Replace(`\`, `\\`) & Replace('\"', '\\"') & Replace('\r', '\\r') & Replace('\n', '\\n') & Replace('\t', '\\t') & Replace('\f', '\\f') & Replace('\b', '\\b') &) + '"')
define json_serialize(e::string)::string => ('"' + (string(#e)->Replace(`\`, `\\`) & Replace('\"', '\\"') & Replace('\r', '\\r') & Replace('\n', '\\n') & Replace('\t', '\\t') & Replace('\f', '\\f') & Replace('\b', '\\b') &) + '"')
define json_serialize(e::json_literal)::string => (#e->asstring)
define json_serialize(e::integer)::string => (#e->asstring)
define json_serialize(e::decimal)::string => (#e->asstring)
define json_serialize(e::boolean)::string => (#e->asstring)
define json_serialize(e::null)::string => ('null')
define json_serialize(e::date)::string => ('"' + #e->format(#e->gmt ? '%QT%TZ' | '%Q%T') + '"')
/*
define json_serialize(e::array)::string => {
local(output) = '';
local(delimit) = '';
#e->foreach => { #output += #delimit + json_serialize(#1); #delimit = ', '; }
return('[' + #output + ']');
}
define json_serialize(e::staticarray)::string => {
local(output) = '';
local(delimit) = '';
#e->foreach => { #output += #delimit + json_serialize(#1); #delimit = ', '; }
return('[' + #output + ']');
}
*/
define json_serialize(e::trait_forEach)::string => {
local(output) = '';
local(delimit) = '';
#e->foreach => { #output += #delimit + json_serialize(#1); #delimit = ', '; }
return('[' + #output + ']');
}
define json_serialize(e::map)::string => {
local(output = with pr in #e->eachPair
select json_serialize(#pr->first->asString) + ': ' + json_serialize(#pr->second))
return '{' + #output->join(',') + '}'
}
define json_serialize(e::json_object)::string => {
local(output) = '';
local(delimit) = '';
#e->foreachpair => { #output += #delimit + #1->first + ': ' + json_serialize(#1->second); #delimit = ', '; }
return('{' + #output + '}');
}
define json_serialize(e::trait_json_serialize) => #e->asString
define json_serialize(e::any)::string => json_serialize('<LassoNativeType>' + #e->serialize + '</LassoNativeType>')
// Bil Corry fixes for decoding json
define json_consume_string(ibytes::bytes) => {
local(obytes) = bytes;
local(temp) = 0;
while((#temp := #ibytes->export8bits) != 34);
#obytes->import8bits(#temp);
(#temp == 92) ? #obytes->import8bits(#ibytes->export8bits); // Escape \
/while;
local(output = string(#obytes)->unescape)
//Replace('\\"', '\"') & Replace('\\r', '\r') & Replace('\\n', '\n') & Replace('\\t', '\t') & Replace('\\f', '\f') & Replace('\\b', '\b') &;
if(#output->BeginsWith('<LassoNativeType>') && #output->EndsWith('</LassoNativeType>'));
Protect;
return serialization_reader(xml(#output - '<LassoNativeType>' - '</LassoNativeType>'))->read
/Protect;
else( (#output->size == 16 or #output->size == 15) and regexp(`\d{8}T\d{6}Z?`, '', #output)->matches)
return date(#output, -Format=#output->size == 16?`yyyyMMdd'T'HHmmssZ`|`yyyyMMdd'T'HHmmss`)
/if
return #output
}
// Bil Corry fix + Ke fix
define json_consume_token(ibytes::bytes, temp::integer) => {
local(obytes = bytes->import8bits(#temp) &,
delimit = array(9, 10, 13, 32, 44, 58, 93, 125)) // \t\r\n ,:]}
while(#delimit !>> (#temp := #ibytes->export8bits))
#obytes->import8bits(#temp)
/while
#temp == 125? // }
#ibytes->marker -= 1
//============================================================================
// Is also end of token if end of array[]
#temp == 93? // ]
#ibytes->marker -= 1
//............................................................................
local(output = string(#obytes))
#output == 'true'?
return true
#output == 'false'?
return false
#output == 'null'?
return null
string_IsNumeric(#output)?
return (#output >> '.')? decimal(#output) | integer(#output)
return #output
}
// Bil Corry fix
define json_consume_array(ibytes::bytes)::array => {
Local(output) = array;
local(delimit) = array( 9, 10, 13, 32, 44); // \t\r\n ,
local(temp) = 0;
While((#temp := #ibytes->export8bits) != 93); // ]
If(#delimit >> #temp);
// Discard whitespace
Else(#temp == 34); // "
#output->insert(json_consume_string(#ibytes));
Else(#temp == 91); // [
#output->insert(json_consume_array(#ibytes));
Else(#temp == 123); // {
#output->insert(json_consume_object(#ibytes));
Else;
#output->insert(json_consume_token(#ibytes, #temp));
(#temp == 93) ? Loop_Abort;
/If;
/While;
Return(#output);
}
// Bil Corry fix
define json_consume_object(ibytes::bytes)::map => {
Local('output' = map,
'delimit' = array( 9, 10, 13, 32, 44), // \t\r\n ,
'temp' = 0,
'key' = null,
'val' = null);
While((#temp := #ibytes->export8bits) != 125); // }
If(#delimit >> #temp);
// Discard whitespace
Else((#key !== null) && (#temp == 34)); // "
#output->insert(#key = json_consume_string(#ibytes));
#key = null;
Else((#key !== null) && (#temp == 91)); // [
#output->insert(#key = json_consume_array(#ibytes));
#key = null;
Else((#key !== null) && (#temp == 123)); // {
#output->insert(#key = json_consume_object(#ibytes));
#key = null;
Else((#key !== null));
#output->insert(#key = json_consume_token(#ibytes, #temp));
#key = null;
Else;
#key = json_consume_string(#ibytes);
while(#delimit >> (#temp := #ibytes->export8bits));
/while;
#temp != 58 ? Loop_Abort;
/If;
/While;
If((#output >> '__jsonclass__') && (#output->Find('__jsonclass__')->isa('array')) && (#output->Find('__jsonclass__')->size >= 2) && (#output->Find('__jsonclass__')->First == 'deserialize'));
Return(#output->find('__jsonclass__')->Second->First);
Else((#output >> 'native') && (#output >> 'comment') && (#output->find('comment') == 'http://www.lassosoft.com/json'));
Return(#output->find('native'));
/If;
Return(#output);
}
// Bil Corry fix + Ke fix
define json_deserialize(ibytes::bytes)::any => {
#ibytes->removeLeading(bom_utf8);
//============================================================================
// Reset marker on provided bytes
#ibytes->marker = 0
//............................................................................
Local(temp) = #ibytes->export8bits;
If(#temp == 91); // [
Return(json_consume_array(#ibytes));
Else(#temp == 123); // {
Return(json_consume_object(#ibytes));
else(#temp == 34) // "
return json_consume_string(#ibytes)
/If;
}
define json_deserialize(s::string) => json_deserialize(bytes(#s))
/**! json_literal - This is a subclass of String used for JSON encoding.
A json_literal works exactly like a string, but will be inserted directly
rather than being encoded into JSON. This allows JavaScript elements
like functions to be inserted into JSON objects. This is most useful
when the JSON object will be used within a JavaScript on the local page.
[Map: 'fn'=Literal('function(){ ...})] => {'fn': function(){ ...}}
**/
define json_literal => type {
parent string
}
/**! json_object - This is a subclass of Map used for JSON encoding.
An object works exactly like a map, but when it is encoded into JSON all
of the keys will be inserted literally. This makes it easy to create a
JavaScript object without extraneous quote marks.
Object('name'='value') => {name: "value"}
**/
define json_object => type {
parent map
public onCreate(...) => ..onCreate(:#rest or (:))
}
define json_rpccall(method::string, params=map, id='', host='') => {
#id == '' ? #id = Lasso_UniqueID;
#host == '' ? #host = 'http://localhost/lassoapps.8/rpc/rpc.lasso';
Return(Decode_JSON(Include_URL(#host, -PostParams=Encode_JSON(Map('method' = #method, 'params' = #params, 'id' = #id)))));
}

8342
samples/Lasso/knop.las Normal file

File diff suppressed because it is too large

8342
samples/Lasso/knop.ldml Normal file

File diff suppressed because it is too large

14
samples/Less/screen.less Normal file

@@ -0,0 +1,14 @@
@blue: #3bbfce;
@margin: 16px;
.content-navigation {
border-color: @blue;
color:
darken(@blue, 9%);
}
.border {
padding: @margin / 2;
margin: @margin / 2;
border-color: @blue;
}


@@ -0,0 +1,117 @@
The **Scope** class regulates lexical scoping within CoffeeScript. As you
generate code, you create a tree of scopes in the same shape as the nested
function bodies. Each scope knows about the variables declared within it,
and has a reference to its parent enclosing scope. In this way, we know which
variables are new and need to be declared with `var`, and which are shared
with external scopes.
Import the helpers we plan to use.
{extend, last} = require './helpers'
exports.Scope = class Scope
The `root` is the top-level **Scope** object for a given file.
@root: null
Initialize a scope with its parent, for lookups up the chain,
as well as a reference to the **Block** node it belongs to, which is
where it should declare its variables, and a reference to the function that
it belongs to.
constructor: (@parent, @expressions, @method) ->
@variables = [{name: 'arguments', type: 'arguments'}]
@positions = {}
Scope.root = this unless @parent
Adds a new variable or overrides an existing one.
add: (name, type, immediate) ->
return @parent.add name, type, immediate if @shared and not immediate
if Object::hasOwnProperty.call @positions, name
@variables[@positions[name]].type = type
else
@positions[name] = @variables.push({name, type}) - 1
When `super` is called, we need to find the name of the current method we're
in, so that we know how to invoke the same method of the parent class. This
can get complicated if super is being called from an inner function.
`namedMethod` will walk up the scope tree until it either finds the first
function object that has a name filled in, or bottoms out.
namedMethod: ->
return @method if @method.name or !@parent
@parent.namedMethod()
Look up a variable name in lexical scope, and declare it if it does not
already exist.
find: (name) ->
return yes if @check name
@add name, 'var'
no
Reserve a variable name as originating from a function parameter for this
scope. No `var` required for internal references.
parameter: (name) ->
return if @shared and @parent.check name, yes
@add name, 'param'
Just check to see if a variable has already been declared, without reserving,
walks up to the root scope.
check: (name) ->
!!(@type(name) or @parent?.check(name))
Generate a temporary variable name at the given index.
temporary: (name, index) ->
if name.length > 1
'_' + name + if index > 1 then index - 1 else ''
else
'_' + (index + parseInt name, 36).toString(36).replace /\d/g, 'a'
Gets the type of a variable.
type: (name) ->
return v.type for v in @variables when v.name is name
null
If we need to store an intermediate result, find an available name for a
compiler-generated variable. `_var`, `_var2`, and so on...
freeVariable: (name, reserve=true) ->
index = 0
index++ while @check((temp = @temporary name, index))
@add temp, 'var', yes if reserve
temp
Ensure that an assignment is made at the top of this scope
(or at the top-level scope, if requested).
assign: (name, value) ->
@add name, {value, assigned: yes}, yes
@hasAssignments = yes
Does this scope have any declared variables?
hasDeclarations: ->
!!@declaredVariables().length
Return the list of variables first declared in this scope.
declaredVariables: ->
realVars = []
tempVars = []
for v in @variables when v.type is 'var'
(if v.name.charAt(0) is '_' then tempVars else realVars).push v.name
realVars.sort().concat tempVars.sort()
Return the list of assignments that are supposed to be made at the top
of this scope.
assignedVariables: ->
"#{v.name} = #{v.type.value}" for v in @variables when v.type.assigned

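The literate prose above describes the bookkeeping the sample's Scope class performs: each scope records the variables declared within it and keeps a pointer to its parent, and lookups walk up that chain to decide whether a name is new (and needs `var`) or is already visible in an enclosing scope. As a rough sketch of that structure only — the class and method names below are invented for illustration and are not part of the sample set or of the CoffeeScript compiler — the idea in Python is:

    class Scope:
        """Illustrative only: a scope tree with parent links, mirroring the prose above."""
        def __init__(self, parent=None):
            self.parent = parent        # enclosing scope, or None at the root
            self.variables = {}         # name -> type, e.g. 'var' or 'param'

        def add(self, name, type_):
            # Declare a new variable, or override the type of an existing one.
            self.variables[name] = type_

        def check(self, name):
            # A name is visible if this scope or any enclosing scope declares it.
            return name in self.variables or bool(self.parent and self.parent.check(name))

        def find(self, name):
            # Look a name up; if it is not visible anywhere, declare it here as 'var'.
            if self.check(name):
                return True
            self.add(name, 'var')
            return False

    root = Scope()
    inner = Scope(parent=root)
    root.add('x', 'var')
    assert inner.check('x')            # found via the parent chain
    assert inner.find('y') is False    # new name: declared in the inner scope
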

@@ -0,0 +1,35 @@
a = -> 1
const b = --> 2
var c = ~> 3
d = ~~> 10_000_000km * 500ms
e = (a) -> (b) ~> (c) --> (d, e) ~~> 5
dashes-identifiers = ->
a - a
b -- c
1-1 1- -1
a- a
a -a
underscores_i$d = ->
/regexp1/ and //regexp2//g
'strings' and "strings" and \strings
([2 til 10] or [1 to 50])
|> map (* 2)
|> filter (> 5)
|> fold (+)
class Class extends Anc-est-or
(args) ->
copy = (from, to, callback) -->
error, data <- read file
return callback error if error?
error <~ write file, data
return callback error if error?
callback()
->
~>
~~>
-->
# Comment
/* Comment */

28
samples/Logos/example.xm Normal file

@@ -0,0 +1,28 @@
%hook ABC
- (id)a:(B)b {
%log;
return %orig(nil);
}
%end
%subclass DEF: NSObject
- (id)init {
[%c(RuntimeAccessibleClass) alloc];
return nil;
}
%end
%group OptionalHooks
%hook ABC
- (void)release {
[self retain];
%orig;
}
%end
%end
%ctor {
%init;
if(OptionalCondition)
%init(OptionalHooks);
}


@@ -0,0 +1,20 @@
-- A simple counting object that increments an internal counter whenever it receives a bang at its first inlet, or changes to whatever number it receives at its second inlet.
local HelloCounter = pd.Class:new():register("h-counter")
function HelloCounter:initialize(sel, atoms)
self.inlets = 2
self.outlets = 1
self.num = 0
return true
end
function HelloCounter:in_1_bang()
self:outlet(1, "float", {self.num})
self.num = self.num + 1
end
function HelloCounter:in_2_float(f)
self.num = f
end


@@ -0,0 +1,43 @@
local FileListParser = pd.Class:new():register("vidya-file-list-parser")
function FileListParser:initialize(sel, atoms)
-- 1. Base filename
-- 2. File extension
-- 3. Number of files in batch
self.inlets = 3
-- 1. To [list trim]-[binfile]
-- 2. To [vidya-file-modder]'s filename variables
-- 3. Sends a bang to [vidya-file-modder], triggering the object's mechanisms
self.outlets = 3
-- File extension
self.extension = "jpg"
-- Number of the last file in the batch
self.batchlimit = 0
return true
end
function FileListParser:in_1_symbol(s)
for i = 0, self.batchlimit do
self:outlet(2, "list", {s, i})
self:outlet(1, "read", {s .. i .. "." .. self.extension})
self:outlet(1, "info", {})
self:outlet(3, "bang", {})
end
end
function FileListParser:in_2_list(d)
self.extension = d[1]
end
function FileListParser:in_3_float(f)
self.batchlimit = f
end


@@ -0,0 +1,137 @@
local FileModder = pd.Class:new():register("vidya-file-modder")
function FileModder:initialize(sel, atoms)
-- 1. Object-triggering bang
-- 2. Incoming single data bytes from [binfile]
-- 3. Total bytes in file, from [route buflength]
-- 4. Glitch type
-- 5. Glitch point
-- 6. Number of times to glitch a file
-- 7. Toggle for a randomized number of glitches within the bounds of (6)
-- 8. Active filename
self.inlets = 8
-- 1. To [binfile] inlet - bang(get next byte), clear(clear the buffer), FLOAT(write a byte to buffer), write(write to file)
self.outlets = 1
-- Currently active file's namedata
self.filedata = {
"default-filename",
0,
}
-- Glitch type (pattern, random, or splice)
self.glitchtype = "random"
-- Minimum glitch point in image data
self.glitchpoint = 500
-- Number of times to repeat random glitches on a given file
self.randrepeat = 1
-- Toggles whether the number of repeating glitches should be random, within the bounds of 1 to self.randrepeat
self.randtoggle = "concrete"
-- Hold all bytes, which are converted to ints in the 0-255 range
self.bytebuffer = {}
-- Buffer length of currently active file
self.buflength = 0
return true
end
function FileModder:in_1_bang()
for i = 1, self.buflength do
self:outlet(1, "bang", {})
end
self:outlet(1, "clear", {})
if self.glitchtype == "pattern" then
local plen = math.random(2, 1000)
local patbuffer = {}
for i = 1, plen do
table.insert(patbuffer, math.random(1, 254))
end
for i = self.glitchpoint, self.buflength do
self.bytebuffer[i] = patbuffer[((i - 1) % #patbuffer) + 1]
end
elseif self.glitchtype == "random" then
local randlimit = 0
if self.randtoggle == "random" then
randlimit = math.random(1, self.randrepeat)
else
randlimit = self.randrepeat
end
for i = 1, randlimit do
self.bytebuffer[math.random(self.glitchpoint, self.buflength)] = math.random(1, 244)
end
elseif self.glitchtype == "splice" then
local sloc = math.random(self.glitchpoint, self.buflength)
local schunksize = math.random(1, self.buflength - sloc)
local splicebuffer = {}
for i = 1, schunksize do
table.insert(splicebuffer, table.remove(self.bytebuffer, sloc))
end
local insertpoint = math.random(self.glitchpoint, #self.bytebuffer)
for _, v in ipairs(splicebuffer) do
table.insert(self.bytebuffer, insertpoint, v)
end
end
for _, v in ipairs(self.bytebuffer) do
self:outlet(1, "float", {v})
end
local outname = self.filedata[1] .. "-glitch" .. self.filedata[2] .. ".jpeg"
self:outlet(1, "write", {outname})
pd.post("New glitched image: " .. outname)
self:outlet(1, "clear", {})
self.bytebuffer = {}
end
function FileModder:in_2_float(f)
table.insert(self.bytebuffer, f)
end
function FileModder:in_3_list(f)
self.buflength = f[1] + 1 -- Shift from 0-indexed to 1-indexed
end
function FileModder:in_4_list(d)
self.glitchtype = d[1]
end
function FileModder:in_5_float(f)
self.glitchpoint = f
end
function FileModder:in_6_float(f)
self.randrepeat = f
end
function FileModder:in_7_list(d)
self.randtoggle = d[1]
end
function FileModder:in_8_list(d)
self.filedata = {d[1], d[2]}
end

23
samples/M/GMRGPNB0.m Normal file

@@ -0,0 +1,23 @@
GMRGPNB0 ;CISC/JH/RM-NARRATIVE BUILDER FOR TEXT GENERATOR (cont.) ;6/20/91
;;3.0;Text Generator;;Jan 24, 1996
TEXT ; ENTRY WITH GMRGA SET TO POINT AT WHICH WANT TO START BUILDING TEXT
S (GMRGE0,GMRGADD)=""
Q:'$D(^GMR(124.3,GMRGPDA,1,"ALIST",GMRGA0))&GMRGCSW Q:('$D(^GMR(124.3,"B",GMRGA0,GMRGPDA))&'$D(^GMR(124.3,GMRGPDA,1,"B",GMRGA0)))&'GMRGCSW D NOW^%DTC
S GMRGB0=$O(^GMR(124.3,GMRGPDA,1,"B",GMRGA0,0)) I GMRGB0>0 S GMRGST=GMRGB0,GMRGST(1)=GMRGPDA,GMRGST(2)=$S(GMRGCSW=1:%,1:GMRGPDT) D STAT^GMRGRUT0 S GMRGF0=GMRGSTAT,GMRGST=GMRGB0,GMRGST(1)=GMRGPDA,GMRGST(2)=GMRGPDT D STAT^GMRGRUT0
I GMRGB0>0 S GMRGE0=$S($P(GMRGSTAT,"^",3)=1:"",$P(GMRGSTAT,"^")=$P(GMRGF0,"^"):$S($D(^GMR(124.3,GMRGPDA,1,GMRGB0,0)):$P(^(0),"^",2),1:""),1:$S($D(^GMR(124.3,GMRGPDA,1,GMRGB0,2,+$P(GMRGSTAT,"^"),0)):$P(^(0),"^",4),1:""))
I S GMRGADD=$S($P(GMRGSTAT,"^",3)=1:"",$P(GMRGSTAT,"^")=$P(GMRGF0,"^"):$S($D(^GMR(124.3,GMRGPDA,1,GMRGB0,"ADD")):"1;"_GMRGB0_";0",1:""),1:$S($D(^GMR(124.3,GMRGPDA,1,GMRGB0,2,$P(GMRGSTAT,"^"),"ADD")):"1;"_GMRGB0_";2;"_$P(GMRGSTAT,"^"),1:""))
S GMRGE0(0)=$S($D(^GMRD(124.2,GMRGA0,0)):^(0),1:""),GMRGE0(4)=$S($D(^GMRD(124.2,GMRGA0,4)):^(4),1:""),GMRGE0(5)=$S($D(^GMRD(124.2,GMRGA0,5)):^(5),1:"") Q:$P(GMRGE0(0),"^",2)=3&GMRGSSW
I "S"[$P(GMRGE0(0),"^",8)!GMRGSSW D SNT^GMRGPNB1 Q
S GMRGNAR=GMRGPAR_"^"_$P(GMRGE0(0),"^",8)_"^"_GMRGSPC_"^^"_GMRGRM,GMRGNAR(0)=$P(GMRGE0(0),"^")_"^"_GMRGE0,GMRGNAR("LEAD")=GMRGE0(4),GMRGNAR("TRAIL")=GMRGE0(5) D STORETXT^GMRGRUT1
S GMRGSPC=GMRGSPC+3
F GMRGD0=0:0 S GMRGD0=$O(^GMRD(124.2,GMRGA0,1,GMRGD0)) Q:GMRGD0'>0 D RECUR
Q:'GMRGADD
S GMRGNAR=GMRGPAR_"^T^"_GMRGSPC_"^^"_GMRGRM,GMRGNAR("LEAD")="Additional Text: ",GMRGNAR("TRAIL")=""
S GMRGNAR(0)=$S('$P(GMRGADD,";",3):^GMR(124.3,GMRGPDA,1,$P(GMRGADD,";",2),"ADD"),1:^GMR(124.3,GMRGPDA,1,$P(GMRGADD,";",2),2,$P(GMRGADD,";",4),"ADD")) D STORETXT^GMRGRUT1
Q
RECUR ;
Q:'$$ALIST^GMRGRUT0(GMRGPDA,GMRGA0,+$G(^GMRD(124.2,+GMRGA0,1,+GMRGD0,0)))
S ^TMP($J,"GMRGPLVL",GMRGPLVL)=GMRGA0_"^"_GMRGD0_"^"_GMRGSPC_"^"_GMRGSSW_"^"_GMRGADD,GMRGPLVL=GMRGPLVL+1,GMRGA0=$S($D(^GMRD(124.2,GMRGA0,1,GMRGD0,0)):$P(^(0),"^"),1:"")
D TEXT
S GMRGPLVL=GMRGPLVL-1,GMRGI0=^TMP($J,"GMRGPLVL",GMRGPLVL),GMRGA0=$P(GMRGI0,"^"),GMRGD0=$P(GMRGI0,"^",2),GMRGSPC=$P(GMRGI0,"^",3),GMRGSSW=$P(GMRGI0,"^",4),GMRGADD=$P(GMRGI0,"^",5)
Q

2460
samples/M/MDB.m Normal file

File diff suppressed because it is too large

34
samples/M/PRCAAPR.m Normal file

@@ -0,0 +1,34 @@
PRCAAPR ;WASH-ISC@ALTOONA,PA/RGY-PATIENT ACCOUNT PROFILE (CONT) ;3/9/94 8:41 AM
V ;;4.5;Accounts Receivable;**198,221**;Mar 20, 1995
;;Per VHA Directive 10-93-142, this routine should not be modified.
EN(PRCATY) ;
NEW DIC,X,Y,DEBT,PRCADB,DA,PRCA,COUNT,OUT,SEL,BILL,BAT,TRAN,DR,DXS,DTOUT,DIROUT,DIRUT,DUOUT
ASK N DPTNOFZY,DPTNOFZK S (DPTNOFZY,DPTNOFZK)=1
K OUT S COUNT=0 R !,"Select DEBTOR NAME or BILL NUMBER: ",X:DTIME I "^"[$E(X) S $P(DEBT,"^",2)="" G Q
S X=$$UPPER^VALM1(X)
S Y=$S($O(^PRCA(430,"B",X,0)):$O(^(0)),$O(^PRCA(430,"D",X,0)):$O(^(0)),1:-1)
I Y>0 S DEBT=$P($G(^PRCA(430,Y,0)),"^",9) I DEBT S PRCADB=$P($G(^RCD(340,DEBT,0)),"^"),^DISV(DUZ,"^PRCA(430,")=Y,$P(DEBT,"^",2)=$$NAM^RCFN01(DEBT) D COMP,EN1^PRCAATR(Y) G:$D(DTOUT) Q G ASK
S DIC="^RCD(340,",DIC(0)="E" D ^DIC G:Y<0 ASK
S ^DISV(DUZ,"^RCD(340,")=+Y,PRCADB=$P(Y,"^",2),DEBT=+Y_"^"_$P(@("^"_$P(PRCADB,";",2)_+PRCADB_",0)"),"^")
D COMP,HDR^PRCAAPR1,HDR2^PRCAAPR1,DIS^PRCAAPR1 G:'$D(DTOUT) ASK
Q K ^TMP("PRCAAPR",$J) Q
COMP ;Compile patient bills
K ^TMP("PRCAAPR",$J)
NEW STAT,STAT1,CNT,Y
S STAT1=0
F CNT=1:1 S STAT1=+$S(PRCATY="ALL":$O(^PRCA(430,"AS",+DEBT,STAT1)),1:$O(^PRCA(430.3,"AC",+$P(PRCATY,",",CNT),0))) Q:'STAT1 F BILL=0:0 S BILL=$O(^PRCA(430,"AS",+DEBT,STAT1,BILL)) Q:'BILL D COMP1
I PRCADB[";DPT(" F BILL=0:0 S BILL=$O(^PRCA(430,"E",+PRCADB,BILL)) Q:'BILL I PRCATY="ALL"!((","_PRCATY_",")[(","_$P($G(^PRCA(430.3,+$P($G(^PRCA(430,BILL,0)),"^",8),0)),"^",3)_",")) D COMP1
F BAT=0:0 S BAT=$O(^RCY(344,"AC",PRCADB,BAT)) Q:'BAT F TRAN=0:0 S TRAN=$O(^RCY(344,"AC",PRCADB,BAT,TRAN)) Q:'TRAN I $G(^RCY(344,BAT,1,TRAN,0))]"",$P(^(0),"^",5)="" D COMP2
Q
COMP1 S STAT=$P($G(^PRCA(430.3,+$P($G(^PRCA(430,BILL,0)),"^",8),0)),"^",3) Q:STAT=""
S X=$G(^PRCA(430,BILL,7)),Y=$P(X,"^")+$P(X,"^",2)+$P(X,"^",3)+$P(X,"^",4)+$P(X,"^",5)
I $P(^PRCA(430,BILL,0),"^",2)=$O(^PRCA(430.2,"AC",33,0)) S Y=-Y
S Y=$S($P(^PRCA(430,BILL,0),"^",2)=$O(^PRCA(430.2,"AC",33,0))&(STAT'=112):0,$P(^PRCA(430,BILL,0),"^",9)'=+DEBT:0,",102,107,112,"[(","_STAT_","):Y,1:0)
S ^TMP("PRCAAPR",$J,"C")=$G(^TMP("PRCAAPR",$J,"C"))+Y
S ^TMP("PRCAAPR",$J,"C",STAT)=$G(^TMP("PRCAAPR",$J,"C",STAT))+Y_"^"_STAT,^(STAT,BILL)=$P(X,"^",1,5)
Q
COMP2 ;Compile payments
S Y=$P(^RCY(344,BAT,1,TRAN,0),"^",4)
S ^TMP("PRCAAPR",$J,"C")=$G(^TMP("PRCAAPR",$J,"C"))-Y
S ^TMP("PRCAAPR",$J,"C",99)=$G(^TMP("PRCAAPR",$J,"C",99))-Y_"^99",^TMP("PRCAAPR",$J,"C",99,$P(^RCY(344,BAT,0),"^")_"-"_TRAN)=$P(^RCY(344,BAT,1,TRAN,0),"^",4)
Q

203
samples/M/PXAI.m Normal file

@@ -0,0 +1,203 @@
PXAI ;ISL/JVS,ISA/KWP,ESW - PCE DRIVING RTN FOR 'DATA2PCE' API ;6/20/03 11:15am
;;1.0;PCE PATIENT CARE ENCOUNTER;**15,74,69,102,111,112,130,164,168**;Aug 12, 1996;Build 14
Q
;
;+ 1 2 3 4 5 6 7 8 9
DATA2PCE(PXADATA,PXAPKG,PXASOURC,PXAVISIT,PXAUSER,PXANOT,ERRRET,PXAPREDT,PXAPROB,PXACCNT) ;+API to pass data for add/edit/delete to PCE.
;+ PXADATA (required)
;+ PXAPKG (required)
;+ PXASOURC (required)
;+ PXAVISIT (optional) is pointer to a visit for which the data is to
;+ be related. If the visit is not known then there must be
;+ the ENCOUNTER nodes needed to lookup/create the visit.
;+ PXAUSER (optional) this is a pointer to the user adding the data.
;+ PXANOT (optional) set to 1 if errors are to be displayed to the screen should only be set while writing and debugging the initial code.
;+ ERRRET (optional) passed by reference. If present will return PXKERROR
;+ array elements to the caller.
;+ PXAPREDT (optional) Set to 1 if you want to edit the Primary Provider
;+ only use if for the moment that editing is being done. (dangerous)
;+ PXAPROB (optional) A dotted variable name. When errors and
;+ warnings occur, They will be passed back in the form
;+ of an array with the general description of the problem.
;+ IF ERROR1 - (GENERAL ERRORS)
;+ PXAPROB($J,SUBSCRIPT,"ERROR1",PASSED IN 'FILE',PASSED IN FIELD,
;+ SUBSCRIPT FROM PXADATA)
;+ PXAPROB(23432234,2,"ERROR1","PROVIDER","NAME",7)="BECAUSE..."
;+ IF WARNING2 - (GENERAL WARNINGS)
;+ PXAPROB($J,SUBSCRIPT,"WARNING2",PASSED IN 'FILE',PASSED IN FIELD,
;+ SUBSCRIPT FROM PXADATA)
;+ PXAPROB(23432234,3,"WARNING2","PROCEDURE","QTY",3)="BECAUSE..."
;+ IF WARNING3 - (WARNINGS FOR SERVICE CONNECTION)
;+ PXAPROB($J,1,"WARNING3","ENCOUNTER",1,"AO")=REASON
;+ PXAPROB($J,1,"WARNING3","ENCOUNTER",1,"EC")=REASON
;+ PXAPROB($J,1,"WARNING3","ENCOUNTER",1,"IR")=REASON
;+ PXAPROB($J,1,"WARNING3","ENCOUNTER",1,"SC")=REASON
;+ PXAPROB($J,1,"WARNING3","ENCOUNTER",1,"MST")=REASON
;+ PXAPROB($J,1,"WARNING3","ENCOUNTER",1,"HNC")=REASON
;+ PXAPROB($J,1,"WARNING3","ENCOUNTER",1,"CV")=REASON
;+ PXAPROB($J,1,"WARNING3","ENCOUNTER",1,"SHAD")=REASON
;+ IF ERROR4 - (PROBLEM LIST ERRORS)
;+ PXAPROB($J,6,"ERROR4","PX/DL",(SUBSCRIPT FROM PXADATA))=REASON
;+ PXACCNT (optional) passed by reference. Returns the PFSS Account Reference if known.
; Returned as null if the PFSS Account Reference is located in the Order file(#100)
;+
;+
;+ Returns:
;+ 1 if no errors and process completely
;+ -1 if errors occurred but processed completely as possible
;+ -2 if could not get a visit
;+ -3 if called incorrectly
;
NEW ;--NEW VARIABLES
N NOVSIT,PXAK,DFN,PXAERRF,PXADEC,PXELAP,PXASUB
N PATIENT,VALQUIET,PRIMFND
K PXAERROR,PXKERROR,PXAERR,PRVDR
S PXASUB=0,VALQUIET=1
; needs to look up if not passed.
I '$G(PXAVISIT),'$D(@PXADATA@("ENCOUNTER")) Q -3
I $G(PXAUSER)<1 S PXAUSER=DUZ
;
K ^TMP("PXK",$J),^TMP("DIERR",$J),^TMP("PXAIADDPRV",$J)
SOR ;--SOURCE
I PXAPKG=+PXAPKG S PXAPKG=PXAPKG
E S PXAPKG=$$PKG2IEN^VSIT(PXAPKG)
I PXASOURC=+PXASOURC S PXASOURC=PXASOURC
E S PXASOURC=$$SOURCE^PXAPIUTL(PXASOURC)
;
D TMPSOURC^PXAPIUTL(PXASOURC) ;-SAVES & CREATES ^TMP("PXK",$J,"SOR")
VST ;--VISIT
;--KILL VISIT
I $G(PXAVISIT) D VPTR^PXAIVSTV I $G(PXAERRF) D ERR Q -2
D VST^PXAIVST
I $G(PXAVISIT)<0 Q -2
I $G(PXAERRF) D ERR K PXAERR Q -2
PRV ;--PROVIDER
S PATIENT=$P($G(^AUPNVSIT(PXAVISIT,0)),"^",5)
S (PXAK,PRIMFND)=0
F S PXAK=$O(@PXADATA@("PROVIDER",PXAK)) Q:(PRIMFND)!(PXAK="") D
.I $D(@PXADATA@("PROVIDER",PXAK,"PRIMARY")) D
..S PRIMFND=$G(@PXADATA@("PROVIDER",PXAK,"PRIMARY"))
I 'PRIMFND D ;Check for each provider's status as Primary or Secondary
.S PXAK=0 F S PXAK=$O(@PXADATA@("PROVIDER",PXAK)) Q:PXAK="" D
..I '$D(@PXADATA@("PROVIDER",PXAK,"PRIMARY")) D PROVDRST
S PXAK=0 F S PXAK=$O(@PXADATA@("PROVIDER",PXAK)) Q:PXAK="" D
. D PRV^PXAIPRV I $G(PXAERRF) D ERR
K PRI ;--FLAG FOR PRIMARY PROVIDER
K PXAERR
POV ;--DIAGNOSIS
S (PXAK,PRIMFND)=0
F S PXAK=$O(@PXADATA@("DX/PL",PXAK)) Q:(PXAK="") D Q:PRIMFND
.I +$G(@PXADATA@("DX/PL",PXAK,"PRIMARY"))=1 D
..S PRIMFND=$G(@PXADATA@("DX/PL",PXAK,"DIAGNOSIS"))
I $D(@PXADATA@("DX/PL")) D POVPRM(PXAVISIT,PRIMFND,.PXADATA) D
.S PXAK=0 F S PXAK=$O(@PXADATA@("DX/PL",PXAK)) Q:PXAK="" D
..D POV^PXAIPOV I $G(PXAERRF) D ERR
K PXAERR
;
CPT ;--PROCEDURE
S PXAK=0 F S PXAK=$O(@PXADATA@("PROCEDURE",PXAK)) Q:PXAK="" D
. D CPT^PXAICPT I $G(PXAERRF) D ERR
K PXAERR
;
EDU ;--PATIENT EDUCATION
S PXAK=0 F S PXAK=$O(@PXADATA@("PATIENT ED",PXAK)) Q:PXAK="" D
. D EDU^PXAIPED I $G(PXAERRF) D ERR
K PXAERR
;
EXAM ;--EXAMINATION
S PXAK=0 F S PXAK=$O(@PXADATA@("EXAM",PXAK)) Q:PXAK="" D
. D EXAM^PXAIXAM I $G(PXAERRF) D ERR
K PXAERR
;
HF ;--HEALTH FACTOR
S PXAK=0 F S PXAK=$O(@PXADATA@("HEALTH FACTOR",PXAK)) Q:PXAK="" D
. D HF^PXAIHF I $G(PXAERRF) D ERR
K PXAERR
;
IMM ;--IMMUNIZATION
S PXAK=0 F S PXAK=$O(@PXADATA@("IMMUNIZATION",PXAK)) Q:PXAK="" D
. D IMM^PXAIIMM I $G(PXAERRF) D ERR
K PXAERR
;
SKIN ;--SKIN TEST
S PXAK=0 F S PXAK=$O(@PXADATA@("SKIN TEST",PXAK)) Q:PXAK="" D
. D SKIN^PXAISK I $G(PXAERRF) D ERR
K PXAERR
;
;
D OTHER^PXAIPRV
;
;
I $D(^TMP("PXK",$J)) D
. D EN1^PXKMAIN
. M ERRRET=PXKERROR
. D PRIM^PXAIPRV K PRVDR
. D EVENT^PXKMAIN
S PXACCNT=$P($G(^AUPNVSIT(PXAVISIT,0)),"^",26) ;PX*1.0*164 ;Sets the PFSS Account Reference, if any
K ^TMP("PXK",$J),PXAERR,PXKERROR
Q $S($G(PXAERRF):-1,1:1)
;
;
EXIT ;--EXIT AND CLEAN UP
D EVENT^PXKMAIN
K ^TMP("PXK",$J),PRVDR
K PXAERR
Q
;-----------------SUBROUTINES-----------------------
ERR ;
;
;
I '$D(PXADI("DIALOG")) Q
N NODE,SCREEN
S PXAERR(1)=$G(PXADATA),PXAERR(2)=$G(PXAPKG),PXAERR(3)=$G(PXASOURC)
S PXAERR(4)=$G(PXAVISIT),PXAERR(5)=$G(PXAUSER)_" "_$P($G(^VA(200,PXAUSER,0)),"^",1)
I $G(PXANOT)=1 D EXTERNAL
E D INTERNAL
D ARRAY^PXAICPTV
K PXADI("DIALOG")
Q
;
EXTERNAL ;---SEND ERRORS TO SCREEN
W !,"-----------------------------------------------------------------"
D BLD^DIALOG($G(PXADI("DIALOG")),.PXAERR,"","SCREEN","F")
D MSG^DIALOG("ESW","",50,10,"SCREEN")
;
Q
INTERNAL ;---SET ERRORS TO GLOBAL ARRAY
S NODE=PXADATA
D BLD^DIALOG($G(PXADI("DIALOG")),.PXAERR,.PXAERR,NODE,"F")
S NODE=$NA(@PXADATA@("DIERR",$J)) D MSG^DIALOG("ESW","",50,10,NODE)
Q
;
PROVDRST ; Check provider status (Primary or Secondary)
N PRVIEN,DETS,DIC,DR,DA,DIQ,PRI,PRVPRIM
I $G(PXAK)="" QUIT
S PRVIEN=0
F S PRVIEN=$O(^AUPNVPRV("AD",PXAVISIT,PRVIEN)) Q:PRVIEN="" D
.S DETS=$G(^AUPNVPRV(PRVIEN,0))
.I $P(DETS,U)=$G(@PXADATA@("PROVIDER",PXAK,"NAME")) D
..S DIC=9000010.06,DR=.04,DA=PRVIEN
..S DIQ="PRVPRIM(",DIQ(0)="EI" D EN^DIQ1
..S PRI=$E($G(PRVPRIM(9000010.06,DA,DR,"E")),1,1)
..S @PXADATA@("PROVIDER",PXAK,"PRIMARY")=$S(PRI="P":1,1:0)
Q
POVPRM(VISIT,PRIMFND,POVARR) ;
N PRVIEN,DETS,STOP,LPXAK,ORDX,NDX,ORDXP
S PRVIEN=0
;create array of existing DX; ORDX - pointer to ^ICD9(
F S PRVIEN=$O(^AUPNVPOV("AD",PXAVISIT,PRVIEN)) Q:PRVIEN="" D
.S DETS=$G(^AUPNVPOV(PRVIEN,0)),ORDX=$P(DETS,U)
.S ORDX(ORDX)=PRVIEN I $P(DETS,U,12)="P" S ORDXP(ORDX)=""
; create array of passed DX; NDX - pointer to ^ICD9(
S PXAK=0 F S PXAK=$O(@POVARR@("DX/PL",PXAK)) Q:PXAK="" D
.S NDX=$G(@POVARR@("DX/PL",PXAK,"DIAGNOSIS")) S NDX(NDX)=PXAK
; force entry of originally primary diagnosis with "S" flag
I PRIMFND S ORDX="" D
.F S ORDX=$O(ORDXP(ORDX)) Q:ORDX="" I PRIMFND'=ORDX D
..I $D(NDX(ORDX)) S @POVARR@("DX/PL",NDX(ORDX),"PRIMARY")=0
..E D
...S LPXAK=$O(@POVARR@("DX/PL",""),-1)
...S @POVARR@("DX/PL",LPXAK+1,"DIAGNOSIS")=ORDX
...S @POVARR@("DX/PL",LPXAK+1,"PRIMARY")=0
Q
;

112
samples/M/WVBRNOT.m Normal file

@@ -0,0 +1,112 @@
WVBRNOT ;HCIOFO/FT,JR IHS/ANMC/MWR - BROWSE NOTIFICATIONS; ;7/30/98 11:02
;;1.0;WOMEN'S HEALTH;;Sep 30, 1998
;;* MICHAEL REMILLARD, DDS * ALASKA NATIVE MEDICAL CENTER *
;; CALLED BY OPTION: "WV BROWSE NOTIFICATIONS" TO BROWSE AND EDIT
;; NOTIFICATIONS.
;
;---> VARIABLES:
;---> WVA: 1=ALL PATIENTS, 0=ONE PATIENT
;---> WVDFN: DFN OF SELECTED PATIENT
;---> DATES: WVBEGDT=BEGINNING DATE, WVENDDT=ENDING DATE
;---> WVB: d=DELINQUENT, o=OPEN, q=queued, a=ALL (includes CLOSED).
;---> SORT SEQUENCE IN WVC: 1=DATE, PATIENT, PRIORITY
;---> 2=PATIENT, DATE, PRIORITY
;---> 3=PRIORITY, DATE, PATIENT
;---> USE NODES 3 & 4 IN ^TMP GLOBAL.
;
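;---> EXAMPLE (ILLUSTRATIVE): WVA=0, WVB="o", WVC=1 BROWSES OPEN
;---> NOTIFICATIONS FOR ONE PATIENT, SORTED BY DATE (SEE FOLLOW BELOW
;---> FOR AN ACTUAL SETUP OF THESE VARIABLES).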
D SETVARS^WVUTL5
D ^WVBRNOT2 G:WVPOP EXIT
D SORT
D COPYGBL
D ^WVBRNOT1
;
EXIT ;EP
D KILLALL^WVUTL8
Q
;
;
SORT ;EP
;---> SORT AND STORE ARRAY IN ^TMP("WV",$J
K ^TMP("WV",$J)
;---> WVBEGDT1=ONE SECOND BEFORE BEGIN DATE.
;---> WVENDDT1=THE LAST SECOND OF END DATE.
S WVBEGDT1=WVBEGDT-.0001,WVENDDT1=WVENDDT+.9999
;
;**************************
;---> WVA=1: ALL PATIENTS
I WVA D Q
.;---> BY DATE GET EITHER ALL OR OPEN ONLY.
.N WVDFN,WVIEN,Y
.S WVXREF=$S(WVB="a":"D",WVB="q":"APRT",1:"AOPEN")
.S WVDATE=WVBEGDT1
.F S WVDATE=$O(^WV(790.4,WVXREF,WVDATE)) Q:'WVDATE!(WVDATE>WVENDDT1) D
..S WVIEN=0
..F S WVIEN=$O(^WV(790.4,WVXREF,WVDATE,WVIEN)) Q:'WVIEN D
...Q:'$D(^WV(790.4,WVIEN,0))
...S Y=^WV(790.4,WVIEN,0),WVDFN=$P(Y,U)
...;---> QUIT IF SELECTING FOR ONE CASE MANAGER AND THIS DOESN'T MATCH.
...I 'WVE Q:$P(^WV(790,WVDFN,0),U,10)'=WVCMGR
...;---> QUIT IF LISTING "DELINQUENT" AND THIS PROCEDURE IS NOT DELINQ.
...I WVB="d" Q:$P(Y,U,13)'<DT!($P(Y,U,13)="")
...D STORE
;
;**************************
;---> WVA=0: ONE PATIENT
N WVIEN,Y S WVIEN=0
F S WVIEN=$O(^WV(790.4,"B",WVDFN,WVIEN)) Q:'WVIEN D
.S Y=^WV(790.4,WVIEN,0)
.;---> QUIT IF NOT WITHIN DATE RANGE.
.S WVDATE=$P(Y,U,2)
.Q:WVDATE'>WVBEGDT1!(WVDATE>WVENDDT1)
.;---> QUIT IF "QUEUED" AND THIS NOTIFICATION IS NOT QUEUED.
.I WVB="q" Q:'$P(Y,U,11) Q:'$D(^WV(790.4,"APRT",$P(Y,U,11),WVIEN))
.;---> QUIT IF "DELINQUENT" OR OPEN ONLY AND THIS ENTRY IS CLOSED.
.Q:"do"[WVB&($P(Y,U,14)="c")
.I WVB="d" Q:$P(Y,U,13)'<DT!($P(Y,U,13)="")
.D STORE
Q
;
STORE ;EP
;--->WVDATE IS ALREADY SET FROM LL SORT ABOVE. ;---> DATE
S WVCHRT=$$SSN^WVUTL1(WVDFN)_" " ;---> SSN#
S WVNAME=$$NAME^WVUTL1(WVDFN) ;---> NAME
S WVACC=$P(Y,U,6) ;---> ACCESSION#
I WVACC]"" S WVACC=$P(^WV(790.1,WVACC,0),U)
S WVSTAT=$$STATUS^WVUTL4 ;---> STATUS
S WVPRIO=9
S:$P(Y,U,4)]"" WVPRIO=$P(^WV(790.404,$P(Y,U,4),0),U,2) ;---> PRIORITY
;
S X=WVCHRT_U_WVNAME_U_WVDATE_U_WVACC_U_WVSTAT_U_WVPRIO_U_WVIEN
I WVC=1 S ^TMP("WV",$J,3,WVDATE,WVNAME,WVPRIO,WVIEN)=X Q
I WVC=2 S ^TMP("WV",$J,3,WVNAME,WVDATE,WVPRIO,WVIEN)=X Q
I WVC=3 S ^TMP("WV",$J,3,WVPRIO,WVDATE,WVNAME,WVIEN)=X
Q
;
COPYGBL ;EP
;---> COPY ^TMP("WV",$J,3 TO ^TMP("WV",$J,4 TO MAKE IT FLAT.
N I,M,N,P,Q
S N=0,I=0
F S N=$O(^TMP("WV",$J,3,N)) Q:N="" D
.S M=0
.F S M=$O(^TMP("WV",$J,3,N,M)) Q:M="" D
..S P=0
..F S P=$O(^TMP("WV",$J,3,N,M,P)) Q:P="" D
...S Q=0
...F S Q=$O(^TMP("WV",$J,3,N,M,P,Q)) Q:Q="" D
....S I=I+1,^TMP("WV",$J,4,I)=^TMP("WV",$J,3,N,M,P,Q)
Q
;
;
DEQUEUE ;EP
;---> TASKMAN QUEUE OF PRINTOUT.
D SETVARS^WVUTL5,SORT,COPYGBL,^WVBRNOT1,EXIT
Q
;
FOLLOW(WVDFN) ;EP
;---> CALLED FROM PROCEDURE FOLLOWUP MENU.
D SETVARS^WVUTL5
S WVA=0,WVB="o",WVBEGDT=(DT-50000),WVC=1,WVE=1,WVENDDT=DT
D DEVICE^WVBRNOT2 Q:WVPOP
S WVLOOP=1
D SORT,COPYGBL,^WVBRNOT1
Q

171
samples/M/ZDIOUT1.m Normal file

@@ -0,0 +1,171 @@
ZDIOUT1 ; Experimental FileMan file output to host file
;---------------------------------------------------------------------------
; Copyright 2011 The Open Source Electronic Health Record Agent
;
; Licensed under the Apache License, Version 2.0 (the "License");
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
; http://www.apache.org/licenses/LICENSE-2.0
;
; Unless required by applicable law or agreed to in writing, software
; distributed under the License is distributed on an "AS IS" BASIS,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
;---------------------------------------------------------------------------
N W "Experimental FileMan file output to host file",!
D ASKFILE Q:FILE["^"
D ASKDIR Q:DIR["^"
D SAVEFILE(FILE,DIR)
Q
SAVEFILE(FILE,DIR) ; Save FILE to given host directory
I '$$SLASH(DIR) Q
N FGR S FGR=$$FGR(FILE) Q:'$$CHECK(FGR,"Not a valid file number: "_FILE)
S IO=DIR_$P($E(FGR,2,$L(FGR)),"(")_"+"_$$FILENAME(FILE,FGR)_".txt"
W IO,!
C IO O IO:("WNS"):1 E U $P W "Cannot open """_IO_""" for write!",! Q
D FILE("",FILE,FGR)
C IO
Q
PRNFILE(FILE,IO) ; Print FILE, optionally to IO device
S:'$D(IO) IO=$P
N FGR S FGR=$$FGR(FILE) Q:'$$CHECK(FGR,"Not a valid file number: "_FILE)
D FILE("",FILE,FGR)
Q
PRNENTRY(FILE,I,IO) ; Print FILE record #I, optionally to IO device
S:'$D(IO) IO=$P
N FGR S FGR=$$FGR(FILE) Q:'$$CHECK(FGR,"Not a valid file number: "_FILE)
N DD D DDCR(FILE,.DD)
D ENTITY("",FILE,.DD,$$EGR(FGR,I))
Q
PRNDD(FILE,IO) ; Print DD for FILE, optionally to IO device
S:'$D(IO) IO=$P
; DD(FILE) is a file#0 whose entries define fields of FILE
N FGR S FGR=$NA(^DD(FILE))
I '$D(@FGR) W "Not a valid file number: "_FILE,! Q
D FILE("",0,FGR)
Q
;---------------------------------------------------------------------------
; Private implementation entry points below.
; References cite the VA FileMan 22.0 Programmer Manual.
;
ASKFILE ; Ask for file number
R !,"File#: ",FILE G:FILE="" ASKFILE Q:FILE["^" S FILE=+FILE
S FGR=$$FGR(FILE)
I '$$CHECK(FGR," (Not a valid file number)") G ASKFILE
W " ",$$FILENAME(FILE,FGR)
Q
ASKDIR ; Ask for host dir
R !,!,"Host output directory: ",DIR,! Q:DIR["^" G:'$$SLASH(DIR) ASKDIR
Q
SLASH(DIR) ; Validate trailing slash
I $E(DIR,$L(DIR))?1(1"/",1"\") Q 1
E U $P W "Output directory must end in a slash!" Q 0
FGR(FILE) ; Get FILE Global Root
Q $$ROOT^DILFD(FILE,"",1)
EGR(FGR,I) ; Get ENTRY Global Root
Q $NA(@FGR@(I))
CHECK(V,MSG) ; Validate non-empty value
I V="" W MSG,! Q 0
Q 1
DDCR(FILE,DD) ; X-ref global subscript location to DD field
; The DD field definition 0-node has ^-pieces "^^^S;P^" where
; "S;P" is the node Subscript and Piece within the node value (14.9.2).
N F S F="" F S F=$O(^DD(FILE,F)) Q:F="" D:+F
. N F4,S,P S F4=$P(^DD(FILE,F,0),"^",4),S=$P(F4,";",1),P=$P(F4,";",2) Q:S=" "
. S DD(S,F)=P ; Subscript S contains field F at piece P
Q
FILE(D,FILE,FGR) ; Write all entries in a file
; TODO: Sort entries by .01 or KEY to ensure consistent order
N DD D DDCR(FILE,.DD)
N I S I="" F S I=$O(@FGR@(I)) Q:I="" D
. I +I D
. . D ENTITY(D,FILE,.DD,$$EGR(FGR,I))
. E D ; TODO: Handle known non-entry subscripts such as "B"
. . D SUBS(D,$$EGR(FGR,I),I)
Q
WP(D,FGR) ; Write a word-processing value
; A word processing field is actually a file in which each entry has a
; .01 field containing the line of text, and the type of the field has "W".
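; Illustrative layout (hypothetical values): the header node @FGR@(0) is
; written first, then each line of text from @FGR@(1,0), @FGR@(2,0), ...,
; which is exactly what the loop below walks.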
U IO W D,";",$$VALUE(@FGR@(0)),! ; TODO: Preserve date from ^(0)
N I S I="" F S I=$O(@FGR@(I)) Q:I="" D:+I ; TODO: Other subscripts?
. U IO W D,$$VALUE(@FGR@(I,0)),!
U IO W D,";",!
Q
ENTITY(D,FILE,DD,EGR) ; Write a file entry
U IO W D,"ENTITY"_$C(9)_";;"_$$FILENAME(FILE,FGR)_"^"_$S(FILE=0:"",1:FILE)_" ;"_EGR,!
U IO W D_$C(9)_";",!
; Add key tag with field .01 value (14.9.2).
; TODO: Use indexing cross-references or KEY file entries for key tags?
; TODO: Escape key values, handle pointers?
U IO W D,"KA"_$C(9)_";;",$P(@EGR@(0),"^"),!
U IO W D_$C(9)_";",!
N S S S="" F S S=$O(@EGR@(S)) Q:S="" D ; Find DD fields at S.
. I $D(DD(S))<10 D ; TODO: Field defs like "DEL" not in ^DD(0)
. . D SUBS(D,$NA(@EGR@(S)),S)
. N F S F="" F S F=$O(DD(S,F)) Q:F="" D
. . D FIELD(D,FILE,F,$NA(@EGR@(S)),DD(S,F))
Q
;
SUBS(D,G,S) ; Write an extraneous subscript
U IO W D,"SUBS"_$C(9)_";;"_S,!
I $D(@G)#10 U IO W D_$C(9),$$VALUE(@G),!
I $D(@G)\10 U IO W D_$C(9),"; OMITTED CHILDREN",!
U IO W D_$C(9),";",!
Q
FIELD(D,FILE,F,EGRF,P) ; Write a field
; The DD field definition 0-node has ^-pieces "NAME^TYPE^" (14.9.2).
N FD S FD=^DD(FILE,F,0)
N NAME S NAME=$P(FD,"^",1)
N TYPE S TYPE=$P(FD,"^",2)
; TYPE starts with a subfile number if the field is a multiple (14.9.2)
N SUBFILE S SUBFILE=+TYPE
I SUBFILE D
. D FIELDSUB
E D
. D FIELDONE
Q
FIELDTAG ; Write tag for a field
U IO W D,"F"_$TR(F,".","P")_$C(9)_";;"_NAME_"^"_F_" ;"_TYPE,!
Q
FIELDSUB ; Write a multiple-valued field
D FIELDTAG
I $D(@EGRF)#10 U IO W D_$C(9),"; OMITTED SELF",!
; Word-processing values are files whose .01 field type has "W".
I $P($G(^DD(SUBFILE,.01,0)),"^",2)["W" D
. D WP(D_$C(9),EGRF)
E D
. D FILE(D_$C(9),SUBFILE,EGRF) U IO W D_$C(9),";",!
Q
FIELDONE ; Write a single-valued field
N V S V=$$FIELDVAL(EGRF,P) Q:V=""
N EV ; Some TYPEs have an external-format value
N T S T=TYPE
I T["F" S TYPE=TYPE_";"_"Free Text"
I T["N" S TYPE=TYPE_";"_"Numeric"
I T["K" S TYPE=TYPE_";"_"MUMPS Code"
I T["P" S TYPE=TYPE_";"_"Pointer",EV=1
I T["V" S TYPE=TYPE_";"_"Variable Pointer",EV=1
I T["S" S TYPE=TYPE_";"_"Set of Codes",EV=1
I T["D" S TYPE=TYPE_";"_"Date",EV=1
I $D(EV) S V=V_"^"_$$EXTERNAL^DILFD(FILE,F,"",V)
D FIELDTAG
U IO W D_$C(9),$$VALUE(V),!
I $D(@EGRF)\10 U IO W D_$C(9),"; OMITTED CHILDREN",!
U IO W D_$C(9),";",!
Q
FIELDVAL(EGRF,P) ; Extract piece P of node value holding field
I +P Q $P(@EGRF,"^",P)
I $E(P,1)="E" Q $E(@EGRF,$P($E(P,2,$L(P)),",",1),$P(P,",",2))
Q ";UNKNOWN ""GLOBAL SUBSCRIPT LOCATION"" PIECE """_P_""""
;
FILENAME(FILE,FGR) ; Lookup the name of given FILE# (or subfile#)
I FILE=0 Q $P(@FGR@(0),"^") ; DD
Q $O(^DD(FILE,0,"NM","")) ; TODO: Reliable? Any documented API?
VALUE(V) ; Write value line to output
; TODO: If value starts in one of " $ ; or contains non-printing
; characters then it must be escaped for evaluation on RHS of SET.
; TODO: Caller must define indentation level with a comment if
; the first character of the first value is a tab or space.
Q V

1864
samples/M/_zewdAPI.m Normal file

File diff suppressed because it is too large

256
samples/M/_zewdDemo.m Normal file

@@ -0,0 +1,256 @@
%zewdDemo ; Tutorial page functions
;
; Product: Enterprise Web Developer (Build 910)
; Build Date: Wed, 25 Apr 2012 17:59:25
;
;
; ----------------------------------------------------------------------------
; | Enterprise Web Developer for GT.M and m_apache |
; | Copyright (c) 2004-12 M/Gateway Developments Ltd, |
; | Reigate, Surrey UK. |
; | All rights reserved. |
; | |
; | http://www.mgateway.com |
; | Email: rtweed@mgateway.com |
; | |
; | This program is free software: you can redistribute it and/or modify |
; | it under the terms of the GNU Affero General Public License as |
; | published by the Free Software Foundation, either version 3 of the |
; | License, or (at your option) any later version. |
; | |
; | This program is distributed in the hope that it will be useful, |
; | but WITHOUT ANY WARRANTY; without even the implied warranty of |
; | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
; | GNU Affero General Public License for more details. |
; | |
; | You should have received a copy of the GNU Affero General Public License |
; | along with this program. If not, see <http://www.gnu.org/licenses/>. |
; ----------------------------------------------------------------------------
;
getLanguage(sessid)
;
s language=$$getRequestValue^%zewdAPI("ewd_Language",sessid)
d setSessionValue^%zewdAPI("ewd_Language",language,sessid)
d setSessionValue^%zewdAPI("url","www.mgateway.com",sessid)
d setSessionValue^%zewdAPI("imageTest",2,sessid)
d setSessionValue^%zewdAPI("tmp_testing",1234567,sessid)
QUIT ""
;
login(sessid)
;
n username,password
;
s username=$$getTextValue^%zewdAPI("username",sessid)
s password=$$getPasswordValue^%zewdAPI("password",sessid)
;d trace^%zewdAPI("in login - username="_username_" ; password="_password)
i username'="ROB" QUIT "invalid username"
i password'="ROB" QUIT "invalid password"
QUIT ""
;
logine(sessid)
;
n error,username,password,message,textid
;
s error=""
s message=$$errorMessage^%zewdAPI("invalid login attempt",sessid)
s username=$$getTextValue^%zewdAPI("username",sessid)
s password=$$getPasswordValue^%zewdAPI("password",sessid)
;
i '$d(^ewdDemo("tutorial","authentication")) d QUIT $$errorMessage^%zewdAPI(error,sessid)
. i username'="ROB" s error=message q
. i password'="ROB" s error=message q
;
i username="" QUIT message
i '$d(^ewdDemo("tutorial","authentication",username)) QUIT message
i password'=$p(^ewdDemo("tutorial","authentication",username),"~",1) QUIT message
QUIT ""
;
getUsernames(sessid) ;
;
n user
;
i '$d(^ewdDemo("tutorial","authentication")) d QUIT ""
. d clearList^%zewdAPI("user",sessid)
. d appendToList^%zewdAPI("user","Select a user..","nul",sessid)
. d appendToList^%zewdAPI("user","ROB","ROB",sessid)
;
s user=""
d clearList^%zewdAPI("user",sessid)
d appendToList^%zewdAPI("user","Select a user..","",sessid)
f s user=$o(^ewdDemo("tutorial","authentication",user)) q:user="" d
. d appendToList^%zewdAPI("user",user,user,sessid)
;
QUIT ""
;
addUsername(sessid)
;
n newUsername
;
s newUsername=$$getTextValue^%zewdAPI("newUsername",sessid)
i newUsername="" QUIT "You must enter a username"
i $d(^ewdDemo("tutorial","authentication",newUsername)) QUIT newUsername_" already exists"
d setTextValue^%zewdAPI("user",newUsername,sessid)
QUIT ""
;
testValue(sessid)
;
n user,pass
;
s user=$$getSelectValue^%zewdAPI("user",sessid)
;d trace^%zewdAPI("user="_user)
QUIT ""
;
getPassword(sessid)
;
n user,pass
;
s user=$$getSelectValue^%zewdAPI("user",sessid)
s pass=$g(^ewdDemo("tutorial","authentication",user))
s pass=$p(pass,"~",1)
i user="ROB",pass="" s pass="ROB"
d setTextValue^%zewdAPI("pass",pass,sessid)
QUIT ""
;
setPassword(sessid)
;
n user,pass
;
s user=$$getSelectValue^%zewdAPI("user",sessid)
s pass=$$getTextValue^%zewdAPI("pass",sessid)
i pass="" QUIT "You must enter a password"
s ^ewdDemo("tutorial","authentication",user)=pass
QUIT ""
;
getObjDetails(sessid)
i '$$sessionNameExists^%zewdAPI("person.username",sessid) d
. d setSessionValue^%zewdAPI("person.username","Rob",sessid)
. d setSessionValue^%zewdAPI("person.password","secret!",sessid)
. d setSessionValue^%zewdAPI("person.userType","g",sessid)
. d setCheckboxOn^%zewdAPI("person.permissions","w",sessid)
. d setCheckboxOn^%zewdAPI("person.permissions","e",sessid)
. d clearList^%zewdAPI("person.language",sessid)
. d appendToList^%zewdAPI("person.language","English","en",sessid)
. d appendToList^%zewdAPI("person.language","French","fr",sessid)
. d appendToList^%zewdAPI("person.language","German","d",sessid)
. d appendToList^%zewdAPI("person.language","Italian","it",sessid)
. d setMultipleSelectOn^%zewdAPI("person.language","en",sessid)
. d setMultipleSelectOn^%zewdAPI("person.language","d",sessid)
. d clearTextArea^%zewdAPI("person.comments",sessid)
. s textarea(1)="This is a line of text"
. s textarea(2)="This is the second line"
. d createTextArea^%zewdAPI("person.comments",.textarea,sessid)
. d setSessionValue^%zewdAPI("wld.%KEY.MGWLPN","EXTC",sessid)
QUIT ""
;
setObjDetails(sessid)
QUIT ""
;
getDetails(sessid)
;
n user,pass,data,expireDate,userType,selected,textarea,confirmText
;
;d trace^%zewdAPI("got here!!")
s browser=$$getServerValue^%zewdAPI("HTTP_USER_AGENT",sessid)
d setSessionValue^%zewdAPI("browser",browser,sessid)
s user=$$getTextValue^%zewdAPI("user",sessid)
s data=""
i user'="" s data=$g(^ewdDemo("tutorial","authentication",user))
;d trace^%zewdAPI("user="_user_" ; data="_data)
s pass=$p(data,"~",1)
i user="ROB",pass="" d QUIT ""
. d setTextValue^%zewdAPI("pass","ROB",sessid)
. d setRadioOn^%zewdAPI("userType","a",sessid)
. d initialiseCheckbox^%zewdAPI("permissions",sessid)
. d setCheckboxOn^%zewdAPI("permissions","w",sessid)
. d setCheckboxOn^%zewdAPI("permissions","e",sessid)
. d setCheckboxOn^%zewdAPI("permissions","s",sessid)
. d createLanguageList(sessid)
. d setMultipleSelectOn^%zewdAPI("language","en",sessid)
. d setMultipleSelectOn^%zewdAPI("language","d",sessid)
. d clearTextArea^%zewdAPI("comments",sessid)
. s textarea(1)="This is a line of text"
. s textarea(2)="This is the second line"
. d createTextArea^%zewdAPI("comments",.textarea,sessid)
;
d setTextValue^%zewdAPI("pass",pass,sessid)
;d trace^%zewdAPI("data="_data)
s userType=$p(data,"~",2)
i userType="" s userType="g"
d setRadioOn^%zewdAPI("userType",userType,sessid)
d initialiseCheckbox^%zewdAPI("permissions",sessid)
i user'="" m selected=^ewdDemo("tutorial","authentication",user,"permissions")
d setCheckboxValues^%zewdAPI("permissions",.selected,sessid)
d createLanguageList(sessid)
k selected
i user'="" m selected=^ewdDemo("tutorial","authentication",user,"language")
d setMultipleSelectValues^%zewdAPI("language",.selected,sessid)
d clearTextArea^%zewdAPI("comments",sessid)
i user'="" m textarea=^ewdDemo("tutorial","authentication",user,"comments")
d createTextArea^%zewdAPI("comments",.textarea,sessid)
;
QUIT ""
;
createLanguageList(sessid)
;
n attr
d clearList^%zewdAPI("language",sessid)
d appendToList^%zewdAPI("language","English","en",sessid)
s attr("style")="color:red"
d appendToList^%zewdAPI("language","French","fr",sessid,.attr)
d appendToList^%zewdAPI("language","German","d",sessid,.attr)
s attr("style")="color:green"
d appendToList^%zewdAPI("language","Italian","it",sessid,.attr)
s attr("style")="color:green"
d appendToList^%zewdAPI("language","Spanish","esp",sessid,.attr)
d appendToList^%zewdAPI("language","Portuguese","por",sessid)
d appendToList^%zewdAPI("language","Danish","den",sessid)
d appendToList^%zewdAPI("language","Swedish","swe",sessid)
d appendToList^%zewdAPI("language","Norwegian","nor",sessid)
d initialiseMultipleSelect^%zewdAPI("language",sessid)
QUIT
;
setDetails(sessid)
;
n error,expireDate,user,pass,userType,selected,comments,warning
;
s user=$$getTextValue^%zewdAPI("user",sessid)
s pass=$$getTextValue^%zewdAPI("pass",sessid)
i pass="" d QUIT "You must enter a password"
. d setFieldError^%zewdAPI("pass",sessid)
i pass="xxx" d setFieldError^%zewdAPI("testField",sessid) QUIT "test error"
s userType=$$getRadioValue^%zewdAPI("userType",sessid)
s ^ewdDemo("tutorial","authentication",user)=pass_"~"_userType
k ^ewdDemo("tutorial","authentication",user,"permissions")
d getCheckboxValues^%zewdAPI("permissions",.selected,sessid)
m ^ewdDemo("tutorial","authentication",user,"permissions")=selected
k ^ewdDemo("tutorial","authentication",user,"language")
k selected
d getMultipleSelectValues^%zewdAPI("language",.selected,sessid)
m ^ewdDemo("tutorial","authentication",user,"language")=selected
k ^ewdDemo("tutorial","authentication",user,"comments")
d getTextArea^%zewdAPI("comments",.comments,sessid)
m ^ewdDemo("tutorial","authentication",user,"comments")=comments
;s warning="Record successfully updated"
;d setWarning^%zewdAPI(warning,sessid)
;
QUIT ""
;
testAjaxForm(sessid)
;
i $$getRequestValue^%zewdAPI("testField1",sessid)="" d QUIT "field1 must not be null"
. d setFieldError^%zewdAPI("testField1",sessid)
i $$getRequestValue^%zewdAPI("testField2",sessid)="" d QUIT "javascript: x=1 ; document.getElementById('testField3').value = x"
. d setFieldError^%zewdAPI("testField2",sessid)
i $$getRequestValue^%zewdAPI("testField3",sessid)="" d QUIT "field3 must not be null"
. d setFieldError^%zewdAPI("testField3",sessid)
;
QUIT ""
;
getVersion() ;
QUIT $zv
;
getTime(sessid)
;
d setSessionValue^%zewdAPI("dateTime",$$inetDate^%zewdAPI($h),sessid)
QUIT ""
;

25
samples/M/arrays.m Normal file

@@ -0,0 +1,25 @@
start ; create student data
set student("name","first")="Linus"
set student("name","last")="Torvalds"
set student("language")="C"
set student("classes","monday")="Algebra"
set student("classes","tuesday")="Geometry"
set student("classes","wednesday")="English"
set student("classes","thursday")="French"
set student("classes","friday")="Jujitsu"
zwrite student
write $order(student)
write !,"Student array top level",!
set x=""
for do quit:x=""
. set x=$order(student(x))
. write x,!
write !,"Student classes ",!
set x=""
for do quit:x=""
. set x=$order(student("classes",x))
. write:x'="" x," : ",student("classes",x),!
quit

45
samples/M/base64.m Normal file

@@ -0,0 +1,45 @@
;
; This file is part of DataBallet.
; Copyright (C) 2012 Laurent Parenteau <laurent.parenteau@gmail.com>
;
; DataBallet is free software: you can redistribute it and/or modify
; it under the terms of the GNU Affero General Public License as
; published by the Free Software Foundation, either version 3 of the
; License, or (at your option) any later version.
;
; DataBallet is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU Affero General Public License for more details.
;
; You should have received a copy of the GNU Affero General Public License
; along with DataBallet. If not, see <http://www.gnu.org/licenses/>.
;
encode(message)
;
; Return base64 with URL and Filename safe alphabet (RFC 4648)
;
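; Usage sketch (illustrative; "TWFu" is the standard base64 of "Man"):
; W $$encode^base64("Man") should write TWFu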
new base64,todrop,i
; Populate safe alphabet values on first use only.
if '$data(base64safe) do
. for i=0:1:25 set base64safe(i)=$zchar(65+i),base64safe(i+26)=$zchar(97+i)
. for i=52:1:61 set base64safe(i)=$zchar(i-4)
. set base64safe(62)="-",base64safe(63)="_"
; Pad message with 0 to ensure number of bytes is a multiple of 3.
set todrop=0
for quit:($zlength(message)#3)=0 set message=message_$zchar(0) set todrop=todrop+1
; Base64 encode the message
set base64=""
for i=1:3:$zlength(message) do
. set base64=base64_base64safe($zascii(message,i)\4)
. set base64=base64_base64safe(($zascii(message,i)#4*16)+($zascii(message,i+1)\16))
. set base64=base64_base64safe(($zascii(message,i+1)#16*4)+($zascii(message,i+2)\64))
. set base64=base64_base64safe($zascii(message,i+2)#64)
set:todrop'=0 base64=$zextract(base64,1,$zlength(base64)-todrop)
quit base64

74
samples/M/digest.m Normal file

@@ -0,0 +1,74 @@
;
; GT.M Digest Extension
; Copyright (C) 2012 Piotr Koper <piotr.koper@gmail.com>
;
; This program is free software: you can redistribute it and/or modify
; it under the terms of the GNU Affero General Public License as
; published by the Free Software Foundation, either version 3 of the
; License, or (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU Affero General Public License for more details.
;
; You should have received a copy of the GNU Affero General Public License
; along with this program. If not, see <http://www.gnu.org/licenses/>.
;
; GT.M is a trademark of Fidelity Information Services, Inc.
; "GT.M is a vetted industrial strength, transaction processing application
; platform consisting of a key-value database engine optimized for extreme
; transaction processing throughput & business continuity."
; -- http://sourceforge.net/projects/fis-gtm/
; GT.M Digest Extension
;
; This simple OpenSSL based digest extension is a rewrite of OpenSSL
; EVP_DigestInit usage example with additional M wrapper.
; See http://www.openssl.org/docs/crypto/EVP_DigestInit.html for details.
;
; The return value from $&digest.init() is 0, usually when an invalid digest
; algorithm was specified. Anyway, properly used, it should never fail.
;
; Please feel free to contact me if you have any questions or comments,
; Piotr Koper <piotr.koper@gmail.com>
;
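; Usage sketch (assumes the C extension is built and its call-out table is
; configured so that $&digest.* resolves; algorithm availability depends on
; the local libcrypto):
; W $$sha1^digest("abc") ; should write a9993e364706816aba3e25717850c26c9cd0d89d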
digest(m,a) ; returns digest in ASCII HEX, all-in-one
n c,d
s c=$&digest.init(a)
d &digest.update(.c,.m)
d &digest.final(.c,.d)
q d
init(alg) ; returns context handler, for alg try "md5", "sha256", etc
; 0 is returned when an error occurs (e.g. unknown digest)
q $&digest.init(alg)
update(ctx,msg) ; updates digest (ctx) by message msg
d &digest.update(.ctx,.msg)
q
final(ctx,digest) ; returns hex encoded message digest in digest
; frees memory allocated for the ctx also
d &digest.final(.ctx,.digest)
q
; digest algorithms availability depends on libcrypto (OpenSSL) configuration
md4(m) q $$digest(.m,"md4")
md5(m) q $$digest(.m,"md5")
sha(m) q $$digest(.m,"sha")
sha1(m) q $$digest(.m,"sha1")
sha224(m) q $$digest(.m,"sha224")
sha256(m) q $$digest(.m,"sha256")
sha512(m) q $$digest(.m,"sha512")
dss1(m) q $$digest(.m,"dss1")
ripemd160(m) q $$digest(.m,"ripemd160")


@@ -0,0 +1,42 @@
;------------------------------------
; These first two routines illustrate
; the dynamic scope of variables in M
;------------------------------------
triangle1(x) ;;
set sum=0
for do quit:x'>1
. set sum=sum+x
. set x=x-1
quit sum
main1() ;;
set sum=1500
set x=6
write "sum before=",sum,!
set y=$$triangle1(x)
write "sum after=",sum,!
write "triangle of ",x," is ",y,!
quit
;------------------------------------
; These next two routines illustrate
; the use of the NEW command to make
; variables limited to the local scope
;------------------------------------
triangle2(x) ;;
new sum ; <-- HERE !!
set sum=0
for do quit:x'>1
. set sum=sum+x
. set x=x-1
quit sum
main2() ;;
set sum=1500
set x=6
write "sum before=",sum,!
set y=$$triangle2(x)
write "sum after=",sum,!
write "triangle of ",x," is ",y,!
quit

9
samples/M/fibonacci.m Normal file

@@ -0,0 +1,9 @@
start ; compute the Fibonacci series
set (a,b)=1
for i=1:1 do quit:term>100
. set term=a+b
. write !,term
. set a=b
. set b=term
write !,"Result= ",term,!
quit

19
samples/M/forloop.m Normal file

@@ -0,0 +1,19 @@
start1 ; entry label
set a=1
set b=20
set c=2
set sum=0
for i=a:c do quit:'(i<b)
. set sum=sum+i
. write i," : ",sum,!
quit
start2 ; entry label
set a=1
set b=20
set c=2
set sum=0
for i=a:c:b do
. set sum=sum+i
. write i," : ",sum,!
quit

19
samples/M/functions.m Normal file

@@ -0,0 +1,19 @@
; This function computes a factorial
factorial(n) ;;
new f
set f=n
for do quit:n'>1
. set n=n-1
. set f=f*n
. write n," : ",f,!
quit f
main() ;;
set x=5
set y=$$factorial(x)
write "Factorial of ",x," = ",y,!
quit

3
samples/M/helloworld.m Normal file

@@ -0,0 +1,3 @@
label1 ; This is a label
write "Hello World !",!
quit

35
samples/M/ifelse.m Normal file

@@ -0,0 +1,35 @@
if1 ; simple if statement
set a=5
set b=10
set c=25
if (a<b) set c=b
write c,!
quit
if2 ; if statements contrasted
set a=5
set b=10
if (a<b) write "variable a=",a," is smaller than b=",b,!
if (a>b) write "variable a=",a," is larger than b=",b,!
quit
if3 ; if statement with else clause
set a=5
set b=10
if (a<b) write "variable a=",a," is smaller than b=",b,!
else write "variable a=",a," is larger than b=",b,!
quit
if4 ; if statement with else clause and bodies
set a=5
set b=10
set c=10
if (a<b) do
. write "variable a=",a," is smaller than b=",b,!
. set c=c+a
else do
. write "variable a=",a," is larger than b=",b,!
. set c=c+b
write "c=",c,!
quit


@@ -0,0 +1,22 @@
start ; exercise
set ^car("make")="toyota"
set ^car("model")="corolla"
set ^car("mileage")="$$compute^mileage"
write !,"Regular computation",!
write "make = ",^car("make"),!
write "model = ",^car("model"),!
write "mileage = ",@^car("mileage")@(150,4),!
write !,"Pesimist computation",!
set ^car("mileage")="$$computepesimist^mileage"
write "make = ",^car("make"),!
write "model = ",^car("model"),!
write "mileage = ",@^car("mileage")@(150,4),!
write !,"Optimist computation",!
set ^car("mileage")="$$computeoptimist^mileage"
write "make = ",^car("make"),!
write "model = ",^car("model"),!
write "mileage = ",@^car("mileage")@(150,4),!

76
samples/M/md5.m Normal file

@@ -0,0 +1,76 @@
;
; MD5 Implementation in M
; Copyright (C) 2012 Piotr Koper <piotr.koper@gmail.com>
;
; This program is free software: you can redistribute it and/or modify
; it under the terms of the GNU Affero General Public License as
; published by the Free Software Foundation, either version 3 of the
; License, or (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU Affero General Public License for more details.
;
; You should have received a copy of the GNU Affero General Public License
; along with this program. If not, see <http://www.gnu.org/licenses/>.
;
; It works in GT.M with ZCHSET=M, but please, don't use it. It's only a joke.
; Serves well as a reverse engineering example on obtaining boolean functions
; from integer addition, modulo and division.
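; Quick check (illustrative; the expected value is the well-known MD5 of the
; empty string):
; W $$md5^md5("") ; should write d41d8cd98f00b204e9800998ecf8427e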
md5(msg)
; http://en.wikipedia.org/wiki/MD5
n m,r,k,h,i,j,a,b,c,d,f,g,w,t,p,q
s r(0)=7,r(1)=12,r(2)=17,r(3)=22,r(4)=7,r(5)=12,r(6)=17,r(7)=22,r(8)=7,r(9)=12,r(10)=17,r(11)=22,r(12)=7,r(13)=12,r(14)=17,r(15)=22,r(16)=5,r(17)=9,r(18)=14,r(19)=20,r(20)=5,r(21)=9,r(22)=14,r(23)=20,r(24)=5,r(25)=9,r(26)=14,r(27)=20,r(28)=5,r(29)=9,r(30)=14,r(31)=20,r(32)=4,r(33)=11,r(34)=16,r(35)=23,r(36)=4,r(37)=11,r(38)=16,r(39)=23,r(40)=4,r(41)=11,r(42)=16,r(43)=23,r(44)=4,r(45)=11,r(46)=16,r(47)=23,r(48)=6,r(49)=10,r(50)=15,r(51)=21,r(52)=6,r(53)=10,r(54)=15,r(55)=21,r(56)=6,r(57)=10,r(58)=15,r(59)=21,r(60)=6,r(61)=10,r(62)=15,r(63)=21
s k(0)=3614090360,k(1)=3905402710,k(2)=606105819,k(3)=3250441966,k(4)=4118548399,k(5)=1200080426,k(6)=2821735955,k(7)=4249261313,k(8)=1770035416,k(9)=2336552879,k(10)=4294925233,k(11)=2304563134,k(12)=1804603682,k(13)=4254626195,k(14)=2792965006,k(15)=1236535329,k(16)=4129170786,k(17)=3225465664,k(18)=643717713,k(19)=3921069994,k(20)=3593408605,k(21)=38016083,k(22)=3634488961,k(23)=3889429448,k(24)=568446438,k(25)=3275163606,k(26)=4107603335,k(27)=1163531501,k(28)=2850285829,k(29)=4243563512,k(30)=1735328473,k(31)=2368359562,k(32)=4294588738,k(33)=2272392833,k(34)=1839030562,k(35)=4259657740,k(36)=2763975236,k(37)=1272893353,k(38)=4139469664,k(39)=3200236656,k(40)=681279174,k(41)=3936430074,k(42)=3572445317,k(43)=76029189,k(44)=3654602809,k(45)=3873151461,k(46)=530742520,k(47)=3299628645,k(48)=4096336452,k(49)=1126891415,k(50)=2878612391,k(51)=4237533241,k(52)=1700485571,k(53)=2399980690,k(54)=4293915773,k(55)=2240044497,k(56)=1873313359,k(57)=4264355552,k(58)=2734768916,k(59)=1309151649,k(60)=4149444226,k(61)=3174756917,k(62)=718787259,k(63)=3951481745
s h(0)=1732584193,h(1)=4023233417,h(2)=2562383102,h(3)=271733878
s $p(m,$c(0),(55-$l(msg))#64+1)="",m=msg_$c(128)_m_$$n64($l(msg)*8),p=1,q=0
f q:q d
. f j=0:1:15 s w(j)=$$read(.m,.p)
. i w(0)<0 s q=1 q
. s a=h(0),b=h(1),c=h(2),d=h(3)
. f i=0:1:63 d
.. i i<16 d
... s f=$$or($$and(b,c),$$and($$not(b),d)),g=i
.. e i i<32 d
... s f=$$or($$and(d,b),$$and($$not(d),c)),g=(5*i+1)#16
.. e i i<48 d
... s f=$$xor($$xor(b,c),d),g=(3*i+5)#16
.. e s f=$$xor(c,$$or(b,$$not(d))),g=(7*i)#16
.. s t=d,d=c,c=b,b=(b+$$rotate((a+f+k(i)+w(g))#4294967296,r(i)))#4294967296,a=t
. s h(0)=(h(0)+a)#4294967296,h(1)=(h(1)+b)#4294967296,h(2)=(h(2)+c)#4294967296,h(3)=(h(3)+d)#4294967296
q $$n32h(h(0))_$$n32h(h(1))_$$n32h(h(2))_$$n32h(h(3))
not(a) ; 32bit
q 4294967295-a
xor(a,b) ; 32bit
n x,i s x=0 f i=1:1:32 s x=(x\2)+(((a+b)#2)*2147483648),a=a\2,b=b\2
q x
and(a,b) ; 32bit
n x,i s x=0 f i=1:1:32 s x=(x\2)+((((a#2)+(b#2))\2)*2147483648),a=a\2,b=b\2
q x
or(a,b) ; 32bit
q $$not($$and($$not(.a),$$not(.b)))
rotate(a,n) ; 32bit, rol
n c s c=a*(2**n)
q c#4294967296+(c\4294967296)
read(b,i)
n n,j s n=0 f j=3:-1:0 s n=256*n+$a($e(b,i+j))
s i=i+4
q n
n64(n)
n s,i f i=1:1:8 s $e(s,i)=$c(n#256),n=n\256
q s
n32h(n)
n h,s,i s h="0123456789abcdef" f i=1:2:8 s $e(s,i+1)=$e(h,n#16+1),n=n\16,$e(s,i)=$e(h,n#16+1),n=n\16
q s

9
samples/M/mileage.m Normal file

@@ -0,0 +1,9 @@
compute(miles,gallons)
quit miles/gallons
computepesimist(miles,gallons)
quit miles/(gallons+1)
computeoptimist(miles,gallons)
quit (miles+1)/gallons
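; Example: W $$compute^mileage(150,4) writes 37.5, which is how the
; indirection sample above invokes this routine via @^car("mileage")@(150,4).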

319
samples/M/mumtris.m Normal file

@@ -0,0 +1,319 @@
;
; Mumtris
; Copyright (C) 2012 Piotr Koper <piotr.koper@gmail.com>
;
; This program is free software: you can redistribute it and/or modify
; it under the terms of the GNU Affero General Public License as
; published by the Free Software Foundation, either version 3 of the
; License, or (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU Affero General Public License for more details.
;
; You should have received a copy of the GNU Affero General Public License
; along with this program. If not, see <http://www.gnu.org/licenses/>.
;
; Mumtris
; This is a tetris game in MUMPS, for GT.M, have fun.
;
; Resize your terminal (e.g. maximize your PuTTY window), restart GT.M so that
; it can report the true size of your terminal, and d ^mumtris.
;
; Try setting ansi=0 for GT.M compatible cursor positioning.
;
; NOTICE: Mumtris uses "active waiting" for making delays lower than 1s.
; That means that one of your CPUs will be used at 99%. It's not a bug;
; Mumtris and GT.M will remain fully responsive. Take care when
; running on a production system ;-)
;
mumtris
n ansi,e,n,w,h,gr,fl,hl,sc,lv,lc,sb,st,ml,dh,dw,mx,my,mt,r,y,x,t10m,c,ne,i,q
s ansi=1 ; use (faster) ANSI CSI instead of USE $P:X=x positioning
s w=10 ; matrix width
s h=22 ; matrix height (see below)
s gr=1 ; grid
s fl=1 ; fill
s hl=1 ; help
s sc=0 ; score
s lv=1 ; level
s lc=0 ; lines cleared at current level
s sb=70 ; step base
s st=$$step ; current step
s ml=3 ; move/rotate hold limit (without fall)
d dev ; defines dw, dh (device width, device height)
s h=dh-2 ; comment out to disable auto height
s mx=dw/2-(3*w/2) ; matrix left coordinate
s my=dh/2-(h/2)-1 ; matrix top coordinate
s mt="3 5_9 8 2_9 .2_02 /5 \2 2_ 2_2 6_/2 |8_|2_| 6_0 /2 \ /2 \|2 |2 \/5 \3 2_\_2 2_ \2 |/2 3_/0/4 Y4 \2 |2 /2 Y Y2 \2 |2 |2 | \/2 |\3_ \0\4_|2_2 /4_/|2_|_|2 /2_|2 |2_|2 |2_/4_2 >08 \/9 3 \/9 9 2 \/0" ; Mumtris
u $p:noecho
u $p:escape
d cls
d intro
d elements
s ne=$r(e)+1 ; new element
d change,new(),preview
d score(),help,redraw
s (i,q)=0
f q:q d
. d pos(0,0)
. s c=$$key
. i c=1 d exit s q=1 q
. s i=$s('c:0,1:i+1)
. s:i'<ml (i,c)=0
. i c'=3,$$fall d lock,clear,change,preview i $$new d over,exit s q=1 q ; short-circuit and in first if
. d redraw
q
key() ; 0 - timeout, 1 - exit, 2 - harddrop, 3 - other char
n q,c,d,ex,hd
s (q,d,ex,hd)=0
n i
n l s l=1
f q:q d
. r *c:0
. i c<0&'d d
.. f i=1:1:st*t10m r *c:0 q:c>-1 i $h
. i c<0 s q=1 q
. s d=2
. i c=27 d q:q
.. i $l($zb)=1 s (q,ex)=1 q
.. s c=$a($e($zb,3))
.. d:c=65 rotate
.. d:c=66 fall(1)
.. d:c=67 right
.. d:c=68 left
. i c=70!(c=102) s fl=fl+1#3 d preview
. s:c=71!(c=103) gr='gr
. i c=72!(c=104) s hl='hl d help
. d:c=73!(c=105) rotate
. d:c=74!(c=106) left
. d:c=75!(c=107) fall(1)
. d:c=76!(c=108) right
. s:c=81!(c=113) (q,ex)=1
. i c=32 d drop s hd=1
q $s(ex:1,hd:2,d:3,1:0)
redraw
d matrix
d stack
d draw(n,r,y,x)
q
ticks
n x,h,b,e,q
s h=$h,(b,e,q)=0 f i=1:1:1000000000 r *x:0 i h'=$h s h=$h d q:q
. i 'b s b=i
. e s e=i,q=1
s t10m=(e-b)\100
q
change
s n=ne
s ne=$r(e)+1
s x=0,y=0,r=1
q
new()
s r=1,x=w/2-2,y=1-e(n,r)
q:$q $$collision(r,y,x) q
drop
n i
s i=0 f q:$$fall s i=i+2
d score(i)
q
rotate
n k
s k=r#e(n)+1
q:$$collision(k,y,x)
s r=k
q
fall(k)
n c
i $$collision(r,y+1,x) q:$q 1 q
s y=y+1
d:$g(k) score(1)
q:$q 0 q
right q:$$collision(r,y,x+1) s x=x+1 q
left q:$$collision(r,y,x-1) s x=x-1 q
collision(r,y,x)
n i,j,q
s q=0
f i=1:1:4 q:q f j=1:1:4 q:q s:$g(e(n,r,j,i))&($g(n(y+j,x+i))!(y+j>h!(x+i>w!(x+i<1)))) q=1
q q
lock
n i,j
f i=1:1:4 q:q f j=1:1:4 q:q s:$g(e(n,r,j,i)) n(y+j,x+i)=1
q
clear
n c,i,j,q
s c=0
f j=h:-1:1 d
. s q=0
. f i=1:1:w i '$g(n(j,i)) s q=1 q
. q:q
. f i=j:-1:1 k n(i) m n(i)=n(i-1)
. s j=j+1,c=c+1
. d redraw
i c d
. d score($s(c=4:800,1:i*200-100*lv))
. s lc=lc+c
. i lv*10'>lc d score(,1) s lc=0
q
exit
n s
s s=mt_"09 Piotr Koper <piotr.koper@gmail.com>09 8 h2tps:2/github.com/pkoper"
d cls d write(.s,dh/2-3,dw/2-24) h 1 r *s:0 r *s:4
d cls u $p:echo
q
intro
n s
s s=mt_"9 9 8 Mumtris for GT.M0"
d cls h 1 d write(.s,dh/2-3,dw/2-24) h 1
d ticks
d cls
r s:0
q
cls
d pos(0,0,1)
q
pos(y,x,c)
i ansi d
. ; workaround for ANSI driver: NL in some safe place (1,1)
. w $c(27)_"[1;1f",!,$c(27)_"["_(y\1+1)_";"_(x\1+1)_"f"
. w:$g(c) $c(27)_"[2J"
e d
. u $p:(x=x:y=y)
. u:$g(c) $p:clearscreen
q
over
n s
s s="2 8_9 9 6 8_0 /2 5_/5_4 5_3 4_3 \5_2 \3_2 2_ 9_2_0/3 \2 3_\2_2 \2 /5 \_/ 2_ \3 /3 |3 \2 \/ 2/ 2_ \_2 2_ \0\4 \_\2 \/ 2_ \|2 Y Y2 \2 3_/2 /4 |4 \3 /\2 3_/|2 | \/0 \6_2 (4_2 /2_|_|2 /\3_2 > \7_2 /\_/2 \3_2 >2_|08 \/5 \/6 \/5 \/9 \/9 \/0"
d cls,write(.s,dh/2-3,dw/2-32) h 1 r *s:0 r *s:2
q
write(s,y,x)
n i,j,l,c,d
d pos(y,x)
s l=$l(s) f i=1:1:l d
. s c=$e(s,i)
. i c?1N d
.. i 'c s y=y+1 d pos(y,x) q
.. s d=$e(s,i+1) f j=1:1:c w d
.. s i=i+1
. e w c
d pos(0,0)
q
help
n i,x,l,j
s i=9 f x="MOVE: LEFT, RIGHT","TURN: UP","DROP: SPACE","","FILL: F","GRID: G","HELP: H","","QUIT: ESC, Q" d pos(dh/2-(h/2)+i,dw/2+(3*w/2+3)) d s i=i+1
. i hl w x
. e s l=$l(x) f j=1:1:l w " "
q
fill() q $s(fl=1:"[#]",fl=2:"[+]",1:"[ ]")
draw(n,r,y,x,o)
n i,j
s x=3*x+mx+1,y=y+my
f i=1:1:4 i y+i>my f j=1:1:4 d pos(y+i-1,3*(j-1)+x) w $s($g(e(n,r,i,j)):$$fill,$g(o):" ",1:"")
q
step() q 0.85**lv*sb+(0.1*lv)
score(s,l)
s:$g(s) sc=sc+s
i $g(l) s lv=lv+l,st=$$step
d pos(dh/2-(h/2)+2,dw/2+(3*w/2+3)) w "SCORE: ",sc
d pos(dh/2-(h/2)+3,dw/2+(3*w/2+3)) w "LEVEL: ",lv
q
preview
d draw(ne,1,4-e(ne,1),-5,1)
q
stack
n i,j,x,y
s x=mx+1,y=my
f i=1:1:h f j=1:1:w i $g(n(i,j)) d pos(y+i-1,3*(j-1)+x) w $$fill
q
matrix
n i,j
f i=0:1:h-1 d
. d pos(my+i,mx) w "|" f j=1:1:w w $s(gr:" . ",1:" ")
. w "|"
d pos(my+h,mx) w "|" f j=1:1:w*3 w "~"
w "|",!
q
dev
n x,i
zsh "d":x
s i="" f s i=$o(x("D",i)) q:i="" d:(x("D",i)[$p)
. s dw=$p($p(x("D",i),"WIDTH=",2)," ",1),dh=$p($p(x("D",i),"LENG=",2)," ",1)
q
elements
; e - elements
; e(elemId) - rotateVersions
; e(elemId,rotateVersion) - bottom coordinate
; e(elemId,rotateVersion,y,x) - point
;
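; e.g. the settings below give e(1,1,2,1)..e(1,1,2,4)=1: element 1,
; rotation 1 is the horizontal bar with all four points on row 2.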
s e=7
; ____
s e(1)=2,e(1,1)=2
s (e(1,1,2,1),e(1,1,2,2),e(1,1,2,3),e(1,1,2,4))=1
s (e(1,2,1,2),e(1,2,2,2),e(1,2,3,2),e(1,2,4,2))=1
; |__
s e(2)=4,e(2,1)=2
s (e(2,1,1,1),e(2,1,2,1),e(2,1,2,2),e(2,1,2,3))=1
s (e(2,2,1,2),e(2,2,1,3),e(2,2,2,2),e(2,2,3,2))=1
s (e(2,3,2,1),e(2,3,2,2),e(2,3,2,3),e(2,3,3,3))=1
s (e(2,4,1,2),e(2,4,2,2),e(2,4,3,1),e(2,4,3,2))=1
; __|
s e(3)=4,e(3,1)=2
s (e(3,1,1,3),e(3,1,2,1),e(3,1,2,2),e(3,1,2,3))=1
s (e(3,2,1,2),e(3,2,2,2),e(3,2,3,2),e(3,2,3,3))=1
s (e(3,3,2,1),e(3,3,2,2),e(3,3,2,3),e(3,3,3,1))=1
s (e(3,4,1,1),e(3,4,1,2),e(3,4,2,2),e(3,4,3,2))=1
; ||
s e(4)=1,e(4,1)=2
s (e(4,1,1,1),e(4,1,1,2),e(4,1,2,1),e(4,1,2,2))=1
; _-
s e(5)=2,e(5,1)=3
s (e(5,1,2,2),e(5,1,2,3),e(5,1,3,1),e(5,1,3,2))=1
s (e(5,2,1,2),e(5,2,2,2),e(5,2,2,3),e(5,2,3,3))=1
; _|_
s e(6)=4,e(6,1)=2
s (e(6,1,1,2),e(6,1,2,1),e(6,1,2,2),e(6,1,2,3))=1
s (e(6,2,1,2),e(6,2,2,2),e(6,2,2,3),e(6,2,3,2))=1
s (e(6,3,2,1),e(6,3,2,2),e(6,3,2,3),e(6,3,3,2))=1
s (e(6,4,1,2),e(6,4,2,1),e(6,4,2,2),e(6,4,3,2))=1
; -_
s e(7)=2,e(7,1)=3
s (e(7,1,2,1),e(7,1,2,2),e(7,1,3,2),e(7,1,3,3))=1
s (e(7,2,1,2),e(7,2,2,1),e(7,2,2,2),e(7,2,3,1))=1
q

17
samples/M/nesting.m Normal file

@@ -0,0 +1,17 @@
start1 ; entry label
set ax=1
set bx=20
set cx=2
set ay=1
set by=20
set cy=2
set sumx=0
set sqrx=0
set sumxy=0
for x=ax:cx:bx do
. set sumx=sumx+x
. set sqrx=sqrx+(x*x)
. for y=ay:cy:by do
.. set sumxy=sumxy+(x*y)
.. if (sumxy<100) do
... write sumxy,!

511
samples/M/pcre.m Normal file

@@ -0,0 +1,511 @@
;
; GT.M PCRE Extension
; Copyright (C) 2012 Piotr Koper <piotr.koper@gmail.com>
;
; This program is free software: you can redistribute it and/or modify
; it under the terms of the GNU Affero General Public License as
; published by the Free Software Foundation, either version 3 of the
; License, or (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU Affero General Public License for more details.
;
; You should have received a copy of the GNU Affero General Public License
; along with this program. If not, see <http://www.gnu.org/licenses/>.
;
; GT.M is a trademark of Fidelity Information Services, Inc.
; "GT.M is a vetted industrial strength, transaction processing application
; platform consisting of a key-value database engine optimized for extreme
; transaction processing throughput & business continuity."
; -- http://sourceforge.net/projects/fis-gtm/
; GT.M PCRE Extension
;
; This extension tries to deliver the best possible PCRE interface for the
; M world by providing support for PCRE with M arrays, stringified parameter
; names, a simplified API, locales, exceptions and Perl5 Global Match.
;
; See pcreexamples.m for comprehensive examples of ^pcre routine usage and
; beginner-level tips on match limits, exception handling and UTF-8 in GT.M.
;
; Try out the best known book on regular expressions: http://regex.info/
; For more information on PCRE see: http://pcre.org/
;
; Please feel free to contact me if you have any questions or comments,
; Piotr Koper <piotr.koper@gmail.com>
;
pcre ;GT.M PCRE Extension
;1.0;Initial release;pkoper
q
version()
q $&pcre.version()
config(name)
; name is one of: (case insensitive)
; "UTF8", "NEWLINE", "LINK_SIZE", "POSIX_MALLOC_THRESHOLD",
; "MATCH_LIMIT", "MATCH_LIMIT_RECURSION", "STACKRECURSE",
; "BSR", "UNICODE_PROPERTIES", "JIT", "JITTARGET"
;
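; Usage sketch (illustrative): w $$config^pcre("JIT") writes 1 when the
; linked PCRE library was built with JIT support, 0 otherwise.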
d protect
;
n erropt,isstring,s,n,code
s code=$&pcre.config(.name,.erropt,.isstring,.s,.n)
s:code $ec=",U"_(-code)_","
q $s(isstring:s,1:n)
compile(pattern,options,locale,mlimit,reclimit)
; options is case insensitive and optional string with "|" joined:
; "ANCHORED", "CASELESS", "DOLLAR_ENDONLY", "DOTALL", "EXTENDED",
; "FIRSTLINE", "MULTILINE", "NO_AUTO_CAPTURE", "DUPNAMES",
; "UNGREEDY", "BSR_ANYCRLF", "BSR_UNICODE", "JAVASCRIPT_COMPAT",
; "NL_ANY", "NL_ANYCRLF", "NL_CR", "NL_CRLF","NL_LF",
; "UTF8", "UCP", "NO_UTF8_CHECK"
;
; locale is an optional Unix locale name used for pcre_maketables(),
; cases:
; undefined or "":
; pcre_maketables() will not be called
; "ENV" (case insensitive):
; use locale in program environment defined by the
; environment variables LANG or LC_*
; specified:
; "pl_PL.iso88592", "pl_PL.utf8", "C", ...
; see locale(1), locale(2) and the output of command:
; $ locale -a
; Debian tip: use
; $ dpkg-reconfigure locales
; to enable or set system-wide locale
;
; mlimit (optional) limits the number of internal matching function
; calls in pcre_exec() execution, see PCRE manual for details
;
; reclimit (optional) limit for the depth of recursion when calling
; the internal matching function in a pcre_exec() execution,
; see PCRE manual for details
;
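; Usage sketch (pattern, options and locale are arbitrary examples):
; s ref=$$compile^pcre("^(?<word>\w+)","CASELESS|UTF8","ENV")
; the returned ref is later passed to exec^pcre and released with free^pcre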
d protect
;
n erropt,ref,err,erroffset,code
s code=$&pcre.compile(.pattern,$g(options),.erropt,.ref,.err,.erroffset,$g(locale),$g(mlimit,0),$g(reclimit,0))
s:code $ec=",U"_(-code)_","
q ref
exec(ref,subject,options,startoffset,length)
; options is case insensitive and optional string with "|" joined:
; "ANCHORED", "BSR_ANYCRLF", "BSR_UNICODE",
; "NL_ANY", "NL_ANYCRLF", "NL_CR", "NL_CRLF", "NL_LF",
; "NOTBOL", "NOTEOL", "NOTEMPTY", "NOTEMPTY_ATSTART",
; "NO_START_OPTIMIZE", "NO_UTF8_CHECK",
; "PARTIAL_SOFT", "PARTIAL_HARD"
;
; startoffset is in octets, starts with 1 (like in M) (optional)
;
; length is subject length in octets, not chars (optional)
;
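; Usage sketch (continuing the compile^pcre example above; values are
; illustrative): s hits=$$exec^pcre(ref,"Some Words")
; a positive hits means a match; offsets are then read via ovector^pcre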
d protect
;
n erropt,code,start
s start=$g(startoffset,1)-1
s code=$&pcre.exec(.ref,.subject,$g(length,$zl(subject)),start,$g(options),.erropt)
s:code<0 $ec=",U"_(-code)_","
q code
ovector(ref,i) ; return i-element from ovector
d protect
;
n n,code
s code=$&pcre.ovector(.ref,.i,.n)
s:code $ec=",U"_(-code)_","
;s $ec=",U123,"
q n
ovecsize(ref) ; return ovecsize
d protect
;
n n,code
s code=$&pcre.ovecsize(.ref,.n)
s:code $ec=",U"_(-code)_","
q n
fullinfo(ref,name)
; name is one of: (case insensitive)
; "OPTIONS", "SIZE", "CAPTURECOUNT", "BACKREFMAX", "FIRSTBYTE",
; "FIRSTTABLE", "LASTLITERAL", "NAMEENTRYSIZE", "NAMECOUNT",
; "STUDYSIZE", "OKPARTIAL", "JCHANGED", "HASCRORLF", "MINLENGTH",
; "JIT", "JITSIZE"
; for NAME* options see also $$nametable^pcre()
;
d protect
;
n erropt,isstring,s,n,code
s code=$&pcre.fullinfo(.ref,.name,.erropt,.isstring,.s,.n)
s:code $ec=",U"_(-code)_","
q $s(isstring:s,1:n)
nametable(ref,i,n) ; returns index (n) and name, or { 0, "" } for invalid i
; i is indexed from 1
;
d protect
;
n s,code
s code=$&pcre.nametable(.ref,.i,.n,.s)
s:code $ec=",U"_(-code)_","
q s
substring(ref,i,begin,end)
s begin=$$ovector(.ref,i*2)+1,end=$$ovector(.ref,i*2+1)
; ovector contains octet indexed data not UNICODE chars, so $ze is used
q:'begin ""
q $s($g(o,0):begin_","_end,1:$ze(subject,begin,end))
store(ref,i,n,o,key) ; same as above but stores captured data in n array
n begin,end
s begin=$$ovector(.ref,i*2)+1,end=$$ovector(.ref,i*2+1)
q:'begin
s key=$g(key,i)
s:o n(key,0)=begin,n(key,1)=end
s n(key)=$ze(subject,begin,end)
q
gstore(ref,i,n,round,byref,o,key) ; store for global match
n begin,end
s begin=$$ovector(.ref,i*2)+1,end=$$ovector(.ref,i*2+1)
q:'begin
s key=$g(key,i)
i byref d
. s:o n(key,round,0)=begin,n(key,round,1)=end
. s n(key,round)=$ze(subject,begin,end)
e d
. s:o n(round,key,0)=begin,n(round,key,1)=end
. s n(round,key)=$ze(subject,begin,end)
q
test(subject,pattern,options,locale,mlimit,reclimit)
; see $$compile^pcre for options, locale, mlimit and reclimit
;
d protect
n ref,l
s ref=$$compile(.pattern,$g(options),$g(locale),$g(mlimit,0),$g(reclimit,0))
s l=$$exec(.ref,.subject)
d free(.ref)
q l
match(subject,pattern,match,capture,options,locale,mlimit,reclimit)
; see $$compile^pcre for options, locale, mlimit and reclimit
;
; capture is case insensitive and optional string with "|" joined
; names or indexes to be captured
;
; extended options:
; "NAMED_ONLY" - capture only named groups
; "OVECTOR" - return additional ovector data
;
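; Usage sketch (pattern and subject are arbitrary; named captures land in
; the match array under their group names):
; i $$match^pcre("2013-06-10","(?<y>\d+)-(?<m>\d+)-(?<d>\d+)",.m) w m("y")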
d protect
;
n namedonly,ovector,ref,o,l,i,j,s,c,begin
;
s options=$g(options),(namedonly,ovector)=0
f i=1:1:$l(options,"|") d
. s o=$zco($p(options,"|",i),"u")
. i o="NAMED_ONLY" s namedonly=1,$p(options,"|",i)=""
. i o="OVECTOR" s ovector=1,$p(options,"|",i)=""
s:namedonly options=options_"|NO_AUTO_CAPTURE"
;
k match
s ref=$$compile(.pattern,.options,$g(locale),$g(mlimit,0),$g(reclimit,0))
s l=$$exec(.ref,.subject)
i $d(capture) d
. s c="|"_capture_"|"
. ; ovector indexed data
. i 'namedonly f i=0:1:l-1 d:c[("|"_i_"|") store(.ref,.i,.match,.ovector)
. ; named matches data
. f i=1:1 s s=$$nametable(.ref,.i,.j) q:s="" d:c[("|"_s_"|") store(.ref,.j,.match,.ovector,.s)
e d
. i 'namedonly f i=0:1:l-1 d store(.ref,.i,.match,.ovector)
. f i=1:1 s s=$$nametable(.ref,.i,.j) q:s="" d store(.ref,.j,.match,.ovector,.s)
d free(.ref)
q:$q l q
global(subject,pattern,match,capture,options,locale,mlimit,reclimit)
; options is the same as for match^pcre, extended options:
; "OVECTOR" - return additional ovector data
; "GROUPED" - group the result in match array by pattern groups
; "NAMED_ONLY" - capture only named patterns
;
; see pcredemo.c and pcreccp.cc from PCRE for comments on the procedure
; for Perl-like global matching
;
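; Usage sketch (illustrative): s rounds=$$global^pcre("a1 b22 c333","(\d+)",.m)
; rounds should be 3, with m(round,1) holding "1", "22" and "333"
; (m(round,0) holds the whole match for each round)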
d protect
;
n ref,c,o,ovector,byref,namedonly,utf8,crlf,start,end,matches,empty,skip,round,i,j,s,n,q
k match
;
; determine additional options and remove them before calling the compile^pcre
s options=$g(options),(ovector,byref,namedonly)=0
f i=1:1:$l(options,"|") d
. s o=$zco($p(options,"|",i),"u")
. i o="NAMED_ONLY" s namedonly=1,$p(options,"|",i)=""
. i o="GROUPED" s byref=1,$p(options,"|",i)=""
. i o="OVECTOR" s ovector=1,$p(options,"|",i)=""
s:namedonly options=options_"|NO_AUTO_CAPTURE"
;
; compile the pattern
s ref=$$compile(.pattern,.options,$g(locale),$g(mlimit,0),$g(reclimit,0))
;
s:$d(capture) c="|"_capture_"|"
s byref=$g(byref,0)
;
; check pattern options for UTF8 and double char new line
s o="|"_$$fullinfo(.ref,"OPTIONS")_"|"
s utf8=$s(o["|UTF8|":1,1:0)
s crlf=$s(o["|NL_CRLF|":1,o["|NL_ANY|":1,o["|NL_ANYCRLF|":1,1:0)
;
; if none check the PCRE build options
i crlf=0 d
. s o=$$config("NEWLINE")
. s crlf=$s(o="NL_CRLF":1,o="NL_ANY":1,o="NL_ANYCRLF":1,1:0)
;
s (start,round,i)=1,(empty,skip,q)=0
s end=$l(subject)+1
f d q:start>end!q
. i empty d
.. s matches=$$exec(.ref,.subject,"NOTEMPTY_ATSTART|ANCHORED",.start) ; unwind this call to optimize
.. q:matches ; quit this do, leave empty=1, store the matches
..
.. ; advance if no match & clear empty
.. s start=start+1
.. i start>end s q=1 q
..
.. ; skip LF if CR was before and CRLF mode
.. s:crlf&(($ze(subject,start-1)=$c(13))&($ze(subject,start)=$c(10))) start=start+1
..
.. ; skip if in a middle of UTF char
.. i utf8 f q:start'<end!($zbitand($c(0)_$ze(subject,start),$c(0)_$c(192))=$c(0)_$c(128)) s start=start+1
..
.. ; take into account skipped chars
.. s skip=1,empty=0
. e d
.. s matches=$$exec(.ref,.subject,,.start)
.. i 'matches s q=1 q
.
. q:q
. i skip s skip=0 q
.
. i $d(c) d
.. ; ovector indexed data
.. i 'namedonly f i=0:1:matches-1 d:c[("|"_i_"|") gstore(.ref,.i,.match,.round,.byref,.ovector)
.. ; named matches data
.. f i=1:1 s s=$$nametable(.ref,.i,.n) q:s="" d:c[("|"_s_"|") gstore(.ref,.n,.match,.round,.byref,.ovector,.s)
. e d
.. i 'namedonly f i=0:1:matches-1 d gstore(.ref,.i,.match,.round,.byref,.ovector)
.. f i=1:1 s s=$$nametable(.ref,.i,.n) q:s="" d gstore(.ref,.n,.match,.round,.byref,.ovector,.s)
. s round=round+1
.
. s start=$$ovector(.ref,1)+1
. s empty=(($$ovector(.ref,0)+1)=start)
d free(.ref)
q:$q round-1 q
replace(subject,pattern,subst,first,last,options,locale,mlimit,reclimit)
; see $$match^pcre and $$compile^pcre for options, locale, mlimit and
; reclimit
;
; subst is the string that replaces all occurrences of matched data
; \n (like \1, \2, ..) is a back ref for the n-th captured group
; \{name} is a back ref for named captured data
; \\ is replaced with \
;
; first is the n-th match in the subject where the substitution begins,
; 1 .. n-1 matches are not substituted
; defaults to 1
;
; last is the n-th match in the subject where the substitution ends,
; n+1 .. matches are not substituted
; defaults to 0 (no limit)
;
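; Usage sketch (illustrative): w $$replace^pcre("hello world","(\w+) (\w+)","\2 \1")
; should write "world hello"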
n ref,o,n,i,j,begin,end,offset,backref,boffset,value,s
s ref=$$compile(.pattern,,$g(locale),$g(mlimit,0),$g(reclimit,0))
;
; prepare back reference stack
d global^pcre(.subst,"\\(?:(?<ref>(?:\d+|\\))|{(?<ref>[^}]+)})",.backref,,"ovector|dupnames")
;
s options=$g(options)_"|ovector"
; silently remove "NAMED_ONLY" and "GROUPED" options
f i=1:1:$l(options,"|") d
. s o=$zco($p(options,"|",i),"u")
. s:o="NAMED_ONLY"!(o="GROUPED") $p(options,"|",i)=""
q:'$$global(.subject,.pattern,.n,,.options,$g(locale),$g(mlimit,0),$g(reclimit,0)) subject
;
; perform the substitution on matched subject parts
s first=$g(first,1),last=$g(last,0)
s offset=0,i=""
f s i=$o(n(i)) q:i="" d:i'<first q:last>0&(i'<last)
.
. ; replace back refs in subst (s) with captured data
. s s=subst,boffset=0,j=""
. f s j=$o(backref(j)) q:j="" d
..
.. ; determine the back ref type and get the value
.. ; silently ignore invalid refs
.. s value=$s(backref(j,"ref")="\":"\\",1:$g(n(i,backref(j,"ref"))))
..
.. ; replace back ref with the value
.. s begin=backref(j,0,0)
.. s end=backref(j,0,1)
.. s $ze(s,begin+boffset,end+boffset)=value
.. s boffset=boffset-(end+1-begin)+$l(value)
.
. ; replace matched data with prepared s
. s begin=n(i,0,0)
. s end=n(i,0,1)
.
. s $ze(subject,begin+offset,end+offset)=s
.
. ; substitute empty matches also (Perl style)
. ;
. ; perl -e '$_ = "aa"; s/(b*|a)/Xy/g; print "$_\n"'
. ; w $$replace^pcre("aa","(b*|a)","Xy")
. ;
. ; perl -e '$_ = "aa"; s/(b*|aa)/Xy/g; print "$_\n"'
. ; w $$replace^pcre("aa","(b*|aa)","Xy")
. ;
. ; perl -e '$_ = "aaa"; s/(b*|aa)/Xy/g; print "$_\n"'
. ; w $$replace^pcre("aaa","(b*|aa)","Xy")
. ;
. s:begin>end $ze(subject,begin+offset,begin+offset+1)=s_$ze(subject,begin+offset,begin+offset+1)
.
. s offset=offset-(end+1-begin)+$l(s)
q:$q subject q
free(ref)
d protect
n code
s code=$&pcre.free(.ref)
s:code $ec=",U"_(-code)_","
q
stackusage()
; return the approximate amount of stack (in bytes) used per
; recursion in pcre_exec()
q -$&pcre.stackusage()
; Exception Handling
;
; Error conditions are handled by setting the $zc to user codes, see labels
; at the end of this file. When neither $zt nor $et are set by the user,
; the default handler (trap^pcre) is used within $zt mechanism.
;
; The default handler will write out the details of the exception, and
; depending on the caller type, it will re raise the exception. This will
; lead to:
; a) writing the exception details, when called from the GT.M prompt,
; b) writing the exception details, the M code place when the pcre routine
; was called, and terminating the GT.M image.
;
; The user should define their own exception handler using $zt or $et, see
; pcreexamples.m for example exception handlers.
;
protect ; try setup $zt with default handler
;
; "n protect" in the $zt is a marker for trap^pcre
s:'$l($et)&(($zt="B")!'$l($zt)) $zt="n protect d trap zg "_($zl-2)
q
trap(stack)
; see U* labels at the bottom of this file, some lvns are mandatory
; all exceptions are passed through if we weren't called from direct mode
;
n zl,ref,msg,place
;
; take the $zl if in default handler setup by protect^trap
s zl=$p($zt,"n protect d trap zg ",2)
;
; clear the $zt
s $zt=""
;
; source location from either stack argument, zl (default handler), or $st-2
s place=$st($g(stack,$g(zl,$st-1)-1),"PLACE")
;
; clear location if called from direct mode
s:place["^GTM$DMOD" place=""
;
s ref=$p($ec,",",$l($ec,",")-1)
i $l($t(@ref)) d
. u $p
. w @$p($t(@ref),";",2)
. ; %PCRE-E-COMPILE additional message
. w:ref="U16392"&$g(erroffset) " in "_$e($g(pattern),1,erroffset)_" <-- HERE"
. w !
. ; write the location if it has any meaning
. w:$l(place) "%PCRE-I-RTSLOC, At M source location ",place,!
e d
. w $p($zs,",",3,4),!
. w "%GTM-I-RTSLOC, At M source location ",$p($zs,",",2),!
;
; re raise the exception if in a default handler and not called from the direct mode
s:$l(place)&$g(zl,0) $ec=$ec
q
; XC API specific
;
U16384 ;"%PCRE-E-ARGSMALL, Actual argument count is too small"
U16385 ;"%PCRE-E-OPTNAME, Unknown option name "_$p($g(erropt),"|")
U16386 ;"%PCRE-E-OBJLIMIT, Maximum number of objects exceeded"
U16387 ;"%PCRE-E-INVREF, Invalid object reference"
U16388 ;"%PCRE-E-INTBUF, Internal buffer too small"
U16389 ;"%PCRE-E-MALLOC, Could not allocate memory"
U16390 ;"%PCRE-E-STUDY, Pattern study failed: "_$g(err,"unknown reason")
U16391 ;"%PCRE-E-LOCALE, Invalid locale name "_$g(locale)
U16392 ;"%PCRE-E-COMPILE, Pattern compilation failed, "_$g(err,"unknown reason")
U16393 ;"%PCRE-E-LENGTH, Invalid length value specified"
; PCRE specific
;
; NOTES:
;
; the U16401 exception is never raised; when pcre_exec() returns -1
; (i.e. NOMATCH) pcre.exec returns 0, so no exception will ever be
; raised, since NOMATCH is not an uncommon situation
;
; U16388 is raised when pcre_exec() returns 0, i.e. the ovector
; was too small; since the ovector size is not controlled from the
; M world, it is treated as an exception here
;
U16401 ;"%PCRE-E-NOMATCH, The subject string did not match the pattern"
U16402 ;"%PCRE-E-NULL, Either compiled code or subject was passed as NULL, or ovector was NULL"
U16403 ;"%PCRE-E-BADOPTION, An unrecognized bit was set in the options argument"
U16404 ;"%PCRE-E-BADMAGIC, The magic number is not present in compiled code"
U16405 ;"%PCRE-E-UNKNOWNOPCODE, While running the pattern match, an unknown item was encountered in the compiled pattern"
U16406 ;"%PCRE-E-NOMEMORY, Call via pcre_malloc() or pcre_stack_malloc() failed"
U16407 ;"%PCRE-E-NOSUBSTRING, No substring"
U16408 ;"%PCRE-E-MATCHLIMIT, Match limit was reached"
U16409 ;"%PCRE-E-CALLOUT, Callout function wanted to yield a distinctive error code"
U16410 ;"%PCRE-E-BADUTF8, A string that contains an invalid UTF-8 byte sequence was passed as a subject"
U16411 ;"%PCRE-E-BADUTF8OFFSET, The value of startoffset did not point to the beginning of a UTF-8 character or the end of the subject"
U16412 ;"%PCRE-E-PARTIAL, The subject string did not match, but it did match partially"
U16414 ;"%PCRE-E-INTERNAL, An unexpected internal error has occurred"
U16415 ;"%PCRE-E-BADCOUNT, The value of the ovecsize argument is negative"
U16416 ;"%PCRE-E-DFAUITEM, Unsupported item in the pattern, e.g. \C or a back reference"
U16417 ;"%PCRE-E-DFAUCOND, Unsupported condition item, e.g. a back reference for a condition, or a test for recursion in a specific group"
U16418 ;"%PCRE-E-DFAUMLIMIT, Match limits are unsupported for DFA matching"
U16419 ;"%PCRE-E-DFAWSSIZE, Out of space in the workspace vector"
U16420 ;"%PCRE-E-DFARECURSE, The output vector was not large enough while processing recursive subpattern"
U16421 ;"%PCRE-E-RECURSIONLIMIT, The internal recursion limit was reached"
U16423 ;"%PCRE-E-BADNEWLINE, An invalid combination of NL_xxx options was given"
U16424 ;"%PCRE-E-BADOFFSET, The startoffset was negative or greater than the length of the subject"
U16425 ;"%PCRE-E-SHORTUTF8, The subject string ends with a truncated UTF-8 character and the PCRE_PARTIAL_HARD option is set"
U16426 ;"%PCRE-E-RECURSELOOP, A recursion loop within the pattern was detected"
U16427 ;"%PCRE-E-JITSTACKLIMIT, The memory available for the just-in-time processing stack is not large enough"

622
samples/M/pcreexamples.m Normal file
View File

@@ -0,0 +1,622 @@
;
; GT.M PCRE Extension
; Copyright (C) 2012 Piotr Koper <piotr.koper@gmail.com>
;
; This program is free software: you can redistribute it and/or modify
; it under the terms of the GNU Affero General Public License as
; published by the Free Software Foundation, either version 3 of the
; License, or (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU Affero General Public License for more details.
;
; You should have received a copy of the GNU Affero General Public License
; along with this program. If not, see <http://www.gnu.org/licenses/>.
;
; GT.M PCRE Extension Examples
;
; see pcre.m for comments on routines parameters and all possible values for
; the options
;
pcreexamples ;GT.M PCRE Extension Examples
;1.0;Initial release;pkoper
d routines
q
; GT.M PCRE Extension API
; The shining examples
;
test
; Test the subject for the match
w $$test^pcre("The quick brown fox jumps over the lazy dog","fox"),!
w $$test^pcre("The quick brown fox jumps over the lazy dog","FoX","caseless"),!
q
match
n n
; Simple match
w $$match^pcre("The quick brown fox jumps over the lazy dog"," (\w+) ",.n),! zwr
; Match with named groups
w $$match^pcre("The quick brown fox jumps over the lazy dog","(?<first>\w+) (?<second>\w+)",.n),! zwr
; Match with a named group and limit the output to only the "second"
w $$match^pcre("The quick brown fox jumps over the lazy dog","(?<first>\w+) (?<second>\w+)",.n,"second"),! zwr
; Match with named groups, capturing only the named patterns
w $$match^pcre("The quick brown fox jumps over the lazy dog","(?<first>\w+) (?<second>\w+)",.n,,"named_only"),! zwr
q
global
n n
; Global match
w $$global^pcre("The quick brown fox jumps over the lazy dog","(\w+)",.n),! zwr
; Global match with named groups
w $$global^pcre("The quick brown fox jumps over the lazy dog","(?<first>\w+)\s+(?<second>\w+)",.n),! zwr
; Global match with grouped captured data
w $$global^pcre("The quick brown fox jumps over the lazy dog","(?<first>\w+)\s+(?<second>\w+)",.n,,"grouped"),! zwr
; Global match with grouped captured data and only named patterns
w $$global^pcre("The quick brown fox jumps over the lazy dog","(?<first>\w+)\s+(?<second>\w+)",.n,,"grouped|named_only"),! zwr
q
replace
; Just the replace
w $$replace^pcre("The quick brown fox jumps over the lazy dog","brown","yellow"),!
; Change the word order
w $$replace^pcre("The quick brown fox jumps over the lazy dog","(\w+)\s+(\w+)","\2 \1"),!
; Change the word order with named groups
w $$replace^pcre("The quick brown fox jumps over the lazy dog","(?<first>\w+)\s+(?<second>\w+)","\{second} \{first}"),!
; Escape the \ sequence
w $$replace^pcre("The quick brown fox jumps over the lazy dog","(?<first>\w+)\s+(?<second>\w+)","\{second} \\{first}"),!
; More \ chars
w $$replace^pcre("The quick brown fox jumps over the lazy dog","(?<first>\w+)\s+(?<second>\w+)","\\\{second} \\\\{first}"),!
q
; PCRE API
; Low level PCRE API examples
;
api
n subject,pattern,options,offset,ref,count,i,begin,end,s,name,n
; Setup exception trap as in myexception2^pcreexamples
s $zt="d trap^pcre("_$st_") zg "_$zl_":apitrap^pcreexamples"
s subject="The quick brown fox "_$c(10)_"jumps over the lazy dog"
s pattern="(?<all>(.*?(?<red>F\S+).*?)(?<high>\w+))"
; options are case insensitive, as well as all stringified option
; names for all functions in this extension
s options="CASELESS|multiLINE|NL_CrLf|NO_AUTO_CAPTURE|dotall"
s offset=5 ; start the match with "quick"
; Compile the pattern
s ref=$$compile^pcre(.pattern,.options) ; pass by the reference
; Run the match
s count=$$exec^pcre(.ref,.subject,,.offset)
w "count: ",count,!
; To access the ovector array $$ovector^pcre and $$ovecsize^pcre can
; be used.
;
; the ovector array size is always (n + 1) * 3, where n is the number
; of possible capture strings in the pattern submitted to
; $$compile^pcre(). The exact number of usable pairs of integers in
; the ovector array is returned by $$exec^pcre().
;
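; As a quick sanity check of the formula above: a pattern with 3
; capturing groups gives an ovecsize of (3 + 1) * 3 = 12.
;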
w "ovecsize: ",$$ovecsize^pcre(.ref),!
; Get the captured data the old way
f i=0:1:count-1 d
. s begin=$$ovector^pcre(.ref,i*2)+1
. s end=$$ovector^pcre(.ref,i*2+1)
. s s=$ze(subject,begin,end)
. w i,": ",s,!
; See what's in the nametable
;
; $$nametable^pcre returns the name of the i-th nametable entry; the
; corresponding index into the ovector array is returned by reference
; in n.
;
f i=1:1 s name=$$nametable^pcre(.ref,.i,.n) q:name="" d
. s begin=$$ovector^pcre(.ref,n*2)+1 ; the returned subject index in n
. s end=$$ovector^pcre(.ref,n*2+1)
. s s=$ze(subject,begin,end)
. w name,": ",s,!
; Use $$substring^pcre() to get the captured string instead of playing
; with $$ovector^pcre().
f i=0:1:count-1 w i,": ",$$substring^pcre(.ref,.i),!
; .. and get the begin and the end index of the captured data in the
; subject, as a side effect.
f i=0:1:count-1 d
. w i,": ",$$substring^pcre(.ref,.i,.begin,.end),!
. w "begin: ",begin,!
. w "end: ",end,!
; Get some details on compiled pattern
w "options: ",$$fullinfo^pcre(.ref,"OPTIONS"),!
w "capture count: ",$$fullinfo^pcre(.ref,"CAPTURECOUNT"),!
w "jit: ",$$fullinfo^pcre(.ref,"JIT"),!
w "min length: ",$$fullinfo^pcre(.ref,"MINLENGTH"),!
; Free the data internally allocated for the PCRE structures
;
d free^pcre(.ref)
; Finally, raise an example exception
;
; see "Exception Handler Examples"
;
w $t(api+4^pcreexamples),!
w $$compile^pcre("aa)bb"),!
w "should never be written, the %PCRE-E-COMPILE should be raised",!
q
apitrap
w "apitrap^pcreexamples",!
q
; Perl5 Global Match Compatibility
;
; Global match, as with the /g switch on Perl5 regular expressions, is supported.
;
; See $$global^pcre and $$replace^pcre examples.
; Compatibility Case: Empty Matches
;
; Global Match
;
p5global
w "$ perl -e '$_ = ""aa""; print ""1: $1\n"" while /(b*|aa)/mg'",!
zsy "perl -e ""\$_ = \""aa\""; print \""1: \$1\n\"" while /(b*|aa)/mg"""
d global^pcre("aa","b*|aa",.n) zwr
q
; Global Replace
;
p5replace
w "$ perl -e '$_ = ""aa""; s/(b*|a)/Xy/g; print ""$_\n""'",!
zsy "perl -e ""\$_ = \""aa\""; s/(b*|a)/Xy/g; print \""\$_\n\"""""
w $$replace^pcre("aa","(b*|a)","Xy"),!
w "$ perl -e '$_ = ""aa""; s/(b*|aa)/Xy/g; print ""$_\n""'",!
zsy "perl -e ""\$_ = \""aa\""; s/(b*|aa)/Xy/g; print \""\$_\n\"""""
w $$replace^pcre("aa","(b*|aa)","Xy"),!
w "$ perl -e '$_ = ""aaa""; s/(b*|aa)/Xy/g; print ""$_\n""'",!
zsy "perl -e ""\$_ = \""aaa\""; s/(b*|aa)/Xy/g; print \""\$_\n\"""""
w $$replace^pcre("aaa","(b*|aa)","Xy"),!
q
; Compatibility Case: New Line Characters
;
; Multi-line with LF
;
p5lf
w "perl -e '$_ = ""aa\nbb""; print ""1: $1\n"" while /(.*)/mg'",!
zsy "perl -e ""\$_ = \""aa\nbb\""; print \""1: \$1\n\"" while /(.*)/mg"""
d global^pcre("aa"_$c(10)_"bb",".*",.n,,"multiline|nl_lf") zwr
q
; Various New Line Specs
;
p5nl
d global^pcre("aa"_$c(13)_$c(10)_"bb",".*",.n,,"multiline|nl_lf") zwr
d global^pcre("aa"_$c(13)_$c(10)_"bb",".*",.n,,"multiline|nl_cr") zwr
d global^pcre("aa"_$c(13)_$c(10)_"bb",".*",.n,,"multiline|nl_crlf") zwr
q
; PCRE library version
;
version
w $$version^pcre,!
q
; PCRE compile time defaults
;
newline
w $$config^pcre("NEWLINE"),!
q
utf8support
w $$config^pcre("UTF8"),!
q
; Stack Usage
;
; PCRE's stack usage discover procedure
;
stackusage
w $$stackusage^pcre,!
q
; Locale Support Examples
;
; The Polish language is used here as an example of I18N support in PCRE.
;
; The example word "dąb" (encoded here in UTF-8) is an "oak" in Polish.
;
; The second letter in "dąb" is <aogonek> (I18N) which is:
; $c(177) in ISO8859-2,
; $c(261) in UTF-8,
; see http://en.wikipedia.org/wiki/Polish_code_pages for complete listing
;
; Note on $CHAR(n) in different GT.M character modes:
;
; In UTF-8 mode $c(177) will return a two-octet encoded UTF-8 char,
; which is probably not the expected result when working with
; single-octet ISO encoded chars.
;
; Use $zch(177) to create a single-octet ISO char, but be prepared for
; %GTM-E-BADCHAR errors. Also the result of $l(), $a() and others might
; not be what is expected.
;
; Locale: C or POSIX (i.e. no localization)
;
nolocale
w $zchset,!
w $$match^pcre("d"_$zch(177)_"b","\w{3}",.n,,,),! zwr
q
; Locale: ISO
;
isolocale
w $zchset,!
w $$match^pcre("d"_$zch(177)_"b","\w{3}",.n,,,"pl_PL"),! zwr
q
; Locale: UTF-8
;
utflocale
; M and UTF-8 mode
w $$match^pcre("d"_$zch(196)_$zch(133)_"b","\w{3}",.n,,"UTF8|UCP","pl_PL.UTF8"),! zwr
; UTF-8 mode only
w $$match^pcre("d"_$c(261)_"b","\w{3}",.n,,"UTF8|UCP","pl_PL.UTF8"),! zwr
q
; Locale: environment ($LANG, $LC_CTYPE)
;
; Set the GT.M environment for LANG="pl_PL" or LANG="pl_PL.UTF8" to obtain
; different results.
;
envlocale
w $ztrnlnm("LANG"),!
w $ztrnlnm("LC_CTYPE"),!
w $$match^pcre("d"_$c(177)_"b","\w{3}",.n,,,"env"),! zwr
w $$match^pcre("d"_$zch(196)_$zch(133)_"b","\w{3}",.n,,"UTF8|UCP","pl_PL.UTF8"),! zwr
q
; Notes on GT.M in UTF-8
;
; Enabling native support for UTF-8 in GT.M requires:
; 1) libicu
; 2) environment:
; gtm_chset=UTF-8
; gtm_icu_version=4.8
; 3) recompiled object files for UTF-8
;
;
; Instructions for UTF-8 in Debian 6
;
; 1) Install libicu (libicu48)
; $ apt-get install libicu48
; 2) append environment setup to GT.M's user .bash_profile
; export gtm_chset=UTF-8
; export gtm_icu_version=4.8
; 3) remove *.o files from the GT.M installation directory
; $ rm /opt/gtm/*.o
; 4) allow GT.M's user to write new object files
; $ chown gtm /opt/gtm
;
;
; Startup errors in UTF-8 mode
;
; %GTM-E-INVOBJ, Cannot ZLINK object file due to unexpected format
; %GTM-I-TEXT, Object compiled with CHSET=M which is different from $ZCHSET
;
; The above errors are written by GT.M at startup when the environment
; is set up correctly for UTF-8, but GT.M cannot use the already existing
; object files for execution, because they were compiled for the M charset.
; Remove all of GT.M's object files as in step 3) of the "Instructions for
; UTF-8 in Debian 6" above.
;
; Match Limits
;
; PCRE has built-in limits on internal matching and recursion.
;
; These limits prevent the PCRE engine from very long runs, especially
; when there is no match and all possible paths in the match
; tree must be checked.
;
; Functions using $$compile^pcre, and $$compile^pcre itself, allow
; setting MATCH_LIMIT and MATCH_LIMIT_RECURSION via the optional
; arguments named mlimit and reclimit:
;
; $$compile^pcre(pattern,options,locale,mlimit,reclimit)
; $$match^pcre(subject,pattern,match,capture,options,locale,mlimit,reclimit)
; $$global^pcre(subject,pattern,match,capture,options,locale,mlimit,reclimit)
; $$replace^pcre(subject,pattern,subst,first,last,options,locale,mlimit,reclimit)
;
; If mlimit or reclimit is not specified, the PCRE library
; compile-time defaults are used.
;
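; For example, an enforced mlimit is passed as the seventh argument of
; $$match^pcre (this is the same call used by enforcedlimit below):
;
; w $$match^pcre("aaaaaa","(.)(.)",.n,,,,2),!
;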
limits
w "Compile time (default) MATCH_LIMIT is: ",$$config^pcre("MATCH_LIMIT"),!
w "Compile time (default) MATCH_LIMIT_RECURSION is: ",$$config^pcre("MATCH_LIMIT"),!
q
; Example pattern with a very long run time
;
longrun
w $$match^pcre("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","(\D+|<\d+>)*[!?]",.n),! zwr
w "should never be written, the %PCRE-E-MATCHLIMIT should be raised",!
q
; Equal to the longrun^pcreexamples, but corrected pattern
;
shortrun
w $$match^pcre("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa","((?>\D+)|<\d+>)*[!?]",.n),! zwr
q
; Enforced mlimit
;
enforcedlimit
w $$match^pcre("aaaaaa","(.)(.)",.n,,,,2),! zwr
w "should never be written, the %PCRE-E-MATCHLIMIT should be raised",!
q
; Exception Handling
;
; Error conditions are handled by setting $ec to user codes, see the labels at
; the end of pcre.m. When neither $zt nor $et are set by the user, the
; default handler (trap^pcre) is used within the $zt mechanism.
;
; The default handler will write out the details of the exception, and
; depending on the caller type, it will re-raise the exception. This will lead
; to:
; a) writing the exception details, when called from the GT.M prompt,
; b) writing the exception details and the M source location where the pcre
; routine was called, then terminating the GT.M image, when called from M code.
;
; The user should define their own exception handler using $zt or $et, see
; the "Exception Handler Examples" below.
;
; Exception Handler Examples
;
; No handler
;
nohandler
s ($ec,$et,$zt)=""
s x=$$compile^pcre("a)b")
w "will never be written",!
q
;
; GTM>d nohandler^pcreexamples
; %PCRE-E-COMPILE, Pattern compilation failed, unmatched parentheses in a <-- HERE
; %PCRE-I-RTSLOC, At M source location nohandler+2^pcreexamples
; %GTM-E-SETECODE, Non-empty value assigned to $ECODE (user-defined error trap)
; %GTM-I-RTSLOC, At M source location trap+32^pcre
; $ (GT.M image has been terminated)
;
; Simple handler
;
myexception1
s $zt="zg "_$zl_":mytrap1^pcreexamples"
s x=$$compile^pcre("a)b")
w "will never be written",!
q
mytrap1
w "it's a trap",!
w $ec,!
s $ec=""
q
;
; GTM>d myexception1^pcreexamples
; it's a trap
; ,U16392,
; GTM>
;
; Simple handler with pcre exception details
;
myexception2
s $zt="d trap^pcre("_$st_") zg "_$zl_":mytrap2^pcreexamples"
s x=$$compile^pcre("a)b")
w "will never be written",!
q
mytrap2
w "it's a trap",!
w $ec,!
s $ec=""
q
;
; GTM>d myexception2^pcreexamples
; %PCRE-E-COMPILE, Pattern compilation failed, unmatched parentheses in a <-- HERE
; %PCRE-I-RTSLOC, At M source location myexception2+2^pcreexamples
; it's a trap
; ,U16392,
; GTM>
;
; In this example trap^pcre is called with an optional argument (the level
; of the M execution stack), for which trap^pcre will produce the
; %PCRE-I-RTSLOC details.
;
; DETAILS:
; The trap^pcre is executed in the stack frame where the error condition
; occurred, which gives the trap^pcre routine access to local
; variables like locale (locale name) or err (PCRE error message).
; The following zg command drops stack frames up to the current frame
; (the frame where the s $zt=.. is used), and executes the mytrap label,
; where locale and err are not available.
;
; Simple handler with limited pcre exception details
;
myexception3
s $zt="zg "_$zl_":mytrap3^pcreexamples"
s x=$$compile^pcre("a)b")
w "will never be written",!
q
mytrap3
d trap^pcre($st)
w "it's a trap",!
w $ec,!
s $ec=""
q
;
; GTM>d myexception3^pcreexamples
; %PCRE-E-COMPILE, Pattern compilation failed, unknown reason
; %PCRE-I-RTSLOC, At M source location myexception3+2^pcreexamples
; it's a trap
; ,U16392,
; GTM>
;
; DETAILS:
; The trap^pcre is executed in the stack frame where compile^pcre
; was called. The deeper stack frames have already been dropped by the
; zg command, so the err local variable is not available in this
; context. That's why trap^pcre doesn't know the exact reason why
; %PCRE-E-COMPILE was raised.
;
; Note on $st() and repeated exceptions
;
; The $st() function returns information associated with $ec codes in
; a stack-like manner. That means that once $ec has been set at the n-th
; execution level, any future exceptions at that level won't change
; the $st() output for that level unless $ec is cleared.
;
; Always clear $ec when the exception handling is done.
;
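; A typical last step in a handler (as in mytrap1/mytrap2/mytrap3 above):
;
; w $ec,! s $ec=""
;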
; Execute all of the routines in this file
;
routines
w ">> test^pcreexamples",!
d test^pcreexamples
w !,">> match^pcreexamples",!
d match^pcreexamples
w !,">> global^pcreexamples",!
d global^pcreexamples
w !,">> replace^pcreexamples",!
d replace^pcreexamples
w !,">> p5global^pcreexamples",!
d p5global^pcreexamples
w !,">> p5replace^pcreexamples",!
d p5replace^pcreexamples
w !,">> p5lf^pcreexamples",!
d p5lf^pcreexamples
w !,">> p5nl^pcreexamples",!
d p5nl^pcreexamples
w !,">> version^pcreexamples",!
d version^pcreexamples
w !,">> newline^pcreexamples",!
d newline^pcreexamples
w !,">> utf8support^pcreexamples",!
d utf8support^pcreexamples
w !,">> stackusage^pcreexamples",!
d stackusage^pcreexamples
w !,">> nolocale^pcreexamples",!
d nolocale^pcreexamples
w !,">> isolocale^pcreexamples",!
d isolocale^pcreexamples
w !,">> utflocale^pcreexamples",!
d utflocale^pcreexamples
w !,">> envlocale^pcreexamples",!
d envlocale^pcreexamples
w !,">> limits^pcreexamples",!
d limits^pcreexamples
w !,">> longrun^pcreexamples",!
w "(skipped, uncomment to raise the exception)",!
; d longrun^pcreexamples
w !,">> shortrun^pcreexamples",!
d shortrun^pcreexamples
w !,">> enforcedlimit^pcreexamples",!
w "(skipped, uncomment to raise the exception)",!
; d enforcedlimit^pcreexamples
w !,">> nohandler^pcreexamples",!
w "(skipped, uncomment to raise the exception)",!
; d nohandler^pcreexamples
w !,">> myexception1^pcreexamples",!
d myexception1^pcreexamples
w !,">> myexception2^pcreexamples",!
d myexception2^pcreexamples
w !,">> myexception3^pcreexamples",!
d myexception3^pcreexamples
q

View File

@@ -0,0 +1,46 @@
;
; M code examples contrasting postconditionals with IF-commands
;
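; (an argumented IF sets $TEST, while a postconditional leaves $TEST
; unchanged, which is what the writes of $TEST below are meant to show)
;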
post1 ; postconditional in set command
set a=5
set b=10
set c=25
I 0 ;purposely set $TEST to false
write "$TEST special variable (before post-condition)=",$TEST
set:(a<b) c=b
write "$TEST special variable (after post-condition) =",$TEST
write "c =",c,!
quit
;
post2 ; postconditional in write command
set a=5
set b=10
I 0 ;purposely set $TEST to false
write "$TEST special variable (before post-condition)=",$TEST
write:(a<b) "variable a=",a," is smaller than b=",b,!
write "$TEST special variable (after post-condition) =",$TEST
write:(a>b) "variable a=",a," is larger than b=",b,!
write "$TEST special variable (after post-condition) =",$TEST
quit
;
if1 ; if command
set a=5
set b=10
set c=25
I 0 ;purposely set $TEST to false
write "$TEST special variable (before IF)=",$TEST
if (a<b) set c=b
write "$TEST special variable (after IF) =",$TEST
write c,!
quit
;
if2 ; if command with write command
set a=5
set b=10
I 0 ;purposely set $TEST to false
write "$TEST special variable (before IF)=",$TEST
if (a<b) write "variable a=",a," is smaller than b=",b,!
write "$TEST special variable (after IF) =",$TEST
if (a>b) write "variable a=",a," is larger than b=",b,!
write "$TEST special variable (after IF) =",$TEST
quit

6
samples/M/primes.m Normal file
View File

@@ -0,0 +1,6 @@
; part of Keith Lynch's .signature; it prints a table of primes,
; including code to format it neatly into columns -- DPBS
; -- M Technology and MUMPS Language FAQ, Part 1/2
;
f p=2,3:2 s q=1 x "f f=3:2 q:f*f>p!'q s q=p#f" w:q p,?$x\8+1*8
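;
; reading the one-liner: the outer loop runs p over 2 and then the odd
; numbers from 3; q starts at 1 and the XECUTEd inner loop sets q=p#f
; (p modulo f) for f=3,5,7,... until f*f>p or q drops to 0; p is written
; only when q is still nonzero (i.e. p is prime), and ?$x\8+1*8 tabs to
; the next multiple of 8 columns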

49
samples/M/url.m Normal file
View File

@@ -0,0 +1,49 @@
;
; This file is part of DataBallet.
; Copyright (C) 2012 Laurent Parenteau <laurent.parenteau@gmail.com>
;
; DataBallet is free software: you can redistribute it and/or modify
; it under the terms of the GNU Affero General Public License as published by
; the Free Software Foundation, either version 3 of the License, or
; (at your option) any later version.
;
; DataBallet is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU Affero General Public License for more details.
;
; You should have received a copy of the GNU Affero General Public License
; along with DataBallet. If not, see <http://www.gnu.org/licenses/>.
;
decode(val)
;
; Decode a URL-encoded string
;
new decoded,c,i
set decoded=""
for i=1:1:$zlength(val) do
. set c=$zextract(val,i,i)
. if c="+" set decoded=decoded_" "
. else if c'="%" set decoded=decoded_c
. else set decoded=decoded_$zchar($$FUNC^%HD($zextract(val,i+1,i+2))) set i=i+2
quit decoded
encode(val)
;
; Encode a string for URL usage
;
new encoded,c,i
set encoded=""
; Populate the safe char table only on the first call
if '$data(safechar) for i=45,46,95,126,48:1:57,65:1:90,97:1:122 set safechar($zchar(i))=""
for i=1:1:$zlength(val) do
. set c=$zextract(val,i,i)
. if $data(safechar(c)) set encoded=encoded_c
. else if c=" " set encoded=encoded_"+"
. else set encoded=encoded_"%"_$$FUNC^%DH($zascii(c),2)
quit encoded
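;
; A minimal usage sketch, assuming the routine is linked as url:
;
; w $$encode^url("a b+c"),!   ; prints a+b%2Bc
; w $$decode^url("a+b%2Bc"),! ; prints a b+c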

1949
samples/M/zmwire.m Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,12 @@
x_0=linspace(0,100,101);
vx_0=linspace(0,100,101);
z=zeros(101,101);
for i=1:101
for j=1:101
z(i,j)=x_0(i)*vx_0(j);
end
end
figure
pcolor(x_0,vx_0,z)
shading flat

149
samples/Matlab/FTLEH.m Normal file
View File

@@ -0,0 +1,149 @@
tic
clear all
%% Choice of the mass parameter
mu=0.1;
%% Computation of Lagrangian Points
[xl1,yl1,xl2,yl2,xl3,yl3,xl4,yl4,xl5,yl5] = Lagr(mu);
%% Computation of initial total energy
E_L1=-Omega(xl1,yl1,mu);
E=E_L1+0.03715; % Offset as in figure 2.2 "LCS in the ER3BP"
%% Initial conditions range
x_0_min=-0.8;
x_0_max=-0.2;
vx_0_min=-2;
vx_0_max=2;
y_0=0;
% Elements for grid definition
n=200;
% Dimensionless integrating time
T=2;
% Grid initializing
[x_0,vx_0]=ndgrid(linspace(x_0_min,x_0_max,n),linspace(vx_0_min,vx_0_max,n));
vy_0=sqrt(2*E+2*Omega(x_0,y_0,mu)-vx_0.^2);
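% (vy_0 follows from the energy relation E = (vx^2+vy^2)/2 - Omega(x,y);
% grid points where the square root is not real are filtered out below)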
% Kinetic energy computation
E_cin=E+Omega(x_0,y_0,mu);
%% Transforming into Hamiltonian variables
px_0=vx_0-y_0;
py_0=vy_0+x_0;
% Initializing
x_T=zeros(n,n);
y_T=zeros(n,n);
px_T=zeros(n,n);
py_T=zeros(n,n);
filtro=ones(n,n);
E_T=zeros(n,n);
a=zeros(n,n); % matrix of numbers of integration steps for each integration
np=0; % number of integrated points
fprintf(' with n = %i\n',n)
%% Energy tolerance setting
energy_tol=inf;
%% Computation of the Jacobian of the system
options=odeset('Jacobian',@cr3bp_jac);
%% Parallel integration of equations of motion
parfor i=1:n
for j=1:n
if E_cin(i,j)>0 && isreal(vy_0(i,j)) % Check for real velocity and positive Kinetic energy
[t,Y]=ode45(@fH,[0 T],[x_0(i,j); y_0; px_0(i,j); py_0(i,j)],options);
% Try to obtain the name of the solver for later use
% sol=ode45(@f,[0 T],[x_0(i,j); y_0; vx_0(i,j); vy_0(i,j)],options);
% Y=sol.y';
% solver=sol.solver;
a(i,j)=length(Y);
%Saving solutions
x_T(i,j)=Y(a(i,j),1);
px_T(i,j)=Y(a(i,j),3);
y_T(i,j)=Y(a(i,j),2);
py_T(i,j)=Y(a(i,j),4);
%Computation of final total energy and difference with
%initial one
E_T(i,j)=EnergyH(x_T(i,j),y_T(i,j),px_T(i,j),py_T(i,j),mu);
delta_E=abs(E_T(i,j)-E);
if delta_E > energy_tol; %Check of total energy conservation
fprintf(' Ouch! Wrong Integration: i,j=(%i,%i)\n E_T=%.2f \n delta_E=%.2f\n\n',i,j,E_T(i,j),delta_E);
filtro(i,j)=2; %Saving position of the point
end
np=np+1;
else
filtro(i,j)=0; % 1=interesting point; 0=non-sense point; 2= bad integration point
end
end
end
t_integrazione=toc;
fprintf(' n = %i\n',n)
fprintf(' energy_tol = %.2f\n',energy_tol)
fprintf('total points \t%i\n',n^2)
fprintf('points integrated \t%i\n',np)
fprintf('time to integrate \t%.2f s\n',t_integrazione)
%% Back to Lagrangian variables
vx_T=px_T+y_T;
vy_T=py_T-x_T;
%% FTLE Computation
fprintf('now computing FTLE\n')
tic
dphi=zeros(2,2);
ftle=zeros(n-2,n-2);
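% The loop below approximates the flow-map gradient dphi on the (x,vx)
% grid with central differences; the FTLE is then 1/(2*T) times the log
% of the largest eigenvalue of the Cauchy-Green tensor dphi'*dphi, i.e.
% (1/T) times the log of the largest singular value of dphi.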
for i=2:n-1
for j=2:n-1
if filtro(i,j) && ... % Check for interesting point
filtro(i,j-1) && ...
filtro(i,j+1) && ...
filtro(i-1,j) && ...
filtro(i+1,j)
dphi(1,1)=(x_T(i-1,j)-x_T(i+1,j))/(x_0(i-1,j)-x_0(i+1,j));
dphi(1,2)=(x_T(i,j-1)-x_T(i,j+1))/(vx_0(i,j-1)-vx_0(i,j+1));
dphi(2,1)=(vx_T(i-1,j)-vx_T(i+1,j))/(x_0(i-1,j)-x_0(i+1,j));
dphi(2,2)=(vx_T(i,j-1)-vx_T(i,j+1))/(vx_0(i,j-1)-vx_0(i,j+1));
if filtro(i,j)==2 % Manual setting to visualize bad integrated points
ftle(i-1,j-1)=-Inf;
else
ftle(i-1,j-1)=1/(2*T)*log(max(abs(eig(dphi'*dphi))));
end
end
end
end
%% Plotting results
% figure
% plot(t,Y)
% figure
% plot(Y(:,1),Y(:,2))
% figure
xx=linspace(x_0_min,x_0_max,n);
vvx=linspace(vx_0_min,vx_0_max,n);
[x,vx]=ndgrid(xx(2:n-1),vvx(2:n-1));
figure
pcolor(x,vx,ftle)
shading flat
t_ftle=toc;
fprintf('time to integrate \t%.2f s\n',t_integrazione)
fprintf('time to compute FTLE \t%.2f s\n',t_ftle)
% save(['var_' num2str(n) '_' num2str(clock(4)])
nome=['var_xvx_', 'ode00', '_n',num2str(n),'_e',num2str(energy_tol),'_H'];
save(nome)

178
samples/Matlab/FTLE_reg.m Normal file
View File

@@ -0,0 +1,178 @@
tic
clear all
%% Elements for grid definition
n=100;
%% Dimensionless integrating time
T=2;
%% Choice of the mass parameter
mu=0.1;
%% Computation of Lagrangian Points
[xl1,yl1,xl2,yl2,xl3,yl3,xl4,yl4,xl5,yl5] = Lagr(mu);
%% Computation of initial total energy
E_L1=-Omega(xl1,yl1,mu);
C_L1=-2*E_L1; % C_L1 = 3.6869532299 from Szebehely
E=E_L1+0.03715; % Offset as in figure 2.2 "LCS in the ER3BP"
%% Initial conditions range
x_0_min=-0.8;
x_0_max=-0.2;
vx_0_min=-2;
vx_0_max=2;
y_0=0;
% Grid initializing
[x_0,vx_0]=ndgrid(linspace(x_0_min,x_0_max,n),linspace(vx_0_min,vx_0_max,n));
vy_0=sqrt(2*E+2.*Omega(x_0,y_0,mu)-vx_0.^2);
% Kinetic energy computation
E_cin=E+Omega(x_0,y_0,mu);
% Initializing
x_T=zeros(n,n);
y_T=zeros(n,n);
vx_T=zeros(n,n);
vy_T=zeros(n,n);
filtro=ones(n,n);
E_T=zeros(n,n);
delta_E=zeros(n,n);
a=zeros(n,n); % matrix of numbers of integration steps for each integration
np=0; % number of integrated points
fprintf('integrating with n = %i\n',n)
%% Energy tolerance setting
energy_tol=0.1;
%% Setting the options for the integrator
RelTol=1e-12;AbsTol=1e-12; % From Short
% RelTol=1e-13;AbsTol=1e-22; % From JD James Mireles
% RelTol=3e-14;AbsTol=1e-16; % HIGH accuracy from Ross
options=odeset('AbsTol',AbsTol,'RelTol',RelTol);
%% Parallel integration of equations of motion
h=waitbar(0,'','Name','Integration in progress, please wait!');
S=zeros(n,n);
r1=zeros(n,n);
r2=zeros(n,n);
g=zeros(n,n);
for i=1:n
waitbar(i/n,h,sprintf('Computing i=%i',i));
parfor j=1:n
r1(i,j)=sqrt((x_0(i,j)+mu).^2+y_0.^2);
r2(i,j)=sqrt((x_0(i,j)-1+mu).^2+y_0.^2);
g(i,j)=((1-mu)./(r1(i,j).^3)+mu./(r2(i,j).^3));
if E_cin(i,j)>0 && isreal(vy_0(i,j)) % Check for real velocity and positive Kinetic energy
S(i,j)=g(i,j)*T;
[s,Y]=ode45(@f_reg,[0 S(i,j)],[x_0(i,j); y_0; vx_0(i,j); vy_0(i,j)],options,mu);
a(i,j)=length(Y);
% if s(a(i,j)) < 2
% filtro(i,j)=3;
% end
% Saving solutions
x_T(i,j)=Y(a(i,j),1);
vx_T(i,j)=Y(a(i,j),3);
y_T(i,j)=Y(a(i,j),2);
vy_T(i,j)=Y(a(i,j),4);
% Computation of final total energy and difference with
% initial one
E_T(i,j)=Energy(x_T(i,j),y_T(i,j),vx_T(i,j),vy_T(i,j),mu);
delta_E(i,j)=abs(E_T(i,j)-E);
if delta_E(i,j) > energy_tol; % Check of total energy conservation
fprintf(' Ouch! Wrong Integration: i,j=(%i,%i)\n E_T=%.2f \n delta_E=%f\n\n',i,j,E_T(i,j),delta_E(i,j));
filtro(i,j)=2; % Saving position of the point
end
np=np+1;
else
filtro(i,j)=0; % 1 = interesting point; 0 = non-sense point; 2 = bad integration point
end
end
end
close(h);
t_integrazione=toc;
%%
filtro_1=filtro;
for i=2:n-1
for j=2:n-1
if filtro(i,j)==2 || filtro (i,j)==3
filtro_1(i,j)=2;
filtro_1(i+1,j)=2;
filtro_1(i-1,j)=2;
filtro_1(i,j+1)=2;
filtro_1(i,j-1)=2;
end
end
end
fprintf('integrated with n = %i\n',n)
fprintf('integrated with energy_tol = %f\n',energy_tol)
fprintf('total points \t%i\n',n^2)
fprintf('points integrated \t%i\n',np)
fprintf('time to integrate \t%.2f s\n',t_integrazione)
%% FTLE Computation
fprintf('now computing FTLE\n')
tic
dphi=zeros(2,2);
ftle=zeros(n-2,n-2);
ftle_norm=zeros(n-2,n-2);
ds_x=(x_0_max-x_0_min)/(n-1);
ds_vx=(vx_0_max-vx_0_min)/(n-1);
for i=2:n-1
for j=2:n-1
if filtro_1(i,j) && ... % Check for interesting point
filtro_1(i,j-1) && ...
filtro_1(i,j+1) && ...
filtro_1(i-1,j) && ...
filtro_1(i+1,j)
% The direction of the displacement is decided by the denominator
% TODO: explain theoretically why the matrix can be
% reduced to 2x2
dphi(1,1)=(x_T(i+1,j)-x_T(i-1,j))/(2*ds_x); %(x_0(i-1,j)-x_0(i+1,j));
dphi(1,2)=(x_T(i,j+1)-x_T(i,j-1))/(2*ds_vx); %(vx_0(i,j-1)-vx_0(i,j+1));
dphi(2,1)=(vx_T(i+1,j)-vx_T(i-1,j))/(2*ds_x); %(x_0(i-1,j)-x_0(i+1,j));
dphi(2,2)=(vx_T(i,j+1)-vx_T(i,j-1))/(2*ds_vx); %(vx_0(i,j-1)-vx_0(i,j+1));
if filtro_1(i,j)==2 % Manual setting to visualize bad integrated points
ftle(i-1,j-1)=0;
else
ftle(i-1,j-1)=(1/abs(T))*log(max(sqrt(abs(eig(dphi*dphi')))));
ftle_norm(i-1,j-1)=(1/abs(T))*log(norm(dphi));
end
end
end
end
%% Plotting results
% figure
% plot(t,Y)
% figure
% plot(Y(:,1),Y(:,2))
% figure
xx=linspace(x_0_min,x_0_max,n);
vvx=linspace(vx_0_min,vx_0_max,n);
[x,vx]=ndgrid(xx(2:n-1),vvx(2:n-1));
figure
pcolor(x,vx,ftle)
shading flat
t_ftle=toc;
fprintf('time to integrate \t%.2f s\n',t_integrazione)
fprintf('time to compute FTLE \t%.2f s\n',t_ftle)
% ora=fstringf %TODO
% save(['var_' num2str(n) '_' num2str(clock(4)])
nome=['var_xvx_', 'ode00', '_n',num2str(n)];
save(nome)

View File

@@ -0,0 +1,40 @@
function [ x_T, y_T, vx_T, e_T, filter ] = Integrate_FILE( x_0, y_0, vx_0, e_0, T, N, mu, options)
%Integrate
% This function performs Runge-Kutta-Fehlberg integration for given
% initial conditions to compute FILE
nx=length(x_0);
ny=length(y_0);
nvx=length(vx_0);
ne=length(e_0);
vy_0=zeros(nx,ny,nvx,ne);
x_T=zeros(nx,ny,nvx,ne);
y_T=zeros(nx,ny,nvx,ne);
vx_T=zeros(nx,ny,nvx,ne);
vy_T=zeros(nx,ny,nvx,ne);
e_T=zeros(nx,ny,nvx,ne);
%% Look for physically meaningful points
filter=zeros(nx,ny,nvx,ne); %0=meaningless point 1=meaningful point
%% Integrate only meaningful points
h=waitbar(0,'','Name','Integration in progress, please wait!');
for i=1:nx
waitbar(i/nx,h,sprintf('Computing i=%i',i));
for j=1:ny
parfor k=1:nvx
for l=1:ne
vy_0(i,j,k,l)=sqrt(2*Potential(x_0(i),y_0(j),mu)+2*e_0(l)-vx_0(k)^2);
if isreal(vy_0(i,j,k,l))
filter(i,j,k,l)=1;
ci=[x_0(i), y_0(j), vx_0(k), vy_0(i,j,k,l)];
[t,Y,te,ye,ie]=ode45(@f,[0 T], ci, options, mu);
x_T(i,j,k,l)=ye(N+1,1);
y_T(i,j,k,l)=ye(N+1,2);
vx_T(i,j,k,l)=ye(N+1,3);
vy_T(i,j,k,l)=ye(N+1,4);
e_T(i,j,k,l)=0.5*(vx_T(i,j,k,l)^2+vy_T(i,j,k,l)^2)-Potential(x_T(i,j,k,l),y_T(i,j,k,l),mu);
end
end
end
end
end
close(h);

View File

@@ -0,0 +1,60 @@
function [ x_T, y_T, vx_T, e_T, filter, delta_e ] = Integrate_FTLE_Gawlick_ell( x_0, y_0, vx_0, e_0, T, mu, ecc, nu, options)
%Integrate
% This function performs Runge-Kutta-Fehlberg integration for given
% initial conditions to compute FTLE to obtain the image in the Gawlick's
% article "Lagrangian Coherent Structures in the Elliptic Restricted
% Three-Body Problem".
nx=length(x_0);
ny=length(y_0);
nvx=length(vx_0);
ne=length(e_0);
vy_0=zeros(nx,ny,nvx,ne);
x_T=zeros(nx,ny,nvx,ne);
y_T=zeros(nx,ny,nvx,ne);
vx_T=zeros(nx,ny,nvx,ne);
vy_T=zeros(nx,ny,nvx,ne);
e_T=zeros(nx,ny,nvx,ne);
delta_e=zeros(nx,ny,nvx,ne);
%% Look for physically meaningful points
filter=zeros(nx,ny,nvx,ne); %0=meaningless point 1=meaningful point
useful=ones(nx,ny,nvx,ne);
%% Integrate only useful points
useful(:,1,:,1)=0;
useful(:,1,:,3)=0;
useful(:,3,:,1)=0;
useful(:,3,:,3)=0;
%% Integrate only meaningful points
h=waitbar(0,'','Name','Integration in progress, please wait!');
for i=1:nx
waitbar(i/nx,h,sprintf('Computing i=%i',i));
for j=1:ny
parfor k=1:nvx
for l=1:ne
if useful(i,j,k,l)
vy_0(i,j,k,l)=-sqrt(2*(Omega(x_0(i),y_0(j),mu)/(1+ecc*cos(nu)))+2*e_0(l)-vx_0(k)^2);
if isreal(vy_0(i,j,k,l))
filter(i,j,k,l)=1;
ci=[x_0(i), y_0(j), vx_0(k), vy_0(i,j,k,l)];
[t,Y]=ode45(@f_ell,[0 T], ci, options, mu, ecc);
if abs(t(end)) < abs(T) % Consider also negative time
filter(i,j,k,l)=3;
end
x_T(i,j,k,l)=Y(end,1);
y_T(i,j,k,l)=Y(end,2);
vx_T(i,j,k,l)=Y(end,3);
vy_T(i,j,k,l)=Y(end,4);
e_T(i,j,k,l)=0.5*(vx_T(i,j,k,l)^2+vy_T(i,j,k,l)^2)-Omega(x_T(i,j,k,l),y_T(i,j,k,l),mu);
% Compute the goodness of the integration
delta_e(i,j,k,l)=abs(e_T(i,j,k,l)-e_0(l));
end
end
end
end
end
end
close(h);

28
samples/Matlab/Lagr.m Normal file
View File

@@ -0,0 +1,28 @@
function [xl1,yl1,xl2,yl2,xl3,yl3,xl4,yl4,xl5,yl5] = Lagr(mu)
% [xl1,yl1,xl2,yl2,xl3,yl3,xl4,yl4,xl5,yl5] = Lagr(mu)
% Lagr This function computes the coordinates of the Lagrangian points,
% given the mass parameter
yl1=0;
yl2=0;
yl3=0;
yl4=sqrt(3)/2;
yl5=-sqrt(3)/2;
c1=roots([1 mu-3 3-2*mu -mu 2*mu -mu]);
c2=roots([1 3-mu 3-2*mu -mu -2*mu -mu]);
c3=roots([1 2+mu 1+2*mu mu-1 2*mu-2 mu-1]);
xl1=0;
xl2=0;
for i=1:5
if isreal(c1(i))
xl1=1-mu-c1(i);
end
if isreal(c2(i))
xl2=1-mu+c2(i);
end
if isreal(c3(i))
xl3=-mu-c3(i);
end
end
xl4=0.5-mu;
xl5=xl4;
end

View File

@@ -0,0 +1,16 @@
% Plot of the Lagrangian points
n=5;
mu=linspace(0,0.5,n);
for i=1:n
[xl1,yl1,xl2,yl2,xl3,yl3,xl4,yl4,xl5,yl5] = Lagr(mu(i));
figure (1)
hold all
plot(xl1, yl1, 's')
plot(xl2, yl2, 's')
plot(xl3, yl3, 's')
plot(xl4, yl4, 's')
plot(xl5, yl5, 's')
plot(-mu,0,'o')
plot(1-mu,0, 'o')
plot([-mu(i) xl4],[0 yl4])
end

18
samples/Matlab/Poincare.m Normal file
View File

@@ -0,0 +1,18 @@
clear
%% Initial Conditions
mu=0.012277471;
T=10;
N=5;
C=3.17;
x_0=0.30910452642073;
y_0=0.07738174525518;
vx_0=-0.72560796964234;
vy_0=sqrt(-C-vx_0^2+2*Potential(x_0,y_0,mu));
k=0;
%% Integration
options=odeset('AbsTol',1e-22,'RelTol',1e-13,'Events',@cross_y);
[t,y,te,ye,ie]=ode113(@f,[0 T],[x_0; y_0; vx_0; vy_0],options,mu);
figure
%plot(ye(:,1),ye(:,3),'rs')
plot(ye(:,1),0,'rs')

24
samples/Matlab/RK4.m Normal file
View File

@@ -0,0 +1,24 @@
function x = RK4( fun, tspan, ci, mu )
%RK4 4th-order Runge Kutta integrator
% Detailed explanation goes here
h=1e-5;
t=tspan(1);
T=tspan(length(tspan));
dim=length(ci);
%x=zeros(l,dim);
x(:,1)=ci;
i=1;
while t<T
k1=fun(t,x(:,i),mu);
k2=fun(t+h/2,x(:,i)+k1*h/2,mu);
k3=fun(t+h/2,x(:,i)+k2*h/2,mu);
k4=fun(t+h,x(:,i)+h*k3,mu);
x(:,i+1)=x(:,i)+(h/6*(k1+2*k2+2*k3+k4));
t=t+h;
i=i+1;
end
x=x';
% function events(x)
% dist=
% return
end
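% A hypothetical call, mirroring the ode45/ode113 calls in the other scripts:
% x = RK4(@f, [0 T], [x_0; y_0; vx_0; vy_0], mu);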

54
samples/Matlab/Traj.m Normal file
View File

@@ -0,0 +1,54 @@
clear all
%mu=0.012151; %Earth-Moon
mu=0.012277471 %Earth-Moon
[xl1,yl1,xl2,yl2,xl3,yl3,xl4,yl4,xl5,yl5] = Lagr(mu);
C=3.17;
%C=2*Potential(xl1,yl1,mu);
x_0=1;
y_0=0;
vx_0=0;
vy_0=sqrt(-C-vx_0^2+2*Potential(x_0,y_0,mu));
%vy_0=0;
T=2;
C_star=2*Potential(x_0,y_0,mu)-(vx_0^2+vy_0^2)
%C=-(vx_0^2+vy_0^2)+2*Omega(x_0,y_0,mu);
E=-C/2;
options=odeset('AbsTol',1e-22,'RelTol',1e-13);
%Integrate first orbit
[t0,Y0]=ode113(@f,[0 T],[x_0; y_0; vx_0; vy_0],options,mu);
x0=Y0(:,1);
y0=Y0(:,2);
vx0=Y0(:,3);
vy0=Y0(:,4);
l0=length(Y0);
% Precision of the first orbit
delta_E0=abs(Energy(x0,y0,vx0,vy0,mu)-E);
% figure
% plot(delta_E0)
%Hill's region
points=500;
bb=3; % Bounding box
x=linspace(-bb,bb,points);
y=linspace(-bb,bb,points);
[x,y]=meshgrid(x,y);
z=(Potential(x,y,mu));
% figure
% surfc(x,y,z,'Edgecolor','none')
%Plot orbit
%figure
hold on
contour(x,y,z,[C/2,C/2])
plot(x0,y0)
text(-2,-2,sprintf('C=%.2f',C))
% plot the attractors
plot(-mu,0,'ok')
plot(1-mu,0,'ok')
% Plot points
plot(x0(1),y0(1),'sg')
plot(x0(l0),y0(l0),'sr')

Some files were not shown because too many files have changed in this diff.