mirror of
				https://github.com/KevinMidboe/linguist.git
				synced 2025-10-29 17:50:22 +00:00 
			
		
		
		
	Compare commits
	
		
			114 Commits
		
	
	
		
			v4.8.18
			...
			puppet-fix
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 110b42fe16 | ||
|  | e93f41f097 | ||
|  | 994bc1f135 | ||
|  | 44f03e64c1 | ||
|  | 4166f2e89d | ||
|  | 1a8f19c6f2 | ||
|  | c0e242358a | ||
|  | eb38c8dcf8 | ||
|  | f146b4afbd | ||
|  | db15d0f5d2 | ||
|  | e6d57c771d | ||
|  | eef0335c5f | ||
|  | 461c27c066 | ||
|  | 59d67d6743 | ||
|  | 7aeeb82d3d | ||
|  | c98ca20076 | ||
|  | 4e0b5f02aa | ||
|  | 8da7cb805e | ||
|  | e5e81a8560 | ||
|  | dd53fa1585 | ||
|  | 354a8f079a | ||
|  | f38d6bd124 | ||
|  | e80b92e407 | ||
|  | fa6ae1116f | ||
|  | b7e27a9f58 | ||
|  | 69ba4c5586 | ||
|  | c39d7fd6e8 | ||
|  | 44ed47cea1 | ||
|  | de51cb08d2 | ||
|  | 3dd2d08190 | ||
|  | 3b625e1954 | ||
|  | 5c6f690b97 | ||
|  | 3bbfc907f3 | ||
|  | 053b8bca97 | ||
|  | 7fb3db6203 | ||
|  | ba09394f85 | ||
|  | c59c88f16e | ||
|  | 8a6e74799a | ||
|  | 4268769d2e | ||
|  | 6601864084 | ||
|  | d57aa37fb7 | ||
|  | e72347fd98 | ||
|  | 1b429ea46b | ||
|  | 9468ad4947 | ||
|  | 733ef63193 | ||
|  | 9ca6a5841e | ||
|  | 41ace5fba0 | ||
|  | cc4295b3b3 | ||
|  | 1e4ce80fd9 | ||
|  | 74a71fd90d | ||
|  | 9b08318456 | ||
|  | fa5b6b03dc | ||
|  | cb59296fe0 | ||
|  | f1be771611 | ||
|  | b66fcb2529 | ||
|  | f7fe1fee66 | ||
|  | 94367cc460 | ||
|  | 72bec1fddc | ||
|  | 4e2eba4ef8 | ||
|  | 10457b6639 | ||
|  | d58cbc68a6 | ||
|  | 01de40faaa | ||
|  | 62d285fce6 | ||
|  | 0056095e8c | ||
|  | d6dc3a3991 | ||
|  | b524461b7c | ||
|  | 76d41697aa | ||
|  | 32147b629e | ||
|  | e7b5e25bf8 | ||
|  | d761658f8b | ||
|  | 3719214aba | ||
|  | 47b109be36 | ||
|  | 1ec4db97c2 | ||
|  | 9fe5fe0de2 | ||
|  | b36ea7ac9d | ||
|  | 625b06c30d | ||
|  | 28bce533b2 | ||
|  | 93ec1922cb | ||
|  | 5d09fb67dd | ||
|  | 93dcb61742 | ||
|  | 3a03594685 | ||
|  | 5ce2c254f9 | ||
|  | d7814c4899 | ||
|  | 50c08bf29e | ||
|  | 34928baee6 | ||
|  | 27bb41aa4d | ||
|  | 1415f4b52d | ||
|  | ae8ffcad22 | ||
|  | f43633bf10 | ||
|  | a604de9846 | ||
|  | 3e224e0039 | ||
|  | 15b04f86c3 | ||
|  | 42af436c20 | ||
|  | 2b08c66f0b | ||
|  | f98ab593fb | ||
|  | f951ec07de | ||
|  | e9ac71590f | ||
|  | 210cd19876 | ||
|  | f473c555ac | ||
|  | 48e4394d87 | ||
|  | e1ce88920d | ||
|  | 675cee1d72 | ||
|  | 1c4baf6dc2 | ||
|  | 8f2820e9cc | ||
|  | 04c268e535 | ||
|  | ec749b3f8d | ||
|  | 08b63e7033 | ||
|  | 7867b946b9 | ||
|  | a4d12cc8e4 | ||
|  | a1165b74b1 | ||
|  | 0fa1fa5581 | ||
|  | d8b91bd5c4 | ||
|  | 9b941a34f0 | ||
|  | 9d8392dab8 | 
							
								
								
									
										1
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,3 +1,4 @@ | ||||
| *.gem | ||||
| /Gemfile.lock | ||||
| .bundle/ | ||||
| .idea | ||||
|   | ||||
							
								
								
									
										65
									
								
								.gitmodules
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										65
									
								
								.gitmodules
									
									
									
									
										vendored
									
									
								
							| @@ -67,9 +67,6 @@ | ||||
| [submodule "vendor/grammars/language-javascript"] | ||||
| 	path = vendor/grammars/language-javascript | ||||
| 	url = https://github.com/atom/language-javascript | ||||
| [submodule "vendor/grammars/language-python"] | ||||
| 	path = vendor/grammars/language-python | ||||
| 	url = https://github.com/atom/language-python | ||||
| [submodule "vendor/grammars/language-shellscript"] | ||||
| 	path = vendor/grammars/language-shellscript | ||||
| 	url = https://github.com/atom/language-shellscript | ||||
| @@ -130,9 +127,6 @@ | ||||
| [submodule "vendor/grammars/Sublime-Text-2-OpenEdge-ABL"] | ||||
| 	path = vendor/grammars/Sublime-Text-2-OpenEdge-ABL | ||||
| 	url = https://github.com/jfairbank/Sublime-Text-2-OpenEdge-ABL | ||||
| [submodule "vendor/grammars/sublime-rust"] | ||||
| 	path = vendor/grammars/sublime-rust | ||||
| 	url = https://github.com/jhasse/sublime-rust | ||||
| [submodule "vendor/grammars/sublime-befunge"] | ||||
| 	path = vendor/grammars/sublime-befunge | ||||
| 	url = https://github.com/johanasplund/sublime-befunge | ||||
| @@ -180,7 +174,7 @@ | ||||
| 	url = https://github.com/mokus0/Agda.tmbundle | ||||
| [submodule "vendor/grammars/Julia.tmbundle"] | ||||
| 	path = vendor/grammars/Julia.tmbundle | ||||
| 	url = https://github.com/nanoant/Julia.tmbundle | ||||
| 	url = https://github.com/JuliaEditorSupport/Julia.tmbundle | ||||
| [submodule "vendor/grammars/ooc.tmbundle"] | ||||
| 	path = vendor/grammars/ooc.tmbundle | ||||
| 	url = https://github.com/nilium/ooc.tmbundle | ||||
| @@ -247,9 +241,6 @@ | ||||
| [submodule "vendor/grammars/cpp-qt.tmbundle"] | ||||
| 	path = vendor/grammars/cpp-qt.tmbundle | ||||
| 	url = https://github.com/textmate/cpp-qt.tmbundle | ||||
| [submodule "vendor/grammars/css.tmbundle"] | ||||
| 	path = vendor/grammars/css.tmbundle | ||||
| 	url = https://github.com/textmate/css.tmbundle | ||||
| [submodule "vendor/grammars/d.tmbundle"] | ||||
| 	path = vendor/grammars/d.tmbundle | ||||
| 	url = https://github.com/textmate/d.tmbundle | ||||
| @@ -339,7 +330,7 @@ | ||||
| 	url = https://github.com/textmate/php-smarty.tmbundle | ||||
| [submodule "vendor/grammars/php.tmbundle"] | ||||
| 	path = vendor/grammars/php.tmbundle | ||||
| 	url = https://github.com/textmate/php.tmbundle | ||||
| 	url = https://github.com/brandonblack/php.tmbundle | ||||
| [submodule "vendor/grammars/postscript.tmbundle"] | ||||
| 	path = vendor/grammars/postscript.tmbundle | ||||
| 	url = https://github.com/textmate/postscript.tmbundle | ||||
| @@ -443,9 +434,6 @@ | ||||
| [submodule "vendor/grammars/Sublime-Nit"] | ||||
| 	path = vendor/grammars/Sublime-Nit | ||||
| 	url = https://github.com/R4PaSs/Sublime-Nit | ||||
| [submodule "vendor/grammars/language-hy"] | ||||
| 	path = vendor/grammars/language-hy | ||||
| 	url = https://github.com/rwtolbert/language-hy | ||||
| [submodule "vendor/grammars/Racket"] | ||||
| 	path = vendor/grammars/Racket | ||||
| 	url = https://github.com/soegaard/racket-highlight-for-github | ||||
| @@ -623,9 +611,6 @@ | ||||
| [submodule "vendor/grammars/language-yang"] | ||||
| 	path = vendor/grammars/language-yang | ||||
| 	url = https://github.com/DzonyKalafut/language-yang.git | ||||
| [submodule "vendor/grammars/perl6fe"] | ||||
| 	path = vendor/grammars/perl6fe | ||||
| 	url = https://github.com/MadcapJake/language-perl6fe.git | ||||
| [submodule "vendor/grammars/language-less"] | ||||
| 	path = vendor/grammars/language-less | ||||
| 	url = https://github.com/atom/language-less.git | ||||
| @@ -809,4 +794,48 @@ | ||||
| [submodule "vendor/grammars/rascal-syntax-highlighting"] | ||||
| 	path = vendor/grammars/rascal-syntax-highlighting | ||||
| 	url = https://github.com/usethesource/rascal-syntax-highlighting | ||||
|  | ||||
| [submodule "vendor/grammars/atom-language-perl6"] | ||||
| 	path = vendor/grammars/atom-language-perl6 | ||||
| 	url = https://github.com/perl6/atom-language-perl6 | ||||
| [submodule "vendor/grammars/reason"] | ||||
| 	path = vendor/grammars/reason | ||||
| 	url = https://github.com/facebook/reason | ||||
| [submodule "vendor/grammars/language-xcompose"] | ||||
| 	path = vendor/grammars/language-xcompose | ||||
| 	url = https://github.com/samcv/language-xcompose | ||||
| [submodule "vendor/grammars/SublimeEthereum"] | ||||
| 	path = vendor/grammars/SublimeEthereum | ||||
| 	url = https://github.com/davidhq/SublimeEthereum.git | ||||
| [submodule "vendor/grammars/atom-language-rust"] | ||||
| 	path = vendor/grammars/atom-language-rust | ||||
| 	url = https://github.com/zargony/atom-language-rust | ||||
| [submodule "vendor/grammars/language-css"] | ||||
| 	path = vendor/grammars/language-css | ||||
| 	url = https://github.com/atom/language-css | ||||
| [submodule "vendor/grammars/language-regexp"] | ||||
| 	path = vendor/grammars/language-regexp | ||||
| 	url = https://github.com/Alhadis/language-regexp | ||||
| [submodule "vendor/grammars/Terraform.tmLanguage"] | ||||
| 	path = vendor/grammars/Terraform.tmLanguage | ||||
| 	url = https://github.com/alexlouden/Terraform.tmLanguage | ||||
| [submodule "vendor/grammars/shaders-tmLanguage"] | ||||
| 	path = vendor/grammars/shaders-tmLanguage | ||||
| 	url = https://github.com/tgjones/shaders-tmLanguage | ||||
| [submodule "vendor/grammars/language-meson"] | ||||
| 	path = vendor/grammars/language-meson | ||||
| 	url = https://github.com/TingPing/language-meson | ||||
| [submodule "vendor/grammars/atom-language-p4"] | ||||
| 	path = vendor/grammars/atom-language-p4 | ||||
| 	url = https://github.com/TakeshiTseng/atom-language-p4 | ||||
| [submodule "vendor/grammars/language-jison"] | ||||
| 	path = vendor/grammars/language-jison | ||||
| 	url = https://github.com/cdibbs/language-jison | ||||
| [submodule "vendor/grammars/openscad.tmbundle"] | ||||
| 	path = vendor/grammars/openscad.tmbundle | ||||
| 	url = https://github.com/tbuser/openscad.tmbundle | ||||
| [submodule "vendor/grammars/marko-tmbundle"] | ||||
| 	path = vendor/grammars/marko-tmbundle | ||||
| 	url = https://github.com/marko-js/marko-tmbundle | ||||
| [submodule "vendor/grammars/language-jolie"] | ||||
| 	path = vendor/grammars/language-jolie | ||||
| 	url = https://github.com/fmontesi/language-jolie | ||||
|   | ||||
							
								
								
									
										15
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										15
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -1,20 +1,33 @@ | ||||
| language: ruby | ||||
| sudo: false | ||||
|  | ||||
| addons: | ||||
|   apt: | ||||
|     packages: | ||||
|     - libicu-dev | ||||
|     - libicu48 | ||||
|  | ||||
| before_install: script/travis/before_install | ||||
|  | ||||
| script: | ||||
|   - bundle exec rake | ||||
|   - script/licensed verify | ||||
|  | ||||
| rvm: | ||||
|   - 2.0.0 | ||||
|   - 2.1 | ||||
|   - 2.2 | ||||
|   - 2.3.3 | ||||
|   - 2.4.0 | ||||
|  | ||||
| matrix: | ||||
|   allow_failures: | ||||
|   - rvm: 2.4.0 | ||||
|  | ||||
| notifications: | ||||
|   disabled: true | ||||
|  | ||||
| git: | ||||
|   submodules: false | ||||
|   depth: 3 | ||||
|  | ||||
| cache: bundler | ||||
|   | ||||
| @@ -10,15 +10,15 @@ We try only to add new extensions once they have some usage on GitHub. In most c | ||||
|  | ||||
| To add support for a new extension: | ||||
|  | ||||
| 0. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order. | ||||
| 0. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory. | ||||
| 0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage. | ||||
| 1. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order. | ||||
| 1. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory. | ||||
| 1. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage. | ||||
|  | ||||
| In addition, if this extension is already listed in [`languages.yml`][languages] then sometimes a few more steps will need to be taken: | ||||
|  | ||||
| 0. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`. | ||||
| 0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@arfon** or **@bkeepers** to help with this) to ensure we're not misclassifying files. | ||||
| 0. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help. | ||||
| 1. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`. | ||||
| 1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files. | ||||
| 1. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help. | ||||
|  | ||||
|  | ||||
| ## Adding a language | ||||
| @@ -27,17 +27,17 @@ We try only to add languages once they have some usage on GitHub. In most cases | ||||
|  | ||||
| To add support for a new language: | ||||
|  | ||||
| 0. Add an entry for your language to [`languages.yml`][languages]. Omit the `language_id` field for now. | ||||
| 0. Add a grammar for your language: `script/add-grammar https://github.com/JaneSmith/MyGrammar`. Please only add grammars that have [one of these licenses][licenses]. | ||||
| 0. Add samples for your language to the [samples directory][samples] in the correct subdirectory. | ||||
| 0. Add a `language_id` for your language using `script/set-language-ids`. **You should only ever need to run `script/set-language-ids --update`. Anything other than this risks breaking GitHub search :cry:** | ||||
| 0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage. | ||||
| 1. Add an entry for your language to [`languages.yml`][languages]. Omit the `language_id` field for now. | ||||
| 1. Add a grammar for your language: `script/add-grammar https://github.com/JaneSmith/MyGrammar`. Please only add grammars that have [one of these licenses][licenses]. | ||||
| 1. Add samples for your language to the [samples directory][samples] in the correct subdirectory. | ||||
| 1. Add a `language_id` for your language using `script/set-language-ids`. **You should only ever need to run `script/set-language-ids --update`. Anything other than this risks breaking GitHub search :cry:** | ||||
| 1. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage. | ||||
|  | ||||
| In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken: | ||||
|  | ||||
| 0. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`. | ||||
| 0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@arfon** or **@bkeepers** to help with this) to ensure we're not misclassifying files. | ||||
| 0. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help. | ||||
| 1. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`. | ||||
| 1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files. | ||||
| 1. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help. | ||||
|  | ||||
| Remember, the goal here is to try and avoid false positives! | ||||
|  | ||||
| @@ -80,13 +80,14 @@ Here's our current build status: [ | ||||
| - **@BenEddy** (GitHub staff) | ||||
| - **@Caged** (GitHub staff) | ||||
| - **@grantr** (GitHub staff) | ||||
| - **@larsbrinkhoff** | ||||
| - **@lildude** (GitHub staff) | ||||
| - **@lizzhale** (GitHub staff) | ||||
| - **@mikemcquaid** (GitHub staff) | ||||
| - **@pchaigno** | ||||
| - **@rafer** (GitHub staff) | ||||
| - **@shreyasjoshis** (GitHub staff) | ||||
|  | ||||
| As Linguist is a production dependency for GitHub we have a couple of workflow restrictions: | ||||
|  | ||||
| @@ -97,21 +98,21 @@ As Linguist is a production dependency for GitHub we have a couple of workflow r | ||||
|  | ||||
| If you are the current maintainer of this gem: | ||||
|  | ||||
| 0. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx` | ||||
| 0. Make sure your local dependencies are up to date: `script/bootstrap` | ||||
| 0. If grammar submodules have not been updated recently, update them: `git submodule update --remote && git commit -a` | ||||
| 0. Ensure that samples are updated: `bundle exec rake samples` | ||||
| 0. Ensure that tests are green: `bundle exec rake test` | ||||
| 0. Bump gem version in `lib/linguist/version.rb`, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985). | ||||
| 0. Make a PR to github/linguist, [like this](https://github.com/github/linguist/pull/1238). | ||||
| 0. Build a local gem: `bundle exec rake build_gem` | ||||
| 0. Test the gem: | ||||
|   0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem | ||||
|   0. Install the new gem locally | ||||
|   0. Test behavior locally, branch deploy, whatever needs to happen | ||||
| 0. Merge github/linguist PR | ||||
| 0. Tag and push: `git tag vx.xx.xx; git push --tags` | ||||
| 0. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem` | ||||
| 1. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx` | ||||
| 1. Make sure your local dependencies are up to date: `script/bootstrap` | ||||
| 1. If grammar submodules have not been updated recently, update them: `git submodule update --remote && git commit -a` | ||||
| 1. Ensure that samples are updated: `bundle exec rake samples` | ||||
| 1. Ensure that tests are green: `bundle exec rake test` | ||||
| 1. Bump gem version in `lib/linguist/version.rb`, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985). | ||||
| 1. Make a PR to github/linguist, [like this](https://github.com/github/linguist/pull/1238). | ||||
| 1. Build a local gem: `bundle exec rake build_gem` | ||||
| 1. Test the gem: | ||||
|   1. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem | ||||
|   1. Install the new gem locally | ||||
|   1. Test behavior locally, branch deploy, whatever needs to happen | ||||
| 1. Merge github/linguist PR | ||||
| 1. Tag and push: `git tag vx.xx.xx; git push --tags` | ||||
| 1. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem` | ||||
|  | ||||
| [grammars]: /grammars.yml | ||||
| [languages]: /lib/linguist/languages.yml | ||||
|   | ||||
							
								
								
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,4 +1,4 @@ | ||||
| Copyright (c) 2011-2016 GitHub, Inc. | ||||
| Copyright (c) 2017 GitHub, Inc. | ||||
|  | ||||
| Permission is hereby granted, free of charge, to any person | ||||
| obtaining a copy of this software and associated documentation | ||||
|   | ||||
							
								
								
									
										25
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										25
									
								
								README.md
									
									
									
									
									
								
							| @@ -15,10 +15,10 @@ See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md | ||||
|  | ||||
| The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect: | ||||
|  | ||||
| 0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language. | ||||
| 0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored  code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them. | ||||
| 0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful. | ||||
| 0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified. | ||||
| 1. Click on the name of the language in the stats bar to see a list of the files that are identified as that language. | ||||
| 1. If you see files that you didn't write, consider moving the files into one of the [paths for vendored  code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them. | ||||
| 1. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful. | ||||
| 1. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified. | ||||
|  | ||||
| ### There's a problem with the syntax highlighting of a file | ||||
|  | ||||
| @@ -32,13 +32,15 @@ Linguist supports a number of different custom overrides strategies for language | ||||
|  | ||||
| ### Using gitattributes | ||||
|  | ||||
| Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, and `linguist-vendored`. `.gitattributes` will be used to determine language statistics, but will not be used to syntax highlight files. To manually set syntax highlighting, use [Vim or Emacs modelines](#using-emacs-or-vim-modelines). | ||||
| Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, `linguist-vendored`, and `linguist-generated`. `.gitattributes` will be used to determine language statistics and will be used to syntax highlight files. You can also manually set syntax highlighting using [Vim or Emacs modelines](#using-emacs-or-vim-modelines). | ||||
|  | ||||
| ``` | ||||
| $ cat .gitattributes | ||||
| *.rb linguist-language=Java | ||||
| ``` | ||||
|  | ||||
| #### Vendored code | ||||
|  | ||||
| Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. | ||||
|  | ||||
| Use the `linguist-vendored` attribute to vendor or un-vendor paths. | ||||
| @@ -49,6 +51,8 @@ special-vendored-path/* linguist-vendored | ||||
| jquery.js linguist-vendored=false | ||||
| ``` | ||||
|  | ||||
| #### Documentation | ||||
|  | ||||
| Just like vendored files, Linguist excludes documentation files from your project's language stats. [lib/linguist/documentation.yml](lib/linguist/documentation.yml) lists common documentation paths and excludes them from the language statistics for your repository. | ||||
|  | ||||
| Use the `linguist-documentation` attribute to mark or unmark paths as documentation. | ||||
| @@ -59,19 +63,18 @@ project-docs/* linguist-documentation | ||||
| docs/formatter.rb linguist-documentation=false | ||||
| ``` | ||||
|  | ||||
| #### Generated file detection | ||||
| #### Generated code | ||||
|  | ||||
| Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an added bonus, unlike vendored and documentation files, these files are suppressed in diffs. | ||||
|  | ||||
| ```ruby | ||||
| Linguist::FileBlob.new("underscore.min.js").generated? # => true | ||||
| ``` | ||||
|  | ||||
| See [Linguist::Generated#generated?](https://github.com/github/linguist/blob/master/lib/linguist/generated.rb). | ||||
| $ cat .gitattributes | ||||
| Api.elm linguist-generated=true | ||||
| ``` | ||||
|  | ||||
| ### Using Emacs or Vim modelines | ||||
|  | ||||
| Alternatively, you can use Vim or Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com | ||||
| If you do not want to use `.gitattributes` to override the syntax highlighting used on GitHub.com, you can use Vim or Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com | ||||
|  | ||||
| ##### Vim | ||||
| ``` | ||||
|   | ||||
							
								
								
									
										1
									
								
								Rakefile
									
									
									
									
									
								
							
							
						
						
									
										1
									
								
								Rakefile
									
									
									
									
									
								
							| @@ -4,6 +4,7 @@ require 'rake/testtask' | ||||
| require 'yaml' | ||||
| require 'yajl' | ||||
| require 'open-uri' | ||||
| require 'json' | ||||
|  | ||||
| task :default => :test | ||||
|  | ||||
|   | ||||
| @@ -1,5 +1,7 @@ | ||||
| #!/usr/bin/env ruby | ||||
|  | ||||
| $LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib') | ||||
|  | ||||
| require 'linguist' | ||||
| require 'rugged' | ||||
| require 'optparse' | ||||
| @@ -102,10 +104,16 @@ def git_linguist(args) | ||||
|   commit = nil | ||||
|  | ||||
|   parser = OptionParser.new do |opts| | ||||
|     opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable" | ||||
|     opts.banner = <<-HELP | ||||
|     Linguist v#{Linguist::VERSION} | ||||
|     Detect language type and determine language breakdown for a given Git repository. | ||||
|  | ||||
|     Usage: | ||||
|     git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable" | ||||
|     HELP | ||||
|  | ||||
|     opts.on("-f", "--force", "Force a full rescan") { incremental = false } | ||||
|     opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v} | ||||
|     opts.on("-c", "--commit=COMMIT", "Commit to index") { |v| commit = v} | ||||
|   end | ||||
|  | ||||
|   parser.parse!(args) | ||||
|   | ||||
							
								
								
									
										35
									
								
								bin/linguist
									
									
									
									
									
								
							
							
						
						
									
										35
									
								
								bin/linguist
									
									
									
									
									
								
							| @@ -1,29 +1,37 @@ | ||||
| #!/usr/bin/env ruby | ||||
|  | ||||
| # linguist — detect language type for a file, or, given a directory, determine language breakdown | ||||
| #     usage: linguist <path> [<--breakdown>] | ||||
| # | ||||
| $LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib') | ||||
|  | ||||
| require 'linguist' | ||||
| require 'rugged' | ||||
| require 'json' | ||||
| require 'optparse' | ||||
|  | ||||
| path = ARGV[0] || Dir.pwd | ||||
|  | ||||
| # special case if not given a directory but still given the --breakdown option | ||||
| # special case if not given a directory | ||||
| # but still given the --breakdown or --json options/ | ||||
| if path == "--breakdown" | ||||
|   path = Dir.pwd | ||||
|   breakdown = true | ||||
| elsif path == "--json" | ||||
|   path = Dir.pwd | ||||
|   json_breakdown = true | ||||
| end | ||||
|  | ||||
| ARGV.shift | ||||
| breakdown = true if ARGV[0] == "--breakdown" | ||||
| json_breakdown = true if ARGV[0] == "--json" | ||||
|  | ||||
| if File.directory?(path) | ||||
|   rugged = Rugged::Repository.new(path) | ||||
|   repo = Linguist::Repository.new(rugged, rugged.head.target_id) | ||||
|   repo.languages.sort_by { |_, size| size }.reverse.each do |language, size| | ||||
|     percentage = ((size / repo.size.to_f) * 100) | ||||
|     percentage = sprintf '%.2f' % percentage | ||||
|     puts "%-7s %s" % ["#{percentage}%", language] | ||||
|   if !json_breakdown | ||||
|     repo.languages.sort_by { |_, size| size }.reverse.each do |language, size| | ||||
|       percentage = ((size / repo.size.to_f) * 100) | ||||
|       percentage = sprintf '%.2f' % percentage | ||||
|       puts "%-7s %s" % ["#{percentage}%", language] | ||||
|     end | ||||
|   end | ||||
|   if breakdown | ||||
|     puts | ||||
| @@ -35,6 +43,8 @@ if File.directory?(path) | ||||
|       end | ||||
|       puts | ||||
|     end | ||||
|   elsif json_breakdown | ||||
|     puts JSON.dump(repo.breakdown_by_file) | ||||
|   end | ||||
| elsif File.file?(path) | ||||
|   blob = Linguist::FileBlob.new(path, Dir.pwd) | ||||
| @@ -63,5 +73,12 @@ elsif File.file?(path) | ||||
|     puts "  appears to be a vendored file" | ||||
|   end | ||||
| else | ||||
|   abort "usage: linguist <path>" | ||||
|   abort <<-HELP | ||||
|   Linguist v#{Linguist::VERSION} | ||||
|   Detect language type for a file, or, given a repository, determine language breakdown. | ||||
|  | ||||
|   Usage: linguist <path> | ||||
|          linguist <path> [--breakdown] [--json] | ||||
|          linguist [--breakdown] [--json] | ||||
|   HELP | ||||
| end | ||||
|   | ||||
| @@ -16,7 +16,7 @@ Gem::Specification.new do |s| | ||||
|   s.add_dependency 'charlock_holmes', '~> 0.7.3' | ||||
|   s.add_dependency 'escape_utils',    '~> 1.1.0' | ||||
|   s.add_dependency 'mime-types',      '>= 1.19' | ||||
|   s.add_dependency 'rugged',          '>= 0.23.0b' | ||||
|   s.add_dependency 'rugged',          '>= 0.25.1' | ||||
|  | ||||
|   s.add_development_dependency 'minitest', '>= 5.0' | ||||
|   s.add_development_dependency 'mocha' | ||||
| @@ -26,5 +26,5 @@ Gem::Specification.new do |s| | ||||
|   s.add_development_dependency 'yajl-ruby' | ||||
|   s.add_development_dependency 'color-proximity', '~> 0.2.1' | ||||
|   s.add_development_dependency 'licensed' | ||||
|   s.add_development_dependency 'licensee', '>= 8.6.0' | ||||
|   s.add_development_dependency 'licensee', '~> 8.8.0' | ||||
| end | ||||
|   | ||||
							
								
								
									
										64
									
								
								grammars.yml
									
									
									
									
									
								
							
							
						
						
									
										64
									
								
								grammars.yml
									
									
									
									
									
								
							| @@ -1,9 +1,9 @@ | ||||
| --- | ||||
| http://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage: | ||||
| - text.xml.genshi | ||||
| https://bitbucket.org/Clams/sublimesystemverilog/get/default.tar.gz: | ||||
| - source.systemverilog | ||||
| - source.ucfconstraints | ||||
| https://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage: | ||||
| - text.xml.genshi | ||||
| vendor/grammars/ABNF.tmbundle: | ||||
| - source.abnf | ||||
| vendor/grammars/Agda.tmbundle: | ||||
| @@ -56,6 +56,8 @@ vendor/grammars/MQL5-sublime: | ||||
| vendor/grammars/MagicPython: | ||||
| - source.python | ||||
| - source.regexp.python | ||||
| - text.python.console | ||||
| - text.python.traceback | ||||
| vendor/grammars/Modelica: | ||||
| - source.modelica | ||||
| vendor/grammars/NSIS: | ||||
| @@ -113,7 +115,9 @@ vendor/grammars/SublimeBrainfuck: | ||||
| - source.bf | ||||
| vendor/grammars/SublimeClarion: | ||||
| - source.clarion | ||||
| vendor/grammars/SublimeGDB: | ||||
| vendor/grammars/SublimeEthereum: | ||||
| - source.solidity | ||||
| vendor/grammars/SublimeGDB/: | ||||
| - source.disasm | ||||
| - source.gdb | ||||
| - source.gdb.session | ||||
| @@ -128,6 +132,8 @@ vendor/grammars/TLA: | ||||
| - source.tla | ||||
| vendor/grammars/TXL: | ||||
| - source.txl | ||||
| vendor/grammars/Terraform.tmLanguage: | ||||
| - source.terraform | ||||
| vendor/grammars/Textmate-Gosu-Bundle: | ||||
| - source.gosu.2 | ||||
| vendor/grammars/UrWeb-Language-Definition: | ||||
| @@ -178,8 +184,18 @@ vendor/grammars/atom-language-1c-bsl: | ||||
| - source.sdbl | ||||
| vendor/grammars/atom-language-clean: | ||||
| - source.clean | ||||
| - text.restructuredtext.clean | ||||
| vendor/grammars/atom-language-p4: | ||||
| - source.p4 | ||||
| vendor/grammars/atom-language-perl6: | ||||
| - source.meta-info | ||||
| - source.perl6fe | ||||
| - source.quoting.perl6fe | ||||
| - source.regexp.perl6fe | ||||
| vendor/grammars/atom-language-purescript: | ||||
| - source.purescript | ||||
| vendor/grammars/atom-language-rust: | ||||
| - source.rust | ||||
| vendor/grammars/atom-language-srt: | ||||
| - text.srt | ||||
| vendor/grammars/atom-language-stan: | ||||
| @@ -211,7 +227,6 @@ vendor/grammars/capnproto.tmbundle: | ||||
| vendor/grammars/carto-atom: | ||||
| - source.css.mss | ||||
| vendor/grammars/ceylon-sublimetext: | ||||
| - module.ceylon | ||||
| - source.ceylon | ||||
| vendor/grammars/chapel-tmbundle: | ||||
| - source.chapel | ||||
| @@ -225,8 +240,6 @@ vendor/grammars/cpp-qt.tmbundle: | ||||
| - source.qmake | ||||
| vendor/grammars/creole: | ||||
| - text.html.creole | ||||
| vendor/grammars/css.tmbundle: | ||||
| - source.css | ||||
| vendor/grammars/cucumber-tmbundle: | ||||
| - source.ruby.rspec.cucumber.steps | ||||
| - text.gherkin.feature | ||||
| @@ -360,6 +373,8 @@ vendor/grammars/language-csound: | ||||
| - source.csound | ||||
| - source.csound-document | ||||
| - source.csound-score | ||||
| vendor/grammars/language-css: | ||||
| - source.css | ||||
| vendor/grammars/language-emacs-lisp: | ||||
| - source.emacs.lisp | ||||
| vendor/grammars/language-fontforge: | ||||
| @@ -384,15 +399,19 @@ vendor/grammars/language-haskell: | ||||
| - source.haskell | ||||
| - source.hsc2hs | ||||
| - text.tex.latex.haskell | ||||
| vendor/grammars/language-hy: | ||||
| - source.hy | ||||
| vendor/grammars/language-inform7: | ||||
| - source.inform7 | ||||
| vendor/grammars/language-javascript: | ||||
| - source.js | ||||
| - source.js.embedded.html | ||||
| - source.js.regexp | ||||
| - source.js.regexp.replacement | ||||
| - source.jsdoc | ||||
| vendor/grammars/language-jison: | ||||
| - source.jison | ||||
| - source.jisonlex | ||||
| - source.jisonlex-injection | ||||
| vendor/grammars/language-jolie: | ||||
| - source.jolie | ||||
| vendor/grammars/language-jsoniq: | ||||
| - source.jq | ||||
| - source.xq | ||||
| @@ -400,20 +419,24 @@ vendor/grammars/language-less: | ||||
| - source.css.less | ||||
| vendor/grammars/language-maxscript: | ||||
| - source.maxscript | ||||
| vendor/grammars/language-meson: | ||||
| - source.meson | ||||
| vendor/grammars/language-ncl: | ||||
| - source.ncl | ||||
| vendor/grammars/language-ninja: | ||||
| - source.ninja | ||||
| vendor/grammars/language-povray: | ||||
| - source.pov-ray sdl | ||||
| vendor/grammars/language-python: | ||||
| - text.python.console | ||||
| - text.python.traceback | ||||
| vendor/grammars/language-regexp: | ||||
| - source.regexp | ||||
| - source.regexp.extended | ||||
| vendor/grammars/language-renpy: | ||||
| - source.renpy | ||||
| vendor/grammars/language-restructuredtext: | ||||
| - text.restructuredtext | ||||
| vendor/grammars/language-roff: | ||||
| - source.ditroff | ||||
| - source.ditroff.desc | ||||
| - source.ideal | ||||
| - source.pic | ||||
| - text.roff | ||||
| @@ -437,6 +460,8 @@ vendor/grammars/language-wavefront: | ||||
| - source.wavefront.obj | ||||
| vendor/grammars/language-xbase: | ||||
| - source.harbour | ||||
| vendor/grammars/language-xcompose: | ||||
| - config.xcompose | ||||
| vendor/grammars/language-yaml: | ||||
| - source.yaml | ||||
| vendor/grammars/language-yang: | ||||
| @@ -466,6 +491,8 @@ vendor/grammars/make.tmbundle: | ||||
| - source.makefile | ||||
| vendor/grammars/mako-tmbundle: | ||||
| - text.html.mako | ||||
| vendor/grammars/marko-tmbundle: | ||||
| - text.marko | ||||
| vendor/grammars/mathematica-tmbundle: | ||||
| - source.mathematica | ||||
| vendor/grammars/matlab.tmbundle: | ||||
| @@ -503,6 +530,8 @@ vendor/grammars/ooc.tmbundle: | ||||
| - source.ooc | ||||
| vendor/grammars/opa.tmbundle: | ||||
| - source.opa | ||||
| vendor/grammars/openscad.tmbundle: | ||||
| - source.scad | ||||
| vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage: | ||||
| - source.oz | ||||
| vendor/grammars/parrot: | ||||
| @@ -514,10 +543,6 @@ vendor/grammars/pawn-sublime-language: | ||||
| vendor/grammars/perl.tmbundle: | ||||
| - source.perl | ||||
| - source.perl.6 | ||||
| vendor/grammars/perl6fe: | ||||
| - source.meta-info | ||||
| - source.perl6fe | ||||
| - source.regexp.perl6fe | ||||
| vendor/grammars/php-smarty.tmbundle: | ||||
| - text.html.smarty | ||||
| vendor/grammars/php.tmbundle: | ||||
| @@ -542,6 +567,8 @@ vendor/grammars/r.tmbundle: | ||||
| - text.tex.latex.rd | ||||
| vendor/grammars/rascal-syntax-highlighting: | ||||
| - source.rascal | ||||
| vendor/grammars/reason: | ||||
| - source.reason | ||||
| vendor/grammars/ruby-slim.tmbundle: | ||||
| - text.slim | ||||
| vendor/grammars/ruby.tmbundle: | ||||
| @@ -561,6 +588,9 @@ vendor/grammars/scilab.tmbundle: | ||||
| - source.scilab | ||||
| vendor/grammars/secondlife-lsl: | ||||
| - source.lsl | ||||
| vendor/grammars/shaders-tmLanguage: | ||||
| - source.hlsl | ||||
| - source.shaderlab | ||||
| vendor/grammars/smali-sublime: | ||||
| - source.smali | ||||
| vendor/grammars/smalltalk-tmbundle: | ||||
| @@ -609,8 +639,6 @@ vendor/grammars/sublime-rexx: | ||||
| - source.rexx | ||||
| vendor/grammars/sublime-robot-plugin: | ||||
| - text.robot | ||||
| vendor/grammars/sublime-rust: | ||||
| - source.rust | ||||
| vendor/grammars/sublime-spintools: | ||||
| - source.regexp.spin | ||||
| - source.spin | ||||
|   | ||||
| @@ -15,9 +15,9 @@ class << Linguist | ||||
|   #       see Linguist::LazyBlob and Linguist::FileBlob for examples | ||||
|   # | ||||
|   # Returns Language or nil. | ||||
|   def detect(blob) | ||||
|   def detect(blob, allow_empty: false) | ||||
|     # Bail early if the blob is binary or empty. | ||||
|     return nil if blob.likely_binary? || blob.binary? || blob.empty? | ||||
|     return nil if blob.likely_binary? || blob.binary? || (!allow_empty && blob.empty?) | ||||
|  | ||||
|     Linguist.instrument("linguist.detection", :blob => blob) do | ||||
|       # Call each strategy until one candidate is returned. | ||||
| @@ -59,8 +59,9 @@ class << Linguist | ||||
|   # Strategies are called in turn until a single Language is returned. | ||||
|   STRATEGIES = [ | ||||
|     Linguist::Strategy::Modeline, | ||||
|     Linguist::Shebang, | ||||
|     Linguist::Strategy::Filename, | ||||
|     Linguist::Shebang, | ||||
|     Linguist::Strategy::Extension, | ||||
|     Linguist::Heuristics, | ||||
|     Linguist::Classifier | ||||
|   ] | ||||
| @@ -73,7 +74,7 @@ class << Linguist | ||||
|   #       end | ||||
|   #     end | ||||
|   # | ||||
|   #     Linguist.instrumenter = CustomInstrumenter | ||||
|   #     Linguist.instrumenter = CustomInstrumenter.new | ||||
|   # | ||||
|   # The instrumenter must conform to the `ActiveSupport::Notifications` | ||||
|   # interface, which defines `#instrument` and accepts: | ||||
|   | ||||
| @@ -95,7 +95,7 @@ module Linguist | ||||
|     # Returns sorted Array of result pairs. Each pair contains the | ||||
|     # String language name and a Float score. | ||||
|     def classify(tokens, languages) | ||||
|       return [] if tokens.nil? | ||||
|       return [] if tokens.nil? || languages.empty? | ||||
|       tokens = Tokenizer.tokenize(tokens) if tokens.is_a?(String) | ||||
|       scores = {} | ||||
|  | ||||
|   | ||||
| @@ -9,11 +9,12 @@ | ||||
|  | ||||
| ## Documentation directories ## | ||||
|  | ||||
| - ^docs?/ | ||||
| - ^[Dd]ocs?/ | ||||
| - (^|/)[Dd]ocumentation/ | ||||
| - (^|/)javadoc/ | ||||
| - ^man/ | ||||
| - (^|/)[Jj]avadoc/ | ||||
| - ^[Mm]an/ | ||||
| - ^[Ee]xamples/ | ||||
| - ^[Dd]emos?/ | ||||
|  | ||||
| ## Documentation files ## | ||||
|  | ||||
| @@ -27,4 +28,4 @@ | ||||
| - (^|/)[Rr]eadme(\.|$) | ||||
|  | ||||
| # Samples folders | ||||
| - ^[Ss]amples/ | ||||
| - ^[Ss]amples?/ | ||||
|   | ||||
| @@ -3,7 +3,7 @@ module Linguist | ||||
|     # Public: Is the blob a generated file? | ||||
|     # | ||||
|     # name - String filename | ||||
|     # data - String blob data. A block also maybe passed in for lazy | ||||
|     # data - String blob data. A block also may be passed in for lazy | ||||
|     #        loading. This behavior is deprecated and you should always | ||||
|     #        pass in a String. | ||||
|     # | ||||
| @@ -70,6 +70,7 @@ module Linguist | ||||
|       compiled_cython_file? || | ||||
|       generated_go? || | ||||
|       generated_protocol_buffer? || | ||||
|       generated_javascript_protocol_buffer? || | ||||
|       generated_apache_thrift? || | ||||
|       generated_jni_header? || | ||||
|       vcr_cassette? || | ||||
| @@ -77,7 +78,10 @@ module Linguist | ||||
|       generated_unity3d_meta? || | ||||
|       generated_racc? || | ||||
|       generated_jflex? || | ||||
|       generated_grammarkit? | ||||
|       generated_grammarkit? || | ||||
|       generated_roxygen2? || | ||||
|       generated_jison? || | ||||
|       generated_yarn_lock? | ||||
|     end | ||||
|  | ||||
|     # Internal: Is the blob an Xcode file? | ||||
| @@ -275,6 +279,17 @@ module Linguist | ||||
|       return lines[0].include?("Generated by the protocol buffer compiler.  DO NOT EDIT!") | ||||
|     end | ||||
|  | ||||
|     # Internal: Is the blob a Javascript source file generated by the | ||||
|     # Protocol Buffer compiler? | ||||
|     # | ||||
|     # Returns true of false. | ||||
|     def generated_javascript_protocol_buffer? | ||||
|       return false unless extname == ".js" | ||||
|       return false unless lines.count > 6 | ||||
|  | ||||
|       return lines[5].include?("GENERATED CODE -- DO NOT EDIT!") | ||||
|     end | ||||
|  | ||||
|     APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp', '.php'] | ||||
|  | ||||
|     # Internal: Is the blob generated by Apache Thrift compiler? | ||||
| @@ -311,7 +326,7 @@ module Linguist | ||||
|       !!name.match(/vendor\/((?!-)[-0-9A-Za-z]+(?<!-)\.)+(com|edu|gov|in|me|net|org|fm|io)/) | ||||
|     end | ||||
|  | ||||
|     # Internal: Is the blob a generated npm shrinkwrap file. | ||||
|     # Internal: Is the blob a generated npm shrinkwrap file? | ||||
|     # | ||||
|     # Returns true or false. | ||||
|     def npm_shrinkwrap? | ||||
| @@ -333,7 +348,7 @@ module Linguist | ||||
|       !!name.match(/composer\.lock/) | ||||
|     end | ||||
|  | ||||
|     # Internal: Is the blob a generated by Zephir | ||||
|     # Internal: Is the blob generated by Zephir? | ||||
|     # | ||||
|     # Returns true or false. | ||||
|     def generated_by_zephir? | ||||
| @@ -433,5 +448,46 @@ module Linguist | ||||
|       return false unless lines.count > 1 | ||||
|       return lines[0].start_with?("// This is a generated file. Not intended for manual editing.") | ||||
|     end | ||||
|  | ||||
|     # Internal: Is this a roxygen2-generated file? | ||||
|     # | ||||
|     # A roxygen2-generated file typically contain: | ||||
|     # % Generated by roxygen2: do not edit by hand | ||||
|     # on the first line. | ||||
|     # | ||||
|     # Return true or false | ||||
|     def generated_roxygen2? | ||||
|       return false unless extname == '.Rd' | ||||
|       return false unless lines.count > 1 | ||||
|  | ||||
|       return lines[0].include?("% Generated by roxygen2: do not edit by hand") | ||||
|     end | ||||
|  | ||||
|     # Internal: Is this a Jison-generated file? | ||||
|     # | ||||
|     # Jison-generated parsers typically contain: | ||||
|     # /* parser generated by jison | ||||
|     # on the first line. | ||||
|     # | ||||
|     # Jison-generated lexers typically contain: | ||||
|     # /* generated by jison-lex | ||||
|     # on the first line. | ||||
|     # | ||||
|     # Return true or false | ||||
|     def generated_jison? | ||||
|       return false unless extname == '.js' | ||||
|       return false unless lines.count > 1 | ||||
|       return lines[0].start_with?("/* parser generated by jison ") || | ||||
|              lines[0].start_with?("/* generated by jison-lex ") | ||||
|     end | ||||
|  | ||||
|     # Internal: Is the blob a generated yarn lockfile? | ||||
|     # | ||||
|     # Returns true or false. | ||||
|     def generated_yarn_lock? | ||||
|       return false unless name.match(/yarn\.lock/) | ||||
|       return false unless lines.count > 0 | ||||
|       return lines[0].include?("# THIS IS AN AUTOGENERATED FILE") | ||||
|     end | ||||
|   end | ||||
| end | ||||
|   | ||||
| @@ -125,11 +125,18 @@ module Linguist | ||||
|     end | ||||
|  | ||||
|     disambiguate ".d" do |data| | ||||
|       if /^module /.match(data) | ||||
|       # see http://dlang.org/spec/grammar | ||||
|       # ModuleDeclaration | ImportDeclaration | FuncDeclaration | unittest | ||||
|       if /^module\s+[\w.]*\s*;|import\s+[\w\s,.:]*;|\w+\s+\w+\s*\(.*\)(?:\(.*\))?\s*{[^}]*}|unittest\s*(?:\(.*\))?\s*{[^}]*}/.match(data) | ||||
|         Language["D"] | ||||
|       elsif /^((dtrace:::)?BEGIN|provider |#pragma (D (option|attributes)|ident)\s)/.match(data) | ||||
|       # see http://dtrace.org/guide/chp-prog.html, http://dtrace.org/guide/chp-profile.html, http://dtrace.org/guide/chp-opt.html | ||||
|       elsif /^(\w+:\w*:\w*:\w*|BEGIN|END|provider\s+|(tick|profile)-\w+\s+{[^}]*}|#pragma\s+D\s+(option|attributes|depends_on)\s|#pragma\s+ident\s)/.match(data) | ||||
|         Language["DTrace"] | ||||
|       elsif /(\/.*:( .* \\)$| : \\$|^ : |: \\$)/.match(data) | ||||
|       # path/target : dependency \ | ||||
|       # target : \ | ||||
|       #  : dependency | ||||
|       # path/file.ext1 : some/path/../file.ext2 | ||||
|       elsif /([\/\\].*:\s+.*\s\\$|: \\$|^ : |^[\w\s\/\\.]+\w+\.\w+\s*:\s+[\w\s\/\\.]+\w+\.\w+)/.match(data) | ||||
|         Language["Makefile"] | ||||
|       end | ||||
|     end | ||||
| @@ -158,7 +165,7 @@ module Linguist | ||||
|       elsif data.include?("flowop") | ||||
|         Language["Filebench WML"] | ||||
|       elsif fortran_rx.match(data) | ||||
|         Language["FORTRAN"] | ||||
|         Language["Fortran"] | ||||
|       end | ||||
|     end | ||||
|  | ||||
| @@ -166,7 +173,7 @@ module Linguist | ||||
|       if /^: /.match(data) | ||||
|         Language["Forth"] | ||||
|       elsif fortran_rx.match(data) | ||||
|         Language["FORTRAN"] | ||||
|         Language["Fortran"] | ||||
|       end | ||||
|     end | ||||
|  | ||||
| @@ -219,7 +226,7 @@ module Linguist | ||||
|       elsif /^(%[%{}]xs|<.*>)/.match(data) | ||||
|         Language["Lex"] | ||||
|       elsif /^\.[a-z][a-z](\s|$)/i.match(data) | ||||
|         Language["Groff"] | ||||
|         Language["Roff"] | ||||
|       elsif /^\((de|class|rel|code|data|must)\s/.match(data) | ||||
|         Language["PicoLisp"] | ||||
|       end | ||||
| @@ -260,10 +267,12 @@ module Linguist | ||||
|     end | ||||
|  | ||||
|     disambiguate ".md" do |data| | ||||
|       if /(^[-a-z0-9=#!\*\[|])|<\//i.match(data) || data.empty? | ||||
|       if /(^[-a-z0-9=#!\*\[|>])|<\//i.match(data) || data.empty? | ||||
|         Language["Markdown"] | ||||
|       elsif /^(;;|\(define_)/.match(data) | ||||
|         Language["GCC machine description"] | ||||
|         Language["GCC Machine Description"] | ||||
|       else | ||||
|         Language["Markdown"] | ||||
|       end | ||||
|     end | ||||
|  | ||||
| @@ -278,7 +287,7 @@ module Linguist | ||||
|     disambiguate ".mod" do |data| | ||||
|       if data.include?('<!ENTITY ') | ||||
|         Language["XML"] | ||||
|       elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data) | ||||
|       elsif /^\s*MODULE [\w\.]+;/i.match(data) || /^\s*END [\w\.]+;/i.match(data) | ||||
|         Language["Modula-2"] | ||||
|       else | ||||
|         [Language["Linux Kernel Module"], Language["AMPL"]] | ||||
| @@ -287,9 +296,9 @@ module Linguist | ||||
|  | ||||
|     disambiguate ".ms" do |data| | ||||
|       if /^[.'][a-z][a-z](\s|$)/i.match(data) | ||||
|         Language["Groff"] | ||||
|         Language["Roff"] | ||||
|       elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, "")) | ||||
|         Language["GAS"] | ||||
|         Language["Unix Assembly"] | ||||
|       else | ||||
|         Language["MAXScript"] | ||||
|       end | ||||
| @@ -297,7 +306,7 @@ module Linguist | ||||
|  | ||||
|     disambiguate ".n" do |data| | ||||
|       if /^[.']/.match(data) | ||||
|         Language["Groff"] | ||||
|         Language["Roff"] | ||||
|       elsif /^(module|namespace|using)\s/.match(data) | ||||
|         Language["Nemerle"] | ||||
|       end | ||||
| @@ -326,7 +335,7 @@ module Linguist | ||||
|     end | ||||
|  | ||||
|     disambiguate ".pl" do |data| | ||||
|       if /^[^#]+:-/.match(data) | ||||
|       if /^[^#]*:-/.match(data) | ||||
|         Language["Prolog"] | ||||
|       elsif /use strict|use\s+v?5\./.match(data) | ||||
|         Language["Perl"] | ||||
| @@ -335,16 +344,16 @@ module Linguist | ||||
|       end | ||||
|     end | ||||
|  | ||||
|     disambiguate ".pm", ".t" do |data| | ||||
|       if /use strict|use\s+v?5\./.match(data) | ||||
|         Language["Perl"] | ||||
|       elsif /^(use v6|(my )?class|module)/.match(data) | ||||
|     disambiguate ".pm" do |data| | ||||
|       if /^\s*(?:use\s+v6\s*;|(?:\bmy\s+)?class|module)\b/.match(data) | ||||
|         Language["Perl6"] | ||||
|       elsif /\buse\s+(?:strict\b|v?5\.)/.match(data) | ||||
|         Language["Perl"] | ||||
|       end | ||||
|     end | ||||
|  | ||||
|     disambiguate ".pod" do |data| | ||||
|       if /^=\w+$/.match(data) | ||||
|       if /^=\w+\b/.match(data) | ||||
|         Language["Pod"] | ||||
|       else | ||||
|         Language["Perl"] | ||||
| @@ -383,7 +392,7 @@ module Linguist | ||||
|       if /^\.!|^\.end lit(?:eral)?\b/i.match(data) | ||||
|         Language["RUNOFF"] | ||||
|       elsif /^\.\\" /.match(data) | ||||
|         Language["Groff"] | ||||
|         Language["Roff"] | ||||
|       end | ||||
|     end | ||||
|  | ||||
| @@ -434,10 +443,12 @@ module Linguist | ||||
|     end | ||||
|      | ||||
|     disambiguate ".t" do |data| | ||||
|       if /^\s*%|^\s*var\s+\w+\s*:\s*\w+/.match(data) | ||||
|       if /^\s*%[ \t]+|^\s*var\s+\w+\s*:=\s*\w+/.match(data) | ||||
|         Language["Turing"] | ||||
|       elsif /^\s*use\s+v6\s*;/.match(data) | ||||
|       elsif /^\s*(?:use\s+v6\s*;|\bmodule\b|\b(?:my\s+)?class\b)/.match(data) | ||||
|         Language["Perl6"] | ||||
|       elsif /\buse\s+(?:strict\b|v?5\.)/.match(data) | ||||
|         Language["Perl"] | ||||
|       end | ||||
|     end | ||||
|      | ||||
| @@ -465,5 +476,13 @@ module Linguist | ||||
|         Language["Scilab"] | ||||
|       end | ||||
|     end | ||||
|  | ||||
|     disambiguate ".tsx" do |data| | ||||
|       if /^\s*(import.+(from\s+|require\()['"]react|\/\/\/\s*<reference\s)/.match(data) | ||||
|         Language["TypeScript"] | ||||
|       elsif /^\s*<\?xml\s+version/i.match(data) | ||||
|         Language["XML"] | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
|   | ||||
| @@ -11,6 +11,7 @@ require 'linguist/samples' | ||||
| require 'linguist/file_blob' | ||||
| require 'linguist/blob_helper' | ||||
| require 'linguist/strategy/filename' | ||||
| require 'linguist/strategy/extension' | ||||
| require 'linguist/strategy/modeline' | ||||
| require 'linguist/shebang' | ||||
|  | ||||
| @@ -90,17 +91,6 @@ module Linguist | ||||
|       language | ||||
|     end | ||||
|  | ||||
|     # Public: Detects the Language of the blob. | ||||
|     # | ||||
|     # blob - an object that includes the Linguist `BlobHelper` interface; | ||||
|     #       see Linguist::LazyBlob and Linguist::FileBlob for examples | ||||
|     # | ||||
|     # Returns Language or nil. | ||||
|     def self.detect(blob) | ||||
|       warn "[DEPRECATED] `Linguist::Language.detect` is deprecated. Use `Linguist.detect`. #{caller[0]}" | ||||
|       Linguist.detect(blob) | ||||
|     end | ||||
|  | ||||
|     # Public: Get all Languages | ||||
|     # | ||||
|     # Returns an Array of Languages | ||||
| @@ -140,46 +130,46 @@ module Linguist | ||||
|  | ||||
|     # Public: Look up Languages by filename. | ||||
|     # | ||||
|     # The behaviour of this method recently changed. | ||||
|     # See the second example below. | ||||
|     # | ||||
|     # filename - The path String. | ||||
|     # | ||||
|     # Examples | ||||
|     # | ||||
|     #   Language.find_by_filename('Cakefile') | ||||
|     #   # => [#<Language name="CoffeeScript">] | ||||
|     #   Language.find_by_filename('foo.rb') | ||||
|     #   # => [#<Language name="Ruby">] | ||||
|     #   # => [] | ||||
|     # | ||||
|     # Returns all matching Languages or [] if none were found. | ||||
|     def self.find_by_filename(filename) | ||||
|       basename = File.basename(filename) | ||||
|  | ||||
|       # find the first extension with language definitions | ||||
|       extname = FileBlob.new(filename).extensions.detect do |e| | ||||
|         !@extension_index[e].empty? | ||||
|       end | ||||
|  | ||||
|       (@filename_index[basename] + @extension_index[extname]).compact.uniq | ||||
|       @filename_index[basename] | ||||
|     end | ||||
|  | ||||
|     # Public: Look up Languages by file extension. | ||||
|     # | ||||
|     # extname - The extension String. | ||||
|     # The behaviour of this method recently changed. | ||||
|     # See the second example below. | ||||
|     # | ||||
|     # filename - The path String. | ||||
|     # | ||||
|     # Examples | ||||
|     # | ||||
|     #   Language.find_by_extension('.rb') | ||||
|     #   Language.find_by_extension('dummy.rb') | ||||
|     #   # => [#<Language name="Ruby">] | ||||
|     # | ||||
|     #   Language.find_by_extension('rb') | ||||
|     #   # => [#<Language name="Ruby">] | ||||
|     #   # => [] | ||||
|     # | ||||
|     # Returns all matching Languages or [] if none were found. | ||||
|     def self.find_by_extension(extname) | ||||
|       extname = ".#{extname}" unless extname.start_with?(".") | ||||
|       @extension_index[extname.downcase] | ||||
|     end | ||||
|     def self.find_by_extension(filename) | ||||
|       # find the first extension with language definitions | ||||
|       extname = FileBlob.new(filename.downcase).extensions.detect do |e| | ||||
|         !@extension_index[e].empty? | ||||
|       end | ||||
|  | ||||
|     # DEPRECATED | ||||
|     def self.find_by_shebang(data) | ||||
|       @interpreter_index[Shebang.interpreter(data)] | ||||
|       @extension_index[extname] | ||||
|     end | ||||
|  | ||||
|     # Public: Look up Languages by interpreter. | ||||
| @@ -225,7 +215,14 @@ module Linguist | ||||
|     # Returns the Language or nil if none was found. | ||||
|     def self.[](name) | ||||
|       return nil if name.to_s.empty? | ||||
|       name && (@index[name.downcase] || @index[name.split(',').first.downcase]) | ||||
|  | ||||
|       lang = @index[name.downcase] | ||||
|       return lang if lang | ||||
|  | ||||
|       name = name.split(',').first | ||||
|       return nil if name.to_s.empty? | ||||
|  | ||||
|       @index[name.downcase] | ||||
|     end | ||||
|  | ||||
|     # Public: A List of popular languages | ||||
| @@ -259,18 +256,6 @@ module Linguist | ||||
|       @colors ||= all.select(&:color).sort_by { |lang| lang.name.downcase } | ||||
|     end | ||||
|  | ||||
|     # Public: A List of languages compatible with Ace. | ||||
|     # | ||||
|     # TODO: Remove this method in a 5.x release. Every language now needs an ace_mode | ||||
|     # key, so this function isn't doing anything unique anymore. | ||||
|     # | ||||
|     # Returns an Array of Languages. | ||||
|     def self.ace_modes | ||||
|       warn "This method will be deprecated in a future 5.x release. Every language now has an `ace_mode` set." | ||||
|       warn caller | ||||
|       @ace_modes ||= all.select(&:ace_mode).sort_by { |lang| lang.name.downcase } | ||||
|     end | ||||
|  | ||||
|     # Internal: Initialize a new Language | ||||
|     # | ||||
|     # attributes - A hash of attributes | ||||
| @@ -287,7 +272,7 @@ module Linguist | ||||
|       @color = attributes[:color] | ||||
|  | ||||
|       # Set aliases | ||||
|       @aliases = [default_alias_name] + (attributes[:aliases] || []) | ||||
|       @aliases = [default_alias] + (attributes[:aliases] || []) | ||||
|  | ||||
|       # Load the TextMate scope name or try to guess one | ||||
|       @tm_scope = attributes[:tm_scope] || begin | ||||
| @@ -305,9 +290,6 @@ module Linguist | ||||
|       @codemirror_mime_type = attributes[:codemirror_mime_type] | ||||
|       @wrap = attributes[:wrap] || false | ||||
|  | ||||
|       # Set legacy search term | ||||
|       @search_term = attributes[:search_term] || default_alias_name | ||||
|  | ||||
|       # Set the language_id | ||||
|       @language_id = attributes[:language_id] | ||||
|  | ||||
| @@ -362,17 +344,6 @@ module Linguist | ||||
|     # Returns an Array of String names | ||||
|     attr_reader :aliases | ||||
|  | ||||
|     # Deprecated: Get code search term | ||||
|     # | ||||
|     # Examples | ||||
|     # | ||||
|     #   # => "ruby" | ||||
|     #   # => "python" | ||||
|     #   # => "perl" | ||||
|     # | ||||
|     # Returns the name String | ||||
|     attr_reader :search_term | ||||
|  | ||||
|     # Public: Get language_id (used in GitHub search) | ||||
|     # | ||||
|     # Examples | ||||
| @@ -457,22 +428,6 @@ module Linguist | ||||
|     # Returns the extensions Array | ||||
|     attr_reader :filenames | ||||
|  | ||||
|     # Deprecated: Get primary extension | ||||
|     # | ||||
|     # Defaults to the first extension but can be overridden | ||||
|     # in the languages.yml. | ||||
|     # | ||||
|     # The primary extension can not be nil. Tests should verify this. | ||||
|     # | ||||
|     # This method is only used by app/helpers/gists_helper.rb for creating | ||||
|     # the language dropdown. It really should be using `name` instead. | ||||
|     # Would like to drop primary extension. | ||||
|     # | ||||
|     # Returns the extension String. | ||||
|     def primary_extension | ||||
|       extensions.first | ||||
|     end | ||||
|  | ||||
|     # Public: Get URL escaped name. | ||||
|     # | ||||
|     # Examples | ||||
| @@ -486,12 +441,13 @@ module Linguist | ||||
|       EscapeUtils.escape_url(name).gsub('+', '%20') | ||||
|     end | ||||
|  | ||||
|     # Internal: Get default alias name | ||||
|     # Public: Get default alias name | ||||
|     # | ||||
|     # Returns the alias name String | ||||
|     def default_alias_name | ||||
|     def default_alias | ||||
|       name.downcase.gsub(/\s/, '-') | ||||
|     end | ||||
|     alias_method :default_alias_name, :default_alias | ||||
|  | ||||
|     # Public: Get Language group | ||||
|     # | ||||
| @@ -606,7 +562,6 @@ module Linguist | ||||
|       :wrap              => options['wrap'], | ||||
|       :group_name        => options['group'], | ||||
|       :searchable        => options.fetch('searchable', true), | ||||
|       :search_term       => options['search_term'], | ||||
|       :language_id       => options['language_id'], | ||||
|       :extensions        => Array(options['extensions']), | ||||
|       :interpreters      => options['interpreters'].sort, | ||||
|   | ||||
| @@ -2,7 +2,8 @@ | ||||
| # | ||||
| # type              - Either data, programming, markup, prose, or nil | ||||
| # aliases           - An Array of additional aliases (implicitly | ||||
| #                     includes name.downcase) | ||||
| #                     includes the lowercase name with spaces replaced | ||||
| #                     by dashes) | ||||
| # ace_mode          - A String name of the Ace Mode used for highlighting whenever | ||||
| #                     a file is edited. This must match one of the filenames in http://git.io/3XO_Cg. | ||||
| #                     Use "text" if a mode does not exist. | ||||
| @@ -14,8 +15,6 @@ | ||||
| #                     listed alphabetically) | ||||
| # interpreters      - An Array of associated interpreters | ||||
| # searchable        - Boolean flag to enable searching (defaults to true) | ||||
| # search_term       - Deprecated: Some languages may be indexed under a | ||||
| #                     different alias. Avoid defining new exceptions. | ||||
| # language_id       - Integer used as a language-name-independent indexed field so that we can rename | ||||
| #                     languages in Linguist without reindexing all the code on GitHub. Must not be | ||||
| #                     changed for existing languages without the explicit permission of GitHub staff. | ||||
| @@ -121,7 +120,6 @@ ASN.1: | ||||
| ASP: | ||||
|   type: programming | ||||
|   color: "#6a40fd" | ||||
|   search_term: aspx-vb | ||||
|   tm_scope: text.html.asp | ||||
|   aliases: | ||||
|   - aspx | ||||
| @@ -154,7 +152,6 @@ ActionScript: | ||||
|   type: programming | ||||
|   tm_scope: source.actionscript.3 | ||||
|   color: "#882B0F" | ||||
|   search_term: as3 | ||||
|   aliases: | ||||
|   - actionscript 3 | ||||
|   - actionscript3 | ||||
| @@ -291,7 +288,6 @@ AspectJ: | ||||
| Assembly: | ||||
|   type: programming | ||||
|   color: "#6E4C13" | ||||
|   search_term: nasm | ||||
|   aliases: | ||||
|   - nasm | ||||
|   extensions: | ||||
| @@ -349,7 +345,6 @@ Awk: | ||||
|   language_id: 28 | ||||
| Batchfile: | ||||
|   type: programming | ||||
|   search_term: bat | ||||
|   aliases: | ||||
|   - bat | ||||
|   - batch | ||||
| @@ -474,7 +469,6 @@ C#: | ||||
|   codemirror_mode: clike | ||||
|   codemirror_mime_type: text/x-csharp | ||||
|   tm_scope: source.cs | ||||
|   search_term: csharp | ||||
|   color: "#178600" | ||||
|   aliases: | ||||
|   - csharp | ||||
| @@ -489,7 +483,6 @@ C++: | ||||
|   ace_mode: c_cpp | ||||
|   codemirror_mode: clike | ||||
|   codemirror_mime_type: text/x-c++src | ||||
|   search_term: cpp | ||||
|   color: "#f34b7d" | ||||
|   aliases: | ||||
|   - cpp | ||||
| @@ -507,6 +500,7 @@ C++: | ||||
|   - ".inc" | ||||
|   - ".inl" | ||||
|   - ".ipp" | ||||
|   - ".re" | ||||
|   - ".tcc" | ||||
|   - ".tpp" | ||||
|   language_id: 43 | ||||
| @@ -719,7 +713,6 @@ ColdFusion: | ||||
|   type: programming | ||||
|   ace_mode: coldfusion | ||||
|   color: "#ed2cd6" | ||||
|   search_term: cfm | ||||
|   aliases: | ||||
|   - cfm | ||||
|   - cfml | ||||
| @@ -733,7 +726,6 @@ ColdFusion CFC: | ||||
|   type: programming | ||||
|   group: ColdFusion | ||||
|   ace_mode: coldfusion | ||||
|   search_term: cfc | ||||
|   aliases: | ||||
|   - cfc | ||||
|   extensions: | ||||
| @@ -854,16 +846,6 @@ Csound Score: | ||||
|   tm_scope: source.csound-score | ||||
|   ace_mode: text | ||||
|   language_id: 75 | ||||
| Cucumber: | ||||
|   type: programming | ||||
|   extensions: | ||||
|   - ".feature" | ||||
|   tm_scope: text.gherkin.feature | ||||
|   aliases: | ||||
|   - gherkin | ||||
|   ace_mode: text | ||||
|   color: "#5B2063" | ||||
|   language_id: 76 | ||||
| Cuda: | ||||
|   type: programming | ||||
|   extensions: | ||||
| @@ -956,7 +938,6 @@ DTrace: | ||||
|   language_id: 85 | ||||
| Darcs Patch: | ||||
|   type: data | ||||
|   search_term: dpatch | ||||
|   aliases: | ||||
|   - dpatch | ||||
|   extensions: | ||||
| @@ -1141,6 +1122,7 @@ Emacs Lisp: | ||||
|   - ".gnus" | ||||
|   - ".spacemacs" | ||||
|   - ".viper" | ||||
|   - Cask | ||||
|   - Project.ede | ||||
|   - _emacs | ||||
|   - abbrev_defs | ||||
| @@ -1175,6 +1157,7 @@ Erlang: | ||||
|   - ".xrl" | ||||
|   - ".yrl" | ||||
|   filenames: | ||||
|   - Emakefile | ||||
|   - rebar.config | ||||
|   - rebar.config.lock | ||||
|   - rebar.lock | ||||
| @@ -1187,7 +1170,6 @@ Erlang: | ||||
| F#: | ||||
|   type: programming | ||||
|   color: "#b845fc" | ||||
|   search_term: fsharp | ||||
|   aliases: | ||||
|   - fsharp | ||||
|   extensions: | ||||
| @@ -1208,23 +1190,6 @@ FLUX: | ||||
|   tm_scope: none | ||||
|   ace_mode: text | ||||
|   language_id: 106 | ||||
| FORTRAN: | ||||
|   type: programming | ||||
|   color: "#4d41b1" | ||||
|   extensions: | ||||
|   - ".f90" | ||||
|   - ".f" | ||||
|   - ".f03" | ||||
|   - ".f08" | ||||
|   - ".f77" | ||||
|   - ".f95" | ||||
|   - ".for" | ||||
|   - ".fpp" | ||||
|   tm_scope: source.fortran.modern | ||||
|   ace_mode: text | ||||
|   codemirror_mode: fortran | ||||
|   codemirror_mime_type: text/x-fortran | ||||
|   language_id: 107 | ||||
| Factor: | ||||
|   type: programming | ||||
|   color: "#636746" | ||||
| @@ -1294,6 +1259,23 @@ Forth: | ||||
|   codemirror_mode: forth | ||||
|   codemirror_mime_type: text/x-forth | ||||
|   language_id: 114 | ||||
| Fortran: | ||||
|   type: programming | ||||
|   color: "#4d41b1" | ||||
|   extensions: | ||||
|   - ".f90" | ||||
|   - ".f" | ||||
|   - ".f03" | ||||
|   - ".f08" | ||||
|   - ".f77" | ||||
|   - ".f95" | ||||
|   - ".for" | ||||
|   - ".fpp" | ||||
|   tm_scope: source.fortran.modern | ||||
|   ace_mode: text | ||||
|   codemirror_mode: fortran | ||||
|   codemirror_mime_type: text/x-fortran | ||||
|   language_id: 107 | ||||
| FreeMarker: | ||||
|   type: programming | ||||
|   color: "#0050b2" | ||||
| @@ -1339,15 +1321,6 @@ GAP: | ||||
|   tm_scope: source.gap | ||||
|   ace_mode: text | ||||
|   language_id: 119 | ||||
| GAS: | ||||
|   type: programming | ||||
|   group: Assembly | ||||
|   extensions: | ||||
|   - ".s" | ||||
|   - ".ms" | ||||
|   tm_scope: source.assembly | ||||
|   ace_mode: assembly_x86 | ||||
|   language_id: 120 | ||||
| GCC Machine Description: | ||||
|   type: programming | ||||
|   extensions: | ||||
| @@ -1415,6 +1388,14 @@ Game Maker Language: | ||||
|   codemirror_mode: clike | ||||
|   codemirror_mime_type: text/x-c++src | ||||
|   language_id: 125 | ||||
| Genie: | ||||
|   type: programming | ||||
|   ace_mode: text | ||||
|   extensions: | ||||
|   - ".gs" | ||||
|   color: "#fb855d" | ||||
|   tm_scope: none | ||||
|   language_id: 792408528 | ||||
| Genshi: | ||||
|   type: programming | ||||
|   extensions: | ||||
| @@ -1449,7 +1430,6 @@ Gentoo Eclass: | ||||
|   language_id: 128 | ||||
| Gettext Catalog: | ||||
|   type: prose | ||||
|   search_term: pot | ||||
|   searchable: false | ||||
|   aliases: | ||||
|   - pot | ||||
| @@ -1459,6 +1439,16 @@ Gettext Catalog: | ||||
|   tm_scope: source.po | ||||
|   ace_mode: text | ||||
|   language_id: 129 | ||||
| Gherkin: | ||||
|   type: programming | ||||
|   extensions: | ||||
|   - ".feature" | ||||
|   tm_scope: text.gherkin.feature | ||||
|   aliases: | ||||
|   - cucumber | ||||
|   ace_mode: text | ||||
|   color: "#5B2063" | ||||
|   language_id: 76 | ||||
| Glyph: | ||||
|   type: programming | ||||
|   color: "#e4cc98" | ||||
| @@ -1562,45 +1552,6 @@ Graphviz (DOT): | ||||
|   - ".gv" | ||||
|   ace_mode: text | ||||
|   language_id: 140 | ||||
| Groff: | ||||
|   type: markup | ||||
|   color: "#ecdebe" | ||||
|   extensions: | ||||
|   - ".man" | ||||
|   - ".1" | ||||
|   - ".1in" | ||||
|   - ".1m" | ||||
|   - ".1x" | ||||
|   - ".2" | ||||
|   - ".3" | ||||
|   - ".3in" | ||||
|   - ".3m" | ||||
|   - ".3qt" | ||||
|   - ".3x" | ||||
|   - ".4" | ||||
|   - ".5" | ||||
|   - ".6" | ||||
|   - ".7" | ||||
|   - ".8" | ||||
|   - ".9" | ||||
|   - ".l" | ||||
|   - ".me" | ||||
|   - ".ms" | ||||
|   - ".n" | ||||
|   - ".rno" | ||||
|   - ".roff" | ||||
|   - ".tmac" | ||||
|   filenames: | ||||
|   - mmn | ||||
|   - mmt | ||||
|   tm_scope: text.roff | ||||
|   aliases: | ||||
|   - nroff | ||||
|   - troff | ||||
|   ace_mode: text | ||||
|   codemirror_mode: troff | ||||
|   codemirror_mime_type: text/troff | ||||
|   language_id: 141 | ||||
| Groovy: | ||||
|   type: programming | ||||
|   ace_mode: groovy | ||||
| @@ -1638,17 +1589,18 @@ HCL: | ||||
|   ace_mode: ruby | ||||
|   codemirror_mode: ruby | ||||
|   codemirror_mime_type: text/x-ruby | ||||
|   tm_scope: source.ruby | ||||
|   tm_scope: source.terraform | ||||
|   language_id: 144 | ||||
| HLSL: | ||||
|   type: programming | ||||
|   extensions: | ||||
|   - ".hlsl" | ||||
|   - ".cginc" | ||||
|   - ".fx" | ||||
|   - ".fxh" | ||||
|   - ".hlsli" | ||||
|   ace_mode: text | ||||
|   tm_scope: none | ||||
|   tm_scope: source.hlsl | ||||
|   language_id: 145 | ||||
| HTML: | ||||
|   type: markup | ||||
| @@ -1656,7 +1608,7 @@ HTML: | ||||
|   ace_mode: html | ||||
|   codemirror_mode: htmlmixed | ||||
|   codemirror_mime_type: text/html | ||||
|   color: "#e44b23" | ||||
|   color: "#e34c26" | ||||
|   aliases: | ||||
|   - xhtml | ||||
|   extensions: | ||||
| @@ -1815,7 +1767,7 @@ Hy: | ||||
|   - ".hy" | ||||
|   aliases: | ||||
|   - hylang | ||||
|   tm_scope: source.hy | ||||
|   tm_scope: none | ||||
|   language_id: 159 | ||||
| HyPhy: | ||||
|   type: programming | ||||
| @@ -1861,7 +1813,6 @@ INI: | ||||
|   language_id: 163 | ||||
| IRC log: | ||||
|   type: data | ||||
|   search_term: irc | ||||
|   aliases: | ||||
|   - irc | ||||
|   - irc logs | ||||
| @@ -2011,17 +1962,6 @@ JSX: | ||||
|   codemirror_mode: jsx | ||||
|   codemirror_mime_type: text/jsx | ||||
|   language_id: 178 | ||||
| Jade: | ||||
|   group: HTML | ||||
|   type: markup | ||||
|   extensions: | ||||
|   - ".jade" | ||||
|   - ".pug" | ||||
|   tm_scope: text.jade | ||||
|   ace_mode: jade | ||||
|   codemirror_mode: pug | ||||
|   codemirror_mime_type: text/x-pug | ||||
|   language_id: 179 | ||||
| Jasmin: | ||||
|   type: programming | ||||
|   ace_mode: java | ||||
| @@ -2041,7 +1981,6 @@ Java: | ||||
| Java Server Pages: | ||||
|   type: programming | ||||
|   group: Java | ||||
|   search_term: jsp | ||||
|   aliases: | ||||
|   - jsp | ||||
|   extensions: | ||||
| @@ -2086,6 +2025,33 @@ JavaScript: | ||||
|   interpreters: | ||||
|   - node | ||||
|   language_id: 183 | ||||
| Jison: | ||||
|   type: programming | ||||
|   group: Yacc | ||||
|   extensions: | ||||
|   - ".jison" | ||||
|   tm_scope: source.jison | ||||
|   ace_mode: text | ||||
|   language_id: 284531423 | ||||
| Jison Lex: | ||||
|   type: programming | ||||
|   group: Lex | ||||
|   extensions: | ||||
|   - ".jisonlex" | ||||
|   tm_scope: source.jisonlex | ||||
|   ace_mode: text | ||||
|   language_id: 406395330 | ||||
| Jolie: | ||||
|   type: programming | ||||
|   extensions: | ||||
|   - ".ol" | ||||
|   - ".iol" | ||||
|   interpreters: | ||||
|   - jolie | ||||
|   color: "#843179" | ||||
|   ace_mode: text | ||||
|   tm_scope: source.jolie | ||||
|   language_id: 998078858 | ||||
| Julia: | ||||
|   type: programming | ||||
|   extensions: | ||||
| @@ -2298,7 +2264,6 @@ Literate CoffeeScript: | ||||
|   group: CoffeeScript | ||||
|   ace_mode: text | ||||
|   wrap: true | ||||
|   search_term: litcoffee | ||||
|   aliases: | ||||
|   - litcoffee | ||||
|   extensions: | ||||
| @@ -2307,7 +2272,6 @@ Literate CoffeeScript: | ||||
| Literate Haskell: | ||||
|   type: programming | ||||
|   group: Haskell | ||||
|   search_term: lhs | ||||
|   aliases: | ||||
|   - lhaskell | ||||
|   - lhs | ||||
| @@ -2357,6 +2321,8 @@ LookML: | ||||
|   color: "#652B81" | ||||
|   extensions: | ||||
|   - ".lookml" | ||||
|   - ".model.lkml" | ||||
|   - ".view.lkml" | ||||
|   tm_scope: source.yaml | ||||
|   language_id: 211 | ||||
| LoomScript: | ||||
| @@ -2503,6 +2469,8 @@ Mako: | ||||
|   language_id: 221 | ||||
| Markdown: | ||||
|   type: prose | ||||
|   aliases: | ||||
|   - pandoc | ||||
|   ace_mode: markdown | ||||
|   codemirror_mode: gfm | ||||
|   codemirror_mime_type: text/x-gfm | ||||
| @@ -2510,12 +2478,27 @@ Markdown: | ||||
|   extensions: | ||||
|   - ".md" | ||||
|   - ".markdown" | ||||
|   - ".mdown" | ||||
|   - ".mdwn" | ||||
|   - ".mkd" | ||||
|   - ".mkdn" | ||||
|   - ".mkdown" | ||||
|   - ".ron" | ||||
|   - ".workbook" | ||||
|   tm_scope: source.gfm | ||||
|   language_id: 222 | ||||
| Marko: | ||||
|   group: HTML | ||||
|   type: markup | ||||
|   tm_scope: text.marko | ||||
|   extensions: | ||||
|   - ".marko" | ||||
|   aliases: | ||||
|   - markojs | ||||
|   ace_mode: text | ||||
|   codemirror_mode: htmlmixed | ||||
|   codemirror_mime_type: text/html | ||||
|   language_id: 932782397 | ||||
| Mask: | ||||
|   type: markup | ||||
|   color: "#f97732" | ||||
| @@ -2569,7 +2552,6 @@ Max: | ||||
|   aliases: | ||||
|   - max/msp | ||||
|   - maxmsp | ||||
|   search_term: max/msp | ||||
|   extensions: | ||||
|   - ".maxpat" | ||||
|   - ".maxhelp" | ||||
| @@ -2601,6 +2583,15 @@ Mercury: | ||||
|   - ".moo" | ||||
|   tm_scope: source.mercury | ||||
|   language_id: 229 | ||||
| Meson: | ||||
|   type: programming | ||||
|   color: "#007800" | ||||
|   filenames: | ||||
|   - meson.build | ||||
|   - meson_options.txt | ||||
|   tm_scope: source.meson | ||||
|   ace_mode: text | ||||
|   language_id: 799141244 | ||||
| Metal: | ||||
|   type: programming | ||||
|   color: "#8f14e9" | ||||
| @@ -2621,7 +2612,6 @@ MiniD: | ||||
|   language_id: 231 | ||||
| Mirah: | ||||
|   type: programming | ||||
|   search_term: mirah | ||||
|   color: "#c7a938" | ||||
|   extensions: | ||||
|   - ".druby" | ||||
| @@ -2778,7 +2768,7 @@ Nginx: | ||||
|   codemirror_mime_type: text/x-nginx-conf | ||||
|   color: "#9469E9" | ||||
|   language_id: 248 | ||||
| Nimrod: | ||||
| Nim: | ||||
|   type: programming | ||||
|   color: "#37775b" | ||||
|   extensions: | ||||
| @@ -2970,9 +2960,9 @@ OpenSCAD: | ||||
|   type: programming | ||||
|   extensions: | ||||
|   - ".scad" | ||||
|   tm_scope: none | ||||
|   tm_scope: source.scad | ||||
|   ace_mode: scad | ||||
|   language_id: 431 | ||||
|   language_id: 266 | ||||
| OpenType Feature File: | ||||
|   type: data | ||||
|   aliases: | ||||
| @@ -2981,7 +2971,7 @@ OpenType Feature File: | ||||
|   - ".fea" | ||||
|   tm_scope: source.opentype | ||||
|   ace_mode: text | ||||
|   language_id: 266 | ||||
|   language_id: 374317347 | ||||
| Org: | ||||
|   type: prose | ||||
|   wrap: true | ||||
| @@ -3017,6 +3007,14 @@ Oz: | ||||
|   codemirror_mode: oz | ||||
|   codemirror_mime_type: text/x-oz | ||||
|   language_id: 270 | ||||
| P4: | ||||
|   type: programming | ||||
|   color: "#7055b5" | ||||
|   extensions: | ||||
|   - ".p4" | ||||
|   tm_scope: source.p4 | ||||
|   ace_mode: text | ||||
|   language_id: 348895984 | ||||
| PAWN: | ||||
|   type: programming | ||||
|   color: "#dbb284" | ||||
| @@ -3062,12 +3060,21 @@ PLSQL: | ||||
|   color: "#dad8d8" | ||||
|   extensions: | ||||
|   - ".pls" | ||||
|   - ".bdy" | ||||
|   - ".ddl" | ||||
|   - ".fnc" | ||||
|   - ".pck" | ||||
|   - ".pkb" | ||||
|   - ".pks" | ||||
|   - ".plb" | ||||
|   - ".plsql" | ||||
|   - ".prc" | ||||
|   - ".spc" | ||||
|   - ".sql" | ||||
|   - ".tpb" | ||||
|   - ".tps" | ||||
|   - ".trg" | ||||
|   - ".vw" | ||||
|   language_id: 273 | ||||
| PLpgSQL: | ||||
|   type: programming | ||||
| @@ -3201,7 +3208,7 @@ Perl6: | ||||
|   language_id: 283 | ||||
| Pic: | ||||
|   type: markup | ||||
|   group: Groff | ||||
|   group: Roff | ||||
|   tm_scope: source.pic | ||||
|   extensions: | ||||
|   - ".pic" | ||||
| @@ -3294,6 +3301,7 @@ PowerBuilder: | ||||
|   language_id: 292 | ||||
| PowerShell: | ||||
|   type: programming | ||||
|   color: "#012456" | ||||
|   ace_mode: powershell | ||||
|   codemirror_mode: powershell | ||||
|   codemirror_mime_type: application/x-powershell | ||||
| @@ -3355,6 +3363,17 @@ Public Key: | ||||
|   codemirror_mode: asciiarmor | ||||
|   codemirror_mime_type: application/pgp | ||||
|   language_id: 298 | ||||
| Pug: | ||||
|   group: HTML | ||||
|   type: markup | ||||
|   extensions: | ||||
|   - ".jade" | ||||
|   - ".pug" | ||||
|   tm_scope: text.jade | ||||
|   ace_mode: jade | ||||
|   codemirror_mode: pug | ||||
|   codemirror_mime_type: text/x-pug | ||||
|   language_id: 179 | ||||
| Puppet: | ||||
|   type: programming | ||||
|   color: "#302B6D" | ||||
| @@ -3410,6 +3429,7 @@ Python: | ||||
|   - ".lmi" | ||||
|   - ".py3" | ||||
|   - ".pyde" | ||||
|   - ".pyi" | ||||
|   - ".pyp" | ||||
|   - ".pyt" | ||||
|   - ".pyw" | ||||
| @@ -3419,12 +3439,13 @@ Python: | ||||
|   - ".wsgi" | ||||
|   - ".xpy" | ||||
|   filenames: | ||||
|   - .gclient | ||||
|   - ".gclient" | ||||
|   - BUCK | ||||
|   - BUILD | ||||
|   - SConscript | ||||
|   - SConstruct | ||||
|   - Snakefile | ||||
|   - WORKSPACE | ||||
|   - wscript | ||||
|   interpreters: | ||||
|   - python | ||||
| @@ -3587,7 +3608,7 @@ Racket: | ||||
|   tm_scope: source.racket | ||||
|   ace_mode: lisp | ||||
|   language_id: 316 | ||||
| Ragel in Ruby Host: | ||||
| Ragel: | ||||
|   type: programming | ||||
|   color: "#9d5200" | ||||
|   extensions: | ||||
| @@ -3608,7 +3629,6 @@ Rascal: | ||||
|   language_id: 173616037 | ||||
| Raw token data: | ||||
|   type: data | ||||
|   search_term: raw | ||||
|   aliases: | ||||
|   - raw | ||||
|   extensions: | ||||
| @@ -3616,6 +3636,19 @@ Raw token data: | ||||
|   tm_scope: none | ||||
|   ace_mode: text | ||||
|   language_id: 318 | ||||
| Reason: | ||||
|   type: programming | ||||
|   group: OCaml | ||||
|   ace_mode: rust | ||||
|   codemirror_mode: rust | ||||
|   codemirror_mime_type: text/x-rustsrc | ||||
|   extensions: | ||||
|   - ".re" | ||||
|   - ".rei" | ||||
|   interpreters: | ||||
|   - ocaml | ||||
|   tm_scope: source.reason | ||||
|   language_id: 869538413 | ||||
| Rebol: | ||||
|   type: programming | ||||
|   color: "#358a5b" | ||||
| @@ -3646,6 +3679,17 @@ Redcode: | ||||
|   tm_scope: none | ||||
|   ace_mode: text | ||||
|   language_id: 321 | ||||
| Regular Expression: | ||||
|   type: data | ||||
|   extensions: | ||||
|   - ".regexp" | ||||
|   - ".regex" | ||||
|   aliases: | ||||
|   - regexp | ||||
|   - regex | ||||
|   ace_mode: text | ||||
|   tm_scope: source.regexp | ||||
|   language_id: 363378884 | ||||
| Ren'Py: | ||||
|   type: programming | ||||
|   aliases: | ||||
| @@ -3671,6 +3715,44 @@ RobotFramework: | ||||
|   tm_scope: text.robot | ||||
|   ace_mode: text | ||||
|   language_id: 324 | ||||
| Roff: | ||||
|   type: markup | ||||
|   color: "#ecdebe" | ||||
|   extensions: | ||||
|   - ".man" | ||||
|   - ".1" | ||||
|   - ".1in" | ||||
|   - ".1m" | ||||
|   - ".1x" | ||||
|   - ".2" | ||||
|   - ".3" | ||||
|   - ".3in" | ||||
|   - ".3m" | ||||
|   - ".3qt" | ||||
|   - ".3x" | ||||
|   - ".4" | ||||
|   - ".5" | ||||
|   - ".6" | ||||
|   - ".7" | ||||
|   - ".8" | ||||
|   - ".9" | ||||
|   - ".l" | ||||
|   - ".me" | ||||
|   - ".ms" | ||||
|   - ".n" | ||||
|   - ".rno" | ||||
|   - ".roff" | ||||
|   - ".tmac" | ||||
|   filenames: | ||||
|   - mmn | ||||
|   - mmt | ||||
|   tm_scope: text.roff | ||||
|   aliases: | ||||
|   - nroff | ||||
|   ace_mode: text | ||||
|   codemirror_mode: troff | ||||
|   codemirror_mime_type: text/troff | ||||
|   language_id: 141 | ||||
| Rouge: | ||||
|   type: programming | ||||
|   ace_mode: clojure | ||||
| @@ -3696,10 +3778,10 @@ Ruby: | ||||
|   extensions: | ||||
|   - ".rb" | ||||
|   - ".builder" | ||||
|   - ".eye" | ||||
|   - ".fcgi" | ||||
|   - ".gemspec" | ||||
|   - ".god" | ||||
|   - ".irbrc" | ||||
|   - ".jbuilder" | ||||
|   - ".mspec" | ||||
|   - ".pluginspec" | ||||
| @@ -3721,6 +3803,7 @@ Ruby: | ||||
|   - jruby | ||||
|   - rbx | ||||
|   filenames: | ||||
|   - ".irbrc" | ||||
|   - ".pryrc" | ||||
|   - Appraisals | ||||
|   - Berksfile | ||||
| @@ -3736,6 +3819,7 @@ Ruby: | ||||
|   - Mavenfile | ||||
|   - Podfile | ||||
|   - Puppetfile | ||||
|   - Rakefile | ||||
|   - Snapfile | ||||
|   - Thorfile | ||||
|   - Vagrantfile | ||||
| @@ -3820,6 +3904,7 @@ SQL: | ||||
|   - ".cql" | ||||
|   - ".ddl" | ||||
|   - ".inc" | ||||
|   - ".mysql" | ||||
|   - ".prc" | ||||
|   - ".tab" | ||||
|   - ".udf" | ||||
| @@ -3953,9 +4038,15 @@ Self: | ||||
|   tm_scope: none | ||||
|   ace_mode: text | ||||
|   language_id: 345 | ||||
| ShaderLab: | ||||
|   type: programming | ||||
|   extensions: | ||||
|   - ".shader" | ||||
|   ace_mode: text | ||||
|   tm_scope: source.shaderlab | ||||
|   language_id: 664257356 | ||||
| Shell: | ||||
|   type: programming | ||||
|   search_term: bash | ||||
|   color: "#89e051" | ||||
|   aliases: | ||||
|   - sh | ||||
| @@ -4067,14 +4158,14 @@ SourcePawn: | ||||
|   - ".sma" | ||||
|   tm_scope: source.sp | ||||
|   ace_mode: text | ||||
|   language_id: 432 | ||||
|   language_id: 354 | ||||
| Spline Font Database: | ||||
|   type: data | ||||
|   extensions: | ||||
|   - ".sfd" | ||||
|   tm_scope: text.sfd | ||||
|   ace_mode: yaml | ||||
|   language_id: 354 | ||||
|   language_id: 767169629 | ||||
| Squirrel: | ||||
|   type: programming | ||||
|   color: "#800000" | ||||
| @@ -4305,12 +4396,16 @@ Text: | ||||
|   - ".no" | ||||
|   filenames: | ||||
|   - COPYING | ||||
|   - COPYRIGHT.regex | ||||
|   - FONTLOG | ||||
|   - INSTALL | ||||
|   - INSTALL.mysql | ||||
|   - LICENSE | ||||
|   - LICENSE.mysql | ||||
|   - NEWS | ||||
|   - README.1ST | ||||
|   - README.me | ||||
|   - README.mysql | ||||
|   - click.me | ||||
|   - delete.me | ||||
|   - keep.me | ||||
| @@ -4401,6 +4496,15 @@ Unity3D Asset: | ||||
|   - ".unity" | ||||
|   tm_scope: source.yaml | ||||
|   language_id: 380 | ||||
| Unix Assembly: | ||||
|   type: programming | ||||
|   group: Assembly | ||||
|   extensions: | ||||
|   - ".s" | ||||
|   - ".ms" | ||||
|   tm_scope: source.assembly | ||||
|   ace_mode: assembly_x86 | ||||
|   language_id: 120 | ||||
| Uno: | ||||
|   type: programming | ||||
|   extensions: | ||||
| @@ -4473,13 +4577,13 @@ Verilog: | ||||
|   codemirror_mode: verilog | ||||
|   codemirror_mime_type: text/x-verilog | ||||
|   language_id: 387 | ||||
| VimL: | ||||
| Vim script: | ||||
|   type: programming | ||||
|   color: "#199f4b" | ||||
|   search_term: vim | ||||
|   tm_scope: source.viml | ||||
|   aliases: | ||||
|   - vim | ||||
|   - viml | ||||
|   - nvim | ||||
|   extensions: | ||||
|   - ".vim" | ||||
| @@ -4588,6 +4692,15 @@ XC: | ||||
|   codemirror_mode: clike | ||||
|   codemirror_mime_type: text/x-csrc | ||||
|   language_id: 398 | ||||
| XCompose: | ||||
|   type: data | ||||
|   filenames: | ||||
|   - ".XCompose" | ||||
|   - XCompose | ||||
|   - xcompose | ||||
|   tm_scope: config.xcompose | ||||
|   ace_mode: text | ||||
|   language_id: 225167241 | ||||
| XML: | ||||
|   type: data | ||||
|   ace_mode: xml | ||||
| @@ -4599,6 +4712,8 @@ XML: | ||||
|   - wsdl | ||||
|   extensions: | ||||
|   - ".xml" | ||||
|   - ".adml" | ||||
|   - ".admx" | ||||
|   - ".ant" | ||||
|   - ".axml" | ||||
|   - ".builds" | ||||
| @@ -4626,6 +4741,7 @@ XML: | ||||
|   - ".kml" | ||||
|   - ".launch" | ||||
|   - ".mdpolicy" | ||||
|   - ".mjml" | ||||
|   - ".mm" | ||||
|   - ".mod" | ||||
|   - ".mxml" | ||||
| @@ -4664,8 +4780,11 @@ XML: | ||||
|   - ".ux" | ||||
|   - ".vbproj" | ||||
|   - ".vcxproj" | ||||
|   - ".vsixmanifest" | ||||
|   - ".vssettings" | ||||
|   - ".vstemplate" | ||||
|   - ".vxml" | ||||
|   - ".wixproj" | ||||
|   - ".wsdl" | ||||
|   - ".wsf" | ||||
|   - ".wxi" | ||||
| @@ -4781,6 +4900,7 @@ YAML: | ||||
|   - ".syntax" | ||||
|   - ".yaml" | ||||
|   - ".yaml-tmlanguage" | ||||
|   - ".yml.mysql" | ||||
|   filenames: | ||||
|   - ".clang-format" | ||||
|   ace_mode: yaml | ||||
| @@ -4832,7 +4952,6 @@ desktop: | ||||
| eC: | ||||
|   type: programming | ||||
|   color: "#913960" | ||||
|   search_term: ec | ||||
|   extensions: | ||||
|   - ".ec" | ||||
|   - ".eh" | ||||
| @@ -4882,7 +5001,6 @@ ooc: | ||||
| reStructuredText: | ||||
|   type: prose | ||||
|   wrap: true | ||||
|   search_term: rst | ||||
|   aliases: | ||||
|   - rst | ||||
|   extensions: | ||||
|   | ||||
| @@ -26,4 +26,4 @@ | ||||
| - Shell | ||||
| - Swift | ||||
| - TeX | ||||
| - VimL | ||||
| - Vim script | ||||
|   | ||||
							
								
								
									
										10
									
								
								lib/linguist/strategy/extension.rb
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										10
									
								
								lib/linguist/strategy/extension.rb
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,10 @@ | ||||
| module Linguist | ||||
|   module Strategy | ||||
|     # Detects language based on extension | ||||
|     class Extension | ||||
|       def self.call(blob, _) | ||||
|         Language.find_by_extension(blob.name.to_s) | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| end | ||||
| @@ -1,9 +1,10 @@ | ||||
| module Linguist | ||||
|   module Strategy | ||||
|     # Detects language based on filename and/or extension | ||||
|     # Detects language based on filename | ||||
|     class Filename | ||||
|       def self.call(blob, _) | ||||
|         Language.find_by_filename(blob.name.to_s) | ||||
|         name = blob.name.to_s | ||||
|         Language.find_by_filename(name) | ||||
|       end | ||||
|     end | ||||
|   end | ||||
|   | ||||
| @@ -238,6 +238,12 @@ | ||||
| # BuddyBuild | ||||
| - BuddyBuildSDK.framework/ | ||||
|  | ||||
| # Realm | ||||
| - Realm.framework | ||||
|  | ||||
| # RealmSwift | ||||
| - RealmSwift.framework | ||||
|  | ||||
| # git config files | ||||
| - gitattributes$ | ||||
| - gitignore$ | ||||
|   | ||||
| @@ -1,3 +1,3 @@ | ||||
| module Linguist | ||||
|   VERSION = "4.8.18" | ||||
|   VERSION = "5.0.9" | ||||
| end | ||||
|   | ||||
							
								
								
									
										46
									
								
								samples/C++/bug1163046.--skeleton.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										46
									
								
								samples/C++/bug1163046.--skeleton.re
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,46 @@ | ||||
| #include <iostream> | ||||
|  | ||||
| #define YYCTYPE unsigned char | ||||
| #define YYCURSOR cursor | ||||
| #define YYLIMIT cursor | ||||
| #define YYMARKER marker | ||||
| #define YYFILL(n) | ||||
|  | ||||
| bool scan(const char *text) | ||||
| { | ||||
| 	YYCTYPE *start = (YYCTYPE *)text; | ||||
| 	YYCTYPE *cursor = (YYCTYPE *)text; | ||||
| 	YYCTYPE *marker = (YYCTYPE *)text; | ||||
| next: | ||||
| 	YYCTYPE *token = cursor; | ||||
| /*!re2c | ||||
| '(This file must be converted with BinHex 4.0)' | ||||
| 	{ | ||||
| 		if (token == start || *(token - 1) == '\n') | ||||
| 		return true; else goto next; | ||||
| 	} | ||||
| [\001-\377] | ||||
| 	{ goto next; } | ||||
| [\000] | ||||
| 	{ return false; } | ||||
| */ | ||||
| 	return false; | ||||
| } | ||||
|  | ||||
| #define do_scan(str, expect) \ | ||||
| 	res = scan(str) == expect ? 0 : 1; \ | ||||
| 	std::cerr << str << "\t-\t" << (res ? "fail" : "ok") << std::endl; \ | ||||
| 	result += res | ||||
|  | ||||
| /*!max:re2c */ | ||||
|  | ||||
| int main(int,void**) | ||||
| { | ||||
| 	int res, result = 0; | ||||
| 	do_scan("(This file must be converted with BinHex 4.0)", 1); | ||||
| 	do_scan("x(This file must be converted with BinHex 4.0)", 0); | ||||
| 	do_scan("(This file must be converted with BinHex 4.0)x", 1); | ||||
| 	do_scan("x(This file must be converted with BinHex 4.0)x", 0); | ||||
| 	 | ||||
| 	return result; | ||||
| } | ||||
							
								
								
									
										239
									
								
								samples/C++/cnokw.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										239
									
								
								samples/C++/cnokw.re
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,239 @@ | ||||
| #include <stdlib.h> | ||||
| #include <stdio.h> | ||||
| #include <string.h> | ||||
|  | ||||
| #define	ADDEQ	257 | ||||
| #define	ANDAND	258 | ||||
| #define	ANDEQ	259 | ||||
| #define	ARRAY	260 | ||||
| #define	ASM	261 | ||||
| #define	AUTO	262 | ||||
| #define	BREAK	263 | ||||
| #define	CASE	264 | ||||
| #define	CHAR	265 | ||||
| #define	CONST	266 | ||||
| #define	CONTINUE	267 | ||||
| #define	DECR	268 | ||||
| #define	DEFAULT	269 | ||||
| #define	DEREF	270 | ||||
| #define	DIVEQ	271 | ||||
| #define	DO	272 | ||||
| #define	DOUBLE	273 | ||||
| #define	ELLIPSIS	274 | ||||
| #define	ELSE	275 | ||||
| #define	ENUM	276 | ||||
| #define	EQL	277 | ||||
| #define	EXTERN	278 | ||||
| #define	FCON	279 | ||||
| #define	FLOAT	280 | ||||
| #define	FOR	281 | ||||
| #define	FUNCTION	282 | ||||
| #define	GEQ	283 | ||||
| #define	GOTO	284 | ||||
| #define	ICON	285 | ||||
| #define	ID	286 | ||||
| #define	IF	287 | ||||
| #define	INCR	288 | ||||
| #define	INT	289 | ||||
| #define	LEQ	290 | ||||
| #define	LONG	291 | ||||
| #define	LSHIFT	292 | ||||
| #define	LSHIFTEQ	293 | ||||
| #define	MODEQ	294 | ||||
| #define	MULEQ	295 | ||||
| #define	NEQ	296 | ||||
| #define	OREQ	297 | ||||
| #define	OROR	298 | ||||
| #define	POINTER	299 | ||||
| #define	REGISTER	300 | ||||
| #define	RETURN	301 | ||||
| #define	RSHIFT	302 | ||||
| #define	RSHIFTEQ	303 | ||||
| #define	SCON	304 | ||||
| #define	SHORT	305 | ||||
| #define	SIGNED	306 | ||||
| #define	SIZEOF	307 | ||||
| #define	STATIC	308 | ||||
| #define	STRUCT	309 | ||||
| #define	SUBEQ	310 | ||||
| #define	SWITCH	311 | ||||
| #define	TYPEDEF	312 | ||||
| #define	UNION	313 | ||||
| #define	UNSIGNED	314 | ||||
| #define	VOID	315 | ||||
| #define	VOLATILE	316 | ||||
| #define	WHILE	317 | ||||
| #define	XOREQ	318 | ||||
| #define	EOI	319 | ||||
|  | ||||
| typedef unsigned int uint; | ||||
| typedef unsigned char uchar; | ||||
|  | ||||
| #define	BSIZE	8192 | ||||
|  | ||||
| #define	YYCTYPE		uchar | ||||
| #define	YYCURSOR	cursor | ||||
| #define	YYLIMIT		s->lim | ||||
| #define	YYMARKER	s->ptr | ||||
| #define	YYFILL(n)	{cursor = fill(s, cursor);} | ||||
|  | ||||
| #define	RET(i)	{s->cur = cursor; return i;} | ||||
|  | ||||
| typedef struct Scanner { | ||||
|     int			fd; | ||||
|     uchar		*bot, *tok, *ptr, *cur, *pos, *lim, *top, *eof; | ||||
|     uint		line; | ||||
| } Scanner; | ||||
|  | ||||
| uchar *fill(Scanner *s, uchar *cursor){ | ||||
|     if(!s->eof){ | ||||
| 	uint cnt = s->tok - s->bot; | ||||
| 	if(cnt){ | ||||
| 	    memcpy(s->bot, s->tok, s->lim - s->tok); | ||||
| 	    s->tok = s->bot; | ||||
| 	    s->ptr -= cnt; | ||||
| 	    cursor -= cnt; | ||||
| 	    s->pos -= cnt; | ||||
| 	    s->lim -= cnt; | ||||
| 	} | ||||
| 	if((s->top - s->lim) < BSIZE){ | ||||
| 	    uchar *buf = (uchar*) malloc(((s->lim - s->bot) + BSIZE)*sizeof(uchar)); | ||||
| 	    memcpy(buf, s->tok, s->lim - s->tok); | ||||
| 	    s->tok = buf; | ||||
| 	    s->ptr = &buf[s->ptr - s->bot]; | ||||
| 	    cursor = &buf[cursor - s->bot]; | ||||
| 	    s->pos = &buf[s->pos - s->bot]; | ||||
| 	    s->lim = &buf[s->lim - s->bot]; | ||||
| 	    s->top = &s->lim[BSIZE]; | ||||
| 	    free(s->bot); | ||||
| 	    s->bot = buf; | ||||
| 	} | ||||
| 	if((cnt = read(s->fd, (char*) s->lim, BSIZE)) != BSIZE){ | ||||
| 	    s->eof = &s->lim[cnt]; *(s->eof)++ = '\n'; | ||||
| 	} | ||||
| 	s->lim += cnt; | ||||
|     } | ||||
|     return cursor; | ||||
| } | ||||
|  | ||||
| int scan(Scanner *s){ | ||||
| 	uchar *cursor = s->cur; | ||||
| std: | ||||
| 	s->tok = cursor; | ||||
| /*!re2c | ||||
| any	= [\000-\377]; | ||||
| O	= [0-7]; | ||||
| D	= [0-9]; | ||||
| L	= [a-zA-Z_]; | ||||
| H	= [a-fA-F0-9]; | ||||
| E	= [Ee] [+-]? D+; | ||||
| FS	= [fFlL]; | ||||
| IS	= [uUlL]*; | ||||
| ESC	= [\\] ([abfnrtv?'"\\] | "x" H+ | O+); | ||||
| */ | ||||
|  | ||||
| /*!re2c | ||||
| 	"/*"			{ goto comment; } | ||||
| 	 | ||||
| 	L (L|D)*		{ RET(ID); } | ||||
| 	 | ||||
| 	("0" [xX] H+ IS?) | ("0" D+ IS?) | (D+ IS?) | | ||||
| 	(['] (ESC|any\[\n\\'])* [']) | ||||
| 				{ RET(ICON); } | ||||
| 	 | ||||
| 	(D+ E FS?) | (D* "." D+ E? FS?) | (D+ "." D* E? FS?) | ||||
| 				{ RET(FCON); } | ||||
| 	 | ||||
| 	(["] (ESC|any\[\n\\"])* ["]) | ||||
| 				{ RET(SCON); } | ||||
| 	 | ||||
| 	"..."                   { RET(ELLIPSIS); } | ||||
| 	">>="			{ RET(RSHIFTEQ); } | ||||
| 	"<<="			{ RET(LSHIFTEQ); } | ||||
| 	"+="			{ RET(ADDEQ); } | ||||
| 	"-="			{ RET(SUBEQ); } | ||||
| 	"*="			{ RET(MULEQ); } | ||||
| 	"/="			{ RET(DIVEQ); } | ||||
| 	"%="			{ RET(MODEQ); } | ||||
| 	"&="			{ RET(ANDEQ); } | ||||
| 	"^="			{ RET(XOREQ); } | ||||
| 	"|="			{ RET(OREQ); } | ||||
| 	">>"			{ RET(RSHIFT); } | ||||
| 	"<<"			{ RET(LSHIFT); } | ||||
| 	"++"			{ RET(INCR); } | ||||
| 	"--"			{ RET(DECR); } | ||||
| 	"->"			{ RET(DEREF); } | ||||
| 	"&&"			{ RET(ANDAND); } | ||||
| 	"||"			{ RET(OROR); } | ||||
| 	"<="			{ RET(LEQ); } | ||||
| 	">="			{ RET(GEQ); } | ||||
| 	"=="			{ RET(EQL); } | ||||
| 	"!="			{ RET(NEQ); } | ||||
| 	";"			{ RET(';'); } | ||||
| 	"{"			{ RET('{'); } | ||||
| 	"}"			{ RET('}'); } | ||||
| 	","			{ RET(','); } | ||||
| 	":"			{ RET(':'); } | ||||
| 	"="			{ RET('='); } | ||||
| 	"("			{ RET('('); } | ||||
| 	")"			{ RET(')'); } | ||||
| 	"["			{ RET('['); } | ||||
| 	"]"			{ RET(']'); } | ||||
| 	"."			{ RET('.'); } | ||||
| 	"&"			{ RET('&'); } | ||||
| 	"!"			{ RET('!'); } | ||||
| 	"~"			{ RET('~'); } | ||||
| 	"-"			{ RET('-'); } | ||||
| 	"+"			{ RET('+'); } | ||||
| 	"*"			{ RET('*'); } | ||||
| 	"/"			{ RET('/'); } | ||||
| 	"%"			{ RET('%'); } | ||||
| 	"<"			{ RET('<'); } | ||||
| 	">"			{ RET('>'); } | ||||
| 	"^"			{ RET('^'); } | ||||
| 	"|"			{ RET('|'); } | ||||
| 	"?"			{ RET('?'); } | ||||
|  | ||||
|  | ||||
| 	[ \t\v\f]+		{ goto std; } | ||||
|  | ||||
| 	"\n" | ||||
| 	    { | ||||
| 		if(cursor == s->eof) RET(EOI); | ||||
| 		s->pos = cursor; s->line++; | ||||
| 		goto std; | ||||
| 	    } | ||||
|  | ||||
| 	any | ||||
| 	    { | ||||
| 		printf("unexpected character: %c\n", *s->tok); | ||||
| 		goto std; | ||||
| 	    } | ||||
| */ | ||||
|  | ||||
| comment: | ||||
| /*!re2c | ||||
| 	"*/"			{ goto std; } | ||||
| 	"\n" | ||||
| 	    { | ||||
| 		if(cursor == s->eof) RET(EOI); | ||||
| 		s->tok = s->pos = cursor; s->line++; | ||||
| 		goto comment; | ||||
| 	    } | ||||
|         any			{ goto comment; } | ||||
| */ | ||||
| } | ||||
|  | ||||
| main(){ | ||||
|     Scanner in; | ||||
|     int t; | ||||
|     memset((char*) &in, 0, sizeof(in)); | ||||
|     in.fd = 0; | ||||
|     while((t = scan(&in)) != EOI){ | ||||
| /* | ||||
| 	printf("%d\t%.*s\n", t, in.cur - in.tok, in.tok); | ||||
| 	printf("%d\n", t); | ||||
| */ | ||||
|     } | ||||
|     close(in.fd); | ||||
| } | ||||
							
								
								
									
										63
									
								
								samples/C++/cvsignore.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										63
									
								
								samples/C++/cvsignore.re
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,63 @@ | ||||
|  | ||||
| #define YYFILL(n) if (cursor >= limit) break; | ||||
| #define YYCTYPE char | ||||
| #define YYCURSOR cursor | ||||
| #define YYLIMIT limit | ||||
| #define YYMARKER marker | ||||
|  | ||||
| /*!re2c | ||||
| any     = (.|"\n"); | ||||
| value	= (":" (.\"$")+)?; | ||||
| cvsdat	= "Date"; | ||||
| cvsid	= "Id"; | ||||
| cvslog	= "Log"; | ||||
| cvsrev	= "Revision"; | ||||
| cvssrc	= "Source"; | ||||
| */ | ||||
|  | ||||
| #define APPEND(text) \ | ||||
| 	append(output, outsize, text, sizeof(text) - sizeof(YYCTYPE)) | ||||
|  | ||||
| inline void append(YYCTYPE *output, size_t & outsize, const YYCTYPE * text, size_t len) | ||||
| { | ||||
| 	memcpy(output + outsize, text, len); | ||||
| 	outsize += (len / sizeof(YYCTYPE)); | ||||
| } | ||||
|  | ||||
| void scan(YYCTYPE *pText, size_t *pSize, int *pbChanged) | ||||
| { | ||||
| 	// rule | ||||
| 	// scan lines | ||||
| 	// find $ in lines | ||||
| 	//   compact $<keyword>: .. $ to $<keyword>$ | ||||
|    | ||||
| 	YYCTYPE *output; | ||||
| 	const YYCTYPE *cursor, *limit, *marker; | ||||
|  | ||||
| 	cursor = marker = output = *pText; | ||||
|  | ||||
| 	size_t insize = *pSize; | ||||
| 	size_t outsize = 0; | ||||
|  | ||||
| 	limit = cursor + insize; | ||||
|  | ||||
| 	while(1) { | ||||
| loop: | ||||
| /*!re2c | ||||
|  | ||||
| "$" cvsdat value "$"	{ APPEND(L"$" L"Date$"); goto loop; } | ||||
| "$" cvsid  value "$"	{ APPEND(L"$" L"Id$"); goto loop; } | ||||
| "$" cvslog value "$"	{ APPEND(L"$" L"Log$"); goto loop; } | ||||
| "$" cvsrev value "$"	{ APPEND(L"$" L"Revision$"); goto loop; } | ||||
| "$" cvssrc value "$"	{ APPEND(L"$" L"Source$"); goto loop; } | ||||
| any						{ output[outsize++] = cursor[-1]; if (cursor >= limit) break; goto loop; } | ||||
|  | ||||
| */ | ||||
| 	} | ||||
| 	output[outsize] = '\0'; | ||||
|  | ||||
| 	// set the new size | ||||
| 	*pSize = outsize; | ||||
| 	 | ||||
| 	*pbChanged = (insize == outsize) ? 0 : 1; | ||||
| } | ||||
							
								
								
									
										13
									
								
								samples/C++/simple.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								samples/C++/simple.re
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,13 @@ | ||||
| #define	NULL		((char*) 0) | ||||
| char *scan(char *p){ | ||||
| char *q; | ||||
| #define	YYCTYPE		char | ||||
| #define	YYCURSOR	p | ||||
| #define	YYLIMIT		p | ||||
| #define	YYMARKER	q | ||||
| #define	YYFILL(n) | ||||
| /*!re2c | ||||
| 	[0-9]+		{return YYCURSOR;} | ||||
| 	[\000-\377]	{return NULL;} | ||||
| */ | ||||
| } | ||||
							
								
								
									
										440
									
								
								samples/D/aa.d
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										440
									
								
								samples/D/aa.d
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,440 @@ | ||||
| /** | ||||
|  * Implementation of associative arrays. | ||||
|  * | ||||
|  * Copyright: Martin Nowak 2015 -. | ||||
|  * License:   $(LINK2 http://www.boost.org/LICENSE_1_0.txt, Boost License 1.0) | ||||
|  * Authors:   Martin Nowak | ||||
|  */ | ||||
| module core.aa; | ||||
|  | ||||
| import core.memory : GC; | ||||
|  | ||||
| private | ||||
| { | ||||
|     // grow threshold | ||||
|     enum GROW_NUM = 4; | ||||
|     enum GROW_DEN = 5; | ||||
|     // shrink threshold | ||||
|     enum SHRINK_NUM = 1; | ||||
|     enum SHRINK_DEN = 8; | ||||
|     // grow factor | ||||
|     enum GROW_FAC = 4; | ||||
|     // growing the AA doubles it's size, so the shrink threshold must be | ||||
|     // smaller than half the grow threshold to have a hysteresis | ||||
|     static assert(GROW_FAC * SHRINK_NUM * GROW_DEN < GROW_NUM * SHRINK_DEN); | ||||
|     // initial load factor (for literals), mean of both thresholds | ||||
|     enum INIT_NUM = (GROW_DEN * SHRINK_NUM + GROW_NUM * SHRINK_DEN) / 2; | ||||
|     enum INIT_DEN = SHRINK_DEN * GROW_DEN; | ||||
|  | ||||
|     // magic hash constants to distinguish empty, deleted, and filled buckets | ||||
|     enum HASH_EMPTY = 0; | ||||
|     enum HASH_DELETED = 0x1; | ||||
|     enum HASH_FILLED_MARK = size_t(1) << 8 * size_t.sizeof - 1; | ||||
| } | ||||
|  | ||||
| enum INIT_NUM_BUCKETS = 8; | ||||
|  | ||||
| struct AA(Key, Val) | ||||
| { | ||||
|     this(size_t sz) | ||||
|     { | ||||
|         impl = new Impl(nextpow2(sz)); | ||||
|     } | ||||
|  | ||||
|     @property bool empty() const pure nothrow @safe @nogc | ||||
|     { | ||||
|         return !length; | ||||
|     } | ||||
|  | ||||
|     @property size_t length() const pure nothrow @safe @nogc | ||||
|     { | ||||
|         return impl is null ? 0 : impl.length; | ||||
|     } | ||||
|  | ||||
|     void opIndexAssign(Val val, in Key key) | ||||
|     { | ||||
|         // lazily alloc implementation | ||||
|         if (impl is null) | ||||
|             impl = new Impl(INIT_NUM_BUCKETS); | ||||
|  | ||||
|         // get hash and bucket for key | ||||
|         immutable hash = calcHash(key); | ||||
|  | ||||
|         // found a value => assignment | ||||
|         if (auto p = impl.findSlotLookup(hash, key)) | ||||
|         { | ||||
|             p.entry.val = val; | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         auto p = findSlotInsert(hash); | ||||
|         if (p.deleted) | ||||
|             --deleted; | ||||
|         // check load factor and possibly grow | ||||
|         else if (++used * GROW_DEN > dim * GROW_NUM) | ||||
|         { | ||||
|             grow(); | ||||
|             p = findSlotInsert(hash); | ||||
|             assert(p.empty); | ||||
|         } | ||||
|  | ||||
|         // update search cache and allocate entry | ||||
|         firstUsed = min(firstUsed, cast(uint)(p - buckets.ptr)); | ||||
|         p.hash = hash; | ||||
|         p.entry = new Impl.Entry(key, val); // TODO: move | ||||
|         return; | ||||
|     } | ||||
|  | ||||
|     ref inout(Val) opIndex(in Key key) inout @trusted | ||||
|     { | ||||
|         auto p = opIn_r(key); | ||||
|         assert(p !is null); | ||||
|         return *p; | ||||
|     } | ||||
|  | ||||
|     inout(Val)* opIn_r(in Key key) inout @trusted | ||||
|     { | ||||
|         if (empty) | ||||
|             return null; | ||||
|  | ||||
|         immutable hash = calcHash(key); | ||||
|         if (auto p = findSlotLookup(hash, key)) | ||||
|             return &p.entry.val; | ||||
|         return null; | ||||
|     } | ||||
|  | ||||
|     bool remove(in Key key) | ||||
|     { | ||||
|         if (empty) | ||||
|             return false; | ||||
|  | ||||
|         immutable hash = calcHash(key); | ||||
|         if (auto p = findSlotLookup(hash, key)) | ||||
|         { | ||||
|             // clear entry | ||||
|             p.hash = HASH_DELETED; | ||||
|             p.entry = null; | ||||
|  | ||||
|             ++deleted; | ||||
|             if (length * SHRINK_DEN < dim * SHRINK_NUM) | ||||
|                 shrink(); | ||||
|  | ||||
|             return true; | ||||
|         } | ||||
|         return false; | ||||
|     } | ||||
|  | ||||
|     Val get(in Key key, lazy Val val) | ||||
|     { | ||||
|         auto p = opIn_r(key); | ||||
|         return p is null ? val : *p; | ||||
|     } | ||||
|  | ||||
|     ref Val getOrSet(in Key key, lazy Val val) | ||||
|     { | ||||
|         // lazily alloc implementation | ||||
|         if (impl is null) | ||||
|             impl = new Impl(INIT_NUM_BUCKETS); | ||||
|  | ||||
|         // get hash and bucket for key | ||||
|         immutable hash = calcHash(key); | ||||
|  | ||||
|         // found a value => assignment | ||||
|         if (auto p = impl.findSlotLookup(hash, key)) | ||||
|             return p.entry.val; | ||||
|  | ||||
|         auto p = findSlotInsert(hash); | ||||
|         if (p.deleted) | ||||
|             --deleted; | ||||
|         // check load factor and possibly grow | ||||
|         else if (++used * GROW_DEN > dim * GROW_NUM) | ||||
|         { | ||||
|             grow(); | ||||
|             p = findSlotInsert(hash); | ||||
|             assert(p.empty); | ||||
|         } | ||||
|  | ||||
|         // update search cache and allocate entry | ||||
|         firstUsed = min(firstUsed, cast(uint)(p - buckets.ptr)); | ||||
|         p.hash = hash; | ||||
|         p.entry = new Impl.Entry(key, val); | ||||
|         return p.entry.val; | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|        Convert the AA to the type of the builtin language AA. | ||||
|      */ | ||||
|     Val[Key] toBuiltinAA() pure nothrow | ||||
|     { | ||||
|         return cast(Val[Key]) _aaFromCoreAA(impl, rtInterface); | ||||
|     } | ||||
|  | ||||
| private: | ||||
|  | ||||
|     private this(inout(Impl)* impl) inout | ||||
|     { | ||||
|         this.impl = impl; | ||||
|     } | ||||
|  | ||||
|     ref Val getLValue(in Key key) | ||||
|     { | ||||
|         // lazily alloc implementation | ||||
|         if (impl is null) | ||||
|             impl = new Impl(INIT_NUM_BUCKETS); | ||||
|  | ||||
|         // get hash and bucket for key | ||||
|         immutable hash = calcHash(key); | ||||
|  | ||||
|         // found a value => assignment | ||||
|         if (auto p = impl.findSlotLookup(hash, key)) | ||||
|             return p.entry.val; | ||||
|  | ||||
|         auto p = findSlotInsert(hash); | ||||
|         if (p.deleted) | ||||
|             --deleted; | ||||
|         // check load factor and possibly grow | ||||
|         else if (++used * GROW_DEN > dim * GROW_NUM) | ||||
|         { | ||||
|             grow(); | ||||
|             p = findSlotInsert(hash); | ||||
|             assert(p.empty); | ||||
|         } | ||||
|  | ||||
|         // update search cache and allocate entry | ||||
|         firstUsed = min(firstUsed, cast(uint)(p - buckets.ptr)); | ||||
|         p.hash = hash; | ||||
|         p.entry = new Impl.Entry(key); // TODO: move | ||||
|         return p.entry.val; | ||||
|     } | ||||
|  | ||||
|     static struct Impl | ||||
|     { | ||||
|         this(size_t sz) | ||||
|         { | ||||
|             buckets = allocBuckets(sz); | ||||
|         } | ||||
|  | ||||
|         @property size_t length() const pure nothrow @nogc | ||||
|         { | ||||
|             assert(used >= deleted); | ||||
|             return used - deleted; | ||||
|         } | ||||
|  | ||||
|         @property size_t dim() const pure nothrow @nogc | ||||
|         { | ||||
|             return buckets.length; | ||||
|         } | ||||
|  | ||||
|         @property size_t mask() const pure nothrow @nogc | ||||
|         { | ||||
|             return dim - 1; | ||||
|         } | ||||
|  | ||||
|         // find the first slot to insert a value with hash | ||||
|         inout(Bucket)* findSlotInsert(size_t hash) inout pure nothrow @nogc | ||||
|         { | ||||
|             for (size_t i = hash & mask, j = 1;; ++j) | ||||
|             { | ||||
|                 if (!buckets[i].filled) | ||||
|                     return &buckets[i]; | ||||
|                 i = (i + j) & mask; | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         // lookup a key | ||||
|         inout(Bucket)* findSlotLookup(size_t hash, in Key key) inout | ||||
|         { | ||||
|             for (size_t i = hash & mask, j = 1;; ++j) | ||||
|             { | ||||
|                 if (buckets[i].hash == hash && key == buckets[i].entry.key) | ||||
|                     return &buckets[i]; | ||||
|                 else if (buckets[i].empty) | ||||
|                     return null; | ||||
|                 i = (i + j) & mask; | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         void grow() | ||||
|         { | ||||
|             // If there are so many deleted entries, that growing would push us | ||||
|             // below the shrink threshold, we just purge deleted entries instead. | ||||
|             if (length * SHRINK_DEN < GROW_FAC * dim * SHRINK_NUM) | ||||
|                 resize(dim); | ||||
|             else | ||||
|                 resize(GROW_FAC * dim); | ||||
|         } | ||||
|  | ||||
|         void shrink() | ||||
|         { | ||||
|             if (dim > INIT_NUM_BUCKETS) | ||||
|                 resize(dim / GROW_FAC); | ||||
|         } | ||||
|  | ||||
|         void resize(size_t ndim) pure nothrow | ||||
|         { | ||||
|             auto obuckets = buckets; | ||||
|             buckets = allocBuckets(ndim); | ||||
|  | ||||
|             foreach (ref b; obuckets) | ||||
|                 if (b.filled) | ||||
|                     *findSlotInsert(b.hash) = b; | ||||
|  | ||||
|             firstUsed = 0; | ||||
|             used -= deleted; | ||||
|             deleted = 0; | ||||
|             GC.free(obuckets.ptr); // safe to free b/c impossible to reference | ||||
|         } | ||||
|  | ||||
|         static struct Entry | ||||
|         { | ||||
|             Key key; | ||||
|             Val val; | ||||
|         } | ||||
|  | ||||
|         static struct Bucket | ||||
|         { | ||||
|             size_t hash; | ||||
|             Entry* entry; | ||||
|  | ||||
|             @property bool empty() const | ||||
|             { | ||||
|                 return hash == HASH_EMPTY; | ||||
|             } | ||||
|  | ||||
|             @property bool deleted() const | ||||
|             { | ||||
|                 return hash == HASH_DELETED; | ||||
|             } | ||||
|  | ||||
|             @property bool filled() const | ||||
|             { | ||||
|                 return cast(ptrdiff_t) hash < 0; | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         Bucket[] allocBuckets(size_t dim) @trusted pure nothrow | ||||
|         { | ||||
|             enum attr = GC.BlkAttr.NO_INTERIOR; | ||||
|             immutable sz = dim * Bucket.sizeof; | ||||
|             return (cast(Bucket*) GC.calloc(sz, attr))[0 .. dim]; | ||||
|         } | ||||
|  | ||||
|         Bucket[] buckets; | ||||
|         uint used; | ||||
|         uint deleted; | ||||
|         uint firstUsed; | ||||
|     } | ||||
|  | ||||
|     RTInterface* rtInterface()() pure nothrow @nogc | ||||
|     { | ||||
|         static size_t aaLen(in void* pimpl) pure nothrow @nogc | ||||
|         { | ||||
|             auto aa = const(AA)(cast(const(Impl)*) pimpl); | ||||
|             return aa.length; | ||||
|         } | ||||
|  | ||||
|         static void* aaGetY(void** pimpl, in void* pkey) | ||||
|         { | ||||
|             auto aa = AA(cast(Impl*)*pimpl); | ||||
|             auto res = &aa.getLValue(*cast(const(Key*)) pkey); | ||||
|             *pimpl = aa.impl; // might have changed | ||||
|             return res; | ||||
|         } | ||||
|  | ||||
|         static inout(void)* aaInX(inout void* pimpl, in void* pkey) | ||||
|         { | ||||
|             auto aa = inout(AA)(cast(inout(Impl)*) pimpl); | ||||
|             return aa.opIn_r(*cast(const(Key*)) pkey); | ||||
|         } | ||||
|  | ||||
|         static bool aaDelX(void* pimpl, in void* pkey) | ||||
|         { | ||||
|             auto aa = AA(cast(Impl*) pimpl); | ||||
|             return aa.remove(*cast(const(Key*)) pkey); | ||||
|         } | ||||
|  | ||||
|         static immutable vtbl = RTInterface(&aaLen, &aaGetY, &aaInX, &aaDelX); | ||||
|         return cast(RTInterface*)&vtbl; | ||||
|     } | ||||
|  | ||||
|     static size_t calcHash(in ref Key key) | ||||
|     { | ||||
|         return hashOf(key) | HASH_FILLED_MARK; | ||||
|     } | ||||
|  | ||||
|     Impl* impl; | ||||
|     alias impl this; | ||||
| } | ||||
|  | ||||
| package extern (C) void* _aaFromCoreAA(void* impl, RTInterface* rtIntf) pure nothrow; | ||||
|  | ||||
| private: | ||||
|  | ||||
| struct RTInterface | ||||
| { | ||||
|     alias AA = void*; | ||||
|  | ||||
|     size_t function(in AA aa) pure nothrow @nogc len; | ||||
|     void* function(AA* aa, in void* pkey) getY; | ||||
|     inout(void)* function(inout AA aa, in void* pkey) inX; | ||||
|     bool function(AA aa, in void* pkey) delX; | ||||
| } | ||||
|  | ||||
| unittest | ||||
| { | ||||
|     AA!(int, int) aa; | ||||
|     assert(aa.length == 0); | ||||
|     aa[0] = 1; | ||||
|     assert(aa.length == 1 && aa[0] == 1); | ||||
|     aa[1] = 2; | ||||
|     assert(aa.length == 2 && aa[1] == 2); | ||||
|     import core.stdc.stdio; | ||||
|  | ||||
|     int[int] rtaa = aa.toBuiltinAA(); | ||||
|     assert(rtaa.length == 2); | ||||
|     puts("length"); | ||||
|     assert(rtaa[0] == 1); | ||||
|     assert(rtaa[1] == 2); | ||||
|     rtaa[2] = 3; | ||||
|  | ||||
|     assert(aa[2] == 3); | ||||
| } | ||||
|  | ||||
| unittest | ||||
| { | ||||
|     auto aa = AA!(int, int)(3); | ||||
|     aa[0] = 0; | ||||
|     aa[1] = 1; | ||||
|     aa[2] = 2; | ||||
|     assert(aa.length == 3); | ||||
| } | ||||
|  | ||||
| //============================================================================== | ||||
| // Helper functions | ||||
| //------------------------------------------------------------------------------ | ||||
|  | ||||
| size_t nextpow2(in size_t n) pure nothrow @nogc | ||||
| { | ||||
|     import core.bitop : bsr; | ||||
|  | ||||
|     if (n < 2) | ||||
|         return 1; | ||||
|     return size_t(1) << bsr(n - 1) + 1; | ||||
| } | ||||
|  | ||||
| pure nothrow @nogc unittest | ||||
| { | ||||
|     //                            0, 1, 2, 3, 4, 5, 6, 7, 8,  9 | ||||
|     foreach (const n, const pow2; [1, 1, 2, 4, 4, 8, 8, 8, 8, 16]) | ||||
|         assert(nextpow2(n) == pow2); | ||||
| } | ||||
|  | ||||
| T min(T)(T a, T b) pure nothrow @nogc | ||||
| { | ||||
|     return a < b ? a : b; | ||||
| } | ||||
|  | ||||
| T max(T)(T a, T b) pure nothrow @nogc | ||||
| { | ||||
|     return b < a ? a : b; | ||||
| } | ||||
							
								
								
									
										187
									
								
								samples/D/arrayops.d
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										187
									
								
								samples/D/arrayops.d
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,187 @@ | ||||
| /** | ||||
|  * Benchmark for array ops. | ||||
|  * | ||||
|  * Copyright: Copyright Martin Nowak 2016 -. | ||||
|  * License:   $(LINK2 http://www.boost.org/LICENSE_1_0.txt, Boost License 1.0) | ||||
|  * Authors:   Martin Nowak | ||||
|  */ | ||||
| import core.cpuid, std.algorithm, std.datetime, std.meta, std.stdio, std.string, | ||||
|     std.range; | ||||
|  | ||||
| float[6] getLatencies(T, string op)() | ||||
| { | ||||
|     enum N = (64 * (1 << 6) + 64) * T.sizeof; | ||||
|     auto a = Array!T(N), b = Array!T(N), c = Array!T(N); | ||||
|     float[6] latencies = float.max; | ||||
|     foreach (i, ref latency; latencies) | ||||
|     { | ||||
|         auto len = 1 << i; | ||||
|         foreach (_; 1 .. 32) | ||||
|         { | ||||
|             a[] = 24; | ||||
|             b[] = 4; | ||||
|             c[] = 2; | ||||
|             auto sw = StopWatch(AutoStart.yes); | ||||
|             foreach (off; size_t(0) .. size_t(64)) | ||||
|             { | ||||
|                 off = off * len + off; | ||||
|                 enum op = op.replace("const", "2").replace("a", | ||||
|                         "a[off .. off + len]").replace("b", | ||||
|                         "b[off .. off + len]").replace("c", "c[off .. off + len]"); | ||||
|                 mixin(op ~ ";"); | ||||
|             } | ||||
|             latency = min(latency, sw.peek.nsecs); | ||||
|         } | ||||
|     } | ||||
|     float[6] res = latencies[] / 1024; | ||||
|     return res; | ||||
| } | ||||
|  | ||||
| float[4] getThroughput(T, string op)() | ||||
| { | ||||
|     enum N = (40 * 1024 * 1024 + 64 * T.sizeof) / T.sizeof; | ||||
|     auto a = Array!T(N), b = Array!T(N), c = Array!T(N); | ||||
|     float[4] latencies = float.max; | ||||
|     size_t[4] lengths = [ | ||||
|         8 * 1024 / T.sizeof, 32 * 1024 / T.sizeof, 512 * 1024 / T.sizeof, 32 * 1024 * 1024 / T | ||||
|         .sizeof | ||||
|     ]; | ||||
|     foreach (i, ref latency; latencies) | ||||
|     { | ||||
|         auto len = lengths[i] / 64; | ||||
|         foreach (_; 1 .. 4) | ||||
|         { | ||||
|             a[] = 24; | ||||
|             b[] = 4; | ||||
|             c[] = 2; | ||||
|             auto sw = StopWatch(AutoStart.yes); | ||||
|             foreach (off; size_t(0) .. size_t(64)) | ||||
|             { | ||||
|                 off = off * len + off; | ||||
|                 enum op = op.replace("const", "2").replace("a", | ||||
|                         "a[off .. off + len]").replace("b", | ||||
|                         "b[off .. off + len]").replace("c", "c[off .. off + len]"); | ||||
|                 mixin(op ~ ";"); | ||||
|             } | ||||
|             immutable nsecs = sw.peek.nsecs; | ||||
|             runMasked({latency = min(latency, nsecs);}); | ||||
|         } | ||||
|     } | ||||
|     float[4] throughputs = void; | ||||
|     runMasked({throughputs = T.sizeof * lengths[] / latencies[];}); | ||||
|     return throughputs; | ||||
| } | ||||
|  | ||||
| string[] genOps() | ||||
| { | ||||
|     string[] ops; | ||||
|     foreach (op1; ["+", "-", "*", "/"]) | ||||
|     { | ||||
|         ops ~= "a " ~ op1 ~ "= b"; | ||||
|         ops ~= "a " ~ op1 ~ "= const"; | ||||
|         foreach (op2; ["+", "-", "*", "/"]) | ||||
|         { | ||||
|             ops ~= "a " ~ op1 ~ "= b " ~ op2 ~ " c"; | ||||
|             ops ~= "a " ~ op1 ~ "= b " ~ op2 ~ " const"; | ||||
|         } | ||||
|     } | ||||
|     return ops; | ||||
| } | ||||
|  | ||||
| void runOp(string op)() | ||||
| { | ||||
|     foreach (T; AliasSeq!(ubyte, ushort, uint, ulong, byte, short, int, long, float, | ||||
|             double)) | ||||
|         writefln("%s, %s, %(%.2f, %), %(%s, %)", T.stringof, op, | ||||
|             getLatencies!(T, op), getThroughput!(T, op)); | ||||
| } | ||||
|  | ||||
| struct Array(T) | ||||
| { | ||||
|     import core.stdc.stdlib : free, malloc; | ||||
|  | ||||
|     this(size_t n) | ||||
|     { | ||||
|         ary = (cast(T*) malloc(T.sizeof * n))[0 .. n]; | ||||
|     } | ||||
|  | ||||
|     ~this() | ||||
|     { | ||||
|         free(ary.ptr); | ||||
|     } | ||||
|  | ||||
|     T[] ary; | ||||
|     alias ary this; | ||||
| } | ||||
|  | ||||
| version (X86) | ||||
|     version = SSE; | ||||
| else version (X86_64) | ||||
|     version = SSE; | ||||
| else | ||||
|     static assert(0, "unimplemented"); | ||||
|  | ||||
| version (SSE) | ||||
| { | ||||
|     uint mxcsr() | ||||
|     { | ||||
|         uint ret = void; | ||||
|         asm | ||||
|         { | ||||
|             stmxcsr ret; | ||||
|         } | ||||
|         return ret; | ||||
|     } | ||||
|  | ||||
|     void mxcsr(uint val) | ||||
|     { | ||||
|         asm | ||||
|         { | ||||
|             ldmxcsr val; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     // http://softpixel.com/~cwright/programming/simd/sse.php | ||||
|     enum FPU_EXCEPTION_MASKS = 1 << 12 | 1 << 11 | 1 << 10 | 1 << 9 | 1 << 8 | 1 << 7; | ||||
|     enum FPU_EXCEPTION_FLAGS = 1 << 5 | 1 << 4 | 1 << 3 | 1 << 2 | 1 << 1 | 1 << 0; | ||||
|  | ||||
|     void maskFPUExceptions() | ||||
|     { | ||||
|         mxcsr = mxcsr | FPU_EXCEPTION_MASKS; | ||||
|     } | ||||
|  | ||||
|     void unmaskFPUExceptions() | ||||
|     { | ||||
|         mxcsr = mxcsr & ~FPU_EXCEPTION_MASKS; | ||||
|     } | ||||
|  | ||||
|     uint FPUExceptionFlags() | ||||
|     { | ||||
|         return mxcsr & FPU_EXCEPTION_FLAGS; | ||||
|     } | ||||
|  | ||||
|     void clearFPUExceptionFlags() | ||||
|     { | ||||
|         mxcsr = mxcsr & ~FPU_EXCEPTION_FLAGS; | ||||
|     } | ||||
| } | ||||
|  | ||||
| void runMasked(scope void delegate() dg) | ||||
| { | ||||
|     assert(FPUExceptionFlags == 0); | ||||
|     maskFPUExceptions; | ||||
|     dg(); | ||||
|     clearFPUExceptionFlags; | ||||
|     unmaskFPUExceptions; | ||||
| } | ||||
|  | ||||
| void main() | ||||
| { | ||||
|     unmaskFPUExceptions; | ||||
|  | ||||
|     writefln("type, op, %(latency%s, %), %-(throughput%s, %)", iota(6) | ||||
|         .map!(i => 1 << i), ["8KB", "32KB", "512KB", "32MB"]); | ||||
|     foreach (op; mixin("AliasSeq!(%(%s, %))".format(genOps))) | ||||
|         runOp!op; | ||||
|     maskFPUExceptions; | ||||
| } | ||||
							
								
								
									
										3
									
								
								samples/D/function.d
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								samples/D/function.d
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| void foo() | ||||
| { | ||||
| } | ||||
							
								
								
									
										6
									
								
								samples/D/hello_world.d
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								samples/D/hello_world.d
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | ||||
| import std.stdio; | ||||
|  | ||||
| void main() | ||||
| { | ||||
|     writeln("Hello World"); | ||||
| } | ||||
							
								
								
									
										7
									
								
								samples/D/template.d
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										7
									
								
								samples/D/template.d
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,7 @@ | ||||
| template Fib(size_t N) | ||||
| { | ||||
|     static if (N < 2) | ||||
|         enum Fib = size_t(1); | ||||
|     else | ||||
|         enum Fib = Fib!(N - 2) + Fib!(N - 1); | ||||
| } | ||||
							
								
								
									
										3
									
								
								samples/D/template_function.d
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								samples/D/template_function.d
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| void bar(T)(T t) | ||||
| { | ||||
| } | ||||
							
								
								
									
										3
									
								
								samples/D/unittest1.d
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								samples/D/unittest1.d
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| unittest | ||||
| { | ||||
| } | ||||
							
								
								
									
										3
									
								
								samples/D/unittest2.d
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								samples/D/unittest2.d
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| unittest("optional name") | ||||
| { | ||||
| } | ||||
							
								
								
									
										9
									
								
								samples/Emacs Lisp/filenames/Cask
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										9
									
								
								samples/Emacs Lisp/filenames/Cask
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,9 @@ | ||||
| (package "composer" "0.0.7" "Interface to PHP Composer") | ||||
| (source "melpa" "https://melpa.org/packages/") | ||||
|  | ||||
| (package-file "composer.el") | ||||
|  | ||||
| (depends-on "f") | ||||
| (depends-on "s") | ||||
| (depends-on "request") | ||||
| (depends-on "seq") | ||||
							
								
								
									
										7
									
								
								samples/Erlang/filenames/Emakefile
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										7
									
								
								samples/Erlang/filenames/Emakefile
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,7 @@ | ||||
| {"src/*", [ | ||||
|    report,  | ||||
|    verbose,  | ||||
|    {i, "include"},  | ||||
|    {outdir, "ebin"}, | ||||
|    debug_info  | ||||
| ]}. | ||||
							
								
								
									
										161
									
								
								samples/GLSL/SyLens.shader
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										161
									
								
								samples/GLSL/SyLens.shader
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,161 @@ | ||||
| #version 120 | ||||
|  | ||||
| /* | ||||
|   Original Lens Distortion Algorithm from SSontech (Syntheyes) | ||||
|   http://www.ssontech.com/content/lensalg.htm | ||||
|    | ||||
|   r2 is radius squared. | ||||
|    | ||||
|   r2 = image_aspect*image_aspect*u*u + v*v | ||||
|   f = 1 + r2*(k + kcube*sqrt(r2)) | ||||
|   u' = f*u | ||||
|   v' = f*v | ||||
|  | ||||
| */ | ||||
|  | ||||
| // Controls | ||||
| uniform float kCoeff, kCube, uShift, vShift; | ||||
| uniform float chroma_red, chroma_green, chroma_blue; | ||||
| uniform bool apply_disto; | ||||
|  | ||||
| // Uniform inputs | ||||
| uniform sampler2D input1; | ||||
| uniform float adsk_input1_w, adsk_input1_h, adsk_input1_aspect, adsk_input1_frameratio; | ||||
| uniform float adsk_result_w, adsk_result_h; | ||||
|  | ||||
| float distortion_f(float r) { | ||||
|     float f = 1 + (r*r)*(kCoeff + kCube * r); | ||||
|     return f; | ||||
| } | ||||
|  | ||||
|  | ||||
| float inverse_f(float r) | ||||
| { | ||||
|      | ||||
|     // Build a lookup table on the radius, as a fixed-size table. | ||||
|     // We will use a vec3 since we will store the multipled number in the Z coordinate. | ||||
|     // So to recap: x will be the radius, y will be the f(x) distortion, and Z will be x * y; | ||||
|     vec3[48] lut; | ||||
|      | ||||
|     // Since out LUT is shader-global check if it's been computed alrite | ||||
|     // Flame has no overflow bbox so we can safely max out at the image edge, plus some cushion | ||||
|     float max_r = sqrt((adsk_input1_frameratio * adsk_input1_frameratio) + 1) + 0.1; | ||||
|     float incr = max_r / 48; | ||||
|     float lut_r = 0; | ||||
|     float f; | ||||
|     for(int i=0; i < 48; i++) { | ||||
|         f = distortion_f(lut_r); | ||||
|         lut[i] = vec3(lut_r, f, lut_r * f); | ||||
|         lut_r += incr; | ||||
|     } | ||||
|      | ||||
|     float t; | ||||
|     // Now find the nehgbouring elements | ||||
|     // only iterate to 46 since we will need | ||||
|     // 47 as i+1 | ||||
|     for(int i=0; i < 47; i++) { | ||||
|         if(lut[i].z < r && lut[i+1].z > r) { | ||||
|             // BAM! our value is between these two segments | ||||
|             // get the T interpolant and mix | ||||
|             t = (r - lut[i].z) / (lut[i+1].z - lut[i]).z; | ||||
|             return mix(lut[i].y, lut[i+1].y, t ); | ||||
|         } | ||||
|     } | ||||
| } | ||||
|  | ||||
| float aberrate(float f, float chroma) | ||||
| { | ||||
|    return f + (f * chroma); | ||||
| } | ||||
|  | ||||
| vec3 chromaticize_and_invert(float f) | ||||
| { | ||||
|    vec3 rgb_f = vec3(aberrate(f, chroma_red), aberrate(f, chroma_green), aberrate(f, chroma_blue)); | ||||
|    // We need to DIVIDE by F when we redistort, and x / y == x * (1 / y) | ||||
|    if(apply_disto) { | ||||
|       rgb_f = 1 / rgb_f; | ||||
|    } | ||||
|    return rgb_f; | ||||
| } | ||||
|  | ||||
| void main(void) | ||||
| { | ||||
|    vec2 px, uv; | ||||
|    float f = 1; | ||||
|    float r = 1; | ||||
|     | ||||
|    px = gl_FragCoord.xy; | ||||
|     | ||||
|    // Make sure we are still centered | ||||
|    px.x -= (adsk_result_w - adsk_input1_w) / 2; | ||||
|    px.y -= (adsk_result_h - adsk_input1_h) / 2; | ||||
|     | ||||
|    // Push the destination coordinates into the [0..1] range | ||||
|    uv.x = px.x / adsk_input1_w; | ||||
|    uv.y = px.y / adsk_input1_h; | ||||
|     | ||||
|         | ||||
|    // And to Syntheyes UV which are [1..-1] on both X and Y | ||||
|    uv.x = (uv.x *2 ) - 1; | ||||
|    uv.y = (uv.y *2 ) - 1; | ||||
|     | ||||
|    // Add UV shifts | ||||
|    uv.x += uShift; | ||||
|    uv.y += vShift; | ||||
|     | ||||
|    // Make the X value the aspect value, so that the X coordinates go to [-aspect..aspect] | ||||
|    uv.x = uv.x * adsk_input1_frameratio; | ||||
|     | ||||
|    // Compute the radius | ||||
|    r = sqrt(uv.x*uv.x + uv.y*uv.y); | ||||
|     | ||||
|    // If we are redistorting, account for the oversize plate in the input, assume that | ||||
|    // the input aspect is the same | ||||
|    if(apply_disto) { | ||||
|        r = r / (float(adsk_input1_w) / float(adsk_result_w)); | ||||
|    } | ||||
|     | ||||
|    // Apply or remove disto, per channel honoring chromatic aberration | ||||
|    if(apply_disto) { | ||||
|       f = inverse_f(r); | ||||
|    } else { | ||||
|       f = distortion_f(r); | ||||
|    } | ||||
|     | ||||
|    vec2[3] rgb_uvs = vec2[](uv, uv, uv); | ||||
|     | ||||
|    // Compute distortions per component | ||||
|    vec3 rgb_f = chromaticize_and_invert(f); | ||||
|     | ||||
|    // Apply the disto coefficients, per component | ||||
|    rgb_uvs[0] = rgb_uvs[0] * rgb_f.rr; | ||||
|    rgb_uvs[1] = rgb_uvs[1] * rgb_f.gg; | ||||
|    rgb_uvs[2] = rgb_uvs[2] * rgb_f.bb; | ||||
|     | ||||
|    // Convert all the UVs back to the texture space, per color component | ||||
|    for(int i=0; i < 3; i++) { | ||||
|        uv = rgb_uvs[i]; | ||||
|         | ||||
|        // Back from [-aspect..aspect] to [-1..1] | ||||
|        uv.x = uv.x / adsk_input1_frameratio; | ||||
|         | ||||
|        // Remove UV shifts | ||||
|        uv.x -= uShift; | ||||
|        uv.y -= vShift; | ||||
|         | ||||
|        // Back to OGL UV | ||||
|        uv.x = (uv.x + 1) / 2; | ||||
|        uv.y = (uv.y + 1) / 2; | ||||
|         | ||||
|        rgb_uvs[i] = uv; | ||||
|    } | ||||
|     | ||||
|    // Sample the input plate, per component | ||||
|    vec4 sampled; | ||||
|    sampled.r = texture2D(input1, rgb_uvs[0]).r; | ||||
|    sampled.g = texture2D(input1, rgb_uvs[1]).g; | ||||
|    sampled.b = texture2D(input1, rgb_uvs[2]).b; | ||||
|     | ||||
|    // and assign to the output | ||||
|    gl_FragColor.rgba = vec4(sampled.rgb, 1.0 ); | ||||
| } | ||||
							
								
								
									
										630
									
								
								samples/GLSL/islandScene.shader
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										630
									
								
								samples/GLSL/islandScene.shader
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,630 @@ | ||||
| //// High quality (Some browsers may freeze or crash) | ||||
| //#define HIGHQUALITY | ||||
|  | ||||
| //// Medium quality (Should be fine on all systems, works on Intel HD2000 on Win7 but quite slow) | ||||
| //#define MEDIUMQUALITY | ||||
|  | ||||
| //// Defaults | ||||
| //#define REFLECTIONS | ||||
| #define SHADOWS | ||||
| //#define GRASS | ||||
| //#define SMALL_WAVES | ||||
| #define RAGGED_LEAVES | ||||
| //#define DETAILED_NOISE | ||||
| //#define LIGHT_AA // 2 sample SSAA | ||||
| //#define HEAVY_AA // 2x2 RG SSAA | ||||
| //#define TONEMAP | ||||
|  | ||||
| //// Configurations | ||||
| #ifdef MEDIUMQUALITY | ||||
| 	#define SHADOWS | ||||
| 	#define SMALL_WAVES | ||||
| 	#define RAGGED_LEAVES | ||||
| 	#define TONEMAP | ||||
| #endif | ||||
|  | ||||
| #ifdef HIGHQUALITY | ||||
| 	#define REFLECTIONS | ||||
| 	#define SHADOWS | ||||
| 	//#define GRASS | ||||
| 	#define SMALL_WAVES | ||||
| 	#define RAGGED_LEAVES | ||||
| 	#define DETAILED_NOISE | ||||
| 	#define LIGHT_AA | ||||
| 	#define TONEMAP | ||||
| #endif | ||||
|  | ||||
| // Constants | ||||
| const float eps = 1e-5; | ||||
| const float PI = 3.14159265359; | ||||
|  | ||||
| const vec3 sunDir = vec3(0.79057,-0.47434, 0.0); | ||||
| const vec3 skyCol = vec3(0.3, 0.5, 0.8); | ||||
| const vec3 sandCol = vec3(0.9, 0.8, 0.5); | ||||
| const vec3 treeCol = vec3(0.8, 0.65, 0.3); | ||||
| const vec3 grassCol = vec3(0.4, 0.5, 0.18); | ||||
| const vec3 leavesCol = vec3(0.3, 0.6, 0.2); | ||||
| const vec3 leavesPos = vec3(-5.1,13.4, 0.0); | ||||
|  | ||||
| #ifdef TONEMAP | ||||
| const vec3 sunCol = vec3(1.8, 1.7, 1.6); | ||||
| #else | ||||
| const vec3 sunCol = vec3(0.9, 0.85, 0.8); | ||||
| #endif | ||||
|  | ||||
| const float exposure = 1.1; // Only used when tonemapping | ||||
|  | ||||
| // Description : Array and textureless GLSL 2D/3D/4D simplex | ||||
| // noise functions. | ||||
| // Author : Ian McEwan, Ashima Arts. | ||||
| // License : Copyright (C) 2011 Ashima Arts. All rights reserved. | ||||
| // Distributed under the MIT License. See LICENSE file. | ||||
| // https://github.com/ashima/webgl-noise | ||||
| vec3 mod289(vec3 x) { | ||||
|   return x - floor(x * (1.0 / 289.0)) * 289.0; | ||||
| } | ||||
|  | ||||
| vec4 mod289(vec4 x) { | ||||
|   return x - floor(x * (1.0 / 289.0)) * 289.0; | ||||
| } | ||||
|  | ||||
| vec4 permute(vec4 x) { | ||||
|      return mod289(((x*34.0)+1.0)*x); | ||||
| } | ||||
|  | ||||
| vec4 taylorInvSqrt(vec4 r) { | ||||
|   return 1.79284291400159 - 0.85373472095314 * r; | ||||
| } | ||||
|  | ||||
| float snoise(vec3 v) { | ||||
|   const vec2 C = vec2(1.0/6.0, 1.0/3.0) ; | ||||
|   const vec4 D = vec4(0.0, 0.5, 1.0, 2.0); | ||||
|  | ||||
| // First corner | ||||
|   vec3 i = floor(v + dot(v, C.yyy) ); | ||||
|   vec3 x0 = v - i + dot(i, C.xxx) ; | ||||
|  | ||||
| // Other corners | ||||
|   vec3 g = step(x0.yzx, x0.xyz); | ||||
|   vec3 l = 1.0 - g; | ||||
|   vec3 i1 = min( g.xyz, l.zxy ); | ||||
|   vec3 i2 = max( g.xyz, l.zxy ); | ||||
|  | ||||
|   // x0 = x0 - 0.0 + 0.0 * C.xxx; | ||||
|   // x1 = x0 - i1 + 1.0 * C.xxx; | ||||
|   // x2 = x0 - i2 + 2.0 * C.xxx; | ||||
|   // x3 = x0 - 1.0 + 3.0 * C.xxx; | ||||
|   vec3 x1 = x0 - i1 + C.xxx; | ||||
|   vec3 x2 = x0 - i2 + C.yyy; // 2.0*C.x = 1/3 = C.y | ||||
|   vec3 x3 = x0 - D.yyy; // -1.0+3.0*C.x = -0.5 = -D.y | ||||
|  | ||||
| // Permutations | ||||
|   i = mod289(i); | ||||
|   vec4 p = permute( permute( permute( | ||||
|              i.z + vec4(0.0, i1.z, i2.z, 1.0 )) | ||||
|            + i.y + vec4(0.0, i1.y, i2.y, 1.0 )) | ||||
|            + i.x + vec4(0.0, i1.x, i2.x, 1.0 )); | ||||
|  | ||||
| // Gradients: 7x7 points over a square, mapped onto an octahedron. | ||||
| // The ring size 17*17 = 289 is close to a multiple of 49 (49*6 = 294) | ||||
|   float n_ = 0.142857142857; // 1.0/7.0 | ||||
|   vec3 ns = n_ * D.wyz - D.xzx; | ||||
|  | ||||
|   vec4 j = p - 49.0 * floor(p * ns.z * ns.z); // mod(p,7*7) | ||||
|  | ||||
|   vec4 x_ = floor(j * ns.z); | ||||
|   vec4 y_ = floor(j - 7.0 * x_ ); // mod(j,N) | ||||
|  | ||||
|   vec4 x = x_ *ns.x + ns.yyyy; | ||||
|   vec4 y = y_ *ns.x + ns.yyyy; | ||||
|   vec4 h = 1.0 - abs(x) - abs(y); | ||||
|  | ||||
|   vec4 b0 = vec4( x.xy, y.xy ); | ||||
|   vec4 b1 = vec4( x.zw, y.zw ); | ||||
|  | ||||
|   //vec4 s0 = vec4(lessThan(b0,0.0))*2.0 - 1.0; | ||||
|   //vec4 s1 = vec4(lessThan(b1,0.0))*2.0 - 1.0; | ||||
|   vec4 s0 = floor(b0)*2.0 + 1.0; | ||||
|   vec4 s1 = floor(b1)*2.0 + 1.0; | ||||
|   vec4 sh = -step(h, vec4(0.0)); | ||||
|  | ||||
|   vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ; | ||||
|   vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ; | ||||
|  | ||||
|   vec3 p0 = vec3(a0.xy,h.x); | ||||
|   vec3 p1 = vec3(a0.zw,h.y); | ||||
|   vec3 p2 = vec3(a1.xy,h.z); | ||||
|   vec3 p3 = vec3(a1.zw,h.w); | ||||
|  | ||||
| //Normalise gradients | ||||
|   vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3))); | ||||
|   p0 *= norm.x; | ||||
|   p1 *= norm.y; | ||||
|   p2 *= norm.z; | ||||
|   p3 *= norm.w; | ||||
|  | ||||
| // Mix final noise value | ||||
|   vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0); | ||||
|   m = m * m; | ||||
|   return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1), | ||||
|                                 dot(p2,x2), dot(p3,x3) ) ); | ||||
| } | ||||
|  | ||||
|  | ||||
|  | ||||
| // Main | ||||
| float fbm(vec3 p) | ||||
| { | ||||
| 	float final = snoise(p);  | ||||
| 	p *= 1.94; final += snoise(p) * 0.5; | ||||
| 	#ifdef DETAILED_NOISE | ||||
| 	p *= 3.75; final += snoise(p) * 0.25; | ||||
| 	return final / 1.75; | ||||
| 	#else | ||||
| 	return final / 1.5; | ||||
| 	#endif | ||||
| } | ||||
|  | ||||
| float waterHeight(vec3 p) | ||||
| { | ||||
| 	float d = length(p.xz); | ||||
| 	float h = sin(d * 1.5 + iGlobalTime * 3.0) * 12.0 / d; // Island waves | ||||
| 	#ifdef SMALL_WAVES | ||||
| 	h += fbm(p*0.5); // Other waves | ||||
| 	#endif | ||||
| 	return h; | ||||
| } | ||||
|  | ||||
| vec3 bump(vec3 pos, vec3 rayDir) | ||||
| { | ||||
| 	float s = 2.0; | ||||
| 	 | ||||
| 	// Fade out waves to reduce aliasing | ||||
| 	float dist = dot(pos, rayDir); | ||||
| 	s *= dist < 2.0 ? 1.0 : 1.4142 / sqrt(dist); | ||||
| 	 | ||||
| 	// Calculate normal from heightmap | ||||
| 	vec2 e = vec2(1e-2, 0.0); | ||||
| 	vec3 p = vec3(pos.x, iGlobalTime*0.5, pos.z)*0.7; | ||||
| 	float m = waterHeight(p)*s; | ||||
| 	return normalize(vec3( | ||||
| 		waterHeight(p+e.xyy)*s-m, | ||||
| 		1.0, | ||||
| 		waterHeight(p+e.yxy)*s-m | ||||
| 	)); | ||||
| } | ||||
|  | ||||
| // Ray intersections | ||||
| vec4 intersectSphere(vec3 rpos, vec3 rdir, vec3 pos, float rad) | ||||
| { | ||||
| 	vec3 op = pos - rpos; | ||||
| 	float b = dot(op, rdir);  | ||||
| 	float det = b*b - dot(op, op) + rad*rad;  | ||||
| 		 | ||||
| 	if (det > 0.0) | ||||
| 	{ | ||||
| 		det = sqrt(det); | ||||
| 		float t = b - det; | ||||
| 		if (t > eps) | ||||
| 			return vec4(-normalize(rpos+rdir*t-pos), t); | ||||
| 	} | ||||
| 	 | ||||
| 	return vec4(0.0); | ||||
| } | ||||
|  | ||||
| vec4 intersectCylinder(vec3 rpos, vec3 rdir, vec3 pos, float rad) | ||||
| { | ||||
| 	vec3 op = pos - rpos; | ||||
| 	vec2 rdir2 = normalize(rdir.yz); | ||||
| 	float b = dot(op.yz, rdir2); | ||||
| 	float det = b*b - dot(op.yz, op.yz) + rad*rad;  | ||||
| 	 | ||||
| 	if (det > 0.0) | ||||
| 	{ | ||||
| 		det = sqrt(det); | ||||
| 		float t = b - det; | ||||
| 		if (t > eps) | ||||
| 			return vec4(-normalize(rpos.yz+rdir2*t-pos.yz), 0.0, t); | ||||
| 		t = b + det; | ||||
| 		if (t > eps) | ||||
| 			return vec4(-normalize(rpos.yz+rdir2*t-pos.yz), 0.0, t); | ||||
| 	} | ||||
| 	 | ||||
| 	return vec4(0.0); | ||||
| } | ||||
|  | ||||
| vec4 intersectPlane(vec3 rayPos, vec3 rayDir, vec3 n, float d) | ||||
| { | ||||
| 	float t = -(dot(rayPos, n) + d) / dot(rayDir, n); | ||||
| 	return vec4(n * sign(dot(rayDir, n)), t); | ||||
| } | ||||
|  | ||||
| // Helper functions | ||||
| vec3 rotate(vec3 p, float theta) | ||||
| { | ||||
| 	float c = cos(theta), s = sin(theta); | ||||
| 	return vec3(p.x * c + p.z * s, p.y, | ||||
| 				p.z * c - p.x * s); | ||||
| } | ||||
|  | ||||
| float impulse(float k, float x) // by iq | ||||
| { | ||||
|     float h = k*x; | ||||
|     return h * exp(1.0 - h); | ||||
| } | ||||
|  | ||||
| // Raymarched parts of scene | ||||
| float grass(vec3 pos) | ||||
| { | ||||
| 	float h = length(pos - vec3(0.0, -7.0, 0.0)) - 8.0; | ||||
| 	 | ||||
| 	if (h > 2.0) return h; // Optimization (Avoid noise if too far away) | ||||
| 	 | ||||
| 	return h + snoise(pos * 3.0) * 0.1 + pos.y * 0.9; | ||||
| } | ||||
|  | ||||
| float tree(vec3 pos) | ||||
| { | ||||
| 	pos.y -= 0.5; | ||||
| 	float s = sin(pos.y*0.03); | ||||
| 	float c = cos(pos.y*0.03); | ||||
| 	mat2 m = mat2(c, -s, s, c); | ||||
| 	vec3 p = vec3(m*pos.xy, pos.z); | ||||
| 	 | ||||
| 	float width = 1.0 - pos.y * 0.02 - clamp(sin(pos.y * 8.0) * 0.1, 0.05, 0.1); | ||||
| 	 | ||||
| 	return max(length(p.xz) - width, pos.y - 12.5); | ||||
| } | ||||
|  | ||||
| vec2 scene(vec3 pos) | ||||
| { | ||||
| 	float vtree = tree(pos); | ||||
| 	#ifdef GRASS | ||||
| 	float vgrass = grass(pos); | ||||
| 	float v = min(vtree, vgrass); | ||||
| 	#else | ||||
| 	float v = vtree; | ||||
| 	#endif | ||||
| 	return vec2(v, v == vtree ? 2.0 : 1.0); | ||||
| } | ||||
|  | ||||
| vec3 normal(vec3 pos) | ||||
| { | ||||
| 	vec2 eps = vec2(1e-3, 0.0); | ||||
| 	float h = scene(pos).x; | ||||
| 	return normalize(vec3( | ||||
| 		scene(pos-eps.xyy).x-h, | ||||
| 		scene(pos-eps.yxy).x-h, | ||||
| 		scene(pos-eps.yyx).x-h | ||||
| 	)); | ||||
| } | ||||
|  | ||||
| float plantsShadow(vec3 rayPos, vec3 rayDir) | ||||
| { | ||||
| 	// Soft shadow taken from iq | ||||
| 	float k = 6.0; | ||||
| 	float t = 0.0; | ||||
| 	float s = 1.0;	 | ||||
| 	for (int i = 0; i < 30; i++) | ||||
| 	{ | ||||
| 		vec3 pos = rayPos+rayDir*t;	 | ||||
| 		vec2 res = scene(pos);		 | ||||
| 		if (res.x < 0.001) return 0.0; | ||||
| 		s = min(s, k*res.x/t);  | ||||
| 		t += max(res.x, 0.01); | ||||
| 	} | ||||
| 	 | ||||
| 	return s*s*(3.0 - 2.0*s); | ||||
| } | ||||
|  | ||||
| // Ray-traced parts of scene | ||||
| vec4 intersectWater(vec3 rayPos, vec3 rayDir) | ||||
| { | ||||
| 	float h = sin(20.5 + iGlobalTime * 2.0) * 0.03; | ||||
| 	float t = -(rayPos.y + 2.5 + h) / rayDir.y; | ||||
| 	return vec4(0.0, 1.0, 0.0, t); | ||||
| } | ||||
|  | ||||
| vec4 intersectSand(vec3 rayPos, vec3 rayDir) | ||||
| { | ||||
| 	return intersectSphere(rayPos, rayDir, vec3(0.0,-24.1,0.0), 24.1); | ||||
| } | ||||
|  | ||||
| vec4 intersectTreasure(vec3 rayPos, vec3 rayDir) | ||||
| { | ||||
| 	return vec4(0.0); | ||||
| } | ||||
|  | ||||
| vec4 intersectLeaf(vec3 rayPos, vec3 rayDir, float openAmount) | ||||
| {	 | ||||
| 	vec3 dir = normalize(vec3(0.0, 1.0, openAmount)); | ||||
| 	float offset = 0.0; | ||||
| 			 | ||||
| 	vec4 res = intersectPlane(rayPos, rayDir, dir, 0.0); | ||||
| 	vec3 pos = rayPos+rayDir*res.w; | ||||
| 	#ifdef RAGGED_LEAVES | ||||
| 	offset = snoise(pos*0.8) * 0.3; | ||||
| 	#endif | ||||
| 	if (pos.y > 0.0 || length(pos * vec3(0.9, 2.0, 1.0)) > 4.0 - offset) res.w = 0.0; | ||||
| 	 | ||||
| 	vec4 res2 = intersectPlane(rayPos, rayDir, vec3(dir.xy, -dir.z), 0.0); | ||||
| 	pos = rayPos+rayDir*res2.w; | ||||
| 	#ifdef RAGGED_LEAVES | ||||
| 	offset = snoise(pos*0.8) * 0.3; | ||||
| 	#endif | ||||
| 	if (pos.y > 0.0 || length(pos * vec3(0.9, 2.0, 1.0)) > 4.0 - offset) res2.w = 0.0; | ||||
| 	 | ||||
| 	if (res2.w > 0.0 && res2.w < res.w || res.w <= 0.0) | ||||
| 		res = res2; | ||||
| 		 | ||||
| 	return res; | ||||
| } | ||||
|  | ||||
| vec4 leaves(vec3 rayPos, vec3 rayDir) | ||||
| { | ||||
| 	float t = 1e20; | ||||
| 	vec3 n = vec3(0.0); | ||||
| 	 | ||||
| 	rayPos -= leavesPos; | ||||
| 	 | ||||
| 	float sway = impulse(15.0, fract(iGlobalTime / PI * 0.125)); | ||||
| 	float upDownSway = sway * -sin(iGlobalTime) * 0.06; | ||||
| 	float openAmount = sway * max(-cos(iGlobalTime) * 0.4, 0.0); | ||||
| 	 | ||||
| 	float angleOffset = -0.1;	 | ||||
| 	for (float k = 0.0; k < 6.2; k += 0.75) | ||||
| 	{ | ||||
| 		// Left-right | ||||
| 		float alpha = k + (k - PI) * sway * 0.015; | ||||
| 		vec3 p = rotate(rayPos, alpha); | ||||
| 		vec3 d = rotate(rayDir, alpha); | ||||
| 		 | ||||
| 		// Up-down | ||||
| 		angleOffset *= -1.0; | ||||
| 		float theta = -0.4 +  | ||||
| 			angleOffset +  | ||||
| 			cos(k) * 0.35 +  | ||||
| 			upDownSway +  | ||||
| 			sin(iGlobalTime+k*10.0) * 0.03 * (sway + 0.2); | ||||
| 		 | ||||
| 		p = rotate(p.xzy, theta).xzy; | ||||
| 		d = rotate(d.xzy, theta).xzy; | ||||
| 	 | ||||
| 		// Shift | ||||
| 		p -= vec3(5.4, 0.0, 0.0); | ||||
| 		 | ||||
| 		// Intersect individual leaf | ||||
| 		vec4 res = intersectLeaf(p, d, 1.0+openAmount); | ||||
| 		if (res.w > 0.0 && res.w < t) | ||||
| 		{ | ||||
| 			t = res.w; | ||||
| 			n = res.xyz; | ||||
| 		} | ||||
| 	} | ||||
| 	 | ||||
| 	return vec4(n, t); | ||||
| } | ||||
|  | ||||
| // Lighting | ||||
| float shadow(vec3 rayPos, vec3 rayDir) | ||||
| {	 | ||||
| 	float s = 1.0; | ||||
| 	 | ||||
| 	// Intersect sand | ||||
| 	//vec4 resSand = intersectSand(rayPos, rayDir); | ||||
| 	//if (resSand.w > 0.0) return 0.0; | ||||
| 	 | ||||
| 	// Intersect plants | ||||
| 	s = min(s, plantsShadow(rayPos, rayDir)); | ||||
| 	if (s < 0.0001) return 0.0; | ||||
| 	 | ||||
| 	// Intersect leaves | ||||
| 	vec4 resLeaves = leaves(rayPos, rayDir); | ||||
| 	if (resLeaves.w > 0.0 && resLeaves.w < 1e7) return 0.0; | ||||
| 	 | ||||
| 	return s; | ||||
| } | ||||
|  | ||||
| vec3 light(vec3 p, vec3 n) | ||||
| { | ||||
| 	float s = 1.0; | ||||
| 	 | ||||
| 	#ifdef SHADOWS | ||||
| 	s = shadow(p-sunDir*0.01, -sunDir); | ||||
| 	#endif | ||||
| 	 | ||||
| 	vec3 col = sunCol * min(max(dot(n, sunDir), 0.0), s); | ||||
| 	col += skyCol * (-n.y * 0.5 + 0.5) * 0.3; | ||||
| 	return col; | ||||
| } | ||||
|  | ||||
| vec3 lightLeaves(vec3 p, vec3 n) | ||||
| { | ||||
| 	float s = 1.0; | ||||
| 	 | ||||
| 	#ifdef SHADOWS | ||||
| 	s = shadow(p-sunDir*0.01, -sunDir); | ||||
| 	#endif | ||||
| 	 | ||||
| 	float ao = min(length(p - leavesPos) * 0.1, 1.0); | ||||
| 	 | ||||
| 	float ns = dot(n, sunDir); | ||||
| 	float d = sqrt(max(ns, 0.0)); | ||||
| 	vec3 col = sunCol * min(d, s); | ||||
| 	col += sunCol * max(-ns, 0.0) * vec3(0.3, 0.3, 0.1) * ao; | ||||
| 	col += skyCol * (-n.y * 0.5 + 0.5) * 0.3 * ao; | ||||
| 	return col; | ||||
| } | ||||
|  | ||||
| vec3 sky(vec3 n) | ||||
| { | ||||
| 	return skyCol * (1.0 - n.y * 0.8); | ||||
| } | ||||
|  | ||||
| // Ray-marching | ||||
| vec4 plants(vec3 rayPos, vec3 rayDir) | ||||
| { | ||||
| 	float t = 0.0; | ||||
| 	 | ||||
| 	for (int i = 0; i < 40; i++) | ||||
| 	{ | ||||
| 		vec3 pos = rayPos+rayDir*t;	 | ||||
| 		vec2 res = scene(pos); | ||||
| 		float h = res.x; | ||||
| 		 | ||||
| 		if (h < 0.001) | ||||
| 		{ | ||||
| 			vec3 col = res.y == 2.0 ? treeCol : grassCol; | ||||
| 			float uvFact = res.y == 2.0 ? 1.0 : 10.0; | ||||
| 			 | ||||
| 			vec3 n = normal(pos); | ||||
| 			vec2 uv = vec2(n.x, pos.y * 0.5) * 0.2 * uvFact; | ||||
| 			vec3 tex = texture2D(iChannel0, uv).rgb * 0.6 + 0.4; | ||||
| 			float ao = min(length(pos - leavesPos) * 0.1, 1.0); | ||||
| 			return vec4(col * light(pos, n) * ao * tex, t); | ||||
| 		} | ||||
| 		 | ||||
| 		t += h; | ||||
| 	} | ||||
| 	 | ||||
| 	return vec4(sky(rayDir), 1e8); | ||||
| } | ||||
|  | ||||
| // Final combination | ||||
| vec3 traceReflection(vec3 rayPos, vec3 rayDir) | ||||
| { | ||||
| 	vec3 col = vec3(0.0); | ||||
| 	float t = 1e20; | ||||
| 			 | ||||
| 	// Intersect plants | ||||
| 	vec4 resPlants = plants(rayPos, rayDir); | ||||
| 	if (resPlants.w > 0.0 && resPlants.w < t) | ||||
| 	{ | ||||
| 		t = resPlants.w; | ||||
| 		col = resPlants.xyz; | ||||
| 	} | ||||
| 	 | ||||
| 	// Intersect leaves | ||||
| 	vec4 resLeaves = leaves(rayPos, rayDir); | ||||
| 	if (resLeaves.w > 0.0 && resLeaves.w < t) | ||||
| 	{ | ||||
| 		vec3 pos = rayPos + rayDir * resLeaves.w; | ||||
| 		vec2 uv = (pos.xz - leavesPos.xz) * 0.3; | ||||
| 		float tex = texture2D(iChannel0, uv).r * 0.6 + 0.5; | ||||
| 		 | ||||
| 		t = resLeaves.w; | ||||
| 		col = leavesCol * lightLeaves(pos, resLeaves.xyz) * tex; | ||||
| 	} | ||||
| 		 | ||||
| 	if (t > 1e7) return sky(rayDir); | ||||
| 	 | ||||
| 	return col; | ||||
| } | ||||
|  | ||||
| vec3 trace(vec3 rayPos, vec3 rayDir) | ||||
| { | ||||
| 	vec3 col = vec3(0.0); | ||||
| 	float t = 1e20; | ||||
| 	 | ||||
| 	// Intersect sand | ||||
| 	vec4 resSand = intersectSand(rayPos, rayDir); | ||||
| 	if (resSand.w > 0.0) | ||||
| 	{ | ||||
| 		vec3 pos = rayPos + rayDir * resSand.w; | ||||
| 		t = resSand.w; | ||||
|  | ||||
| 		col = sandCol * light(pos, resSand.xyz); | ||||
| 	} | ||||
| 	 | ||||
| 	// Intersect treasure chest | ||||
| 	vec4 resTreasure = intersectTreasure(rayPos, rayDir); | ||||
| 	if (resTreasure.w > 0.0 && resTreasure.w < t) | ||||
| 	{ | ||||
| 		vec3 pos = rayPos + rayDir * resTreasure.w; | ||||
| 		t = resTreasure.w; | ||||
| 		col = leavesCol * light(pos, resTreasure.xyz); | ||||
| 	} | ||||
| 	 | ||||
| 	// Intersect leaves | ||||
| 	vec4 resLeaves = leaves(rayPos, rayDir); | ||||
| 	if (resLeaves.w > 0.0 && resLeaves.w < t) | ||||
| 	{ | ||||
| 		vec3 pos = rayPos + rayDir * resLeaves.w; | ||||
| 		vec2 uv = (pos.xz - leavesPos.xz) * 0.3; | ||||
| 		float tex = texture2D(iChannel0, uv).r * 0.6 + 0.5; | ||||
| 		 | ||||
| 		t = resLeaves.w; | ||||
| 		col = leavesCol * lightLeaves(pos, resLeaves.xyz) * tex; | ||||
| 	} | ||||
| 	 | ||||
| 	// Intersect plants | ||||
| 	vec4 resPlants = plants(rayPos, rayDir); | ||||
| 	if (resPlants.w > 0.0 && resPlants.w < t) | ||||
| 	{ | ||||
| 		t = resPlants.w; | ||||
| 		col = resPlants.xyz; | ||||
| 	} | ||||
| 		 | ||||
| 	// Intersect water	 | ||||
| 	vec4 resWater = intersectWater(rayPos, rayDir); | ||||
| 	if (resWater.w > 0.0 && resWater.w < t) | ||||
| 	{ | ||||
| 		vec3 pos = rayPos + rayDir * resWater.w; | ||||
| 		float dist = t - resWater.w; | ||||
| 		vec3 n = bump(pos, rayDir); | ||||
| 		 | ||||
| 		float ct = -min(dot(n,rayDir), 0.0); | ||||
| 		float fresnel = 0.9 - 0.9 * pow(1.0 - ct, 5.0); | ||||
| 		 | ||||
| 		vec3 trans = col * exp(-dist * vec3(1.0, 0.7, 0.4) * 3.0); | ||||
| 		vec3 reflDir = normalize(reflect(rayDir, n)); | ||||
| 		vec3 refl = sky(reflDir); | ||||
| 		 | ||||
| 		#ifdef REFLECTIONS | ||||
| 		if (dot(pos, rayDir) < -2.0) | ||||
| 			refl = traceReflection(pos, reflDir).rgb; | ||||
| 		#endif | ||||
| 				 | ||||
| 		t = resWater.t; | ||||
| 		col = mix(refl, trans, fresnel); | ||||
| 	} | ||||
| 	 | ||||
| 	if (t > 1e7) return sky(rayDir); | ||||
| 	 | ||||
| 	return col; | ||||
| } | ||||
|  | ||||
| // Ray-generation | ||||
| vec3 camera(vec2 px) | ||||
| { | ||||
| 	vec2 rd = (px / iResolution.yy - vec2(iResolution.x/iResolution.y*0.5-0.5, 0.0)) * 2.0 - 1.0; | ||||
| 	float t = sin(iGlobalTime * 0.1) * 0.2; | ||||
| 	vec3 rayDir = normalize(vec3(rd.x, rd.y, 1.0)); | ||||
| 	vec3 rayPos = vec3(0.0, 3.0, -18.0); | ||||
| 	return trace(rayPos, rayDir); | ||||
| } | ||||
|  | ||||
| void main(void) | ||||
| { | ||||
| 	#ifdef HEAVY_AA | ||||
| 		vec3 col = camera(gl_FragCoord.xy+vec2(0.0,0.5))*0.25; | ||||
| 		col += camera(gl_FragCoord.xy+vec2(0.25,0.0))*0.25; | ||||
| 		col += camera(gl_FragCoord.xy+vec2(0.5,0.75))*0.25; | ||||
| 		col += camera(gl_FragCoord.xy+vec2(0.75,0.25))*0.25; | ||||
| 	#else | ||||
| 		vec3 col = camera(gl_FragCoord.xy); | ||||
| 		#ifdef LIGHT_AA | ||||
| 			col = col * 0.5 + camera(gl_FragCoord.xy+vec2(0.5,0.5))*0.5; | ||||
| 		#endif | ||||
| 	#endif | ||||
| 	 | ||||
| 	#ifdef TONEMAP | ||||
| 	// Optimized Haarm-Peter Duiker’s curve | ||||
| 	vec3 x = max(vec3(0.0),col*exposure-0.004); | ||||
| 	col = (x*(6.2*x+.5))/(x*(6.2*x+1.7)+0.06); | ||||
| 	#else | ||||
| 	col = pow(col, vec3(0.4545)); | ||||
| 	#endif | ||||
| 	 | ||||
| 	gl_FragColor = vec4(col, 1.0); | ||||
| } | ||||
							
								
								
									
										12
									
								
								samples/Genie/Class.gs
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								samples/Genie/Class.gs
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | ||||
| init | ||||
| 	new Demo( "Demonstration class" ).run() | ||||
|  | ||||
| class Demo | ||||
| 	_message:string = "" | ||||
|  | ||||
| 	construct ( message:string = "Optional argument - no message passed in constructor" ) | ||||
| 		_message = message | ||||
|  | ||||
| 	def run() | ||||
| 		print( _message ) | ||||
| 		 | ||||
							
								
								
									
										2
									
								
								samples/Genie/Hello.gs
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										2
									
								
								samples/Genie/Hello.gs
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,2 @@ | ||||
| init | ||||
| 	print( "Hello, World!" ) | ||||
							
								
								
									
										135
									
								
								samples/HCL/main.tf
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										135
									
								
								samples/HCL/main.tf
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,135 @@ | ||||
| resource "aws_security_group" "elb_sec_group" { | ||||
|   description = "Allow traffic from the internet to ELB port 80" | ||||
|   vpc_id = "${var.vpc_id}" | ||||
|  | ||||
|   ingress { | ||||
|       from_port = 80 | ||||
|       to_port = 80 | ||||
|       protocol = "tcp" | ||||
|       cidr_blocks = ["${split(",", var.allowed_cidr_blocks)}"] | ||||
|   } | ||||
|  | ||||
|   egress { | ||||
|       from_port = 0 | ||||
|       to_port = 0 | ||||
|       protocol = "-1" | ||||
|       cidr_blocks = ["0.0.0.0/0"] | ||||
|   } | ||||
| } | ||||
|  | ||||
| resource "aws_security_group" "dokku_allow_ssh_from_internal" { | ||||
|   description = "Allow git access over ssh from the private subnet" | ||||
|   vpc_id = "${var.vpc_id}" | ||||
|  | ||||
|   ingress { | ||||
|       from_port = 22 | ||||
|       to_port = 22 | ||||
|       protocol = "tcp" | ||||
|       cidr_blocks = ["${var.private_subnet_cidr}"] | ||||
|   } | ||||
|  | ||||
|   egress { | ||||
|       from_port = 0 | ||||
|       to_port = 0 | ||||
|       protocol = "-1" | ||||
|       cidr_blocks = ["0.0.0.0/0"] | ||||
|   } | ||||
| } | ||||
|  | ||||
| resource "aws_security_group" "allow_from_elb_to_instance" { | ||||
|   description = "Allow traffic from the ELB to the private instance" | ||||
|   vpc_id = "${var.vpc_id}" | ||||
|  | ||||
|   ingress { | ||||
|       security_groups = ["${aws_security_group.elb_sec_group.id}"] | ||||
|       from_port = 80 | ||||
|       to_port = 80 | ||||
|       protocol = "tcp" | ||||
|   } | ||||
|  | ||||
|   egress { | ||||
|       from_port = 0 | ||||
|       to_port = 0 | ||||
|       protocol = "-1" | ||||
|       cidr_blocks = ["0.0.0.0/0"] | ||||
|   } | ||||
| } | ||||
|  | ||||
| resource "aws_instance" "dokku" { | ||||
|   ami = "ami-47a23a30" | ||||
|   instance_type = "${var.instance_type}" | ||||
|   associate_public_ip_address = false | ||||
|   key_name = "${var.key_name}" | ||||
|   subnet_id = "${var.private_subnet_id}" | ||||
|   vpc_security_group_ids = [ | ||||
|     "${var.bastion_sec_group_id}", | ||||
|     "${aws_security_group.allow_from_elb_to_instance.id}", | ||||
|     "${aws_security_group.dokku_allow_ssh_from_internal.id}" | ||||
|   ] | ||||
|   tags { | ||||
|     Name = "${var.name}" | ||||
|   } | ||||
|   connection { | ||||
|     user = "ubuntu" | ||||
|     private_key = "${var.private_key}" | ||||
|     bastion_host = "${var.bastion_host}" | ||||
|     bastion_port = "${var.bastion_port}" | ||||
|     bastion_user = "${var.bastion_user}" | ||||
|     bastion_private_key = "${var.bastion_private_key}" | ||||
|   } | ||||
|   provisioner "file" { | ||||
|     source = "${path.module}/../scripts/install-dokku.sh" | ||||
|     destination = "/home/ubuntu/install-dokku.sh" | ||||
|   } | ||||
|   provisioner "remote-exec" { | ||||
|     inline = [ | ||||
|       "chmod +x /home/ubuntu/install-dokku.sh", | ||||
|       "HOSTNAME=${var.hostname} /home/ubuntu/install-dokku.sh" | ||||
|     ] | ||||
|   } | ||||
| } | ||||
|  | ||||
| resource "aws_elb" "elb_dokku" { | ||||
|   name = "elb-dokku-${var.name}" | ||||
|   subnets = ["${var.public_subnet_id}"] | ||||
|   security_groups = ["${aws_security_group.elb_sec_group.id}"] | ||||
|  | ||||
|   listener { | ||||
|     instance_port = 80 | ||||
|     instance_protocol = "http" | ||||
|     lb_port = 80 | ||||
|     lb_protocol = "http" | ||||
|   } | ||||
|  | ||||
|   health_check { | ||||
|     healthy_threshold = 2 | ||||
|     unhealthy_threshold = 2 | ||||
|     timeout = 3 | ||||
|     target = "HTTP:80/" | ||||
|     interval = 30 | ||||
|   } | ||||
|  | ||||
|   instances = ["${aws_instance.dokku.id}"] | ||||
|   cross_zone_load_balancing = false | ||||
|   idle_timeout = 400 | ||||
|  | ||||
|   tags { | ||||
|     Name = "elb-dokku-${var.name}" | ||||
|   } | ||||
| } | ||||
|  | ||||
| resource "aws_route53_record" "dokku-deploy" { | ||||
|    zone_id = "${var.zone_id}" | ||||
|    name = "deploy.${var.hostname}" | ||||
|    type = "A" | ||||
|    ttl = "300" | ||||
|    records = ["${aws_instance.dokku.private_ip}"] | ||||
| } | ||||
|  | ||||
| resource "aws_route53_record" "dokku-wildcard" { | ||||
|    zone_id = "${var.zone_id}" | ||||
|    name = "*.${var.hostname}" | ||||
|    type = "CNAME" | ||||
|    ttl = "300" | ||||
|    records = ["${aws_elb.elb_dokku.dns_name}"] | ||||
| } | ||||
							
								
								
									
										89
									
								
								samples/HLSL/bloom.cginc
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										89
									
								
								samples/HLSL/bloom.cginc
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,89 @@ | ||||
| // From https://github.com/Unity-Technologies/PostProcessing/blob/master/PostProcessing/Resources/Shaders/Bloom.cginc | ||||
| // Licensed under the MIT license | ||||
|  | ||||
| #ifndef __BLOOM__ | ||||
| #define __BLOOM__ | ||||
|  | ||||
| #include "Common.cginc" | ||||
|  | ||||
| // Brightness function | ||||
| half Brightness(half3 c) | ||||
| { | ||||
|     return Max3(c); | ||||
| } | ||||
|  | ||||
| // 3-tap median filter | ||||
| half3 Median(half3 a, half3 b, half3 c) | ||||
| { | ||||
|     return a + b + c - min(min(a, b), c) - max(max(a, b), c); | ||||
| } | ||||
|  | ||||
// Downsample with a 4x4 box filter.
// Takes four bilinear taps one texel diagonally around `uv`, decodes each
// from HDR, and averages them (each bilinear tap already covers 2x2 texels).
half3 DownsampleFilter(sampler2D tex, float2 uv, float2 texelSize)
{
    float4 offs = texelSize.xyxy * float4(-1.0, -1.0, 1.0, 1.0);

    half3 acc = DecodeHDR(tex2D(tex, uv + offs.xy))
              + DecodeHDR(tex2D(tex, uv + offs.zy))
              + DecodeHDR(tex2D(tex, uv + offs.xw))
              + DecodeHDR(tex2D(tex, uv + offs.zw));

    return acc * (1.0 / 4.0);
}
|  | ||||
// Downsample with a 4x4 box filter + anti-flicker filter.
// Same four diagonal taps as DownsampleFilter, but combined with a
// Karis-style weighted average (using brightness instead of luma): each tap
// is weighted by 1 / (brightness + 1) so a single very bright sample cannot
// dominate the average and cause temporal flicker ("fireflies").
half3 DownsampleAntiFlickerFilter(sampler2D tex, float2 uv, float2 texelSize)
{
    float4 offs = texelSize.xyxy * float4(-1.0, -1.0, 1.0, 1.0);

    half3 c1 = DecodeHDR(tex2D(tex, uv + offs.xy));
    half3 c2 = DecodeHDR(tex2D(tex, uv + offs.zy));
    half3 c3 = DecodeHDR(tex2D(tex, uv + offs.xw));
    half3 c4 = DecodeHDR(tex2D(tex, uv + offs.zw));

    half w1 = 1.0 / (Brightness(c1) + 1.0);
    half w2 = 1.0 / (Brightness(c2) + 1.0);
    half w3 = 1.0 / (Brightness(c3) + 1.0);
    half w4 = 1.0 / (Brightness(c4) + 1.0);
    half invSum = 1.0 / (w1 + w2 + w3 + w4);

    return (c1 * w1 + c2 * w2 + c3 * w3 + c4 * w4) * invSum;
}
|  | ||||
// Upsample one bloom mip. `sampleScale` widens the kernel footprint.
// Mobile/console path uses a cheap 4-tap box; the default path uses a
// 9-tap tent filter with 1-2-1 / 2-4-2 / 1-2-1 weights (sum = 16).
half3 UpsampleFilter(sampler2D tex, float2 uv, float2 texelSize, float sampleScale)
{
#if MOBILE_OR_CONSOLE
    // 4-tap bilinear upsampler
    float4 d = texelSize.xyxy * float4(-1.0, -1.0, 1.0, 1.0) * (sampleScale * 0.5);

    half3 s;
    s =  DecodeHDR(tex2D(tex, uv + d.xy));
    s += DecodeHDR(tex2D(tex, uv + d.zy));
    s += DecodeHDR(tex2D(tex, uv + d.xw));
    s += DecodeHDR(tex2D(tex, uv + d.zw));

    return s * (1.0 / 4.0);
#else
    // 9-tap bilinear upsampler (tent filter)
    float4 d = texelSize.xyxy * float4(1.0, 1.0, -1.0, 0.0) * sampleScale;

    half3 s;
    s =  DecodeHDR(tex2D(tex, uv - d.xy));
    s += DecodeHDR(tex2D(tex, uv - d.wy)) * 2.0;
    s += DecodeHDR(tex2D(tex, uv - d.zy));

    s += DecodeHDR(tex2D(tex, uv + d.zw)) * 2.0;
    s += DecodeHDR(tex2D(tex, uv))        * 4.0;
    s += DecodeHDR(tex2D(tex, uv + d.xw)) * 2.0;

    s += DecodeHDR(tex2D(tex, uv + d.zy));
    s += DecodeHDR(tex2D(tex, uv + d.wy)) * 2.0;
    s += DecodeHDR(tex2D(tex, uv + d.xy));

    return s * (1.0 / 16.0);
#endif
}
|  | ||||
| #endif // __BLOOM__ | ||||
							
								
								
									
										923
									
								
								samples/JavaScript/ccalc-lex.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										923
									
								
								samples/JavaScript/ccalc-lex.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,923 @@ | ||||
| /* generated by jison-lex 0.3.4-159 */ | ||||
| var ccalcLex = (function () { | ||||
| // See also: | ||||
| // http://stackoverflow.com/questions/1382107/whats-a-good-way-to-extend-error-in-javascript/#35881508 | ||||
| // but we keep the prototype.constructor and prototype.name assignment lines too for compatibility | ||||
| // with userland code which might access the derived class in a 'classic' way. | ||||
// Custom Error type for lexer failures.
//
// msg  - human-readable message; defaults to '???' when null/undefined.
// hash - optional lexErrorInfo object; when it carries a caught `exception`
//        (an Error), that exception's message and stack are reused.
//
// `name`, `message` and `stack` are defined as non-enumerable properties so
// instances serialize like native Errors.
function JisonLexerError(msg, hash) {
    Object.defineProperty(this, 'name', {
        enumerable: false,
        writable: false,
        value: 'JisonLexerError'
    });

    if (msg == null) msg = '???';

    // `message` must stay writable: it may be overwritten below with the
    // wrapped exception's message.
    Object.defineProperty(this, 'message', {
        enumerable: false,
        writable: true,
        value: msg
    });

    this.hash = hash;

    var stacktrace;
    if (hash && hash.exception instanceof Error) {
        var ex2 = hash.exception;
        this.message = ex2.message || msg;
        stacktrace = ex2.stack;
    }
    if (!stacktrace) {
        if (Error.hasOwnProperty('captureStackTrace')) { // V8
            Error.captureStackTrace(this, this.constructor);
        } else {
            stacktrace = (new Error(msg)).stack;
        }
    }
    if (stacktrace) {
        Object.defineProperty(this, 'stack', {
            enumerable: false,
            writable: false,
            value: stacktrace
        });
    }
}
|  | ||||
// Wire JisonLexerError into the Error prototype chain (see the technique at
// stackoverflow.com/questions/1382107). The explicit constructor/name
// assignments keep 'classic' userland introspection working on engines that
// fall back to the Object.create() path.
if (typeof Object.setPrototypeOf === 'function') {
    Object.setPrototypeOf(JisonLexerError.prototype, Error.prototype);
} else {
    JisonLexerError.prototype = Object.create(Error.prototype);
}
JisonLexerError.prototype.constructor = JisonLexerError;
JisonLexerError.prototype.name = 'JisonLexerError';
|  | ||||
|  | ||||
// The lexer kernel. The rule tables (`rules`, `conditions`, `performAction`,
// options, ...) are injected below this literal by the jison-lex generator.
var lexer = {
    EOF: 1,
    ERROR: 2,

    // JisonLexerError: JisonLexerError,        // <-- injected by the code generator

    // options: {},                             // <-- injected by the code generator

    // yy: ...,                                 // <-- injected by setInput()

    __currentRuleSet__: null,                   // <-- internal rule set cache for the current lexer state

    __error_infos: [],                          // INTERNAL USE ONLY: the set of lexErrorInfo objects created since the last cleanup

    __decompressed: false,                      // INTERNAL USE ONLY: mark whether the lexer instance has been 'unfolded' completely and is now ready for use

    done: false,                                // INTERNAL USE ONLY
    _backtrack: false,                          // INTERNAL USE ONLY
    _input: '',                                 // INTERNAL USE ONLY
    _more: false,                               // INTERNAL USE ONLY
    _signaled_error_token: false,               // INTERNAL USE ONLY

    conditionStack: [],                         // INTERNAL USE ONLY; managed via `pushState()`, `popState()`, `topState()` and `stateStackSize()`

    match: '',                                  // READ-ONLY, ADVANCED USE: input matched so far for the token under construction; unlike `yytext`, survives userland edits made inside performAction()
    matched: '',                                // READ-ONLY, ADVANCED USE: entire input matched so far
    matches: false,                             // READ-ONLY, ADVANCED USE: RE match result of the last (successful) match attempt
    yytext: '',                                 // ADVANCED USE: matched input for the token under construction; handed to the parser as the token value by `lex()`
    offset: 0,                                  // READ-ONLY, ADVANCED USE: cursor position in the input, i.e. number of characters consumed so far
    yyleng: 0,                                  // READ-ONLY, ADVANCED USE: length of `yytext`
    yylineno: 0,                                // READ-ONLY, ADVANCED USE: line number at which the token under construction is located
    yylloc: null,                               // READ-ONLY, ADVANCED USE: location info (lines + columns) for the token under construction
|  | ||||
|     // INTERNAL USE: construct a suitable error info hash object instance for `parseError`. | ||||
|     constructLexErrorInfo: function lexer_constructLexErrorInfo(msg, recoverable) { | ||||
|         var pei = { | ||||
|             errStr: msg, | ||||
|             recoverable: !!recoverable, | ||||
|             text: this.match,           // This one MAY be empty; userland code should use the `upcomingInput` API to obtain more text which follows the 'lexer cursor position'... | ||||
|             token: null, | ||||
|             line: this.yylineno, | ||||
|             loc: this.yylloc, | ||||
|             yy: this.yy, | ||||
|             lexer: this, | ||||
|  | ||||
|             // and make sure the error info doesn't stay due to potential | ||||
|             // ref cycle via userland code manipulations. | ||||
|             // These would otherwise all be memory leak opportunities! | ||||
|             // | ||||
|             // Note that only array and object references are nuked as those | ||||
|             // constitute the set of elements which can produce a cyclic ref. | ||||
|             // The rest of the members is kept intact as they are harmless. | ||||
|             destroy: function destructLexErrorInfo() { | ||||
|                 // remove cyclic references added to error info: | ||||
|                 // info.yy = null; | ||||
|                 // info.lexer = null; | ||||
|                 // ... | ||||
|                 var rec = !!this.recoverable; | ||||
|                 for (var key in this) { | ||||
|                     if (this.hasOwnProperty(key) && typeof key === 'object') { | ||||
|                         this[key] = undefined; | ||||
|                     } | ||||
|                 } | ||||
|                 this.recoverable = rec; | ||||
|             } | ||||
|         }; | ||||
|         // track this instance so we can `destroy()` it once we deem it superfluous and ready for garbage collection! | ||||
|         this.__error_infos.push(pei); | ||||
|         return pei; | ||||
|     }, | ||||
|  | ||||
|     parseError: function lexer_parseError(str, hash) { | ||||
|         if (this.yy.parser && typeof this.yy.parser.parseError === 'function') { | ||||
|             return this.yy.parser.parseError(str, hash) || this.ERROR; | ||||
|         } else if (typeof this.yy.parseError === 'function') { | ||||
|             return this.yy.parseError.call(this, str, hash) || this.ERROR; | ||||
|         } else { | ||||
|             throw new this.JisonLexerError(str); | ||||
|         } | ||||
|     }, | ||||
|  | ||||
|     // final cleanup function for when we have completed lexing the input;  | ||||
|     // make it an API so that external code can use this one once userland | ||||
|     // code has decided it's time to destroy any lingering lexer error | ||||
|     // hash object instances and the like: this function helps to clean | ||||
|     // up these constructs, which *may* carry cyclic references which would | ||||
|     // otherwise prevent the instances from being properly and timely | ||||
|     // garbage-collected, i.e. this function helps prevent memory leaks! | ||||
|     cleanupAfterLex: function lexer_cleanupAfterLex(do_not_nuke_errorinfos) { | ||||
|         var rv; | ||||
|  | ||||
|         // prevent lingering circular references from causing memory leaks: | ||||
|         this.setInput('', {}); | ||||
|  | ||||
|         // nuke the error hash info instances created during this run. | ||||
|         // Userland code must COPY any data/references | ||||
|         // in the error hash instance(s) it is more permanently interested in. | ||||
|         if (!do_not_nuke_errorinfos) { | ||||
|             for (var i = this.__error_infos.length - 1; i >= 0; i--) { | ||||
|                 var el = this.__error_infos[i]; | ||||
|                 if (el && typeof el.destroy === 'function') { | ||||
|                     el.destroy(); | ||||
|                 } | ||||
|             } | ||||
|             this.__error_infos.length = 0; | ||||
|         } | ||||
|  | ||||
|         return this; | ||||
|     }, | ||||
|  | ||||
|     // clear the lexer token context; intended for internal use only | ||||
|     clear: function lexer_clear() { | ||||
|         this.yytext = ''; | ||||
|         this.yyleng = 0; | ||||
|         this.match = ''; | ||||
|         this.matches = false; | ||||
|         this._more = false; | ||||
|         this._backtrack = false; | ||||
|     }, | ||||
|  | ||||
    // Resets the lexer and primes it with new input.
    //
    // On the first call this also 'decompresses' the generated tables:
    // numeric xref slots in `rules` / `rules_prefix1` are resolved to the
    // regex / prefix string they point at, and every condition state gets
    // 1-based fast-lookup arrays (`__rule_regexes`, `__rule_count`) plus,
    // when prefix data is present, a 65536-entry first-character dispatch
    // LUT (`__dispatch_lut`).
    setInput: function lexer_setInput(input, yy) {
        this.yy = yy || this.yy || {};

        // also check if we've fully initialized the lexer instance,
        // including expansion work to be done to go from a loaded
        // lexer to a usable lexer:
        if (!this.__decompressed) {
          // step 1: decompress the regex list:
          var rules = this.rules;
          for (var i = 0, len = rules.length; i < len; i++) {
            var rule_re = rules[i];

            // compression: is the RE an xref to another RE slot in the rules[] table?
            if (typeof rule_re === 'number') {
              rules[i] = rules[rule_re];
            }
          }

          // step 2: unfold the conditions[] set to make these ready for use:
          var conditions = this.conditions;
          for (var k in conditions) {
            var spec = conditions[k];

            var rule_ids = spec.rules;

            // NOTE: `var` declarations below re-use the hoisted i/len/rule_re
            // bindings from step 1; that is intentional (and safe) here.
            var len = rule_ids.length;
            var rule_regexes = new Array(len + 1);            // slot 0 is unused; we use a 1-based index approach here to keep the hottest code in `lexer_next()` fast and simple!
            var rule_new_ids = new Array(len + 1);

            if (this.rules_prefix1) {
                var rule_prefixes = new Array(65536);
                var first_catch_all_index = 0;

                for (var i = 0; i < len; i++) {
                  var idx = rule_ids[i];
                  var rule_re = rules[idx];
                  rule_regexes[i + 1] = rule_re;
                  rule_new_ids[i + 1] = idx;

                  var prefix = this.rules_prefix1[idx];
                  // compression: is the PREFIX-STRING an xref to another PREFIX-STRING slot in the rules_prefix1[] table?
                  if (typeof prefix === 'number') {
                    prefix = this.rules_prefix1[prefix];
                  }
                  // init the prefix lookup table: first come, first serve...
                  if (!prefix) {
                    if (!first_catch_all_index) {
                      first_catch_all_index = i + 1;
                    }
                  } else {
                    for (var j = 0, pfxlen = prefix.length; j < pfxlen; j++) {
                      var pfxch = prefix.charCodeAt(j);
                      // first come, first serve:
                      if (!rule_prefixes[pfxch]) {
                        rule_prefixes[pfxch] = i + 1;
                      }
                    }
                  }
                }

                // if no catch-all prefix has been encountered yet, it means all
                // rules have limited prefix sets and it MAY be that particular
                // input characters won't be recognized by any rule in this
                // condition state.
                //
                // To speed up their discovery at run-time while keeping the
                // remainder of the lexer kernel code very simple (and fast),
                // we point these to an 'illegal' rule set index *beyond*
                // the end of the rule set.
                if (!first_catch_all_index) {
                  first_catch_all_index = len + 1;
                }

                for (var i = 0; i < 65536; i++) {
                  if (!rule_prefixes[i]) {
                    rule_prefixes[i] = first_catch_all_index;
                  }
                }

                spec.__dispatch_lut = rule_prefixes;
            } else {
                for (var i = 0; i < len; i++) {
                  var idx = rule_ids[i];
                  var rule_re = rules[idx];
                  rule_regexes[i + 1] = rule_re;
                  rule_new_ids[i + 1] = idx;
                }
            }

            spec.rules = rule_new_ids;
            spec.__rule_regexes = rule_regexes;
            spec.__rule_count = len;
          }

          this.__decompressed = true;
        }

        // Reset all per-run state: input, cursor, line/column tracking, and
        // the condition stack (back to the 'INITIAL' start condition).
        this._input = input || '';
        this.clear();
        this._signaled_error_token = false;
        this.done = false;
        this.yylineno = 0;
        this.matched = '';
        this.conditionStack = ['INITIAL'];
        this.__currentRuleSet__ = null;
        this.yylloc = {
            first_line: 1,
            first_column: 0,
            last_line: 1,
            last_column: 0
        };
        if (this.options.ranges) {
            this.yylloc.range = [0, 0];
        }
        this.offset = 0;
        return this;
    },
|  | ||||
    // Consumes and returns one character from the input; returns NULL and
    // sets `done` at end of input. A CRLF pair is consumed and returned as
    // a single two-character 'ch' (see comment below).
    input: function lexer_input() {
        if (!this._input) {
            this.done = true;
            return null;
        }
        var ch = this._input[0];
        this.yytext += ch;
        this.yyleng++;
        this.offset++;
        this.match += ch;
        this.matched += ch;
        // Count the linenumber up when we hit the LF (or a stand-alone CR).
        // On CRLF, the linenumber is incremented when you fetch the CR or the CRLF combo
        // and we advance immediately past the LF as well, returning both together as if
        // it was all a single 'character' only.
        var slice_len = 1;
        var lines = false;
        if (ch === '\n') {
            lines = true;
        } else if (ch === '\r') {
            lines = true;
            var ch2 = this._input[1];
            if (ch2 === '\n') {
                // consume the LF half of the CRLF pair as well
                slice_len++;
                ch += ch2;
                this.yytext += ch2;
                this.yyleng++;
                this.offset++;
                this.match += ch2;
                this.matched += ch2;
                if (this.options.ranges) {
                    this.yylloc.range[1]++;
                }
            }
        }
        if (lines) {
            this.yylineno++;
            this.yylloc.last_line++;
        } else {
            this.yylloc.last_column++;
        }
        if (this.options.ranges) {
            this.yylloc.range[1]++;
        }

        this._input = this._input.slice(slice_len);
        return ch;
    },
|  | ||||
    // Unshifts one char (or a string) back onto the input and rewinds
    // yytext/match/matched, the offset and the line/column tracking.
    unput: function lexer_unput(ch) {
        var len = ch.length;
        var lines = ch.split(/(?:\r\n?|\n)/g);

        this._input = ch + this._input;
        this.yytext = this.yytext.substr(0, this.yytext.length - len);
        //this.yyleng -= len;
        this.offset -= len;
        var oldLines = this.match.split(/(?:\r\n?|\n)/g);
        this.match = this.match.substr(0, this.match.length - len);
        this.matched = this.matched.substr(0, this.matched.length - len);

        // roll the line number back by the number of line breaks pushed back
        if (lines.length - 1) {
            this.yylineno -= lines.length - 1;
        }

        // NOTE(review): `lines` is an array, so the outer ternary condition
        // is always truthy and the `first_column - len` branch is dead code —
        // presumably inherited from an older string-based check; confirm
        // against upstream jison-lex before removing.
        this.yylloc.last_line = this.yylineno + 1;
        this.yylloc.last_column = (lines ?
                (lines.length === oldLines.length ? this.yylloc.first_column : 0)
                + oldLines[oldLines.length - lines.length].length - lines[0].length :
                this.yylloc.first_column - len);

        if (this.options.ranges) {
            this.yylloc.range[1] = this.yylloc.range[0] + this.yyleng - len;
        }
        this.yyleng = this.yytext.length;
        this.done = false;
        return this;
    },
|  | ||||
|     // When called from action, caches matched text and appends it on next action | ||||
|     more: function lexer_more() { | ||||
|         this._more = true; | ||||
|         return this; | ||||
|     }, | ||||
|  | ||||
|     // When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead. | ||||
|     reject: function lexer_reject() { | ||||
|         if (this.options.backtrack_lexer) { | ||||
|             this._backtrack = true; | ||||
|         } else { | ||||
|             // when the parseError() call returns, we MUST ensure that the error is registered. | ||||
|             // We accomplish this by signaling an 'error' token to be produced for the current | ||||
|             // .lex() run. | ||||
|             var p = this.constructLexErrorInfo('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), false); | ||||
|             this._signaled_error_token = (this.parseError(p.errStr, p) || this.ERROR); | ||||
|         } | ||||
|         return this; | ||||
|     }, | ||||
|  | ||||
|     // retain first n characters of the match | ||||
|     less: function lexer_less(n) { | ||||
|         return this.unput(this.match.slice(n)); | ||||
|     }, | ||||
|  | ||||
|     // return (part of the) already matched input, i.e. for error messages. | ||||
|     // Limit the returned string length to `maxSize` (default: 20). | ||||
|     // Limit the returned string to the `maxLines` number of lines of input (default: 1). | ||||
|     // Negative limit values equal *unlimited*. | ||||
|     pastInput: function lexer_pastInput(maxSize, maxLines) { | ||||
|         var past = this.matched.substring(0, this.matched.length - this.match.length); | ||||
|         if (maxSize < 0) | ||||
|             maxSize = past.length; | ||||
|         else if (!maxSize) | ||||
|             maxSize = 20; | ||||
|         if (maxLines < 0) | ||||
|             maxLines = past.length;         // can't ever have more input lines than this! | ||||
|         else if (!maxLines) | ||||
|             maxLines = 1; | ||||
|         // `substr` anticipation: treat \r\n as a single character and take a little | ||||
|         // more than necessary so that we can still properly check against maxSize | ||||
|         // after we've transformed and limited the newLines in here: | ||||
|         past = past.substr(-maxSize * 2 - 2); | ||||
|         // now that we have a significantly reduced string to process, transform the newlines | ||||
|         // and chop them, then limit them: | ||||
|         var a = past.replace(/\r\n|\r/g, '\n').split('\n'); | ||||
|         a = a.slice(-maxLines); | ||||
|         past = a.join('\n'); | ||||
|         // When, after limiting to maxLines, we still have too much to return,  | ||||
|         // do add an ellipsis prefix... | ||||
|         if (past.length > maxSize) { | ||||
|             past = '...' + past.substr(-maxSize); | ||||
|         } | ||||
|         return past; | ||||
|     }, | ||||
|  | ||||
|     // return (part of the) upcoming input, i.e. for error messages. | ||||
|     // Limit the returned string length to `maxSize` (default: 20). | ||||
|     // Limit the returned string to the `maxLines` number of lines of input (default: 1). | ||||
|     // Negative limit values equal *unlimited*. | ||||
|     upcomingInput: function lexer_upcomingInput(maxSize, maxLines) { | ||||
|         var next = this.match; | ||||
|         if (maxSize < 0) | ||||
|             maxSize = next.length + this._input.length; | ||||
|         else if (!maxSize) | ||||
|             maxSize = 20; | ||||
|         if (maxLines < 0) | ||||
|             maxLines = maxSize;         // can't ever have more input lines than this! | ||||
|         else if (!maxLines) | ||||
|             maxLines = 1; | ||||
|         // `substring` anticipation: treat \r\n as a single character and take a little | ||||
|         // more than necessary so that we can still properly check against maxSize | ||||
|         // after we've transformed and limited the newLines in here: | ||||
|         if (next.length < maxSize * 2 + 2) { | ||||
|             next += this._input.substring(0, maxSize * 2 + 2);  // substring is faster on Chrome/V8 | ||||
|         } | ||||
|         // now that we have a significantly reduced string to process, transform the newlines | ||||
|         // and chop them, then limit them: | ||||
|         var a = next.replace(/\r\n|\r/g, '\n').split('\n'); | ||||
|         a = a.slice(0, maxLines); | ||||
|         next = a.join('\n'); | ||||
|         // When, after limiting to maxLines, we still have too much to return,  | ||||
|         // do add an ellipsis postfix... | ||||
|         if (next.length > maxSize) { | ||||
|             next = next.substring(0, maxSize) + '...'; | ||||
|         } | ||||
|         return next; | ||||
|     }, | ||||
|  | ||||
|     // return a string which displays the character position where the lexing error occurred, i.e. for error messages | ||||
|     showPosition: function lexer_showPosition(maxPrefix, maxPostfix) { | ||||
|         var pre = this.pastInput(maxPrefix).replace(/\s/g, ' '); | ||||
|         var c = new Array(pre.length + 1).join('-'); | ||||
|         return pre + this.upcomingInput(maxPostfix).replace(/\s/g, ' ') + '\n' + c + '^'; | ||||
|     }, | ||||
|  | ||||
|     // helper function, used to produce a human readable description as a string, given | ||||
|     // the input `yylloc` location object.  | ||||
|     // Set `display_range_too` to TRUE to include the string character index position(s) | ||||
|     // in the description if the `yylloc.range` is available.  | ||||
|     describeYYLLOC: function lexer_describe_yylloc(yylloc, display_range_too) { | ||||
|         var l1 = yylloc.first_line; | ||||
|         var l2 = yylloc.last_line; | ||||
|         var o1 = yylloc.first_column; | ||||
|         var o2 = yylloc.last_column - 1; | ||||
|         var dl = l2 - l1; | ||||
|         var d_o = (dl === 0 ? o2 - o1 : 1000); | ||||
|         var rv; | ||||
|         if (dl === 0) { | ||||
|             rv = 'line ' + l1 + ', '; | ||||
|             if (d_o === 0) { | ||||
|                 rv += 'column ' + o1; | ||||
|             } else { | ||||
|                 rv += 'columns ' + o1 + ' .. ' + o2; | ||||
|             } | ||||
|         } else { | ||||
|             rv = 'lines ' + l1 + '(column ' + o1 + ') .. ' + l2 + '(column ' + o2 + ')'; | ||||
|         } | ||||
|         if (yylloc.range && display_range_too) { | ||||
|             var r1 = yylloc.range[0]; | ||||
|             var r2 = yylloc.range[1] - 1; | ||||
|             if (r2 === r1) { | ||||
|                 rv += ' {String Offset: ' + r1 + '}'; | ||||
|             } else { | ||||
|                 rv += ' {String Offset range: ' + r1 + ' .. ' + r2 + '}'; | ||||
|             } | ||||
|         } | ||||
|         return rv; | ||||
|         // return JSON.stringify(yylloc); | ||||
|     }, | ||||
|  | ||||
|     // test the lexed token: return FALSE when not a match, otherwise return token. | ||||
|     // | ||||
|     // `match` is supposed to be an array coming out of a regex match, i.e. `match[0]` | ||||
|     // contains the actually matched text string. | ||||
|     // | ||||
|     // Also move the input cursor forward and update the match collectors: | ||||
|     // - yytext | ||||
|     // - yyleng | ||||
|     // - match | ||||
|     // - matches | ||||
|     // - yylloc | ||||
|     // - offset | ||||
|     test_match: function lexer_test_match(match, indexed_rule) { | ||||
|         var token, | ||||
|             lines, | ||||
|             backup, | ||||
|             match_str; | ||||
|  | ||||
|         if (this.options.backtrack_lexer) { | ||||
|             // save context | ||||
|             backup = { | ||||
|                 yylineno: this.yylineno, | ||||
|                 yylloc: { | ||||
|                     first_line: this.yylloc.first_line, | ||||
|                     last_line: this.last_line, | ||||
|                     first_column: this.yylloc.first_column, | ||||
|                     last_column: this.yylloc.last_column | ||||
|                 }, | ||||
|                 yytext: this.yytext, | ||||
|                 match: this.match, | ||||
|                 matches: this.matches, | ||||
|                 matched: this.matched, | ||||
|                 yyleng: this.yyleng, | ||||
|                 offset: this.offset, | ||||
|                 _more: this._more, | ||||
|                 _input: this._input, | ||||
|                 yy: this.yy, | ||||
|                 conditionStack: this.conditionStack.slice(0), | ||||
|                 done: this.done | ||||
|             }; | ||||
|             if (this.options.ranges) { | ||||
|                 backup.yylloc.range = this.yylloc.range.slice(0); | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         match_str = match[0]; | ||||
|         lines = match_str.match(/(?:\r\n?|\n).*/g); | ||||
|         if (lines) { | ||||
|             this.yylineno += lines.length; | ||||
|         } | ||||
|         this.yylloc = { | ||||
|             first_line: this.yylloc.last_line, | ||||
|             last_line: this.yylineno + 1, | ||||
|             first_column: this.yylloc.last_column, | ||||
|             last_column: lines ? | ||||
|                          lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length : | ||||
|                          this.yylloc.last_column + match_str.length | ||||
|         }; | ||||
|         this.yytext += match_str; | ||||
|         this.match += match_str; | ||||
|         this.matches = match; | ||||
|         this.yyleng = this.yytext.length; | ||||
|         if (this.options.ranges) { | ||||
|             this.yylloc.range = [this.offset, this.offset + this.yyleng]; | ||||
|         } | ||||
|         // previous lex rules MAY have invoked the `more()` API rather than producing a token: | ||||
|         // those rules will already have moved this `offset` forward matching their match lengths, | ||||
|         // hence we must only add our own match length now: | ||||
|         this.offset += match_str.length; | ||||
|         this._more = false; | ||||
|         this._backtrack = false; | ||||
|         this._input = this._input.slice(match_str.length); | ||||
|         this.matched += match_str; | ||||
|  | ||||
|         // calling this method:  | ||||
|         // | ||||
|         //   function lexer__performAction(yy, yy_, $avoiding_name_collisions, YY_START) {...} | ||||
|         token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1] /* = YY_START */); | ||||
|         // otherwise, when the action codes are all simple return token statements: | ||||
|         //token = this.simpleCaseActionClusters[indexed_rule]; | ||||
|  | ||||
|         if (this.done && this._input) { | ||||
|             this.done = false; | ||||
|         } | ||||
|         if (token) { | ||||
|             return token; | ||||
|         } else if (this._backtrack) { | ||||
|             // recover context | ||||
|             for (var k in backup) { | ||||
|                 this[k] = backup[k]; | ||||
|             } | ||||
|             this.__currentRuleSet__ = null; | ||||
|             return false; // rule action called reject() implying the next rule should be tested instead. | ||||
|         } else if (this._signaled_error_token) { | ||||
|             // produce one 'error' token as .parseError() in reject() did not guarantee a failure signal by throwing an exception! | ||||
|             token = this._signaled_error_token; | ||||
|             this._signaled_error_token = false; | ||||
|             return token; | ||||
|         } | ||||
|         return false; | ||||
|     }, | ||||
|  | ||||
|     // return next match in input | ||||
|     next: function lexer_next() { | ||||
|         if (this.done) { | ||||
|             this.clear(); | ||||
|             return this.EOF; | ||||
|         } | ||||
|         if (!this._input) { | ||||
|             this.done = true; | ||||
|         } | ||||
|  | ||||
|         var token, | ||||
|             match, | ||||
|             tempMatch, | ||||
|             index; | ||||
|         if (!this._more) { | ||||
|             this.clear(); | ||||
|         } | ||||
|         var spec = this.__currentRuleSet__; | ||||
|         if (!spec) { | ||||
|             // Update the ruleset cache as we apparently encountered a state change or just started lexing. | ||||
|             // The cache is set up for fast lookup -- we assume a lexer will switch states much less often than it will | ||||
|             // invoke the `lex()` token-producing API and related APIs, hence caching the set for direct access helps | ||||
|             // speed up those activities a tiny bit. | ||||
|             spec = this.__currentRuleSet__ = this._currentRules(); | ||||
|         } | ||||
|  | ||||
|         var rule_ids = spec.rules; | ||||
| //        var dispatch = spec.__dispatch_lut; | ||||
|         var regexes = spec.__rule_regexes; | ||||
|         var len = spec.__rule_count; | ||||
|  | ||||
| //        var c0 = this._input[0]; | ||||
|  | ||||
|         // Note: the arrays are 1-based, while `len` itself is a valid index,  | ||||
|         // hence the non-standard less-or-equal check in the next loop condition! | ||||
|         //  | ||||
|         // `dispatch` is a lookup table which lists the *first* rule which matches the 1-char *prefix* of the rule-to-match. | ||||
|         // By using that array as a jumpstart, we can cut down on the otherwise O(n*m) behaviour of this lexer, down to | ||||
|         // O(n) ideally, where: | ||||
|         //  | ||||
|         // - N is the number of input particles -- which is not precisely characters  | ||||
|         //   as we progress on a per-regex-match basis rather than on a per-character basis | ||||
|         //    | ||||
|         // - M is the number of rules (regexes) to test in the active condition state. | ||||
|         //   | ||||
|         for (var i = 1 /* (dispatch[c0] || 1) */ ; i <= len; i++) { | ||||
|             tempMatch = this._input.match(regexes[i]); | ||||
|             if (tempMatch && (!match || tempMatch[0].length > match[0].length)) { | ||||
|                 match = tempMatch; | ||||
|                 index = i; | ||||
|                 if (this.options.backtrack_lexer) { | ||||
|                     token = this.test_match(tempMatch, rule_ids[i]); | ||||
|                     if (token !== false) { | ||||
|                         return token; | ||||
|                     } else if (this._backtrack) { | ||||
|                         match = undefined; | ||||
|                         continue; // rule action called reject() implying a rule MISmatch. | ||||
|                     } else { | ||||
|                         // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace) | ||||
|                         return false; | ||||
|                     } | ||||
|                 } else if (!this.options.flex) { | ||||
|                     break; | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|         if (match) { | ||||
|             token = this.test_match(match, rule_ids[index]); | ||||
|             if (token !== false) { | ||||
|                 return token; | ||||
|             } | ||||
|             // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace) | ||||
|             return false; | ||||
|         } | ||||
|         if (this._input === '') { | ||||
|             this.done = true; | ||||
|             return this.EOF; | ||||
|         } else { | ||||
|             var p = this.constructLexErrorInfo('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), this.options.lexer_errors_are_recoverable); | ||||
|             token = (this.parseError(p.errStr, p) || this.ERROR); | ||||
|             if (token === this.ERROR) { | ||||
|                 // we can try to recover from a lexer error that parseError() did not 'recover' for us, by moving forward at least one character at a time: | ||||
|                 if (!this.match.length) { | ||||
|                     this.input(); | ||||
|                 } | ||||
|             } | ||||
|             return token; | ||||
|         } | ||||
|     }, | ||||
|  | ||||
|     // return next match that has a token | ||||
|     lex: function lexer_lex() { | ||||
|         var r; | ||||
|         // allow the PRE/POST handlers set/modify the return token for maximum flexibility of the generated lexer: | ||||
|         if (typeof this.options.pre_lex === 'function') { | ||||
|             r = this.options.pre_lex.call(this); | ||||
|         } | ||||
|         while (!r) { | ||||
|             r = this.next(); | ||||
|         } | ||||
|         if (typeof this.options.post_lex === 'function') { | ||||
|             // (also account for a userdef function which does not return any value: keep the token as is) | ||||
|             r = this.options.post_lex.call(this, r) || r; | ||||
|         } | ||||
|         return r; | ||||
|     }, | ||||
|  | ||||
|     // backwards compatible alias for `pushState()`; | ||||
|     // the latter is symmetrical with `popState()` and we advise to use | ||||
|     // those APIs in any modern lexer code, rather than `begin()`. | ||||
|     begin: function lexer_begin(condition) { | ||||
|         return this.pushState(condition); | ||||
|     }, | ||||
|  | ||||
|     // activates a new lexer condition state (pushes the new lexer condition state onto the condition stack) | ||||
|     pushState: function lexer_pushState(condition) { | ||||
|         this.conditionStack.push(condition); | ||||
|         this.__currentRuleSet__ = null; | ||||
|         return this; | ||||
|     }, | ||||
|  | ||||
|     // pop the previously active lexer condition state off the condition stack | ||||
|     popState: function lexer_popState() { | ||||
|         var n = this.conditionStack.length - 1; | ||||
|         if (n > 0) { | ||||
|             this.__currentRuleSet__ = null; | ||||
|             return this.conditionStack.pop(); | ||||
|         } else { | ||||
|             return this.conditionStack[0]; | ||||
|         } | ||||
|     }, | ||||
|  | ||||
|     // return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available | ||||
|     topState: function lexer_topState(n) { | ||||
|         n = this.conditionStack.length - 1 - Math.abs(n || 0); | ||||
|         if (n >= 0) { | ||||
|             return this.conditionStack[n]; | ||||
|         } else { | ||||
|             return 'INITIAL'; | ||||
|         } | ||||
|     }, | ||||
|  | ||||
|     // (internal) determine the lexer rule set which is active for the currently active lexer condition state | ||||
|     _currentRules: function lexer__currentRules() { | ||||
|         if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) { | ||||
|             return this.conditions[this.conditionStack[this.conditionStack.length - 1]]; | ||||
|         } else { | ||||
|             return this.conditions['INITIAL']; | ||||
|         } | ||||
|     }, | ||||
|  | ||||
|     // return the number of states currently on the stack | ||||
|     stateStackSize: function lexer_stateStackSize() { | ||||
|         return this.conditionStack.length; | ||||
|     }, | ||||
| options: {}, | ||||
| JisonLexerError: JisonLexerError, | ||||
| performAction: function lexer__performAction(yy, yy_, $avoiding_name_collisions, YY_START) { | ||||
|  | ||||
| var YYSTATE = YY_START; | ||||
| switch($avoiding_name_collisions) { | ||||
| case 0 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       [ \t\r\n]+ */  | ||||
|   | ||||
|     /* eat up whitespace */ | ||||
|     BeginToken(yy_.yytext);  | ||||
|       | ||||
| break; | ||||
| case 1 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       {DIGIT}+ */  | ||||
|   | ||||
|     BeginToken(yy_.yytext);  | ||||
|     yylval.value = atof(yy_.yytext); | ||||
|     return VALUE; | ||||
|       | ||||
| break; | ||||
| case 2 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       {DIGIT}+\.{DIGIT}* */  | ||||
|   | ||||
|     BeginToken(yy_.yytext); | ||||
|     yylval.value = atof(yy_.yytext); | ||||
|     return VALUE; | ||||
|       | ||||
| break; | ||||
| case 3 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       {DIGIT}+[eE]["+""-"]?{DIGIT}* */  | ||||
|   | ||||
|     BeginToken(yy_.yytext); | ||||
|     yylval.value = atof(yy_.yytext); | ||||
|     return VALUE; | ||||
|       | ||||
| break; | ||||
| case 4 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       {DIGIT}+\.{DIGIT}*[eE]["+""-"]?{DIGIT}* */  | ||||
|   | ||||
|     BeginToken(yy_.yytext); | ||||
|     yylval.value = atof(yy_.yytext); | ||||
|     return VALUE; | ||||
|       | ||||
| break; | ||||
| case 5 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       {ID} */  | ||||
|   | ||||
|     BeginToken(yy_.yytext); | ||||
|     yylval.string = malloc(strlen(yy_.yytext)+1); | ||||
|     strcpy(yylval.string, yy_.yytext); | ||||
|     return IDENTIFIER; | ||||
|       | ||||
| break; | ||||
| case 6 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       \+ */  | ||||
|   BeginToken(yy_.yytext); return ADD;   | ||||
| break; | ||||
| case 7 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       - */  | ||||
|   BeginToken(yy_.yytext); return SUB;   | ||||
| break; | ||||
| case 8 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       \* */  | ||||
|   BeginToken(yy_.yytext); return MULT;   | ||||
| break; | ||||
| case 9 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       \/ */  | ||||
|   BeginToken(yy_.yytext); return DIV;   | ||||
| break; | ||||
| case 10 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       \( */  | ||||
|   BeginToken(yy_.yytext); return LBRACE;   | ||||
| break; | ||||
| case 11 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       \) */  | ||||
|   BeginToken(yy_.yytext); return RBRACE;   | ||||
| break; | ||||
| case 12 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       ; */  | ||||
|   BeginToken(yy_.yytext); return SEMICOLON;   | ||||
| break; | ||||
| case 13 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       = */  | ||||
|   BeginToken(yy_.yytext); return ASSIGN;   | ||||
| break; | ||||
| case 14 :  | ||||
| /*! Conditions:: INITIAL */  | ||||
| /*! Rule::       . */  | ||||
|   | ||||
|     BeginToken(yy_.yytext); | ||||
|     return yy_.yytext[0]; | ||||
|       | ||||
| break; | ||||
| default: | ||||
|   return this.simpleCaseActionClusters[$avoiding_name_collisions]; | ||||
| } | ||||
| }, | ||||
| simpleCaseActionClusters: { | ||||
|  | ||||
| }, | ||||
| rules: [ | ||||
| /^(?:[ \t\r\n]+)/, | ||||
| /^(?:(\d)+)/, | ||||
| /^(?:(\d)+\.(\d)*)/, | ||||
| /^(?:(\d)+[Ee]["+]?(\d)*)/, | ||||
| /^(?:(\d)+\.(\d)*[Ee]["+]?(\d)*)/, | ||||
| /^(?:([^\W\d]\w*))/, | ||||
| /^(?:\+)/, | ||||
| /^(?:-)/, | ||||
| /^(?:\*)/, | ||||
| /^(?:\/)/, | ||||
| /^(?:\()/, | ||||
| /^(?:\))/, | ||||
| /^(?:;)/, | ||||
| /^(?:=)/, | ||||
| /^(?:.)/ | ||||
| ], | ||||
| conditions: { | ||||
|   "INITIAL": { | ||||
|     rules: [ | ||||
|       0, | ||||
|       1, | ||||
|       2, | ||||
|       3, | ||||
|       4, | ||||
|       5, | ||||
|       6, | ||||
|       7, | ||||
|       8, | ||||
|       9, | ||||
|       10, | ||||
|       11, | ||||
|       12, | ||||
|       13, | ||||
|       14 | ||||
|     ], | ||||
|     inclusive: true | ||||
|   } | ||||
| } | ||||
| }; | ||||
|  | ||||
| /*-------------------------------------------------------------------- | ||||
|  * lex.l | ||||
|  *------------------------------------------------------------------*/; | ||||
| return lexer; | ||||
| })(); | ||||
							
								
								
									
										2145
									
								
								samples/JavaScript/ccalc-parse.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										2145
									
								
								samples/JavaScript/ccalc-parse.js
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										31
									
								
								samples/JavaScript/proto.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										31
									
								
								samples/JavaScript/proto.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,31 @@ | ||||
| /** | ||||
|  * @fileoverview | ||||
|  * @enhanceable | ||||
|  * @public | ||||
|  */ | ||||
| // GENERATED CODE -- DO NOT EDIT! | ||||
|  | ||||
| goog.provide('proto.google.protobuf.Timestamp'); | ||||
|  | ||||
| goog.require('jspb.Message'); | ||||
|  | ||||
| /** | ||||
|  * Generated by JsPbCodeGenerator. | ||||
|  * @param {Array=} opt_data Optional initial data array, typically from a | ||||
|  * server response, or constructed directly in Javascript. The array is used | ||||
|  * in place and becomes part of the constructed object. It is not cloned. | ||||
|  * If no data is provided, the constructed object will be empty, but still | ||||
|  * valid. | ||||
|  * @extends {jspb.Message} | ||||
|  * @constructor | ||||
|  */ | ||||
| proto.google.protobuf.Timestamp = function(opt_data) { | ||||
|   jspb.Message.initialize(this, opt_data, 0, -1, null, null); | ||||
| }; | ||||
| goog.inherits(proto.google.protobuf.Timestamp, jspb.Message); | ||||
| if (goog.DEBUG && !COMPILED) { | ||||
|   proto.google.protobuf.Timestamp.displayName = 'proto.google.protobuf.Timestamp'; | ||||
| } | ||||
|  | ||||
|  | ||||
| // Remainder elided | ||||
							
								
								
									
										39
									
								
								samples/Jison Lex/classy.jisonlex
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										39
									
								
								samples/Jison Lex/classy.jisonlex
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,39 @@ | ||||
| digit                       [0-9] | ||||
| id                          [a-zA-Z][a-zA-Z0-9]* | ||||
|  | ||||
| %% | ||||
| "//".*                      /* ignore comment */ | ||||
| "main"                      return 'MAIN'; | ||||
| "class"                     return 'CLASS'; | ||||
| "extends"                   return 'EXTENDS'; | ||||
| "nat"                       return 'NATTYPE'; | ||||
| "if"                        return 'IF'; | ||||
| "else"                      return 'ELSE'; | ||||
| "for"                       return 'FOR'; | ||||
| "printNat"                  return 'PRINTNAT'; | ||||
| "readNat"                   return 'READNAT'; | ||||
| "this"                      return 'THIS'; | ||||
| "new"                       return 'NEW'; | ||||
| "var"                       return 'VAR'; | ||||
| "null"                      return 'NUL'; | ||||
| {digit}+                    return 'NATLITERAL'; | ||||
| {id}                        return 'ID'; | ||||
| "=="                        return 'EQUALITY'; | ||||
| "="                         return 'ASSIGN'; | ||||
| "+"                         return 'PLUS'; | ||||
| "-"                         return 'MINUS'; | ||||
| "*"                         return 'TIMES'; | ||||
| ">"                         return 'GREATER'; | ||||
| "||"                        return 'OR'; | ||||
| "!"                         return 'NOT'; | ||||
| "."                         return 'DOT'; | ||||
| "{"                         return 'LBRACE'; | ||||
| "}"                         return 'RBRACE'; | ||||
| "("                         return 'LPAREN'; | ||||
| ")"                         return 'RPAREN'; | ||||
| ";"                         return 'SEMICOLON'; | ||||
| \s+                         /* skip whitespace */ | ||||
| "."                         throw 'Illegal character'; | ||||
| <<EOF>>                     return 'ENDOFFILE'; | ||||
|  | ||||
|  | ||||
							
								
								
									
										29
									
								
								samples/Jison Lex/lex_grammar.jisonlex
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								samples/Jison Lex/lex_grammar.jisonlex
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,29 @@ | ||||
|  | ||||
| %% | ||||
| \n+                         {yy.freshLine = true;} | ||||
| \s+                         {yy.freshLine = false;} | ||||
| "y{"[^}]*"}"                {yytext = yytext.substr(2, yyleng - 3); return 'ACTION';} | ||||
| [a-zA-Z_][a-zA-Z0-9_-]*     {return 'NAME';} | ||||
| '"'([^"]|'\"')*'"'          {return 'STRING_LIT';} | ||||
| "'"([^']|"\'")*"'"          {return 'STRING_LIT';} | ||||
| "|"                         {return '|';} | ||||
| "["("\]"|[^\]])*"]"         {return 'ANY_GROUP_REGEX';} | ||||
| "("                         {return '(';} | ||||
| ")"                         {return ')';} | ||||
| "+"                         {return '+';} | ||||
| "*"                         {return '*';} | ||||
| "?"                         {return '?';} | ||||
| "^"                         {return '^';} | ||||
| "/"                         {return '/';} | ||||
| "\\"[a-zA-Z0]               {return 'ESCAPE_CHAR';} | ||||
| "$"                         {return '$';} | ||||
| "<<EOF>>"                   {return '$';} | ||||
| "."                         {return '.';} | ||||
| "%%"                        {return '%%';} | ||||
| "{"\d+(","\s?\d+|",")?"}"   {return 'RANGE_REGEX';} | ||||
| /"{"                        %{if (yy.freshLine) { this.input('{'); return '{'; } else { this.unput('y'); }%} | ||||
| "}"                         %{return '}';%} | ||||
| "%{"(.|\n)*?"}%"            {yytext = yytext.substr(2, yyleng - 4); return 'ACTION';} | ||||
| .                           {/* ignore bad characters */} | ||||
| <<EOF>>                     {return 'EOF';} | ||||
|  | ||||
							
								
								
									
										418
									
								
								samples/Jison/ansic.jison
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										418
									
								
								samples/Jison/ansic.jison
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,418 @@ | ||||
| %token IDENTIFIER CONSTANT STRING_LITERAL SIZEOF | ||||
| %token PTR_OP INC_OP DEC_OP LEFT_OP RIGHT_OP LE_OP GE_OP EQ_OP NE_OP | ||||
| %token AND_OP OR_OP MUL_ASSIGN DIV_ASSIGN MOD_ASSIGN ADD_ASSIGN | ||||
| %token SUB_ASSIGN LEFT_ASSIGN RIGHT_ASSIGN AND_ASSIGN | ||||
| %token XOR_ASSIGN OR_ASSIGN TYPE_NAME | ||||
|  | ||||
| %token TYPEDEF EXTERN STATIC AUTO REGISTER | ||||
| %token CHAR SHORT INT LONG SIGNED UNSIGNED FLOAT DOUBLE CONST VOLATILE VOID | ||||
| %token STRUCT UNION ENUM ELLIPSIS | ||||
|  | ||||
| %token CASE DEFAULT IF ELSE SWITCH WHILE DO FOR GOTO CONTINUE BREAK RETURN | ||||
|  | ||||
| %nonassoc IF_WITHOUT_ELSE | ||||
| %nonassoc ELSE | ||||
|  | ||||
| %start translation_unit | ||||
| %% | ||||
|  | ||||
| primary_expression | ||||
|     : IDENTIFIER | ||||
|     | CONSTANT | ||||
|     | STRING_LITERAL | ||||
|     | '(' expression ')' | ||||
|     ; | ||||
|  | ||||
| postfix_expression | ||||
|     : primary_expression | ||||
|     | postfix_expression '[' expression ']' | ||||
|     | postfix_expression '(' ')' | ||||
|     | postfix_expression '(' argument_expression_list ')' | ||||
|     | postfix_expression '.' IDENTIFIER | ||||
|     | postfix_expression PTR_OP IDENTIFIER | ||||
|     | postfix_expression INC_OP | ||||
|     | postfix_expression DEC_OP | ||||
|     ; | ||||
|  | ||||
| argument_expression_list | ||||
|     : assignment_expression | ||||
|     | argument_expression_list ',' assignment_expression | ||||
|     ; | ||||
|  | ||||
| unary_expression | ||||
|     : postfix_expression | ||||
|     | INC_OP unary_expression | ||||
|     | DEC_OP unary_expression | ||||
|     | unary_operator cast_expression | ||||
|     | SIZEOF unary_expression | ||||
|     | SIZEOF '(' type_name ')' | ||||
|     ; | ||||
|  | ||||
| unary_operator | ||||
|     : '&' | ||||
|     | '*' | ||||
|     | '+' | ||||
|     | '-' | ||||
|     | '~' | ||||
|     | '!' | ||||
|     ; | ||||
|  | ||||
| cast_expression | ||||
|     : unary_expression | ||||
|     | '(' type_name ')' cast_expression | ||||
|     ; | ||||
|  | ||||
| multiplicative_expression | ||||
|     : cast_expression | ||||
|     | multiplicative_expression '*' cast_expression | ||||
|     | multiplicative_expression '/' cast_expression | ||||
|     | multiplicative_expression '%' cast_expression | ||||
|     ; | ||||
|  | ||||
| additive_expression | ||||
|     : multiplicative_expression | ||||
|     | additive_expression '+' multiplicative_expression | ||||
|     | additive_expression '-' multiplicative_expression | ||||
|     ; | ||||
|  | ||||
| shift_expression | ||||
|     : additive_expression | ||||
|     | shift_expression LEFT_OP additive_expression | ||||
|     | shift_expression RIGHT_OP additive_expression | ||||
|     ; | ||||
|  | ||||
| relational_expression | ||||
|     : shift_expression | ||||
|     | relational_expression '<' shift_expression | ||||
|     | relational_expression '>' shift_expression | ||||
|     | relational_expression LE_OP shift_expression | ||||
|     | relational_expression GE_OP shift_expression | ||||
|     ; | ||||
|  | ||||
| equality_expression | ||||
|     : relational_expression | ||||
|     | equality_expression EQ_OP relational_expression | ||||
|     | equality_expression NE_OP relational_expression | ||||
|     ; | ||||
|  | ||||
| and_expression | ||||
|     : equality_expression | ||||
|     | and_expression '&' equality_expression | ||||
|     ; | ||||
|  | ||||
| exclusive_or_expression | ||||
|     : and_expression | ||||
|     | exclusive_or_expression '^' and_expression | ||||
|     ; | ||||
|  | ||||
| inclusive_or_expression | ||||
|     : exclusive_or_expression | ||||
|     | inclusive_or_expression '|' exclusive_or_expression | ||||
|     ; | ||||
|  | ||||
| logical_and_expression | ||||
|     : inclusive_or_expression | ||||
|     | logical_and_expression AND_OP inclusive_or_expression | ||||
|     ; | ||||
|  | ||||
| logical_or_expression | ||||
|     : logical_and_expression | ||||
|     | logical_or_expression OR_OP logical_and_expression | ||||
|     ; | ||||
|  | ||||
| conditional_expression | ||||
|     : logical_or_expression | ||||
|     | logical_or_expression '?' expression ':' conditional_expression | ||||
|     ; | ||||
|  | ||||
| assignment_expression | ||||
|     : conditional_expression | ||||
|     | unary_expression assignment_operator assignment_expression | ||||
|     ; | ||||
|  | ||||
| assignment_operator | ||||
|     : '=' | ||||
|     | MUL_ASSIGN | ||||
|     | DIV_ASSIGN | ||||
|     | MOD_ASSIGN | ||||
|     | ADD_ASSIGN | ||||
|     | SUB_ASSIGN | ||||
|     | LEFT_ASSIGN | ||||
|     | RIGHT_ASSIGN | ||||
|     | AND_ASSIGN | ||||
|     | XOR_ASSIGN | ||||
|     | OR_ASSIGN | ||||
|     ; | ||||
|  | ||||
| expression | ||||
|     : assignment_expression | ||||
|     | expression ',' assignment_expression | ||||
|     ; | ||||
|  | ||||
| constant_expression | ||||
|     : conditional_expression | ||||
|     ; | ||||
|  | ||||
| declaration | ||||
|     : declaration_specifiers ';' | ||||
|     | declaration_specifiers init_declarator_list ';' | ||||
|     ; | ||||
|  | ||||
| declaration_specifiers | ||||
|     : storage_class_specifier | ||||
|     | storage_class_specifier declaration_specifiers | ||||
|     | type_specifier | ||||
|     | type_specifier declaration_specifiers | ||||
|     | type_qualifier | ||||
|     | type_qualifier declaration_specifiers | ||||
|     ; | ||||
|  | ||||
| init_declarator_list | ||||
|     : init_declarator | ||||
|     | init_declarator_list ',' init_declarator | ||||
|     ; | ||||
|  | ||||
| init_declarator | ||||
|     : declarator | ||||
|     | declarator '=' initializer | ||||
|     ; | ||||
|  | ||||
| storage_class_specifier | ||||
|     : TYPEDEF | ||||
|     | EXTERN | ||||
|     | STATIC | ||||
|     | AUTO | ||||
|     | REGISTER | ||||
|     ; | ||||
|  | ||||
| type_specifier | ||||
|     : VOID | ||||
|     | CHAR | ||||
|     | SHORT | ||||
|     | INT | ||||
|     | LONG | ||||
|     | FLOAT | ||||
|     | DOUBLE | ||||
|     | SIGNED | ||||
|     | UNSIGNED | ||||
|     | struct_or_union_specifier | ||||
|     | enum_specifier | ||||
|     | TYPE_NAME | ||||
|     ; | ||||
|  | ||||
| struct_or_union_specifier | ||||
|     : struct_or_union IDENTIFIER '{' struct_declaration_list '}' | ||||
|     | struct_or_union '{' struct_declaration_list '}' | ||||
|     | struct_or_union IDENTIFIER | ||||
|     ; | ||||
|  | ||||
| struct_or_union | ||||
|     : STRUCT | ||||
|     | UNION | ||||
|     ; | ||||
|  | ||||
| struct_declaration_list | ||||
|     : struct_declaration | ||||
|     | struct_declaration_list struct_declaration | ||||
|     ; | ||||
|  | ||||
| struct_declaration | ||||
|     : specifier_qualifier_list struct_declarator_list ';' | ||||
|     ; | ||||
|  | ||||
| specifier_qualifier_list | ||||
|     : type_specifier specifier_qualifier_list | ||||
|     | type_specifier | ||||
|     | type_qualifier specifier_qualifier_list | ||||
|     | type_qualifier | ||||
|     ; | ||||
|  | ||||
| struct_declarator_list | ||||
|     : struct_declarator | ||||
|     | struct_declarator_list ',' struct_declarator | ||||
|     ; | ||||
|  | ||||
| struct_declarator | ||||
|     : declarator | ||||
|     | ':' constant_expression | ||||
|     | declarator ':' constant_expression | ||||
|     ; | ||||
|  | ||||
| enum_specifier | ||||
|     : ENUM '{' enumerator_list '}' | ||||
|     | ENUM IDENTIFIER '{' enumerator_list '}' | ||||
|     | ENUM IDENTIFIER | ||||
|     ; | ||||
|  | ||||
| enumerator_list | ||||
|     : enumerator | ||||
|     | enumerator_list ',' enumerator | ||||
|     ; | ||||
|  | ||||
| enumerator | ||||
|     : IDENTIFIER | ||||
|     | IDENTIFIER '=' constant_expression | ||||
|     ; | ||||
|  | ||||
| type_qualifier | ||||
|     : CONST | ||||
|     | VOLATILE | ||||
|     ; | ||||
|  | ||||
| declarator | ||||
|     : pointer direct_declarator | ||||
|     | direct_declarator | ||||
|     ; | ||||
|  | ||||
| direct_declarator | ||||
|     : IDENTIFIER | ||||
|     | '(' declarator ')' | ||||
|     | direct_declarator '[' constant_expression ']' | ||||
|     | direct_declarator '[' ']' | ||||
|     | direct_declarator '(' parameter_type_list ')' | ||||
|     | direct_declarator '(' identifier_list ')' | ||||
|     | direct_declarator '(' ')' | ||||
|     ; | ||||
|  | ||||
| pointer | ||||
|     : '*' | ||||
|     | '*' type_qualifier_list | ||||
|     | '*' pointer | ||||
|     | '*' type_qualifier_list pointer | ||||
|     ; | ||||
|  | ||||
| type_qualifier_list | ||||
|     : type_qualifier | ||||
|     | type_qualifier_list type_qualifier | ||||
|     ; | ||||
|  | ||||
|  | ||||
| parameter_type_list | ||||
|     : parameter_list | ||||
|     | parameter_list ',' ELLIPSIS | ||||
|     ; | ||||
|  | ||||
| parameter_list | ||||
|     : parameter_declaration | ||||
|     | parameter_list ',' parameter_declaration | ||||
|     ; | ||||
|  | ||||
| parameter_declaration | ||||
|     : declaration_specifiers declarator | ||||
|     | declaration_specifiers abstract_declarator | ||||
|     | declaration_specifiers | ||||
|     ; | ||||
|  | ||||
| identifier_list | ||||
|     : IDENTIFIER | ||||
|     | identifier_list ',' IDENTIFIER | ||||
|     ; | ||||
|  | ||||
| type_name | ||||
|     : specifier_qualifier_list | ||||
|     | specifier_qualifier_list abstract_declarator | ||||
|     ; | ||||
|  | ||||
| abstract_declarator | ||||
|     : pointer | ||||
|     | direct_abstract_declarator | ||||
|     | pointer direct_abstract_declarator | ||||
|     ; | ||||
|  | ||||
| direct_abstract_declarator | ||||
|     : '(' abstract_declarator ')' | ||||
|     | '[' ']' | ||||
|     | '[' constant_expression ']' | ||||
|     | direct_abstract_declarator '[' ']' | ||||
|     | direct_abstract_declarator '[' constant_expression ']' | ||||
|     | '(' ')' | ||||
|     | '(' parameter_type_list ')' | ||||
|     | direct_abstract_declarator '(' ')' | ||||
|     | direct_abstract_declarator '(' parameter_type_list ')' | ||||
|     ; | ||||
|  | ||||
| initializer | ||||
|     : assignment_expression | ||||
|     | '{' initializer_list '}' | ||||
|     | '{' initializer_list ',' '}' | ||||
|     ; | ||||
|  | ||||
| initializer_list | ||||
|     : initializer | ||||
|     | initializer_list ',' initializer | ||||
|     ; | ||||
|  | ||||
| statement | ||||
|     : labeled_statement | ||||
|     | compound_statement | ||||
|     | expression_statement | ||||
|     | selection_statement | ||||
|     | iteration_statement | ||||
|     | jump_statement | ||||
|     ; | ||||
|  | ||||
| labeled_statement | ||||
|     : IDENTIFIER ':' statement | ||||
|     | CASE constant_expression ':' statement | ||||
|     | DEFAULT ':' statement | ||||
|     ; | ||||
|  | ||||
| compound_statement | ||||
|     : '{' '}' | ||||
|     | '{' statement_list '}' | ||||
|     | '{' declaration_list '}' | ||||
|     | '{' declaration_list statement_list '}' | ||||
|     ; | ||||
|  | ||||
| declaration_list | ||||
|     : declaration | ||||
|     | declaration_list declaration | ||||
|     ; | ||||
|  | ||||
| statement_list | ||||
|     : statement | ||||
|     | statement_list statement | ||||
|     ; | ||||
|  | ||||
| expression_statement | ||||
|     : ';' | ||||
|     | expression ';' | ||||
|     ; | ||||
|  | ||||
| selection_statement | ||||
|     : IF '(' expression ')' statement %prec IF_WITHOUT_ELSE | ||||
|     | IF '(' expression ')' statement ELSE statement | ||||
|     | SWITCH '(' expression ')' statement | ||||
|     ; | ||||
|  | ||||
| iteration_statement | ||||
|     : WHILE '(' expression ')' statement | ||||
|     | DO statement WHILE '(' expression ')' ';' | ||||
|     | FOR '(' expression_statement expression_statement ')' statement | ||||
|     | FOR '(' expression_statement expression_statement expression ')' statement | ||||
|     ; | ||||
|  | ||||
| jump_statement | ||||
|     : GOTO IDENTIFIER ';' | ||||
|     | CONTINUE ';' | ||||
|     | BREAK ';' | ||||
|     | RETURN ';' | ||||
|     | RETURN expression ';' | ||||
|     ; | ||||
|  | ||||
| translation_unit | ||||
|     : external_declaration | ||||
|     | translation_unit external_declaration | ||||
|     ; | ||||
|  | ||||
| external_declaration | ||||
|     : function_definition | ||||
|     | declaration | ||||
|     ; | ||||
|  | ||||
| function_definition | ||||
|     : declaration_specifiers declarator declaration_list compound_statement | ||||
|     | declaration_specifiers declarator compound_statement | ||||
|     | declarator declaration_list compound_statement | ||||
|     | declarator compound_statement | ||||
|     ; | ||||
							
								
								
									
										84
									
								
								samples/Jison/classy.jison
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										84
									
								
								samples/Jison/classy.jison
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,84 @@ | ||||
|  | ||||
| /* description: ClassyLang grammar. Very classy. */ | ||||
| /* | ||||
|   To build parser: | ||||
|  | ||||
|     $ ./bin/jison examples/classy.jison examples/classy.jisonlex | ||||
|  | ||||
| */ | ||||
|  | ||||
|  | ||||
| /* author: Zach Carter */ | ||||
|  | ||||
| %right ASSIGN | ||||
| %left OR | ||||
| %nonassoc EQUALITY GREATER | ||||
| %left PLUS MINUS | ||||
| %left TIMES | ||||
| %right NOT | ||||
| %left DOT | ||||
|  | ||||
| %% | ||||
|  | ||||
| pgm | ||||
|     : cdl MAIN LBRACE vdl el RBRACE ENDOFFILE | ||||
|     ; | ||||
|  | ||||
| cdl | ||||
|     : c cdl | ||||
|     | | ||||
|     ; | ||||
|  | ||||
| c | ||||
|     : CLASS id EXTENDS id LBRACE vdl mdl RBRACE | ||||
|     ; | ||||
|  | ||||
| vdl | ||||
|     : VAR t id SEMICOLON vdl | ||||
|     | | ||||
|     ; | ||||
|  | ||||
| mdl | ||||
|     : t id LPAREN t id RPAREN LBRACE vdl el RBRACE mdl | ||||
|     | | ||||
|     ; | ||||
|  | ||||
| t | ||||
|     : NATTYPE | ||||
|     | id | ||||
|     ; | ||||
|  | ||||
| id | ||||
|     : ID | ||||
|     ; | ||||
|  | ||||
| el | ||||
|     : e SEMICOLON el | ||||
|     | e SEMICOLON | ||||
|     ; | ||||
|  | ||||
| e | ||||
|     : NATLITERAL | ||||
|     | NUL | ||||
|     | id | ||||
|     | NEW id | ||||
|     | THIS | ||||
|     | IF LPAREN e RPAREN LBRACE el RBRACE ELSE LBRACE el RBRACE | ||||
|     | FOR LPAREN e SEMICOLON e SEMICOLON e RPAREN LBRACE el RBRACE | ||||
|     | READNAT LPAREN RPAREN | ||||
|     | PRINTNAT LPAREN e RPAREN | ||||
|     | e PLUS e | ||||
|     | e MINUS e | ||||
|     | e TIMES e | ||||
|     | e EQUALITY e | ||||
|     | e GREATER e | ||||
|     | NOT e | ||||
|     | e OR e | ||||
|     | e DOT id | ||||
|     | id ASSIGN e | ||||
|     | e DOT id ASSIGN e | ||||
|     | id LPAREN e RPAREN | ||||
|     | e DOT id LPAREN e RPAREN | ||||
|     | LPAREN e RPAREN | ||||
|     ; | ||||
|  | ||||
							
								
								
									
										145
									
								
								samples/Jison/lex.jison
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										145
									
								
								samples/Jison/lex.jison
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,145 @@ | ||||
|  | ||||
| // `%nonassoc` tells the parser compiler (JISON) that these tokens cannot occur more than once, | ||||
| // i.e. input like '//a' (tokens '/', '/' and 'a') is not a legal input while '/a' (tokens '/' and 'a') | ||||
| // *is* legal input for this grammar. | ||||
|   | ||||
| %nonassoc '/' '/!' | ||||
|  | ||||
| // Likewise for `%left`: this informs the LALR(1) grammar compiler (JISON) that these tokens | ||||
| // *can* occur repeatedly, e.g. 'a?*' and even 'a**' are considered legal inputs given this | ||||
| // grammar! | ||||
| // | ||||
| // Token `RANGE_REGEX` may seem the odd one out here but really isn't: given the `regex_base` | ||||
| // choice/rule `regex_base range_regex`, which is recursive, this grammar tells JISON that  | ||||
| // any input matching a sequence like `regex_base range_regex range_regex` *is* legal. | ||||
| // If you do not want that to be legal, you MUST adjust the grammar rule set you match your | ||||
| // actual intent. | ||||
|  | ||||
| %left '*' '+' '?' RANGE_REGEX | ||||
|  | ||||
|  | ||||
| %%  | ||||
|  | ||||
| lex  | ||||
|     : definitions include '%%' rules '%%' EOF | ||||
|         {{ $$ = {macros: $1, rules: $4}; | ||||
|           if ($2) $$.actionInclude = $2; | ||||
|           return $$; }} | ||||
|     | definitions include '%%' rules EOF | ||||
|         {{ $$ = {macros: $1, rules: $4}; | ||||
|           if ($2) $$.actionInclude = $2; | ||||
|           return $$; }} | ||||
|     ; | ||||
|  | ||||
| include | ||||
|     : action | ||||
|     | | ||||
|     ; | ||||
|  | ||||
| definitions | ||||
|     : definitions definition | ||||
|         { $$ = $1; $$.concat($2); } | ||||
|     | definition | ||||
|         { $$ = [$1]; } | ||||
|     ; | ||||
|  | ||||
| definition | ||||
|     : name regex | ||||
|         { $$ = [$1, $2]; } | ||||
|     ; | ||||
|  | ||||
| name | ||||
|     : NAME | ||||
|         { $$ = yytext; } | ||||
|     ; | ||||
|  | ||||
| rules | ||||
|     : rules rule | ||||
|         { $$ = $1; $$.push($2); } | ||||
|     | rule | ||||
|         { $$ = [$1]; } | ||||
|     ; | ||||
|  | ||||
| rule | ||||
|     : regex action | ||||
|         { $$ = [$1, $2]; } | ||||
|     ; | ||||
|  | ||||
| action | ||||
|     : ACTION  | ||||
|         { $$ = yytext; } | ||||
|     ; | ||||
|  | ||||
| regex | ||||
|     : start_caret regex_list end_dollar | ||||
|         { $$ = $1+$2+$3; } | ||||
|     ; | ||||
|  | ||||
| start_caret | ||||
|     : '^' | ||||
|         { $$ = '^'; } | ||||
|     | | ||||
|         { $$ = ''; } | ||||
|     ; | ||||
|  | ||||
| end_dollar | ||||
|     : '$' | ||||
|         { $$ = '$'; } | ||||
|     | | ||||
|         { $$ = ''; } | ||||
|     ; | ||||
|  | ||||
| regex_list | ||||
|     : regex_list '|' regex_chain | ||||
|         { $$ = $1+'|'+$3; } | ||||
|     | regex_chain | ||||
|     ; | ||||
|  | ||||
| regex_chain | ||||
|     : regex_chain regex_base | ||||
|         { $$ = $1+$2;} | ||||
|     | regex_base | ||||
|         { $$ = $1;} | ||||
|     ; | ||||
|  | ||||
| regex_base | ||||
|     : '(' regex_list ')' | ||||
|         { $$ = '('+$2+')'; } | ||||
|     | regex_base '+' | ||||
|         { $$ = $1+'+'; } | ||||
|     | regex_base '*' | ||||
|         { $$ = $1+'*'; } | ||||
|     | regex_base '?' | ||||
|         { $$ = $1+'?'; } | ||||
|     | '/' regex_base | ||||
|         { $$ = '(?=' + $regex_base + ')'; } | ||||
|     | '/!' regex_base | ||||
|         { $$ = '(?!' + $regex_base + ')'; } | ||||
|     | name_expansion | ||||
|     | regex_base range_regex | ||||
|         { $$ = $1+$2; } | ||||
|     | any_group_regex | ||||
|     | '.' | ||||
|         { $$ = '.'; } | ||||
|     | string | ||||
|     ; | ||||
|  | ||||
| name_expansion | ||||
|     : '{' name '}' | ||||
|         {{ $$ = '{'+$2+'}'; }} | ||||
|     ; | ||||
|  | ||||
| any_group_regex | ||||
|     : ANY_GROUP_REGEX | ||||
|         { $$ = yytext; } | ||||
|     ; | ||||
|  | ||||
| range_regex | ||||
|     : RANGE_REGEX | ||||
|         { $$ = yytext; } | ||||
|     ; | ||||
|  | ||||
| string | ||||
|     : STRING_LIT | ||||
|         { $$ = yy.prepareString(yytext.substr(1, yyleng-2)); } | ||||
|     ; | ||||
							
								
								
									
										37
									
								
								samples/Jolie/common.iol
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										37
									
								
								samples/Jolie/common.iol
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,37 @@ | ||||
| include "types/Binding.iol" | ||||
|  | ||||
| constants { | ||||
| 	Location_Exam = "socket://localhost:8000" | ||||
| } | ||||
|  | ||||
| type StartExamRequest:void { | ||||
| 	.examName:string | ||||
| 	.studentName:string | ||||
| 	.student:Binding | ||||
| } | ||||
|  | ||||
| type MakeQuestionRequest:void { | ||||
| 	.question:string | ||||
| 	.examName:string | ||||
| 	.studentName:string | ||||
| } | ||||
|  | ||||
| type DecisionMessage:void { | ||||
| 	.studentName:string | ||||
| 	.examName:string | ||||
| } | ||||
|  | ||||
| interface ExamInterface { | ||||
| OneWay: | ||||
| 	startExam(StartExamRequest), | ||||
| 	pass(DecisionMessage), fail(DecisionMessage) | ||||
| RequestResponse: | ||||
| 	makeQuestion(MakeQuestionRequest)(int) | ||||
| } | ||||
|  | ||||
| interface StudentInterface { | ||||
| OneWay: | ||||
| 	sendMessage(string) | ||||
| RequestResponse: | ||||
| 	makeQuestion(MakeQuestionRequest)(int) | ||||
| } | ||||
							
								
								
									
										39
									
								
								samples/Jolie/exam.ol
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										39
									
								
								samples/Jolie/exam.ol
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,39 @@ | ||||
| include "common.iol" | ||||
|  | ||||
| cset { | ||||
| studentName: | ||||
| 	StartExamRequest.studentName | ||||
| 	DecisionMessage.studentName | ||||
| 	MakeQuestionRequest.studentName, | ||||
| examName: | ||||
| 	StartExamRequest.examName | ||||
| 	DecisionMessage.examName | ||||
| 	MakeQuestionRequest.examName | ||||
| } | ||||
|  | ||||
| execution { concurrent } | ||||
|  | ||||
| outputPort Student { | ||||
| Interfaces: StudentInterface | ||||
| } | ||||
|  | ||||
| inputPort ExamInput { | ||||
| Location: Location_Exam | ||||
| Protocol: sodep | ||||
| Interfaces: ExamInterface | ||||
| } | ||||
|  | ||||
| main | ||||
| { | ||||
| 	startExam( examRequest ); | ||||
| 	Student << examRequest.student; | ||||
| 	makeQuestion( question )( answer ) { | ||||
| 		makeQuestion@Student( question )( answer ) | ||||
| 	}; | ||||
| 	[ pass( message ) ] { | ||||
| 		sendMessage@Student( "You passed!" ) | ||||
| 	} | ||||
| 	[ fail( message ) ] { | ||||
| 		sendMessage@Student( "You failed!" ) | ||||
| 	} | ||||
| } | ||||
							
								
								
									
										26
									
								
								samples/Jolie/examiner.ol
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										26
									
								
								samples/Jolie/examiner.ol
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,26 @@ | ||||
| include "common.iol" | ||||
| include "ui/swing_ui.iol" | ||||
| include "console.iol" | ||||
|  | ||||
| outputPort Exam { | ||||
| Location: Location_Exam | ||||
| Protocol: sodep | ||||
| Interfaces: ExamInterface | ||||
| } | ||||
|  | ||||
| main | ||||
| { | ||||
| 	question.studentName = "John"; | ||||
| 	question.examName = "SPLG"; | ||||
| 	question.question = "Random question"; | ||||
| 	makeQuestion@Exam( question )( answer ); | ||||
| 	showYesNoQuestionDialog@SwingUI( "Do you want to accept answer " + answer + " ?" )( decision ); | ||||
|  | ||||
| 	message.studentName = "John"; | ||||
| 	message.examName = "SPLG"; | ||||
| 	if ( decision == 0 ) { | ||||
| 		pass@Exam( message ) | ||||
| 	} else { | ||||
| 		fail@Exam( message ) | ||||
| 	} | ||||
| } | ||||
							
								
								
									
										84
									
								
								samples/Jolie/hanoi.ol
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										84
									
								
								samples/Jolie/hanoi.ol
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,84 @@ | ||||
| // https://github.com/jolie/website/blob/master/docs/documentation/locations/code/local.ol | ||||
|  | ||||
| include "runtime.iol" | ||||
| include "string_utils.iol" | ||||
|  | ||||
| type HanoiRequest: void{ | ||||
|   .src: string | ||||
|   .aux: string | ||||
|   .dst: string | ||||
|   .n: int | ||||
|   .sid?: string | ||||
| } | ||||
|  | ||||
| type HanoiReponse: void { | ||||
|   .move?: string | ||||
| } | ||||
|  | ||||
| interface LocalOperations{ | ||||
|   RequestResponse: | ||||
|     hanoiSolver( HanoiRequest )( HanoiReponse ) | ||||
| } | ||||
|  | ||||
| interface ExternalOperations{ | ||||
|   RequestResponse: | ||||
|     hanoi( HanoiRequest )( string ) | ||||
| } | ||||
|  | ||||
| outputPort Self{ | ||||
|   Interfaces: LocalOperations | ||||
| } | ||||
|  | ||||
| inputPort Self { | ||||
|   Location: "local" | ||||
|   Interfaces: LocalOperations | ||||
| } | ||||
|  | ||||
| inputPort PowerService { | ||||
|   Location: "socket://localhost:8000" | ||||
|   Protocol: http{ | ||||
|     .format = "html" | ||||
|   } | ||||
|   Interfaces: ExternalOperations | ||||
| } | ||||
|  | ||||
| execution { concurrent } | ||||
|  | ||||
| init | ||||
| { | ||||
|   getLocalLocation@Runtime()( Self.location ) | ||||
| } | ||||
|  | ||||
| main | ||||
| { | ||||
|   [ hanoi( request )( response ){ | ||||
|     getRandomUUID@StringUtils()(request.sid); | ||||
|     hanoiSolver@Self( request )( subRes ); | ||||
|     response = subRes.move | ||||
|   }]{ nullProcess } | ||||
|  | ||||
|   [ hanoiSolver( request )( response ){ | ||||
|     if ( request.n > 0 ){ | ||||
|       subReq.n = request.n; | ||||
|       subReq.n--; | ||||
|       with( request ){ | ||||
|         subReq.aux = .dst; | ||||
|         subReq.dst = .aux; | ||||
|         subReq.src = .src; | ||||
|         subReq.sid = .sid | ||||
|       }; | ||||
|       hanoiSolver@Self( subReq )( response ); | ||||
|       response.move +=  "<br>" +  | ||||
|                 ++global.counters.(request.sid) +  | ||||
|                 ") Move from " + request.src + | ||||
|                 " to " + request.dst + ";"; | ||||
|       with ( request ){ | ||||
|         subReq.src = .aux; | ||||
|         subReq.aux = .src; | ||||
|         subReq.dst = .dst | ||||
|       }; | ||||
|       hanoiSolver@Self( subReq )( subRes ); | ||||
|       response.move += subRes.move | ||||
|     } | ||||
|   }]{ nullProcess } | ||||
| } | ||||
							
								
								
									
										29
									
								
								samples/Jolie/student.ol
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								samples/Jolie/student.ol
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,29 @@ | ||||
| include "common.iol" | ||||
| include "ui/swing_ui.iol" | ||||
| include "console.iol" | ||||
|  | ||||
| outputPort Exam { | ||||
| Location: Location_Exam | ||||
| Protocol: sodep | ||||
| Interfaces: ExamInterface | ||||
| } | ||||
|  | ||||
| inputPort StudentInput { | ||||
| Location: "socket://localhost:8001/" | ||||
| Protocol: sodep | ||||
| Interfaces: StudentInterface | ||||
| } | ||||
|  | ||||
| main | ||||
| { | ||||
| 	request.studentName = "John"; | ||||
| 	request.examName = "SPLG"; | ||||
| 	request.student.location = "socket://localhost:8001/"; | ||||
| 	request.student.protocol = "sodep"; | ||||
| 	startExam@Exam( request ); | ||||
| 	makeQuestion( question )( answer ) { | ||||
| 		showYesNoQuestionDialog@SwingUI( question.question )( answer ) | ||||
| 	}; | ||||
| 	sendMessage( message ); | ||||
| 	println@Console( message )() | ||||
| } | ||||
							
								
								
									
										49
									
								
								samples/LookML/example.model.lkml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										49
									
								
								samples/LookML/example.model.lkml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,49 @@ | ||||
| - label:          'desired label name' | ||||
| - connection:     connection_name | ||||
| - include:        filename_or_pattern | ||||
|   # Possibly more include declarations | ||||
| - persist_for:    N (seconds | minutes | hours) | ||||
| - case_sensitive: true | false | ||||
| - week_start_day: monday | tuesday | wednesday | thursday | friday | saturday | sunday | ||||
| - value_formats: | ||||
|   - name: desired_format_name | ||||
|     value_format: 'excel-style formatting string' | ||||
|   # Possibly more value formats | ||||
|  | ||||
| - explore: view_name | ||||
|   label:  'desired label name' | ||||
|   description: 'description string' | ||||
|   symmetric_aggregates: true | false | ||||
|   hidden: true | false | ||||
|   fields: [field_or_set, field_or_set, …] | ||||
|  | ||||
|   sql_always_where: SQL WHERE condition | ||||
|   always_filter: | ||||
|     field_name: 'looker filter expression' | ||||
|   conditionally_filter: | ||||
|     field_name: 'looker filter expression' | ||||
|     unless: [field_or_set, field_or_set, …] | ||||
|   access_filter_fields: [fully_scoped_field, fully_scoped_field, …] | ||||
|  | ||||
|   always_join: [view_name, view_name, …] | ||||
|   joins: | ||||
|     - join: view_name | ||||
|       type: left_outer | full_outer | inner | cross | ||||
|       relationship: one_to_one | many_to_one | one_to_many | many_to_many | ||||
|       from: view_name | ||||
|       sql_table_name: table_name | ||||
|       view_label: 'desired label name' | ||||
|       fields: [field_or_set, field_or_set, …] | ||||
|       required_joins: [view_name, view_name, …] | ||||
|       foreign_key: dimension_name | ||||
|       sql_on: SQL ON clause | ||||
|     # Possibly more join declarations | ||||
|  | ||||
|   persist_for: N (seconds | minutes | hours) | ||||
|   from: view_name | ||||
|   view: view_name | ||||
|   case_sensitive: true | false | ||||
|   sql_table_name: table_name | ||||
|   cancel_grouping_fields: [fully_scoped_field, fully_scoped_field, …] | ||||
|  | ||||
| # Possibly more explore declarations | ||||
							
								
								
									
										90
									
								
								samples/LookML/example.view.lkml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										90
									
								
								samples/LookML/example.view.lkml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,90 @@ | ||||
| - view: view_name | ||||
|   sql_table_name: table_name | ||||
|   suggestions: true | false | ||||
|  | ||||
|   derived_table: | ||||
|     sql: SQL query | ||||
|     persist_for: N (seconds | minutes | hours) | ||||
|     sql_trigger_value: SQL query | ||||
|     distribution: column_name | ||||
|     distribution_style: ALL | EVEN | ||||
|     sortkeys: [column_name, column_name, …] | ||||
|     indexes: [column_name, column_name, …] | ||||
|  | ||||
|   sets: | ||||
|     set_name: | ||||
|       - field_or_set | ||||
|       - field_or_set | ||||
|       - … | ||||
|     # Possibly more set declarations | ||||
|  | ||||
|   fields: | ||||
|   - (dimension | dimension_group | measure | filter): field_name | ||||
|     label: 'desired label name' | ||||
|     view_label: 'desired label name' | ||||
|     group_label: 'desired label name' | ||||
|     description: 'description string' | ||||
|     hidden: true | false | ||||
|     alias: [old_field_name, old_field_name, …] | ||||
|     value_format: 'excel-style formatting string' | ||||
|     value_format_name: format_name | ||||
|     html: HTML expression using Liquid template elements | ||||
|     sql: SQL expression to generate the field value | ||||
|     required_fields: [field_name, field_name, …] | ||||
|     drill_fields: [field_or_set, field_or_set, …] | ||||
|     can_filter: true | false | ||||
|     fanout_on: repeated_record_name | ||||
|  | ||||
|     # DIMENSION SPECIFIC PARAMETERS | ||||
|  | ||||
|     type: dimension_field_type | ||||
|     primary_key: true | false | ||||
|     sql_case: | ||||
|       value: SQL condition | ||||
|       value: SQL condition | ||||
|       # Possibly more sql_case statements | ||||
|     alpha_sort: true | false | ||||
|     tiers: [N, N, …] | ||||
|     style: classic | interval | integer | relational | ||||
|     sql_latitude: SQL expression to generate a latitude | ||||
|     sql_longitude: SQL expression to generate a longitude | ||||
|     suggestable: true | false | ||||
|     suggest_persist_for: N (seconds | minutes | hours) | ||||
|     suggest_dimension: dimension_name | ||||
|     suggest_explore: explore_name | ||||
|     suggestions: ['suggestion string', 'suggestion string', …] | ||||
|     bypass_suggest_restrictions: true | false | ||||
|     full_suggestions: true | false | ||||
|     skip_drill_filter: true | false | ||||
|     case_sensitive: true | false | ||||
|     order_by_field: dimension_name | ||||
|     map_layer: name_of_map_layer | ||||
|     links: | ||||
|       - label: 'desired label name' | ||||
|         url: desired_url | ||||
|         icon_url: url_of_an_ico_file | ||||
|       # Possibly more links | ||||
|  | ||||
|     # DIMENSION GROUP SPECIFIC PARAMETERS | ||||
|  | ||||
|     timeframes: [timeframe, timeframe, …] | ||||
|     convert_tz: true | false | ||||
|     datatype: epoch | timestamp | datetime | date | yyyymmdd | ||||
|  | ||||
|     # MEASURE SPECIFIC PARAMETERS | ||||
|  | ||||
|     type: measure_field_type | ||||
|     direction: row | column | ||||
|     approximate: true | false | ||||
|     approximate_threshold: N | ||||
|     sql_distinct_key: SQL expression to define repeated entities | ||||
|     list_field: dimension_name | ||||
|     filters: | ||||
|       dimension_name: 'looker filter expression' | ||||
|       # Possibly more filters statements | ||||
|  | ||||
|     # FILTER SPECIFIC PARAMETERS | ||||
|  | ||||
|     default_value: 'desired default value' | ||||
|  | ||||
|   # Possibly more dimension or measure declarations | ||||
| @@ -1,9 +1,14 @@ | ||||
| --- | ||||
| type: grammar | ||||
| name: css.tmbundle | ||||
| license: permissive | ||||
| curated: true | ||||
| --- | ||||
| # Installation | ||||
| 
 | ||||
| You can install this bundle in TextMate by opening the preferences and going to the bundles tab. After installation it will be automatically updated for you. | ||||
| 
 | ||||
| # General | ||||
| 
 | ||||
| * [Bundle Styleguide](http://kb.textmate.org/bundle_styleguide) — _before you make changes_ | ||||
| * [Commit Styleguide](http://kb.textmate.org/commit_styleguide) — _before you send a pull request_ | ||||
| * [Writing Bug Reports](http://kb.textmate.org/writing_bug_reports) — _before you report an issue_ | ||||
| 
 | ||||
| # License | ||||
| 
 | ||||
| If not otherwise specified (see below), files in this repository fall under the following license: | ||||
| 
 | ||||
| @@ -12,4 +17,4 @@ If not otherwise specified (see below), files in this repository fall under the | ||||
| 	express or implied warranty, and with no claim as to its | ||||
| 	suitability for any purpose. | ||||
| 
 | ||||
| An exception is made for files in readable text which contain their own license information, or files where an accompanying file exists (in the same directory) with a “-license” suffix added to the base-name name of the original file, and an extension of txt, html, or similar. For example “tidy” is accompanied by “tidy-license.txt”. | ||||
| An exception is made for files in readable text which contain their own license information, or files where an accompanying file exists (in the same directory) with a “-license” suffix added to the base-name name of the original file, and an extension of txt, html, or similar. For example “tidy” is accompanied by “tidy-license.txt”. | ||||
							
								
								
									
										192
									
								
								samples/Markdown/csharp6.workbook
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										192
									
								
								samples/Markdown/csharp6.workbook
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,192 @@ | ||||
| --- | ||||
| uti: com.xamarin.workbook | ||||
| platforms: | ||||
| - Console | ||||
| --- | ||||
|  | ||||
| # Using C# 6 | ||||
|  | ||||
| Some examples from Xamarin's [intro to C# 6](https://developer.xamarin.com/guides/cross-platform/advanced/csharp_six/). | ||||
|  | ||||
| * Null-conditional operator | ||||
|  | ||||
| * String Interpolation | ||||
|  | ||||
| * Expression-bodied Function Members | ||||
|  | ||||
| * Auto-property Initialization | ||||
|  | ||||
| * Index Initializers | ||||
|  | ||||
| * using static | ||||
|  | ||||
| ## Null-conditional operator | ||||
|  | ||||
| The `?.` operator automatically does a null-check before referencing the | ||||
| specified member. The example string array below has a `null` entry: | ||||
|  | ||||
| ```csharp | ||||
| var names = new string[] { "Foo", null }; | ||||
| ``` | ||||
|  | ||||
| In C# 5, a null-check is required before accessing the `.Length` property: | ||||
|  | ||||
| ```csharp | ||||
| // C# 5 | ||||
| int secondLength = 0; | ||||
| if (names[1] != null) | ||||
|   secondLength = names[1].Length; | ||||
| ``` | ||||
|  | ||||
| C# 6 allows the length to be queried in a single line; the entire | ||||
| statement returns `null` if any object is null. | ||||
|  | ||||
| ```csharp | ||||
| var length0 = names[0]?.Length; // 3 | ||||
| var length1 = names[1]?.Length; // null | ||||
| ``` | ||||
|  | ||||
| This can be used in conjunction with the `??` null coalescing operator | ||||
| to set a default value (such as `0`) in the example below: | ||||
|  | ||||
| ```csharp | ||||
| var lengths = names.Select (names => names?.Length ?? 0); //[3, 0] | ||||
| ``` | ||||
|  | ||||
| ## String Interpolation | ||||
|  | ||||
| Previously strings were built in a number of different ways: | ||||
|  | ||||
| ```csharp | ||||
| var animal = "Monkeys"; | ||||
| var food = "bananas"; | ||||
|  | ||||
| var out1 = String.Format ("{0} love to eat {1}", animal, food); | ||||
| var out2 = animal + " love to eat " + food; | ||||
| // or even StringBuilder | ||||
| ``` | ||||
|  | ||||
| C# 6 provides a simple syntax where the fieldname can be | ||||
| embedded directly in the string: | ||||
|  | ||||
| ```csharp | ||||
| $"{animal} love to eat {food}" | ||||
| ``` | ||||
|  | ||||
| String-formatting can also be done with this syntax: | ||||
|  | ||||
| ```csharp | ||||
| var values = new int[] { 1, 2, 3, 4, 12, 123456 }; | ||||
| foreach (var s in values.Select (i => $"The value is {i,10:N2}.")) { | ||||
|    Console.WriteLine (s); | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ## Expression-bodied Function Members | ||||
|  | ||||
| The `ToString` override in the following class is an expression-bodied | ||||
| function - a more succinct declaration syntax. | ||||
|  | ||||
| ```csharp | ||||
| class Person | ||||
| { | ||||
|     public string FirstName { get; } | ||||
|     public string LastName { get; } | ||||
|     public Person (string firstname, string lastname) | ||||
|     { | ||||
|        FirstName = firstname; | ||||
|        LastName = lastname; | ||||
|     } | ||||
|     // note there is no explicit `return` keyword | ||||
|     public override string ToString () => $"{LastName}, {FirstName} {LastName}"; | ||||
| } | ||||
| ``` | ||||
|  | ||||
| `void` expression bodied functions are also allowed so long as | ||||
| the expression is a statement: | ||||
|  | ||||
| ```csharp | ||||
| public void Log(string message) => System.Console.WriteLine($"{DateTime.Now.ToString ("s", System.Globalization.CultureInfo.InvariantCulture )}: {message}"); | ||||
| ``` | ||||
|  | ||||
| This simple example calls these two methods: | ||||
|  | ||||
| ```csharp | ||||
| Log(new Person("James", "Bond").ToString()) | ||||
| ``` | ||||
|  | ||||
| ## Auto-property Initialization | ||||
|  | ||||
| Properties (ie. specified with `{get;set;}`) can be initialized inline | ||||
| with C# 6: | ||||
|  | ||||
| ```csharp | ||||
| class Todo | ||||
| { | ||||
|     public bool Done { get; set; } = false; | ||||
|     public DateTime Created { get; } = DateTime.Now; | ||||
|     public string Description { get; } | ||||
|  | ||||
|     public Todo (string description) | ||||
|     { | ||||
|        this.Description = description; // can assign (only in constructor!) | ||||
|     } | ||||
|     public override string ToString () => $"'{Description}' was created on {Created}"; | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ```csharp | ||||
| new Todo("buy apples") | ||||
| ``` | ||||
|  | ||||
| ## Index Initializers | ||||
|  | ||||
| Dictionary-style data structures let you specify key/value | ||||
| types with a simple object-initializer-like syntax: | ||||
|  | ||||
| ```csharp | ||||
| var userInfo = new Dictionary<string,object> { | ||||
|     ["Created"] = DateTime.Now, | ||||
|     ["Due"] = DateTime.Now.AddSeconds(60 * 60 * 24), | ||||
|     ["Task"] = "buy lettuce" | ||||
| }; | ||||
| ``` | ||||
|  | ||||
| ## using static | ||||
|  | ||||
| Enumerations, and certain classes such as System.Math, are primarily | ||||
| holders of static values and functions. In C# 6, you can import all | ||||
| static members of a type with a single using static statement: | ||||
|  | ||||
| ```csharp | ||||
| using static System.Math; | ||||
| ``` | ||||
|  | ||||
| C# 6 code can then reference the static members directly, avoiding | ||||
| repetition of the class name (eg. `Math.PI` becomes `PI`): | ||||
|  | ||||
| ```csharp | ||||
| public class Location  | ||||
| { | ||||
|     public Location (double lat, double @long) {Latitude = lat; Longitude = @long;}  | ||||
|     public double Latitude = 0; public double Longitude = 0;  | ||||
| } | ||||
| static public double MilesBetween(Location loc1, Location loc2) | ||||
| { | ||||
|   double rlat1  = PI * loc1.Latitude / 180; | ||||
|   double rlat2  = PI * loc2.Latitude / 180; | ||||
|   double theta  = loc1.Longitude - loc2.Longitude; | ||||
|   double rtheta = PI * theta / 180; | ||||
|   double dist = | ||||
|       Sin(rlat1) * Sin(rlat2) + Cos(rlat1) * | ||||
|       Cos(rlat2) * Cos(rtheta); | ||||
|   dist = Acos(dist); | ||||
|   dist = dist*180/PI; | ||||
|   dist = dist*60*1.1515; | ||||
|   return dist; //miles | ||||
| } | ||||
| ``` | ||||
|  | ||||
| ```csharp | ||||
| MilesBetween (new Location(-12,22), new Location(-13,33)) | ||||
| ``` | ||||
							
								
								
									
										1
									
								
								samples/Markdown/minimal.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								samples/Markdown/minimal.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1 @@ | ||||
| _This_ is a **Markdown** readme. | ||||
							
								
								
									
										26
									
								
								samples/Marko/counter.marko
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										26
									
								
								samples/Marko/counter.marko
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,26 @@ | ||||
| class { | ||||
|     constructor() { | ||||
|         this.state = { count:0 }; | ||||
|     } | ||||
|     increment() { | ||||
|         this.state.count++; | ||||
|     } | ||||
| } | ||||
|  | ||||
| style { | ||||
|     .count { | ||||
|         color:#09c; | ||||
|         font-size:3em; | ||||
|     } | ||||
|     .example-button { | ||||
|         font-size:1em; | ||||
|         padding:0.5em; | ||||
|     } | ||||
| } | ||||
|  | ||||
| <div.count> | ||||
|     ${state.count} | ||||
| </div> | ||||
| <button.example-button on-click('increment')> | ||||
|     Click me! | ||||
| </button> | ||||
							
								
								
									
										15
									
								
								samples/Marko/hello.marko
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								samples/Marko/hello.marko
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,15 @@ | ||||
| $ var name = 'Frank'; | ||||
| $ var colors = ['red', 'green', 'blue']; | ||||
|  | ||||
| <h1> | ||||
|     Hello ${name}! | ||||
| </h1> | ||||
|  | ||||
| <ul if(colors.length)> | ||||
|     <li style={color: color} for(color in colors)> | ||||
|         ${color} | ||||
|     </li> | ||||
| </ul> | ||||
| <div else> | ||||
|     No colors! | ||||
| </div> | ||||
							
								
								
									
										36
									
								
								samples/Marko/rgb-sliders.marko
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										36
									
								
								samples/Marko/rgb-sliders.marko
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,36 @@ | ||||
| static const colors = ['red', 'green', 'blue']; | ||||
| static const defaultColor = [255, 0, 0]; | ||||
|  | ||||
| class { | ||||
| 	onInput(input) { | ||||
| 		this.state = { color: input.color || defaultColor }; | ||||
| 	} | ||||
|  | ||||
| 	updateColor() { | ||||
|     	this.state.color = colors.map((color) => { | ||||
|         	return parseInt(this.getEl(color + 'Input').value, 10); | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     getStyleColor() { | ||||
|     	return 'rgb(' + this.state.color.join(',') + ')'; | ||||
|     } | ||||
| } | ||||
|  | ||||
| <div.rgb-sliders> | ||||
|     <div.inputs> | ||||
|     	<for(i, color in colors)> | ||||
|         	<div> | ||||
|                 <label for-key=color+"Input"> | ||||
|                     ${color}: | ||||
|                 </label> | ||||
|                 <input type="range" max="255" | ||||
|                     key=color+"Input" | ||||
|                     on-input('updateColor') | ||||
|                     value=state.color[i] > | ||||
|             </div> | ||||
|         </for> | ||||
|     </div> | ||||
|     <div.color style={backgroundColor: component.getStyleColor()}> | ||||
|     </div> | ||||
| </div> | ||||
							
								
								
									
										51
									
								
								samples/Meson/filenames/meson.build
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										51
									
								
								samples/Meson/filenames/meson.build
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,51 @@ | ||||
| project('test', ['c'], | ||||
|   version: '0.1.0' | ||||
| ) | ||||
|  | ||||
| # This is a comment test('foo') | ||||
|  | ||||
| add_global_arguments(['-foo']) | ||||
| add_global_link_arguments(['-foo']) | ||||
|  | ||||
| gnome = import('gnome') # As is this | ||||
|  | ||||
| gnome.do_something('test') | ||||
|  | ||||
| meson.source_root() | ||||
|  | ||||
| foreach foo: bar | ||||
|   foreach baz : foo | ||||
|     message(baz) | ||||
|   endforeach | ||||
| endforeach | ||||
|  | ||||
| blah = ''' | ||||
| afjoakjflajf  # Test | ||||
| lflkasjf | ||||
| test\'test | ||||
| test\\\\test | ||||
| test\ntest | ||||
| ''' | ||||
|  | ||||
| foo = '' | ||||
| foo = '''''' | ||||
| foo = 'string' | ||||
| foo = '''string2''' | ||||
| foo = 12314 | ||||
| foo = 1231.1231 | ||||
| foo = true | ||||
| foo = false | ||||
| foo = ['te\'st', 1, 3.3, '''test'''] | ||||
| foo += 1231 | ||||
| foo = '@0@'.format('test') | ||||
| foo = include_directories('foo', kwarg: 'bar', include_directories: 'foo') | ||||
| foo = true ? 'true' : 'false' | ||||
| foo = 2 - 1 + 3 % 8 / 4 * 3 | ||||
|  | ||||
| if true and false | ||||
| elif false or true | ||||
| elif true not false | ||||
| elif foo == 12 | ||||
| elif (foo != 124) and (foo <= 200) | ||||
| else | ||||
| endif | ||||
							
								
								
									
										3
									
								
								samples/Meson/filenames/meson_options.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								samples/Meson/filenames/meson_options.txt
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| option('with-something', type: 'boolean', | ||||
|   value: true, | ||||
| ) | ||||
							
								
								
									
										329
									
								
								samples/P4/l2.p4
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										329
									
								
								samples/P4/l2.p4
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,329 @@ | ||||
| /* | ||||
| Copyright 2013-present Barefoot Networks, Inc.  | ||||
|  | ||||
| Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| you may not use this file except in compliance with the License. | ||||
| You may obtain a copy of the License at | ||||
|  | ||||
|     http://www.apache.org/licenses/LICENSE-2.0 | ||||
|  | ||||
| Unless required by applicable law or agreed to in writing, software | ||||
| distributed under the License is distributed on an "AS IS" BASIS, | ||||
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| See the License for the specific language governing permissions and | ||||
| limitations under the License. | ||||
| */ | ||||
|  | ||||
| /* | ||||
|  * Layer-2 processing | ||||
|  */ | ||||
|  | ||||
| header_type l2_metadata_t { | ||||
|     fields { | ||||
|         lkp_pkt_type : 3; | ||||
|         lkp_mac_sa : 48; | ||||
|         lkp_mac_da : 48; | ||||
|         lkp_mac_type : 16; | ||||
|  | ||||
|         l2_nexthop : 16;                       /* next hop from l2 */ | ||||
|         l2_nexthop_type : 1;                   /* ecmp or nexthop */ | ||||
|         l2_redirect : 1;                       /* l2 redirect action */ | ||||
|         l2_src_miss : 1;                       /* l2 source miss */ | ||||
|         l2_src_move : IFINDEX_BIT_WIDTH;       /* l2 source interface mis-match */ | ||||
|         stp_group: 10;                         /* spanning tree group id */ | ||||
|         stp_state : 3;                         /* spanning tree port state */ | ||||
|         bd_stats_idx : 16;                     /* ingress BD stats index */ | ||||
|         learning_enabled : 1;                  /* is learning enabled */ | ||||
|         port_vlan_mapping_miss : 1;            /* port vlan mapping miss */ | ||||
|         same_if_check : IFINDEX_BIT_WIDTH;     /* same interface check */ | ||||
|     } | ||||
| } | ||||
|  | ||||
| metadata l2_metadata_t l2_metadata; | ||||
|  | ||||
| #ifndef L2_DISABLE | ||||
| /*****************************************************************************/ | ||||
| /* Spanning tree lookup                                                      */ | ||||
| /*****************************************************************************/ | ||||
| action set_stp_state(stp_state) { | ||||
|     modify_field(l2_metadata.stp_state, stp_state); | ||||
| } | ||||
|  | ||||
| table spanning_tree { | ||||
|     reads { | ||||
|         ingress_metadata.ifindex : exact; | ||||
|         l2_metadata.stp_group: exact; | ||||
|     } | ||||
|     actions { | ||||
|         set_stp_state; | ||||
|     } | ||||
|     size : SPANNING_TREE_TABLE_SIZE; | ||||
| } | ||||
| #endif /* L2_DISABLE */ | ||||
|  | ||||
| control process_spanning_tree { | ||||
| #ifndef L2_DISABLE | ||||
|     if (l2_metadata.stp_group != STP_GROUP_NONE) { | ||||
|         apply(spanning_tree); | ||||
|     } | ||||
| #endif /* L2_DISABLE */ | ||||
| } | ||||
|  | ||||
| #ifndef L2_DISABLE | ||||
| /*****************************************************************************/ | ||||
| /* Source MAC lookup                                                         */ | ||||
| /*****************************************************************************/ | ||||
| action smac_miss() { | ||||
|     modify_field(l2_metadata.l2_src_miss, TRUE); | ||||
| } | ||||
|  | ||||
| action smac_hit(ifindex) { | ||||
|     bit_xor(l2_metadata.l2_src_move, ingress_metadata.ifindex, ifindex); | ||||
| } | ||||
|  | ||||
| table smac { | ||||
|     reads { | ||||
|         ingress_metadata.bd : exact; | ||||
|         l2_metadata.lkp_mac_sa : exact; | ||||
|     } | ||||
|     actions { | ||||
|         nop; | ||||
|         smac_miss; | ||||
|         smac_hit; | ||||
|     } | ||||
|     size : MAC_TABLE_SIZE; | ||||
| } | ||||
|  | ||||
| /*****************************************************************************/ | ||||
| /* Destination MAC lookup                                                    */ | ||||
| /*****************************************************************************/ | ||||
| action dmac_hit(ifindex) { | ||||
|     modify_field(ingress_metadata.egress_ifindex, ifindex); | ||||
|     bit_xor(l2_metadata.same_if_check, l2_metadata.same_if_check, ifindex); | ||||
| } | ||||
|  | ||||
| action dmac_multicast_hit(mc_index) { | ||||
|     modify_field(intrinsic_metadata.mcast_grp, mc_index); | ||||
| #ifdef FABRIC_ENABLE | ||||
|     modify_field(fabric_metadata.dst_device, FABRIC_DEVICE_MULTICAST); | ||||
| #endif /* FABRIC_ENABLE */ | ||||
| } | ||||
|  | ||||
| action dmac_miss() { | ||||
|     modify_field(ingress_metadata.egress_ifindex, IFINDEX_FLOOD); | ||||
| #ifdef FABRIC_ENABLE | ||||
|     modify_field(fabric_metadata.dst_device, FABRIC_DEVICE_MULTICAST); | ||||
| #endif /* FABRIC_ENABLE */ | ||||
| } | ||||
|  | ||||
| action dmac_redirect_nexthop(nexthop_index) { | ||||
|     modify_field(l2_metadata.l2_redirect, TRUE); | ||||
|     modify_field(l2_metadata.l2_nexthop, nexthop_index); | ||||
|     modify_field(l2_metadata.l2_nexthop_type, NEXTHOP_TYPE_SIMPLE); | ||||
| } | ||||
|  | ||||
| action dmac_redirect_ecmp(ecmp_index) { | ||||
|     modify_field(l2_metadata.l2_redirect, TRUE); | ||||
|     modify_field(l2_metadata.l2_nexthop, ecmp_index); | ||||
|     modify_field(l2_metadata.l2_nexthop_type, NEXTHOP_TYPE_ECMP); | ||||
| } | ||||
|  | ||||
| action dmac_drop() { | ||||
|     drop(); | ||||
| } | ||||
|  | ||||
| table dmac { | ||||
|     reads { | ||||
|         ingress_metadata.bd : exact; | ||||
|         l2_metadata.lkp_mac_da : exact; | ||||
|     } | ||||
|     actions { | ||||
| #ifdef OPENFLOW_ENABLE | ||||
|         openflow_apply; | ||||
|         openflow_miss; | ||||
| #endif /* OPENFLOW_ENABLE */ | ||||
|         nop; | ||||
|         dmac_hit; | ||||
|         dmac_multicast_hit; | ||||
|         dmac_miss; | ||||
|         dmac_redirect_nexthop; | ||||
|         dmac_redirect_ecmp; | ||||
|         dmac_drop; | ||||
|     } | ||||
|     size : MAC_TABLE_SIZE; | ||||
|     support_timeout: true; | ||||
| } | ||||
| #endif /* L2_DISABLE */ | ||||
|  | ||||
| control process_mac { | ||||
| #ifndef L2_DISABLE | ||||
|     apply(smac); | ||||
|     apply(dmac); | ||||
| #endif /* L2_DISABLE */ | ||||
| } | ||||
|  | ||||
| #ifndef L2_DISABLE | ||||
| /*****************************************************************************/ | ||||
| /* MAC learn notification                                                    */ | ||||
| /*****************************************************************************/ | ||||
| field_list mac_learn_digest { | ||||
|     ingress_metadata.bd; | ||||
|     l2_metadata.lkp_mac_sa; | ||||
|     ingress_metadata.ifindex; | ||||
| } | ||||
|  | ||||
| action generate_learn_notify() { | ||||
|     generate_digest(MAC_LEARN_RECEIVER, mac_learn_digest); | ||||
| } | ||||
|  | ||||
| table learn_notify { | ||||
|     reads { | ||||
|         l2_metadata.l2_src_miss : ternary; | ||||
|         l2_metadata.l2_src_move : ternary; | ||||
|         l2_metadata.stp_state : ternary; | ||||
|     } | ||||
|     actions { | ||||
|         nop; | ||||
|         generate_learn_notify; | ||||
|     } | ||||
|     size : LEARN_NOTIFY_TABLE_SIZE; | ||||
| } | ||||
| #endif /* L2_DISABLE */ | ||||
|  | ||||
| control process_mac_learning { | ||||
| #ifndef L2_DISABLE | ||||
|     if (l2_metadata.learning_enabled == TRUE) { | ||||
|         apply(learn_notify); | ||||
|     } | ||||
| #endif /* L2_DISABLE */ | ||||
| } | ||||
|  | ||||
|  | ||||
| /*****************************************************************************/ | ||||
| /* Validate packet                                                           */ | ||||
| /*****************************************************************************/ | ||||
| action set_unicast() { | ||||
|     modify_field(l2_metadata.lkp_pkt_type, L2_UNICAST); | ||||
| } | ||||
|  | ||||
| action set_unicast_and_ipv6_src_is_link_local() { | ||||
|     modify_field(l2_metadata.lkp_pkt_type, L2_UNICAST); | ||||
|     modify_field(ipv6_metadata.ipv6_src_is_link_local, TRUE); | ||||
| } | ||||
|  | ||||
| action set_multicast() { | ||||
|     modify_field(l2_metadata.lkp_pkt_type, L2_MULTICAST); | ||||
|     add_to_field(l2_metadata.bd_stats_idx, 1); | ||||
| } | ||||
|  | ||||
| action set_multicast_and_ipv6_src_is_link_local() { | ||||
|     modify_field(l2_metadata.lkp_pkt_type, L2_MULTICAST); | ||||
|     modify_field(ipv6_metadata.ipv6_src_is_link_local, TRUE); | ||||
|     add_to_field(l2_metadata.bd_stats_idx, 1); | ||||
| } | ||||
|  | ||||
| action set_broadcast() { | ||||
|     modify_field(l2_metadata.lkp_pkt_type, L2_BROADCAST); | ||||
|     add_to_field(l2_metadata.bd_stats_idx, 2); | ||||
| } | ||||
|  | ||||
| action set_malformed_packet(drop_reason) { | ||||
|     modify_field(ingress_metadata.drop_flag, TRUE); | ||||
|     modify_field(ingress_metadata.drop_reason, drop_reason); | ||||
| } | ||||
|  | ||||
| table validate_packet { | ||||
|     reads { | ||||
| #ifndef __TARGET_BMV2__ | ||||
|         l2_metadata.lkp_mac_sa mask 0x010000000000 : ternary; | ||||
| #else | ||||
|         l2_metadata.lkp_mac_sa : ternary; | ||||
| #endif | ||||
|         l2_metadata.lkp_mac_da : ternary; | ||||
|         l3_metadata.lkp_ip_type : ternary; | ||||
|         l3_metadata.lkp_ip_ttl : ternary; | ||||
|         l3_metadata.lkp_ip_version : ternary; | ||||
| #ifndef __TARGET_BMV2__ | ||||
|         ipv4_metadata.lkp_ipv4_sa mask 0xFF000000 : ternary; | ||||
| #else | ||||
|         ipv4_metadata.lkp_ipv4_sa : ternary; | ||||
| #endif | ||||
| #ifndef IPV6_DISABLE | ||||
| #ifndef __TARGET_BMV2__ | ||||
|         ipv6_metadata.lkp_ipv6_sa mask 0xFFFF0000000000000000000000000000 : ternary; | ||||
| #else | ||||
|         ipv6_metadata.lkp_ipv6_sa : ternary; | ||||
| #endif | ||||
| #endif /* IPV6_DISABLE */ | ||||
|     } | ||||
|     actions { | ||||
|         nop; | ||||
|         set_unicast; | ||||
|         set_unicast_and_ipv6_src_is_link_local; | ||||
|         set_multicast; | ||||
|         set_multicast_and_ipv6_src_is_link_local; | ||||
|         set_broadcast; | ||||
|         set_malformed_packet; | ||||
|     } | ||||
|     size : VALIDATE_PACKET_TABLE_SIZE; | ||||
| } | ||||
|  | ||||
| control process_validate_packet { | ||||
|     if (ingress_metadata.drop_flag == FALSE) { | ||||
|         apply(validate_packet); | ||||
|     } | ||||
| } | ||||
|  | ||||
|  | ||||
| /*****************************************************************************/ | ||||
| /* Egress BD lookup                                                          */ | ||||
| /*****************************************************************************/ | ||||
| action set_egress_bd_properties() { | ||||
| } | ||||
|  | ||||
| table egress_bd_map { | ||||
|     reads { | ||||
|         egress_metadata.bd : exact; | ||||
|     } | ||||
|     actions { | ||||
|         nop; | ||||
|         set_egress_bd_properties; | ||||
|     } | ||||
|     size : EGRESS_BD_MAPPING_TABLE_SIZE; | ||||
| } | ||||
|  | ||||
| control process_egress_bd { | ||||
|     apply(egress_bd_map); | ||||
| } | ||||
|  | ||||
|  | ||||
| /*****************************************************************************/ | ||||
| /* Egress VLAN decap                                                         */ | ||||
| /*****************************************************************************/ | ||||
| action remove_vlan_single_tagged() { | ||||
|     modify_field(ethernet.etherType, vlan_tag_[0].etherType); | ||||
|     remove_header(vlan_tag_[0]); | ||||
| } | ||||
|  | ||||
| action remove_vlan_double_tagged() { | ||||
|     modify_field(ethernet.etherType, vlan_tag_[1].etherType); | ||||
|     remove_header(vlan_tag_[0]); | ||||
|     remove_header(vlan_tag_[1]); | ||||
| } | ||||
|  | ||||
| table vlan_decap { | ||||
|     reads { | ||||
|         vlan_tag_[0] : valid; | ||||
|         vlan_tag_[1] : valid; | ||||
|     } | ||||
|     actions { | ||||
|         nop; | ||||
|         remove_vlan_single_tagged; | ||||
|         remove_vlan_double_tagged; | ||||
|     } | ||||
|     size: VLAN_DECAP_TABLE_SIZE; | ||||
| } | ||||
|  | ||||
| control process_vlan_decap { | ||||
|     apply(vlan_decap); | ||||
| } | ||||
							
								
								
									
										39
									
								
								samples/P4/mirror_acl.p4
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										39
									
								
								samples/P4/mirror_acl.p4
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,39 @@ | ||||
| // Copyright 2015, Barefoot Networks, Inc. | ||||
| // | ||||
| // Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| // you may not use this file except in compliance with the License. | ||||
| // You may obtain a copy of the License at | ||||
| // | ||||
| //     http://www.apache.org/licenses/LICENSE-2.0 | ||||
| // | ||||
| // Unless required by applicable law or agreed to in writing, software | ||||
| // distributed under the License is distributed on an "AS IS" BASIS, | ||||
| // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| // See the License for the specific language governing permissions and | ||||
| // limitations under the License. | ||||
|  | ||||
| action set_mirror_id(session_id) { | ||||
|     clone_ingress_pkt_to_egress(session_id); | ||||
| } | ||||
|  | ||||
| table mirror_acl { | ||||
|     reads { | ||||
|         ingress_metadata.if_label : ternary; | ||||
|         ingress_metadata.bd_label : ternary; | ||||
|  | ||||
|         /* ip acl */ | ||||
|         ingress_metadata.lkp_ipv4_sa : ternary; | ||||
|         ingress_metadata.lkp_ipv4_da : ternary; | ||||
|         ingress_metadata.lkp_ip_proto : ternary; | ||||
|  | ||||
|         /* mac acl */ | ||||
|         ingress_metadata.lkp_mac_sa : ternary; | ||||
|         ingress_metadata.lkp_mac_da : ternary; | ||||
|         ingress_metadata.lkp_mac_type : ternary; | ||||
|     } | ||||
|     actions { | ||||
|         nop; | ||||
|         set_mirror_id; | ||||
|     } | ||||
|     size : INGRESS_MIRROR_ACL_TABLE_SIZE; | ||||
| } | ||||
							
								
								
									
										12
									
								
								samples/PLSQL/print_bool.prc
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								samples/PLSQL/print_bool.prc
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | ||||
| create or replace procedure print_bool( | ||||
|     p_bool in BOOLEAN, | ||||
|     p_true_value in varchar2 default 'TRUE', | ||||
|     p_false_value in varchar2 := 'FALSE' | ||||
| ) | ||||
| as | ||||
| begin | ||||
|  | ||||
|     dbms_output.put_line(case when p_bool then p_true_value else p_false_value end); | ||||
|  | ||||
| end print_bool; | ||||
| / | ||||
							
								
								
									
										48
									
								
								samples/PLSQL/videodb.ddl
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										48
									
								
								samples/PLSQL/videodb.ddl
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,48 @@ | ||||
| CREATE TABLE users ( | ||||
|    user_name varchar2(40), | ||||
|    first_name varchar2(40), | ||||
|    last_name varchar2(40), | ||||
|    email varchar2(40), | ||||
|    password varchar2(40), | ||||
|    created_date DATE, | ||||
|    total_credits NUMBER, | ||||
|    credit_change_date DATE, | ||||
|    PRIMARY KEY (user_name) | ||||
| ); | ||||
| / | ||||
|  | ||||
| CREATE TABLE users_videos ( | ||||
|    video_id NUMBER, | ||||
|    video_name varchar2(40), | ||||
|    user_name varchar2(40), | ||||
|    description varchar2(512), | ||||
|    upload_date DATE, | ||||
|    PRIMARY KEY (video_id), | ||||
|    CONSTRAINT "USERS_VIDEOS_FK1" FOREIGN KEY ("USER_NAME") REFERENCES "USERS"("USER_NAME") | ||||
| ); | ||||
| / | ||||
|  | ||||
| create or replace procedure print_user_videos( | ||||
|     p_user_name in users.user_name%type | ||||
| ) | ||||
| AUTHID DEFINER | ||||
| as | ||||
|     type t_user_videos is table of users_videos%rowtype | ||||
|         index by pls_integer; | ||||
|     l_videos t_user_videos; | ||||
| begin | ||||
|  | ||||
|     select * | ||||
|     bulk collect into l_videos | ||||
|     from users_videos | ||||
|     where user_name = p_user_name; | ||||
|  | ||||
|     for i in 1..l_videos.COUNT | ||||
|     loop | ||||
|  | ||||
|         dbms_output.put_line(l_videos(i).video_name); | ||||
|  | ||||
|     end loop; | ||||
|  | ||||
| end print_user_videos; | ||||
| / | ||||
							
								
								
									
										4
									
								
								samples/Puppet/init.pp
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										4
									
								
								samples/Puppet/init.pp
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,4 @@ | ||||
| include docker | ||||
| include apt | ||||
| include ::something | ||||
| include foo::bar | ||||
							
								
								
									
										159
									
								
								samples/Python/argparse.pyi
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										159
									
								
								samples/Python/argparse.pyi
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,159 @@ | ||||
| # Stubs for argparse (Python 3.4) | ||||
|  | ||||
| from typing import ( | ||||
|     Any, Callable, Iterable, List, IO, Optional, Sequence, Tuple, Type, Union, | ||||
|     TypeVar, overload | ||||
| ) | ||||
| import sys | ||||
|  | ||||
| _T = TypeVar('_T') | ||||
|  | ||||
| if sys.version_info >= (3,): | ||||
|     _Text = str | ||||
| else: | ||||
|     _Text = Union[str, unicode] | ||||
|  | ||||
| ONE_OR_MORE = ...  # type: str | ||||
| OPTIONAL = ...  # type: str | ||||
| PARSER = ...  # type: str | ||||
| REMAINDER = ...  # type: str | ||||
| SUPPRESS = ...  # type: str | ||||
| ZERO_OR_MORE = ...  # type: str | ||||
|  | ||||
| class ArgumentError(Exception): ... | ||||
|  | ||||
| class ArgumentParser: | ||||
|     if sys.version_info >= (3, 5): | ||||
|         def __init__(self, | ||||
|                      prog: Optional[str] = ..., | ||||
|                      usage: Optional[str] = ..., | ||||
|                      description: Optional[str] = ..., | ||||
|                      epilog: Optional[str] = ..., | ||||
|                      parents: Sequence[ArgumentParser] = ..., | ||||
|                      formatter_class: Type[HelpFormatter] = ..., | ||||
|                      prefix_chars: _Text = ..., | ||||
|                      fromfile_prefix_chars: Optional[str] = ..., | ||||
|                      argument_default: Optional[str] = ..., | ||||
|                      conflict_handler: _Text = ..., | ||||
|                      add_help: bool = ..., | ||||
|                      allow_abbrev: bool = ...) -> None: ... | ||||
|     else: | ||||
|         def __init__(self, | ||||
|                      prog: Optional[_Text] = ..., | ||||
|                      usage: Optional[_Text] = ..., | ||||
|                      description: Optional[_Text] = ..., | ||||
|                      epilog: Optional[_Text] = ..., | ||||
|                      parents: Sequence[ArgumentParser] = ..., | ||||
|                      formatter_class: Type[HelpFormatter] = ..., | ||||
|                      prefix_chars: _Text = ..., | ||||
|                      fromfile_prefix_chars: Optional[_Text] = ..., | ||||
|                      argument_default: Optional[_Text] = ..., | ||||
|                      conflict_handler: _Text = ..., | ||||
|                      add_help: bool = ...) -> None: ... | ||||
|     def add_argument(self, | ||||
|                      *name_or_flags: Union[_Text, Sequence[_Text]], | ||||
|                      action: Union[_Text, Type[Action]] = ..., | ||||
|                      nargs: Union[int, _Text] = ..., | ||||
|                      const: Any = ..., | ||||
|                      default: Any = ..., | ||||
|                      type: Union[Callable[[str], _T], FileType] = ..., | ||||
|                      choices: Iterable[_T] = ..., | ||||
|                      required: bool = ..., | ||||
|                      help: _Text = ..., | ||||
|                      metavar: Union[_Text, Tuple[_Text, ...]] = ..., | ||||
|                      dest: _Text = ..., | ||||
|                      version: _Text = ...) -> None: ...  # weirdly documented | ||||
|     def parse_args(self, args: Optional[Sequence[_Text]] = ..., | ||||
|                    namespace: Optional[Namespace] = ...) -> Namespace: ... | ||||
|     def add_subparsers(self, title: _Text = ..., | ||||
|                        description: Optional[_Text] = ..., | ||||
|                        prog: _Text = ..., | ||||
|                        parser_class: Type[ArgumentParser] = ..., | ||||
|                        action: Type[Action] = ..., | ||||
|                        option_string: _Text = ..., | ||||
|                        dest: Optional[_Text] = ..., | ||||
|                        help: Optional[_Text] = ..., | ||||
|                        metavar: Optional[_Text] = ...) -> _SubParsersAction: ... | ||||
|     def add_argument_group(self, title: Optional[_Text] = ..., | ||||
|                            description: Optional[_Text] = ...) -> _ArgumentGroup: ... | ||||
|     def add_mutually_exclusive_group(self, required: bool = ...) -> _MutuallyExclusiveGroup: ... | ||||
|     def set_defaults(self, **kwargs: Any) -> None: ... | ||||
|     def get_default(self, dest: _Text) -> Any: ... | ||||
|     def print_usage(self, file: Optional[IO[str]] = ...) -> None: ... | ||||
|     def print_help(self, file: Optional[IO[str]] = ...) -> None: ... | ||||
|     def format_usage(self) -> str: ... | ||||
|     def format_help(self) -> str: ... | ||||
|     def parse_known_args(self, args: Optional[Sequence[_Text]] = ..., | ||||
|                          namespace: Optional[Namespace] = ...) -> Tuple[Namespace, List[str]]: ... | ||||
|     def convert_arg_line_to_args(self, arg_line: _Text) -> List[str]: ... | ||||
|     def exit(self, status: int = ..., message: Optional[_Text] = ...) -> None: ... | ||||
|     def error(self, message: _Text) -> None: ... | ||||
|  | ||||
| class HelpFormatter: | ||||
|     # not documented | ||||
|     def __init__(self, prog: _Text, indent_increment: int = ..., | ||||
|                  max_help_position: int = ..., | ||||
|                  width: Optional[int] = ...) -> None: ... | ||||
| class RawDescriptionHelpFormatter(HelpFormatter): ... | ||||
| class RawTextHelpFormatter(HelpFormatter): ... | ||||
| class ArgumentDefaultsHelpFormatter(HelpFormatter): ... | ||||
| if sys.version_info >= (3,): | ||||
|     class MetavarTypeHelpFormatter(HelpFormatter): ... | ||||
|  | ||||
| class Action: | ||||
|     def __init__(self, | ||||
|                  option_strings: Sequence[_Text], | ||||
|                  dest: _Text = ..., | ||||
|                  nargs: Optional[Union[int, _Text]] = ..., | ||||
|                  const: Any = ..., | ||||
|                  default: Any = ..., | ||||
|                  type: Union[Callable[[str], _T], FileType, None] = ..., | ||||
|                  choices: Optional[Iterable[_T]] = ..., | ||||
|                  required: bool = ..., | ||||
|                  help: Optional[_Text] = ..., | ||||
|                  metavar: Union[_Text, Tuple[_Text, ...]] = ...) -> None: ... | ||||
|     def __call__(self, parser: ArgumentParser, namespace: Namespace, | ||||
|                  values: Union[_Text, Sequence[Any], None], | ||||
|                  option_string: _Text = ...) -> None: ... | ||||
|  | ||||
| class Namespace: | ||||
|     def __getattr__(self, name: _Text) -> Any: ... | ||||
|     def __setattr__(self, name: _Text, value: Any) -> None: ... | ||||
|  | ||||
| class FileType: | ||||
|     if sys.version_info >= (3, 4): | ||||
|         def __init__(self, mode: _Text = ..., bufsize: int = ..., | ||||
|                      encoding: Optional[_Text] = ..., | ||||
|                      errors: Optional[_Text] = ...) -> None: ... | ||||
|     elif sys.version_info >= (3,): | ||||
|         def __init__(self, | ||||
|                      mode: _Text = ..., bufsize: int = ...) -> None: ... | ||||
|     else: | ||||
|         def __init__(self, | ||||
|                      mode: _Text = ..., bufsize: Optional[int] = ...) -> None: ... | ||||
|     def __call__(self, string: _Text) -> IO[Any]: ... | ||||
|  | ||||
| class _ArgumentGroup: | ||||
|     def add_argument(self, | ||||
|                      *name_or_flags: Union[_Text, Sequence[_Text]], | ||||
|                      action: Union[_Text, Type[Action]] = ..., | ||||
|                      nargs: Union[int, _Text] = ..., | ||||
|                      const: Any = ..., | ||||
|                      default: Any = ..., | ||||
|                      type: Union[Callable[[str], _T], FileType] = ..., | ||||
|                      choices: Iterable[_T] = ..., | ||||
|                      required: bool = ..., | ||||
|                      help: _Text = ..., | ||||
|                      metavar: Union[_Text, Tuple[_Text, ...]] = ..., | ||||
|                      dest: _Text = ..., | ||||
|                      version: _Text = ...) -> None: ... | ||||
|     def add_mutually_exclusive_group(self, required: bool = ...) -> _MutuallyExclusiveGroup: ... | ||||
|  | ||||
| class _MutuallyExclusiveGroup(_ArgumentGroup): ... | ||||
|  | ||||
| class _SubParsersAction: | ||||
|     # TODO: Type keyword args properly. | ||||
|     def add_parser(self, name: _Text, **kwargs: Any) -> ArgumentParser: ... | ||||
|  | ||||
| # not documented | ||||
| class ArgumentTypeError(Exception): ... | ||||
							
								
								
									
										12
									
								
								samples/Python/filenames/WORKSPACE
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								samples/Python/filenames/WORKSPACE
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | ||||
| # rules for scala | ||||
| # https://github.com/bazelbuild/rules_scala#getting-started | ||||
| # pull rule definitions from git | ||||
| git_repository( | ||||
|     name = "io_bazel_rules_scala", | ||||
|     remote = "https://github.com/bazelbuild/rules_scala.git", | ||||
|     commit = "73743b830ae98d13a946b25ad60cad5fee58e6d3", # update this as needed | ||||
| ) | ||||
|  | ||||
| # load the desired scala rules for this workspace | ||||
| load("@io_bazel_rules_scala//scala:scala.bzl", "scala_repositories") | ||||
| scala_repositories() | ||||
							
								
								
									
										72
									
								
								samples/R/import.Rd
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										72
									
								
								samples/R/import.Rd
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,72 @@ | ||||
| % Generated by roxygen2: do not edit by hand | ||||
| % Please edit documentation in R/hello.R | ||||
| \name{import} | ||||
| \alias{import} | ||||
| \title{Import a module into the current scope} | ||||
| \usage{ | ||||
| import(module, attach, attach_operators = TRUE) | ||||
| } | ||||
| \arguments{ | ||||
| \item{module}{an identifier specifying the full module path} | ||||
|  | ||||
| \item{attach}{if \code{TRUE}, attach the newly loaded module to the object | ||||
| search path (see \code{Details})} | ||||
|  | ||||
| \item{attach_operators}{if \code{TRUE}, attach operators of module to the | ||||
| object search path, even if \code{attach} is \code{FALSE}} | ||||
| } | ||||
| \value{ | ||||
| the loaded module environment (invisible) | ||||
| } | ||||
| \description{ | ||||
| \code{module = import('module')} imports a specified module and makes its | ||||
| code available via the environment-like object it returns. | ||||
| } | ||||
| \details{ | ||||
| Modules are loaded in an isolated environment which is returned, and | ||||
| optionally attached to the object search path of the current scope (if | ||||
| argument \code{attach} is \code{TRUE}). | ||||
| \code{attach} defaults to \code{FALSE}. However, in interactive code it is | ||||
| often helpful to attach packages by default. Therefore, in interactive code | ||||
| invoked directly from the terminal only (i.e. not within modules), | ||||
| \code{attach} defaults to the value of \code{options('import.attach')}, which | ||||
| can be set to \code{TRUE} or \code{FALSE} depending on the user’s preference. | ||||
|  | ||||
| \code{attach_operators} causes \emph{operators} to be attached by default, | ||||
| because operators can only be invoked in R if they re found in the search | ||||
| path. Not attaching them therefore drastically limits a module’s usefulness. | ||||
|  | ||||
| Modules are searched in the module search path \code{options('import.path')}. | ||||
| This is a vector of paths to consider, from the highest to the lowest | ||||
| priority. The current directory is \emph{always} considered first. That is, | ||||
| if a file \code{a.r} exists both in the current directory and in a module | ||||
| search path, the local file \code{./a.r} will be loaded. | ||||
|  | ||||
| Module names can be fully qualified to refer to nested paths. See | ||||
| \code{Examples}. | ||||
| } | ||||
| \note{ | ||||
| Unlike for packages, attaching happens \emph{locally}: if | ||||
| \code{import} is executed in the global environment, the effect is the same. | ||||
| Otherwise, the imported module is inserted as the parent of the current | ||||
| \code{environment()}. When used (globally) \emph{inside} a module, the newly | ||||
| imported module is only available inside the module’s search path, not | ||||
| outside it (nor in other modules which might be loaded). | ||||
| } | ||||
| \examples{ | ||||
| # `a.r` is a file in the local directory containing a function `f`. | ||||
| a = import('a') | ||||
| a$f() | ||||
|  | ||||
| # b/c.r is a file in path `b`, containing a function `g`. | ||||
| import('b/c', attach = TRUE) | ||||
| g() # No module name qualification necessary | ||||
|  | ||||
| } | ||||
| \seealso{ | ||||
| \code{unload} | ||||
|  | ||||
| \code{reload} | ||||
|  | ||||
| \code{module_name} | ||||
| } | ||||
							
								
								
									
										483
									
								
								samples/Reason/JSX.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										483
									
								
								samples/Reason/JSX.re
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,483 @@ | ||||
| type component = {displayName: string}; | ||||
|  | ||||
| let module Bar = { | ||||
|   let createElement c::c=? children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Nesting = { | ||||
|   let createElement children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Much = { | ||||
|   let createElement children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Foo = { | ||||
|   let createElement a::a=? b::b=? children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module One = { | ||||
|   let createElement | ||||
|       test::test=? | ||||
|       foo::foo=? | ||||
|       children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
|   let createElementobvioustypo | ||||
|       test::test | ||||
|       children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Two = { | ||||
|   let createElement foo::foo=? children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Sibling = { | ||||
|   let createElement | ||||
|       foo::foo=? | ||||
|       (children: list component) => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Test = { | ||||
|   let createElement yo::yo=? children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module So = { | ||||
|   let createElement children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Foo2 = { | ||||
|   let createElement children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Text = { | ||||
|   let createElement children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Exp = { | ||||
|   let createElement children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Pun = { | ||||
|   let createElement intended::intended=? children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Namespace = { | ||||
|   let module Foo = { | ||||
|     let createElement | ||||
|         intended::intended=? | ||||
|         anotherOptional::x=100 | ||||
|         children => { | ||||
|       displayName: "test" | ||||
|     }; | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module LotsOfArguments = { | ||||
|   let createElement | ||||
|       argument1::argument1=? | ||||
|       argument2::argument2=? | ||||
|       argument3::argument3=? | ||||
|       argument4::argument4=? | ||||
|       argument5::argument5=? | ||||
|       argument6::argument6=? | ||||
|       children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let div argument1::argument1=? children => { | ||||
|   displayName: "test" | ||||
| }; | ||||
|  | ||||
| let module List1 = { | ||||
|   let createElement children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module List2 = { | ||||
|   let createElement children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module List3 = { | ||||
|   let createElement children => { | ||||
|     displayName: "test" | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let (/><) a b => a + b; | ||||
|  | ||||
| let (><) a b => a + b; | ||||
|  | ||||
| let (/>) a b => a + b; | ||||
|  | ||||
| let (><\/) a b => a + b; | ||||
|  | ||||
| let tag1 = 5 />< 6; | ||||
|  | ||||
| let tag2 = 5 >< 7; | ||||
|  | ||||
| let tag3 = 5 /> 7; | ||||
|  | ||||
| let tag4 = 5 ><\/ 7; | ||||
|  | ||||
| let b = 2; | ||||
|  | ||||
| let selfClosing = <Foo />; | ||||
|  | ||||
| let selfClosing2 = <Foo a=1 b=true />; | ||||
|  | ||||
| let selfClosing3 = | ||||
|   <Foo | ||||
|     a="really long values that should" | ||||
|     b="cause the entire thing to wrap" | ||||
|   />; | ||||
|  | ||||
| let a = <Foo> <Bar c=(fun a => a + 2) /> </Foo>; | ||||
|  | ||||
| let a3 = <So> <Much> <Nesting /> </Much> </So>; | ||||
|  | ||||
| let a4 = | ||||
|   <Sibling> | ||||
|     <One test=true foo=b /> | ||||
|     <Two foo=b /> | ||||
|   </Sibling>; | ||||
|  | ||||
| let a5 = <Foo> "testing a string here" </Foo>; | ||||
|  | ||||
| let a6 = | ||||
|   <Foo2> | ||||
|     <Text> "testing a string here" </Text> | ||||
|     <Test yo=1 /> | ||||
|     <Text> "another string" </Text> | ||||
|     <Bar /> | ||||
|     <Exp> (2 + 4) </Exp> | ||||
|   </Foo2>; | ||||
|  | ||||
| let intended = true; | ||||
|  | ||||
| let punning = <Pun intended />; | ||||
|  | ||||
| let namespace = <Namespace.Foo />; | ||||
|  | ||||
| let c = <Foo />; | ||||
|  | ||||
| let d = <Foo />; | ||||
|  | ||||
| let spaceBefore = | ||||
|   <So> <Much> <Nesting /> </Much> </So>; | ||||
|  | ||||
| let spaceBefore2 = <So> <Much /> </So>; | ||||
|  | ||||
| let siblingNotSpaced = | ||||
|   <So> <Much /> <Much /> </So>; | ||||
|  | ||||
| let jsxInList = [<Foo />]; | ||||
|  | ||||
| let jsxInList2 = [<Foo />]; | ||||
|  | ||||
| let jsxInListA = [<Foo />]; | ||||
|  | ||||
| let jsxInListB = [<Foo />]; | ||||
|  | ||||
| let jsxInListC = [<Foo />]; | ||||
|  | ||||
| let jsxInListD = [<Foo />]; | ||||
|  | ||||
| let jsxInList3 = [<Foo />, <Foo />, <Foo />]; | ||||
|  | ||||
| let jsxInList4 = [<Foo />, <Foo />, <Foo />]; | ||||
|  | ||||
| let jsxInList5 = [<Foo />, <Foo />]; | ||||
|  | ||||
| let jsxInList6 = [<Foo />, <Foo />]; | ||||
|  | ||||
| let jsxInList7 = [<Foo />, <Foo />]; | ||||
|  | ||||
| let jsxInList8 = [<Foo />, <Foo />]; | ||||
|  | ||||
| let testFunc b => b; | ||||
|  | ||||
| let jsxInFnCall = testFunc <Foo />; | ||||
|  | ||||
| let lotsOfArguments = | ||||
|   <LotsOfArguments | ||||
|     argument1=1 | ||||
|     argument2=2 | ||||
|     argument3=3 | ||||
|     argument4=4 | ||||
|     argument5=5 | ||||
|     argument6="test"> | ||||
|     <Namespace.Foo /> | ||||
|   </LotsOfArguments>; | ||||
|  | ||||
| let lowerCase = <div argument1=1 />; | ||||
|  | ||||
| let b = 0; | ||||
|  | ||||
| let d = 0; | ||||
|  | ||||
| /* | ||||
|  * Should pun the first example: | ||||
|  */ | ||||
| let a = <Foo a> 5 </Foo>; | ||||
|  | ||||
| let a = <Foo a=b> 5 </Foo>; | ||||
|  | ||||
| let a = <Foo a=b b=d> 5 </Foo>; | ||||
|  | ||||
| let a = <Foo a> 0.55 </Foo>; | ||||
|  | ||||
| let a = Foo.createElement "" [@JSX]; | ||||
|  | ||||
| let ident = <Foo> a </Foo>; | ||||
|  | ||||
| let fragment1 = <> <Foo /> <Foo /> </>; | ||||
|  | ||||
| let fragment2 = <> <Foo /> <Foo /> </>; | ||||
|  | ||||
| let fragment3 = <> <Foo /> <Foo /> </>; | ||||
|  | ||||
| let fragment4 = <> <Foo /> <Foo /> </>; | ||||
|  | ||||
| let fragment5 = <> <Foo /> <Foo /> </>; | ||||
|  | ||||
| let fragment6 = <> <Foo /> <Foo /> </>; | ||||
|  | ||||
| let fragment7 = <> <Foo /> <Foo /> </>; | ||||
|  | ||||
| let fragment8 = <> <Foo /> <Foo /> </>; | ||||
|  | ||||
| let fragment9 = <> 2 2 2 2 </>; | ||||
|  | ||||
| let fragment10 = <> 2.2 3.2 4.6 1.2 </>; | ||||
|  | ||||
| let fragment11 = <> "str" </>; | ||||
|  | ||||
| let fragment12 = <> (6 + 2) (6 + 2) (6 + 2) </>; | ||||
|  | ||||
| let fragment13 = <> fragment11 fragment11 </>; | ||||
|  | ||||
| let listOfItems1 = <List1> 1 2 3 4 5 </List1>; | ||||
|  | ||||
| let listOfItems2 = | ||||
|   <List2> 1.0 2.8 3.8 4.0 5.1 </List2>; | ||||
|  | ||||
| let listOfItems3 = | ||||
|   <List3> fragment11 fragment11 </List3>; | ||||
|  | ||||
| /* | ||||
|  * Several sequential simple jsx expressions must be separated with a space. | ||||
|  */ | ||||
| let thisIsRight a b => (); | ||||
|  | ||||
| let tagOne children => (); | ||||
|  | ||||
| let tagTwo children => (); | ||||
|  | ||||
| /* thisIsWrong <tagOne /><tagTwo />; */ | ||||
| thisIsRight <tagOne /> <tagTwo />; | ||||
|  | ||||
| /* thisIsWrong <tagOne> </tagOne><tagTwo> </tagTwo>; */ | ||||
| thisIsRight <tagOne /> <tagTwo />; | ||||
|  | ||||
| let a children => (); | ||||
|  | ||||
| let b children => (); | ||||
|  | ||||
| let thisIsOkay = | ||||
|   <List1> <a /> <b /> <a /> <b /> </List1>; | ||||
|  | ||||
| let thisIsAlsoOkay = | ||||
|   <List1> <a /> <b /> </List1>; | ||||
|  | ||||
| /* Doesn't make any sense, but suppose you defined an | ||||
|    infix operator to compare jsx */ | ||||
| <a /> < <b />; | ||||
|  | ||||
| <a /> > <b />; | ||||
|  | ||||
| <a /> < <b />; | ||||
|  | ||||
| <a /> > <b />; | ||||
|  | ||||
| let listOfListOfJsx = [<> </>]; | ||||
|  | ||||
| let listOfListOfJsx = [<> <Foo /> </>]; | ||||
|  | ||||
| let listOfListOfJsx = [ | ||||
|   <> <Foo /> </>, | ||||
|   <> <Bar /> </> | ||||
| ]; | ||||
|  | ||||
| let listOfListOfJsx = [ | ||||
|   <> <Foo /> </>, | ||||
|   <> <Bar /> </>, | ||||
|   ...listOfListOfJsx | ||||
| ]; | ||||
|  | ||||
| let sameButWithSpaces = [<> </>]; | ||||
|  | ||||
| let sameButWithSpaces = [<> <Foo /> </>]; | ||||
|  | ||||
| let sameButWithSpaces = [ | ||||
|   <> <Foo /> </>, | ||||
|   <> <Bar /> </> | ||||
| ]; | ||||
|  | ||||
| let sameButWithSpaces = [ | ||||
|   <> <Foo /> </>, | ||||
|   <> <Bar /> </>, | ||||
|   ...sameButWithSpaces | ||||
| ]; | ||||
|  | ||||
| /* | ||||
|  * Test named tag right next to an open bracket. | ||||
|  */ | ||||
| let listOfJsx = []; | ||||
|  | ||||
| let listOfJsx = [<Foo />]; | ||||
|  | ||||
| let listOfJsx = [<Foo />, <Bar />]; | ||||
|  | ||||
| let listOfJsx = [<Foo />, <Bar />, ...listOfJsx]; | ||||
|  | ||||
| let sameButWithSpaces = []; | ||||
|  | ||||
| let sameButWithSpaces = [<Foo />]; | ||||
|  | ||||
| let sameButWithSpaces = [<Foo />, <Bar />]; | ||||
|  | ||||
| let sameButWithSpaces = [ | ||||
|   <Foo />, | ||||
|   <Bar />, | ||||
|   ...sameButWithSpaces | ||||
| ]; | ||||
|  | ||||
|  | ||||
| /** | ||||
|  * Test no conflict with polymorphic variant types. | ||||
|  */ | ||||
| type thisType = [ | `Foo | `Bar]; | ||||
|  | ||||
| type t 'a = [< thisType] as 'a; | ||||
|  | ||||
| let asd = | ||||
|   <One test=true foo=2> "a" "b" </One> [@foo]; | ||||
|  | ||||
| let asd2 = | ||||
|   One.createElementobvioustypo | ||||
|   test::false | ||||
|   ["a", "b"] | ||||
|   [@JSX] | ||||
|   [@foo]; | ||||
|  | ||||
| let span | ||||
|     test::(test: bool) | ||||
|     foo::(foo: int) | ||||
|     children => 1; | ||||
|  | ||||
| let asd = | ||||
|   <span test=true foo=2> "a" "b" </span> [@foo]; | ||||
|  | ||||
| /* "video" call doesn't end with a list, so the expression isn't converted to JSX */ | ||||
| let video test::(test: bool) children => children; | ||||
|  | ||||
| let asd2 = video test::false 10 [@JSX] [@foo]; | ||||
|  | ||||
| let div children => 1; | ||||
|  | ||||
| ((fun () => div) ()) [] [@JSX]; | ||||
|  | ||||
| let myFun () => | ||||
|   <> | ||||
|     <Namespace.Foo | ||||
|       intended=true | ||||
|       anotherOptional=200 | ||||
|     /> | ||||
|     <Namespace.Foo | ||||
|       intended=true | ||||
|       anotherOptional=200 | ||||
|     /> | ||||
|     <Namespace.Foo | ||||
|       intended=true anotherOptional=200> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|     </Namespace.Foo> | ||||
|   </>; | ||||
|  | ||||
| let myFun () => <> </>; | ||||
|  | ||||
| let myFun () => | ||||
|   <> | ||||
|     <Namespace.Foo | ||||
|       intended=true | ||||
|       anotherOptional=200 | ||||
|     /> | ||||
|     <Namespace.Foo | ||||
|       intended=true | ||||
|       anotherOptional=200 | ||||
|     /> | ||||
|     <Namespace.Foo | ||||
|       intended=true anotherOptional=200> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|       <Foo /> | ||||
|     </Namespace.Foo> | ||||
|   </>; | ||||
|  | ||||
|  | ||||
| /** | ||||
|  * Children should wrap without forcing attributes to. | ||||
|  */ | ||||
| <Foo a=10 b=0> | ||||
|   <Bar /> | ||||
|   <Bar /> | ||||
|   <Bar /> | ||||
|   <Bar /> | ||||
| </Foo>; | ||||
| /** | ||||
|  * Failing test cases: | ||||
|  */ | ||||
| /* let res = <Foo a=10 b=(<Foo a=200 />) > */ | ||||
| /*   <Bar /> */ | ||||
| /* </Foo>; */ | ||||
| /* let res = <Foo a=10 b=(<Foo a=200 />) />; */ | ||||
							
								
								
									
										1326
									
								
								samples/Reason/Layout.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1326
									
								
								samples/Reason/Layout.re
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										344
									
								
								samples/Reason/Machine.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										344
									
								
								samples/Reason/Machine.re
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,344 @@ | ||||
| open Format; | ||||
|  | ||||
| let module Endo = { | ||||
|   type t 'a = 'a => 'a; | ||||
| }; | ||||
|  | ||||
| let module Syntax = { | ||||
|   let module Var = { | ||||
|     type t = int; | ||||
|   }; | ||||
|   let module Term = { | ||||
|     type t = | ||||
|       | App t t | ||||
|       | Lam t | ||||
|       | Var Var.t | ||||
|       ; | ||||
|   }; | ||||
|   let module Sub = { | ||||
|     type t 'a = | ||||
|       | Cmp (t 'a) (t 'a) | ||||
|       | Dot 'a (t 'a) | ||||
|       | Id | ||||
|       | Shift | ||||
|       ; | ||||
|  | ||||
|     let map f sgm => { | ||||
|       let rec go = fun | ||||
|       | Cmp sgm0 sgm1 => Cmp (go sgm0) (go sgm1) | ||||
|       | Dot a sgm => Dot (f a) (go sgm) | ||||
|       | Id => Id | ||||
|       | Shift => Shift | ||||
|       ; | ||||
|       go sgm; | ||||
|     }; | ||||
|  | ||||
|     let rec apply sgm e => | ||||
|       switch (sgm, e) { | ||||
|       | (sgm, Term.App e0 e1) => Term.App (apply sgm e0) (apply sgm e1) | ||||
|       | (sgm, Term.Lam e) => Term.Lam (apply (Dot (Term.Var 0) (Cmp sgm Shift)) e) | ||||
|       | (Dot e _, Term.Var 0) => e | ||||
|       | (Dot _ sgm, Term.Var i) => apply sgm (Term.Var (i - 1)) | ||||
|       | (Id, Term.Var i) => Term.Var i | ||||
|       | (Shift, Term.Var i) => Term.Var (i + 1) | ||||
|       | (Cmp rho sgm, e) => apply sgm (apply rho e) | ||||
|       }; | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Zip = { | ||||
|   open Syntax; | ||||
|   type t 'a = | ||||
|     | App0 (t 'a) 'a | ||||
|     | App1 'a (t 'a) | ||||
|     | Halt | ||||
|     | Lam (t 'a) | ||||
|     ; | ||||
|  | ||||
|   let map f sgm => { | ||||
|     let rec go = fun | ||||
|     | App0 zip e1 => App0 (go zip) (f e1) | ||||
|     | App1 e0 zip => App1 (f e0) (go zip) | ||||
|     | Halt => Halt | ||||
|     | Lam zip => Lam (go zip) | ||||
|     ; | ||||
|     go sgm; | ||||
|   }; | ||||
|  | ||||
|   let rec apply zip acc => switch zip { | ||||
|     | App0 zip e1 => apply zip (Term.App acc e1) | ||||
|     | App1 e0 zip => apply zip (Term.App e0 acc) | ||||
|     | Halt => acc | ||||
|     | Lam zip => apply zip (Term.Lam acc) | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Clo = { | ||||
|   open Syntax; | ||||
|   type t = | ||||
|     | Clo Term.t (Sub.t t); | ||||
|   let rec from (Clo term sgm) => Sub.apply (Sub.map from sgm) term; | ||||
| }; | ||||
|  | ||||
| let module Pretty = { | ||||
|   let module Delim = { | ||||
|     type t = string; | ||||
|     let pp prev next fmt token => if (prev < next) { fprintf fmt "%s" token }; | ||||
|   }; | ||||
|   let module Prec = { | ||||
|     type t = int; | ||||
|     open Syntax.Term; | ||||
|     let calc = fun | ||||
|       | App _ _ => 1 | ||||
|       | Lam _ => 2 | ||||
|       | Var _ => 0 | ||||
|       ; | ||||
|   }; | ||||
|   let module Name = { | ||||
|     type t = string; | ||||
|  | ||||
|     let suffix = { | ||||
|       let script = fun | ||||
|         | 0 => "₀" | ||||
|         | 1 => "₁" | ||||
|         | 2 => "₂" | ||||
|         | 3 => "₃" | ||||
|         | 4 => "₄" | ||||
|         | 5 => "₅" | ||||
|         | 6 => "₆" | ||||
|         | 7 => "₇" | ||||
|         | 8 => "₈" | ||||
|         | 9 => "₉" | ||||
|         | _ => failwith "bad subscript"; | ||||
|       let rec go acc => fun | ||||
|         | 0 => acc | ||||
|         | n => go (script (n mod 10) ^ acc) (n / 10); | ||||
|       go "" | ||||
|     }; | ||||
|  | ||||
|     let gen = { | ||||
|       let offset = 97; | ||||
|       let width = 26; | ||||
|       fun () i => { | ||||
|         let code = i mod width + offset; | ||||
|         let char = Char.chr code; | ||||
|         let prime = i / width; | ||||
|         let suffix = suffix prime; | ||||
|         let name = Char.escaped char ^ suffix; | ||||
|         Some name; | ||||
|       } | ||||
|     }; | ||||
|   }; | ||||
|  | ||||
|   let module Env = { | ||||
|     type t = { | ||||
|       used: list Name.t, | ||||
|       rest: Stream.t Name.t, | ||||
|     }; | ||||
|     let mk () => { | ||||
|       let used = []; | ||||
|       let rest = Stream.from @@ Name.gen (); | ||||
|       { used, rest }; | ||||
|     }; | ||||
|   }; | ||||
|  | ||||
|   type printer 'a = Env.t => Prec.t => formatter => 'a => unit; | ||||
|  | ||||
|   let module Term = { | ||||
|     open Syntax.Term; | ||||
|     let rec pp ({ Env.used: used, rest } as env) prev fmt e => { | ||||
|       let next = Prec.calc e; | ||||
|       switch e { | ||||
|       | App e0 e1 => | ||||
|         fprintf fmt "@[%a%a@ %a%a@]" | ||||
|           (Delim.pp prev next) "(" | ||||
|           (pp env 1) e0 | ||||
|           (pp env 0) e1 | ||||
|           (Delim.pp prev next) ")" | ||||
|       | Lam e => | ||||
|         let name = Stream.next rest; | ||||
|         let env = { ...env, Env.used: [name, ...used] }; | ||||
|         fprintf fmt "%aλ%a.%a%a" | ||||
|           (Delim.pp prev next) "(" | ||||
|           (pp_print_string) name | ||||
|           (pp env next) e | ||||
|           (Delim.pp prev next) ")" | ||||
|       | Var index => | ||||
|         fprintf fmt "%s" @@ try (List.nth used index) { | ||||
|           | _ => "#" ^ string_of_int index | ||||
|           } | ||||
|       } | ||||
|     }; | ||||
|   }; | ||||
|  | ||||
|   let module Sub = { | ||||
|     open Syntax.Sub; | ||||
|     let rec pp pp_elem env prev fmt => fun | ||||
|     | Cmp sgm1 sgm0 => | ||||
|       fprintf fmt "@[%a;@ %a@]" | ||||
|         (pp pp_elem env prev) sgm1 | ||||
|         (pp pp_elem env prev) sgm0 | ||||
|     | Dot e sgm => | ||||
|       fprintf fmt "@[%a@ ·@ %a@]" | ||||
|         (pp_elem env prev) e | ||||
|         (pp pp_elem env prev) sgm | ||||
|     | Id => | ||||
|       fprintf fmt "ι" | ||||
|     | Shift => | ||||
|       fprintf fmt "↑" | ||||
|     ; | ||||
|   }; | ||||
|  | ||||
|   let module Clo = { | ||||
|     let rec pp env prev fmt (Clo.Clo e sgm) => { | ||||
|       let next = Prec.calc e; | ||||
|       fprintf fmt "@[%a%a%a[%a]@]" | ||||
|         (Delim.pp prev next) "(" | ||||
|         (Term.pp env next) e | ||||
|         (Delim.pp prev next) ")" | ||||
|         (Sub.pp pp env next) sgm | ||||
|     }; | ||||
|   }; | ||||
|  | ||||
|   let module Zip = { | ||||
|     open Zip; | ||||
|     let rec pp pp_elem env prev fmt => fun | ||||
|     | App0 zip elem => | ||||
|       fprintf fmt "inl@[<v -1>⟨@,%a@,%a⟩@]" | ||||
|         (pp pp_elem env prev) zip | ||||
|         (pp_elem env prev) elem | ||||
|     | App1 elem zip => | ||||
|       fprintf fmt "inr@[<v -1>⟨@,%a@,%a⟩@]" | ||||
|         (pp_elem env prev) elem | ||||
|         (pp pp_elem env prev) zip | ||||
|     | Halt => | ||||
|       fprintf fmt "halt" | ||||
|     | Lam zip => | ||||
|       fprintf fmt "lam@[<v -1>⟨@,%a⟩@]" | ||||
|         (pp pp_elem env prev) zip | ||||
|     ; | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Machine = { | ||||
|   type t = { | ||||
|     clo: Clo.t, | ||||
|     ctx: Zip.t Clo.t, | ||||
|   }; | ||||
|  | ||||
|   let into e => { | ||||
|     open Clo; | ||||
|     open Syntax.Sub; | ||||
|     let clo = Clo e Id; | ||||
|     let ctx = Zip.Halt; | ||||
|     { clo, ctx } | ||||
|   }; | ||||
|  | ||||
|   let from { clo, ctx } => Zip.apply (Zip.map Clo.from ctx) (Clo.from clo); | ||||
|  | ||||
|   let pp fmt rule state => { | ||||
|     fprintf fmt "@[<v>ctx  ::@[<v -5>@,%a@]@,clo  ::@[<v -5>@,%a@]@,rule ::@[<v -5>@,%a@]@,term ::@[<v -5>@,%a@]@]@." | ||||
|       (Pretty.Zip.pp Pretty.Clo.pp (Pretty.Env.mk ()) 2) state.ctx | ||||
|                     (Pretty.Clo.pp (Pretty.Env.mk ()) 2) state.clo | ||||
|                                        (pp_print_string) rule | ||||
|                    (Pretty.Term.pp (Pretty.Env.mk ()) 2) (from state) | ||||
|   }; | ||||
|  | ||||
|   let halted state => { | ||||
|     open Clo; | ||||
|     open Syntax.Sub; | ||||
|     open Syntax.Term; | ||||
|     switch state { | ||||
|     | { clo: Clo (Var _) Id, _ } => true | ||||
|     | _ => false | ||||
|     } [@warning "-4"]; | ||||
|   }; | ||||
|  | ||||
|   let step state => { | ||||
|     open Clo; | ||||
|     open Syntax.Sub; | ||||
|     open Syntax.Term; | ||||
|     let rule = ref ""; | ||||
|     let state = switch state { | ||||
|     /* left */ | ||||
|     | { clo: Clo (App e0 e1) sgm, ctx } => | ||||
|       let clo = Clo e0 sgm; | ||||
|       let ctx = Zip.App0 ctx (Clo e1 sgm); | ||||
|       rule := "LEFT"; | ||||
|       { clo, ctx }; | ||||
|     /* beta */ | ||||
|     | { clo: Clo (Lam e) sgm, ctx: Zip.App0 ctx c0 } => | ||||
|       let clo = Clo e (Cmp (Dot c0 sgm) Id); | ||||
|       rule := "BETA"; | ||||
|       { clo, ctx }; | ||||
|     /* lambda */ | ||||
|     | { clo: Clo (Lam e) sgm, ctx } => | ||||
|       let clo = Clo e (Cmp (Dot (Clo (Var 0) Id) (Cmp sgm Shift)) Id); | ||||
|       let ctx = Zip.Lam ctx; | ||||
|       rule := "LAMBDA"; | ||||
|       { clo, ctx }; | ||||
|     /* associate */ | ||||
|     | { clo: Clo (Var n) (Cmp (Cmp pi rho) sgm), ctx } => | ||||
|       let clo = Clo (Var n) (Cmp pi (Cmp rho sgm)); | ||||
|       rule := "ASSOCIATE"; | ||||
|       { clo, ctx }; | ||||
|     /* head */ | ||||
|     | { clo: Clo (Var 0) (Cmp (Dot (Clo e pi) _) sgm), ctx } => | ||||
|       let clo = Clo e (Cmp pi sgm); | ||||
|       rule := "HEAD"; | ||||
|       { clo, ctx }; | ||||
|     /* tail */ | ||||
|     | { clo: Clo (Var n) (Cmp (Dot (Clo _ _) rho) sgm), ctx } => | ||||
|       let clo = Clo (Var (n - 1)) (Cmp rho sgm); | ||||
|       rule := "TAIL"; | ||||
|       { clo, ctx }; | ||||
|     /* shift */ | ||||
|     | { clo: Clo (Var n) (Cmp Shift sgm), ctx } => | ||||
|       let clo = Clo (Var (n + 1)) sgm; | ||||
|       rule := "SHIFT"; | ||||
|       { clo, ctx }; | ||||
|     /* id */ | ||||
|     | { clo: Clo (Var n) (Cmp Id sgm), ctx } => | ||||
|       let clo = Clo (Var n) sgm; | ||||
|       rule := "ID"; | ||||
|       { clo, ctx }; | ||||
|     | _ => | ||||
|       pp std_formatter !rule state; | ||||
|       failwith "bad state"; | ||||
|     } [@warning "-4"]; | ||||
|     pp std_formatter !rule state; | ||||
|     state; | ||||
|   }; | ||||
|  | ||||
|   let norm e => { | ||||
|     let count = ref 0; | ||||
|     let state = ref (into e); | ||||
|     while (not (halted !state)) { | ||||
|       fprintf std_formatter "@\n--- step[%d] ---@\n" !count; | ||||
|       incr count; | ||||
|       state := step !state; | ||||
|     }; | ||||
|     from !state; | ||||
|   }; | ||||
| }; | ||||
|  | ||||
| let module Test = { | ||||
|   open Syntax.Term; | ||||
|   let l e => Lam e; | ||||
|   let ( *@ ) e0 e1 => App e0 e1; | ||||
|   let ff = l (l (Var 1)); | ||||
|   let tt = l (l (Var 0)); | ||||
|   let zero = l (l (Var 1)); | ||||
|   let succ = l (l (l (Var 0 *@ Var 2))); | ||||
|   let one = succ *@ zero; | ||||
|   let two = succ *@ one; | ||||
|   let three = succ *@ two; | ||||
|   let const = l (l (Var 1)); | ||||
|   let fix = l (l (Var 1 *@ (Var 0 *@ Var 0)) *@ l (Var 1 *@ (Var 0 *@ Var 0))); | ||||
|   let add = fix *@ l (l (l (Var 1 *@ Var 0 *@ l (succ *@ Var 3 *@ Var 0 *@ Var 1)))); | ||||
|   let init = l (l (Var 0) *@ l (l (Var 1))); | ||||
| }; | ||||
|  | ||||
| let module Run = { | ||||
|   let go () => Machine.norm Test.init; | ||||
| }; | ||||
							
								
								
									
										308
									
								
								samples/Reason/SuperMerlin.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										308
									
								
								samples/Reason/SuperMerlin.re
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,308 @@ | ||||
| /* | ||||
|  * Copyright (c) 2015-present, Facebook, Inc. | ||||
|  * All rights reserved. | ||||
|  * | ||||
|  */ | ||||
| let startedMerlin: ref (option Js.Unsafe.any) = {contents: None}; | ||||
|  | ||||
| let fixedEnv = Js.Unsafe.js_expr "require('../lib/fixedEnv')"; | ||||
|  | ||||
| /* This and the subsequent big js blocks are copied over from Nuclide. More convenient for now. */ | ||||
| let findNearestMerlinFile' = Js.Unsafe.js_expr {| | ||||
|   function findNearestMerlinFile(beginAtFilePath) { | ||||
|     var path = require('path'); | ||||
|     var fs = require('fs'); | ||||
|     var fileDir = path.dirname(beginAtFilePath); | ||||
|     var currentPath = path.resolve(fileDir); | ||||
|     do { | ||||
|       var fileToFind = path.join(currentPath, '.merlin'); | ||||
|       var hasFile = fs.existsSync(fileToFind); | ||||
|       if (hasFile) { | ||||
|         return path.dirname(currentPath); | ||||
|       } | ||||
|  | ||||
|       if (path.dirname(currentPath) === currentPath) { | ||||
|         // Bail | ||||
|         return '.'; | ||||
|       } | ||||
|       currentPath = path.dirname(currentPath); | ||||
|     } while (true); | ||||
|   } | ||||
| |}; | ||||
|  | ||||
| let findNearestMerlinFile beginAtFilePath::path => { | ||||
|   let result = Js.Unsafe.fun_call findNearestMerlinFile' [|Js.Unsafe.inject (Js.string path)|]; | ||||
|   Js.to_string result | ||||
| }; | ||||
|  | ||||
| let createMerlinReaderFnOnce' = Js.Unsafe.js_expr {| | ||||
|   function(ocamlMerlinPath, ocamlMerlinFlags, dotMerlinDir, fixedEnv) { | ||||
|     var spawn = require('child_process').spawn; | ||||
|     // To split while stripping out any leading/trailing space, we match on all | ||||
|     // *non*-whitespace. | ||||
|     var items = ocamlMerlinFlags === '' ? [] : ocamlMerlinFlags.split(/\s+/); | ||||
|     var merlinProcess = spawn(ocamlMerlinPath, items, {cwd: dotMerlinDir}); | ||||
|     merlinProcess.stderr.on('data', function(d) { | ||||
|       console.error('Ocamlmerlin: something wrong happened:'); | ||||
|       console.error(d.toString()); | ||||
|     }); | ||||
|  | ||||
|     merlinProcess.stdout.on('close', function(d) { | ||||
|       console.error('Ocamlmerlin: closed.'); | ||||
|     }); | ||||
|  | ||||
|     var cmdQueue = []; | ||||
|     var hasStartedReading = false; | ||||
|  | ||||
|     var readline = require('readline'); | ||||
|     var reader = readline.createInterface({ | ||||
|       input: merlinProcess.stdout, | ||||
|       terminal: false, | ||||
|     }); | ||||
|  | ||||
|     return function(cmd, resolve, reject) { | ||||
|       cmdQueue.push([resolve, reject]); | ||||
|  | ||||
|       if (!hasStartedReading) { | ||||
|         hasStartedReading = true; | ||||
|         reader.on('line', function(line) { | ||||
|           var response; | ||||
|           try { | ||||
|             response = JSON.parse(line); | ||||
|           } catch (err) { | ||||
|             response = null; | ||||
|           } | ||||
|           var resolveReject = cmdQueue.shift(); | ||||
|           var resolve = resolveReject[0]; | ||||
|           var reject = resolveReject[1]; | ||||
|  | ||||
|           if (!response || !Array.isArray(response) || response.length !== 2) { | ||||
|             reject(new Error('Unexpected ocamlmerlin output format: ' + line)); | ||||
|             return; | ||||
|           } | ||||
|  | ||||
|           var status = response[0]; | ||||
|           var content = response[1]; | ||||
|  | ||||
|           var errorResponses = { | ||||
|             'failure': true, | ||||
|             'error': true, | ||||
|             'exception': true, | ||||
|           }; | ||||
|  | ||||
|           if (errorResponses[status]) { | ||||
|             reject(new Error('Ocamlmerlin returned an error: ' + line)); | ||||
|             return; | ||||
|           } | ||||
|  | ||||
|           resolve(content); | ||||
|         }); | ||||
|       } | ||||
|  | ||||
|       merlinProcess.stdin.write(JSON.stringify(cmd)); | ||||
|     }; | ||||
|   } | ||||
| |}; | ||||
|  | ||||
| let createMerlinReaderFnOnce | ||||
|     pathToMerlin::pathToMerlin | ||||
|     merlinFlags::merlinFlags | ||||
|     dotMerlinPath::dotMerlinPath => | ||||
|   Js.Unsafe.fun_call | ||||
|     createMerlinReaderFnOnce' | ||||
|     [| | ||||
|       Js.Unsafe.inject (Js.string pathToMerlin), | ||||
|       Js.Unsafe.inject (Js.string merlinFlags), | ||||
|       Js.Unsafe.inject (Js.string dotMerlinPath), | ||||
|       Js.Unsafe.inject fixedEnv | ||||
|     |]; | ||||
|  | ||||
| let startMerlinProcess path::path => | ||||
|   switch startedMerlin.contents { | ||||
|   | Some readerFn => () | ||||
|   | None => | ||||
|     let atomReasonPathToMerlin = Atom.Config.get "atom-reason.pathToMerlin"; | ||||
|     let atomReasonMerlinFlags = Atom.Config.get "atom-reason.merlinFlags"; | ||||
|     let atomReasonMerlinLogFile = Atom.Config.get "atom-reason.merlinLogFile"; | ||||
|     switch atomReasonMerlinLogFile { | ||||
|     | JsonString "" => () | ||||
|     | JsonString s => Atom.Env.setEnvVar "MERLIN_LOG" s | ||||
|     | _ => () | ||||
|     }; | ||||
|     let readerFn = | ||||
|       createMerlinReaderFnOnce | ||||
|         pathToMerlin::(Atom.JsonValue.unsafeExtractString atomReasonPathToMerlin) | ||||
|         merlinFlags::(Atom.JsonValue.unsafeExtractString atomReasonMerlinFlags) | ||||
|         dotMerlinPath::(findNearestMerlinFile beginAtFilePath::path); | ||||
|     startedMerlin.contents = Some readerFn | ||||
|   }; | ||||
|  | ||||
| let readOneLine cmd::cmd resolve reject => | ||||
|   switch startedMerlin.contents { | ||||
|   | None => raise Not_found | ||||
|   | Some readerFn => | ||||
|     Js.Unsafe.fun_call | ||||
|       readerFn | ||||
|       [| | ||||
|         Js.Unsafe.inject cmd, | ||||
|         Js.Unsafe.inject (Js.wrap_callback resolve), | ||||
|         Js.Unsafe.inject (Js.wrap_callback reject) | ||||
|       |] | ||||
|   }; | ||||
|  | ||||
| /* contextify is important for avoiding different buffers calling the backing merlin at the same time. */ | ||||
| /* https://github.com/the-lambda-church/merlin/blob/d98a08d318ca14d9c702bbd6eeadbb762d325ce7/doc/dev/PROTOCOL.md#contextual-commands */ | ||||
| let contextify query::query path::path => Js.Unsafe.obj [| | ||||
|   ("query", Js.Unsafe.inject query), | ||||
|   ("context", Js.Unsafe.inject (Js.array [|Js.string "auto", Js.string path|])) | ||||
| |]; | ||||
|  | ||||
| let prepareCommand text::text path::path query::query resolve reject => { | ||||
|   startMerlinProcess path; | ||||
|   /* These two commands should be run before every main command. */ | ||||
|   readOneLine | ||||
|     cmd::( | ||||
|       contextify | ||||
|         /* The protocol command tells Merlin which API version we want to use. (2 for us) */ | ||||
|         query::( | ||||
|           Js.array [| | ||||
|             Js.Unsafe.inject (Js.string "protocol"), | ||||
|             Js.Unsafe.inject (Js.string "version"), | ||||
|             Js.Unsafe.inject (Js.number_of_float 2.) | ||||
|           |] | ||||
|         ) | ||||
|         path::path | ||||
|     ) | ||||
|     ( | ||||
|       fun _ => | ||||
|         readOneLine | ||||
|           cmd::( | ||||
|             contextify | ||||
|               /* The tell command allows us to synchronize our text with Merlin's internal buffer. */ | ||||
|               query::( | ||||
|                 Js.array [|Js.string "tell", Js.string "start", Js.string "end", Js.string text|] | ||||
|               ) | ||||
|               path::path | ||||
|           ) | ||||
|           (fun _ => readOneLine cmd::(contextify query::query path::path) resolve reject) | ||||
|           reject | ||||
|     ) | ||||
|     reject | ||||
| }; | ||||
|  | ||||
| let positionToJsMerlinPosition (line, col) => Js.Unsafe.obj [| | ||||
|   /* lines (rows) are 1-based for merlin, not 0-based, like for Atom */ | ||||
|   ("line", Js.Unsafe.inject (Js.number_of_float (float_of_int (line + 1)))), | ||||
|   ("col", Js.Unsafe.inject (Js.number_of_float (float_of_int col))) | ||||
| |]; | ||||
|  | ||||
| /* Actual merlin commands we'll use. */ | ||||
| let getTypeHint path::path text::text position::position resolve reject => | ||||
|   prepareCommand | ||||
|     text::text | ||||
|     path::path | ||||
|     query::( | ||||
|       Js.array [| | ||||
|         Js.Unsafe.inject (Js.string "type"), | ||||
|         Js.Unsafe.inject (Js.string "enclosing"), | ||||
|         Js.Unsafe.inject (Js.string "at"), | ||||
|         Js.Unsafe.inject (positionToJsMerlinPosition position) | ||||
|       |] | ||||
|     ) | ||||
|     resolve | ||||
|     reject; | ||||
|  | ||||
| let getAutoCompleteSuggestions | ||||
|     path::path | ||||
|     text::text | ||||
|     position::position | ||||
|     prefix::prefix | ||||
|     resolve | ||||
|     reject => | ||||
|   prepareCommand | ||||
|     text::text | ||||
|     path::path | ||||
|     query::( | ||||
|       Js.array [| | ||||
|         Js.Unsafe.inject (Js.string "complete"), | ||||
|         Js.Unsafe.inject (Js.string "prefix"), | ||||
|         Js.Unsafe.inject (Js.string prefix), | ||||
|         Js.Unsafe.inject (Js.string "at"), | ||||
|         Js.Unsafe.inject (positionToJsMerlinPosition position), | ||||
|         Js.Unsafe.inject (Js.string "with"), | ||||
|         Js.Unsafe.inject (Js.string "doc") | ||||
|       |] | ||||
|     ) | ||||
|     resolve | ||||
|     reject; | ||||
|  | ||||
| let getDiagnostics path::path text::text resolve reject => | ||||
|   prepareCommand | ||||
|     text::text | ||||
|     path::path | ||||
|     query::(Js.array [|Js.Unsafe.inject (Js.string "errors")|]) | ||||
|     resolve | ||||
|     reject; | ||||
|  | ||||
| let locate path::path text::text extension::extension position::position resolve reject => | ||||
|   prepareCommand | ||||
|     text::text | ||||
|     path::path | ||||
|     query::( | ||||
|       Js.array [| | ||||
|         Js.Unsafe.inject (Js.string "locate"), | ||||
|         Js.Unsafe.inject (Js.string ""), | ||||
|         Js.Unsafe.inject (Js.string extension), | ||||
|         Js.Unsafe.inject (Js.string "at"), | ||||
|         Js.Unsafe.inject (positionToJsMerlinPosition position) | ||||
|       |] | ||||
|     ) | ||||
|     resolve | ||||
|     reject; | ||||
|  | ||||
| /* reject */ | ||||
| let getOccurrences path::path text::text position::position resolve reject => | ||||
|   prepareCommand | ||||
|     text::text | ||||
|     path::path | ||||
|     query::( | ||||
|       Js.array [| | ||||
|         Js.Unsafe.inject (Js.string "occurrences"), | ||||
|         Js.Unsafe.inject (Js.string "ident"), | ||||
|         Js.Unsafe.inject (Js.string "at"), | ||||
|         Js.Unsafe.inject (positionToJsMerlinPosition position) | ||||
|       |] | ||||
|     ) | ||||
|     resolve | ||||
|     reject; | ||||
|  | ||||
| let destruct | ||||
|     path::path | ||||
|     text::text | ||||
|     startPosition::startPosition | ||||
|     endPosition::endPosition | ||||
|     resolve | ||||
|     reject => | ||||
|   prepareCommand | ||||
|     text::text | ||||
|     path::path | ||||
|     query::( | ||||
|       Js.array [| | ||||
|         Js.Unsafe.inject (Js.string "case"), | ||||
|         Js.Unsafe.inject (Js.string "analysis"), | ||||
|         Js.Unsafe.inject (Js.string "from"), | ||||
|         Js.Unsafe.inject (positionToJsMerlinPosition startPosition), | ||||
|         Js.Unsafe.inject (Js.string "to"), | ||||
|         Js.Unsafe.inject (positionToJsMerlinPosition endPosition) | ||||
|       |] | ||||
|     ) | ||||
|     resolve | ||||
|     reject; | ||||
|  | ||||
| let getOutline path::path text::text resolve reject => | ||||
|   prepareCommand | ||||
|     text::text | ||||
|     path::path | ||||
|     query::(Js.array [|Js.Unsafe.inject (Js.string "outline")|]) | ||||
|     resolve | ||||
|     reject; | ||||
							
								
								
									
										989
									
								
								samples/Reason/Syntax.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										989
									
								
								samples/Reason/Syntax.re
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,989 @@ | ||||
| /* Copyright (c) 2015-present, Facebook, Inc. All rights reserved. */ | ||||
| [@@@autoFormat let wrap = 80; let shift = 2]; | ||||
|  | ||||
| Modules.run (); | ||||
|  | ||||
| Polymorphism.run (); | ||||
|  | ||||
| Variants.run (); | ||||
|  | ||||
| BasicStructures.run (); | ||||
|  | ||||
| TestUtils.printSection "General Syntax"; | ||||
|  | ||||
| /* Won't work! */ | ||||
| /* let matchingFunc a = match a with */ | ||||
| /*   `Thingy x => (print_string "matched thingy x"); x */ | ||||
| /*   | `Other x => (print_string "matched other x"); x;; */ | ||||
| /*  */ | ||||
| let matchingFunc a => | ||||
|   switch a { | ||||
|   | `Thingy x => | ||||
|     print_string "matched thingy x"; | ||||
|     let zz = 10; | ||||
|     zz | ||||
|   | `Other x => | ||||
|     print_string "matched other x"; | ||||
|     x | ||||
|   }; | ||||
|  | ||||
| type firstTwoShouldBeGroupedInParens = | ||||
|   (int => int) => int => int; | ||||
|  | ||||
| type allParensCanBeRemoved = | ||||
|   int => int => int => int; | ||||
|  | ||||
| type firstTwoShouldBeGroupedAndFirstThree = | ||||
|   ((int => int) => int) => int; | ||||
|  | ||||
| /* Same thing now but with type constructors instead of each int */ | ||||
| type firstTwoShouldBeGroupedInParens = | ||||
|   (list int => list int) => list int => list int; | ||||
|  | ||||
| type allParensCanBeRemoved = | ||||
|   list int => list int => list int => list int; | ||||
|  | ||||
| type firstTwoShouldBeGroupedAndFirstThree = | ||||
|   ((list int => list int) => list int) => | ||||
|   list int; | ||||
|  | ||||
| type myRecordType = { | ||||
|   firstTwoShouldBeGroupedInParens: | ||||
|     (int => int) => int => int, | ||||
|   allParensCanBeRemoved: | ||||
|     int => int => int => int, | ||||
|   firstTwoShouldBeGroupedAndFirstThree: | ||||
|     ((int => int) => int) => int | ||||
| }; | ||||
|  | ||||
| type firstNamedArgShouldBeGroupedInParens = | ||||
|   first::(int => int) => second::int => int; | ||||
|  | ||||
| type allParensCanBeRemoved = | ||||
|   first::int => second::int => third::int => int; | ||||
|  | ||||
| type firstTwoShouldBeGroupedAndFirstThree = | ||||
|   first::((int => int) => int) => int; | ||||
|  | ||||
| /* Same thing now, but with type constructors instead of int */ | ||||
| type firstNamedArgShouldBeGroupedInParens = | ||||
|   first::(list int => list int) => | ||||
|   second::list int => | ||||
|   list int; | ||||
|  | ||||
| type allParensCanBeRemoved = | ||||
|   first::list int => | ||||
|   second::list int => | ||||
|   third::list int => | ||||
|   list int; | ||||
|  | ||||
| type firstTwoShouldBeGroupedAndFirstThree = | ||||
|   first::((list int => list int) => list int) => | ||||
|   list int; | ||||
|  | ||||
| type firstNamedArgShouldBeGroupedInParens = | ||||
|   first::(int => int)? => | ||||
|   second::int list? => | ||||
|   int; | ||||
|  | ||||
| /* The arrow necessitates parens around the next two args. The ? isn't what | ||||
|  * makes the parens necessary. */ | ||||
| type firstNamedArgShouldBeGroupedInParensAndSecondNamedArg = | ||||
|   first::(int => int)? => | ||||
|   second::(int => int)? => | ||||
|   int; | ||||
|  | ||||
| type allParensCanBeRemoved = | ||||
|   first::int? => | ||||
|   second::int? => | ||||
|   third::int? => | ||||
|   int; | ||||
|  | ||||
| type firstTwoShouldBeGroupedAndFirstThree = | ||||
|   first::((int => int) => int) => int; | ||||
|  | ||||
| type noParens = | ||||
|   one::int => int => int => two::int => int; | ||||
|  | ||||
| type noParensNeeded = | ||||
|   one::int => int => int => two::int => int; | ||||
|  | ||||
| type firstNamedArgNeedsParens = | ||||
|   one::(int => int => int) => two::int => int; | ||||
|  | ||||
| /* Now, let's try type aliasing */ | ||||
| /* Unless wrapped in parens, types between arrows may not be aliased, may not | ||||
|  * themselves be arrows. */ | ||||
| type parensRequiredAroundFirstArg = | ||||
|   (list int as 'a) => int as 'a; | ||||
|  | ||||
| type parensRequiredAroundReturnType = | ||||
|   (list int as 'a) => (int as 'a); | ||||
|  | ||||
| type parensRequiredAroundReturnType = | ||||
|   (list int as 'a) => (int as 'a) as 'b; | ||||
|  | ||||
| type noParensNeededWhenInTuple = | ||||
|   (list int as 'a, list int as 'b) as 'entireThing; | ||||
|  | ||||
| type myTypeDef 'a = list 'a; | ||||
|  | ||||
| type instatiatedTypeDef = myTypeDef int => int; | ||||
|  | ||||
| /* Test a type attribute for good measure */ | ||||
| /* We should clean up all of the attribute tagging eventually, but for now, | ||||
|  * let's make it super ugly to get out of the way of all the formatting/parsing | ||||
|  * implementations (fewer conflicts during parsing, fewer edge cases during | ||||
|  * printing). | ||||
|  */ | ||||
| type something = ( | ||||
|   int, | ||||
|   int [@lookAtThisAttribute] | ||||
| ); | ||||
|  | ||||
| type longWrappingTypeDefinitionExample = | ||||
|   M_RK__G.Types.instance | ||||
|     (TGRecognizer.tGFields unit unit) | ||||
|     (TGRecognizer.tGMethods unit unit); | ||||
|  | ||||
| type semiLongWrappingTypeDefinitionExample = | ||||
|   M_RK__Gesture.Types.instance | ||||
|     TGRecognizerFinal.tGFields | ||||
|     TGRecognizerFinal.tGMethods; | ||||
|  | ||||
| type semiLongWrappingTypeWithConstraint = | ||||
|   M_RK__Gesture.Types.instance | ||||
|     'a | ||||
|     TGRecognizerFinal.tGFields | ||||
|     TGRecognizerFinal.tGMethods | ||||
| constraint 'a = (unit, unit); | ||||
|  | ||||
| type onelineConstrain = 'a constraint 'a = int; | ||||
|  | ||||
| /* This must be in trunk but not in this branch of OCaml */ | ||||
| /* type withNestedRecords = MyConstructor {myField: int} */ | ||||
| type colors = | ||||
|   | Red int | ||||
|   | Black int | ||||
|   | Green int; | ||||
|  | ||||
| /* Another approach is to require declared variants to wrap any record */ | ||||
| /* type myRecord = MyRecord {name: int}; */ | ||||
| /* let myValue = MyRecord {name: int}; */ | ||||
| /* This would force importing of the module */ | ||||
| /* This would also lend itself naturally to pattern matching - and avoid having | ||||
|    to use `.` operator at all since you normally destructure. */ | ||||
| type nameBlahType = {nameBlah: int}; | ||||
|  | ||||
| let myRecord = {nameBlah: 20}; | ||||
|  | ||||
| let myRecordName = myRecord.nameBlah; | ||||
|  | ||||
| let {nameBlah}: nameBlahType = {nameBlah: 20}; | ||||
|  | ||||
| print_int nameBlah; | ||||
|  | ||||
| let {nameBlah: aliasedToThisVar}: nameBlahType = { | ||||
|   nameBlah: 20 | ||||
| }; | ||||
|  | ||||
| print_int aliasedToThisVar; | ||||
|  | ||||
| let desiredFormattingForWrappedLambda: | ||||
|   int => int => int => nameBlahType = | ||||
|   /* | ||||
|  | ||||
|    fun is | ||||
|    pre-   /firstarg\ | ||||
|    fix   /-coupled--\ | ||||
|     |-\ /-to-prefix--\       */ | ||||
|   fun curriedArg anotherArg lastArg => { | ||||
|     nameBlah: 10 | ||||
|   }; | ||||
|  | ||||
| type longerInt = int; | ||||
|  | ||||
| let desiredFormattingForWrappedLambdaWrappedArrow: | ||||
|   longerInt => | ||||
|   longerInt => | ||||
|   longerInt => | ||||
|   nameBlahType = | ||||
|   /* | ||||
|  | ||||
|    fun is | ||||
|    pre-   /firstarg\ | ||||
|    fix   /-coupled--\ | ||||
|     |-\ /-to-prefix--\       */ | ||||
|   fun curriedArg anotherArg lastArg => { | ||||
|     nameBlah: 10 | ||||
|   }; | ||||
|  | ||||
| let desiredFormattingForWrappedLambdaReturnOnNewLine | ||||
|     /* | ||||
|  | ||||
|      fun is | ||||
|      pre-   /firstarg\ | ||||
|      fix   /-coupled--\ | ||||
|       |-\ /-to-prefix--\       */ | ||||
|     curriedArg | ||||
|     anotherArg | ||||
|     lastArg => { | ||||
|   nameBlah: 10 | ||||
| }; | ||||
|  | ||||
| /* | ||||
|  let is | ||||
|  pre- | ||||
|  fix    /-function binding name---\ | ||||
|  |-\   / is coupled to prefix      \   */ | ||||
| let desiredFormattingForWrappedSugar | ||||
|     curriedArg | ||||
|     anotherArg | ||||
|     lastArg => { | ||||
|   nameBlah: 10 | ||||
| }; | ||||
|  | ||||
| /* | ||||
|  let is | ||||
|  pre- | ||||
|  fix    /-function binding name---\ | ||||
|  |-\   / is coupled to prefix      \   */ | ||||
| let desiredFormattingForWrappedSugarReturnOnNewLine | ||||
|     curriedArg | ||||
|     anotherArg | ||||
|     lastArg => { | ||||
|   nameBlah: 10 | ||||
| }; | ||||
|  | ||||
| /* | ||||
|    let  : type t1 t2. t1 * t2 list -> t1 = ... | ||||
|    let rec f : 't1 't2. 't1 * 't2 list -> 't1 = | ||||
|      fun (type t1) (type t2) -> (... : t1 * t2 list -> t1) | ||||
|  */ | ||||
| type point = {x: int, y: int}; | ||||
|  | ||||
| type point3D = {x: int, y: int, z: int}; | ||||
|  | ||||
| let point2D = {x: 20, y: 30}; | ||||
|  | ||||
| let point3D: point3D = { | ||||
|   x: 10, | ||||
|   y: 11, | ||||
|   z: 80 /* Optional Comma */ | ||||
| }; | ||||
|  | ||||
| let printPoint (p: point) => { | ||||
|   print_int p.x; | ||||
|   print_int p.y | ||||
| }; | ||||
|  | ||||
| let addPoints (p1: point, p2: point) => { | ||||
|   x: p1.x + p2.x, | ||||
|   y: p1.y + p2.y | ||||
| }; | ||||
|  | ||||
| let res1 = printPoint point2D; | ||||
|  | ||||
| let res2 = | ||||
|   printPoint {x: point3D.x, y: point3D.y}; | ||||
|  | ||||
| /* | ||||
|     When () were used to indicate sequences, the parser used seq_expr not only | ||||
|     for grouping sequences, but also to form standard precedences. | ||||
|                           /------- sequence_expr ------\ | ||||
|     let res3 = printPoint (addPoints (point2D, point3D)); | ||||
|  | ||||
|     Interestingly, it knew that tuples aren't sequences. | ||||
|  | ||||
|     To move towards semi delimited, semi-terminated, braces-grouped sequences: | ||||
|     while allowing any non-sequence expression to be grouped on parens, we make | ||||
|     an explicit rule that allows one single non-semi ended expression to be | ||||
|     grouped in parens. | ||||
|  | ||||
|     Actually: We will allow an arbitrary number of semi-delimited expressions to | ||||
|     be wrapped in parens, but the braces grouped semi delimited (sequence) | ||||
|     expressions must *also* be terminated with a semicolon. | ||||
|  | ||||
|     This allows the parser to distinguish between | ||||
|  | ||||
|         let x = {a};    /* Record {a:a} */ | ||||
|         let x = {a;};   /* Single item sequence returning identifier {a} */ | ||||
|  */ | ||||
| let res3 = | ||||
|   printPoint ( | ||||
|     addPoints ( | ||||
|       point2D, | ||||
|       {x: point3D.x, y: point3D.y} | ||||
|     ) | ||||
|   ); | ||||
|  | ||||
| type person = {age: int, name: string}; | ||||
|  | ||||
| type hiredPerson = { | ||||
|   age: string, | ||||
|   name: string, | ||||
|   dateHired: int | ||||
| }; | ||||
|  | ||||
| let o: person = {name: "bob", age: 10}; | ||||
|  | ||||
| /* Parens needed? Nope! */ | ||||
| let o: person = {name: "bob", age: 10}; | ||||
|  | ||||
| let printPerson (p: person) => { | ||||
|   let q: person = p; | ||||
|   p.name ^ p.name | ||||
| }; | ||||
|  | ||||
| /* let dontParseMeBro x y:int = x = y;*/ | ||||
| /* With this unification, anywhere eyou see `= fun` you can just ommit it */ | ||||
| let blah a => a; /* Done */ | ||||
|  | ||||
| let blah a => a; /* Done (almost) */ | ||||
|  | ||||
| let blah a b => a; /* Done */ | ||||
|  | ||||
| let blah a b => a; /* Done (almost) */ | ||||
|  | ||||
| /* More than one consecutive pattern must have a single case */ | ||||
| type blah = {blahBlah: int}; | ||||
|  | ||||
| let blah a {blahBlah} => a; | ||||
|  | ||||
| let blah a {blahBlah} => a; | ||||
|  | ||||
| let module TryToExportTwice = { | ||||
|   let myVal = "hello"; | ||||
| }; | ||||
|  | ||||
| /* | ||||
|    Unifying top level module syntax with local module syntax is probably a bad | ||||
|    idea at the moment because it makes it more difficult to continue to support | ||||
|    `let .. in` bindings. We can distinguish local modules for `let..in` that | ||||
|    just happen to be defined at the top level (but not exported). | ||||
|  | ||||
|      let MyModule = {let myVal = 20;} in | ||||
|      MyModule.x | ||||
|  | ||||
|    Wait, where would this ever be valid, even if we continued to support | ||||
|    `let..in`? | ||||
|  */ | ||||
| let onlyDoingThisTopLevelLetToBypassTopLevelSequence = { | ||||
|   let x = { | ||||
|     print_int 1; | ||||
|     print_int 20 /* Missing trailing SEMI */ | ||||
|   }; | ||||
|   let x = { | ||||
|     print_int 1; | ||||
|     print_int 20; /* Ensure missing middle SEMI reported well */ | ||||
|     print_int 20 | ||||
|   }; | ||||
|   let x = { | ||||
|     print_int 1; | ||||
|     print_int 20; | ||||
|     10 | ||||
|     /* Comment in final position */ | ||||
|   }; /* Missing final SEMI */ | ||||
|   x + x | ||||
| }; | ||||
|  | ||||
| type hasA = {a: int}; | ||||
|  | ||||
| let a = 10; | ||||
|  | ||||
| let returnsASequenceExpressionWithASingleIdentifier | ||||
|     () => a; | ||||
|  | ||||
| let thisReturnsA () => a; | ||||
|  | ||||
| let thisReturnsAAsWell () => a; | ||||
|  | ||||
| let recordVal: int = (thisReturnsARecord ()).a; | ||||
|  | ||||
| Printf.printf | ||||
|   "\nproof that thisReturnsARecord: %n\n" | ||||
|   recordVal; | ||||
|  | ||||
| Printf.printf | ||||
|   "\nproof that thisReturnsA: %n\n" | ||||
|   (thisReturnsA ()); | ||||
|  | ||||
| /* Pattern matching */ | ||||
| let blah arg => | ||||
|   switch arg { | ||||
|   /* Comment before Bar */ | ||||
|   | /* Comment between bar/pattern */ Red _ => 1 | ||||
|   /* Comment Before non-first bar */ | ||||
|   | /* Comment betwen bar/pattern */ Black _ => 0 | ||||
|   | Green _ => 0 | ||||
|   }; | ||||
|  | ||||
| /* Any function that pattern matches a multicase match is interpretted as a | ||||
|  * single arg that is then matched on. Instead of the above `blah` example:*/ | ||||
| let blah = | ||||
|   fun | ||||
|   | Red _ => 1 | ||||
|   | Black _ => 0 | ||||
|   | Green _ => 1; | ||||
|  | ||||
| /* `fun a => a` is read as "a function that maps a to a". Then the */ | ||||
| /* above example is read: "a function that 'either maps' Red to.. or maps .." */ | ||||
| /* Thc00f564e first bar is read as "either maps" */ | ||||
| /* Curried form is not supported: | ||||
|       let blah x | Red _ => 1 | Black _ => 0; | ||||
|       Theres no sugar rule for dropping => fun, only = fun | ||||
|    */ | ||||
| /* let blahCurriedX x => fun  /* See, nothing says we can drop the => fun */ */ | ||||
| /*   |(Red x | Black x | Green x) => 1     /* With some effort, we can ammend the sugar rule that would */ */ | ||||
| /*   | Black x => 0                       /* Allow us to drop any => fun.. Just need to make pattern matching */ */ | ||||
| /*   | Green x => 0;                      /* Support that */ */ | ||||
| /*  */ | ||||
| let blahCurriedX x => | ||||
|   fun | ||||
|   | Red x | ||||
|   | Black x | ||||
|   | Green x => | ||||
|     1 /* With some effort, we can ammend the sugar rule that would */ | ||||
|   | Black x => 0 /* Allow us to drop any => fun.. Just need to make pattern matching */ | ||||
|   | Green x => 0; /* Support that */ | ||||
|  | ||||
| let sameThingInLocal = { | ||||
|   let blahCurriedX x => | ||||
|     fun | ||||
|     | Red x | ||||
|     | Black x | ||||
|     | Green x => | ||||
|       1 /* With some effort, we can ammend the sugar rule that would */ | ||||
|     | Black x => 0 /* Allow us to drop any => fun.. Just need to make pattern matching */ | ||||
|     | Green x => 0; /* Support that */ | ||||
|   blahCurriedX | ||||
| }; | ||||
|  | ||||
| /* This should be parsed/printed exactly as the previous */ | ||||
| let blahCurriedX x => | ||||
|   fun | ||||
|   | Red x | ||||
|   | Black x | ||||
|   | Green x => 1 | ||||
|   | Black x => 0 | ||||
|   | Green x => 0; | ||||
|  | ||||
| /* Any time there are multiple match cases we require a leading BAR */ | ||||
| let v = Red 10; | ||||
|  | ||||
| let Black x | Red x | Green x = v; /* So this NON-function still parses */ | ||||
|  | ||||
| /* This doesn't parse, however (and it doesn't in OCaml either): | ||||
|      let | Black x | Red x | Green x = v; | ||||
|    */ | ||||
| print_int x; | ||||
|  | ||||
| /* Scoping: Let sequences. Familiar syntax for lexical ML style scope and | ||||
|    sequences. */ | ||||
| let res = { | ||||
|   let a = "a starts out as"; | ||||
|   { | ||||
|     print_string a; | ||||
|     let a = 20; | ||||
|     print_int a | ||||
|   }; | ||||
|   print_string a | ||||
| }; | ||||
|  | ||||
| let res = { | ||||
|   let a = "first its a string"; | ||||
|   let a = 20; | ||||
|   print_int a; | ||||
|   print_int a; | ||||
|   print_int a | ||||
| }; | ||||
|  | ||||
| let res = { | ||||
|   let a = "a is always a string"; | ||||
|   print_string a; | ||||
|   let b = 30; | ||||
|   print_int b | ||||
| }; | ||||
|  | ||||
| /* let result = LyList.map (fun | [] => true | _ => false) []; */ | ||||
| /* OTHERWISE: You cannot tell if a is the first match case falling through or | ||||
|  * a curried first arg */ | ||||
| /* let blah = fun a | patt => 0 | anotherPatt => 1; */ | ||||
| /* let blah a patt => 0 | anotherPatt => 1; */ | ||||
| /*simple pattern  EQUALGREATER      expr */ | ||||
| let blah a {blahBlah} => a; | ||||
|  | ||||
| /*            match_case             */ | ||||
| /*     pattern EQUALGREATER  expr */ | ||||
| let blah = | ||||
|   fun | ||||
|   | Red _ => 1 | ||||
|   | Black _ => 0 | ||||
|   | Green _ => 0; | ||||
|  | ||||
| /* Won't work! */ | ||||
| /* let arrowFunc = fun a b => print_string "returning aplusb from arrow"; a + b;;  */ | ||||
| let arrowFunc a b => { | ||||
|   print_string "returning aplusb from arrow"; | ||||
|   a + b | ||||
| }; | ||||
|  | ||||
| let add a b => { | ||||
|   let extra = { | ||||
|     print_string "adding"; | ||||
|     0 | ||||
|   }; | ||||
|   let anotherExtra = 0; | ||||
|   extra + a + b + anotherExtra | ||||
| }; | ||||
|  | ||||
| print_string (string_of_int (add 4 34)); | ||||
|  | ||||
| let dummy _ => 10; | ||||
|  | ||||
| dummy res1; | ||||
|  | ||||
| dummy res2; | ||||
|  | ||||
| dummy res3; | ||||
|  | ||||
| /* Some edge cases */ | ||||
| let myFun firstArg (Red x | Black x | Green x) => | ||||
|   firstArg + x; | ||||
|  | ||||
| let matchesWithWhen a => | ||||
|   switch a { | ||||
|   | Red x when 1 > 0 => 10 | ||||
|   | Red _ => 10 | ||||
|   | Black x => 10 | ||||
|   | Green x => 10 | ||||
|   }; | ||||
|  | ||||
| let matchesWithWhen = | ||||
|   fun | ||||
|   | Red x when 1 > 0 => 10 | ||||
|   | Red _ => 10 | ||||
|   | Black x => 10 | ||||
|   | Green x => 10; | ||||
|  | ||||
| let matchesOne (`Red x) => 10; | ||||
|  | ||||
| /* | ||||
|  Typical OCaml would make you *wrap the functions in parens*! This is because it | ||||
|  can't tell if a semicolon is a sequence operator. Even if we had records use | ||||
|  commas to separate fields, | ||||
|  */ | ||||
| type adders = { | ||||
|   addTwoNumbers: int => int => int, | ||||
|   addThreeNumbers: int => int => int => int, | ||||
|   addThreeNumbersTupled: (int, int, int) => int | ||||
| }; | ||||
|  | ||||
| let myRecordWithFunctions = { | ||||
|   addTwoNumbers: fun a b => a + b, | ||||
|   addThreeNumbers: fun a b c => a + b + c, | ||||
|   addThreeNumbersTupled: fun (a, b, c) => | ||||
|     a + b + c | ||||
| }; | ||||
|  | ||||
| let result = | ||||
|   myRecordWithFunctions.addThreeNumbers 10 20 30; | ||||
|  | ||||
| let result = | ||||
|   myRecordWithFunctions.addThreeNumbersTupled ( | ||||
|     10, | ||||
|     20, | ||||
|     30 | ||||
|   ); | ||||
|  | ||||
| let lookTuplesRequireParens = (1, 2); | ||||
|  | ||||
| /* let thisDoesntParse = 1, 2;  */ | ||||
| let tupleInsideAParenSequence = { | ||||
|   print_string "look, a tuple inside a sequence"; | ||||
|   let x = 10; | ||||
|   (x, x) | ||||
| }; | ||||
|  | ||||
| let tupleInsideALetSequence = { | ||||
|   print_string "look, a tuple inside a sequence"; | ||||
|   let x = 10; | ||||
|   (x, x) | ||||
| }; | ||||
|  | ||||
| /* We *require* that function return types be wrapped in | ||||
|    parenthesis. In this example, there's no ambiguity */ | ||||
| let makeIncrementer (delta: int) :(int => int) => | ||||
|   fun a => a + delta; | ||||
|  | ||||
| /* We could even force that consistency with let bindings - it's allowed | ||||
|       currently but not forced. | ||||
|    */ | ||||
| let myAnnotatedValBinding: int = 10; | ||||
|  | ||||
| /* Class functions (constructors) and methods are unified in the same way */ | ||||
| class classWithNoArg = { | ||||
|   method x = 0; | ||||
|   method y = 0; | ||||
| }; | ||||
|  | ||||
| /* This parses but doesn't type check | ||||
|      class myClass init => object | ||||
|        method x => init | ||||
|        method y => init | ||||
|      end; | ||||
|    */ | ||||
| let myFunc (a: int) (b: int) :(int, int) => ( | ||||
|   a, | ||||
|   b | ||||
| ); | ||||
|  | ||||
| let myFunc (a: int) (b: int) :list int => [1]; | ||||
|  | ||||
| let myFunc (a: int) (b: int) :point => { | ||||
|   x: a, | ||||
|   y: b | ||||
| }; | ||||
|  | ||||
| let myFunc (a: int, b: int) :point => { | ||||
|   x: a, | ||||
|   y: b | ||||
| }; | ||||
|  | ||||
| type myThing = (int, int); | ||||
|  | ||||
| type stillARecord = {name: string, age: int}; | ||||
|  | ||||
| /* Rebase latest OCaml to get the following: And fixup | ||||
|    `generalized_constructor_arguments` according to master. */ | ||||
| /* type ('a, 'b) myOtherThing = Leaf {first:'a, second: 'b} | Null; */ | ||||
| type branch 'a 'b = {first: 'a, second: 'b}; | ||||
|  | ||||
| type myOtherThing 'a 'b = | ||||
|   | Leaf (branch 'a 'b) | ||||
|   | Null; | ||||
|  | ||||
| type yourThing = myOtherThing int int; | ||||
|  | ||||
| /* Conveniently - this parses exactly how you would intend! No *need* to wrap | ||||
|    in an extra [], but it doesn't hurt */ | ||||
| /* FIXME type lookAtThesePolyVariants = list [`Red] ; */ | ||||
| /* FIXME type bracketsGroupMultipleParamsAndPrecedence = list (list (list [`Red])); */ | ||||
| /* FIXME type youCanWrapExtraIfYouWant = (list [`Red]); */ | ||||
| /* FIXME type hereAreMultiplePolyVariants = list [`Red | `Black]; */ | ||||
| /* FIXME type hereAreMultiplePolyVariantsWithOptionalWrapping = list ([`Red | `Black]); */ | ||||
| /* | ||||
|    /* Proposal: ES6 style lambdas: */ | ||||
|  | ||||
|    /* Currying */ | ||||
|    let lookES6Style = (`Red x) (`Black y) => { }; | ||||
|    let lookES6Style (`Red x) (`Black y) => { }; | ||||
|  | ||||
|    /* Matching the single argument */ | ||||
|    let lookES6Style = oneArg => match oneArg with | ||||
|      | `Red x => x | ||||
|      | `Black x => x; | ||||
|  | ||||
|    /* The "trick" to currying that we already have is basically the same - we just | ||||
|     * have to reword it a bit: | ||||
|     * From: | ||||
|     * "Any time you see [let x = fun ...] just replace it with [let x ...]" | ||||
|     * To: | ||||
|     * "Any time you see [let x = ... => ] just replace it with [let x ... => ]" | ||||
|     */ | ||||
|    let lookES6Style oneArg => match oneArg with | ||||
|      | `Red x => x | ||||
|      | `Black x => x; | ||||
|  | ||||
|  */ | ||||
|  | ||||
| /** Current OCaml Named Arguments. Any aliasing is more than just aliasing! | ||||
|     OCaml allows full on pattern matching of named args. */ | ||||
| /* | ||||
|  A: let named              ~a    ~b                = aa + bb in | ||||
|  B: let namedAlias         ~a:aa ~b:bb             = aa + bb in | ||||
|  C: let namedAnnot         ~(a:int) ~(b:int)       = a + b in | ||||
|  D: let namedAliasAnnot    ~a:(aa:int) ~b:(bb:int) = aa + bb in | ||||
|  E: let optional           ?a    ?b                              = 10 in | ||||
|  F: let optionalAlias      ?a:aa ?b:bb                           = 10 in | ||||
|  G: let optionalAnnot      ?(a:int option) ?(b:int option)       = 10 in | ||||
|  H: let optionalAliasAnnot ?a:(aa:int option) ?b:(bb:int option) = 10 in | ||||
|  /* | ||||
|  Look! When a default is provided, annotation causes inferred type of argument | ||||
|  to not be "option" since it's automatically destructured (because we know it | ||||
|  will always be available one way or another.) | ||||
|  */ | ||||
|  I: let defOptional           ?(a=10)    ?(b=10)                 = 10 in | ||||
|  J: let defOptionalAlias      ?a:(aa=10) ?b:(bb=10)              = 10 in | ||||
|  K: let defOptionalAnnot      ?(a:int=10) ?(b:int=10)            = 10 in | ||||
|                              \       \ | ||||
|                               \label_let_pattern opt_default: no longer needed in SugarML | ||||
|  | ||||
|  L: let defOptionalAliasAnnot ?a:(aa:int=10) ?b:(bb:int=10)      = 10 in | ||||
|                                \        \ | ||||
|                                 \let_pattern: still a useful syntactic building block in SugarML | ||||
|  */ | ||||
|  | ||||
| /** | ||||
|  * In Reason, the syntax for named args uses double semicolon, since | ||||
|  * the syntax for lists uses ES6 style [], freeing up the ::. | ||||
|  */ | ||||
| let a = 10; | ||||
|  | ||||
| let b = 20; | ||||
|  | ||||
| /*A*/ | ||||
| let named a::a b::b => a + b; | ||||
|  | ||||
| type named = a::int => b::int => int; | ||||
|  | ||||
| /*B*/ | ||||
| let namedAlias a::aa b::bb => aa + bb; | ||||
|  | ||||
| let namedAlias a::aa b::bb => aa + bb; | ||||
|  | ||||
| type namedAlias = a::int => b::int => int; | ||||
|  | ||||
| /*C*/ | ||||
| let namedAnnot a::(a: int) b::(b: int) => 20; | ||||
|  | ||||
| /*D*/ | ||||
| let namedAliasAnnot a::(aa: int) b::(bb: int) => 20; | ||||
|  | ||||
| /*E*/ | ||||
| let myOptional a::a=? b::b=? () => 10; | ||||
|  | ||||
| type named = a::int? => b::int? => unit => int; | ||||
|  | ||||
| /*F*/ | ||||
| let optionalAlias a::aa=? b::bb=? () => 10; | ||||
|  | ||||
| /*G*/ | ||||
| let optionalAnnot a::(a: int)=? b::(b: int)=? () => 10; | ||||
|  | ||||
| /*H*/ | ||||
| let optionalAliasAnnot | ||||
|     a::(aa: int)=? | ||||
|     b::(bb: int)=? | ||||
|     () => 10; | ||||
|  | ||||
| /*I: */ | ||||
| let defOptional a::a=10 b::b=10 () => 10; | ||||
|  | ||||
| type named = a::int? => b::int? => unit => int; | ||||
|  | ||||
| /*J*/ | ||||
| let defOptionalAlias a::aa=10 b::bb=10 () => 10; | ||||
|  | ||||
| /*K*/ | ||||
| let defOptionalAnnot | ||||
|     a::(a: int)=10 | ||||
|     b::(b: int)=10 | ||||
|     () => 10; | ||||
|  | ||||
| /*L*/ | ||||
| let defOptionalAliasAnnot | ||||
|     a::(aa: int)=10 | ||||
|     b::(bb: int)=10 | ||||
|     () => 10; | ||||
|  | ||||
| /*M: Invoking them - Punned */ | ||||
| let resNotAnnotated = named a::a b::b; | ||||
|  | ||||
| /*N:*/ | ||||
| let resAnnotated: int = named a::a b::b; | ||||
|  | ||||
| /*O: Invoking them */ | ||||
| let resNotAnnotated = named a::a b::b; | ||||
|  | ||||
| /*P: Invoking them */ | ||||
| let resAnnotated: int = named a::a b::b; | ||||
|  | ||||
| /*Q: Here's why "punning" doesn't work!  */ | ||||
| /* Is b:: punned with a final non-named arg, or is b:: supplied b as one named arg? */ | ||||
| let b = 20; | ||||
|  | ||||
| let resAnnotated = named a::a b::b; | ||||
|  | ||||
| /*R: Proof that there are no ambiguities with return values being annotated */ | ||||
| let resAnnotated: ty = named a::a b; | ||||
|  | ||||
| /*S: Explicitly passed optionals are a nice way to say "use the default value"*/ | ||||
| let explictlyPassed = | ||||
|   myOptional a::?None b::?None; | ||||
|  | ||||
| /*T: Annotating the return value of the entire function call */ | ||||
| let explictlyPassedAnnotated: int = | ||||
|   myOptional a::?None b::?None; | ||||
|  | ||||
| /*U: Explicitly passing optional with identifier expression */ | ||||
| let a = None; | ||||
|  | ||||
| let explictlyPassed = myOptional a::?a b::?None; | ||||
|  | ||||
| let explictlyPassedAnnotated: int = | ||||
|   myOptional a::?a b::?None; | ||||
|  | ||||
| let nestedLet = { | ||||
|   let _ = 1; | ||||
|   () | ||||
| }; | ||||
|  | ||||
| let nestedLet = { | ||||
|   let _ = 1; | ||||
|   () | ||||
| }; | ||||
|  | ||||
| let nestedLet = { | ||||
|   let _ = 1; | ||||
|   () | ||||
| }; | ||||
|  | ||||
| let nestedLet = { | ||||
|   let _ = 1; | ||||
|   2 | ||||
| }; | ||||
|  | ||||
| /* | ||||
|  * Showing many combinations of type annotations and named arguments. | ||||
|  */ | ||||
| type typeWithNestedNamedArgs = | ||||
|   outerOne::( | ||||
|     innerOne::int => innerTwo::int => int | ||||
|   ) => | ||||
|   outerTwo::int => | ||||
|   int; | ||||
|  | ||||
| type typeWithNestedOptionalNamedArgs = | ||||
|   outerOne:: | ||||
|     (innerOne::int => innerTwo::int => int)? => | ||||
|   outerTwo::int? => | ||||
|   int; | ||||
|  | ||||
| type typeWithNestedOptionalNamedArgs = | ||||
|   outerOne::list string? => outerTwo::int? => int; | ||||
|  | ||||
| let x = | ||||
|   callSomeFunction | ||||
|     withArg::10 andOtherArg::wrappedArg; | ||||
|  | ||||
| let res = { | ||||
|   (constraintedSequenceItem: string); | ||||
|   (dontKnowWheYoudWantToActuallyDoThis: string) | ||||
| }; | ||||
|  | ||||
| let res = { | ||||
|   ( | ||||
|     butTheyWillBePrintedWithAppropriateSpacing: string | ||||
|   ); | ||||
|   (soAsToInstillBestDevelopmentPractices: string) | ||||
| }; | ||||
|  | ||||
| let x = [ | ||||
|   (eachItemInListCanBeAnnotated: int), | ||||
|   (typeConstraints: float), | ||||
|   ( | ||||
|     tupleConstraints: int, | ||||
|     andNotFunctionInvocations: int | ||||
|   ) | ||||
| ]; | ||||
|  | ||||
| let x = [ | ||||
|   (butWeWillPrint: int), | ||||
|   (themAsSpaceSeparated: float), | ||||
|   (toInfluenceYour: int, developmentHabbits: int) | ||||
| ]; | ||||
|  | ||||
| let newRecord = { | ||||
|   ...(annotatedSpreadRecord: someRec), | ||||
|   x: y | ||||
| }; | ||||
|  | ||||
| let newRecord = { | ||||
|   ...(annotatedSpreadRecord: someRec), | ||||
|   blah: 0, | ||||
|   foo: 1 | ||||
| }; | ||||
|  | ||||
| let newRecord = { | ||||
|   ...( | ||||
|     youCanEvenCallMethodsHereAndAnnotate them: someRec | ||||
|   ), | ||||
|   blah: 0, | ||||
|   foo: 1 | ||||
| }; | ||||
|  | ||||
| let newRecord = { | ||||
|   ...( | ||||
|     youCanEvenCallMethodsHereAndAnnotate | ||||
|       them named::10: someRec | ||||
|   ), | ||||
|   blah: 0, | ||||
|   foo: 1 | ||||
| }; | ||||
|  | ||||
| let something: thing blah = aTypeAnnotation; | ||||
|  | ||||
| let something: thing blah = thisIsANamedArg; | ||||
|  | ||||
| let something: thing blah = aTypeAnnotation; | ||||
|  | ||||
| let something: blah = thisIsANamedArg thing; | ||||
|  | ||||
| let something: blah = typeAnnotation thing; | ||||
|  | ||||
| let newRecord = { | ||||
|   ...( | ||||
|     heresAFunctionWithNamedArgs argOne::i: annotatedResult | ||||
|   ), | ||||
|   soAsToInstill: 0, | ||||
|   developmentHabbits: 1 | ||||
| }; | ||||
|  | ||||
| [@@@thisIsAThing]; | ||||
|  | ||||
| let x = 10; | ||||
|  | ||||
| /* Ensure that the parenthesis are preserved here because they are | ||||
|  * important: | ||||
|  */ | ||||
| let something = | ||||
|   fun | ||||
|   | None => ( | ||||
|       fun | ||||
|       | [] => "emptyList" | ||||
|       | [_, ..._] => "nonEmptyList" | ||||
|     ) | ||||
|   | Some _ => ( | ||||
|       fun | ||||
|       | [] => "emptyList" | ||||
|       | [_, ..._] => "nonEmptyList" | ||||
|     ); | ||||
|  | ||||
| /*  A | B = X; */ | ||||
| let A | B = X; | ||||
|  | ||||
| /*  A | (B | C) = X; */ | ||||
| let A | (B | C) = X; | ||||
|  | ||||
| /* (A | B) | (C | D) = X; */ | ||||
| let A | B | (C | D) = X; | ||||
|  | ||||
| /*  A | B | (C | D) = X; */ | ||||
| let A | B | (C | D) = X; | ||||
|  | ||||
| /* (A | B) | C = X; */ | ||||
| let A | B | C = X; | ||||
|  | ||||
| /*  A | B | C = X; */ | ||||
| let A | B | C = X; | ||||
|  | ||||
|  | ||||
| /** External function declaration | ||||
|  * | ||||
|  */ | ||||
| external f : int => int = "foo"; | ||||
|  | ||||
| let x = {contents: 0}; | ||||
|  | ||||
| let unitVal = x.contents = 210; | ||||
							
								
								
									
										19
									
								
								samples/Regular Expression/modeline-emacs.regexp
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										19
									
								
								samples/Regular Expression/modeline-emacs.regexp
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,19 @@ | ||||
| -\*- | ||||
| (?: | ||||
| 	\s* | ||||
| 	(?= [^:;\s]+ \s* -\*-) | ||||
| 	| | ||||
| 	(?: | ||||
| 		.*?[;\s] | ||||
| 		| | ||||
| 		(?<=-\*-) | ||||
| 	) | ||||
| 	mode\s*:\s* | ||||
| ) | ||||
| ([^:;\s]+) | ||||
|  | ||||
| (?= | ||||
| 	[\s;] | (?<![-*]) -\*- | ||||
| ) | ||||
| .*? | ||||
| -\*- | ||||
							
								
								
									
										27
									
								
								samples/Regular Expression/modeline-vim.regexp
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										27
									
								
								samples/Regular Expression/modeline-vim.regexp
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,27 @@ | ||||
| (?: | ||||
| 	(?:\s|^) | ||||
| 	vi | ||||
| 	(?:m[<=>]?\d+|m)? | ||||
| 	| | ||||
| 	[\t\x20] | ||||
| 	ex | ||||
| ) | ||||
| (?= | ||||
| 	: (?=\s* set? \s [^\n:]+ :) | | ||||
| 	: (?!\s* set? \s) | ||||
| ) | ||||
|  | ||||
| (?: | ||||
| 	(?:\s|\s*:\s*) | ||||
| 	\w* | ||||
| 	(?: | ||||
| 		\s*= | ||||
| 		(?:[^\n\\\s]|\\.)* | ||||
| 	)? | ||||
| )* | ||||
|  | ||||
| [\s:] | ||||
| (?:filetype|ft|syntax) | ||||
| \s*= | ||||
| (MODE_NAME_HERE) | ||||
| (?=\s|:|$) | ||||
							
								
								
									
										1
									
								
								samples/Regular Expression/ordinal.regex
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								samples/Regular Expression/ordinal.regex
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1 @@ | ||||
| \b(\d*1[1-3]th|\d*0th|(?:(?!11st)\d)*1st|\d*2nd|(?:(?!13rd)\d*)3rd|\d*[4-9]th)\b | ||||
							
								
								
									
										1
									
								
								samples/Regular Expression/url.regex
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								samples/Regular Expression/url.regex
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1 @@ | ||||
| /^([^\/#\?]*:?\/\/)?(\/?(?:[^\/#\?]+\/)*)?([^\/#\?]+)?(?:\/(?=$))?(\?[^#]*)?(#.*)?$/ | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user