Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00.

Compare commits (340 commits)
Commits included in this comparison, by abbreviated SHA1:

```
3b625e1954 5c6f690b97 3bbfc907f3 053b8bca97 7fb3db6203 ba09394f85 c59c88f16e 8a6e74799a
4268769d2e 6601864084 d57aa37fb7 e72347fd98 1b429ea46b 9468ad4947 733ef63193 9ca6a5841e
41ace5fba0 cc4295b3b3 1e4ce80fd9 74a71fd90d 9b08318456 fa5b6b03dc cb59296fe0 f1be771611
b66fcb2529 f7fe1fee66 94367cc460 72bec1fddc 4e2eba4ef8 10457b6639 d58cbc68a6 01de40faaa
62d285fce6 0056095e8c d6dc3a3991 b524461b7c 76d41697aa 32147b629e e7b5e25bf8 d761658f8b
3719214aba 47b109be36 1ec4db97c2 9fe5fe0de2 b36ea7ac9d 625b06c30d 28bce533b2 93ec1922cb
5d09fb67dd 93dcb61742 3a03594685 5ce2c254f9 d7814c4899 50c08bf29e 34928baee6 27bb41aa4d
1415f4b52d ae8ffcad22 f43633bf10 a604de9846 3e224e0039 15b04f86c3 42af436c20 2b08c66f0b
f98ab593fb f951ec07de e9ac71590f 210cd19876 f473c555ac 48e4394d87 e1ce88920d 675cee1d72
1c4baf6dc2 8f2820e9cc 04c268e535 ec749b3f8d 08b63e7033 7867b946b9 a4d12cc8e4 a1165b74b1
0fa1fa5581 d8b91bd5c4 9b941a34f0 9d8392dab8 2c78dd2c66 3988f3e7a7 d9a4e831b4 45c27f26a2
0fbc29bf68 5569d2056d be262d0b4f 33ce2d7264 c486f56204 9f3b7d0ba5 79f20e8057 cd30c7613c
5aa53c0711 c17cdca896 ecdae83364 31aafa2c78 8a911b8ff3 9233f1d17f 77eb36a982 4e6e58a099
c87976330f 0e9109c3fc 12f9295dd7 581723748b 0980e304b1 d46a529b6a 1d2ec4dbc3 829eea0139
78b2853d70 202f3c08cd b958779e3d 00dc775daf 009a4e67b6 faaa4470af 2a320cb988 74931d1bd5
3ca93a84b9 aa27f18ea6 d3e2ea3f71 53aa1209ab b2a486fed2 4f1e5c34b1 85c9833081 33899b9d6b
417239004a 6a1423d28f 96a23ce388 e8d7eed3aa 9d419c4ab9 4eefc1f58e 0b94b9cda7 c736038d94
ec562138f8 50013e8dd7 416c5d1185 8869912d31 43fa563b77 41c6aee8c3 8cf575c37d 4e20928e04
3e37bd2680 a29f5b2d46 4efc6f8c95 359699c454 346aa99fcf d147778677 e520209e49 338cc16239
67ea35094b 6f0393fcbd 2923d50d7e 4e26f609ef e86d6e8dd2 5fa02ad1fb 5a06240f69 d6e0f74c80
a5c08bb203 c6dc29abb1 ffd984bb7e dc5473559b 8e9c224952 d43f111723 de9ff713a4 98783560ec
8f31fbbd55 e4cdbd2b2b ba52e48ceb a44ebe493b eb0e75e11e 22c2cf4967 39e3688fb8 6b83e5fb7b
dd2e5ffe07 f6b6c4e165 608ed60b5c 2ce2945058 c8d376754e ecaef91fa1 d265b78e7e 5a5bf7d5e5
e46781b903 9543a8c8e9 6ac1ac9232 1bbb919fef 71dfed0e45 a2db058ce4 12695fee2f 4a775dca37
d7c689fd6b 20b8188384 26310d9515 e38cc75da5 8d55fc1bd5 7e63399196 520e5a5cfe 5d85692c24
676861fff3 6589bd9dc7 e32a4f13ef 4e4d851f71 a3628f86da fe70965906 c863435c84 eeec48198a
82167063da 3ae89b48ba cd9401c424 e7e8a7d835 7654032d2e 05b536fc61 ebe85788ab 524337d07b
f8ce42e169 71032cd252 41593b3ea7 bed8add2f5 e424e8e88c 07d4f218a3 67ed060d37 3abe081560
d3f3c0345c 855f1a1f86 0406a5b326 0108ef4386 daefff86ff fdb962518f 6564078061 39ea9be5f8
152b5ade5e c525e3fbef 88c74fa9c2 6a54ee767f 2ea1ff2736 a1901fceff b4035a3804 fc67fc525c
f0659d3aa5 a7a123a8db 0e5327a77a ecd4ae3bda 7a8bd628e1 8e19aea39e 6fcba83f3e d6d7d38eb8
c8094d3775 de478d2f2d 991dcef18b f30e9270f1 1d7ba18b15 35a06d6cb8 4cf7feb275 30298a9ef8
cc5f1c57ca 82af10e3fd 63c8d2284c 697380336c 5fd8d71858 5bc88814e2 00efd6a463 81ca6e7766
cd288a8ee4 a8d84f3d55 600115afed e57273c839 65491d460e 8dc4a1308f 6841b4d259 a53423b6e0
02f3ba1840 e1216ea4ee a3227c2c27 9f1c950a1f c7a0d7b83d 53c9b2b435 b4a77abd82 8a622823b0
3310d925b6 65201b322a b71bf19e37 1f43664a51 7cda13afcb e0d890240b abf7bee464 e73a4ecd0e
70779c9986 1fc4c9fdc6 fdec52c89a 6e40de47da 28be72892e 6df0e4591d 879c63a25e ab2e640759
b61fe90d12 e6c849d92c 5e4e38b39a 22d4865c52 3247d46e81 dad3191238 35a13b3633 56fb48ea96
983ff20d3c 2d51a5dba4 df98c86acd 98118eb70b 1ec84da277 3112e6deda be316c2943 68c45be47d
4584963dd2 f382abc2f3 9d57e1e1b5 2a4150b104 09612ae42e 49e9ee48d0 a8719f3e82 04e1cc6d0a
dd7b125869 426818120c 50bd2cc3c8 00647be113 e930ee1a8e 48b64c2d31 f95365946c 5ddccaac83
51d7c8f905 ed71855612 742faebd8b 6763b73d9c b056df06f4 6bf223e641 fa817b6a1d adaf4011bc
4a031107ac 789607d9bc d46530989c 3c5bcb434c
```
**.gitignore** (2 changed lines, vendored)

```
@@ -1,5 +1,7 @@
*.gem
/Gemfile.lock
.bundle/
.idea
benchmark/
lib/linguist/samples.json
/grammars
```
							
								
								
									
**.gitmodules** (137 changed lines, vendored)

```
@@ -12,7 +12,7 @@
	url = https://github.com/Drako/SublimeBrainfuck
[submodule "vendor/grammars/awk-sublime"]
	path = vendor/grammars/awk-sublime
	url = https://github.com/JohnNilsson/awk-sublime
	url = https://github.com/github-linguist/awk-sublime
[submodule "vendor/grammars/Sublime-SQF-Language"]
	path = vendor/grammars/Sublime-SQF-Language
	url = https://github.com/JonBons/Sublime-SQF-Language
@@ -22,15 +22,15 @@
[submodule "vendor/grammars/Sublime-REBOL"]
	path = vendor/grammars/Sublime-REBOL
	url = https://github.com/Oldes/Sublime-REBOL
[submodule "vendor/grammars/Sublime-VimL"]
	path = vendor/grammars/Sublime-VimL
	url = https://github.com/SalGnt/Sublime-VimL
[submodule "vendor/grammars/language-viml"]
	path = vendor/grammars/language-viml
	url = https://github.com/Alhadis/language-viml
[submodule "vendor/grammars/ColdFusion"]
	path = vendor/grammars/ColdFusion
	url = https://github.com/SublimeText/ColdFusion
[submodule "vendor/grammars/NSIS"]
	path = vendor/grammars/NSIS
	url = https://github.com/SublimeText/NSIS
	url = https://github.com/github-linguist/NSIS
[submodule "vendor/grammars/NimLime"]
	path = vendor/grammars/NimLime
	url = https://github.com/Varriount/NimLime
@@ -67,9 +67,6 @@
[submodule "vendor/grammars/language-javascript"]
	path = vendor/grammars/language-javascript
	url = https://github.com/atom/language-javascript
[submodule "vendor/grammars/language-python"]
	path = vendor/grammars/language-python
	url = https://github.com/atom/language-python
[submodule "vendor/grammars/language-shellscript"]
	path = vendor/grammars/language-shellscript
	url = https://github.com/atom/language-shellscript
@@ -130,9 +127,6 @@
[submodule "vendor/grammars/Sublime-Text-2-OpenEdge-ABL"]
	path = vendor/grammars/Sublime-Text-2-OpenEdge-ABL
	url = https://github.com/jfairbank/Sublime-Text-2-OpenEdge-ABL
[submodule "vendor/grammars/sublime-rust"]
	path = vendor/grammars/sublime-rust
	url = https://github.com/jhasse/sublime-rust
[submodule "vendor/grammars/sublime-befunge"]
	path = vendor/grammars/sublime-befunge
	url = https://github.com/johanasplund/sublime-befunge
@@ -180,7 +174,7 @@
	url = https://github.com/mokus0/Agda.tmbundle
[submodule "vendor/grammars/Julia.tmbundle"]
	path = vendor/grammars/Julia.tmbundle
	url = https://github.com/nanoant/Julia.tmbundle
	url = https://github.com/JuliaEditorSupport/Julia.tmbundle
[submodule "vendor/grammars/ooc.tmbundle"]
	path = vendor/grammars/ooc.tmbundle
	url = https://github.com/nilium/ooc.tmbundle
@@ -202,9 +196,6 @@
[submodule "vendor/grammars/sublime-robot-plugin"]
	path = vendor/grammars/sublime-robot-plugin
	url = https://github.com/shellderp/sublime-robot-plugin
[submodule "vendor/grammars/actionscript3-tmbundle"]
	path = vendor/grammars/actionscript3-tmbundle
	url = https://github.com/honzabrecka/actionscript3-tmbundle
[submodule "vendor/grammars/Sublime-QML"]
	path = vendor/grammars/Sublime-QML
	url = https://github.com/skozlovf/Sublime-QML
@@ -250,9 +241,6 @@
[submodule "vendor/grammars/cpp-qt.tmbundle"]
	path = vendor/grammars/cpp-qt.tmbundle
	url = https://github.com/textmate/cpp-qt.tmbundle
[submodule "vendor/grammars/css.tmbundle"]
	path = vendor/grammars/css.tmbundle
	url = https://github.com/textmate/css.tmbundle
[submodule "vendor/grammars/d.tmbundle"]
	path = vendor/grammars/d.tmbundle
	url = https://github.com/textmate/d.tmbundle
@@ -328,9 +316,6 @@
[submodule "vendor/grammars/nemerle.tmbundle"]
	path = vendor/grammars/nemerle.tmbundle
	url = https://github.com/textmate/nemerle.tmbundle
[submodule "vendor/grammars/ninja.tmbundle"]
	path = vendor/grammars/ninja.tmbundle
	url = https://github.com/textmate/ninja.tmbundle
[submodule "vendor/grammars/objective-c.tmbundle"]
	path = vendor/grammars/objective-c.tmbundle
	url = https://github.com/textmate/objective-c.tmbundle
@@ -345,7 +330,7 @@
	url = https://github.com/textmate/php-smarty.tmbundle
[submodule "vendor/grammars/php.tmbundle"]
	path = vendor/grammars/php.tmbundle
	url = https://github.com/textmate/php.tmbundle
	url = https://github.com/brandonblack/php.tmbundle
[submodule "vendor/grammars/postscript.tmbundle"]
	path = vendor/grammars/postscript.tmbundle
	url = https://github.com/textmate/postscript.tmbundle
@@ -358,9 +343,6 @@
[submodule "vendor/grammars/r.tmbundle"]
	path = vendor/grammars/r.tmbundle
	url = https://github.com/textmate/r.tmbundle
[submodule "vendor/grammars/ruby-haml.tmbundle"]
	path = vendor/grammars/ruby-haml.tmbundle
	url = https://github.com/textmate/ruby-haml.tmbundle
[submodule "vendor/grammars/scheme.tmbundle"]
	path = vendor/grammars/scheme.tmbundle
	url = https://github.com/textmate/scheme.tmbundle
@@ -412,9 +394,9 @@
[submodule "vendor/grammars/oz-tmbundle"]
	path = vendor/grammars/oz-tmbundle
	url = https://github.com/eregon/oz-tmbundle
[submodule "vendor/grammars/ebundles"]
	path = vendor/grammars/ebundles
	url = https://github.com/ericzou/ebundles
[submodule "vendor/grammars/language-batchfile"]
	path = vendor/grammars/language-batchfile
	url = https://github.com/mmims/language-batchfile
[submodule "vendor/grammars/sublime-mask"]
	path = vendor/grammars/sublime-mask
	url = https://github.com/tenbits/sublime-mask
@@ -452,9 +434,6 @@
[submodule "vendor/grammars/Sublime-Nit"]
	path = vendor/grammars/Sublime-Nit
	url = https://github.com/R4PaSs/Sublime-Nit
[submodule "vendor/grammars/language-hy"]
	path = vendor/grammars/language-hy
	url = https://github.com/rwtolbert/language-hy
[submodule "vendor/grammars/Racket"]
	path = vendor/grammars/Racket
	url = https://github.com/soegaard/racket-highlight-for-github
@@ -559,7 +538,7 @@
	url = https://github.com/ShaneWilton/sublime-smali
[submodule "vendor/grammars/language-jsoniq"]
	path = vendor/grammars/language-jsoniq
	url = http://github.com/wcandillon/language-jsoniq
	url = https://github.com/wcandillon/language-jsoniq
[submodule "vendor/grammars/atom-fsharp"]
	path = vendor/grammars/atom-fsharp
	url = https://github.com/fsprojects/atom-fsharp
@@ -602,9 +581,6 @@
[submodule "vendor/grammars/X10"]
	path = vendor/grammars/X10
	url = https://github.com/x10-lang/x10-highlighting
[submodule "vendor/grammars/language-babel"]
	path = vendor/grammars/language-babel
	url = https://github.com/gandm/language-babel
[submodule "vendor/grammars/UrWeb-Language-Definition"]
	path = vendor/grammars/UrWeb-Language-Definition
	url = https://github.com/gwalborn/UrWeb-Language-Definition.git
@@ -635,9 +611,6 @@
[submodule "vendor/grammars/language-yang"]
	path = vendor/grammars/language-yang
	url = https://github.com/DzonyKalafut/language-yang.git
[submodule "vendor/grammars/perl6fe"]
	path = vendor/grammars/perl6fe
	url = https://github.com/MadcapJake/language-perl6fe.git
[submodule "vendor/grammars/language-less"]
	path = vendor/grammars/language-less
	url = https://github.com/atom/language-less.git
@@ -669,8 +642,8 @@
	path = vendor/grammars/pig-latin
	url = https://github.com/goblindegook/sublime-text-pig-latin
[submodule "vendor/grammars/sourcepawn"]
path = vendor/grammars/sourcepawn
url = https://github.com/austinwagner/sublime-sourcepawn
	path = vendor/grammars/sourcepawn
	url = https://github.com/github-linguist/sublime-sourcepawn
[submodule "vendor/grammars/gdscript"]
	path = vendor/grammars/gdscript
	url = https://github.com/beefsack/GDScript-sublime
@@ -782,6 +755,84 @@ url = https://github.com/austinwagner/sublime-sourcepawn
[submodule "vendor/grammars/vhdl"]
	path = vendor/grammars/vhdl
	url = https://github.com/textmate/vhdl.tmbundle
[submodule "vendor/grammars/xquery"]
	path = vendor/grammars/xquery
	url = https://github.com/textmate/xquery.tmbundle
[submodule "vendor/grammars/language-rpm-spec"]
	path = vendor/grammars/language-rpm-spec
	url = https://github.com/waveclaw/language-rpm-spec
[submodule "vendor/grammars/language-emacs-lisp"]
	path = vendor/grammars/language-emacs-lisp
	url = https://github.com/Alhadis/language-emacs-lisp
[submodule "vendor/grammars/language-babel"]
	path = vendor/grammars/language-babel
	url = https://github.com/github-linguist/language-babel
[submodule "vendor/CodeMirror"]
	path = vendor/CodeMirror
	url = https://github.com/codemirror/CodeMirror
[submodule "vendor/grammars/MQL5-sublime"]
	path = vendor/grammars/MQL5-sublime
	url = https://github.com/mqsoft/MQL5-sublime
[submodule "vendor/grammars/actionscript3-tmbundle"]
	path = vendor/grammars/actionscript3-tmbundle
	url = https://github.com/simongregory/actionscript3-tmbundle
[submodule "vendor/grammars/ABNF.tmbundle"]
	path = vendor/grammars/ABNF.tmbundle
	url = https://github.com/sanssecours/ABNF.tmbundle
[submodule "vendor/grammars/EBNF.tmbundle"]
	path = vendor/grammars/EBNF.tmbundle
	url = https://github.com/sanssecours/EBNF.tmbundle
[submodule "vendor/grammars/language-haml"]
	path = vendor/grammars/language-haml
	url = https://github.com/ezekg/language-haml
[submodule "vendor/grammars/language-ninja"]
	path = vendor/grammars/language-ninja
	url = https://github.com/khyo/language-ninja
[submodule "vendor/grammars/language-fontforge"]
	path = vendor/grammars/language-fontforge
	url = https://github.com/Alhadis/language-fontforge
[submodule "vendor/grammars/language-gn"]
	path = vendor/grammars/language-gn
	url = https://github.com/devoncarew/language-gn
[submodule "vendor/grammars/rascal-syntax-highlighting"]
	path = vendor/grammars/rascal-syntax-highlighting
	url = https://github.com/usethesource/rascal-syntax-highlighting
[submodule "vendor/grammars/atom-language-perl6"]
	path = vendor/grammars/atom-language-perl6
	url = https://github.com/perl6/atom-language-perl6
[submodule "vendor/grammars/reason"]
	path = vendor/grammars/reason
	url = https://github.com/facebook/reason
[submodule "vendor/grammars/language-xcompose"]
	path = vendor/grammars/language-xcompose
	url = https://github.com/samcv/language-xcompose
[submodule "vendor/grammars/SublimeEthereum"]
	path = vendor/grammars/SublimeEthereum
	url = https://github.com/davidhq/SublimeEthereum.git
[submodule "vendor/grammars/atom-language-rust"]
	path = vendor/grammars/atom-language-rust
	url = https://github.com/zargony/atom-language-rust
[submodule "vendor/grammars/language-css"]
	path = vendor/grammars/language-css
	url = https://github.com/atom/language-css
[submodule "vendor/grammars/language-regexp"]
	path = vendor/grammars/language-regexp
	url = https://github.com/Alhadis/language-regexp
[submodule "vendor/grammars/Terraform.tmLanguage"]
	path = vendor/grammars/Terraform.tmLanguage
	url = https://github.com/alexlouden/Terraform.tmLanguage
[submodule "vendor/grammars/shaders-tmLanguage"]
	path = vendor/grammars/shaders-tmLanguage
	url = https://github.com/tgjones/shaders-tmLanguage
[submodule "vendor/grammars/language-meson"]
	path = vendor/grammars/language-meson
	url = https://github.com/TingPing/language-meson
[submodule "vendor/grammars/atom-language-p4"]
	path = vendor/grammars/atom-language-p4
	url = https://github.com/TakeshiTseng/atom-language-p4
[submodule "vendor/grammars/language-jison"]
	path = vendor/grammars/language-jison
	url = https://github.com/cdibbs/language-jison
[submodule "vendor/grammars/openscad.tmbundle"]
	path = vendor/grammars/openscad.tmbundle
	url = https://github.com/tbuser/openscad.tmbundle
[submodule "vendor/grammars/marko-tmbundle"]
	path = vendor/grammars/marko-tmbundle
	url = https://github.com/marko-js/marko-tmbundle
```
							
								
								
									
**.travis.yml** (15 changed lines)

```
@@ -1,20 +1,33 @@
language: ruby
sudo: false

addons:
  apt:
    packages:
    - libicu-dev
    - libicu48

before_install: script/travis/before_install

script:
  - bundle exec rake
  - script/licensed verify

rvm:
  - 2.0.0
  - 2.1
  - 2.2
  - 2.3.3
  - 2.4.0

matrix:
  allow_failures:
  - rvm: 2.4.0

notifications:
  disabled: true

git:
  submodules: false
  depth: 3

cache: bundler
```
**CONTRIBUTING.md**

```
@@ -17,7 +17,7 @@ To add support for a new extension:
In addition, if this extension is already listed in [`languages.yml`][languages] then sometimes a few more steps will need to be taken:

0. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping @arfon or @bkeepers to help with this) to ensure we're not misclassifying files.
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
0. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.

@@ -27,18 +27,16 @@ We try only to add languages once they have some usage on GitHub. In most cases

To add support for a new language:

0. Add an entry for your language to [`languages.yml`][languages].
0. Add a grammar for your language. Please only add grammars that have [one of these licenses](https://github.com/github/linguist/blob/257425141d4e2a5232786bf0b13c901ada075f93/vendor/licenses/config.yml#L2-L11).
  0. Add your grammar as a submodule: `git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar`.
  0. Add your grammar to [`grammars.yml`][grammars] by running `script/convert-grammars --add vendor/grammars/MyGrammar`.
  0. Download the license for the grammar: `script/licensed`. Be careful to only commit the file for the new grammar, as this script may update licenses for other grammars as well.
0. Add an entry for your language to [`languages.yml`][languages]. Omit the `language_id` field for now.
0. Add a grammar for your language: `script/add-grammar https://github.com/JaneSmith/MyGrammar`. Please only add grammars that have [one of these licenses][licenses].
0. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
0. Add a `language_id` for your language using `script/set-language-ids`. **You should only ever need to run `script/set-language-ids --update`. Anything other than this risks breaking GitHub search :cry:**
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:

0. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping @arfon or @bkeepers to help with this) to ensure we're not misclassifying files.
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
0. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.

Remember, the goal here is to try and avoid false positives!
@@ -81,9 +79,13 @@ Here's our current build status: [
- @larsbrinkhoff
- @pchaigno
- **@Alhadis**
- **@brandonblack** (GitHub staff)
- **@larsbrinkhoff**
- **@lildude** (GitHub staff)
- **@lizzhale** (GitHub staff)
- **@mikemcquaid** (GitHub staff)
- **@pchaigno**

As Linguist is a production dependency for GitHub we have a couple of workflow restrictions:

@@ -112,5 +114,6 @@ If you are the current maintainer of this gem:

[grammars]: /grammars.yml
[languages]: /lib/linguist/languages.yml
[licenses]: https://github.com/github/linguist/blob/257425141d4e2a5232786bf0b13c901ada075f93/vendor/licenses/config.yml#L2-L11
[samples]: /samples
[new-issue]: https://github.com/github/linguist/issues/new
```
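The new-language checklist in the diff above starts with an entry in `languages.yml`. As a rough sketch only (the language name, extension, colour, and scope below are invented placeholders; the exact field set should be copied from a neighbouring entry in `languages.yml`), such an entry has roughly this shape:

```yaml
# Hypothetical languages.yml entry; values are placeholders, not taken from this diff.
MyLang:
  type: programming
  color: "#123456"
  extensions:
  - ".mylang"
  tm_scope: source.mylang
  ace_mode: text
  # language_id is filled in afterwards by running `script/set-language-ids --update`
```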
							
								
								
									
**LICENSE** (2 changed lines)

```
@@ -1,4 +1,4 @@
Copyright (c) 2011-2016 GitHub, Inc.
Copyright (c) 2017 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
```
							
								
								
									
**README.md** (10 changed lines)

````
@@ -20,13 +20,19 @@ The Language stats bar displays languages percentages for the files in the repos
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.

### There's a problem with the syntax highlighting of a file

Linguist detects the language of a file but the actual syntax-highlighting is powered by a set of language grammars which are included in this project as a set of submodules [and may be found here](https://github.com/github/linguist/blob/master/vendor/README.md).

If you experience an issue with the syntax-highlighting on GitHub, **please report the issue to the upstream grammar repository, not here.** Grammars are updated every time we build the Linguist gem and so upstream bug fixes are automatically incorporated as they are fixed.

## Overrides

Linguist supports a number of different custom overrides strategies for language definitions and vendored paths.

### Using gitattributes

Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, and `linguist-vendored`. `.gitattributes` will be used to determine language statistics, but will not be used to syntax highlight files. To manually set syntax highlighting, use [Vim or Emacs modelines](#using-emacs-or-vim-modelines).
Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, and `linguist-vendored`. `.gitattributes` will be used to determine language statistics and will be used to syntax highlight files. You can also manually set syntax highlighting using [Vim or Emacs modelines](#using-emacs-or-vim-modelines).

```
$ cat .gitattributes
@@ -65,7 +71,7 @@ See [Linguist::Generated#generated?](https://github.com/github/linguist/blob/mas

### Using Emacs or Vim modelines

Alternatively, you can use Vim or Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com
If you do not want to use `.gitattributes` to override the syntax highlighting used on GitHub.com, you can use Vim or Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com

##### Vim
```
````
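The `### Using gitattributes` hunk above ends just where the README's own example begins, so the example body is not part of the captured diff. A minimal sketch of the kind of overrides that paragraph describes (the paths and the language value are placeholders; the attribute names are the ones named in the README text):

```
# Hypothetical .gitattributes entries illustrating linguist overrides.
docs/* linguist-documentation
vendor/bundle/* linguist-vendored
*.inc linguist-language=PHP
```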
**bin/git-linguist**

```
@@ -1,5 +1,7 @@
#!/usr/bin/env ruby

$LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib')

require 'linguist'
require 'rugged'
require 'optparse'
@@ -23,7 +25,7 @@ class GitLinguist
    if @incremental && stats = load_language_stats
      old_commit_oid, old_stats = stats

      # A cache with NULL oid means that we want to froze
      # A cache with NULL oid means that we want to freeze
      # these language stats in place and stop computing
      # them (for performance reasons)
      return old_stats if old_commit_oid == NULL_OID
@@ -102,16 +104,22 @@ def git_linguist(args)
  commit = nil

  parser = OptionParser.new do |opts|
    opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
    opts.banner = <<-HELP
    Linguist v#{Linguist::VERSION}
    Detect language type and determine language breakdown for a given Git repository.

    Usage:
    git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"
    HELP

    opts.on("-f", "--force", "Force a full rescan") { incremental = false }
    opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v}
    opts.on("-c", "--commit=COMMIT", "Commit to index") { |v| commit = v}
  end

  parser.parse!(args)

  git_dir = `git rev-parse --git-dir`.strip
  raise "git-linguist must be ran in a Git repository (#{Dir.pwd})" unless $?.success?
  raise "git-linguist must be run in a Git repository (#{Dir.pwd})" unless $?.success?
  wrapper = GitLinguist.new(git_dir, commit, incremental)

  case args.pop
```
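The `bin/git-linguist` hunks above add a descriptive help banner and a short `-c` alias for `--commit`. A sketch of invocations the options shown in this diff would accept (run inside a Git repository; the commit value is illustrative):

```
git-linguist --commit=HEAD stats
git-linguist -c HEAD breakdown
git-linguist --force stats
```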
							
								
								
									
**bin/linguist** (35 changed lines)

```
@@ -1,29 +1,37 @@
#!/usr/bin/env ruby

# linguist — detect language type for a file, or, given a directory, determine language breakdown
#     usage: linguist <path> [<--breakdown>]
#
$LOAD_PATH[0, 0] = File.join(File.dirname(__FILE__), '..', 'lib')

require 'linguist'
require 'rugged'
require 'json'
require 'optparse'

path = ARGV[0] || Dir.pwd

# special case if not given a directory but still given the --breakdown option
# special case if not given a directory
# but still given the --breakdown or --json options/
if path == "--breakdown"
  path = Dir.pwd
  breakdown = true
elsif path == "--json"
  path = Dir.pwd
  json_breakdown = true
end

ARGV.shift
breakdown = true if ARGV[0] == "--breakdown"
json_breakdown = true if ARGV[0] == "--json"

if File.directory?(path)
  rugged = Rugged::Repository.new(path)
  repo = Linguist::Repository.new(rugged, rugged.head.target_id)
  repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
    percentage = ((size / repo.size.to_f) * 100)
    percentage = sprintf '%.2f' % percentage
    puts "%-7s %s" % ["#{percentage}%", language]
  if !json_breakdown
    repo.languages.sort_by { |_, size| size }.reverse.each do |language, size|
      percentage = ((size / repo.size.to_f) * 100)
      percentage = sprintf '%.2f' % percentage
      puts "%-7s %s" % ["#{percentage}%", language]
    end
  end
  if breakdown
    puts
@@ -35,6 +43,8 @@ if File.directory?(path)
      end
      puts
    end
  elsif json_breakdown
    puts JSON.dump(repo.breakdown_by_file)
  end
elsif File.file?(path)
  blob = Linguist::FileBlob.new(path, Dir.pwd)
@@ -63,5 +73,12 @@ elsif File.file?(path)
    puts "  appears to be a vendored file"
  end
else
  abort "usage: linguist <path>"
  abort <<-HELP
  Linguist v#{Linguist::VERSION}
  Detect language type for a file, or, given a directory, determine language breakdown.

  Usage: linguist <path>
         linguist <path> [--breakdown] [--json]
         linguist [--breakdown] [--json]
  HELP
end
```
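The `bin/linguist` change above adds a `--json` flag alongside `--breakdown` and prints `repo.breakdown_by_file` through `JSON.dump`. Based only on the usage text and code visible in this diff, the accepted command forms look like the following (the path is a placeholder):

```
linguist                     # per-language percentage summary for the current directory
linguist --breakdown         # summary plus the files attributed to each language
linguist --json              # the per-file breakdown emitted as JSON
linguist path/to/some_file   # detection details for a single file
```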
**github-linguist.gemspec**

```
@@ -16,7 +16,7 @@ Gem::Specification.new do |s|
  s.add_dependency 'charlock_holmes', '~> 0.7.3'
  s.add_dependency 'escape_utils',    '~> 1.1.0'
  s.add_dependency 'mime-types',      '>= 1.19'
  s.add_dependency 'rugged',          '>= 0.23.0b'
  s.add_dependency 'rugged',          '>= 0.25.1'

  s.add_development_dependency 'minitest', '>= 5.0'
  s.add_development_dependency 'mocha'
@@ -26,6 +26,5 @@ Gem::Specification.new do |s|
  s.add_development_dependency 'yajl-ruby'
  s.add_development_dependency 'color-proximity', '~> 0.2.1'
  s.add_development_dependency 'licensed'
  s.add_development_dependency 'licensee', '>= 8.3.0'

  s.add_development_dependency 'licensee', '~> 8.8.0'
end
```
							
								
								
									
**grammars.yml** (207 changed lines)

```
@@ -1,16 +1,18 @@
---
http://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage:
- text.xml.genshi
https://bitbucket.org/Clams/sublimesystemverilog/get/default.tar.gz:
- source.systemverilog
- source.ucfconstraints
https://svn.edgewall.org/repos/genshi/contrib/textmate/Genshi.tmbundle/Syntaxes/Markup%20Template%20%28XML%29.tmLanguage:
- text.xml.genshi
vendor/grammars/ABNF.tmbundle:
- source.abnf
vendor/grammars/Agda.tmbundle:
- source.agda
vendor/grammars/Alloy.tmbundle:
- source.alloy
vendor/grammars/AutoHotkey/:
vendor/grammars/AutoHotkey:
- source.ahk
vendor/grammars/BrightScript.tmbundle/:
vendor/grammars/BrightScript.tmbundle:
- source.brightauthorproject
- source.brightscript
vendor/grammars/ColdFusion:
@@ -20,11 +22,15 @@ vendor/grammars/ColdFusion:
- text.html.cfm
vendor/grammars/Docker.tmbundle:
- source.dockerfile
vendor/grammars/Elm/:
vendor/grammars/EBNF.tmbundle:
- source.ebnf
vendor/grammars/Elm/Syntaxes:
- source.elm
- text.html.mediawiki.elm-build-output
- text.html.mediawiki.elm-documentation
vendor/grammars/FreeMarker.tmbundle:
- text.html.ftl
vendor/grammars/G-Code/:
vendor/grammars/G-Code:
- source.LS
- source.MCPOST
- source.MOD
@@ -37,7 +43,7 @@ vendor/grammars/IDL-Syntax:
vendor/grammars/Isabelle.tmbundle:
- source.isabelle.root
- source.isabelle.theory
vendor/grammars/JSyntax/:
vendor/grammars/JSyntax:
- source.j
vendor/grammars/Julia.tmbundle:
- source.julia
@@ -45,10 +51,14 @@ vendor/grammars/Lean.tmbundle:
- source.lean
vendor/grammars/LiveScript.tmbundle:
- source.livescript
vendor/grammars/MQL5-sublime:
- source.mql5
vendor/grammars/MagicPython:
- source.python
- source.regexp.python
vendor/grammars/Modelica/:
- text.python.console
- text.python.traceback
vendor/grammars/Modelica:
- source.modelica
vendor/grammars/NSIS:
- source.nsis
@@ -58,7 +68,7 @@ vendor/grammars/NimLime:
- source.nimcfg
vendor/grammars/PHP-Twig.tmbundle:
- text.html.twig
vendor/grammars/PogoScript.tmbundle/:
vendor/grammars/PogoScript.tmbundle:
- source.pogoscript
vendor/grammars/RDoc.tmbundle:
- text.rdoc
@@ -73,10 +83,10 @@ vendor/grammars/Scalate.tmbundle:
- text.html.ssp
vendor/grammars/Slash.tmbundle:
- text.html.slash
vendor/grammars/Stata.tmbundle/:
vendor/grammars/Stata.tmbundle:
- source.mata
- source.stata
vendor/grammars/Stylus/:
vendor/grammars/Stylus:
- source.stylus
vendor/grammars/Sublime-Coq:
- source.coq
@@ -86,7 +96,7 @@ vendor/grammars/Sublime-Lasso:
- file.lasso
vendor/grammars/Sublime-Loom:
- source.loomscript
vendor/grammars/Sublime-Modula-2/:
vendor/grammars/Sublime-Modula-2:
- source.modula2
vendor/grammars/Sublime-Nit:
- source.nit
@@ -101,27 +111,29 @@ vendor/grammars/Sublime-SQF-Language:
vendor/grammars/Sublime-Text-2-OpenEdge-ABL:
- source.abl
- text.html.abl
vendor/grammars/Sublime-VimL:
- source.viml
vendor/grammars/SublimeBrainfuck:
- source.bf
vendor/grammars/SublimeClarion/:
vendor/grammars/SublimeClarion:
- source.clarion
vendor/grammars/SublimeEthereum:
- source.solidity
vendor/grammars/SublimeGDB/:
- source.disasm
- source.gdb
- source.gdb.session
- source.gdbregs
vendor/grammars/SublimePapyrus/:
vendor/grammars/SublimePapyrus:
- source.papyrus.skyrim
vendor/grammars/SublimePuppet/:
vendor/grammars/SublimePuppet:
- source.puppet
vendor/grammars/SublimeXtend:
- source.xtend
vendor/grammars/TLA:
- source.tla
vendor/grammars/TXL/:
vendor/grammars/TXL:
- source.txl
vendor/grammars/Terraform.tmLanguage:
- source.terraform
vendor/grammars/Textmate-Gosu-Bundle:
- source.gosu.2
vendor/grammars/UrWeb-Language-Definition:
@@ -134,7 +146,7 @@ vendor/grammars/X10:
- source.x10
vendor/grammars/abap.tmbundle:
- source.abap
vendor/grammars/actionscript3-tmbundle:
vendor/grammars/actionscript3-tmbundle/:
- source.actionscript.3
- text.html.asdoc
- text.xml.flex-config
@@ -149,20 +161,20 @@ vendor/grammars/antlr.tmbundle:
vendor/grammars/apache.tmbundle:
- source.apache-config
- source.apache-config.mod_perl
vendor/grammars/api-blueprint-sublime-plugin/:
vendor/grammars/api-blueprint-sublime-plugin:
- text.html.markdown.source.gfm.apib
- text.html.markdown.source.gfm.mson
vendor/grammars/applescript.tmbundle:
- source.applescript
vendor/grammars/asciidoc.tmbundle/:
vendor/grammars/asciidoc.tmbundle:
- text.html.asciidoc
vendor/grammars/asp.tmbundle:
- source.asp
- text.html.asp
vendor/grammars/assembly/:
vendor/grammars/assembly:
- objdump.x86asm
- source.x86asm
vendor/grammars/atom-fsharp/:
vendor/grammars/atom-fsharp:
- source.fsharp
- source.fsharp.fsi
- source.fsharp.fsl
@@ -172,16 +184,26 @@ vendor/grammars/atom-language-1c-bsl:
- source.sdbl
vendor/grammars/atom-language-clean:
- source.clean
vendor/grammars/atom-language-purescript/:
- source.gfm.clean
vendor/grammars/atom-language-p4:
- source.p4
vendor/grammars/atom-language-perl6:
- source.meta-info
- source.perl6fe
- source.quoting.perl6fe
- source.regexp.perl6fe
vendor/grammars/atom-language-purescript:
- source.purescript
vendor/grammars/atom-language-rust:
- source.rust
vendor/grammars/atom-language-srt:
- text.srt
vendor/grammars/atom-language-stan/:
vendor/grammars/atom-language-stan:
- source.stan
vendor/grammars/atom-salt:
- source.python.salt
- source.yaml.salt
vendor/grammars/atomic-dreams/:
vendor/grammars/atomic-dreams:
- source.dm
- source.dmf
vendor/grammars/ats:
@@ -192,7 +214,7 @@ vendor/grammars/bison.tmbundle:
- source.bison
vendor/grammars/blitzmax:
- source.blitzmax
vendor/grammars/boo/:
vendor/grammars/boo:
- source.boo
vendor/grammars/bro-sublime:
- source.bro
@@ -205,7 +227,6 @@ vendor/grammars/capnproto.tmbundle:
vendor/grammars/carto-atom:
- source.css.mss
vendor/grammars/ceylon-sublimetext:
- module.ceylon
- source.ceylon
vendor/grammars/chapel-tmbundle:
- source.chapel
@@ -217,10 +238,8 @@ vendor/grammars/cool-tmbundle:
vendor/grammars/cpp-qt.tmbundle:
- source.c++.qt
- source.qmake
vendor/grammars/creole/:
vendor/grammars/creole:
- text.html.creole
vendor/grammars/css.tmbundle:
- source.css
vendor/grammars/cucumber-tmbundle:
- source.ruby.rspec.cucumber.steps
- text.gherkin.feature
@@ -241,9 +260,7 @@ vendor/grammars/dylan.tmbundle:
- source.dylan
- source.lid
- source.makegen
vendor/grammars/ebundles/Bundles/MSDOS batch file.tmbundle:
- source.dosbatch
vendor/grammars/ec.tmbundle/:
vendor/grammars/ec.tmbundle:
- source.c.ec
vendor/grammars/eiffel.tmbundle:
- source.eiffel
@@ -268,9 +285,9 @@ vendor/grammars/forth:
vendor/grammars/fortran.tmbundle:
- source.fortran
- source.fortran.modern
vendor/grammars/gap-tmbundle/:
vendor/grammars/gap-tmbundle:
- source.gap
vendor/grammars/gdscript/:
vendor/grammars/gdscript:
- source.gdscript
vendor/grammars/gettext.tmbundle:
- source.po
@@ -298,7 +315,7 @@ vendor/grammars/idl.tmbundle:
- source.idl
- source.idl-dlm
- text.idl-idldoc
vendor/grammars/idris/:
vendor/grammars/idris:
- source.idris
vendor/grammars/ini.tmbundle:
- source.ini
@@ -330,12 +347,14 @@ vendor/grammars/language-apl:
- source.apl
vendor/grammars/language-asn1:
- source.asn
vendor/grammars/language-babel/:
vendor/grammars/language-babel:
- source.js.jsx
- source.regexp.babel
vendor/grammars/language-blade/:
vendor/grammars/language-batchfile:
- source.batchfile
vendor/grammars/language-blade:
- text.html.php.blade
vendor/grammars/language-click/:
vendor/grammars/language-click:
- source.click
vendor/grammars/language-clojure:
- source.clojure
@@ -354,10 +373,23 @@ vendor/grammars/language-csound:
- source.csound
- source.csound-document
- source.csound-score
vendor/grammars/language-css:
- source.css
vendor/grammars/language-emacs-lisp:
- source.emacs.lisp
vendor/grammars/language-fontforge:
- source.fontforge
- source.opentype
- text.sfd
vendor/grammars/language-gfm:
- source.gfm
vendor/grammars/language-gn:
- source.gn
vendor/grammars/language-graphql:
- source.graphql
vendor/grammars/language-haml:
- text.haml
- text.hamlc
vendor/grammars/language-haskell:
- hint.haskell
- hint.message.haskell
@@ -367,37 +399,49 @@ vendor/grammars/language-haskell:
- source.haskell
- source.hsc2hs
- text.tex.latex.haskell
vendor/grammars/language-hy:
- source.hy
vendor/grammars/language-inform7:
- source.inform7
vendor/grammars/language-javascript:
- source.js
- source.js.regexp
- source.js.regexp.replacement
vendor/grammars/language-jsoniq/:
- source.jsdoc
vendor/grammars/language-jison:
- source.jison
- source.jisonlex
- source.jisonlex-injection
vendor/grammars/language-jsoniq:
- source.jq
- source.xq
vendor/grammars/language-less/:
vendor/grammars/language-less:
- source.css.less
vendor/grammars/language-maxscript:
- source.maxscript
vendor/grammars/language-meson:
- source.meson
vendor/grammars/language-ncl:
- source.ncl
vendor/grammars/language-ninja:
- source.ninja
vendor/grammars/language-povray:
- source.pov-ray sdl
vendor/grammars/language-python:
- text.python.console
- text.python.traceback
vendor/grammars/language-regexp:
- source.regexp
- source.regexp.extended
vendor/grammars/language-renpy:
- source.renpy
vendor/grammars/language-restructuredtext:
- text.restructuredtext
vendor/grammars/language-roff:
- source.ditroff
- source.ditroff.desc
- source.ideal
- source.pic
- text.roff
- text.runoff
vendor/grammars/language-rpm-spec:
- source.changelogs.rpm-spec
- source.rpm-spec
vendor/grammars/language-shellscript:
- source.shell
- text.shell-session
@@ -407,14 +451,18 @@ vendor/grammars/language-toc-wow:
- source.toc
vendor/grammars/language-turing:
- source.turing
vendor/grammars/language-viml:
- source.viml
vendor/grammars/language-wavefront:
- source.wavefront.mtl
- source.wavefront.obj
vendor/grammars/language-xbase:
- source.harbour
vendor/grammars/language-xcompose:
- config.xcompose
vendor/grammars/language-yaml:
- source.yaml
vendor/grammars/language-yang/:
vendor/grammars/language-yang:
```
 | 
			
		||||
- source.yang
 | 
			
		||||
vendor/grammars/latex.tmbundle:
 | 
			
		||||
- text.bibtex
 | 
			
		||||
@@ -441,6 +489,8 @@ vendor/grammars/make.tmbundle:
 | 
			
		||||
- source.makefile
 | 
			
		||||
vendor/grammars/mako-tmbundle:
 | 
			
		||||
- text.html.mako
 | 
			
		||||
vendor/grammars/marko-tmbundle:
 | 
			
		||||
- text.marko
 | 
			
		||||
vendor/grammars/mathematica-tmbundle:
 | 
			
		||||
- source.mathematica
 | 
			
		||||
vendor/grammars/matlab.tmbundle:
 | 
			
		||||
@@ -448,11 +498,11 @@ vendor/grammars/matlab.tmbundle:
 | 
			
		||||
- source.octave
 | 
			
		||||
vendor/grammars/maven.tmbundle:
 | 
			
		||||
- text.xml.pom
 | 
			
		||||
vendor/grammars/mediawiki.tmbundle/:
 | 
			
		||||
vendor/grammars/mediawiki.tmbundle:
 | 
			
		||||
- text.html.mediawiki
 | 
			
		||||
vendor/grammars/mercury-tmlanguage:
 | 
			
		||||
- source.mercury
 | 
			
		||||
vendor/grammars/monkey/:
 | 
			
		||||
vendor/grammars/monkey:
 | 
			
		||||
- source.monkey
 | 
			
		||||
vendor/grammars/moonscript-tmbundle:
 | 
			
		||||
- source.moonscript
 | 
			
		||||
@@ -460,8 +510,6 @@ vendor/grammars/nemerle.tmbundle:
 | 
			
		||||
- source.nemerle
 | 
			
		||||
vendor/grammars/nesC:
 | 
			
		||||
- source.nesc
 | 
			
		||||
vendor/grammars/ninja.tmbundle:
 | 
			
		||||
- source.ninja
 | 
			
		||||
vendor/grammars/nix:
 | 
			
		||||
- source.nix
 | 
			
		||||
vendor/grammars/nu.tmbundle:
 | 
			
		||||
@@ -480,26 +528,24 @@ vendor/grammars/ooc.tmbundle:
 | 
			
		||||
- source.ooc
 | 
			
		||||
vendor/grammars/opa.tmbundle:
 | 
			
		||||
- source.opa
 | 
			
		||||
vendor/grammars/openscad.tmbundle:
 | 
			
		||||
- source.scad
 | 
			
		||||
vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage:
 | 
			
		||||
- source.oz
 | 
			
		||||
vendor/grammars/parrot:
 | 
			
		||||
- source.parrot.pir
 | 
			
		||||
vendor/grammars/pascal.tmbundle:
 | 
			
		||||
- source.pascal
 | 
			
		||||
vendor/grammars/pawn-sublime-language/:
 | 
			
		||||
vendor/grammars/pawn-sublime-language:
 | 
			
		||||
- source.pawn
 | 
			
		||||
vendor/grammars/perl.tmbundle/:
 | 
			
		||||
vendor/grammars/perl.tmbundle:
 | 
			
		||||
- source.perl
 | 
			
		||||
- source.perl.6
 | 
			
		||||
vendor/grammars/perl6fe:
 | 
			
		||||
- source.meta-info
 | 
			
		||||
- source.perl6fe
 | 
			
		||||
- source.regexp.perl6fe
 | 
			
		||||
vendor/grammars/php-smarty.tmbundle:
 | 
			
		||||
- text.html.smarty
 | 
			
		||||
vendor/grammars/php.tmbundle:
 | 
			
		||||
- text.html.php
 | 
			
		||||
vendor/grammars/pig-latin/:
 | 
			
		||||
vendor/grammars/pig-latin:
 | 
			
		||||
- source.pig_latin
 | 
			
		||||
vendor/grammars/pike-textmate:
 | 
			
		||||
- source.pike
 | 
			
		||||
@@ -517,8 +563,10 @@ vendor/grammars/python-django.tmbundle:
 | 
			
		||||
vendor/grammars/r.tmbundle:
 | 
			
		||||
- source.r
 | 
			
		||||
- text.tex.latex.rd
 | 
			
		||||
vendor/grammars/ruby-haml.tmbundle:
 | 
			
		||||
- text.haml
 | 
			
		||||
vendor/grammars/rascal-syntax-highlighting:
 | 
			
		||||
- source.rascal
 | 
			
		||||
vendor/grammars/reason:
 | 
			
		||||
- source.reason
 | 
			
		||||
vendor/grammars/ruby-slim.tmbundle:
 | 
			
		||||
- text.slim
 | 
			
		||||
vendor/grammars/ruby.tmbundle:
 | 
			
		||||
@@ -538,11 +586,14 @@ vendor/grammars/scilab.tmbundle:
 | 
			
		||||
- source.scilab
 | 
			
		||||
vendor/grammars/secondlife-lsl:
 | 
			
		||||
- source.lsl
 | 
			
		||||
vendor/grammars/smali-sublime/smali.tmLanguage:
 | 
			
		||||
vendor/grammars/shaders-tmLanguage:
 | 
			
		||||
- source.hlsl
 | 
			
		||||
- source.shaderlab
 | 
			
		||||
vendor/grammars/smali-sublime:
 | 
			
		||||
- source.smali
 | 
			
		||||
vendor/grammars/smalltalk-tmbundle:
 | 
			
		||||
- source.smalltalk
 | 
			
		||||
vendor/grammars/sourcepawn/:
 | 
			
		||||
vendor/grammars/sourcepawn:
 | 
			
		||||
- source.sp
 | 
			
		||||
vendor/grammars/sql.tmbundle:
 | 
			
		||||
- source.sql
 | 
			
		||||
@@ -553,9 +604,9 @@ vendor/grammars/standard-ml.tmbundle:
 | 
			
		||||
- source.ml
 | 
			
		||||
vendor/grammars/sublime-MuPAD:
 | 
			
		||||
- source.mupad
 | 
			
		||||
vendor/grammars/sublime-aspectj/:
 | 
			
		||||
vendor/grammars/sublime-aspectj:
 | 
			
		||||
- source.aspectj
 | 
			
		||||
vendor/grammars/sublime-autoit/:
 | 
			
		||||
vendor/grammars/sublime-autoit:
 | 
			
		||||
- source.autoit
 | 
			
		||||
vendor/grammars/sublime-befunge:
 | 
			
		||||
- source.befunge
 | 
			
		||||
@@ -563,12 +614,12 @@ vendor/grammars/sublime-bsv:
 | 
			
		||||
- source.bsv
 | 
			
		||||
vendor/grammars/sublime-cirru:
 | 
			
		||||
- source.cirru
 | 
			
		||||
vendor/grammars/sublime-clips/:
 | 
			
		||||
vendor/grammars/sublime-clips:
 | 
			
		||||
- source.clips
 | 
			
		||||
vendor/grammars/sublime-glsl:
 | 
			
		||||
- source.essl
 | 
			
		||||
- source.glsl
 | 
			
		||||
vendor/grammars/sublime-golo/:
 | 
			
		||||
vendor/grammars/sublime-golo:
 | 
			
		||||
- source.golo
 | 
			
		||||
vendor/grammars/sublime-mask:
 | 
			
		||||
- source.mask
 | 
			
		||||
@@ -577,29 +628,29 @@ vendor/grammars/sublime-netlinx:
 | 
			
		||||
- source.netlinx.erb
 | 
			
		||||
vendor/grammars/sublime-nginx:
 | 
			
		||||
- source.nginx
 | 
			
		||||
vendor/grammars/sublime-opal/:
 | 
			
		||||
vendor/grammars/sublime-opal:
 | 
			
		||||
- source.opal
 | 
			
		||||
- source.opalsysdefs
 | 
			
		||||
vendor/grammars/sublime-pony:
 | 
			
		||||
- source.pony
 | 
			
		||||
vendor/grammars/sublime-rexx/:
 | 
			
		||||
vendor/grammars/sublime-rexx:
 | 
			
		||||
- source.rexx
 | 
			
		||||
vendor/grammars/sublime-robot-plugin:
 | 
			
		||||
- text.robot
 | 
			
		||||
vendor/grammars/sublime-rust:
 | 
			
		||||
- source.rust
 | 
			
		||||
vendor/grammars/sublime-spintools/:
 | 
			
		||||
vendor/grammars/sublime-spintools:
 | 
			
		||||
- source.regexp.spin
 | 
			
		||||
- source.spin
 | 
			
		||||
vendor/grammars/sublime-tea:
 | 
			
		||||
- source.tea
 | 
			
		||||
vendor/grammars/sublime-terra:
 | 
			
		||||
- source.terra
 | 
			
		||||
vendor/grammars/sublime-text-ox/:
 | 
			
		||||
vendor/grammars/sublime-text-ox:
 | 
			
		||||
- source.ox
 | 
			
		||||
vendor/grammars/sublime-typescript/:
 | 
			
		||||
vendor/grammars/sublime-typescript:
 | 
			
		||||
- source.ts
 | 
			
		||||
- source.tsx
 | 
			
		||||
- text.error-list
 | 
			
		||||
- text.find-refs
 | 
			
		||||
vendor/grammars/sublime-varnish:
 | 
			
		||||
- source.varnish.vcl
 | 
			
		||||
vendor/grammars/sublime_cobol:
 | 
			
		||||
@@ -607,9 +658,9 @@ vendor/grammars/sublime_cobol:
 | 
			
		||||
- source.cobol
 | 
			
		||||
- source.jcl
 | 
			
		||||
- source.opencobol
 | 
			
		||||
vendor/grammars/sublimeassembly/:
 | 
			
		||||
vendor/grammars/sublimeassembly:
 | 
			
		||||
- source.assembly
 | 
			
		||||
vendor/grammars/sublimeprolog/:
 | 
			
		||||
vendor/grammars/sublimeprolog:
 | 
			
		||||
- source.prolog
 | 
			
		||||
- source.prolog.eclipse
 | 
			
		||||
vendor/grammars/sublimetext-cuda-cpp:
 | 
			
		||||
@@ -632,12 +683,10 @@ vendor/grammars/vhdl:
 | 
			
		||||
- source.vhdl
 | 
			
		||||
vendor/grammars/vue-syntax-highlight:
 | 
			
		||||
- text.html.vue
 | 
			
		||||
vendor/grammars/xc.tmbundle/:
 | 
			
		||||
vendor/grammars/xc.tmbundle:
 | 
			
		||||
- source.xc
 | 
			
		||||
vendor/grammars/xml.tmbundle:
 | 
			
		||||
- text.xml
 | 
			
		||||
- text.xml.xsl
 | 
			
		||||
vendor/grammars/xquery:
 | 
			
		||||
- source.xquery
 | 
			
		||||
vendor/grammars/zephir-sublime:
 | 
			
		||||
- source.php.zephir
 | 
			
		||||
 
 | 
			
		||||
@@ -15,9 +15,9 @@ class << Linguist
  #       see Linguist::LazyBlob and Linguist::FileBlob for examples
  #
  # Returns Language or nil.
  def detect(blob)
  def detect(blob, allow_empty: false)
    # Bail early if the blob is binary or empty.
    return nil if blob.likely_binary? || blob.binary? || blob.empty?
    return nil if blob.likely_binary? || blob.binary? || (!allow_empty && blob.empty?)

    Linguist.instrument("linguist.detection", :blob => blob) do
      # Call each strategy until one candidate is returned.
@@ -59,8 +59,9 @@ class << Linguist
  # Strategies are called in turn until a single Language is returned.
  STRATEGIES = [
    Linguist::Strategy::Modeline,
    Linguist::Shebang,
    Linguist::Strategy::Filename,
    Linguist::Shebang,
    Linguist::Strategy::Extension,
    Linguist::Heuristics,
    Linguist::Classifier
  ]

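A minimal usage sketch of the strategy chain above, assuming the linguist gem is installed; the blob is this script's own file so the example stays self-contained.

require 'linguist'

# Wrap a real file in a FileBlob and run it through Linguist.detect,
# which walks the STRATEGIES list until a single candidate remains.
blob = Linguist::FileBlob.new(__FILE__, Dir.pwd)
language = Linguist.detect(blob)
puts language ? language.name : "unknown"   # => "Ruby" for a .rb script
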
@@ -63,7 +63,7 @@ module Linguist
    #
    # Returns an Array
    def extensions
      _, *segments = name.downcase.split(".")
      _, *segments = name.downcase.split(".", -1)

      segments.map.with_index do |segment, index|
        "." + segments[index..-1].join(".")

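A standalone sketch of the candidate-extension logic above (not the gem itself), showing why the -1 limit passed to String#split matters for names ending in a dot.

# Compute the cascade of candidate extensions for a filename.
def candidate_extensions(name)
  _, *segments = name.downcase.split(".", -1)
  segments.map.with_index do |_, index|
    "." + segments[index..-1].join(".")
  end
end

p candidate_extensions("foo.tar.gz")  # => [".tar.gz", ".gz"]
p candidate_extensions("trailing.")   # => ["."]  (plain split(".") would drop the empty segment)
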
@@ -95,7 +95,7 @@ module Linguist
    # Returns sorted Array of result pairs. Each pair contains the
    # String language name and a Float score.
    def classify(tokens, languages)
      return [] if tokens.nil?
      return [] if tokens.nil? || languages.empty?
      tokens = Tokenizer.tokenize(tokens) if tokens.is_a?(String)
      scores = {}

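A toy illustration of the early-return guards added to classify above; the tokenizer and scoring here are stand-ins, not the real classifier.

def classify(tokens, languages)
  return [] if tokens.nil? || languages.empty?
  tokens = tokens.split(/\s+/) if tokens.is_a?(String)  # stand-in for Tokenizer.tokenize
  languages.map { |lang| [lang, tokens.length.to_f] }   # dummy scores, result shape only
end

p classify(nil, ["Ruby"])       # => []
p classify("def foo; end", [])  # => []  (new guard: no candidate languages)
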
@@ -9,11 +9,12 @@

## Documentation directories ##

- ^docs?/
- ^[Dd]ocs?/
- (^|/)[Dd]ocumentation/
- (^|/)javadoc/
- ^man/
- (^|/)[Jj]avadoc/
- ^[Mm]an/
- ^[Ee]xamples/
- ^[Dd]emos?/

## Documentation files ##

@@ -27,4 +28,4 @@
- (^|/)[Rr]eadme(\.|$)

# Samples folders
- ^[Ss]amples/
- ^[Ss]amples?/

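A small sketch of how paths might be tested against the documentation patterns above; the regexps are copied from this diff, while the helper and sample paths are illustrative.

DOCUMENTATION_PATTERNS = [
  %r{^[Dd]ocs?/},
  %r{(^|/)[Dd]ocumentation/},
  %r{(^|/)[Jj]avadoc/},
  %r{^[Mm]an/},
  %r{^[Ss]amples?/}
].freeze

def documentation_path?(path)
  DOCUMENTATION_PATTERNS.any? { |re| re.match?(path) }
end

p documentation_path?("Docs/intro.md")    # => true
p documentation_path?("lib/linguist.rb")  # => false
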
@@ -3,7 +3,7 @@ module Linguist
    # Public: Is the blob a generated file?
    #
    # name - String filename
    # data - String blob data. A block also maybe passed in for lazy
    # data - String blob data. A block also may be passed in for lazy
    #        loading. This behavior is deprecated and you should always
    #        pass in a String.
    #
@@ -56,6 +56,7 @@ module Linguist
      generated_net_specflow_feature_file? ||
      composer_lock? ||
      node_modules? ||
      go_vendor? ||
      npm_shrinkwrap? ||
      godeps? ||
      generated_by_zephir? ||
@@ -69,6 +70,7 @@ module Linguist
      compiled_cython_file? ||
      generated_go? ||
      generated_protocol_buffer? ||
      generated_javascript_protocol_buffer? ||
      generated_apache_thrift? ||
      generated_jni_header? ||
      vcr_cassette? ||
@@ -76,7 +78,10 @@ module Linguist
      generated_unity3d_meta? ||
      generated_racc? ||
      generated_jflex? ||
      generated_grammarkit?
      generated_grammarkit? ||
      generated_roxygen2? ||
      generated_jison? ||
      generated_yarn_lock?
    end

    # Internal: Is the blob an Xcode file?
@@ -274,16 +279,25 @@ module Linguist
      return lines[0].include?("Generated by the protocol buffer compiler.  DO NOT EDIT!")
    end

    APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']
    # Internal: Is the blob a Javascript source file generated by the
    # Protocol Buffer compiler?
    #
    # Returns true of false.
    def generated_javascript_protocol_buffer?
      return false unless extname == ".js"
      return false unless lines.count > 6

      return lines[5].include?("GENERATED CODE -- DO NOT EDIT!")
    end

    APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp', '.php']

    # Internal: Is the blob generated by Apache Thrift compiler?
    #
    # Returns true or false
    def generated_apache_thrift?
      return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
      return false unless lines.count > 1

      return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
      return lines.first(6).any? { |l| l.include?("Autogenerated by Thrift Compiler") }
    end

    # Internal: Is the blob a C/C++ header generated by the Java JNI tool javah?
@@ -304,7 +318,15 @@ module Linguist
      !!name.match(/node_modules\//)
    end

    # Internal: Is the blob a generated npm shrinkwrap file.
    # Internal: Is the blob part of the Go vendor/ tree,
    # not meant for humans in pull requests.
    #
    # Returns true or false.
    def go_vendor?
      !!name.match(/vendor\/((?!-)[-0-9A-Za-z]+(?<!-)\.)+(com|edu|gov|in|me|net|org|fm|io)/)
    end

    # Internal: Is the blob a generated npm shrinkwrap file?
    #
    # Returns true or false.
    def npm_shrinkwrap?
@@ -326,7 +348,7 @@ module Linguist
      !!name.match(/composer\.lock/)
    end

    # Internal: Is the blob a generated by Zephir
    # Internal: Is the blob generated by Zephir?
    #
    # Returns true or false.
    def generated_by_zephir?
@@ -426,5 +448,46 @@ module Linguist
      return false unless lines.count > 1
      return lines[0].start_with?("// This is a generated file. Not intended for manual editing.")
    end

    # Internal: Is this a roxygen2-generated file?
    #
    # A roxygen2-generated file typically contain:
    # % Generated by roxygen2: do not edit by hand
    # on the first line.
    #
    # Return true or false
    def generated_roxygen2?
      return false unless extname == '.Rd'
      return false unless lines.count > 1

      return lines[0].include?("% Generated by roxygen2: do not edit by hand")
    end

    # Internal: Is this a Jison-generated file?
    #
    # Jison-generated parsers typically contain:
    # /* parser generated by jison
    # on the first line.
    #
    # Jison-generated lexers typically contain:
    # /* generated by jison-lex
    # on the first line.
    #
    # Return true or false
    def generated_jison?
      return false unless extname == '.js'
      return false unless lines.count > 1
      return lines[0].start_with?("/* parser generated by jison ") ||
             lines[0].start_with?("/* generated by jison-lex ")
    end

    # Internal: Is the blob a generated yarn lockfile?
    #
    # Returns true or false.
    def generated_yarn_lock?
      return false unless name.match(/yarn\.lock/)
      return false unless lines.count > 0
      return lines[0].include?("# THIS IS AN AUTOGENERATED FILE")
    end
  end
end

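A standalone sketch of the yarn.lock check added above; the real method lives on Linguist's blob wrapper, so taking name and lines as plain arguments here is an assumption for illustration.

def generated_yarn_lock?(name, lines)
  return false unless name =~ /yarn\.lock/
  return false unless lines.count > 0
  lines[0].include?("# THIS IS AN AUTOGENERATED FILE")
end

lockfile = ["# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.",
            "# yarn lockfile v1"]
p generated_yarn_lock?("yarn.lock", lockfile)     # => true
p generated_yarn_lock?("Gemfile.lock", lockfile)  # => false
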
@@ -110,6 +110,12 @@ module Linguist
      end
    end

    disambiguate ".cls" do |data|
      if /\\\w+{/.match(data)
        Language["TeX"]
      end
    end

    disambiguate ".cs" do |data|
      if /![\w\s]+methodsFor: /.match(data)
        Language["Smalltalk"]
@@ -144,10 +150,22 @@ module Linguist
      end
    end

    disambiguate ".for", ".f" do |data|
    fortran_rx = /^([c*][^abd-z]|      (subroutine|program|end|data)\s|\s*!)/i

    disambiguate ".f" do |data|
      if /^: /.match(data)
        Language["Forth"]
      elsif /^([c*][^abd-z]|      (subroutine|program|end)\s|\s*!)/i.match(data)
      elsif data.include?("flowop")
        Language["Filebench WML"]
      elsif fortran_rx.match(data)
        Language["FORTRAN"]
      end
    end

    disambiguate ".for" do |data|
      if /^: /.match(data)
        Language["Forth"]
      elsif fortran_rx.match(data)
        Language["FORTRAN"]
      end
    end
@@ -190,6 +208,8 @@ module Linguist
    disambiguate ".inc" do |data|
      if /^<\?(?:php)?/.match(data)
        Language["PHP"]
      elsif /^\s*#(declare|local|macro|while)\s/.match(data)
        Language["POV-Ray SDL"]
      end
    end

@@ -230,7 +250,7 @@ module Linguist
        Language["MUF"]
      elsif /^\s*;/.match(data)
        Language["M"]
      elsif /^\s*\(\*/.match(data)
      elsif /\*\)$/.match(data)
        Language["Mathematica"]
      elsif /^\s*%/.match(data)
        Language["Matlab"]
@@ -240,10 +260,12 @@ module Linguist
    end

    disambiguate ".md" do |data|
      if /^[-a-z0-9=#!\*\[|]/i.match(data)
      if /(^[-a-z0-9=#!\*\[|>])|<\//i.match(data) || data.empty?
        Language["Markdown"]
      elsif /^(;;|\(define_)/.match(data)
        Language["GCC machine description"]
      else
        Language["Markdown"]
      end
    end

@@ -258,7 +280,7 @@ module Linguist
    disambiguate ".mod" do |data|
      if data.include?('<!ENTITY ')
        Language["XML"]
      elsif /MODULE\s\w+\s*;/i.match(data) || /^\s*END \w+;$/i.match(data)
      elsif /^\s*MODULE [\w\.]+;/i.match(data) || /^\s*END [\w\.]+;/i.match(data)
        Language["Modula-2"]
      else
        [Language["Linux Kernel Module"], Language["AMPL"]]
@@ -306,7 +328,7 @@ module Linguist
    end

    disambiguate ".pl" do |data|
      if /^[^#]+:-/.match(data)
      if /^[^#]*:-/.match(data)
        Language["Prolog"]
      elsif /use strict|use\s+v?5\./.match(data)
        Language["Perl"]
@@ -354,7 +376,7 @@ module Linguist
    disambiguate ".r" do |data|
      if /\bRebol\b/i.match(data)
        Language["Rebol"]
      elsif data.include?("<-")
      elsif /<-|^\s*#/.match(data)
        Language["R"]
      end
    end
@@ -430,7 +452,7 @@ module Linguist
    end

    disambiguate ".ts" do |data|
      if data.include?("</TS>")
      if data.include?("<TS")
        Language["XML"]
      else
        Language["TypeScript"]
@@ -445,5 +467,13 @@ module Linguist
        Language["Scilab"]
      end
    end

    disambiguate ".tsx" do |data|
      if /^\s*(import.+(from\s+|require\()['"]react|\/\/\/\s*<reference\s)/.match(data)
        Language["TypeScript"]
      elsif /^\s*<\?xml\s+version/i.match(data)
        Language["XML"]
      end
    end
  end
end

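The .md rule above as a standalone sketch: anything that looks like prose or HTML is treated as Markdown, while ";;" or "(define_" openers point at GCC machine descriptions.

def md_language(data)
  if /(^[-a-z0-9=#!\*\[|>])|<\//i.match(data) || data.empty?
    "Markdown"
  elsif /^(;;|\(define_)/.match(data)
    "GCC machine description"
  else
    "Markdown"
  end
end

p md_language("# Title")                     # => "Markdown"
p md_language(";; GCC machine description")  # => "GCC machine description"
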
@@ -11,6 +11,7 @@ require 'linguist/samples'
require 'linguist/file_blob'
require 'linguist/blob_helper'
require 'linguist/strategy/filename'
require 'linguist/strategy/extension'
require 'linguist/strategy/modeline'
require 'linguist/shebang'

@@ -20,10 +21,11 @@ module Linguist
  #
  # Languages are defined in `lib/linguist/languages.yml`.
  class Language
    @languages       = []
    @index           = {}
    @name_index      = {}
    @alias_index     = {}
    @languages          = []
    @index              = {}
    @name_index         = {}
    @alias_index        = {}
    @language_id_index  = {}

    @extension_index          = Hash.new { |h,k| h[k] = [] }
    @interpreter_index        = Hash.new { |h,k| h[k] = [] }
@@ -84,18 +86,9 @@ module Linguist
        @filename_index[filename] << language
      end

      language
    end
      @language_id_index[language.language_id] = language

    # Public: Detects the Language of the blob.
    #
    # blob - an object that includes the Linguist `BlobHelper` interface;
    #       see Linguist::LazyBlob and Linguist::FileBlob for examples
    #
    # Returns Language or nil.
    def self.detect(blob)
      warn "[DEPRECATED] `Linguist::Language.detect` is deprecated. Use `Linguist.detect`. #{caller[0]}"
      Linguist.detect(blob)
      language
    end

    # Public: Get all Languages
@@ -137,46 +130,46 @@ module Linguist

    # Public: Look up Languages by filename.
    #
    # The behaviour of this method recently changed.
    # See the second example below.
    #
    # filename - The path String.
    #
    # Examples
    #
    #   Language.find_by_filename('Cakefile')
    #   # => [#<Language name="CoffeeScript">]
    #   Language.find_by_filename('foo.rb')
    #   # => [#<Language name="Ruby">]
    #   # => []
    #
    # Returns all matching Languages or [] if none were found.
    def self.find_by_filename(filename)
      basename = File.basename(filename)

      # find the first extension with language definitions
      extname = FileBlob.new(filename).extensions.detect do |e|
        !@extension_index[e].empty?
      end

      (@filename_index[basename] + @extension_index[extname]).compact.uniq
      @filename_index[basename]
    end

    # Public: Look up Languages by file extension.
    #
    # extname - The extension String.
    # The behaviour of this method recently changed.
    # See the second example below.
    #
    # filename - The path String.
    #
    # Examples
    #
    #   Language.find_by_extension('.rb')
    #   Language.find_by_extension('dummy.rb')
    #   # => [#<Language name="Ruby">]
    #
    #   Language.find_by_extension('rb')
    #   # => [#<Language name="Ruby">]
    #   # => []
    #
    # Returns all matching Languages or [] if none were found.
    def self.find_by_extension(extname)
      extname = ".#{extname}" unless extname.start_with?(".")
      @extension_index[extname.downcase]
    end
    def self.find_by_extension(filename)
      # find the first extension with language definitions
      extname = FileBlob.new(filename.downcase).extensions.detect do |e|
        !@extension_index[e].empty?
      end

    # DEPRECATED
    def self.find_by_shebang(data)
      @interpreter_index[Shebang.interpreter(data)]
      @extension_index[extname]
    end

    # Public: Look up Languages by interpreter.
@@ -193,6 +186,19 @@ module Linguist
      @interpreter_index[interpreter]
    end

    # Public: Look up Languages by its language_id.
    #
    # language_id - Integer of language_id
    #
    # Examples
    #
    #   Language.find_by_id(100)
    #   # => [#<Language name="Elixir">]
    #
    # Returns the matching Language
    def self.find_by_id(language_id)
      @language_id_index[language_id.to_i]
    end

    # Public: Look up Language by its name.
    #
@@ -209,7 +215,14 @@ module Linguist
    # Returns the Language or nil if none was found.
    def self.[](name)
      return nil if name.to_s.empty?
      name && (@index[name.downcase] || @index[name.split(',').first.downcase])

      lang = @index[name.downcase]
      return lang if lang

      name = name.split(',').first
      return nil if name.to_s.empty?

      @index[name.downcase]
    end

    # Public: A List of popular languages
@@ -243,17 +256,6 @@ module Linguist
      @colors ||= all.select(&:color).sort_by { |lang| lang.name.downcase }
    end

    # Public: A List of languages compatible with Ace.
    #
    # TODO: Remove this method in a 5.x release. Every language now needs an ace_mode
    # key, so this function isn't doing anything unique anymore.
    #
    # Returns an Array of Languages.
    def self.ace_modes
      warn "This method will be deprecated in a future 5.x release. Every language now has an `ace_mode` set."
      @ace_modes ||= all.select(&:ace_mode).sort_by { |lang| lang.name.downcase }
    end

    # Internal: Initialize a new Language
    #
    # attributes - A hash of attributes
@@ -270,7 +272,7 @@ module Linguist
      @color = attributes[:color]

      # Set aliases
      @aliases = [default_alias_name] + (attributes[:aliases] || [])
      @aliases = [default_alias] + (attributes[:aliases] || [])

      # Load the TextMate scope name or try to guess one
      @tm_scope = attributes[:tm_scope] || begin
@@ -284,10 +286,12 @@ module Linguist
      end

      @ace_mode = attributes[:ace_mode]
      @codemirror_mode = attributes[:codemirror_mode]
      @codemirror_mime_type = attributes[:codemirror_mime_type]
      @wrap = attributes[:wrap] || false

      # Set legacy search term
      @search_term = attributes[:search_term] || default_alias_name
      # Set the language_id
      @language_id = attributes[:language_id]

      # Set extensions or default to [].
      @extensions = attributes[:extensions] || []
@@ -340,16 +344,16 @@ module Linguist
    # Returns an Array of String names
    attr_reader :aliases

    # Deprecated: Get code search term
    # Public: Get language_id (used in GitHub search)
    #
    # Examples
    #
    #   # => "ruby"
    #   # => "python"
    #   # => "perl"
    #   # => "1"
    #   # => "2"
    #   # => "3"
    #
    # Returns the name String
    attr_reader :search_term
    # Returns the integer language_id
    attr_reader :language_id

    # Public: Get the name of a TextMate-compatible scope
    #
@@ -367,6 +371,31 @@ module Linguist
    # Returns a String name or nil
    attr_reader :ace_mode

    # Public: Get CodeMirror mode
    #
    # Maps to a directory in the `mode/` source code.
    #   https://github.com/codemirror/CodeMirror/tree/master/mode
    #
    # Examples
    #
    #  # => "nil"
    #  # => "javascript"
    #  # => "clike"
    #
    # Returns a String name or nil
    attr_reader :codemirror_mode

    # Public: Get CodeMirror MIME type mode
    #
    # Examples
    #
    #  # => "nil"
    #  # => "text/x-javascript"
    #  # => "text/x-csrc"
    #
    # Returns a String name or nil
    attr_reader :codemirror_mime_type

    # Public: Should language lines be wrapped
    #
    # Returns true or false
@@ -399,22 +428,6 @@ module Linguist
    # Returns the extensions Array
    attr_reader :filenames

    # Deprecated: Get primary extension
    #
    # Defaults to the first extension but can be overridden
    # in the languages.yml.
    #
    # The primary extension can not be nil. Tests should verify this.
    #
    # This method is only used by app/helpers/gists_helper.rb for creating
    # the language dropdown. It really should be using `name` instead.
    # Would like to drop primary extension.
    #
    # Returns the extension String.
    def primary_extension
      extensions.first
    end

    # Public: Get URL escaped name.
    #
    # Examples
@@ -428,12 +441,13 @@ module Linguist
      EscapeUtils.escape_url(name).gsub('+', '%20')
    end

    # Internal: Get default alias name
    # Public: Get default alias name
    #
    # Returns the alias name String
    def default_alias_name
    def default_alias
      name.downcase.gsub(/\s/, '-')
    end
    alias_method :default_alias_name, :default_alias

    # Public: Get Language group
    #
@@ -543,10 +557,12 @@ module Linguist
      :aliases           => options['aliases'],
      :tm_scope          => options['tm_scope'],
      :ace_mode          => options['ace_mode'],
      :codemirror_mode   => options['codemirror_mode'],
      :codemirror_mime_type => options['codemirror_mime_type'],
      :wrap              => options['wrap'],
      :group_name        => options['group'],
      :searchable        => options.fetch('searchable', true),
      :search_term       => options['search_term'],
      :language_id       => options['language_id'],
      :extensions        => Array(options['extensions']),
      :interpreters      => options['interpreters'].sort,
      :filenames         => options['filenames'],

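A quick sketch of the Language lookups touched above, assuming the linguist gem is installed; the language_id 326 is whatever languages.yml assigns to Ruby, so treat that number as an assumption.

require 'linguist'

p Linguist::Language['Ruby']                      # name lookup
p Linguist::Language.find_by_extension('foo.rb')  # now takes a filename, returns an Array
p Linguist::Language.find_by_filename('Rakefile') # filename-only lookup after this change
p Linguist::Language.find_by_id(326)              # language_id lookup added in this diff
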
										
											
(File diff suppressed because it is too large)
@@ -26,4 +26,4 @@
- Shell
- Swift
- TeX
- VimL
- Vim script

lib/linguist/strategy/extension.rb (new file, 10 lines)
@@ -0,0 +1,10 @@
module Linguist
  module Strategy
    # Detects language based on extension
    class Extension
      def self.call(blob, _)
        Language.find_by_extension(blob.name.to_s)
      end
    end
  end
end
@@ -1,9 +1,10 @@
module Linguist
  module Strategy
    # Detects language based on filename and/or extension
    # Detects language based on filename
    class Filename
      def self.call(blob, _)
        Language.find_by_filename(blob.name.to_s)
        name = blob.name.to_s
        Language.find_by_filename(name)
      end
    end
  end

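Strategies share a call(blob, candidates) interface; a sketch of invoking the new Extension strategy directly, assuming a linguist release that ships it. Only blob.name is consulted, so the file need not exist on disk; "hello.ts" is a hypothetical name.

require 'linguist'

blob = Linguist::FileBlob.new("hello.ts", Dir.pwd)
p Linguist::Strategy::Extension.call(blob, [])  # candidates registered for .ts, e.g. TypeScript and XML
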
@@ -1,19 +1,98 @@
module Linguist
  module Strategy
    class Modeline
      EMACS_MODELINE = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
      EMACS_MODELINE = /
        -\*-
        (?:
          # Short form: `-*- ruby -*-`
          \s* (?= [^:;\s]+ \s* -\*-)
          |
          # Longer form: `-*- foo:bar; mode: ruby; -*-`
          (?:
            .*?       # Preceding variables: `-*- foo:bar bar:baz;`
            [;\s]     # Which are delimited by spaces or semicolons
            |
            (?<=-\*-) # Not preceded by anything: `-*-mode:ruby-*-`
          )
          mode        # Major mode indicator
          \s*:\s*     # Allow whitespace around colon: `mode : ruby`
        )
        ([^:;\s]+)    # Name of mode

      # First form vim modeline
      # [text]{white}{vi:|vim:|ex:}[white]{options}
      # ex: 'vim: syntax=ruby'
      VIM_MODELINE_1 = /(?:vim|vi|ex):\s*(?:ft|filetype|syntax)=(\w+)\s?/i
        # Ensure the mode is terminated correctly
        (?=
          # Followed by semicolon or whitespace
          [\s;]
          |
          # Touching the ending sequence: `ruby-*-`
          (?<![-*])   # Don't allow stuff like `ruby--*-` to match; it'll invalidate the mode
          -\*-        # Emacs has no problems reading `ruby --*-`, however.
        )
        .*?           # Anything between a cleanly-terminated mode and the ending -*-
        -\*-
      /xi

      # Second form vim modeline (compatible with some versions of Vi)
      # [text]{white}{vi:|vim:|Vim:|ex:}[white]se[t] {options}:[text]
      # ex: 'vim set syntax=ruby:'
      VIM_MODELINE_2 = /(?:vim|vi|Vim|ex):\s*se(?:t)?.*\s(?:ft|filetype|syntax)=(\w+)\s?.*:/i
      VIM_MODELINE   = /

      MODELINES = [EMACS_MODELINE, VIM_MODELINE_1, VIM_MODELINE_2]
        # Start modeline. Could be `vim:`, `vi:` or `ex:`
        (?:
          (?:\s|^)
          vi
          (?:m[<=>]?\d+|m)? # Version-specific modeline
          |
          [\t\x20] # `ex:` requires whitespace, because "ex:" might be short for "example:"
          ex
        )

        # If the option-list begins with `set ` or `se `, it indicates an alternative
        # modeline syntax partly-compatible with older versions of Vi. Here, the colon
        # serves as a terminator for an option sequence, delimited by whitespace.
        (?=
          # So we have to ensure the modeline ends with a colon
          : (?=\s* set? \s [^\n:]+ :) |

          # Otherwise, it isn't valid syntax and should be ignored
          : (?!\s* set? \s)
        )

        # Possible (unrelated) `option=value` pairs to skip past
        (?:
          # Option separator. Vim uses whitespace or colons to separate options (except if
          # the alternate "vim: set " form is used, where only whitespace is used)
          (?:
            \s
            |
            \s* : \s* # Note that whitespace around colons is accepted too:
          )           # vim: noai :  ft=ruby:noexpandtab

          # Option's name. All recognised Vim options have an alphanumeric form.
          \w*

          # Possible value. Not every option takes an argument.
          (?:
            # Whitespace between name and value is allowed: `vim: ft   =ruby`
            \s*=

            # Option's value. Might be blank; `vim: ft= ` says "use no filetype".
            (?:
              [^\\\s] # Beware of escaped characters: titlestring=\ ft=ruby
              |       # will be read by Vim as { titlestring: " ft=ruby" }.
              \\.
            )*
          )?
        )*

        # The actual filetype declaration
        [\s:] (?:filetype|ft|syntax) \s*=

        # Language's name
        (\w+)

        # Ensure it's followed by a legal separator
        (?=\s|:|$)
      /xi

      MODELINES = [EMACS_MODELINE, VIM_MODELINE]

      # Scope of the search for modelines
      # Number of lines to check at the beginning and at the end of the file

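A sketch of feeding modeline-style headers through the rewritten patterns above, assuming a linguist version that ships them; Modeline.modeline returns the captured mode name.

require 'linguist'

p Linguist::Strategy::Modeline.modeline("# -*- mode: ruby; coding: utf-8 -*-")  # => "ruby"
p Linguist::Strategy::Modeline.modeline("# vim: set ft=ruby ts=2 sw=2:")        # => "ruby"
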
@@ -15,6 +15,9 @@
# Dependencies
- ^[Dd]ependencies/

# Distributions
- (^|/)dist/

# C deps
#  https://github.com/joyent/node
- ^deps/
@@ -47,6 +50,9 @@
# Go dependencies
- Godeps/_workspace/

# GNU indent profiles
- .indent.pro

# Minified JavaScript and CSS
- (\.|-)min\.(js|css)$

@@ -165,7 +171,7 @@
# Chart.js
- (^|/)Chart\.js$

# Codemirror
# CodeMirror
- (^|/)[Cc]ode[Mm]irror/(\d+\.\d+/)?(lib|mode|theme|addon|keymap|demo)

# SyntaxHighlighter - http://alexgorbatchev.com/
@@ -229,6 +235,15 @@
# Fabric
- Fabric.framework/

# BuddyBuild
- BuddyBuildSDK.framework/

# Realm
- Realm.framework

# RealmSwift
- RealmSwift.framework

# git config files
- gitattributes$
- gitignore$

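A sketch of testing paths against a few of the vendor patterns above; the excerpted regexps come from this diff, while the helper and sample paths are illustrative.

VENDOR_PATTERNS = [
  %r{(^|/)dist/},
  /(\.|-)min\.(js|css)$/,
  %r{(^|/)BuddyBuildSDK\.framework/}
].freeze

def vendored?(path)
  VENDOR_PATTERNS.any? { |re| re.match?(path) }
end

p vendored?("build/dist/app.js")    # => true
p vendored?("assets/site.min.css")  # => true
p vendored?("lib/linguist.rb")      # => false
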
@@ -1,3 +1,3 @@
module Linguist
  VERSION = "4.8.9"
  VERSION = "5.0.7"
end

@@ -1,7 +1,7 @@
{
  "repository": "https://github.com/github/linguist",
  "dependencies": {
    "season": "~>5.0"
    "season": "~>5.4"
  },
  "license": "MIT"
}

							
								
								
									
samples/ABNF/toml.abnf (new file, 190 lines)
@@ -0,0 +1,190 @@
; Source:  https://github.com/toml-lang/toml
; License: MIT

;; This is an attempt to define TOML in ABNF according to the grammar defined
;; in RFC 4234 (http://www.ietf.org/rfc/rfc4234.txt).

;; TOML

toml = expression *( newline expression )
expression = (
  ws /
  ws comment /
  ws keyval ws [ comment ] /
  ws table ws [ comment ]
)

;; Newline

newline = (
  %x0A /              ; LF
  %x0D.0A             ; CRLF
)

newlines = 1*newline

;; Whitespace

ws = *(
  %x20 /              ; Space
  %x09                ; Horizontal tab
)

;; Comment

comment-start-symbol = %x23 ; #
non-eol = %x09 / %x20-10FFFF
comment = comment-start-symbol *non-eol

;; Key-Value pairs

keyval-sep = ws %x3D ws ; =
keyval = key keyval-sep val

key = unquoted-key / quoted-key
unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _
quoted-key = quotation-mark 1*basic-char quotation-mark ; See Basic Strings

val = integer / float / string / boolean / date-time / array / inline-table

;; Table

table = std-table / array-table

;; Standard Table

std-table-open  = %x5B ws     ; [ Left square bracket
std-table-close = ws %x5D     ; ] Right square bracket
table-key-sep   = ws %x2E ws  ; . Period

std-table = std-table-open key *( table-key-sep key) std-table-close

;; Array Table

array-table-open  = %x5B.5B ws  ; [[ Double left square bracket
array-table-close = ws %x5D.5D  ; ]] Double right square bracket

array-table = array-table-open key *( table-key-sep key) array-table-close

;; Integer

integer = [ minus / plus ] int
minus = %x2D                       ; -
plus = %x2B                        ; +
digit1-9 = %x31-39                 ; 1-9
underscore = %x5F                  ; _
int = DIGIT / digit1-9 1*( DIGIT / underscore DIGIT )

;; Float

float = integer ( frac / frac exp / exp )
zero-prefixable-int = DIGIT *( DIGIT / underscore DIGIT )
frac = decimal-point zero-prefixable-int
decimal-point = %x2E               ; .
exp = e integer
e = %x65 / %x45                    ; e E

;; String

string = basic-string / ml-basic-string / literal-string / ml-literal-string

;; Basic String

basic-string = quotation-mark *basic-char quotation-mark

quotation-mark = %x22            ; "

basic-char = basic-unescaped / escaped
escaped = escape ( %x22 /          ; "    quotation mark  U+0022
                   %x5C /          ; \    reverse solidus U+005C
                   %x2F /          ; /    solidus         U+002F
                   %x62 /          ; b    backspace       U+0008
                   %x66 /          ; f    form feed       U+000C
                   %x6E /          ; n    line feed       U+000A
                   %x72 /          ; r    carriage return U+000D
                   %x74 /          ; t    tab             U+0009
                   %x75 4HEXDIG /  ; uXXXX                U+XXXX
                   %x55 8HEXDIG )  ; UXXXXXXXX            U+XXXXXXXX

basic-unescaped = %x20-21 / %x23-5B / %x5D-10FFFF

escape = %x5C                    ; \

;; Multiline Basic String

ml-basic-string-delim = quotation-mark quotation-mark quotation-mark
ml-basic-string = ml-basic-string-delim ml-basic-body ml-basic-string-delim
ml-basic-body = *( ml-basic-char / newline / ( escape newline ))

ml-basic-char = ml-basic-unescaped / escaped
ml-basic-unescaped = %x20-5B / %x5D-10FFFF

;; Literal String

literal-string = apostraphe *literal-char apostraphe

apostraphe = %x27 ; ' Apostrophe

literal-char = %x09 / %x20-26 / %x28-10FFFF

;; Multiline Literal String

ml-literal-string-delim = apostraphe apostraphe apostraphe
ml-literal-string = ml-literal-string-delim ml-literal-body ml-literal-string-delim

ml-literal-body = *( ml-literal-char / newline )
ml-literal-char = %x09 / %x20-10FFFF

;; Boolean

boolean = true / false
true    = %x74.72.75.65     ; true
false   = %x66.61.6C.73.65  ; false

;; Datetime (as defined in RFC 3339)

date-fullyear  = 4DIGIT
date-month     = 2DIGIT  ; 01-12
date-mday      = 2DIGIT  ; 01-28, 01-29, 01-30, 01-31 based on month/year
time-hour      = 2DIGIT  ; 00-23
time-minute    = 2DIGIT  ; 00-59
time-second    = 2DIGIT  ; 00-58, 00-59, 00-60 based on leap second rules
time-secfrac   = "." 1*DIGIT
time-numoffset = ( "+" / "-" ) time-hour ":" time-minute
time-offset    = "Z" / time-numoffset

partial-time   = time-hour ":" time-minute ":" time-second [time-secfrac]
full-date      = date-fullyear "-" date-month "-" date-mday
full-time      = partial-time time-offset

date-time      = full-date "T" full-time

;; Array

array-open  = %x5B ws  ; [
array-close = ws %x5D  ; ]

array = array-open array-values array-close

array-values = [ val [ array-sep ] [ ( comment newlines) / newlines ] /
                 val array-sep [ ( comment newlines) / newlines ] array-values ]

array-sep = ws %x2C ws  ; , Comma

;; Inline Table

inline-table-open  = %x7B ws     ; {
inline-table-close = ws %x7D     ; }
inline-table-sep   = ws %x2C ws  ; , Comma

inline-table = inline-table-open inline-table-keyvals inline-table-close

inline-table-keyvals = [ inline-table-keyvals-non-empty ]
inline-table-keyvals-non-empty = key keyval-sep val /
                                 key keyval-sep val inline-table-sep inline-table-keyvals-non-empty

;; Built-in ABNF terms, reproduced here for clarity

; ALPHA = %x41-5A / %x61-7A ; A-Z / a-z
; DIGIT = %x30-39 ; 0-9
; HEXDIG = DIGIT / "A" / "B" / "C" / "D" / "E" / "F"
							
								
								
									
										46
									
								
								samples/C++/bug1163046.--skeleton.re
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										46
									
								
								samples/C++/bug1163046.--skeleton.re
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,46 @@
 | 
			
		||||
#include <iostream>
 | 
			
		||||
 | 
			
		||||
#define YYCTYPE unsigned char
 | 
			
		||||
#define YYCURSOR cursor
 | 
			
		||||
#define YYLIMIT cursor
 | 
			
		||||
#define YYMARKER marker
 | 
			
		||||
#define YYFILL(n)
 | 
			
		||||
 | 
			
		||||
bool scan(const char *text)
 | 
			
		||||
{
 | 
			
		||||
	YYCTYPE *start = (YYCTYPE *)text;
 | 
			
		||||
	YYCTYPE *cursor = (YYCTYPE *)text;
 | 
			
		||||
	YYCTYPE *marker = (YYCTYPE *)text;
 | 
			
		||||
next:
 | 
			
		||||
	YYCTYPE *token = cursor;
 | 
			
		||||
/*!re2c
 | 
			
		||||
'(This file must be converted with BinHex 4.0)'
 | 
			
		||||
	{
 | 
			
		||||
		if (token == start || *(token - 1) == '\n')
 | 
			
		||||
		return true; else goto next;
 | 
			
		||||
	}
 | 
			
		||||
[\001-\377]
 | 
			
		||||
	{ goto next; }
 | 
			
		||||
[\000]
 | 
			
		||||
	{ return false; }
 | 
			
		||||
*/
 | 
			
		||||
	return false;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
#define do_scan(str, expect) \
 | 
			
		||||
	res = scan(str) == expect ? 0 : 1; \
 | 
			
		||||
	std::cerr << str << "\t-\t" << (res ? "fail" : "ok") << std::endl; \
 | 
			
		||||
	result += res
 | 
			
		||||
 | 
			
		||||
/*!max:re2c */
 | 
			
		||||
 | 
			
		||||
int main(int,void**)
 | 
			
		||||
{
 | 
			
		||||
	int res, result = 0;
 | 
			
		||||
	do_scan("(This file must be converted with BinHex 4.0)", 1);
 | 
			
		||||
	do_scan("x(This file must be converted with BinHex 4.0)", 0);
 | 
			
		||||
	do_scan("(This file must be converted with BinHex 4.0)x", 1);
 | 
			
		||||
	do_scan("x(This file must be converted with BinHex 4.0)x", 0);
 | 
			
		||||
	
 | 
			
		||||
	return result;
 | 
			
		||||
}

samples/C++/cnokw.re (new file, 239 lines)
@@ -0,0 +1,239 @@
#include <stdlib.h>
#include <stdio.h>
#include <string.h>

#define	ADDEQ	257
#define	ANDAND	258
#define	ANDEQ	259
#define	ARRAY	260
#define	ASM	261
#define	AUTO	262
#define	BREAK	263
#define	CASE	264
#define	CHAR	265
#define	CONST	266
#define	CONTINUE	267
#define	DECR	268
#define	DEFAULT	269
#define	DEREF	270
#define	DIVEQ	271
#define	DO	272
#define	DOUBLE	273
#define	ELLIPSIS	274
#define	ELSE	275
#define	ENUM	276
#define	EQL	277
#define	EXTERN	278
#define	FCON	279
#define	FLOAT	280
#define	FOR	281
#define	FUNCTION	282
#define	GEQ	283
#define	GOTO	284
#define	ICON	285
#define	ID	286
#define	IF	287
#define	INCR	288
#define	INT	289
#define	LEQ	290
#define	LONG	291
#define	LSHIFT	292
#define	LSHIFTEQ	293
#define	MODEQ	294
#define	MULEQ	295
#define	NEQ	296
#define	OREQ	297
#define	OROR	298
#define	POINTER	299
#define	REGISTER	300
#define	RETURN	301
#define	RSHIFT	302
#define	RSHIFTEQ	303
#define	SCON	304
#define	SHORT	305
#define	SIGNED	306
#define	SIZEOF	307
#define	STATIC	308
#define	STRUCT	309
#define	SUBEQ	310
#define	SWITCH	311
#define	TYPEDEF	312
#define	UNION	313
#define	UNSIGNED	314
#define	VOID	315
#define	VOLATILE	316
#define	WHILE	317
#define	XOREQ	318
#define	EOI	319

typedef unsigned int uint;
typedef unsigned char uchar;

#define	BSIZE	8192

#define	YYCTYPE		uchar
#define	YYCURSOR	cursor
#define	YYLIMIT		s->lim
#define	YYMARKER	s->ptr
#define	YYFILL(n)	{cursor = fill(s, cursor);}

#define	RET(i)	{s->cur = cursor; return i;}

typedef struct Scanner {
    int			fd;
    uchar		*bot, *tok, *ptr, *cur, *pos, *lim, *top, *eof;
    uint		line;
} Scanner;

uchar *fill(Scanner *s, uchar *cursor){
    if(!s->eof){
	uint cnt = s->tok - s->bot;
	if(cnt){
	    memcpy(s->bot, s->tok, s->lim - s->tok);
	    s->tok = s->bot;
	    s->ptr -= cnt;
	    cursor -= cnt;
	    s->pos -= cnt;
	    s->lim -= cnt;
	}
	if((s->top - s->lim) < BSIZE){
	    uchar *buf = (uchar*) malloc(((s->lim - s->bot) + BSIZE)*sizeof(uchar));
	    memcpy(buf, s->tok, s->lim - s->tok);
	    s->tok = buf;
	    s->ptr = &buf[s->ptr - s->bot];
	    cursor = &buf[cursor - s->bot];
	    s->pos = &buf[s->pos - s->bot];
	    s->lim = &buf[s->lim - s->bot];
	    s->top = &s->lim[BSIZE];
	    free(s->bot);
	    s->bot = buf;
	}
	if((cnt = read(s->fd, (char*) s->lim, BSIZE)) != BSIZE){
	    s->eof = &s->lim[cnt]; *(s->eof)++ = '\n';
	}
	s->lim += cnt;
    }
    return cursor;
}

int scan(Scanner *s){
	uchar *cursor = s->cur;
std:
	s->tok = cursor;
/*!re2c
any	= [\000-\377];
O	= [0-7];
D	= [0-9];
L	= [a-zA-Z_];
H	= [a-fA-F0-9];
E	= [Ee] [+-]? D+;
FS	= [fFlL];
IS	= [uUlL]*;
ESC	= [\\] ([abfnrtv?'"\\] | "x" H+ | O+);
*/

/*!re2c
	"/*"			{ goto comment; }

	L (L|D)*		{ RET(ID); }

	("0" [xX] H+ IS?) | ("0" D+ IS?) | (D+ IS?) |
	(['] (ESC|any\[\n\\'])* ['])
				{ RET(ICON); }

	(D+ E FS?) | (D* "." D+ E? FS?) | (D+ "." D* E? FS?)
				{ RET(FCON); }

	(["] (ESC|any\[\n\\"])* ["])
				{ RET(SCON); }

	"..."                   { RET(ELLIPSIS); }
	">>="			{ RET(RSHIFTEQ); }
	"<<="			{ RET(LSHIFTEQ); }
	"+="			{ RET(ADDEQ); }
	"-="			{ RET(SUBEQ); }
	"*="			{ RET(MULEQ); }
	"/="			{ RET(DIVEQ); }
	"%="			{ RET(MODEQ); }
	"&="			{ RET(ANDEQ); }
	"^="			{ RET(XOREQ); }
	"|="			{ RET(OREQ); }
	">>"			{ RET(RSHIFT); }
	"<<"			{ RET(LSHIFT); }
	"++"			{ RET(INCR); }
	"--"			{ RET(DECR); }
	"->"			{ RET(DEREF); }
	"&&"			{ RET(ANDAND); }
	"||"			{ RET(OROR); }
	"<="			{ RET(LEQ); }
	">="			{ RET(GEQ); }
	"=="			{ RET(EQL); }
	"!="			{ RET(NEQ); }
	";"			{ RET(';'); }
	"{"			{ RET('{'); }
	"}"			{ RET('}'); }
	","			{ RET(','); }
	":"			{ RET(':'); }
	"="			{ RET('='); }
	"("			{ RET('('); }
	")"			{ RET(')'); }
	"["			{ RET('['); }
	"]"			{ RET(']'); }
	"."			{ RET('.'); }
	"&"			{ RET('&'); }
	"!"			{ RET('!'); }
	"~"			{ RET('~'); }
	"-"			{ RET('-'); }
	"+"			{ RET('+'); }
	"*"			{ RET('*'); }
	"/"			{ RET('/'); }
	"%"			{ RET('%'); }
	"<"			{ RET('<'); }
	">"			{ RET('>'); }
	"^"			{ RET('^'); }
	"|"			{ RET('|'); }
	"?"			{ RET('?'); }


	[ \t\v\f]+		{ goto std; }

	"\n"
	    {
		if(cursor == s->eof) RET(EOI);
		s->pos = cursor; s->line++;
		goto std;
	    }

	any
	    {
		printf("unexpected character: %c\n", *s->tok);
		goto std;
	    }
*/

comment:
/*!re2c
	"*/"			{ goto std; }
	"\n"
	    {
		if(cursor == s->eof) RET(EOI);
		s->tok = s->pos = cursor; s->line++;
		goto comment;
	    }
        any			{ goto comment; }
*/
}

main(){
    Scanner in;
    int t;
    memset((char*) &in, 0, sizeof(in));
    in.fd = 0;
    while((t = scan(&in)) != EOI){
/*
	printf("%d\t%.*s\n", t, in.cur - in.tok, in.tok);
	printf("%d\n", t);
*/
    }
    close(in.fd);
}

samples/C++/cvsignore.re (new file, 63 lines)
@@ -0,0 +1,63 @@

#define YYFILL(n) if (cursor >= limit) break;
#define YYCTYPE char
#define YYCURSOR cursor
#define YYLIMIT limit
#define YYMARKER marker

/*!re2c
any     = (.|"\n");
value	= (":" (.\"$")+)?;
cvsdat	= "Date";
cvsid	= "Id";
cvslog	= "Log";
cvsrev	= "Revision";
cvssrc	= "Source";
*/

#define APPEND(text) \
	append(output, outsize, text, sizeof(text) - sizeof(YYCTYPE))

inline void append(YYCTYPE *output, size_t & outsize, const YYCTYPE * text, size_t len)
{
	memcpy(output + outsize, text, len);
	outsize += (len / sizeof(YYCTYPE));
}

void scan(YYCTYPE *pText, size_t *pSize, int *pbChanged)
{
	// rule
	// scan lines
	// find $ in lines
	//   compact $<keyword>: .. $ to $<keyword>$

	YYCTYPE *output;
	const YYCTYPE *cursor, *limit, *marker;

	cursor = marker = output = *pText;

	size_t insize = *pSize;
	size_t outsize = 0;

	limit = cursor + insize;

	while(1) {
loop:
/*!re2c

"$" cvsdat value "$"	{ APPEND(L"$" L"Date$"); goto loop; }
"$" cvsid  value "$"	{ APPEND(L"$" L"Id$"); goto loop; }
"$" cvslog value "$"	{ APPEND(L"$" L"Log$"); goto loop; }
"$" cvsrev value "$"	{ APPEND(L"$" L"Revision$"); goto loop; }
"$" cvssrc value "$"	{ APPEND(L"$" L"Source$"); goto loop; }
any						{ output[outsize++] = cursor[-1]; if (cursor >= limit) break; goto loop; }

*/
	}
	output[outsize] = '\0';

	// set the new size
	*pSize = outsize;

	*pbChanged = (insize == outsize) ? 0 : 1;
}

samples/C++/simple.re (new file, 13 lines)
@@ -0,0 +1,13 @@
#define	NULL		((char*) 0)
char *scan(char *p){
char *q;
#define	YYCTYPE		char
#define	YYCURSOR	p
#define	YYLIMIT		p
#define	YYMARKER	q
#define	YYFILL(n)
/*!re2c
	[0-9]+		{return YYCURSOR;}
	[\000-\377]	{return NULL;}
*/
}

samples/CSON/base.cson (new file, 72 lines)
@@ -0,0 +1,72 @@
'atom-text-editor':
  # Platform Bindings
  'home': 'editor:move-to-first-character-of-line'
  'end': 'editor:move-to-end-of-screen-line'
  'shift-home': 'editor:select-to-first-character-of-line'
  'shift-end': 'editor:select-to-end-of-line'

'atom-text-editor:not([mini])':
  # Atom Specific
  'ctrl-C': 'editor:copy-path'

  # Sublime Parity
  'tab': 'editor:indent'
  'enter': 'editor:newline'
  'shift-tab': 'editor:outdent-selected-rows'
  'ctrl-K': 'editor:delete-line'

'.select-list atom-text-editor[mini]':
  'enter': 'core:confirm'

'.tool-panel.panel-left, .tool-panel.panel-right':
  'escape': 'tool-panel:unfocus'

'atom-text-editor !important, atom-text-editor[mini] !important':
  'escape': 'editor:consolidate-selections'

# allow standard input fields to work correctly
'body .native-key-bindings':
  'tab': 'core:focus-next'
  'shift-tab': 'core:focus-previous'
  'enter': 'native!'
  'backspace': 'native!'
  'shift-backspace': 'native!'
  'delete': 'native!'
  'up': 'native!'
  'down': 'native!'
  'shift-up': 'native!'
  'shift-down': 'native!'
  'alt-up': 'native!'
  'alt-down': 'native!'
  'alt-shift-up': 'native!'
  'alt-shift-down': 'native!'
  'cmd-up': 'native!'
  'cmd-down': 'native!'
  'cmd-shift-up': 'native!'
  'cmd-shift-down': 'native!'
  'ctrl-up': 'native!'
  'ctrl-down': 'native!'
  'ctrl-shift-up': 'native!'
  'ctrl-shift-down': 'native!'
  'left': 'native!'
  'right': 'native!'
  'shift-left': 'native!'
  'shift-right': 'native!'
  'alt-left': 'native!'
  'alt-right': 'native!'
  'alt-shift-left': 'native!'
  'alt-shift-right': 'native!'
  'cmd-left': 'native!'
  'cmd-right': 'native!'
  'cmd-shift-left': 'native!'
  'cmd-shift-right': 'native!'
  'ctrl-left': 'native!'
  'ctrl-right': 'native!'
  'ctrl-shift-left': 'native!'
  'ctrl-shift-right': 'native!'
  'ctrl-b': 'native!'
  'ctrl-f': 'native!'
  'ctrl-F': 'native!'
  'ctrl-B': 'native!'
  'ctrl-h': 'native!'
  'ctrl-d': 'native!'

samples/CSON/config.cson (new file, 59 lines)
@@ -0,0 +1,59 @@
directoryIcons:

	Atom:
		icon: "atom"
		match: /^\.atom$/
		colour: "dark-green"

	Bower:
		icon: "bower"
		match: /^bower[-_]components$/
		colour: "bower"

	Dropbox:
		icon: "dropbox"
		match: /^(?:Dropbox|\.dropbox\.cache)$/
		colour: "medium-blue"

	Git:
		icon: "git"
		match: /^\.git$/

	GitHub:
		icon: "github"
		match: /^\.github$/

	Meteor:
		icon: "meteor"
		match: /^\.meteor$/

	NodeJS:
		icon: "node"
		match: /^node_modules$/
		colour: "medium-green"

	Package:
		icon: "package"
		match: /^\.bundle$/i

	TextMate:
		icon: "textmate"
		match: ".tmBundle"


fileIcons:

	ABAP:
		icon: "abap"
		scope: "abp"
		match: ".abap"
		colour: "medium-orange"

	ActionScript: # Or Flash-related
		icon: "as"
		match: [
			[".swf", "medium-blue"]
			[".as", "medium-red", scope: /\.(?:flex-config|actionscript(?:\.\d+)?)$/i, alias: /ActionScript\s?3|as3/i]
			[".jsfl", "auto-yellow"]
			[".swc", "dark-red"]
		]

samples/CSON/ff-sfd.cson (new file, 108 lines)
@@ -0,0 +1,108 @@
name: "Spline Font Database"
scopeName: "text.sfd"
fileTypes: ["sfd"]
firstLineMatch: "^SplineFontDB: [\\d.]+"
patterns: [include: "#main"]

repository:
	main:
		patterns: [
			{include: "#punctuation"}
			{include: "#private"}
			{include: "#image"}
			{include: "#pickleData"}
			{include: "#sections"}
			{include: "#copyright"}
			{include: "#property"}
			{include: "#control"}
			{include: "#address"}
			{include: "#encoding"}
			{include: "source.fontforge#shared"}
			{include: "#colour"}
		]

	punctuation:
		patterns: [
			{match: "<|>",  name: "punctuation.definition.brackets.angle.sfd"}
			{match: "[{}]", name: "punctuation.definition.brackets.curly.sfd"}
		]

	private:
		name: "meta.section.private.sfd"
		begin: "^BeginPrivate(?=:)"
		end:   "^EndPrivate\\b"
		beginCaptures: 0: name: "keyword.control.begin.private.sfd"
		endCaptures:   0: name: "keyword.control.end.private.sfd"
		patterns: [
			{match: "^\\S+", name: "entity.name.private.property.sfd"}
			{include: "$self"}
		]

	image:
		name: "meta.image.sfd"
		begin: "^(Image)(?=:)(.+)$"
		end:   "^(EndImage)\\b"
		contentName: "string.unquoted.raw.data.sfd"
		beginCaptures:
			1: name: "keyword.control.begin.image.sfd"
			2: patterns: [include: "$self"]
		endCaptures:
			1: name: "keyword.control.end.image.sfd"

	pickleData:
		name: "meta.pickle-data.sfd"
		begin: "^(PickledData)(:)\\s*(\")"
		end:   '"'
		beginCaptures:
			1: name: "entity.name.property.sfd"
			2: name: "punctuation.separator.dictionary.key-value.sfd"
			3: name: "punctuation.definition.string.begin.sfd"
		endCaptures:
			0: name: "punctuation.definition.string.end.sfd"
		patterns: [match: "\\\\.", name: "constant.character.escape.sfd"]

	sections:
		name: "meta.section.${2:/downcase}.sfd"
		begin: "^(Start|Begin)([A-Z]\\w+)(?=:)"
		end:   "^(End\\2)\\b"
		beginCaptures: 0: name: "keyword.control.begin.${2:/downcase}.sfd"
		endCaptures:   0: name: "keyword.control.end.${2:/downcase}.sfd"
		patterns: [include: "$self"]

	control:
		name: "keyword.control.${1:/downcase}.sfd"
		match: "\\b(Fore|Back|SplineSet|^End\\w+)\\b"

	colour:
		name: "constant.other.hex.colour.sfd"
		match: "(#)[A-Fa-f0-9]{3,}|(?<=\\s)[A-Fa-f0-9]{6,8}"
		captures:
			1: name: "punctuation.definition.colour.sfd"

	encoding:
		name: "constant.language.encoding.sfd"
		match: "(?i)\\b(ISO[-\\w]+)(?<=\\d)(?=\\s|$)"

	# Don't highlight numbers in freeform strings (years/version strings)
	copyright:
		name: "meta.${1:/downcase}-string.sfd"
		begin: "^(Copyright|U?Comments?|\\w+Name)(:)"
		end:   "$"
		beginCaptures:
			1: name: "entity.name.property.sfd"
			2: name: "punctuation.separator.dictionary.key-value.sfd"
		patterns: [include: "source.fontforge#stringEscapes"]

	# No idea what this is, but it looks distracting without a fix
	# Assuming it's referring to a memory register or something.
	address:
		match: "\\d+[xX][A-Fa-f0-9]+"
		name: "constant.numeric.hexadecimal.sfd"

	property:
		match: "^([^:]+)(:)"
		name: "meta.dictionary.key-value.sfd"
		captures:
			1: name: "entity.name.property.sfd"
			2: name: "punctuation.separator.dictionary.key-value.sfd"


samples/CSON/wercker-status.cson (new file, 11 lines)
@@ -0,0 +1,11 @@
'menu': [
  {
    'label': 'Packages'
    'submenu': [
      'label': 'Wercker Status'
      'submenu': [
        { 'label': 'Check now!', 'command': 'wercker-status:checknow' }
      ]
    ]
  }
]

@@ -1,707 +0,0 @@
Inductive day : Type :=
| monday : day
| tuesday : day
| wednesday : day
| thursday : day
| friday : day
| saturday : day
| sunday : day.

Definition next_weekday (d:day) : day :=
  match d with
  | monday => tuesday
  | tuesday => wednesday
  | wednesday => thursday
  | thursday => friday
  | friday => monday
  | saturday => monday
  | sunday => monday
  end.

Example test_next_weekday:
(next_weekday (next_weekday saturday)) = tuesday.

Proof. simpl. reflexivity. Qed.

Inductive bool : Type :=
	| true : bool
	| false : bool.

Definition negb (b:bool) : bool :=
	match b with
		| true => false
		| false => true
	end.

Definition andb (b1:bool) (b2:bool) : bool :=
	match b1 with
		| true => b2
		| false => false
	end.

Definition orb (b1:bool) (b2:bool) : bool :=
	match b1 with
		| true => true
		| false => b2
	end.

Example test_orb1: (orb true false) = true.
Proof. simpl. reflexivity. Qed.

Example test_orb2: (orb false false) = false.
Proof. simpl. reflexivity. Qed.

Example test_orb3: (orb false true) = true.
Proof. simpl. reflexivity. Qed.

Example test_orb4: (orb true true) = true.
Proof. simpl. reflexivity. Qed.

Definition nandb (b1: bool) (b2:bool) : bool :=
	match b1 with
		| true => match b2 with
								| false => true
								| true => false
							end
		| false => true
	end.

Example test_nandb1: (nandb true false) = true.
Proof. simpl. reflexivity. Qed.
Example test_nandb2: (nandb false false) = true.
Proof. simpl. reflexivity. Qed.
Example test_nandb3: (nandb false true) = true.
Proof. simpl. reflexivity. Qed.
Example test_nandb4: (nandb true true) = false.
Proof. simpl. reflexivity. Qed.

Definition andb3 (b1: bool) (b2:bool) (b3:bool) : bool :=
	match b1 with
		| false => false
		| true => match b2 with
								| false => false
								| true => b3
							end
	end.

Example test_andb31: (andb3 true true true) = true.
Proof. simpl. reflexivity. Qed.
Example test_andb32: (andb3 false true true) = false.
Proof. simpl. reflexivity. Qed.
Example test_andb33: (andb3 true false true) = false.
Proof. simpl. reflexivity. Qed.
Example test_andb34: (andb3 true true false) = false.
Proof. simpl. reflexivity. Qed.

Module Playground1.

Inductive nat : Type :=
	| O : nat
	| S : nat -> nat.

Definition pred (n : nat) : nat :=
	match n with
		| O => O
		| S n' => n'
	end.

Definition minustwo (n : nat) : nat :=
	match n with
		| O => O
		| S O => O
		| S (S n') => n'
	end.

Fixpoint evenb (n : nat) : bool :=
	match n with
		| O => true
		| S O => false
		| S (S n') => evenb n'
	end.

Definition oddb (n : nat) : bool := negb (evenb n).

Example test_oddb1: (oddb (S O)) = true.
Proof. reflexivity. Qed.
Example test_oddb2: (oddb (S (S (S (S O))))) = false.
Proof. reflexivity. Qed.

Fixpoint plus (n : nat) (m : nat) : nat :=
	match n with
		| O => m
		| S n' => S (plus n' m)
	end.

Fixpoint mult (n m : nat) : nat :=
	match n with
		| O => O
		| S n' => plus m (mult n' m)
	end.

Fixpoint minus (n m : nat) : nat :=
	match n, m with
		| O, _ => n
		| S n', O => S n'
		| S n', S m' => minus n' m'
	end.

Fixpoint exp (base power : nat) : nat :=
	match power with
		| O => S O
		| S p => mult base (exp base p)
	end.

Fixpoint factorial (n : nat) : nat :=
	match n with
		| O => S O
		| S n' => mult n (factorial n')
	end.

Example test_factorial1: (factorial (S (S (S O)))) = (S (S (S (S (S (S O)))))).
Proof. simpl. reflexivity. Qed.

Notation "x + y" := (plus x y) (at level 50, left associativity) : nat_scope.
Notation "x - y" := (minus x y) (at level 50, left associativity) : nat_scope.
Notation "x * y" := (mult x y) (at level 40, left associativity) : nat_scope.

Fixpoint beq_nat (n m : nat) : bool :=
	match n with
		| O => match m with
						| O => true
						| S m' => false
					 end
		| S n' => match m with
							| O => false
							| S m' => beq_nat n' m'
							end
	end.

Fixpoint ble_nat (n m : nat) : bool :=
	match n with
		| O => true
		| S n' =>
				match m with
					| O => false
					| S m' => ble_nat n' m'
				end
	end.

Example test_ble_nat1: (ble_nat (S (S O)) (S (S O))) = true.
Proof. simpl. reflexivity. Qed.
Example test_ble_nat2: (ble_nat (S (S O)) (S (S (S (S O))))) = true.
Proof. simpl. reflexivity. Qed.
Example test_ble_nat3: (ble_nat (S (S (S (S O)))) (S (S O))) = false.
Proof. simpl. reflexivity. Qed.

Definition blt_nat (n m : nat) : bool :=
		(andb (negb (beq_nat n m)) (ble_nat n m)).

Example test_blt_nat1: (blt_nat (S (S O)) (S (S O))) = false.
Proof. simpl. reflexivity. Qed.
Example test_blt_nat3: (blt_nat (S (S (S (S O)))) (S (S O))) = false.
Proof. simpl. reflexivity. Qed.
Example test_blt_nat2 : (blt_nat (S (S O)) (S (S (S (S O))))) = true.
Proof. simpl. reflexivity. Qed.

Theorem plus_O_n : forall n : nat, O + n = n.
Proof.
	simpl. reflexivity. Qed.

Theorem plus_O_n' : forall n : nat, O + n = n.
Proof.
	reflexivity. Qed.

Theorem plus_O_n'' : forall n : nat, O + n = n.
Proof.
	intros n. reflexivity. Qed.

Theorem plus_1_1 : forall n : nat, (S O) + n = S n.
Proof.
	intros n. reflexivity. Qed.

Theorem mult_0_1: forall n : nat, O * n = O.
Proof.
	intros n. reflexivity. Qed.

Theorem plus_id_example : forall n m:nat,
	n = m -> n + n = m + m.
Proof.
	intros n m.
	intros H.
	rewrite -> H.
	reflexivity. Qed.

Theorem plus_id_exercise : forall n m o: nat,
	n = m -> m = o -> n + m = m + o.
Proof.
	intros n m o.
	intros H.
	intros H'.
	rewrite -> H.
	rewrite <- H'.
	reflexivity.
	Qed.

Theorem mult_0_plus : forall n m : nat,
				(O + n) * m = n * m.
Proof.
	intros n m.
	rewrite -> plus_O_n.
	reflexivity. Qed.

Theorem mult_1_plus : forall n m: nat,
	((S O) + n) * m = m + (n * m).
Proof.
	intros n m.
	rewrite -> plus_1_1.
	reflexivity.
	Qed.

Theorem mult_1 : forall n : nat,
				n * (S O) = n.
Proof.
	intros n.
	induction n as [| n'].
	reflexivity.
	simpl.
	rewrite -> IHn'.
	reflexivity.
	Qed.

Theorem plus_1_neq_0 : forall n : nat,
				beq_nat (n + (S O)) O = false.
Proof.
	intros n.
	destruct n as [| n'].
	reflexivity.
	reflexivity.
	Qed.

Theorem zero_nbeq_plus_1 : forall n : nat,
				beq_nat O (n + (S O)) = false.
Proof.
	intros n.
	destruct n.
	reflexivity.
	reflexivity.
Qed.

Require String. Open Scope string_scope.

Ltac move_to_top x :=
match reverse goal with
| H : _ |- _ => try move x after H
end.

Tactic Notation "assert_eq" ident(x) constr(v) :=
	let H := fresh in
	assert (x = v) as H by reflexivity;
	clear H.

	Tactic Notation "Case_aux" ident(x) constr(name) :=
		first [
		set (x := name); move_to_top x
		| assert_eq x name; move_to_top x
		| fail 1 "because we are working on a different case" ].

		Ltac Case name := Case_aux Case name.
		Ltac SCase name := Case_aux SCase name.
		Ltac SSCase name := Case_aux SSCase name.
		Ltac SSSCase name := Case_aux SSSCase name.
		Ltac SSSSCase name := Case_aux SSSSCase name.
		Ltac SSSSSCase name := Case_aux SSSSSCase name.
		Ltac SSSSSSCase name := Case_aux SSSSSSCase name.
		Ltac SSSSSSSCase name := Case_aux SSSSSSSCase name.

Theorem andb_true_elim1 : forall b c : bool,
				andb b c = true -> b = true.
Proof.
	intros b c H.
	destruct b.
	Case "b = true".
		reflexivity.
	Case "b = false".
		rewrite <- H. reflexivity. Qed.

Theorem plus_0_r : forall n : nat, n + O = n.
Proof.
	intros n. induction n as [| n'].
	Case "n = 0". reflexivity.
	Case "n = S n'". simpl. rewrite -> IHn'. reflexivity. Qed.

Theorem minus_diag : forall n,
				minus n n = O.
Proof.
	intros n. induction n as [| n'].
	Case "n = 0".
		simpl. reflexivity.
	Case "n = S n'".
		simpl. rewrite -> IHn'. reflexivity. Qed.


Theorem mult_0_r : forall n:nat,
				n * O = O.
Proof.
	intros n. induction n as [| n'].
	Case "n = 0".
		reflexivity.
	Case "n = S n'".
		simpl. rewrite -> IHn'. reflexivity. Qed.

Theorem plus_n_Sm : forall n m : nat,
				S (n + m) = n + (S m).
Proof.
	intros n m. induction n as [| n'].
	Case "n = 0".
		reflexivity.
	Case "n = S n'".
		simpl. rewrite -> IHn'. reflexivity. Qed.

Theorem plus_assoc : forall n m p : nat,
					n + (m + p) = (n + m) + p.
Proof.
	intros n m p.
	induction n as [| n'].
	reflexivity.
	simpl.
	rewrite -> IHn'.
	reflexivity. Qed.

Theorem plus_distr : forall n m: nat, S (n + m) = n + (S m).
Proof.
	intros n m.  induction n as [| n'].
	Case "n = 0".
		reflexivity.
	Case "n = S n'".
		simpl. rewrite -> IHn'. reflexivity. Qed.

Theorem mult_distr : forall n m: nat, n * ((S O) + m) = n * (S m).
Proof.
	intros n m.
	induction n as [| n'].
	reflexivity.
	reflexivity.
	Qed.

Theorem plus_comm : forall n m : nat,
	n + m = m + n.
Proof.
	intros n m.
	induction n as [| n'].
	Case "n = 0".
		simpl.
		rewrite -> plus_0_r.
		reflexivity.
	Case "n = S n'".
		simpl.
		rewrite -> IHn'.
		rewrite -> plus_distr.
		reflexivity. Qed.

Fixpoint double (n:nat) :=
	match n with
		| O => O
		| S n' => S (S (double n'))
	end.

Lemma double_plus : forall n, double n = n + n.
Proof.
	intros n. induction n as [| n'].
	Case "n = 0".
		reflexivity.
	Case "n = S n'".
		simpl. rewrite -> IHn'.
		rewrite -> plus_distr. reflexivity.
		Qed.

Theorem beq_nat_refl : forall n : nat,
	true = beq_nat n n.
Proof.
	intros n. induction n as [| n'].
	Case "n = 0".
		reflexivity.
	Case "n = S n".
		simpl. rewrite <- IHn'.
		reflexivity. Qed.

Theorem plus_rearrange: forall n m p q : nat,
				(n + m) + (p + q) = (m + n) + (p + q).
Proof.
	intros n m p q.
	assert(H: n + m = m + n).
		Case "Proof by assertion".
		rewrite -> plus_comm. reflexivity.
	rewrite -> H. reflexivity. Qed.

Theorem plus_swap : forall n m p: nat,
				n + (m + p) = m + (n + p).
Proof.
	intros n m p.
	rewrite -> plus_assoc.
	assert(H: m + (n + p) = (m + n) + p).
	rewrite -> plus_assoc.
	reflexivity.
	rewrite -> H.
	assert(H2: m + n = n + m).
	rewrite -> plus_comm.
	reflexivity.
	rewrite -> H2.
	reflexivity.
	Qed.

Theorem plus_swap' : forall n m p: nat,
				n + (m + p) = m + (n + p).
Proof.
	intros n m p.
	rewrite -> plus_assoc.
	assert(H: m + (n + p) = (m + n) + p).
	rewrite -> plus_assoc.
	reflexivity.
	rewrite -> H.
	replace (m + n) with (n + m).
	rewrite -> plus_comm.
	reflexivity.
	rewrite -> plus_comm.
	reflexivity.
	Qed.

Theorem mult_1_distr: forall m n: nat,
				n * ((S O) + m) = n * (S O) + n * m.
Proof.
	intros n m.
	rewrite -> mult_1.
	rewrite -> plus_1_1.
	simpl.
	induction m as [|m'].
	simpl.
	reflexivity.
	simpl.
	rewrite -> plus_swap.
	rewrite <- IHm'.
	reflexivity.
	Qed.

Theorem mult_comm: forall m n : nat,
				m * n = n * m.
Proof.
	intros m n.
	induction n as [| n'].
	Case "n = 0".
		simpl.
		rewrite -> mult_0_r.
		reflexivity.
	Case "n = S n'".
		simpl.
		rewrite <- mult_distr.
		rewrite -> mult_1_distr.
		rewrite -> mult_1.
		rewrite -> IHn'.
		reflexivity.
		Qed.

Theorem evenb_next : forall n : nat,
				evenb n = evenb (S (S n)).
Proof.
	intros n.
Admitted.

Theorem negb_negb : forall n : bool,
				n = negb (negb n).
Proof.
	intros n.
	destruct n.
	reflexivity.
	reflexivity.
	Qed.

Theorem evenb_n_oddb_Sn : forall n : nat,
				evenb n = negb (evenb (S n)).
Proof.
	intros n.
	induction n as [|n'].
	reflexivity.
	assert(H: evenb n' = evenb (S (S n'))).
	reflexivity.
	rewrite <- H.
	rewrite -> IHn'.
	rewrite <- negb_negb.
	reflexivity.
	Qed.

(*Fixpoint bad (n : nat) : bool :=
	match n with
		| O => true
		| S O => bad (S n)
		| S (S n') => bad n'
	end.*)

Theorem ble_nat_refl : forall n:nat,
				true = ble_nat n n.
Proof.
	intros n.
	induction n as [|n'].
	Case "n = 0".
		reflexivity.
	Case "n = S n".
		simpl.
		rewrite <- IHn'.
		reflexivity.
	Qed.

Theorem zero_nbeq_S : forall n: nat,
				beq_nat O (S n) = false.
Proof.
	intros n.
	reflexivity.
	Qed.

Theorem andb_false_r : forall b : bool,
				andb b false = false.
Proof.
	intros b.
	destruct b.
	reflexivity.
	reflexivity.
	Qed.

Theorem plus_ble_compat_1 : forall n m p : nat,
				ble_nat n m = true -> ble_nat (p + n) (p + m) = true.
Proof.
	intros n m p.
	intros H.
	induction p.
	Case "p = 0".
		simpl.
		rewrite -> H.
		reflexivity.
	Case "p = S p'".
		simpl.
		rewrite -> IHp.
		reflexivity.
		Qed.

Theorem S_nbeq_0 : forall n:nat,
				beq_nat (S n) O = false.
Proof.
	intros n.
	reflexivity.
	Qed.

Theorem mult_1_1 : forall n:nat, (S O) * n = n.
Proof.
	intros n.
	simpl.
	rewrite -> plus_0_r.
	reflexivity. Qed.

Theorem all3_spec : forall b c : bool,
	orb (andb b c)
			(orb (negb b)
			 		 (negb c))
	= true.
Proof.
	intros b c.
	destruct b.
	destruct c.
	reflexivity.
	reflexivity.
	reflexivity.
	Qed.

Lemma mult_plus_1 : forall n m : nat,
			S(m + n) = m + (S n).
Proof.
	intros n m.
	induction m.
	reflexivity.
	simpl.
	rewrite -> IHm.
	reflexivity.
	Qed.

Theorem mult_mult : forall n m : nat,
	n * (S m) = n * m + n.
Proof.
	intros n m.
	induction n.
	reflexivity.
	simpl.
	rewrite -> IHn.
	rewrite -> plus_assoc.
	rewrite -> mult_plus_1.
	reflexivity.
	Qed.

Theorem mult_plus_distr_r : forall n m p:nat,
				(n + m) * p = (n * p) + (m * p).
Proof.
	intros n m p.
	induction p.
	rewrite -> mult_0_r.
	rewrite -> mult_0_r.
	rewrite -> mult_0_r.
	reflexivity.
	rewrite -> mult_mult.
	rewrite -> mult_mult.
	rewrite -> mult_mult.
	rewrite -> IHp.
	assert(H1: ((n * p) + n) + (m * p + m) = (n * p) + (n + (m * p + m))).
	rewrite <- plus_assoc.
	reflexivity.
	rewrite -> H1.
	assert(H2: (n + (m * p + m)) = (m * p + (n + m))).
	rewrite -> plus_swap.
	reflexivity.
	rewrite -> H2.
	assert(H3: (n * p) + (m * p + (n + m)) = ((n * p ) + (m * p)) + (n + m)).
	rewrite -> plus_assoc.
	reflexivity.
	rewrite -> H3.
	reflexivity.
	Qed.

Theorem mult_assoc : forall n m p : nat,
				n * (m * p) = (n * m) * p.
Proof.
	intros n m p.
	induction n.
	simpl.
	reflexivity.
	simpl.
	rewrite -> mult_plus_distr_r.
	rewrite -> IHn.
	reflexivity.
	Qed.

Inductive bin : Type :=
	| BO : bin
	| D : bin -> bin
	| M : bin -> bin.

Fixpoint incbin (n : bin) : bin :=
	match n with
		| BO => M (BO)
		| D n' => M n'
		| M n' => D (incbin n')
	end.

Fixpoint bin2un (n : bin) : nat :=
	match n with
		| BO => O
		| D n' => double (bin2un n')
		| M n' => S (double (bin2un n'))
	end.

Theorem bin_comm : forall n : bin,
				bin2un(incbin n) = S (bin2un n).
Proof.
	intros n.
	induction n.
		reflexivity.
		reflexivity.
		simpl.
		rewrite -> IHn.
		reflexivity.
	Qed.

End Playground1.

samples/Coq/Computation.v (new file, 85 lines)
@@ -0,0 +1,85 @@
(** The definition of computations, used to represent interactive programs. *)
Require Import Coq.NArith.NArith.
Require Import ListString.All.

Local Open Scope type.

(** System calls. *)
Module Command.
  Inductive t :=
  | AskCard
  | AskPIN
  | CheckPIN (pin : N)
  | AskAmount
  | CheckAmount (amount : N)
  | GiveCard
  | GiveAmount (amount : N)
  | ShowError (message : LString.t).

  (** The type of an answer for a command depends on the value of the command. *)
  Definition answer (command : t) : Type :=
    match command with
    | AskCard => bool (* If the given card seems valid. *)
    | AskPIN => option N (* A number or cancellation. *)
    | CheckPIN _ => bool (* If the PIN number is valid. *)
    | AskAmount => option N (* A number or cancellation. *)
    | CheckAmount _ => bool (* If the amount can be withdrawn. *)
    | GiveCard => bool (* If the card was given. *)
    | GiveAmount _ => bool (* If the money was given. *)
    | ShowError _ => unit (* Show an error message. *)
    end.
End Command.

(** Computations with I/Os. *)
Module C.
  (** A computation can either does nothing, or do a system call and wait
      for the answer to run another computation. *)
  Inductive t : Type :=
  | Ret : t
  | Call : forall (command : Command.t), (Command.answer command -> t) -> t.
  Arguments Ret.
  Arguments Call _ _.

  (** Some optional notations. *)
  Module Notations.
    (** A nicer notation for `Ret`. *)
    Definition ret : t :=
      Ret.

    (** We define an explicit apply function so that Coq does not try to expand
        the notations everywhere. *)
    Definition apply {A B} (f : A -> B) (x : A) := f x.

    (** System call. *)
    Notation "'call!' answer ':=' command 'in' X" :=
      (Call command (fun answer => X))
      (at level 200, answer ident, command at level 100, X at level 200).

    (** System call with typed answer. *)
    Notation "'call!' answer : A ':=' command 'in' X" :=
      (Call command (fun (answer : A) => X))
      (at level 200, answer ident, command at level 100, A at level 200, X at level 200).

    (** System call ignoring the answer. *)
    Notation "'do_call!' command 'in' X" :=
      (Call command (fun _ => X))
      (at level 200, command at level 100, X at level 200).

    (** This notation is useful to compose computations which wait for a
        continuation. We do not have an explicit bind operator to simplify the
        language and the proofs. *)
    Notation "'let!' x ':=' X 'in' Y" :=
      (apply X (fun x => Y))
      (at level 200, x ident, X at level 100, Y at level 200).

    (** Let with a typed answer. *)
    Notation "'let!' x : A ':=' X 'in' Y" :=
      (apply X (fun (x : A) => Y))
      (at level 200, x ident, X at level 100, A at level 200, Y at level 200).

    (** Let ignoring the answer. *)
    Notation "'do!' X 'in' Y" :=
      (apply X (fun _ => Y))
      (at level 200, X at level 100, Y at level 200).
  End Notations.
End C.

@@ -1,290 +0,0 @@
(** A development of Treesort on Heap trees. It has an average
    complexity of O(n.log n) but of O(n²) in the worst case (e.g. if
    the list is already sorted) *)

(* G. Huet 1-9-95 uses Multiset *)

Require Import List Multiset PermutSetoid Relations Sorting.

Section defs.

  (** * Trees and heap trees *)

  (** ** Definition of trees over an ordered set *)

  Variable A : Type.
  Variable leA : relation A.
  Variable eqA : relation A.

  Let gtA (x y:A) := ~ leA x y.

  Hypothesis leA_dec : forall x y:A, {leA x y} + {leA y x}.
  Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
  Hypothesis leA_refl : forall x y:A, eqA x y -> leA x y.
  Hypothesis leA_trans : forall x y z:A, leA x y -> leA y z -> leA x z.
  Hypothesis leA_antisym : forall x y:A, leA x y -> leA y x -> eqA x y.

  Hint Resolve leA_refl.
  Hint Immediate eqA_dec leA_dec leA_antisym.

  Let emptyBag := EmptyBag A.
  Let singletonBag := SingletonBag _ eqA_dec.

  Inductive Tree :=
    | Tree_Leaf : Tree
    | Tree_Node : A -> Tree -> Tree -> Tree.

  (** [a] is lower than a Tree [T] if [T] is a Leaf
      or [T] is a Node holding [b>a] *)

  Definition leA_Tree (a:A) (t:Tree) :=
    match t with
      | Tree_Leaf => True
      | Tree_Node b T1 T2 => leA a b
    end.

  Lemma leA_Tree_Leaf : forall a:A, leA_Tree a Tree_Leaf.
  Proof.
    simpl; auto with datatypes.
  Qed.

  Lemma leA_Tree_Node :
    forall (a b:A) (G D:Tree), leA a b -> leA_Tree a (Tree_Node b G D).
  Proof.
    simpl; auto with datatypes.
  Qed.


  (** ** The heap property *)

  Inductive is_heap : Tree -> Prop :=
    | nil_is_heap : is_heap Tree_Leaf
    | node_is_heap :
      forall (a:A) (T1 T2:Tree),
        leA_Tree a T1 ->
        leA_Tree a T2 ->
        is_heap T1 -> is_heap T2 -> is_heap (Tree_Node a T1 T2).

  Lemma invert_heap :
    forall (a:A) (T1 T2:Tree),
      is_heap (Tree_Node a T1 T2) ->
      leA_Tree a T1 /\ leA_Tree a T2 /\ is_heap T1 /\ is_heap T2.
  Proof.
    intros; inversion H; auto with datatypes.
  Qed.

  (* This lemma ought to be generated automatically by the Inversion tools *)
  Lemma is_heap_rect :
    forall P:Tree -> Type,
      P Tree_Leaf ->
      (forall (a:A) (T1 T2:Tree),
        leA_Tree a T1 ->
        leA_Tree a T2 ->
        is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
      forall T:Tree, is_heap T -> P T.
  Proof.
    simple induction T; auto with datatypes.
    intros a G PG D PD PN.
    elim (invert_heap a G D); auto with datatypes.
    intros H1 H2; elim H2; intros H3 H4; elim H4; intros.
    apply X0; auto with datatypes.
  Qed.

  (* This lemma ought to be generated automatically by the Inversion tools *)
  Lemma is_heap_rec :
    forall P:Tree -> Set,
      P Tree_Leaf ->
      (forall (a:A) (T1 T2:Tree),
        leA_Tree a T1 ->
        leA_Tree a T2 ->
        is_heap T1 -> P T1 -> is_heap T2 -> P T2 -> P (Tree_Node a T1 T2)) ->
      forall T:Tree, is_heap T -> P T.
  Proof.
    simple induction T; auto with datatypes.
    intros a G PG D PD PN.
    elim (invert_heap a G D); auto with datatypes.
    intros H1 H2; elim H2; intros H3 H4; elim H4; intros.
    apply X; auto with datatypes.
  Qed.

  Lemma low_trans :
    forall (T:Tree) (a b:A), leA a b -> leA_Tree b T -> leA_Tree a T.
  Proof.
    simple induction T; auto with datatypes.
    intros; simpl; apply leA_trans with b; auto with datatypes.
  Qed.

  (** ** Merging two sorted lists *)

  Inductive merge_lem (l1 l2:list A) : Type :=
    merge_exist :
    forall l:list A,
      Sorted leA l ->
 | 
			
		||||
      meq (list_contents _ eqA_dec l)
 | 
			
		||||
      (munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2)) ->
 | 
			
		||||
      (forall a, HdRel leA a l1 -> HdRel leA a l2 -> HdRel leA a l) ->
 | 
			
		||||
      merge_lem l1 l2.
 | 
			
		||||
  Require Import Morphisms.
 | 
			
		||||
 | 
			
		||||
  Instance: Equivalence (@meq A).
 | 
			
		||||
  Proof. constructor; auto with datatypes. red. apply meq_trans. Defined.
 | 
			
		||||
 | 
			
		||||
  Instance: Proper (@meq A ++> @meq _ ++> @meq _) (@munion A).
 | 
			
		||||
  Proof. intros x y H x' y' H'. now apply meq_congr. Qed.
 | 
			
		||||
 | 
			
		||||
  Lemma merge :
 | 
			
		||||
    forall l1:list A, Sorted leA l1 ->
 | 
			
		||||
    forall l2:list A, Sorted leA l2 -> merge_lem l1 l2.
 | 
			
		||||
  Proof.
 | 
			
		||||
    fix 1; intros; destruct l1.
 | 
			
		||||
    apply merge_exist with l2; auto with datatypes.
 | 
			
		||||
    rename l1 into l.
 | 
			
		||||
    revert l2 H0. fix 1. intros.
 | 
			
		||||
    destruct l2 as [|a0 l0].
 | 
			
		||||
    apply merge_exist with (a :: l); simpl; auto with datatypes.
 | 
			
		||||
    elim (leA_dec a a0); intros.
 | 
			
		||||
 | 
			
		||||
    (* 1 (leA a a0) *)
 | 
			
		||||
    apply Sorted_inv in H. destruct H.
 | 
			
		||||
    destruct (merge l H (a0 :: l0) H0).
 | 
			
		||||
    apply merge_exist with (a :: l1). clear merge merge0.
 | 
			
		||||
      auto using cons_sort, cons_leA with datatypes.
 | 
			
		||||
    simpl. rewrite m. now rewrite munion_ass.
 | 
			
		||||
    intros. apply cons_leA.
 | 
			
		||||
    apply (@HdRel_inv _ leA) with l; trivial with datatypes.
 | 
			
		||||
 | 
			
		||||
    (* 2 (leA a0 a) *)
 | 
			
		||||
    apply Sorted_inv in H0. destruct H0.
 | 
			
		||||
    destruct (merge0 l0 H0). clear merge merge0.
 | 
			
		||||
    apply merge_exist with (a0 :: l1);
 | 
			
		||||
      auto using cons_sort, cons_leA with datatypes.
 | 
			
		||||
    simpl; rewrite m. simpl. setoid_rewrite munion_ass at 1. rewrite munion_comm.
 | 
			
		||||
    repeat rewrite munion_ass. setoid_rewrite munion_comm at 3. reflexivity.
 | 
			
		||||
    intros. apply cons_leA.
 | 
			
		||||
    apply (@HdRel_inv _ leA) with l0; trivial with datatypes.
 | 
			
		||||
  Qed.
 | 
			
		||||
 | 
			
		||||
  (** ** From trees to multisets *)
 | 
			
		||||
 | 
			
		||||
  (** contents of a tree as a multiset *)
 | 
			
		||||
 | 
			
		||||
  (** Nota Bene: in what follows, the definition of SingletonBag
      is not used. Actually, we could just take as a postulate:
      [Parameter SingletonBag : A->multiset].  *)
 | 
			
		||||
 | 
			
		||||
  Fixpoint contents (t:Tree) : multiset A :=
 | 
			
		||||
    match t with
 | 
			
		||||
      | Tree_Leaf => emptyBag
 | 
			
		||||
      | Tree_Node a t1 t2 =>
 | 
			
		||||
        munion (contents t1) (munion (contents t2) (singletonBag a))
 | 
			
		||||
    end.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
  (** equivalence of two trees is equality of corresponding multisets *)
 | 
			
		||||
  Definition equiv_Tree (t1 t2:Tree) := meq (contents t1) (contents t2).
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
  (** * From lists to sorted lists *)
 | 
			
		||||
 | 
			
		||||
  (** ** Specification of heap insertion *)
 | 
			
		||||
 | 
			
		||||
  Inductive insert_spec (a:A) (T:Tree) : Type :=
 | 
			
		||||
    insert_exist :
 | 
			
		||||
    forall T1:Tree,
 | 
			
		||||
      is_heap T1 ->
 | 
			
		||||
      meq (contents T1) (munion (contents T) (singletonBag a)) ->
 | 
			
		||||
      (forall b:A, leA b a -> leA_Tree b T -> leA_Tree b T1) ->
 | 
			
		||||
      insert_spec a T.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
  Lemma insert : forall T:Tree, is_heap T -> forall a:A, insert_spec a T.
 | 
			
		||||
  Proof.
 | 
			
		||||
    simple induction 1; intros.
 | 
			
		||||
    apply insert_exist with (Tree_Node a Tree_Leaf Tree_Leaf);
 | 
			
		||||
      auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
 | 
			
		||||
    simpl; unfold meq, munion; auto using node_is_heap with datatypes.
 | 
			
		||||
    elim (leA_dec a a0); intros.
 | 
			
		||||
    elim (X a0); intros.
 | 
			
		||||
    apply insert_exist with (Tree_Node a T2 T0);
 | 
			
		||||
      auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
 | 
			
		||||
    simpl; apply treesort_twist1; trivial with datatypes.
 | 
			
		||||
    elim (X a); intros T3 HeapT3 ConT3 LeA.
 | 
			
		||||
    apply insert_exist with (Tree_Node a0 T2 T3);
 | 
			
		||||
      auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
 | 
			
		||||
    apply node_is_heap; auto using node_is_heap, nil_is_heap, leA_Tree_Leaf with datatypes.
 | 
			
		||||
    apply low_trans with a; auto with datatypes.
 | 
			
		||||
    apply LeA; auto with datatypes.
 | 
			
		||||
    apply low_trans with a; auto with datatypes.
 | 
			
		||||
    simpl; apply treesort_twist2; trivial with datatypes.
 | 
			
		||||
  Qed.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
  (** ** Building a heap from a list *)
 | 
			
		||||
 | 
			
		||||
  Inductive build_heap (l:list A) : Type :=
 | 
			
		||||
    heap_exist :
 | 
			
		||||
    forall T:Tree,
 | 
			
		||||
      is_heap T ->
 | 
			
		||||
      meq (list_contents _ eqA_dec l) (contents T) -> build_heap l.
 | 
			
		||||
 | 
			
		||||
  Lemma list_to_heap : forall l:list A, build_heap l.
 | 
			
		||||
  Proof.
 | 
			
		||||
    simple induction l.
 | 
			
		||||
    apply (heap_exist nil Tree_Leaf); auto with datatypes.
 | 
			
		||||
    simpl; unfold meq; exact nil_is_heap.
 | 
			
		||||
    simple induction 1.
 | 
			
		||||
    intros T i m; elim (insert T i a).
 | 
			
		||||
    intros; apply heap_exist with T1; simpl; auto with datatypes.
 | 
			
		||||
    apply meq_trans with (munion (contents T) (singletonBag a)).
 | 
			
		||||
    apply meq_trans with (munion (singletonBag a) (contents T)).
 | 
			
		||||
    apply meq_right; trivial with datatypes.
 | 
			
		||||
    apply munion_comm.
 | 
			
		||||
    apply meq_sym; trivial with datatypes.
 | 
			
		||||
  Qed.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
  (** ** Building the sorted list *)
 | 
			
		||||
 | 
			
		||||
  Inductive flat_spec (T:Tree) : Type :=
 | 
			
		||||
    flat_exist :
 | 
			
		||||
    forall l:list A,
 | 
			
		||||
      Sorted leA l ->
 | 
			
		||||
      (forall a:A, leA_Tree a T -> HdRel leA a l) ->
 | 
			
		||||
      meq (contents T) (list_contents _ eqA_dec l) -> flat_spec T.
 | 
			
		||||
 | 
			
		||||
  Lemma heap_to_list : forall T:Tree, is_heap T -> flat_spec T.
 | 
			
		||||
  Proof.
 | 
			
		||||
    intros T h; elim h; intros.
 | 
			
		||||
    apply flat_exist with (nil (A:=A)); auto with datatypes.
 | 
			
		||||
    elim X; intros l1 s1 i1 m1; elim X0; intros l2 s2 i2 m2.
 | 
			
		||||
    elim (merge _ s1 _ s2); intros.
 | 
			
		||||
    apply flat_exist with (a :: l); simpl; auto with datatypes.
 | 
			
		||||
    apply meq_trans with
 | 
			
		||||
      (munion (list_contents _ eqA_dec l1)
 | 
			
		||||
        (munion (list_contents _ eqA_dec l2) (singletonBag a))).
 | 
			
		||||
    apply meq_congr; auto with datatypes.
 | 
			
		||||
    apply meq_trans with
 | 
			
		||||
      (munion (singletonBag a)
 | 
			
		||||
        (munion (list_contents _ eqA_dec l1) (list_contents _ eqA_dec l2))).
 | 
			
		||||
    apply munion_rotate.
 | 
			
		||||
    apply meq_right; apply meq_sym; trivial with datatypes.
 | 
			
		||||
  Qed.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
  (** * Specification of treesort *)
 | 
			
		||||
 | 
			
		||||
  Theorem treesort :
 | 
			
		||||
    forall l:list A,
 | 
			
		||||
    {m : list A | Sorted leA m & permutation _ eqA_dec l m}.
 | 
			
		||||
  Proof.
 | 
			
		||||
    intro l; unfold permutation.
 | 
			
		||||
    elim (list_to_heap l).
 | 
			
		||||
    intros.
 | 
			
		||||
    elim (heap_to_list T); auto with datatypes.
 | 
			
		||||
    intros.
 | 
			
		||||
    exists l0; auto with datatypes.
 | 
			
		||||
    apply meq_trans with (contents T); trivial with datatypes.
 | 
			
		||||
  Qed.
 | 
			
		||||
 | 
			
		||||
End defs.
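(* Hedged illustration, not part of the original file: a three-node tree over
   [nat] ordered by [le].  The root 1 is below both of its children and both
   children are heaps, so [is_heap] holds; the [leA_Tree] side conditions
   reduce to [1 <= 3], [1 <= 2] and [True]. *)
Example is_heap_nat_example :
  is_heap nat le
    (Tree_Node nat 1
       (Tree_Node nat 3 (Tree_Leaf nat) (Tree_Leaf nat))
       (Tree_Node nat 2 (Tree_Leaf nat) (Tree_Leaf nat))).
Proof. repeat constructor; simpl; auto with arith. Qed.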
samples/Coq/JsCorrectness.v (new file, 5944 lines): file diff suppressed because it is too large
samples/Coq/JsInterpreterExtraction.v (new file, 249 lines):
@@ -0,0 +1,249 @@
 | 
			
		||||
Set Implicit Arguments.
 | 
			
		||||
Require Import JsSyntax JsInterpreterMonads JsInterpreter JsInit.
 | 
			
		||||
Require Import LibFix LibList.
 | 
			
		||||
 | 
			
		||||
Require Export Shared.
 | 
			
		||||
Require Export LibTactics LibLogic LibReflect LibList
 | 
			
		||||
  LibOperation LibStruct LibNat LibEpsilon LibFunc LibHeap.
 | 
			
		||||
Require Flocq.Appli.Fappli_IEEE Flocq.Appli.Fappli_IEEE_bits.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
(* Here are some commands to extract the interpreter relatively correctly to OCaml. *)
 | 
			
		||||
Extraction Language Ocaml.
 | 
			
		||||
 | 
			
		||||
Require Import ExtrOcamlBasic.
 | 
			
		||||
Require Import ExtrOcamlNatInt.
 | 
			
		||||
Require Import ExtrOcamlString.
 | 
			
		||||
 | 
			
		||||
(* Optimal fixpoint. *)
 | 
			
		||||
Extraction Inline FixFun3 FixFun3Mod FixFun4 FixFun4Mod FixFunMod curry3 uncurry3 curry4 uncurry4.
 | 
			
		||||
(* As classical logic statements are now unused, they should not be extracted
   (otherwise, useless errors would be raised). *)
 | 
			
		||||
Extraction Inline epsilon epsilon_def classicT arbitrary indefinite_description Inhab_witness Fix isTrue.
 | 
			
		||||
 | 
			
		||||
(**************************************************************)
 | 
			
		||||
(** ** Numerical values *)
 | 
			
		||||
 | 
			
		||||
(* number *)
 | 
			
		||||
 | 
			
		||||
Extract Inductive positive => float
 | 
			
		||||
[ "(fun p -> 1. +. (2. *. p))"
 | 
			
		||||
  "(fun p -> 2. *. p)"
 | 
			
		||||
  "1." ]
 | 
			
		||||
"(fun f2p1 f2p f1 p ->
 | 
			
		||||
if p <= 1. then f1 () else if mod_float p 2. = 0. then f2p (floor (p /. 2.)) else f2p1 (floor (p /. 2.)))".
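(* Added note, not part of the original file: the three branches above mirror
   the constructors [xI], [xO] and [xH] of [positive], so a positive number is
   extracted directly as the float it denotes.  For example, 6%positive =
   xO (xI xH) becomes the OCaml expression 2. *. (1. +. (2. *. 1.)) = 6. *)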
 | 
			
		||||
 | 
			
		||||
Extract Inductive Z => float [ "0." "" "(~-.)" ]
 | 
			
		||||
"(fun f0 fp fn z -> if z=0. then f0 () else if z>0. then fp z else fn (~-. z))".
 | 
			
		||||
 | 
			
		||||
Extract Inductive N => float [ "0." "" ]
 | 
			
		||||
"(fun f0 fp n -> if n=0. then f0 () else fp n)".
 | 
			
		||||
 | 
			
		||||
Extract Constant Z.add => "(+.)".
 | 
			
		||||
Extract Constant Z.succ => "(+.) 1.".
 | 
			
		||||
Extract Constant Z.pred => "(fun x -> x -. 1.)".
 | 
			
		||||
Extract Constant Z.sub => "(-.)".
 | 
			
		||||
Extract Constant Z.mul => "( *. )".
 | 
			
		||||
Extract Constant Z.opp => "(~-.)".
 | 
			
		||||
Extract Constant Z.abs => "abs_float".
 | 
			
		||||
Extract Constant Z.min => "min".
 | 
			
		||||
Extract Constant Z.max => "max".
 | 
			
		||||
Extract Constant Z.compare =>
 | 
			
		||||
 "fun x y -> if x=y then Eq else if x<y then Lt else Gt".
 | 
			
		||||
 | 
			
		||||
Extract Constant Pos.add => "(+.)".
 | 
			
		||||
Extract Constant Pos.succ => "(+.) 1.".
 | 
			
		||||
Extract Constant Pos.pred => "(fun x -> x -. 1.)".
 | 
			
		||||
Extract Constant Pos.sub => "(-.)".
 | 
			
		||||
Extract Constant Pos.mul => "( *. )".
 | 
			
		||||
Extract Constant Pos.min => "min".
 | 
			
		||||
Extract Constant Pos.max => "max".
 | 
			
		||||
Extract Constant Pos.compare =>
 | 
			
		||||
 "fun x y -> if x=y then Eq else if x<y then Lt else Gt".
 | 
			
		||||
Extract Constant Pos.compare_cont =>
 | 
			
		||||
 "fun x y c -> if x=y then c else if x<y then Lt else Gt".
 | 
			
		||||
 | 
			
		||||
Extract Constant N.add => "(+.)".
 | 
			
		||||
Extract Constant N.succ => "(+.) 1.".
 | 
			
		||||
Extract Constant N.pred => "(fun x -> x -. 1.)".
 | 
			
		||||
Extract Constant N.sub => "(-.)".
 | 
			
		||||
Extract Constant N.mul => "( *. )".
 | 
			
		||||
Extract Constant N.min => "min".
 | 
			
		||||
Extract Constant N.max => "max".
 | 
			
		||||
Extract Constant N.div => "(fun x y -> if x = 0. then 0. else floor (x /. y))".
 | 
			
		||||
Extract Constant N.modulo => "mod_float".
 | 
			
		||||
Extract Constant N.compare =>
 | 
			
		||||
 "fun x y -> if x=y then Eq else if x<y then Lt else Gt".
 | 
			
		||||
 | 
			
		||||
Extract Inductive Fappli_IEEE.binary_float => float [
 | 
			
		||||
  "(fun s -> if s then (0.) else (-0.))"
 | 
			
		||||
  "(fun s -> if s then infinity else neg_infinity)"
 | 
			
		||||
  "nan"
 | 
			
		||||
  "(fun (s, m, e) -> failwith ""FIXME: No extraction from binary float allowed yet."")"
 | 
			
		||||
]. 
 | 
			
		||||
 | 
			
		||||
Extract Constant JsNumber.of_int => "fun x -> x".
 | 
			
		||||
 | 
			
		||||
Extract Constant JsNumber.nan => "nan".
 | 
			
		||||
Extract Constant JsNumber.zero => "0.".
 | 
			
		||||
Extract Constant JsNumber.neg_zero => "(-0.)".
 | 
			
		||||
Extract Constant JsNumber.one => "1.".
 | 
			
		||||
Extract Constant JsNumber.infinity => "infinity".
 | 
			
		||||
Extract Constant JsNumber.neg_infinity => "neg_infinity".
 | 
			
		||||
Extract Constant JsNumber.max_value => "max_float".
 | 
			
		||||
Extract Constant JsNumber.min_value => "(Int64.float_of_bits Int64.one)".
 | 
			
		||||
Extract Constant JsNumber.pi => "(4. *. atan 1.)".
 | 
			
		||||
Extract Constant JsNumber.e => "(exp 1.)".
 | 
			
		||||
Extract Constant JsNumber.ln2 => "(log 2.)".
 | 
			
		||||
Extract Constant JsNumber.floor => "floor".
 | 
			
		||||
Extract Constant JsNumber.absolute => "abs_float".
 | 
			
		||||
 | 
			
		||||
Extract Constant JsNumber.from_string =>
 | 
			
		||||
  "(fun s ->
 | 
			
		||||
    try
 | 
			
		||||
      let s = (String.concat """" (List.map (String.make 1) s)) in
 | 
			
		||||
      if s = """" then 0. else float_of_string s
 | 
			
		||||
    with Failure ""float_of_string"" -> nan)
 | 
			
		||||
   (* Note that we're using `float_of_string' there, which does not have the same
      behavior as JavaScript.  For instance it will read ""022"" as 22 instead of
      18, which should be the JavaScript result for it. *)".
 | 
			
		||||
 | 
			
		||||
Extract Constant JsNumber.to_string =>
 | 
			
		||||
  "(fun f -> 
 | 
			
		||||
    prerr_string (""Warning:  JsNumber.to_string called.  This might be responsible for errors.  Argument value:  "" ^ string_of_float f ^ ""."");
 | 
			
		||||
    prerr_newline();
 | 
			
		||||
    let string_of_number n =
 | 
			
		||||
      let sfn = string_of_float n in
 | 
			
		||||
      (if (sfn = ""inf"") then ""Infinity"" else
 | 
			
		||||
       if (sfn = ""-inf"") then ""-Infinity"" else
 | 
			
		||||
       if (sfn = ""nan"") then ""NaN"" else
 | 
			
		||||
       let inum = int_of_float n in
 | 
			
		||||
       if (float_of_int inum = n) then (string_of_int inum) else (string_of_float n)) in
 | 
			
		||||
    let ret = ref [] in (* Ugly, but the API for OCaml string is not very functional... *)
 | 
			
		||||
    String.iter (fun c -> ret := c :: !ret) (string_of_number f);
 | 
			
		||||
    List.rev !ret)
 | 
			
		||||
   (* Note that this is ugly, we should use the spec of JsNumber.to_string here (9.8.1). *)".
 | 
			
		||||
 | 
			
		||||
Extract Constant JsNumber.add => "(+.)".
 | 
			
		||||
Extract Constant JsNumber.sub => "(-.)".
 | 
			
		||||
Extract Constant JsNumber.mult => "( *. )".
 | 
			
		||||
Extract Constant JsNumber.div => "(/.)".
 | 
			
		||||
Extract Constant JsNumber.fmod => "mod_float".
 | 
			
		||||
Extract Constant JsNumber.neg => "(~-.)".
 | 
			
		||||
Extract Constant JsNumber.sign => "(fun f -> float_of_int (compare f 0.))".
 | 
			
		||||
Extract Constant JsNumber.number_comparable => "(fun n1 n2 -> 0 = compare n1 n2)".
 | 
			
		||||
Extract Constant JsNumber.lt_bool => "(<)".
 | 
			
		||||
 | 
			
		||||
Extract Constant JsNumber.to_int32 => 
 | 
			
		||||
"fun n ->
 | 
			
		||||
  match classify_float n with
 | 
			
		||||
  | FP_normal | FP_subnormal ->
 | 
			
		||||
    let i32 = 2. ** 32. in
 | 
			
		||||
    let i31 = 2. ** 31. in
 | 
			
		||||
    let posint = (if n < 0. then (-1.) else 1.) *. (floor (abs_float n)) in
 | 
			
		||||
    let int32bit =
 | 
			
		||||
      let smod = mod_float posint i32 in
 | 
			
		||||
      if smod < 0. then smod +. i32 else smod
 | 
			
		||||
    in
 | 
			
		||||
    (if int32bit >= i31 then int32bit -. i32 else int32bit)
 | 
			
		||||
  | _ -> 0.". (* LATER:  do in Coq.  Spec is 9.5, p. 47.*)
 | 
			
		||||
 | 
			
		||||
Extract Constant JsNumber.to_uint32 =>
 | 
			
		||||
"fun n ->
 | 
			
		||||
  match classify_float n with
 | 
			
		||||
  | FP_normal | FP_subnormal ->
 | 
			
		||||
    let i32 = 2. ** 32. in
 | 
			
		||||
    let posint = (if n < 0. then (-1.) else 1.) *. (floor (abs_float n)) in
 | 
			
		||||
    let int32bit =
 | 
			
		||||
      let smod = mod_float posint i32 in
 | 
			
		||||
      if smod < 0. then smod +. i32 else smod
 | 
			
		||||
    in
 | 
			
		||||
    int32bit
 | 
			
		||||
  | _ -> 0.". (* LATER:  do in Coq.  Spec is 9.6, p. 47. *)
 | 
			
		||||
 | 
			
		||||
Extract Constant JsNumber.modulo_32 => "(fun x -> let r = mod_float x 32. in if x < 0. then r +. 32. else r)".
 | 
			
		||||
Extract Constant JsNumber.int32_bitwise_not => "fun x -> Int32.to_float (Int32.lognot (Int32.of_float x))".
 | 
			
		||||
Extract Constant JsNumber.int32_bitwise_and => "fun x y -> Int32.to_float (Int32.logand (Int32.of_float x) (Int32.of_float y))".
 | 
			
		||||
Extract Constant JsNumber.int32_bitwise_or => "fun x y -> Int32.to_float (Int32.logor (Int32.of_float x) (Int32.of_float y))".
 | 
			
		||||
Extract Constant JsNumber.int32_bitwise_xor => "fun x y -> Int32.to_float (Int32.logxor (Int32.of_float x) (Int32.of_float y))".
 | 
			
		||||
Extract Constant JsNumber.int32_left_shift => "(fun x y -> Int32.to_float (Int32.shift_left (Int32.of_float x) (int_of_float y)))".
 | 
			
		||||
Extract Constant JsNumber.int32_right_shift => "(fun x y -> Int32.to_float (Int32.shift_right (Int32.of_float x) (int_of_float y)))".
 | 
			
		||||
Extract Constant JsNumber.uint32_right_shift => 
 | 
			
		||||
"(fun x y ->
 | 
			
		||||
  let i31 = 2. ** 31. in
 | 
			
		||||
  let i32 = 2. ** 32. in
 | 
			
		||||
  let newx = if x >= i31 then x -. i32 else x in
 | 
			
		||||
  let r = Int32.to_float (Int32.shift_right_logical (Int32.of_float newx) (int_of_float y)) in
 | 
			
		||||
  if r < 0. then r +. i32 else r)".
 | 
			
		||||
 | 
			
		||||
Extract Constant int_of_char => "(fun c -> float_of_int (int_of_char c))".
 | 
			
		||||
 | 
			
		||||
Extract Constant ascii_comparable => "(=)".
 | 
			
		||||
Extract Constant lt_int_decidable => "(<)".
 | 
			
		||||
Extract Constant le_int_decidable => "(<=)".
 | 
			
		||||
Extract Constant ge_nat_decidable => "(>=)".
 | 
			
		||||
 | 
			
		||||
(* TODO ARTHUR:  This TLC lemma does not extract to something computable... whereas it should! *)
 | 
			
		||||
Extract Constant prop_eq_decidable => "(=)".
 | 
			
		||||
 | 
			
		||||
Extract Constant env_loc_global_env_record => "0".
 | 
			
		||||
 | 
			
		||||
(* The following functions pattern-match on floats and must thus be removed (inlined away) during extraction. *)
 | 
			
		||||
Extraction Inline Fappli_IEEE.Bplus Fappli_IEEE.binary_normalize Fappli_IEEE_bits.b64_plus.
 | 
			
		||||
Extraction Inline Fappli_IEEE.Bmult Fappli_IEEE.Bmult_FF Fappli_IEEE_bits.b64_mult.
 | 
			
		||||
Extraction Inline Fappli_IEEE.Bdiv Fappli_IEEE_bits.b64_div.
 | 
			
		||||
 | 
			
		||||
(* New options for the interpreter to work in Coq 8.4 *)
 | 
			
		||||
Set Extraction AccessOpaque.
 | 
			
		||||
 | 
			
		||||
(* These parameters are implementation-dependent according to the spec.
   I've chosen some very simple values, but we could choose other ones. *)
 | 
			
		||||
Extract Constant object_prealloc_global_proto => "(Coq_value_prim Coq_prim_null)".
 | 
			
		||||
Extract Constant object_prealloc_global_class => "(
 | 
			
		||||
  let rec aux s = function
 | 
			
		||||
  | 0 -> []
 | 
			
		||||
  | n -> let n' = n - 1 in
 | 
			
		||||
    s.[n'] :: aux s n'
 | 
			
		||||
  in let aux2 s =
 | 
			
		||||
    List.rev (aux s (String.length s))
 | 
			
		||||
  in aux2 ""GlobalClass"")".
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
(* Parsing *)
 | 
			
		||||
Extract Constant parse_pickable => "(fun s strict ->
 | 
			
		||||
    let str = String.concat """" (List.map (String.make 1) s) in
 | 
			
		||||
    try
 | 
			
		||||
      let parserExp = Parser_main.exp_from_string ~force_strict:strict str in
 | 
			
		||||
      Some (JsSyntaxInfos.add_infos_prog strict
 | 
			
		||||
        (Translate_syntax.exp_to_prog parserExp))
 | 
			
		||||
    with
 | 
			
		||||
    (* | Translate_syntax.CoqSyntaxDoesNotSupport _ -> assert false (* Temporary *) *)
 | 
			
		||||
    | Parser.ParserFailure _
 | 
			
		||||
    | Parser.InvalidArgument ->
 | 
			
		||||
      prerr_string (""Warning:  Parser error on eval.  Input string:  \"""" ^ str ^ ""\""\n"");
 | 
			
		||||
      None
 | 
			
		||||
  )".
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
(* Debugging *)
 | 
			
		||||
Extract Inlined Constant not_yet_implemented_because => "(fun s ->
 | 
			
		||||
  print_endline (__LOC__ ^ "": Not implemented because: "" ^ Prheap.string_of_char_list s) ;
 | 
			
		||||
  Coq_result_not_yet_implemented)".
 | 
			
		||||
Extract Inlined Constant impossible_because => "(fun s ->
 | 
			
		||||
  print_endline (__LOC__ ^ "": Stuck because: "" ^ Prheap.string_of_char_list s) ;
 | 
			
		||||
  Coq_result_impossible)".
 | 
			
		||||
Extract Inlined Constant impossible_with_heap_because => "(fun s message ->
 | 
			
		||||
  print_endline (__LOC__ ^ "": Stuck!\nState:  "" ^ Prheap.prstate true s
 | 
			
		||||
    ^ ""\nMessage:\t"" ^ Prheap.string_of_char_list message) ;
 | 
			
		||||
  Coq_result_impossible)".
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
(* Final Extraction *)
 | 
			
		||||
Extraction Blacklist string list bool.
 | 
			
		||||
Separate Extraction runs run_javascript.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
(* -- LATER: extract inequality_test_string in more efficient way*)
 | 
			
		||||
samples/Coq/JsNumber.v (new file, 1051 lines): file diff suppressed because it is too large
samples/Coq/JsPrettyInterm.v (new file, 1766 lines): file diff suppressed because it is too large
samples/Coq/Main.v (new file, 42 lines):
@@ -0,0 +1,42 @@
 | 
			
		||||
Require Import FunctionNinjas.All.
 | 
			
		||||
Require Import ListString.All.
 | 
			
		||||
Require Import Computation.
 | 
			
		||||
 | 
			
		||||
Import C.Notations.
 | 
			
		||||
 | 
			
		||||
Definition error (message : LString.t) : C.t :=
 | 
			
		||||
  do_call! Command.ShowError message in
 | 
			
		||||
  ret.
 | 
			
		||||
 | 
			
		||||
Definition main : C.t :=
 | 
			
		||||
  call! card_is_valid := Command.AskCard in
 | 
			
		||||
  if card_is_valid then
 | 
			
		||||
    call! pin := Command.AskPIN in
 | 
			
		||||
    match pin with
 | 
			
		||||
    | None => error @@ LString.s "No PIN given."
 | 
			
		||||
    | Some pin =>
 | 
			
		||||
      call! pin_is_valid := Command.CheckPIN pin in
 | 
			
		||||
      if pin_is_valid then
 | 
			
		||||
        call! ask_amount := Command.AskAmount in
 | 
			
		||||
        match ask_amount with
 | 
			
		||||
        | None => error @@ LString.s "No amount given."
 | 
			
		||||
        | Some amount =>
 | 
			
		||||
          call! amount_is_valid := Command.CheckAmount amount in
 | 
			
		||||
          if amount_is_valid then
 | 
			
		||||
            call! card_is_given := Command.GiveCard in
 | 
			
		||||
            if card_is_given then
 | 
			
		||||
              call! amount_is_given := Command.GiveAmount amount in
 | 
			
		||||
              if amount_is_given then
 | 
			
		||||
                ret
 | 
			
		||||
              else
 | 
			
		||||
                error @@ LString.s "Cannot give you the amount. Please contact your bank."
 | 
			
		||||
            else
 | 
			
		||||
              error @@ LString.s "Cannot give you back the card. Please contact your bank."
 | 
			
		||||
          else
 | 
			
		||||
            error @@ LString.s "Invalid amount."
 | 
			
		||||
        end
 | 
			
		||||
      else
 | 
			
		||||
        error @@ LString.s "Invalid PIN."
 | 
			
		||||
    end
 | 
			
		||||
  else
 | 
			
		||||
    error @@ LString.s "Invalid card.".
 | 
			
		||||
@@ -1,539 +0,0 @@
 | 
			
		||||
Require Import Omega Relations Multiset SetoidList.
 | 
			
		||||
 | 
			
		||||
(** This file is deprecated, use [Permutation.v] instead.

    Indeed, this file defines a notion of permutation based on
    multisets (there exists a permutation between two lists iff every
    element has the same multiplicity in the two lists), which
    requires a more complex apparatus (the equipment of the domain
    with a decidable equality) than [Permutation] in [Permutation.v].

    The relation between the two notions is given in lemma
    [permutation_Permutation].

    File [Permutation] concerns Leibniz equality: it shows in particular
    that [List.Permutation] and [permutation] are equivalent in this context.
*)
 | 
			
		||||
 | 
			
		||||
Set Implicit Arguments.
 | 
			
		||||
 | 
			
		||||
Local Notation "[ ]" := nil.
 | 
			
		||||
Local Notation "[ a ; .. ; b ]" := (a :: .. (b :: []) ..).
 | 
			
		||||
 | 
			
		||||
Section Permut.
 | 
			
		||||
 | 
			
		||||
(** * From lists to multisets *)
 | 
			
		||||
 | 
			
		||||
Variable A : Type.
 | 
			
		||||
Variable eqA : relation A.
 | 
			
		||||
Hypothesis eqA_equiv : Equivalence eqA.
 | 
			
		||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
 | 
			
		||||
 | 
			
		||||
Let emptyBag := EmptyBag A.
 | 
			
		||||
Let singletonBag := SingletonBag _ eqA_dec.
 | 
			
		||||
 | 
			
		||||
(** contents of a list *)
 | 
			
		||||
 | 
			
		||||
Fixpoint list_contents (l:list A) : multiset A :=
 | 
			
		||||
  match l with
 | 
			
		||||
  | [] => emptyBag
 | 
			
		||||
  | a :: l => munion (singletonBag a) (list_contents l)
 | 
			
		||||
  end.
 | 
			
		||||
 | 
			
		||||
Lemma list_contents_app :
 | 
			
		||||
  forall l m:list A,
 | 
			
		||||
    meq (list_contents (l ++ m)) (munion (list_contents l) (list_contents m)).
 | 
			
		||||
Proof.
 | 
			
		||||
  simple induction l; simpl; auto with datatypes.
 | 
			
		||||
  intros.
 | 
			
		||||
  apply meq_trans with
 | 
			
		||||
    (munion (singletonBag a) (munion (list_contents l0) (list_contents m)));
 | 
			
		||||
    auto with datatypes.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** * [permutation]: definition and basic properties *)
 | 
			
		||||
 | 
			
		||||
Definition permutation (l m:list A) := meq (list_contents l) (list_contents m).
 | 
			
		||||
 | 
			
		||||
Lemma permut_refl : forall l:list A, permutation l l.
 | 
			
		||||
Proof.
 | 
			
		||||
  unfold permutation; auto with datatypes.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_sym :
 | 
			
		||||
  forall l1 l2 : list A, permutation l1 l2 -> permutation l2 l1.
 | 
			
		||||
Proof.
 | 
			
		||||
  unfold permutation, meq; intros; symmetry; trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_trans :
 | 
			
		||||
  forall l m n:list A, permutation l m -> permutation m n -> permutation l n.
 | 
			
		||||
Proof.
 | 
			
		||||
  unfold permutation; intros.
 | 
			
		||||
  apply meq_trans with (list_contents m); auto with datatypes.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_cons_eq :
 | 
			
		||||
  forall l m:list A,
 | 
			
		||||
    permutation l m -> forall a a', eqA a a' -> permutation (a :: l) (a' :: m).
 | 
			
		||||
Proof.
 | 
			
		||||
  unfold permutation; simpl; intros.
 | 
			
		||||
  apply meq_trans with (munion (singletonBag a') (list_contents l)).
 | 
			
		||||
  apply meq_left, meq_singleton; auto.
 | 
			
		||||
  auto with datatypes.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_cons :
 | 
			
		||||
  forall l m:list A,
 | 
			
		||||
    permutation l m -> forall a:A, permutation (a :: l) (a :: m).
 | 
			
		||||
Proof.
 | 
			
		||||
  unfold permutation; simpl; auto with datatypes.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_app :
 | 
			
		||||
  forall l l' m m':list A,
 | 
			
		||||
    permutation l l' -> permutation m m' -> permutation (l ++ m) (l' ++ m').
 | 
			
		||||
Proof.
 | 
			
		||||
  unfold permutation; intros.
 | 
			
		||||
  apply meq_trans with (munion (list_contents l) (list_contents m));
 | 
			
		||||
    auto using permut_cons, list_contents_app with datatypes.
 | 
			
		||||
  apply meq_trans with (munion (list_contents l') (list_contents m'));
 | 
			
		||||
    auto using permut_cons, list_contents_app with datatypes.
 | 
			
		||||
  apply meq_trans with (munion (list_contents l') (list_contents m));
 | 
			
		||||
    auto using permut_cons, list_contents_app with datatypes.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_add_inside_eq :
 | 
			
		||||
  forall a a' l1 l2 l3 l4, eqA a a' ->
 | 
			
		||||
    permutation (l1 ++ l2) (l3 ++ l4) ->
 | 
			
		||||
    permutation (l1 ++ a :: l2) (l3 ++ a' :: l4).
 | 
			
		||||
Proof.
 | 
			
		||||
  unfold permutation, meq in *; intros.
 | 
			
		||||
  specialize H0 with a0.
 | 
			
		||||
  repeat rewrite list_contents_app in *; simpl in *.
 | 
			
		||||
  destruct (eqA_dec a a0) as [Ha|Ha]; rewrite H in Ha;
 | 
			
		||||
    decide (eqA_dec a' a0) with Ha; simpl; auto with arith.
 | 
			
		||||
  do 2 rewrite <- plus_n_Sm; f_equal; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_add_inside :
 | 
			
		||||
  forall a l1 l2 l3 l4,
 | 
			
		||||
    permutation (l1 ++ l2) (l3 ++ l4) ->
 | 
			
		||||
    permutation (l1 ++ a :: l2) (l3 ++ a :: l4).
 | 
			
		||||
Proof.
 | 
			
		||||
  unfold permutation, meq in *; intros.
 | 
			
		||||
  generalize (H a0); clear H.
 | 
			
		||||
  do 4 rewrite list_contents_app.
 | 
			
		||||
  simpl.
 | 
			
		||||
  destruct (eqA_dec a a0); simpl; auto with arith.
 | 
			
		||||
  do 2 rewrite <- plus_n_Sm; f_equal; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_add_cons_inside_eq :
 | 
			
		||||
  forall a a' l l1 l2, eqA a a' ->
 | 
			
		||||
    permutation l (l1 ++ l2) ->
 | 
			
		||||
    permutation (a :: l) (l1 ++ a' :: l2).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros;
 | 
			
		||||
  replace (a :: l) with ([] ++ a :: l); trivial;
 | 
			
		||||
    apply permut_add_inside_eq; trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_add_cons_inside :
 | 
			
		||||
  forall a l l1 l2,
 | 
			
		||||
    permutation l (l1 ++ l2) ->
 | 
			
		||||
    permutation (a :: l) (l1 ++ a :: l2).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros;
 | 
			
		||||
    replace (a :: l) with ([] ++ a :: l); trivial;
 | 
			
		||||
        apply permut_add_inside; trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_middle :
 | 
			
		||||
  forall (l m:list A) (a:A), permutation (a :: l ++ m) (l ++ a :: m).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros; apply permut_add_cons_inside; auto using permut_sym, permut_refl.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_sym_app :
 | 
			
		||||
  forall l1 l2, permutation (l1 ++ l2) (l2 ++ l1).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l1 l2;
 | 
			
		||||
    unfold permutation, meq;
 | 
			
		||||
        intro a; do 2 rewrite list_contents_app; simpl;
 | 
			
		||||
          auto with arith.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_rev :
 | 
			
		||||
  forall l, permutation l (rev l).
 | 
			
		||||
Proof.
 | 
			
		||||
  induction l.
 | 
			
		||||
  simpl; trivial using permut_refl.
 | 
			
		||||
  simpl.
 | 
			
		||||
  apply permut_add_cons_inside.
 | 
			
		||||
  rewrite <- app_nil_end. trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** * Some inversion results. *)
 | 
			
		||||
Lemma permut_conv_inv :
 | 
			
		||||
  forall e l1 l2, permutation (e :: l1) (e :: l2) -> permutation l1 l2.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros e l1 l2; unfold permutation, meq; simpl; intros H a;
 | 
			
		||||
    generalize (H a); apply plus_reg_l.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_app_inv1 :
 | 
			
		||||
  forall l l1 l2, permutation (l1 ++ l) (l2 ++ l) -> permutation l1 l2.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l l1 l2; unfold permutation, meq; simpl;
 | 
			
		||||
    intros H a; generalize (H a); clear H.
 | 
			
		||||
  do 2 rewrite list_contents_app.
 | 
			
		||||
  simpl.
 | 
			
		||||
  intros; apply plus_reg_l with (multiplicity (list_contents l) a).
 | 
			
		||||
  rewrite plus_comm; rewrite H; rewrite plus_comm.
 | 
			
		||||
  trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** we can use [multiplicity] to define [InA] and [NoDupA]. *)
 | 
			
		||||
 | 
			
		||||
Fact if_eqA_then : forall a a' (B:Type)(b b':B),
 | 
			
		||||
 eqA a a' -> (if eqA_dec a a' then b else b') = b.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros. destruct eqA_dec as [_|NEQ]; auto.
 | 
			
		||||
  contradict NEQ; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_app_inv2 :
 | 
			
		||||
  forall l l1 l2, permutation (l ++ l1) (l ++ l2) -> permutation l1 l2.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l l1 l2; unfold permutation, meq; simpl;
 | 
			
		||||
    intros H a; generalize (H a); clear H.
 | 
			
		||||
  do 2 rewrite list_contents_app.
 | 
			
		||||
  simpl.
 | 
			
		||||
  intros; apply plus_reg_l with (multiplicity (list_contents l) a).
 | 
			
		||||
  trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_remove_hd_eq :
 | 
			
		||||
  forall l l1 l2 a b, eqA a b ->
 | 
			
		||||
    permutation (a :: l) (l1 ++ b :: l2) -> permutation l (l1 ++ l2).
 | 
			
		||||
Proof.
 | 
			
		||||
  unfold permutation, meq; simpl; intros l l1 l2 a b Heq H a0.
 | 
			
		||||
  specialize H with a0.
 | 
			
		||||
  rewrite list_contents_app in *; simpl in *.
 | 
			
		||||
  apply plus_reg_l with (if eqA_dec a a0 then 1 else 0).
 | 
			
		||||
  rewrite H; clear H.
 | 
			
		||||
  symmetry; rewrite plus_comm, <- ! plus_assoc; f_equal.
 | 
			
		||||
  rewrite plus_comm.
 | 
			
		||||
  destruct (eqA_dec a a0) as [Ha|Ha]; rewrite Heq in Ha;
 | 
			
		||||
    decide (eqA_dec b a0) with Ha; reflexivity.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_remove_hd :
 | 
			
		||||
  forall l l1 l2 a,
 | 
			
		||||
    permutation (a :: l) (l1 ++ a :: l2) -> permutation l (l1 ++ l2).
 | 
			
		||||
Proof.
 | 
			
		||||
  eauto using permut_remove_hd_eq, Equivalence_Reflexive.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Fact if_eqA_else : forall a a' (B:Type)(b b':B),
 | 
			
		||||
 ~eqA a a' -> (if eqA_dec a a' then b else b') = b'.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros. decide (eqA_dec a a') with H; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Fact if_eqA_refl : forall a (B:Type)(b b':B),
 | 
			
		||||
 (if eqA_dec a a then b else b') = b.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros; apply (decide_left (eqA_dec a a)); auto with *.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** PL: Not usable in a [rewrite] without a preliminary [change]. *)
 | 
			
		||||
 | 
			
		||||
Global Instance if_eqA (B:Type)(b b':B) :
 | 
			
		||||
 Proper (eqA==>eqA==>@eq _) (fun x y => if eqA_dec x y then b else b').
 | 
			
		||||
Proof.
 | 
			
		||||
 intros x x' Hxx' y y' Hyy'.
 | 
			
		||||
 intros; destruct (eqA_dec x y) as [H|H];
 | 
			
		||||
  destruct (eqA_dec x' y') as [H'|H']; auto.
 | 
			
		||||
 contradict H'; transitivity x; auto with *; transitivity y; auto with *.
 | 
			
		||||
 contradict H; transitivity x'; auto with *; transitivity y'; auto with *.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Fact if_eqA_rewrite_l : forall a1 a1' a2 (B:Type)(b b':B),
 | 
			
		||||
 eqA a1 a1' -> (if eqA_dec a1 a2 then b else b') =
 | 
			
		||||
               (if eqA_dec a1' a2 then b else b').
 | 
			
		||||
Proof.
 | 
			
		||||
 intros; destruct (eqA_dec a1 a2) as [A1|A1];
 | 
			
		||||
  destruct (eqA_dec a1' a2) as [A1'|A1']; auto.
 | 
			
		||||
 contradict A1'; transitivity a1; eauto with *.
 | 
			
		||||
 contradict A1; transitivity a1'; eauto with *.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Fact if_eqA_rewrite_r : forall a1 a2 a2' (B:Type)(b b':B),
 | 
			
		||||
 eqA a2 a2' -> (if eqA_dec a1 a2 then b else b') =
 | 
			
		||||
               (if eqA_dec a1 a2' then b else b').
 | 
			
		||||
Proof.
 | 
			
		||||
 intros; destruct (eqA_dec a1 a2) as [A2|A2];
 | 
			
		||||
  destruct (eqA_dec a1 a2') as [A2'|A2']; auto.
 | 
			
		||||
 contradict A2'; transitivity a2; eauto with *.
 | 
			
		||||
 contradict A2; transitivity a2'; eauto with *.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
Global Instance multiplicity_eqA (l:list A) :
 | 
			
		||||
 Proper (eqA==>@eq _) (multiplicity (list_contents l)).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros x x' Hxx'.
 | 
			
		||||
  induction l as [|y l Hl]; simpl; auto.
 | 
			
		||||
  rewrite (@if_eqA_rewrite_r y x x'); auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma multiplicity_InA :
 | 
			
		||||
  forall l a, InA eqA a l <-> 0 < multiplicity (list_contents l) a.
 | 
			
		||||
Proof.
 | 
			
		||||
  induction l.
 | 
			
		||||
  simpl.
 | 
			
		||||
  split; inversion 1.
 | 
			
		||||
  simpl.
 | 
			
		||||
  intros a'; split; intros H. inversion_clear H.
 | 
			
		||||
  apply (decide_left (eqA_dec a a')); auto with *.
 | 
			
		||||
  destruct (eqA_dec a a'); auto with *. simpl; rewrite <- IHl; auto.
 | 
			
		||||
  destruct (eqA_dec a a'); auto with *. right. rewrite IHl; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma multiplicity_InA_O :
 | 
			
		||||
  forall l a, ~ InA eqA a l -> multiplicity (list_contents l) a = 0.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l a; rewrite multiplicity_InA;
 | 
			
		||||
    destruct (multiplicity (list_contents l) a); auto with arith.
 | 
			
		||||
  destruct 1; auto with arith.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma multiplicity_InA_S :
 | 
			
		||||
  forall l a, InA eqA a l -> multiplicity (list_contents l) a >= 1.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l a; rewrite multiplicity_InA; auto with arith.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma multiplicity_NoDupA : forall l,
 | 
			
		||||
  NoDupA eqA l <-> (forall a, multiplicity (list_contents l) a <= 1).
 | 
			
		||||
Proof.
 | 
			
		||||
  induction l.
 | 
			
		||||
  simpl.
 | 
			
		||||
  split; auto with arith.
 | 
			
		||||
  split; simpl.
 | 
			
		||||
  inversion_clear 1.
 | 
			
		||||
  rewrite IHl in H1.
 | 
			
		||||
  intros; destruct (eqA_dec a a0) as [EQ|NEQ]; simpl; auto with *.
 | 
			
		||||
  rewrite <- EQ.
 | 
			
		||||
  rewrite multiplicity_InA_O; auto.
 | 
			
		||||
  intros; constructor.
 | 
			
		||||
  rewrite multiplicity_InA.
 | 
			
		||||
  specialize (H a).
 | 
			
		||||
  rewrite if_eqA_refl in H.
 | 
			
		||||
  clear IHl; omega.
 | 
			
		||||
  rewrite IHl; intros.
 | 
			
		||||
  specialize (H a0). omega.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Permutation is compatible with InA. *)
 | 
			
		||||
Lemma permut_InA_InA :
 | 
			
		||||
  forall l1 l2 e, permutation l1 l2 -> InA eqA e l1 -> InA eqA e l2.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l1 l2 e.
 | 
			
		||||
  do 2 rewrite multiplicity_InA.
 | 
			
		||||
  unfold permutation, meq.
 | 
			
		||||
  intros H;rewrite H; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_cons_InA :
 | 
			
		||||
  forall l1 l2 e, permutation (e :: l1) l2 -> InA eqA e l2.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros; apply (permut_InA_InA (e:=e) H); auto with *.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Permutation of an empty list. *)
 | 
			
		||||
Lemma permut_nil :
 | 
			
		||||
  forall l, permutation l [] -> l = [].
 | 
			
		||||
Proof.
 | 
			
		||||
  intro l; destruct l as [ | e l ]; trivial.
 | 
			
		||||
  assert (InA eqA e (e::l)) by (auto with *).
 | 
			
		||||
  intro Abs; generalize (permut_InA_InA Abs H).
 | 
			
		||||
  inversion 1.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Permutation for short lists. *)
 | 
			
		||||
 | 
			
		||||
Lemma permut_length_1:
 | 
			
		||||
  forall a b, permutation [a] [b] -> eqA a b.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros a b; unfold permutation, meq.
 | 
			
		||||
  intro P; specialize (P b); simpl in *.
 | 
			
		||||
  rewrite if_eqA_refl in *.
 | 
			
		||||
  destruct (eqA_dec a b); simpl; auto; discriminate.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_length_2 :
 | 
			
		||||
  forall a1 b1 a2 b2, permutation [a1; b1] [a2; b2] ->
 | 
			
		||||
    (eqA a1 a2) /\ (eqA b1 b2) \/ (eqA a1 b2) /\ (eqA a2 b1).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros a1 b1 a2 b2 P.
 | 
			
		||||
  assert (H:=permut_cons_InA P).
 | 
			
		||||
  inversion_clear H.
 | 
			
		||||
  left; split; auto.
 | 
			
		||||
  apply permut_length_1.
 | 
			
		||||
  red; red; intros.
 | 
			
		||||
  specialize (P a). simpl in *.
 | 
			
		||||
  rewrite (@if_eqA_rewrite_l a1 a2 a) in P by auto. omega.
 | 
			
		||||
  right.
 | 
			
		||||
  inversion_clear H0; [|inversion H].
 | 
			
		||||
  split; auto.
 | 
			
		||||
  apply permut_length_1.
 | 
			
		||||
  red; red; intros.
 | 
			
		||||
  specialize (P a); simpl in *.
 | 
			
		||||
  rewrite (@if_eqA_rewrite_l a1 b2 a) in P by auto. omega.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Permutation is compatible with length. *)
 | 
			
		||||
Lemma permut_length :
 | 
			
		||||
  forall l1 l2, permutation l1 l2 -> length l1 = length l2.
 | 
			
		||||
Proof.
 | 
			
		||||
  induction l1; intros l2 H.
 | 
			
		||||
  rewrite (permut_nil (permut_sym H)); auto.
 | 
			
		||||
  assert (H0:=permut_cons_InA H).
 | 
			
		||||
  destruct (InA_split H0) as (h2,(b,(t2,(H1,H2)))).
 | 
			
		||||
  subst l2.
 | 
			
		||||
  rewrite app_length.
 | 
			
		||||
  simpl; rewrite <- plus_n_Sm; f_equal.
 | 
			
		||||
  rewrite <- app_length.
 | 
			
		||||
  apply IHl1.
 | 
			
		||||
  apply permut_remove_hd with b.
 | 
			
		||||
  apply permut_trans with (a::l1); auto.
 | 
			
		||||
  revert H1; unfold permutation, meq; simpl.
 | 
			
		||||
  intros; f_equal; auto.
 | 
			
		||||
  rewrite (@if_eqA_rewrite_l a b a0); auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma NoDupA_equivlistA_permut :
 | 
			
		||||
  forall l l', NoDupA eqA l -> NoDupA eqA l' ->
 | 
			
		||||
    equivlistA eqA l l' -> permutation l l'.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros.
 | 
			
		||||
  red; unfold meq; intros.
 | 
			
		||||
  rewrite multiplicity_NoDupA in H, H0.
 | 
			
		||||
  generalize (H a) (H0 a) (H1 a); clear H H0 H1.
 | 
			
		||||
  do 2 rewrite multiplicity_InA.
 | 
			
		||||
  destruct 3; omega.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
End Permut.
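(* Hedged illustration, not part of the original file: with Leibniz equality on
   [nat], two lists are permutations of one another exactly when every value
   has the same multiplicity in both.  [eq_nat_dec] comes from the Arith
   library; the application below mirrors the way [permutation] is used
   elsewhere in this changeset. *)
Require Import Arith.

Check (permutation _ eq_nat_dec (1 :: 2 :: 2 :: nil) (2 :: 1 :: 2 :: nil)).
(* Both lists contain 1 once and 2 twice, so this proposition is provable,
   even though the lists are not syntactically equal. *)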
 | 
			
		||||
 | 
			
		||||
Section Permut_map.
 | 
			
		||||
 | 
			
		||||
Variables A B : Type.
 | 
			
		||||
 | 
			
		||||
Variable eqA : relation A.
 | 
			
		||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
 | 
			
		||||
Hypothesis eqA_equiv : Equivalence eqA.
 | 
			
		||||
 | 
			
		||||
Variable eqB : B->B->Prop.
 | 
			
		||||
Hypothesis eqB_dec : forall x y:B, { eqB x y }+{ ~eqB x y }.
 | 
			
		||||
Hypothesis eqB_trans : Transitive eqB.
 | 
			
		||||
 | 
			
		||||
(** Permutation is compatible with map. *)
 | 
			
		||||
 | 
			
		||||
Lemma permut_map :
 | 
			
		||||
  forall f,
 | 
			
		||||
    (Proper (eqA==>eqB) f) ->
 | 
			
		||||
    forall l1 l2, permutation _ eqA_dec l1 l2 ->
 | 
			
		||||
      permutation _ eqB_dec (map f l1) (map f l2).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros f; induction l1.
 | 
			
		||||
  intros l2 P; rewrite (permut_nil eqA_equiv (permut_sym P)); apply permut_refl.
 | 
			
		||||
  intros l2 P.
 | 
			
		||||
  simpl.
 | 
			
		||||
  assert (H0:=permut_cons_InA eqA_equiv P).
 | 
			
		||||
  destruct (InA_split H0) as (h2,(b,(t2,(H1,H2)))).
 | 
			
		||||
  subst l2.
 | 
			
		||||
  rewrite map_app.
 | 
			
		||||
  simpl.
 | 
			
		||||
  apply permut_trans with (f b :: map f l1).
 | 
			
		||||
  revert H1; unfold permutation, meq; simpl.
 | 
			
		||||
  intros; f_equal; auto.
 | 
			
		||||
  destruct (eqB_dec (f b) a0) as [H2|H2];
 | 
			
		||||
    destruct (eqB_dec (f a) a0) as [H3|H3]; auto.
 | 
			
		||||
  destruct H3; transitivity (f b); auto with *.
 | 
			
		||||
  destruct H2; transitivity (f a); auto with *.
 | 
			
		||||
  apply permut_add_cons_inside.
 | 
			
		||||
  rewrite <- map_app.
 | 
			
		||||
  apply IHl1; auto.
 | 
			
		||||
  apply permut_remove_hd with b; trivial.
 | 
			
		||||
  apply permut_trans with (a::l1); auto.
 | 
			
		||||
  revert H1; unfold permutation, meq; simpl.
 | 
			
		||||
  intros; f_equal; auto.
 | 
			
		||||
  rewrite (@if_eqA_rewrite_l _ _ eqA_equiv eqA_dec a b a0); auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
End Permut_map.
 | 
			
		||||
 | 
			
		||||
Require Import Permutation.
 | 
			
		||||
 | 
			
		||||
Section Permut_permut.
 | 
			
		||||
 | 
			
		||||
Variable A : Type.
 | 
			
		||||
 | 
			
		||||
Variable eqA : relation A.
 | 
			
		||||
Hypothesis eqA_dec : forall x y:A, {eqA x y} + {~ eqA x y}.
 | 
			
		||||
Hypothesis eqA_equiv : Equivalence eqA.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_impl_permutation : forall l l',
 | 
			
		||||
  Permutation l l' -> permutation _ eqA_dec l l'.
 | 
			
		||||
Proof.
 | 
			
		||||
  induction 1.
 | 
			
		||||
    apply permut_refl.
 | 
			
		||||
    apply permut_cons; auto using Equivalence_Reflexive.
 | 
			
		||||
    change (x :: y :: l) with ([x] ++ y :: l);
 | 
			
		||||
      apply permut_add_cons_inside; simpl;
 | 
			
		||||
      apply permut_cons_eq; auto using Equivalence_Reflexive, permut_refl.
 | 
			
		||||
    apply permut_trans with l'; trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permut_eqA : forall l l', Forall2 eqA l l' -> permutation _ eqA_dec l l'.
 | 
			
		||||
Proof.
 | 
			
		||||
  induction 1.
 | 
			
		||||
    apply permut_refl.
 | 
			
		||||
    apply permut_cons_eq; trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma permutation_Permutation : forall l l',
 | 
			
		||||
  permutation _ eqA_dec l l' <->
 | 
			
		||||
  exists l'', Permutation l l'' /\ Forall2 eqA l'' l'.
 | 
			
		||||
Proof.
 | 
			
		||||
  split; intro H.
 | 
			
		||||
  (* -> *)
 | 
			
		||||
  induction l in l', H |- *.
 | 
			
		||||
    exists []; apply permut_sym, permut_nil in H as ->; auto using Forall2.
 | 
			
		||||
    pose proof H as H'.
 | 
			
		||||
    apply permut_cons_InA, InA_split in H
 | 
			
		||||
      as (l1 & y & l2 & Heq & ->); trivial.
 | 
			
		||||
    apply permut_remove_hd_eq, IHl in H'
 | 
			
		||||
      as (l'' & IHP & IHA); clear IHl; trivial.
 | 
			
		||||
    apply Forall2_app_inv_r in IHA as (l1'' & l2'' & Hl1 & Hl2 & ->).
 | 
			
		||||
    exists (l1'' ++ a :: l2''); split.
 | 
			
		||||
      apply Permutation_cons_app; trivial.
 | 
			
		||||
      apply Forall2_app, Forall2_cons; trivial.
 | 
			
		||||
  (* <- *)
 | 
			
		||||
  destruct H as (l'' & H & Heq).
 | 
			
		||||
  apply permut_trans with l''.
 | 
			
		||||
    apply Permutation_impl_permutation; trivial.
 | 
			
		||||
    apply permut_eqA; trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
End Permut_permut.
 | 
			
		||||
 | 
			
		||||
(* begin hide *)
 | 
			
		||||
(** For compatibility *)
 | 
			
		||||
Notation permut_right := permut_cons (only parsing).
 | 
			
		||||
Notation permut_tran := permut_trans (only parsing).
 | 
			
		||||
(* end hide *)
 | 
			
		||||
@@ -1,632 +0,0 @@
 | 
			
		||||
(* Adapted in May 2006 by Jean-Marc Notin from initial contents by
   Laurent Thery (Huffman contribution, October 2003) *)
 | 
			
		||||
 | 
			
		||||
Require Import List Setoid Compare_dec Morphisms.
 | 
			
		||||
Import ListNotations. (* For notations [] and [a;b;c] *)
 | 
			
		||||
Set Implicit Arguments.
 | 
			
		||||
 | 
			
		||||
Section Permutation.
 | 
			
		||||
 | 
			
		||||
Variable A:Type.
 | 
			
		||||
 | 
			
		||||
Inductive Permutation : list A -> list A -> Prop :=
 | 
			
		||||
| perm_nil: Permutation [] []
 | 
			
		||||
| perm_skip x l l' : Permutation l l' -> Permutation (x::l) (x::l')
 | 
			
		||||
| perm_swap x y l : Permutation (y::x::l) (x::y::l)
 | 
			
		||||
| perm_trans l l' l'' :
 | 
			
		||||
    Permutation l l' -> Permutation l' l'' -> Permutation l l''.
 | 
			
		||||
 | 
			
		||||
Local Hint Constructors Permutation.
 | 
			
		||||
 | 
			
		||||
(** Some facts about [Permutation] *)
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_nil : forall (l : list A), Permutation [] l -> l = [].
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l HF.
 | 
			
		||||
  remember (@nil A) as m in HF.
 | 
			
		||||
  induction HF; discriminate || auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_nil_cons : forall (l : list A) (x : A),
 | 
			
		||||
 ~ Permutation nil (x::l).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l x HF.
 | 
			
		||||
  apply Permutation_nil in HF; discriminate.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Permutation over lists is an equivalence relation *)
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_refl : forall l : list A, Permutation l l.
 | 
			
		||||
Proof.
 | 
			
		||||
  induction l; constructor. exact IHl.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_sym : forall l l' : list A,
 | 
			
		||||
 Permutation l l' -> Permutation l' l.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l l' Hperm; induction Hperm; auto.
 | 
			
		||||
  apply perm_trans with (l':=l'); assumption.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_trans : forall l l' l'' : list A,
 | 
			
		||||
 Permutation l l' -> Permutation l' l'' -> Permutation l l''.
 | 
			
		||||
Proof.
 | 
			
		||||
  exact perm_trans.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
End Permutation.
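(* Hedged illustration, not part of the original file: the [perm_swap]
   constructor directly exchanges the two head elements of a list. *)
Example Permutation_swap_example : Permutation [1; 2] [2; 1].
Proof. apply perm_swap. Qed.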
 | 
			
		||||
 | 
			
		||||
Hint Resolve Permutation_refl perm_nil perm_skip.
 | 
			
		||||
 | 
			
		||||
(* These hints do not reduce the size of the problem to solve and they
   must be used with care to avoid combinatorial explosions *)
 | 
			
		||||
 | 
			
		||||
Local Hint Resolve perm_swap perm_trans.
 | 
			
		||||
Local Hint Resolve Permutation_sym Permutation_trans.
 | 
			
		||||
 | 
			
		||||
(* This provides reflexivity, symmetry and transitivity and rewriting
 | 
			
		||||
   on morphisms to come *)
 | 
			
		||||
 | 
			
		||||
Instance Permutation_Equivalence A : Equivalence (@Permutation A) | 10 := {
 | 
			
		||||
  Equivalence_Reflexive := @Permutation_refl A ;
 | 
			
		||||
  Equivalence_Symmetric := @Permutation_sym A ;
 | 
			
		||||
  Equivalence_Transitive := @Permutation_trans A }.
 | 
			
		||||
 | 
			
		||||
Instance Permutation_cons A :
 | 
			
		||||
 Proper (Logic.eq ==> @Permutation A ==> @Permutation A) (@cons A) | 10.
 | 
			
		||||
Proof.
 | 
			
		||||
  repeat intro; subst; auto using perm_skip.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Section Permutation_properties.
 | 
			
		||||
 | 
			
		||||
Variable A:Type.
 | 
			
		||||
 | 
			
		||||
Implicit Types a b : A.
 | 
			
		||||
Implicit Types l m : list A.
 | 
			
		||||
 | 
			
		||||
(** Compatibility with others operations on lists *)
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_in : forall (l l' : list A) (x : A),
 | 
			
		||||
 Permutation l l' -> In x l -> In x l'.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l l' x Hperm; induction Hperm; simpl; tauto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Global Instance Permutation_in' :
 | 
			
		||||
 Proper (Logic.eq ==> @Permutation A ==> iff) (@In A) | 10.
 | 
			
		||||
Proof.
 | 
			
		||||
  repeat red; intros; subst; eauto using Permutation_in.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_app_tail : forall (l l' tl : list A),
 | 
			
		||||
 Permutation l l' -> Permutation (l++tl) (l'++tl).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l l' tl Hperm; induction Hperm as [|x l l'|x y l|l l' l'']; simpl; auto.
 | 
			
		||||
  eapply Permutation_trans with (l':=l'++tl); trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_app_head : forall (l tl tl' : list A),
 | 
			
		||||
 Permutation tl tl' -> Permutation (l++tl) (l++tl').
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l tl tl' Hperm; induction l;
 | 
			
		||||
   [trivial | repeat rewrite <- app_comm_cons; constructor; assumption].
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_app : forall (l m l' m' : list A),
 | 
			
		||||
 Permutation l l' -> Permutation m m' -> Permutation (l++m) (l'++m').
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l m l' m' Hpermll' Hpermmm';
 | 
			
		||||
   induction Hpermll' as [|x l l'|x y l|l l' l''];
 | 
			
		||||
    repeat rewrite <- app_comm_cons; auto.
 | 
			
		||||
  apply Permutation_trans with (l' := (x :: y :: l ++ m));
 | 
			
		||||
   [idtac | repeat rewrite app_comm_cons; apply Permutation_app_head]; trivial.
 | 
			
		||||
  apply Permutation_trans with (l' := (l' ++ m')); try assumption.
 | 
			
		||||
  apply Permutation_app_tail; assumption.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Global Instance Permutation_app' :
 | 
			
		||||
 Proper (@Permutation A ==> @Permutation A ==> @Permutation A) (@app A) | 10.
 | 
			
		||||
Proof.
 | 
			
		||||
  repeat intro; now apply Permutation_app.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_add_inside : forall a (l l' tl tl' : list A),
 | 
			
		||||
  Permutation l l' -> Permutation tl tl' ->
 | 
			
		||||
  Permutation (l ++ a :: tl) (l' ++ a :: tl').
 | 
			
		||||
Proof.
 | 
			
		||||
  intros; apply Permutation_app; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_cons_append : forall (l : list A) x,
 | 
			
		||||
  Permutation (x :: l) (l ++ x :: nil).
 | 
			
		||||
Proof. induction l; intros; auto. simpl. rewrite <- IHl; auto. Qed.
 | 
			
		||||
Local Hint Resolve Permutation_cons_append.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_app_comm : forall (l l' : list A),
 | 
			
		||||
  Permutation (l ++ l') (l' ++ l).
 | 
			
		||||
Proof.
 | 
			
		||||
  induction l as [|x l]; simpl; intro l'.
 | 
			
		||||
  rewrite app_nil_r; trivial. rewrite IHl.
 | 
			
		||||
  rewrite app_comm_cons, Permutation_cons_append.
 | 
			
		||||
  now rewrite <- app_assoc.
 | 
			
		||||
Qed.
 | 
			
		||||
Local Hint Resolve Permutation_app_comm.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_cons_app : forall (l l1 l2:list A) a,
 | 
			
		||||
  Permutation l (l1 ++ l2) -> Permutation (a :: l) (l1 ++ a :: l2).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l l1 l2 a H. rewrite H.
 | 
			
		||||
  rewrite app_comm_cons, Permutation_cons_append.
 | 
			
		||||
  now rewrite <- app_assoc.
 | 
			
		||||
Qed.
 | 
			
		||||
Local Hint Resolve Permutation_cons_app.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_middle : forall (l1 l2:list A) a,
 | 
			
		||||
  Permutation (a :: l1 ++ l2) (l1 ++ a :: l2).
 | 
			
		||||
Proof.
 | 
			
		||||
  auto.
 | 
			
		||||
Qed.
 | 
			
		||||
Local Hint Resolve Permutation_middle.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_rev : forall (l : list A), Permutation l (rev l).
 | 
			
		||||
Proof.
 | 
			
		||||
  induction l as [| x l]; simpl; trivial. now rewrite IHl at 1.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Global Instance Permutation_rev' :
 | 
			
		||||
 Proper (@Permutation A ==> @Permutation A) (@rev A) | 10.
 | 
			
		||||
Proof.
 | 
			
		||||
  repeat intro; now rewrite <- 2 Permutation_rev.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_length : forall (l l' : list A),
 | 
			
		||||
 Permutation l l' -> length l = length l'.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l l' Hperm; induction Hperm; simpl; auto. now transitivity (length l').
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Global Instance Permutation_length' :
 | 
			
		||||
 Proper (@Permutation A ==> Logic.eq) (@length A) | 10.
 | 
			
		||||
Proof.
 | 
			
		||||
  exact Permutation_length.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_ind_bis :
 | 
			
		||||
 forall P : list A -> list A -> Prop,
 | 
			
		||||
   P [] [] ->
 | 
			
		||||
   (forall x l l', Permutation l l' -> P l l' -> P (x :: l) (x :: l')) ->
 | 
			
		||||
   (forall x y l l', Permutation l l' -> P l l' -> P (y :: x :: l) (x :: y :: l')) ->
 | 
			
		||||
   (forall l l' l'', Permutation l l' -> P l l' -> Permutation l' l'' -> P l' l'' -> P l l'') ->
 | 
			
		||||
   forall l l', Permutation l l' -> P l l'.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros P Hnil Hskip Hswap Htrans.
 | 
			
		||||
  induction 1; auto.
 | 
			
		||||
  apply Htrans with (x::y::l); auto.
 | 
			
		||||
  apply Hswap; auto.
 | 
			
		||||
  induction l; auto.
 | 
			
		||||
  apply Hskip; auto.
 | 
			
		||||
  apply Hskip; auto.
 | 
			
		||||
  induction l; auto.
 | 
			
		||||
  eauto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Ltac break_list l x l' H :=
 | 
			
		||||
  destruct l as [|x l']; simpl in *;
 | 
			
		||||
  injection H; intros; subst; clear H.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_nil_app_cons : forall (l l' : list A) (x : A),
 | 
			
		||||
 ~ Permutation nil (l++x::l').
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l l' x HF.
 | 
			
		||||
  apply Permutation_nil in HF. destruct l; discriminate.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_app_inv : forall (l1 l2 l3 l4:list A) a,
 | 
			
		||||
  Permutation (l1++a::l2) (l3++a::l4) -> Permutation (l1++l2) (l3 ++ l4).
 | 
			
		||||
Proof.
 | 
			
		||||
  intros l1 l2 l3 l4 a; revert l1 l2 l3 l4.
 | 
			
		||||
  set (P l l' :=
 | 
			
		||||
       forall l1 l2 l3 l4, l=l1++a::l2 -> l'=l3++a::l4 ->
 | 
			
		||||
       Permutation (l1++l2) (l3++l4)).
 | 
			
		||||
  cut (forall l l', Permutation l l' -> P l l').
 | 
			
		||||
  intros H; intros; eapply H; eauto.
 | 
			
		||||
  apply (Permutation_ind_bis P); unfold P; clear P.
 | 
			
		||||
  - (* nil *)
 | 
			
		||||
    intros; now destruct l1.
 | 
			
		||||
  - (* skip *)
 | 
			
		||||
    intros x l l' H IH; intros.
 | 
			
		||||
    break_list l1 b l1' H0; break_list l3 c l3' H1.
 | 
			
		||||
    auto.
 | 
			
		||||
    now rewrite H.
 | 
			
		||||
    now rewrite <- H.
 | 
			
		||||
    now rewrite (IH _ _ _ _ eq_refl eq_refl).
 | 
			
		||||
  - (* swap *)
 | 
			
		||||
    intros x y l l' Hp IH; intros.
 | 
			
		||||
    break_list l1 b l1' H; break_list l3 c l3' H0.
 | 
			
		||||
    auto.
 | 
			
		||||
    break_list l3' b l3'' H.
 | 
			
		||||
    auto.
 | 
			
		||||
    constructor. now rewrite Permutation_middle.
 | 
			
		||||
    break_list l1' c l1'' H1.
 | 
			
		||||
    auto.
 | 
			
		||||
    constructor. now rewrite Permutation_middle.
 | 
			
		||||
    break_list l3' d l3'' H; break_list l1' e l1'' H1.
 | 
			
		||||
    auto.
 | 
			
		||||
    rewrite perm_swap. constructor. now rewrite Permutation_middle.
 | 
			
		||||
    rewrite perm_swap. constructor. now rewrite Permutation_middle.
 | 
			
		||||
    now rewrite perm_swap, (IH _ _ _ _ eq_refl eq_refl).
 | 
			
		||||
  - (*trans*)
 | 
			
		||||
    intros.
 | 
			
		||||
    destruct (In_split a l') as (l'1,(l'2,H6)).
 | 
			
		||||
    rewrite <- H.
 | 
			
		||||
    subst l.
 | 
			
		||||
    apply in_or_app; right; red; auto.
 | 
			
		||||
    apply perm_trans with (l'1++l'2).
 | 
			
		||||
    apply (H0 _ _ _ _ H3 H6).
 | 
			
		||||
    apply (H2 _ _ _ _ H6 H4).
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_cons_inv l l' a :
 | 
			
		||||
 Permutation (a::l) (a::l') -> Permutation l l'.
 | 
			
		||||
Proof.
 | 
			
		||||
  intro H; exact (Permutation_app_inv [] l [] l' a H).
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_cons_app_inv l l1 l2 a :
 | 
			
		||||
 Permutation (a :: l) (l1 ++ a :: l2) -> Permutation l (l1 ++ l2).
 | 
			
		||||
Proof.
 | 
			
		||||
  intro H; exact (Permutation_app_inv [] l l1 l2 a H).
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_app_inv_l : forall l l1 l2,
 | 
			
		||||
 Permutation (l ++ l1) (l ++ l2) -> Permutation l1 l2.
 | 
			
		||||
Proof.
 | 
			
		||||
  induction l; simpl; auto.
 | 
			
		||||
  intros.
 | 
			
		||||
  apply IHl.
 | 
			
		||||
  apply Permutation_cons_inv with a; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem Permutation_app_inv_r : forall l l1 l2,
 | 
			
		||||
 Permutation (l1 ++ l) (l2 ++ l) -> Permutation l1 l2.
 | 
			
		||||
Proof.
 | 
			
		||||
  induction l.
 | 
			
		||||
  intros l1 l2; do 2 rewrite app_nil_r; auto.
 | 
			
		||||
  intros.
 | 
			
		||||
  apply IHl.
 | 
			
		||||
  apply Permutation_app_inv with a; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_length_1_inv: forall a l, Permutation [a] l -> l = [a].
 | 
			
		||||
Proof.
 | 
			
		||||
  intros a l H; remember [a] as m in H.
 | 
			
		||||
  induction H; try (injection Heqm as -> ->; clear Heqm);
 | 
			
		||||
    discriminate || auto.
 | 
			
		||||
  apply Permutation_nil in H as ->; trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_length_1: forall a b, Permutation [a] [b] -> a = b.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros a b H.
 | 
			
		||||
  apply Permutation_length_1_inv in H; injection H as ->; trivial.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_length_2_inv :
 | 
			
		||||
  forall a1 a2 l, Permutation [a1;a2] l -> l = [a1;a2] \/ l = [a2;a1].
 | 
			
		||||
Proof.
 | 
			
		||||
  intros a1 a2 l H; remember [a1;a2] as m in H.
 | 
			
		||||
  revert a1 a2 Heqm.
 | 
			
		||||
  induction H; intros; try (injection Heqm; intros; subst; clear Heqm);
 | 
			
		||||
    discriminate || (try tauto).
 | 
			
		||||
  apply Permutation_length_1_inv in H as ->; left; auto.
 | 
			
		||||
  apply IHPermutation1 in Heqm as [H1|H1]; apply IHPermutation2 in H1 as ();
 | 
			
		||||
    auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_length_2 :
 | 
			
		||||
  forall a1 a2 b1 b2, Permutation [a1;a2] [b1;b2] ->
 | 
			
		||||
    a1 = b1 /\ a2 = b2 \/ a1 = b2 /\ a2 = b1.
 | 
			
		||||
Proof.
 | 
			
		||||
  intros a1 b1 a2 b2 H.
 | 
			
		||||
  apply Permutation_length_2_inv in H as [H|H]; injection H as -> ->; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Let in_middle l l1 l2 (a:A) : l = l1 ++ a :: l2 ->
 | 
			
		||||
 forall x, In x l <-> a = x \/ In x (l1++l2).
 | 
			
		||||
Proof.
 | 
			
		||||
 intros; subst; rewrite !in_app_iff; simpl. tauto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma NoDup_cardinal_incl (l l' : list A) : NoDup l -> NoDup l' ->
 | 
			
		||||
  length l = length l' -> incl l l' -> incl l' l.
 | 
			
		||||
Proof.
 | 
			
		||||
 intros N. revert l'. induction N as [|a l Hal Hl IH].
 | 
			
		||||
 - destruct l'; now auto.
 | 
			
		||||
 - intros l' Hl' E H x Hx.
 | 
			
		||||
   assert (Ha : In a l') by (apply H; simpl; auto).
 | 
			
		||||
   destruct (in_split _ _ Ha) as (l1 & l2 & H12). clear Ha.
 | 
			
		||||
   rewrite in_middle in Hx; eauto.
 | 
			
		||||
   destruct Hx as [Hx|Hx]; [left|right]; auto.
 | 
			
		||||
   apply (IH (l1++l2)); auto.
 | 
			
		||||
   * apply NoDup_remove_1 with a; rewrite <- H12; auto.
 | 
			
		||||
   * apply eq_add_S.
 | 
			
		||||
     simpl in E; rewrite E, H12, !app_length; simpl; auto with arith.
 | 
			
		||||
   * intros y Hy. assert (Hy' : In y l') by (apply H; simpl; auto).
 | 
			
		||||
     rewrite in_middle in Hy'; eauto.
 | 
			
		||||
     destruct Hy'; auto. subst y; intuition.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma NoDup_Permutation l l' : NoDup l -> NoDup l' ->
 | 
			
		||||
  (forall x:A, In x l <-> In x l') -> Permutation l l'.
 | 
			
		||||
Proof.
 | 
			
		||||
 intros N. revert l'. induction N as [|a l Hal Hl IH].
 | 
			
		||||
 - destruct l'; simpl; auto.
 | 
			
		||||
   intros Hl' H. exfalso. rewrite (H a); auto.
 | 
			
		||||
 - intros l' Hl' H.
 | 
			
		||||
   assert (Ha : In a l') by (apply H; simpl; auto).
 | 
			
		||||
   destruct (In_split _ _ Ha) as (l1 & l2 & H12).
 | 
			
		||||
   rewrite H12.
 | 
			
		||||
   apply Permutation_cons_app.
 | 
			
		||||
   apply IH; auto.
 | 
			
		||||
   * apply NoDup_remove_1 with a; rewrite <- H12; auto.
 | 
			
		||||
   * intro x. split; intros Hx.
 | 
			
		||||
     + assert (Hx' : In x l') by (apply H; simpl; auto).
 | 
			
		||||
       rewrite in_middle in Hx'; eauto.
 | 
			
		||||
       destruct Hx'; auto. subst; intuition.
 | 
			
		||||
     + assert (Hx' : In x l') by (rewrite (in_middle l1 l2 a); eauto).
 | 
			
		||||
       rewrite <- H in Hx'. destruct Hx'; auto.
 | 
			
		||||
       subst. destruct (NoDup_remove_2 _ _ _ Hl' Hx).
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma NoDup_Permutation_bis l l' : NoDup l -> NoDup l' ->
 | 
			
		||||
  length l = length l' -> incl l l' -> Permutation l l'.
 | 
			
		||||
Proof.
 | 
			
		||||
 intros. apply NoDup_Permutation; auto.
 | 
			
		||||
 split; auto. apply NoDup_cardinal_incl; auto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_NoDup l l' : Permutation l l' -> NoDup l -> NoDup l'.
 | 
			
		||||
Proof.
 | 
			
		||||
 induction 1; auto.
 | 
			
		||||
 * inversion_clear 1; constructor; eauto using Permutation_in.
 | 
			
		||||
 * inversion_clear 1 as [|? ? H1 H2]. inversion_clear H2; simpl in *.
 | 
			
		||||
   constructor. simpl; intuition. constructor; intuition.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Global Instance Permutation_NoDup' :
 | 
			
		||||
 Proper (@Permutation A ==> iff) (@NoDup A) | 10.
 | 
			
		||||
Proof.
 | 
			
		||||
  repeat red; eauto using Permutation_NoDup.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
End Permutation_properties.
 | 
			
		||||
 | 
			
		||||
Section Permutation_map.
 | 
			
		||||
 | 
			
		||||
Variable A B : Type.
 | 
			
		||||
Variable f : A -> B.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_map l l' :
 | 
			
		||||
  Permutation l l' -> Permutation (map f l) (map f l').
 | 
			
		||||
Proof.
 | 
			
		||||
 induction 1; simpl; eauto.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Global Instance Permutation_map' :
 | 
			
		||||
  Proper (@Permutation A ==> @Permutation B) (map f) | 10.
 | 
			
		||||
Proof.
 | 
			
		||||
  exact Permutation_map.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
End Permutation_map.
 | 
			
		||||
 | 
			
		||||
Section Injection.
 | 
			
		||||
 | 
			
		||||
Definition injective {A B} (f : A->B) :=
 | 
			
		||||
 forall x y, f x = f y -> x = y.
 | 
			
		||||
 | 
			
		||||
Lemma injective_map_NoDup {A B} (f:A->B) (l:list A) :
 | 
			
		||||
 injective f -> NoDup l -> NoDup (map f l).
 | 
			
		||||
Proof.
 | 
			
		||||
 intros Hf. induction 1 as [|x l Hx Hl IH]; simpl; constructor; trivial.
 | 
			
		||||
 rewrite in_map_iff. intros (y & Hy & Hy'). apply Hf in Hy. now subst.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma injective_bounded_surjective n f :
 | 
			
		||||
 injective f ->
 | 
			
		||||
 (forall x, x < n -> f x < n) ->
 | 
			
		||||
 (forall y, y < n -> exists x, x < n /\ f x = y).
 | 
			
		||||
Proof.
 | 
			
		||||
 intros Hf H.
 | 
			
		||||
 set (l := seq 0 n).
 | 
			
		||||
 assert (P : incl (map f l) l).
 | 
			
		||||
 { intros x. rewrite in_map_iff. intros (y & <- & Hy').
 | 
			
		||||
   unfold l in *. rewrite in_seq in *. simpl in *.
 | 
			
		||||
   destruct Hy' as (_,Hy'). auto with arith. }
 | 
			
		||||
 assert (P' : incl l (map f l)).
 | 
			
		||||
 { unfold l.
 | 
			
		||||
   apply NoDup_cardinal_incl; auto using injective_map_NoDup, seq_NoDup.
 | 
			
		||||
   now rewrite map_length. }
 | 
			
		||||
 intros x Hx.
 | 
			
		||||
 assert (Hx' : In x l) by (unfold l; rewrite in_seq; auto with arith).
 | 
			
		||||
 apply P' in Hx'.
 | 
			
		||||
 rewrite in_map_iff in Hx'. destruct Hx' as (y & Hy & Hy').
 | 
			
		||||
 exists y; split; auto. unfold l in *; rewrite in_seq in Hy'.
 | 
			
		||||
 destruct Hy'; auto with arith.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma nat_bijection_Permutation n f :
 | 
			
		||||
 injective f -> (forall x, x < n -> f x < n) ->
 | 
			
		||||
 let l := seq 0 n in Permutation (map f l) l.
 | 
			
		||||
Proof.
 | 
			
		||||
 intros Hf BD.
 | 
			
		||||
 apply NoDup_Permutation_bis; auto using injective_map_NoDup, seq_NoDup.
 | 
			
		||||
 * now rewrite map_length.
 | 
			
		||||
 * intros x. rewrite in_map_iff. intros (y & <- & Hy').
 | 
			
		||||
   rewrite in_seq in *. simpl in *.
 | 
			
		||||
   destruct Hy' as (_,Hy'). auto with arith.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
End Injection.
 | 
			
		||||
 | 
			
		||||
Section Permutation_alt.
 | 
			
		||||
Variable A:Type.
 | 
			
		||||
Implicit Type a : A.
 | 
			
		||||
Implicit Type l : list A.
 | 
			
		||||
 | 
			
		||||
(** Alternative characterization of permutation
 | 
			
		||||
    via [nth_error] and [nth] *)
 | 
			
		||||
 | 
			
		||||
Let adapt f n :=
 | 
			
		||||
 let m := f (S n) in if le_lt_dec m (f 0) then m else pred m.
 | 
			
		||||
 | 
			
		||||
Let adapt_injective f : injective f -> injective (adapt f).
 | 
			
		||||
Proof.
 | 
			
		||||
 unfold adapt. intros Hf x y EQ.
 | 
			
		||||
 destruct le_lt_dec as [LE|LT]; destruct le_lt_dec as [LE'|LT'].
 | 
			
		||||
 - now apply eq_add_S, Hf.
 | 
			
		||||
 - apply Lt.le_lt_or_eq in LE.
 | 
			
		||||
   destruct LE as [LT|EQ']; [|now apply Hf in EQ'].
 | 
			
		||||
   unfold lt in LT. rewrite EQ in LT.
 | 
			
		||||
   rewrite <- (Lt.S_pred _ _ LT') in LT.
 | 
			
		||||
   elim (Lt.lt_not_le _ _ LT' LT).
 | 
			
		||||
 - apply Lt.le_lt_or_eq in LE'.
 | 
			
		||||
   destruct LE' as [LT'|EQ']; [|now apply Hf in EQ'].
 | 
			
		||||
   unfold lt in LT'. rewrite <- EQ in LT'.
 | 
			
		||||
   rewrite <- (Lt.S_pred _ _ LT) in LT'.
 | 
			
		||||
   elim (Lt.lt_not_le _ _ LT LT').
 | 
			
		||||
 - apply eq_add_S, Hf.
 | 
			
		||||
   now rewrite (Lt.S_pred _ _ LT), (Lt.S_pred _ _ LT'), EQ.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Let adapt_ok a l1 l2 f : injective f -> length l1 = f 0 ->
 | 
			
		||||
 forall n, nth_error (l1++a::l2) (f (S n)) = nth_error (l1++l2) (adapt f n).
 | 
			
		||||
Proof.
 | 
			
		||||
 unfold adapt. intros Hf E n.
 | 
			
		||||
 destruct le_lt_dec as [LE|LT].
 | 
			
		||||
 - apply Lt.le_lt_or_eq in LE.
 | 
			
		||||
   destruct LE as [LT|EQ]; [|now apply Hf in EQ].
 | 
			
		||||
   rewrite <- E in LT.
 | 
			
		||||
   rewrite 2 nth_error_app1; auto.
 | 
			
		||||
 - rewrite (Lt.S_pred _ _ LT) at 1.
 | 
			
		||||
   rewrite <- E, (Lt.S_pred _ _ LT) in LT.
 | 
			
		||||
   rewrite 2 nth_error_app2; auto with arith.
 | 
			
		||||
   rewrite <- Minus.minus_Sn_m; auto with arith.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_nth_error l l' :
 | 
			
		||||
 Permutation l l' <->
 | 
			
		||||
  (length l = length l' /\
 | 
			
		||||
   exists f:nat->nat,
 | 
			
		||||
    injective f /\ forall n, nth_error l' n = nth_error l (f n)).
 | 
			
		||||
Proof.
 | 
			
		||||
 split.
 | 
			
		||||
 { intros P.
 | 
			
		||||
   split; [now apply Permutation_length|].
 | 
			
		||||
   induction P.
 | 
			
		||||
   - exists (fun n => n).
 | 
			
		||||
     split; try red; auto.
 | 
			
		||||
   - destruct IHP as (f & Hf & Hf').
 | 
			
		||||
     exists (fun n => match n with O => O | S n => S (f n) end).
 | 
			
		||||
     split; try red.
 | 
			
		||||
     * intros [|y] [|z]; simpl; now auto.
 | 
			
		||||
     * intros [|n]; simpl; auto.
 | 
			
		||||
   - exists (fun n => match n with 0 => 1 | 1 => 0 | n => n end).
 | 
			
		||||
     split; try red.
 | 
			
		||||
     * intros [|[|z]] [|[|t]]; simpl; now auto.
 | 
			
		||||
     * intros [|[|n]]; simpl; auto.
 | 
			
		||||
   - destruct IHP1 as (f & Hf & Hf').
 | 
			
		||||
     destruct IHP2 as (g & Hg & Hg').
 | 
			
		||||
     exists (fun n => f (g n)).
 | 
			
		||||
     split; try red.
 | 
			
		||||
     * auto.
 | 
			
		||||
     * intros n. rewrite <- Hf'; auto. }
 | 
			
		||||
 { revert l. induction l'.
 | 
			
		||||
   - intros [|l] (E & _); now auto.
 | 
			
		||||
   - intros l (E & f & Hf & Hf').
 | 
			
		||||
     simpl in E.
 | 
			
		||||
     assert (Ha : nth_error l (f 0) = Some a)
 | 
			
		||||
      by (symmetry; apply (Hf' 0)).
 | 
			
		||||
     destruct (nth_error_split l (f 0) Ha) as (l1 & l2 & L12 & L1).
 | 
			
		||||
     rewrite L12. rewrite <- Permutation_middle. constructor.
 | 
			
		||||
     apply IHl'; split; [|exists (adapt f); split].
 | 
			
		||||
     * revert E. rewrite L12, !app_length. simpl.
 | 
			
		||||
       rewrite <- plus_n_Sm. now injection 1.
 | 
			
		||||
     * now apply adapt_injective.
 | 
			
		||||
     * intro n. rewrite <- (adapt_ok a), <- L12; auto.
 | 
			
		||||
       apply (Hf' (S n)). }
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_nth_error_bis l l' :
 | 
			
		||||
 Permutation l l' <->
 | 
			
		||||
  exists f:nat->nat,
 | 
			
		||||
    injective f /\
 | 
			
		||||
    (forall n, n < length l -> f n < length l) /\
 | 
			
		||||
    (forall n, nth_error l' n = nth_error l (f n)).
 | 
			
		||||
Proof.
 | 
			
		||||
 rewrite Permutation_nth_error; split.
 | 
			
		||||
 - intros (E & f & Hf & Hf').
 | 
			
		||||
   exists f. do 2 (split; trivial).
 | 
			
		||||
   intros n Hn.
 | 
			
		||||
   destruct (Lt.le_or_lt (length l) (f n)) as [LE|LT]; trivial.
 | 
			
		||||
   rewrite <- nth_error_None, <- Hf', nth_error_None, <- E in LE.
 | 
			
		||||
   elim (Lt.lt_not_le _ _ Hn LE).
 | 
			
		||||
 - intros (f & Hf & Hf2 & Hf3); split; [|exists f; auto].
 | 
			
		||||
   assert (H : length l' <= length l') by auto with arith.
 | 
			
		||||
   rewrite <- nth_error_None, Hf3, nth_error_None in H.
 | 
			
		||||
   destruct (Lt.le_or_lt (length l) (length l')) as [LE|LT];
 | 
			
		||||
    [|apply Hf2 in LT; elim (Lt.lt_not_le _ _ LT H)].
 | 
			
		||||
   apply Lt.le_lt_or_eq in LE. destruct LE as [LT|EQ]; trivial.
 | 
			
		||||
   rewrite <- nth_error_Some, Hf3, nth_error_Some in LT.
 | 
			
		||||
   destruct (injective_bounded_surjective Hf Hf2 LT) as (y & Hy & Hy').
 | 
			
		||||
   apply Hf in Hy'. subst y. elim (Lt.lt_irrefl _ Hy).
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma Permutation_nth l l' d :
 | 
			
		||||
 Permutation l l' <->
 | 
			
		||||
  (let n := length l in
 | 
			
		||||
   length l' = n /\
 | 
			
		||||
   exists f:nat->nat,
 | 
			
		||||
    (forall x, x < n -> f x < n) /\
 | 
			
		||||
    (forall x y, x < n -> y < n -> f x = f y -> x = y) /\
 | 
			
		||||
    (forall x, x < n -> nth x l' d = nth (f x) l d)).
 | 
			
		||||
Proof.
 | 
			
		||||
 split.
 | 
			
		||||
 - intros H.
 | 
			
		||||
   assert (E := Permutation_length H).
 | 
			
		||||
   split; auto.
 | 
			
		||||
   apply Permutation_nth_error_bis in H.
 | 
			
		||||
   destruct H as (f & Hf & Hf2 & Hf3).
 | 
			
		||||
   exists f. split; [|split]; auto.
 | 
			
		||||
   intros n Hn. rewrite <- 2 nth_default_eq. unfold nth_default.
 | 
			
		||||
    now rewrite Hf3.
 | 
			
		||||
 - intros (E & f & Hf1 & Hf2 & Hf3).
 | 
			
		||||
   rewrite Permutation_nth_error.
 | 
			
		||||
   split; auto.
 | 
			
		||||
   exists (fun n => if le_lt_dec (length l) n then n else f n).
 | 
			
		||||
   split.
 | 
			
		||||
   * intros x y.
 | 
			
		||||
     destruct le_lt_dec as [LE|LT];
 | 
			
		||||
      destruct le_lt_dec as [LE'|LT']; auto.
 | 
			
		||||
     + apply Hf1 in LT'. intros ->.
 | 
			
		||||
       elim (Lt.lt_irrefl (f y)). eapply Lt.lt_le_trans; eauto.
 | 
			
		||||
     + apply Hf1 in LT. intros <-.
 | 
			
		||||
       elim (Lt.lt_irrefl (f x)). eapply Lt.lt_le_trans; eauto.
 | 
			
		||||
   * intros n.
 | 
			
		||||
     destruct le_lt_dec as [LE|LT].
 | 
			
		||||
     + assert (LE' : length l' <= n) by (now rewrite E).
 | 
			
		||||
       rewrite <- nth_error_None in LE, LE'. congruence.
 | 
			
		||||
     + assert (LT' : n < length l') by (now rewrite E).
 | 
			
		||||
       specialize (Hf3 n LT). rewrite <- 2 nth_default_eq in Hf3.
 | 
			
		||||
       unfold nth_default in Hf3.
 | 
			
		||||
       apply Hf1 in LT.
 | 
			
		||||
       rewrite <- nth_error_Some in LT, LT'.
 | 
			
		||||
       do 2 destruct nth_error; congruence.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
End Permutation_alt.
 | 
			
		||||
 | 
			
		||||
(* begin hide *)
 | 
			
		||||
Notation Permutation_app_swap := Permutation_app_comm (only parsing).
 | 
			
		||||
(* end hide *)
 | 
			
		||||
@@ -1,2 +0,0 @@
 | 
			
		||||
Require Export Sorted.
 | 
			
		||||
Require Export Mergesort.
samples/Coq/Spec.v | 62 (Normal file)
@@ -0,0 +1,62 @@
(** Specifications. *)
 | 
			
		||||
Require Import Coq.Lists.List.
 | 
			
		||||
Require Import Coq.Strings.Ascii.
 | 
			
		||||
Require Import FunctionNinjas.All.
 | 
			
		||||
Require Import ListString.All.
 | 
			
		||||
Require Import Computation.
 | 
			
		||||
 | 
			
		||||
Import ListNotations.
 | 
			
		||||
Local Open Scope char.
 | 
			
		||||
 | 
			
		||||
(** A run is an execution of the program with explicit answers for the
 | 
			
		||||
    system calls. *)
 | 
			
		||||
Module Run.
 | 
			
		||||
  (** We define a run by induction on the structure of a computation. *)
 | 
			
		||||
  Inductive t : C.t -> Type :=
 | 
			
		||||
  | Ret : t C.Ret
 | 
			
		||||
  | Call : forall (command : Command.t) (answer : Command.answer command)
 | 
			
		||||
    {handler : Command.answer command -> C.t}, t (handler answer) ->
 | 
			
		||||
    t (C.Call command handler).
 | 
			
		||||
 | 
			
		||||
  (** The trace of a run. *)
 | 
			
		||||
  Fixpoint trace {x : C.t} (run : t x)
 | 
			
		||||
    : list {command : Command.t & Command.answer command} :=
 | 
			
		||||
    match run with
 | 
			
		||||
    | Ret => []
 | 
			
		||||
    | Call command answer _ run => existT _ command answer :: trace run
 | 
			
		||||
    end.
 | 
			
		||||
End Run.
 | 
			
		||||
 | 
			
		||||
Module Temporal.
 | 
			
		||||
  Module All.
 | 
			
		||||
    Inductive t (P : Command.t -> Prop) : C.t -> Prop :=
 | 
			
		||||
    | Ret : t P C.Ret
 | 
			
		||||
    | Call : forall (c : Command.t) (h : Command.answer c -> C.t),
 | 
			
		||||
      P c -> (forall a, t P (h a)) ->
 | 
			
		||||
      t P (C.Call c h).
 | 
			
		||||
  End All.
 | 
			
		||||
 | 
			
		||||
  Module One.
 | 
			
		||||
    Inductive t (P : Command.t -> Prop) : C.t -> Prop :=
 | 
			
		||||
    | CallThis : forall (c : Command.t) (h : Command.answer c -> C.t),
 | 
			
		||||
      P c ->
 | 
			
		||||
      t P (C.Call c h)
 | 
			
		||||
    | CallOther : forall (c : Command.t) (h : Command.answer c -> C.t),
 | 
			
		||||
      (forall a, t P (h a)) ->
 | 
			
		||||
      t P (C.Call c h).
 | 
			
		||||
  End One.
 | 
			
		||||
 | 
			
		||||
  Module Then.
 | 
			
		||||
    Inductive t (P1 P2 : Command.t -> Prop) : C.t -> Prop :=
 | 
			
		||||
    | Ret : t P1 P2 C.Ret
 | 
			
		||||
    | Call : forall (c : Command.t) (h : Command.answer c -> C.t),
 | 
			
		||||
      (forall a, t P1 P2 (h a)) ->
 | 
			
		||||
      t P1 P2 (C.Call c h)
 | 
			
		||||
    | CallThen : forall (c : Command.t) (h : Command.answer c -> C.t),
 | 
			
		||||
      P1 c -> (forall a, One.t P2 (h a)) ->
 | 
			
		||||
      t P1 P2 (C.Call c h).
 | 
			
		||||
  End Then.
 | 
			
		||||
End Temporal.
 | 
			
		||||
 | 
			
		||||
Module CardBeforeMoney.
 | 
			
		||||
End CardBeforeMoney.
 | 
			
		||||
@@ -1,419 +0,0 @@
 | 
			
		||||
(** Sketch of the proof of {p:nat|p<=n} = {p:nat|p<=m} -> n=m
 | 
			
		||||
 | 
			
		||||
  - preliminary results on the irrelevance of boundedness proofs
 | 
			
		||||
  - introduce the notion of finite cardinal |A|
 | 
			
		||||
  - prove that |{p:nat|p<=n}| = n
 | 
			
		||||
  - prove that |A| = n /\ |A| = m -> n = m if equality is decidable on A
 | 
			
		||||
  - prove that equality is decidable on A
 | 
			
		||||
  - conclude
 | 
			
		||||
*)
 | 
			
		||||
 | 
			
		||||
(** * Preliminary results on [nat] and [le] *)
 | 
			
		||||
 | 
			
		||||
(** Proving axiom K on [nat] *)
 | 
			
		||||
 | 
			
		||||
Require Import Eqdep_dec.
 | 
			
		||||
Require Import Arith.
 | 
			
		||||
 | 
			
		||||
Theorem eq_rect_eq_nat :
 | 
			
		||||
  forall (p:nat) (Q:nat->Type) (x:Q p) (h:p=p), x = eq_rect p Q x p h.
 | 
			
		||||
Proof.
 | 
			
		||||
intros.
 | 
			
		||||
apply K_dec_set with (p := h).
 | 
			
		||||
apply eq_nat_dec.
 | 
			
		||||
reflexivity.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Proving unicity of proofs of [(n<=m)%nat] *)
 | 
			
		||||
 | 
			
		||||
Scheme le_ind' := Induction for le Sort Prop.
 | 
			
		||||
 | 
			
		||||
Theorem le_uniqueness_proof : forall (n m : nat) (p q : n <= m), p = q.
 | 
			
		||||
Proof.
 | 
			
		||||
induction p using le_ind'; intro q.
 | 
			
		||||
 replace (le_n n) with
 | 
			
		||||
  (eq_rect _ (fun n0 => n <= n0) (le_n n) _ (refl_equal n)).
 | 
			
		||||
 2:reflexivity.
 | 
			
		||||
  generalize (refl_equal n).
 | 
			
		||||
    pattern n at 2 4 6 10, q; case q; [intro | intros m l e].
 | 
			
		||||
     rewrite <- eq_rect_eq_nat; trivial.
 | 
			
		||||
     contradiction (le_Sn_n m); rewrite <- e; assumption.
 | 
			
		||||
 replace (le_S n m p) with
 | 
			
		||||
  (eq_rect _ (fun n0 => n <= n0) (le_S n m p) _ (refl_equal (S m))).
 | 
			
		||||
 2:reflexivity.
 | 
			
		||||
  generalize (refl_equal (S m)).
 | 
			
		||||
    pattern (S m) at 1 3 4 6, q; case q; [intro Heq | intros m0 l HeqS].
 | 
			
		||||
     contradiction (le_Sn_n m); rewrite Heq; assumption.
 | 
			
		||||
     injection HeqS; intro Heq; generalize l HeqS.
 | 
			
		||||
      rewrite <- Heq; intros; rewrite <- eq_rect_eq_nat.
 | 
			
		||||
      rewrite (IHp l0); reflexivity.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Proving irrelevance of boundedness proofs while building
 | 
			
		||||
    elements of interval *)
 | 
			
		||||
 | 
			
		||||
Lemma dep_pair_intro :
 | 
			
		||||
  forall (n x y:nat) (Hx : x<=n) (Hy : y<=n), x=y ->
 | 
			
		||||
    exist (fun x => x <= n) x Hx = exist (fun x => x <= n) y Hy.
 | 
			
		||||
Proof.
 | 
			
		||||
intros n x y Hx Hy Heq.
 | 
			
		||||
generalize Hy.
 | 
			
		||||
rewrite <- Heq.
 | 
			
		||||
intros.
 | 
			
		||||
rewrite (le_uniqueness_proof x n Hx Hy0).
 | 
			
		||||
reflexivity.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** * Proving that {p:nat|p<=n} = {p:nat|p<=m} -> n=m *)
 | 
			
		||||
 | 
			
		||||
(** Definition of having finite cardinality [n+1] for a set [A] *)
 | 
			
		||||
 | 
			
		||||
Definition card (A:Set) n :=
 | 
			
		||||
  exists f,
 | 
			
		||||
    (forall x:A, f x <= n) /\
 | 
			
		||||
    (forall x y:A, f x = f y -> x = y) /\
 | 
			
		||||
    (forall m, m <= n -> exists x:A, f x = m).
 | 
			
		||||
 | 
			
		||||
Require Import Arith.
 | 
			
		||||
 | 
			
		||||
(** Showing that the interval [0;n] has cardinality [n+1] *)
 | 
			
		||||
 | 
			
		||||
Theorem card_interval : forall n, card {x:nat|x<=n} n.
 | 
			
		||||
Proof.
 | 
			
		||||
intro n.
 | 
			
		||||
exists (fun x:{x:nat|x<=n} => proj1_sig x).
 | 
			
		||||
split.
 | 
			
		||||
(* bounded *)
 | 
			
		||||
intro x; apply (proj2_sig x).
 | 
			
		||||
split.
 | 
			
		||||
(* injectivity *)
 | 
			
		||||
intros (p,Hp) (q,Hq).
 | 
			
		||||
simpl.
 | 
			
		||||
intro Hpq.
 | 
			
		||||
apply dep_pair_intro; assumption.
 | 
			
		||||
(* surjectivity *)
 | 
			
		||||
intros m Hmn.
 | 
			
		||||
exists (exist (fun x : nat => x <= n) m Hmn).
 | 
			
		||||
reflexivity.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Showing that equality on the interval [0;n] is decidable *)
 | 
			
		||||
 | 
			
		||||
Lemma interval_dec :
 | 
			
		||||
  forall n (x y : {m:nat|m<=n}), {x=y}+{x<>y}.
 | 
			
		||||
Proof.
 | 
			
		||||
intros n (p,Hp).
 | 
			
		||||
induction p; intros ([|q],Hq).
 | 
			
		||||
left.
 | 
			
		||||
  apply dep_pair_intro.
 | 
			
		||||
  reflexivity.
 | 
			
		||||
right.
 | 
			
		||||
  intro H; discriminate H.
 | 
			
		||||
right.
 | 
			
		||||
  intro H; discriminate H.
 | 
			
		||||
assert (Hp' : p <= n).
 | 
			
		||||
  apply le_Sn_le; assumption.
 | 
			
		||||
assert (Hq' : q <= n).
 | 
			
		||||
  apply le_Sn_le; assumption.
 | 
			
		||||
destruct (IHp Hp' (exist (fun m => m <= n) q Hq'))
 | 
			
		||||
  as [Heq|Hneq].
 | 
			
		||||
left.
 | 
			
		||||
  injection Heq; intro Heq'.
 | 
			
		||||
  apply dep_pair_intro.
 | 
			
		||||
  apply eq_S.
 | 
			
		||||
  assumption.
 | 
			
		||||
right.
 | 
			
		||||
  intro HeqS.
 | 
			
		||||
  injection HeqS; intro Heq.
 | 
			
		||||
  apply Hneq.
 | 
			
		||||
  apply dep_pair_intro.
 | 
			
		||||
  assumption.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Showing that the cardinality relation is functional on decidable sets *)
 | 
			
		||||
 | 
			
		||||
Lemma card_inj_aux :
 | 
			
		||||
  forall (A:Type) f g n,
 | 
			
		||||
    (forall x:A, f x <= 0) ->
 | 
			
		||||
    (forall x y:A, f x = f y -> x = y) ->
 | 
			
		||||
    (forall m, m <= S n -> exists x:A, g x = m)
 | 
			
		||||
     -> False.
 | 
			
		||||
Proof.
 | 
			
		||||
intros A f g n Hfbound Hfinj Hgsurj.
 | 
			
		||||
destruct (Hgsurj (S n) (le_n _)) as (x,Hx).
 | 
			
		||||
destruct (Hgsurj n (le_S _ _ (le_n _))) as (x',Hx').
 | 
			
		||||
assert (Hfx : 0 = f x).
 | 
			
		||||
apply le_n_O_eq.
 | 
			
		||||
apply Hfbound.
 | 
			
		||||
assert (Hfx' : 0 = f x').
 | 
			
		||||
apply le_n_O_eq.
 | 
			
		||||
apply Hfbound.
 | 
			
		||||
assert (x=x').
 | 
			
		||||
apply Hfinj.
 | 
			
		||||
rewrite <- Hfx.
 | 
			
		||||
rewrite <- Hfx'.
 | 
			
		||||
reflexivity.
 | 
			
		||||
rewrite H in Hx.
 | 
			
		||||
rewrite Hx' in Hx.
 | 
			
		||||
apply (n_Sn _ Hx).
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** For [dec_restrict], we use a lemma on the negation of equality
 | 
			
		||||
that requires proof-irrelevance. It should be possible to avoid this
 | 
			
		||||
lemma by generalizing over a first-order definition of [x<>y], say
 | 
			
		||||
[neq] such that [{x=y}+{neq x y}] and [~(x=y /\ neq x y)]; for such
 | 
			
		||||
[neq], unicity of proofs could be proven *)
 | 
			
		||||
 | 
			
		||||
  Require Import Classical.
 | 
			
		||||
  Lemma neq_dep_intro :
 | 
			
		||||
   forall (A:Set) (z x y:A) (p:x<>z) (q:y<>z), x=y ->
 | 
			
		||||
      exist (fun x => x <> z) x p = exist (fun x => x <> z) y q.
 | 
			
		||||
  Proof.
 | 
			
		||||
  intros A z x y p q Heq.
 | 
			
		||||
   generalize q; clear q; rewrite <- Heq; intro q.
 | 
			
		||||
   rewrite (proof_irrelevance _ p q); reflexivity.
 | 
			
		||||
  Qed.
 | 
			
		||||
 | 
			
		||||
Lemma dec_restrict :
 | 
			
		||||
  forall (A:Set),
 | 
			
		||||
    (forall x y :A, {x=y}+{x<>y}) ->
 | 
			
		||||
     forall z (x y :{a:A|a<>z}), {x=y}+{x<>y}.
 | 
			
		||||
Proof.
 | 
			
		||||
intros A Hdec z (x,Hx) (y,Hy).
 | 
			
		||||
destruct (Hdec x y) as [Heq|Hneq].
 | 
			
		||||
left; apply neq_dep_intro; assumption.
 | 
			
		||||
right; intro Heq; injection Heq; exact Hneq.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma pred_inj : forall n m,
 | 
			
		||||
  0 <> n -> 0 <> m -> pred m = pred n -> m = n.
 | 
			
		||||
Proof.
 | 
			
		||||
destruct n.
 | 
			
		||||
intros m H; destruct H; reflexivity.
 | 
			
		||||
destruct m.
 | 
			
		||||
intros _ H; destruct H; reflexivity.
 | 
			
		||||
simpl; intros _ _ H.
 | 
			
		||||
rewrite H.
 | 
			
		||||
reflexivity.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma le_neq_lt : forall n m, n <= m -> n<>m -> n < m.
 | 
			
		||||
Proof.
 | 
			
		||||
intros n m Hle Hneq.
 | 
			
		||||
destruct (le_lt_eq_dec n m Hle).
 | 
			
		||||
assumption.
 | 
			
		||||
contradiction.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Lemma inj_restrict :
 | 
			
		||||
  forall (A:Set) (f:A->nat) x y z,
 | 
			
		||||
    (forall x y : A, f x = f y -> x = y)
 | 
			
		||||
    -> x <> z -> f y < f z -> f z <= f x
 | 
			
		||||
    -> pred (f x) = f y
 | 
			
		||||
    -> False.
 | 
			
		||||
 | 
			
		||||
(* Search error without the type of f !! *)
 | 
			
		||||
Proof.
 | 
			
		||||
intros A f x y z Hfinj Hneqx Hfy Hfx Heq.
 | 
			
		||||
assert (f z <> f x).
 | 
			
		||||
  apply sym_not_eq.
 | 
			
		||||
  intro Heqf.
 | 
			
		||||
  apply Hneqx.
 | 
			
		||||
  apply Hfinj.
 | 
			
		||||
  assumption.
 | 
			
		||||
assert (f x = S (f y)).
 | 
			
		||||
  assert (0 < f x).
 | 
			
		||||
    apply le_lt_trans with (f z).
 | 
			
		||||
    apply le_O_n.
 | 
			
		||||
    apply le_neq_lt; assumption.
 | 
			
		||||
  apply pred_inj.
 | 
			
		||||
  apply O_S.
 | 
			
		||||
  apply lt_O_neq; assumption.
 | 
			
		||||
  exact Heq.
 | 
			
		||||
assert (f z <= f y).
 | 
			
		||||
destruct (le_lt_or_eq _ _ Hfx).
 | 
			
		||||
  apply lt_n_Sm_le.
 | 
			
		||||
  rewrite <- H0.
 | 
			
		||||
  assumption.
 | 
			
		||||
  contradiction Hneqx.
 | 
			
		||||
  symmetry.
 | 
			
		||||
  apply Hfinj.
 | 
			
		||||
  assumption.
 | 
			
		||||
contradiction (lt_not_le (f y) (f z)).
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
Theorem card_inj : forall m n (A:Set),
 | 
			
		||||
  (forall x y :A, {x=y}+{x<>y}) ->
 | 
			
		||||
  card A m -> card A n -> m = n.
 | 
			
		||||
Proof.
 | 
			
		||||
induction m; destruct n;
 | 
			
		||||
intros A Hdec
 | 
			
		||||
 (f,(Hfbound,(Hfinj,Hfsurj)))
 | 
			
		||||
 (g,(Hgbound,(Hginj,Hgsurj))).
 | 
			
		||||
(* 0/0 *)
 | 
			
		||||
reflexivity.
 | 
			
		||||
(* 0/Sm *)
 | 
			
		||||
destruct (card_inj_aux _ _ _ _ Hfbound Hfinj Hgsurj).
 | 
			
		||||
(* Sn/0 *)
 | 
			
		||||
destruct (card_inj_aux _ _ _ _ Hgbound Hginj Hfsurj).
 | 
			
		||||
(* Sn/Sm *)
 | 
			
		||||
destruct (Hgsurj (S n) (le_n _)) as (xSn,HSnx).
 | 
			
		||||
rewrite IHm with (n:=n) (A := {x:A|x<>xSn}).
 | 
			
		||||
reflexivity.
 | 
			
		||||
(* decidability of eq on {x:A|x<>xSn} *)
 | 
			
		||||
apply dec_restrict.
 | 
			
		||||
assumption.
 | 
			
		||||
(* cardinality of {x:A|x<>xSn} is m *)
 | 
			
		||||
pose (f' := fun x' : {x:A|x<>xSn} =>
 | 
			
		||||
    let (x,Hneq) := x' in
 | 
			
		||||
    if le_lt_dec (f xSn) (f x)
 | 
			
		||||
    then pred (f x)
 | 
			
		||||
    else f x).
 | 
			
		||||
exists f'.
 | 
			
		||||
split.
 | 
			
		||||
(* f' is bounded *)
 | 
			
		||||
unfold f'.
 | 
			
		||||
intros (x,_).
 | 
			
		||||
destruct (le_lt_dec (f xSn) (f x)) as [Hle|Hge].
 | 
			
		||||
change m with (pred (S m)).
 | 
			
		||||
apply le_pred.
 | 
			
		||||
apply Hfbound.
 | 
			
		||||
apply le_S_n.
 | 
			
		||||
apply le_trans with (f xSn).
 | 
			
		||||
exact Hge.
 | 
			
		||||
apply Hfbound.
 | 
			
		||||
split.
 | 
			
		||||
(* f' is injective *)
 | 
			
		||||
unfold f'.
 | 
			
		||||
intros (x,Hneqx) (y,Hneqy) Heqf'.
 | 
			
		||||
destruct (le_lt_dec (f xSn) (f x)) as [Hlefx|Hgefx];
 | 
			
		||||
destruct (le_lt_dec (f xSn) (f y)) as [Hlefy|Hgefy].
 | 
			
		||||
(* f xSn <= f x and f xSn <= f y *)
 | 
			
		||||
assert (Heq : x = y).
 | 
			
		||||
  apply Hfinj.
 | 
			
		||||
  assert (f xSn <> f y).
 | 
			
		||||
    apply sym_not_eq.
 | 
			
		||||
    intro Heqf.
 | 
			
		||||
    apply Hneqy.
 | 
			
		||||
    apply Hfinj.
 | 
			
		||||
    assumption.
 | 
			
		||||
  assert (0 < f y).
 | 
			
		||||
    apply le_lt_trans with (f xSn).
 | 
			
		||||
    apply le_O_n.
 | 
			
		||||
    apply le_neq_lt; assumption.
 | 
			
		||||
  assert (f xSn <> f x).
 | 
			
		||||
    apply sym_not_eq.
 | 
			
		||||
    intro Heqf.
 | 
			
		||||
    apply Hneqx.
 | 
			
		||||
    apply Hfinj.
 | 
			
		||||
    assumption.
 | 
			
		||||
  assert (0 < f x).
 | 
			
		||||
    apply le_lt_trans with (f xSn).
 | 
			
		||||
    apply le_O_n.
 | 
			
		||||
    apply le_neq_lt; assumption.
 | 
			
		||||
  apply pred_inj.
 | 
			
		||||
  apply lt_O_neq; assumption.
 | 
			
		||||
  apply lt_O_neq; assumption.
 | 
			
		||||
  assumption.
 | 
			
		||||
apply neq_dep_intro; assumption.
 | 
			
		||||
(* f y < f xSn <= f x *)
 | 
			
		||||
destruct (inj_restrict A f x y xSn); assumption.
 | 
			
		||||
(* f x < f xSn <= f y *)
 | 
			
		||||
symmetry in Heqf'.
 | 
			
		||||
destruct (inj_restrict A f y x xSn); assumption.
 | 
			
		||||
(* f x < f xSn and f y < f xSn *)
 | 
			
		||||
assert (Heq : x=y).
 | 
			
		||||
  apply Hfinj; assumption.
 | 
			
		||||
apply neq_dep_intro; assumption.
 | 
			
		||||
(* f' is surjective *)
 | 
			
		||||
intros p Hlep.
 | 
			
		||||
destruct (le_lt_dec (f xSn) p) as [Hle|Hlt].
 | 
			
		||||
(* case f xSn <= p *)
 | 
			
		||||
destruct (Hfsurj (S p) (le_n_S _ _ Hlep)) as (x,Hx).
 | 
			
		||||
assert (Hneq : x <> xSn).
 | 
			
		||||
  intro Heqx.
 | 
			
		||||
  rewrite Heqx in Hx.
 | 
			
		||||
  rewrite Hx in Hle.
 | 
			
		||||
  apply le_Sn_n with p; assumption.
 | 
			
		||||
exists (exist (fun a => a<>xSn) x Hneq).
 | 
			
		||||
unfold f'.
 | 
			
		||||
destruct (le_lt_dec (f xSn) (f x)) as [Hle'|Hlt'].
 | 
			
		||||
rewrite Hx; reflexivity.
 | 
			
		||||
rewrite Hx in Hlt'.
 | 
			
		||||
contradiction (le_not_lt (f xSn) p).
 | 
			
		||||
apply lt_trans with (S p).
 | 
			
		||||
apply lt_n_Sn.
 | 
			
		||||
assumption.
 | 
			
		||||
(* case p < f xSn *)
 | 
			
		||||
destruct (Hfsurj p (le_S _ _ Hlep)) as (x,Hx).
 | 
			
		||||
assert (Hneq : x <> xSn).
 | 
			
		||||
  intro Heqx.
 | 
			
		||||
  rewrite Heqx in Hx.
 | 
			
		||||
  rewrite Hx in Hlt.
 | 
			
		||||
  apply (lt_irrefl p).
 | 
			
		||||
  assumption.
 | 
			
		||||
exists (exist (fun a => a<>xSn) x Hneq).
 | 
			
		||||
unfold f'.
 | 
			
		||||
destruct (le_lt_dec (f xSn) (f x)) as [Hle'|Hlt'].
 | 
			
		||||
  rewrite Hx in Hle'.
 | 
			
		||||
  contradiction (lt_irrefl p).
 | 
			
		||||
  apply lt_le_trans with (f xSn); assumption.
 | 
			
		||||
  assumption.
 | 
			
		||||
(* cardinality of {x:A|x<>xSn} is n *)
 | 
			
		||||
pose (g' := fun x' : {x:A|x<>xSn} =>
 | 
			
		||||
   let (x,Hneq) := x' in
 | 
			
		||||
   if Hdec x xSn then 0 else g x).
 | 
			
		||||
exists g'.
 | 
			
		||||
split.
 | 
			
		||||
(* g is bounded *)
 | 
			
		||||
unfold g'.
 | 
			
		||||
intros (x,_).
 | 
			
		||||
destruct (Hdec x xSn) as [_|Hneq].
 | 
			
		||||
apply le_O_n.
 | 
			
		||||
assert (Hle_gx:=Hgbound x).
 | 
			
		||||
destruct (le_lt_or_eq _ _ Hle_gx).
 | 
			
		||||
apply lt_n_Sm_le.
 | 
			
		||||
assumption.
 | 
			
		||||
contradiction Hneq.
 | 
			
		||||
apply Hginj.
 | 
			
		||||
rewrite HSnx.
 | 
			
		||||
assumption.
 | 
			
		||||
split.
 | 
			
		||||
(* g is injective *)
 | 
			
		||||
unfold g'.
 | 
			
		||||
intros (x,Hneqx) (y,Hneqy) Heqg'.
 | 
			
		||||
destruct (Hdec x xSn) as [Heqx|_].
 | 
			
		||||
contradiction Hneqx.
 | 
			
		||||
destruct (Hdec y xSn) as [Heqy|_].
 | 
			
		||||
contradiction Hneqy.
 | 
			
		||||
assert (Heq : x=y).
 | 
			
		||||
  apply Hginj; assumption.
 | 
			
		||||
apply neq_dep_intro; assumption.
 | 
			
		||||
(* g is surjective *)
 | 
			
		||||
intros p Hlep.
 | 
			
		||||
destruct (Hgsurj p (le_S _ _ Hlep)) as (x,Hx).
 | 
			
		||||
assert (Hneq : x<>xSn).
 | 
			
		||||
  intro Heq.
 | 
			
		||||
  rewrite Heq in Hx.
 | 
			
		||||
  rewrite Hx in HSnx.
 | 
			
		||||
  rewrite HSnx in Hlep.
 | 
			
		||||
  contradiction (le_Sn_n _ Hlep).
 | 
			
		||||
exists (exist (fun a => a<>xSn) x Hneq).
 | 
			
		||||
simpl.
 | 
			
		||||
destruct (Hdec x xSn) as [Heqx|_].
 | 
			
		||||
contradiction Hneq.
 | 
			
		||||
assumption.
 | 
			
		||||
Qed.
 | 
			
		||||
 | 
			
		||||
(** Conclusion *)
 | 
			
		||||
 | 
			
		||||
Theorem interval_discr :
 | 
			
		||||
  forall n m, {p:nat|p<=n} = {p:nat|p<=m} -> n=m.
 | 
			
		||||
Proof.
 | 
			
		||||
intros n m Heq.
 | 
			
		||||
apply card_inj with (A := {p:nat|p<=n}).
 | 
			
		||||
apply interval_dec.
 | 
			
		||||
apply card_interval.
 | 
			
		||||
rewrite Heq.
 | 
			
		||||
apply card_interval.
 | 
			
		||||
Qed.
samples/EBNF/grammar.ebnf | 24 (Normal file)
@@ -0,0 +1,24 @@
(*
  Source:  https://github.com/sunjay/lion
  License: MIT
*)

Statement = ( NamedFunction | AnonymousFunction | Assignment | Expr ) , "\n" ;
Expr = AnonymousFunction | Term | "(" , Expr , ")" ,
    { AnonymousFunction | Term | "(" , Expr , ")" } ;

Assignment = Symbol , "=" , Expr ;

AnonymousFunction = "\" , FunctionRHS ;
NamedFunction = Symbol , FunctionRHS ;

FunctionRHS = FunctionParams , "=" , FunctionBody ;
FunctionParams = FunctionParam , { FunctionParam } ;
FunctionParam = Term ;
FunctionBody = Expr ;

Term = Symbol | Number | SingleWordString ;
SingleWordString = '"' , Symbol , '"' ;
(* Symbol is a collection of valid symbol characters, not defined here *)
(* Number is a valid numeric literal *)
samples/EBNF/material.ebnf | 40 (Normal file)
@@ -0,0 +1,40 @@
(*
  Source:  https://github.com/io7m/jsom0
  License: ISC
*)

name =
  "name" , string , ";" ;

diffuse =
  "diffuse" , real , real , real , ";" ;

ambient =
  "ambient" , real , real , real , ";" ;

specular =
  "specular" , real , real , real , real , ";" ;

shininess =
  "shininess" , real , ";" ;

alpha =
  "alpha" , real , ";" ;

mapping =
  "map_chrome" | "map_uv" ;

texture =
  "texture" , string , real , mapping , ";" ;

material =
  "material" , ";" ,
    name ,
    diffuse ,
    ambient ,
    specular ,
    shininess ,
    alpha ,
    [ texture ] ,
  "end" , ";" ;
samples/EBNF/object.ebnf | 61 (Normal file)
@@ -0,0 +1,61 @@
(*
  Source:  https://github.com/io7m/jsom0
  License: ISC
*)

vertex_p3n3_name =
  "vertex_p3n3" ;

vertex_p3n3t2_name =
  "vertex_p3n3t2" ;

vertex_type =
  vertex_p3n3_name | vertex_p3n3t2_name ;

vertex_position =
  "position" , real , real , real , ";" ;

vertex_normal =
  "normal" , real , real , real , ";" ;

vertex_uv =
  "uv" , real , real , ";" ;

vertex_p3n3 =
  vertex_p3n3_name , vertex_position , vertex_normal , "end" , ";" ;

vertex_p3n3t2 =
  vertex_p3n3t2_name , vertex_position , vertex_normal , vertex_uv , "end" , ";" ;

vertex =
  vertex_p3n3 | vertex_p3n3t2 ;

vertex_array =
  "array" , positive , vertex_type , { vertex } , "end" , ";" ;

vertices =
  "vertices" , ";" , vertex_array , "end" , ";" ;

triangle =
  "triangle" , natural , natural , natural , ";" ;

triangle_array =
  "array" , positive, "triangle" , { triangle } , "end" , ";" ;

triangles =
  "triangles" , ";" , triangle_array , "end" , ";" ;

name =
  "name" , string , ";" ;

material_name =
  "material_name" , string , ";" ;

object =
  "object" , ";" ,
    name ,
    material_name ,
    vertices ,
    triangles ,
  "end" , ";" ;
samples/EBNF/types.ebnf | 20 (Normal file)
@@ -0,0 +1,20 @@
(*
  Source:  https://github.com/io7m/jsom0
  License: ISC
*)

digit_without_zero =
  "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" ;

digit =
  "0" | digit_without_zero ;

positive =
  digit_without_zero , { digit } ;

natural =
  "0" | positive ;

real =
  [ "-" ] , digit , [ "." , { digit } ] ;
samples/Emacs Lisp/filenames/.abbrev_defs | 6 (Normal file)
@@ -0,0 +1,6 @@
(define-abbrev-table 'c-mode-abbrev-table '(
	))
(define-abbrev-table 'fundamental-mode-abbrev-table '(
	("TM" "™" nil 0)
	("(R)" "®" nil 0)
	("C=" "€" nil 0)))
samples/Emacs Lisp/filenames/.gnus | 20 (Normal file)
@@ -0,0 +1,20 @@
(setq user-full-name "Alhadis")
(setq user-mail-address "fake.account@gmail.com")

(auto-image-file-mode)
(setq mm-inline-large-images t)
(add-to-list 'mm-attachment-override-types "image/*")

(setq gnus-select-method
	  '(nnimap "gmail"
		(nnimap-address "imap.gmail.com")
		(nnimap-server-port 777)
		(nnimap-stream ssl)))

(setq message-send-mail-function 'smtpmail-send-it
	  smtpmail-starttls-credentials '(("smtp.gmail.com" 600 nil nil))
	  smtpmail-auth-credentials '(("smtp.gmail.com" 700 "me@lisp.com" nil))
	  smtpmail-default-smtp-server "smtp.gmail.com"
	  smtpmail-smtp-server "smtp.gmail.com"
	  smtpmail-smtp-service 800
	  setq gnus-ignored-from-addresses "^from\\.Telstra[ \t\r\n]+Thanks")
samples/Emacs Lisp/filenames/.spacemacs | 197 (Normal file)
@@ -0,0 +1,197 @@
;; -*- mode: emacs-lisp -*-
 | 
			
		||||
;; This file is loaded by Spacemacs at startup.
 | 
			
		||||
;; It must be stored in your home directory.
 | 
			
		||||
 | 
			
		||||
(defun dotspacemacs/layers ()
 | 
			
		||||
  "Configuration Layers declaration."
 | 
			
		||||
  (setq-default
 | 
			
		||||
   ;; List of additional paths where to look for configuration layers.
 | 
			
		||||
   ;; Paths must have a trailing slash (i.e. `~/.mycontribs/')
 | 
			
		||||
   dotspacemacs-configuration-layer-path '()
 | 
			
		||||
   ;; List of configuration layers to load. If it is the symbol `all' instead
 | 
			
		||||
   ;; of a list then all discovered layers will be installed.
 | 
			
		||||
   dotspacemacs-configuration-layers
 | 
			
		||||
   '(
 | 
			
		||||
     ;; ----------------------------------------------------------------
 | 
			
		||||
     ;; Example of useful layers you may want to use right away.
 | 
			
		||||
     ;; Uncomment some layer names and press <SPC f e R> (Vim style) or
 | 
			
		||||
     ;; <M-m f e R> (Emacs style) to install them.
 | 
			
		||||
     ;; ----------------------------------------------------------------
 | 
			
		||||
     emacs-lisp
 | 
			
		||||
     charlock_holmes
 | 
			
		||||
     escape_utils
 | 
			
		||||
     mime-types
 | 
			
		||||
     rugged
 | 
			
		||||
     minitest
 | 
			
		||||
     mocha
 | 
			
		||||
     plist
 | 
			
		||||
     pry
 | 
			
		||||
     rake
 | 
			
		||||
     yajl-ruby
 | 
			
		||||
     colour-proximity
 | 
			
		||||
     licensed
 | 
			
		||||
     licensee
 | 
			
		||||
   ;; List of additional packages that will be installed without being
 | 
			
		||||
   ;; wrapped in a layer. If you need some configuration for these
 | 
			
		||||
   ;; packages then consider to create a layer, you can also put the
 | 
			
		||||
   ;; configuration in `dotspacemacs/config'.
 | 
			
		||||
   dotspacemacs-additional-packages '()
 | 
			
		||||
   ;; A list of packages and/or extensions that will not be install and loaded.
 | 
			
		||||
   dotspacemacs-excluded-packages '()
 | 
			
		||||
   ;; If non-nil spacemacs will delete any orphan packages, i.e. packages that
 | 
			
		||||
   ;; are declared in a layer which is not a member of
 | 
			
		||||
   ;; the list `dotspacemacs-configuration-layers'
 | 
			
		||||
   dotspacemacs-delete-orphan-packages t))
 | 
			
		||||
 | 
			
		||||
(defun dotspacemacs/init ()
 | 
			
		||||
  "Initialization function.
 | 
			
		||||
This function is called at the very startup of Spacemacs initialization
 | 
			
		||||
before layers configuration."
 | 
			
		||||
  ;; This setq-default sexp is an exhaustive list of all the supported
 | 
			
		||||
  ;; spacemacs settings.
 | 
			
		||||
  (setq-default
 | 
			
		||||
   ;; Either `vim' or `emacs'. Evil is always enabled but if the variable
 | 
			
		||||
   ;; is `emacs' then the `holy-mode' is enabled at startup.
 | 
			
		||||
   dotspacemacs-editing-style 'vim
 | 
			
		||||
   ;; If non nil output loading progress in `*Messages*' buffer.
 | 
			
		||||
   dotspacemacs-verbose-loading nil
 | 
			
		||||
   ;; Specify the startup banner. Default value is `official', it displays
 | 
			
		||||
   ;; the official spacemacs logo. An integer value is the index of text
 | 
			
		||||
   ;; banner, `random' chooses a random text banner in `core/banners'
 | 
			
		||||
   ;; directory. A string value must be a path to an image format supported
 | 
			
		||||
   ;; by your Emacs build.
 | 
			
		||||
   ;; If the value is nil then no banner is displayed.
 | 
			
		||||
   dotspacemacs-startup-banner 'official
 | 
			
		||||
   ;; List of items to show in the startup buffer. If nil it is disabled.
 | 
			
		||||
   ;; Possible values are: `recents' `bookmarks' `projects'."
 | 
			
		||||
   dotspacemacs-startup-lists '(bookmarks projects recents)
 | 
			
		||||
   ;; List of themes, the first of the list is loaded when spacemacs starts.
 | 
			
		||||
   ;; Press <SPC> T n to cycle to the next theme in the list (works great
 | 
			
		||||
   ;; with 2 themes variants, one dark and one light)
 | 
			
		||||
   dotspacemacs-themes '(
 | 
			
		||||
                         spacemacs-dark
 | 
			
		||||
                         spacemacs-light
 | 
			
		||||
                         solarized-dark
 | 
			
		||||
                         solarized-light
 | 
			
		||||
                         atom-light-ui
 | 
			
		||||
                         atom-dark-ui
 | 
			
		||||
                         atom-material-ui
 | 
			
		||||
                         zenburn
 | 
			
		||||
   ;; If non nil the cursor colour matches the state colour.
 | 
			
		||||
   dotspacemacs-colorize-cursor-according-to-state t
 | 
			
		||||
   ;; Default font. `powerline-scale' allows to quickly tweak the mode-line
 | 
			
		||||
   ;; size to make separators look not too crappy.
 | 
			
		||||
   dotspacemacs-default-font '("Menloco"
 | 
			
		||||
                               :size 11
 | 
			
		||||
                               :weight normal
 | 
			
		||||
                               :width normal
 | 
			
		||||
                               :powerline-scale 1.1)
 | 
			
		||||
   ;; The leader key
 | 
			
		||||
   dotspacemacs-leader-key "SPC"
 | 
			
		||||
   ;; The leader key accessible in `emacs state' and `insert state'
 | 
			
		||||
   dotspacemacs-emacs-leader-key "M-m"
 | 
			
		||||
   ;; Major mode leader key is a shortcut key which is the equivalent of
 | 
			
		||||
   ;; pressing `<leader> m`. Set it to `nil` to disable it.
 | 
			
		||||
   dotspacemacs-major-mode-leader-key ","
 | 
			
		||||
   ;; Major mode leader key accessible in `emacs state' and `insert state'
 | 
			
		||||
   dotspacemacs-major-mode-emacs-leader-key "C-M-m"
 | 
			
		||||
   ;; The command key used for Evil commands (ex-commands) and
 | 
			
		||||
   ;; Emacs commands (M-x).
 | 
			
		||||
   ;; By default the command key is `:' so ex-commands are executed like in Vim
 | 
			
		||||
   ;; with `:' and Emacs commands are executed with `<leader> :'.
 | 
			
		||||
   dotspacemacs-command-key ":"
 | 
			
		||||
   ;; Location where to auto-save files. Possible values are `original' to
 | 
			
		||||
   ;; auto-save the file in-place, `cache' to auto-save the file to another
 | 
			
		||||
   ;; file stored in the cache directory and `nil' to disable auto-saving.
 | 
			
		||||
   ;; Default value is `cache'.
 | 
			
		||||
   dotspacemacs-auto-save-file-location 'cache
 | 
			
		||||
   ;; If non nil then `ido' replaces `helm' for some commands. For now only
 | 
			
		||||
   ;; `find-files' (SPC f f) is replaced.
 | 
			
		||||
   dotspacemacs-use-ido nil
 | 
			
		||||
   ;; If non nil the paste micro-state is enabled. When enabled pressing `p`
 | 
			
		||||
   ;; several times cycle between the kill ring content.
 | 
			
		||||
   dotspacemacs-enable-paste-micro-state nil
 | 
			
		||||
   ;; Guide-key delay in seconds. The Guide-key is the popup buffer listing
 | 
			
		||||
   ;; the commands bound to the current keystrokes.
 | 
			
		||||
   dotspacemacs-guide-key-delay 0.4
 | 
			
		||||
   ;; If non nil a progress bar is displayed when spacemacs is loading. This
 | 
			
		||||
   ;; may increase the boot time on some systems and emacs builds, set it to
 | 
			
		||||
   ;; nil ;; to boost the loading time.
 | 
			
		||||
   dotspacemacs-loading-progress-bar t
 | 
			
		||||
   ;; If non nil the frame is fullscreen when Emacs starts up.
 | 
			
		||||
   ;; (Emacs 24.4+ only)
 | 
			
		||||
   dotspacemacs-fullscreen-at-startup nil
 | 
			
		||||
   ;; If non nil `spacemacs/toggle-fullscreen' will not use native fullscreen.
 | 
			
		||||
   ;; Use to disable fullscreen animations in OSX."
 | 
			
		||||
   dotspacemacs-fullscreen-use-non-native nil
 | 
			
		||||
   ;; If non nil the frame is maximized when Emacs starts up.
 | 
			
		||||
   ;; Takes effect only if `dotspacemacs-fullscreen-at-startup' is nil.
 | 
			
		||||
   ;; (Emacs 24.4+ only)
 | 
			
		||||
   dotspacemacs-maximized-at-startup nil
 | 
			
		||||
   ;; A value from the range (0..100), in increasing opacity, which describes
 | 
			
		||||
   ;; the transparency level of a frame when it's active or selected.
 | 
			
		||||
   ;; Transparency can be toggled through `toggle-transparency'.
 | 
			
		||||
   dotspacemacs-active-transparency 90
 | 
			
		||||
   ;; A value from the range (0..100), in increasing opacity, which describes
 | 
			
		||||
   ;; the transparency level of a frame when it's inactive or deselected.
 | 
			
		||||
   ;; Transparency can be toggled through `toggle-transparency'.
 | 
			
		||||
   dotspacemacs-inactive-transparency 90
 | 
			
		||||
   ;; If non nil unicode symbols are displayed in the mode line.
 | 
			
		||||
   dotspacemacs-mode-line-unicode-symbols t
 | 
			
		||||
   ;; If non nil smooth scrolling (native-scrolling) is enabled. Smooth
 | 
			
		||||
   ;; scrolling overrides the default behavior of Emacs which recenters the
 | 
			
		||||
   ;; point when it reaches the top or bottom of the screen.
 | 
			
		||||
   dotspacemacs-smooth-scrolling t
 | 
			
		||||
   ;; If non-nil smartparens-strict-mode will be enabled in programming modes.
 | 
			
		||||
   dotspacemacs-smartparens-strict-mode nil
 | 
			
		||||
   ;; Select a scope to highlight delimiters. Possible value is `all',
 | 
			
		||||
   ;; `current' or `nil'. Default is `all'
 | 
			
		||||
   dotspacemacs-highlight-delimiters 'all
 | 
			
		||||
   ;; If non nil advises quit functions to keep server open when quitting.
 | 
			
		||||
   dotspacemacs-persistent-server nil
 | 
			
		||||
   ;; List of search tool executable names. Spacemacs uses the first installed
 | 
			
		||||
   ;; tool of the list. Supported tools are `ag', `pt', `ack' and `grep'.
 | 
			
		||||
   dotspacemacs-search-tools '("ag" "pt" "ack" "grep")
 | 
			
		||||
   ;; The default package repository used if no explicit repository has been
 | 
			
		||||
   ;; specified with an installed package.
 | 
			
		||||
   ;; Not used for now.
 | 
			
		||||
   dotspacemacs-default-package-repository nil
 | 
			
		||||
 | 
			
		||||
   ;; If non nil line numbers are turned on in all `prog-mode' and `text-mode'
 | 
			
		||||
   ;; derivatives. If set to `relative', also turns on relative line numbers.
 | 
			
		||||
   ;; (default nil)
 | 
			
		||||
   dotspacemacs-line-numbers 'relative
 | 
			
		||||
 | 
			
		||||
   ;; Delete whitespace while saving buffer. Possible values are `all',
 | 
			
		||||
   ;; `trailing', `changed' or `nil'. Default is `changed' (cleanup whitespace
 | 
			
		||||
   ;; on changed lines) (default 'changed)
 | 
			
		||||
   dotspacemacs-whitespace-cleanup 'changed
 | 
			
		||||
   )
 | 
			
		||||
  ;; User initialization goes here
 | 
			
		||||
  )
 | 
			
		||||
 | 
			
		||||
(defun dotspacemacs/user-config ()
 | 
			
		||||
  "Configuration function.
 | 
			
		||||
 This function is called at the very end of Spacemacs initialization after
 | 
			
		||||
layers configuration."
 | 
			
		||||
  (add-hook 'alchemist-mode-hook 'company-mode)
 | 
			
		||||
 | 
			
		||||
  (add-hook 'projectile-mode-hook 'projectile-rails-on)
 | 
			
		||||
  (setq ruby-insert-encoding-magic-comment nil)
 | 
			
		||||
 | 
			
		||||
  (setq web-mode-markup-indent-offset 2)
 | 
			
		||||
  (setq web-mode-code-indent-offset 2)
 | 
			
		||||
 | 
			
		||||
  (spacemacs/toggle-golden-ratio-on)
 | 
			
		||||
  (spacemacs/toggle-indent-guide-globally-on)
 | 
			
		||||
  (spacemacs/toggle-centered-point-globally-on)
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
;; Do not write anything past this comment. This is where Emacs will
 | 
			
		||||
;; auto-generate custom variable definitions.
 | 
			
		||||
(custom-set-variables
 | 
			
		||||
 ;; custom-set-variables was added by Custom.
 | 
			
		||||
 ;; If you edit it by hand, you could mess it up, so be careful.
 | 
			
		||||
 ;; Your init file should contain only one such instance.
 | 
			
		||||
 ;; If there is more than one, they won't work right.
 | 
			
		||||
)
 | 
			
		||||
							
								
								
									
samples/Emacs Lisp/filenames/.viper | 10 lines | Normal file
@@ -0,0 +1,10 @@
(setq viper-inhibit-startup-message 't)
(setq viper-expert-level '5)

; Key bindings
(define-key viper-vi-global-user-map "\C-d" 'end-of-line)

; Return to top of window
(defun my-viper-return-to-top ()
  (interactive)
  (beginning-of-buffer))

samples/Emacs Lisp/filenames/Cask | 9 lines | Normal file
@@ -0,0 +1,9 @@
(package "composer" "0.0.7" "Interface to PHP Composer")
(source "melpa" "https://melpa.org/packages/")

(package-file "composer.el")

(depends-on "f")
(depends-on "s")
(depends-on "request")
(depends-on "seq")

samples/Emacs Lisp/filenames/Project.ede | 34 lines | Normal file
@@ -0,0 +1,34 @@
;; Object EDE
(ede-proj-project "Linguist"
  :name "Linguist"
  :version "4.9"
  :file "Project.ede"
  :targets (list 
   (ede-proj-target-elisp-autoloads "autoloads"
    :name "autoloads"
    :path "test/samples/Emacs Lisp"
    :autoload-file "dude.el"
    )
   (ede-proj-target-elisp "init"
    :name "init"
    :path ""
    :source '("ede-load.el" "wait-what.el")
    :compiler 'ede-emacs-preload-compiler
    :pre-load-packages '("sample-names")
    )
   (ede-proj-target-elisp "what"
    :name "the"
    :path ""
    :source '("h.el" "am-i-writing.el")
    :versionsource '("hell.el")
    :compiler 'ede-emacs-preload-compiler
    :aux-packages '("what" "the" "hell-files" "am-i-writing")
    )
   )
  :web-site-url "https://github.com/github/linguist"
  :web-site-directory "../"
  :web-site-file "CONTRIBUTING.md"
  :ftp-upload-site "/ftp@git.hub.com:/madeup"
  :configuration-variables 'nil
  :metasubproject 't
  )

samples/Emacs Lisp/filenames/_emacs | 70 lines | Normal file
@@ -0,0 +1,70 @@
;; UTF-8 support
;; (set-language-environment "UTF-8")
(setenv "LANG" "en_AU.UTF-8")
(setenv "LC_ALL" "en_AU.UTF-8")
(setq default-tab-width 4)


;;; Function to load all ".el" files in ~/.emacs.d/config
(defun load-directory (directory)
  "Recursively load all Emacs Lisp files in a directory."
  (dolist (element (directory-files-and-attributes directory nil nil nil))
    (let* ((path (car element))
           (fullpath (concat directory "/" path))
           (isdir (car (cdr element)))
           (ignore-dir (or (string= path ".") (string= path ".."))))
      (cond
       ((and (eq isdir t) (not ignore-dir))
        (load-directory fullpath))
       ((and (eq isdir nil) (string= (substring path -3) ".el"))
        (load (file-name-sans-extension fullpath)))))))

;; Tell Emacs we'd like to use Hunspell for spell-checking
(setq ispell-program-name (executable-find "hunspell"))

;; Load Homebrew-installed packages
(let ((default-directory "/usr/local/share/emacs/site-lisp/"))
  (normal-top-level-add-subdirs-to-load-path))
(load "aggressive-indent")
(add-hook 'emacs-lisp-mode-hook #'aggressive-indent-mode)
(autoload 'rust-mode "rust-mode" nil t)
(add-to-list 'auto-mode-alist '("\\.rs\\'" . rust-mode))

;; Load Git-related syntax highlighting
(add-to-list 'load-path "~/.emacs.d/lisp/")
(load "git-modes")
(load "git-commit")

;; Keybindings
(global-set-key (kbd "C-u") (lambda ()
                             (interactive)
                             (kill-line 0)))

;; Show cursor's current column number
(setq column-number-mode t)

;; Disable autosave
(setq auto-save-default nil)

;; Use a single directory for storing backup files
(setq backup-directory-alist `(("." . "~/.emacs.d/auto-save-list")))
(setq backup-by-copying t)
(setq delete-old-versions t
      kept-new-versions 6
      kept-old-versions 2
      version-control t)

(custom-set-variables
 ;; custom-set-variables was added by Custom.
 ;; If you edit it by hand, you could mess it up, so be careful.
 ;; Your init file should contain only one such instance.
 ;; If there is more than one, they won't work right.
 '(blink-cursor-mode nil)
 '(column-number-mode t)
 '(show-paren-mode t))
(custom-set-faces
 ;; custom-set-faces was added by Custom.
 ;; If you edit it by hand, you could mess it up, so be careful.
 ;; Your init file should contain only one such instance.
 ;; If there is more than one, they won't work right.
 )

samples/Emacs Lisp/filenames/abbrev_defs | 8 lines | Normal file
@@ -0,0 +1,8 @@
(define-abbrev-table 'fundamental-mode-abbrev-table '(
	("cat" "Concatenate" nil 0)
	("WTF" "World Trade Federation " nil 0)
	("rtbtm" "Read that back to me" nil 0)))

(define-abbrev-table 'shell-script-mode-abbrev-table '(
	("brake", "bundle rake exec" nil 0)
	("pls", "warning: setting Encoding.default_external")))

samples/Erlang/filenames/Emakefile | 7 lines | Normal file
@@ -0,0 +1,7 @@
{"src/*", [
   report, 
   verbose, 
   {i, "include"}, 
   {outdir, "ebin"},
   debug_info 
]}.

samples/Filebench WML/copyfiles.f | 51 lines | Normal file
@@ -0,0 +1,51 @@
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
#
# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
# Use is subject to license terms.
#

set $dir=/tmp
set $nfiles=1000
set $meandirwidth=20
set $meanfilesize=16k
set $iosize=1m
set $nthreads=1

set mode quit firstdone

define fileset name=bigfileset,path=$dir,size=$meanfilesize,entries=$nfiles,dirwidth=$meandirwidth,prealloc=100,paralloc
define fileset name=destfiles,path=$dir,size=$meanfilesize,entries=$nfiles,dirwidth=$meandirwidth

define process name=filereader,instances=1
{
  thread name=filereaderthread,memsize=10m,instances=$nthreads
  {
    flowop openfile name=openfile1,filesetname=bigfileset,fd=1
    flowop readwholefile name=readfile1,fd=1,iosize=$iosize
    flowop createfile name=createfile2,filesetname=destfiles,fd=2
    flowop writewholefile name=writefile2,fd=2,srcfd=1,iosize=$iosize
    flowop closefile name=closefile1,fd=1
    flowop closefile name=closefile2,fd=2
  }
}

echo  "Copyfiles Version 3.0 personality successfully loaded"

samples/Fortran/bug-185631.f | 6 lines | Normal file
@@ -0,0 +1,6 @@
! Codes/HYCOM/hycom/ATLb2.00/src_2.0.01_22_one/
      real onemu, twomu
      data onemu/0.0098/
      data twomu/1./
      data threemu/0.e9/
      end

samples/GN/BUILD.2.gn | 59 lines | Normal file
@@ -0,0 +1,59 @@
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("../gni/isolate.gni")

group("gn_all") {
  testonly = true

  if (v8_test_isolation_mode != "noop") {
    deps = [
      ":check-static-initializers_run",
      ":jsfunfuzz_run",
      ":run-deopt-fuzzer_run",
      ":run-gcmole_run",
      ":run-valgrind_run",
    ]
  }
}

v8_isolate_run("check-static-initializers") {
  deps = [
    "..:d8_run",
  ]

  isolate = "check-static-initializers.isolate"
}

v8_isolate_run("jsfunfuzz") {
  deps = [
    "..:d8_run",
  ]

  isolate = "jsfunfuzz/jsfunfuzz.isolate"
}

v8_isolate_run("run-deopt-fuzzer") {
  deps = [
    "..:d8_run",
  ]

  isolate = "run-deopt-fuzzer.isolate"
}

v8_isolate_run("run-gcmole") {
  deps = [
    "..:d8_run",
  ]

  isolate = "gcmole/run-gcmole.isolate"
}

v8_isolate_run("run-valgrind") {
  deps = [
    "..:d8_run",
  ]

  isolate = "run-valgrind.isolate"
}

samples/GN/BUILD.3.gn | 1646 lines | Normal file
File diff suppressed because it is too large

samples/GN/BUILD.gn | 2583 lines | Normal file
File diff suppressed because it is too large

samples/GN/android-rules.gni | 2781 lines | Normal file
File diff suppressed because it is too large

samples/GN/clang.gni | 13 lines | Normal file
@@ -0,0 +1,13 @@
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//build/toolchain/toolchain.gni")

declare_args() {
  # Indicates if the build should use the Chrome-specific plugins for enforcing
  # coding guidelines, etc. Only used when compiling with Clang.
  clang_use_chrome_plugins = is_clang && !is_nacl && !use_xcode_clang

  clang_base_path = "//third_party/llvm-build/Release+Asserts"
}

samples/GN/filenames/.gn | 25 lines | Normal file
@@ -0,0 +1,25 @@
# This file is used by the GN meta build system to find the root of the source
# tree and to set startup options. For documentation on the values set in this
# file, run "gn help dotfile" at the command line.

import("//build/dotfile_settings.gni")

# The location of the build configuration file.
buildconfig = "//build/config/BUILDCONFIG.gn"

# The secondary source root is a parallel directory tree where
# GN build files are placed when they can not be placed directly
# in the source tree, e.g. for third party source trees.
secondary_source = "//build/secondary/"

# These are the targets to check headers for by default. The files in targets
# matching these patterns (see "gn help label_pattern" for format) will have
# their includes checked for proper dependencies when you run either
# "gn check" or "gn gen --check".
check_targets = []

# These are the list of GN files that run exec_script. This whitelist exists
# to force additional review for new uses of exec_script, which is strongly
# discouraged except for gypi_to_gn calls.
exec_script_whitelist =
    build_dotfile_settings.exec_script_whitelist + [ "//test/test262/BUILD.gn" ]

samples/GN/gcc_toolchain.gni | 503 lines | Normal file
@@ -0,0 +1,503 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//build/config/android/config.gni")
import("//build/config/clang/clang.gni")
import("//build/config/nacl/config.gni")
import("//build/config/sanitizers/sanitizers.gni")
import("//build/config/v8_target_cpu.gni")
import("//build/toolchain/cc_wrapper.gni")
import("//build/toolchain/goma.gni")
import("//build/toolchain/toolchain.gni")

# This template defines a toolchain for something that works like gcc
# (including clang).
#
# It requires the following variables specifying the executables to run:
#  - ar
#  - cc
#  - cxx
#  - ld
#
# Optional parameters that control the tools:
#
#  - extra_cflags
#      Extra flags to be appended when compiling C files (but not C++ files).
#  - extra_cppflags
#      Extra flags to be appended when compiling both C and C++ files. "CPP"
#      stands for "C PreProcessor" in this context, although it can be
#      used for non-preprocessor flags as well. Not to be confused with
#      "CXX" (which follows).
#  - extra_cxxflags
#      Extra flags to be appended when compiling C++ files (but not C files).
#  - extra_ldflags
#      Extra flags to be appended when linking
#
#  - libs_section_prefix
#  - libs_section_postfix
#      The contents of these strings, if specified, will be placed around
#      the libs section of the linker line. It allows one to inject libraries
#      at the beginning and end for all targets in a toolchain.
#  - solink_libs_section_prefix
#  - solink_libs_section_postfix
#      Same as libs_section_{pre,post}fix except used for solink instead of link.
#  - link_outputs
#      The content of this array, if specified, will be added to the list of
#      outputs from the link command. This can be useful in conjunction with
#      the post_link parameter.
#  - post_link
#      The content of this string, if specified, will be run as a separate
#      command following the the link command.
#  - deps
#      Just forwarded to the toolchain definition.
#  - executable_extension
#      If this string is specified it will be used for the file extension
#      for an executable, rather than using no extension; targets will
#      still be able to override the extension using the output_extension
#      variable.
#  - rebuild_define
#      The contents of this string, if specified, will be passed as a #define
#      to the toolchain. It can be used to force recompiles whenever a
#      toolchain is updated.
#  - shlib_extension
#      If this string is specified it will be used for the file extension
#      for a shared library, rather than default value specified in
#      toolchain.gni
#  - strip
#      Location of the strip executable. When specified, strip will be run on
#      all shared libraries and executables as they are built. The pre-stripped
#      artifacts will be put in lib.unstripped/ and exe.unstripped/.
template("gcc_toolchain") {
  toolchain(target_name) {
    assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
    assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
    assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
    assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")

    # This define changes when the toolchain changes, forcing a rebuild.
    # Nothing should ever use this define.
    if (defined(invoker.rebuild_define)) {
      rebuild_string = "-D" + invoker.rebuild_define + " "
    } else {
      rebuild_string = ""
    }

    # GN's syntax can't handle more than one scope dereference at once, like
    # "invoker.toolchain_args.foo", so make a temporary to hold the toolchain
    # args so we can do "invoker_toolchain_args.foo".
    assert(defined(invoker.toolchain_args),
           "Toolchains must specify toolchain_args")
    invoker_toolchain_args = invoker.toolchain_args
    assert(defined(invoker_toolchain_args.current_cpu),
           "toolchain_args must specify a current_cpu")
    assert(defined(invoker_toolchain_args.current_os),
           "toolchain_args must specify a current_os")

    # When invoking this toolchain not as the default one, these args will be
    # passed to the build. They are ignored when this is the default toolchain.
    toolchain_args = {
      # Populate toolchain args from the invoker.
      forward_variables_from(invoker_toolchain_args, "*")

      # The host toolchain value computed by the default toolchain's setup
      # needs to be passed through unchanged to all secondary toolchains to
      # ensure that it's always the same, regardless of the values that may be
      # set on those toolchains.
      host_toolchain = host_toolchain

      if (!defined(invoker_toolchain_args.v8_current_cpu)) {
        v8_current_cpu = invoker_toolchain_args.current_cpu
      }
    }

    # When the invoker has explicitly overridden use_goma or cc_wrapper in the
    # toolchain args, use those values, otherwise default to the global one.
    # This works because the only reasonable override that toolchains might
    # supply for these values are to force-disable them.
    if (defined(toolchain_args.use_goma)) {
      toolchain_uses_goma = toolchain_args.use_goma
    } else {
      toolchain_uses_goma = use_goma
    }
    if (defined(toolchain_args.cc_wrapper)) {
      toolchain_cc_wrapper = toolchain_args.cc_wrapper
    } else {
      toolchain_cc_wrapper = cc_wrapper
    }

    # Compute the compiler prefix.
    if (toolchain_uses_goma) {
      assert(toolchain_cc_wrapper == "",
             "Goma and cc_wrapper can't be used together.")
      compiler_prefix = "$goma_dir/gomacc "
    } else if (toolchain_cc_wrapper != "") {
      compiler_prefix = toolchain_cc_wrapper + " "
    } else {
      compiler_prefix = ""
    }

    cc = compiler_prefix + invoker.cc
    cxx = compiler_prefix + invoker.cxx
    ar = invoker.ar
    ld = invoker.ld
    if (defined(invoker.readelf)) {
      readelf = invoker.readelf
    } else {
      readelf = "readelf"
    }
    if (defined(invoker.nm)) {
      nm = invoker.nm
    } else {
      nm = "nm"
    }

    if (defined(invoker.shlib_extension)) {
      default_shlib_extension = invoker.shlib_extension
    } else {
      default_shlib_extension = shlib_extension
    }

    if (defined(invoker.executable_extension)) {
      default_executable_extension = invoker.executable_extension
    } else {
      default_executable_extension = ""
    }

    # Bring these into our scope for string interpolation with default values.
    if (defined(invoker.libs_section_prefix)) {
      libs_section_prefix = invoker.libs_section_prefix
    } else {
      libs_section_prefix = ""
    }

    if (defined(invoker.libs_section_postfix)) {
      libs_section_postfix = invoker.libs_section_postfix
    } else {
      libs_section_postfix = ""
    }

    if (defined(invoker.solink_libs_section_prefix)) {
      solink_libs_section_prefix = invoker.solink_libs_section_prefix
    } else {
      solink_libs_section_prefix = ""
    }

    if (defined(invoker.solink_libs_section_postfix)) {
      solink_libs_section_postfix = invoker.solink_libs_section_postfix
    } else {
      solink_libs_section_postfix = ""
    }

    if (defined(invoker.extra_cflags) && invoker.extra_cflags != "") {
      extra_cflags = " " + invoker.extra_cflags
    } else {
      extra_cflags = ""
    }

    if (defined(invoker.extra_cppflags) && invoker.extra_cppflags != "") {
      extra_cppflags = " " + invoker.extra_cppflags
    } else {
      extra_cppflags = ""
    }

    if (defined(invoker.extra_cxxflags) && invoker.extra_cxxflags != "") {
      extra_cxxflags = " " + invoker.extra_cxxflags
    } else {
      extra_cxxflags = ""
    }

    if (defined(invoker.extra_ldflags) && invoker.extra_ldflags != "") {
      extra_ldflags = " " + invoker.extra_ldflags
    } else {
      extra_ldflags = ""
    }

    # These library switches can apply to all tools below.
    lib_switch = "-l"
    lib_dir_switch = "-L"

    # Object files go in this directory.
    object_subdir = "{{target_out_dir}}/{{label_name}}"

    tool("cc") {
      depfile = "{{output}}.d"
      command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}"
      depsformat = "gcc"
      description = "CC {{output}}"
      outputs = [
        # The whitelist file is also an output, but ninja does not
        # currently support multiple outputs for tool("cc").
        "$object_subdir/{{source_name_part}}.o",
      ]
      if (enable_resource_whitelist_generation) {
        compile_wrapper =
            rebase_path("//build/toolchain/gcc_compile_wrapper.py",
                        root_build_dir)
        command = "$python_path \"$compile_wrapper\" --resource-whitelist=\"{{output}}.whitelist\" $command"
      }
    }

    tool("cxx") {
      depfile = "{{output}}.d"
      command = "$cxx -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}"
      depsformat = "gcc"
      description = "CXX {{output}}"
      outputs = [
        # The whitelist file is also an output, but ninja does not
        # currently support multiple outputs for tool("cxx").
        "$object_subdir/{{source_name_part}}.o",
      ]
      if (enable_resource_whitelist_generation) {
        compile_wrapper =
            rebase_path("//build/toolchain/gcc_compile_wrapper.py",
                        root_build_dir)
        command = "$python_path \"$compile_wrapper\" --resource-whitelist=\"{{output}}.whitelist\" $command"
      }
    }

    tool("asm") {
      # For GCC we can just use the C compiler to compile assembly.
      depfile = "{{output}}.d"
      command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
      depsformat = "gcc"
      description = "ASM {{output}}"
      outputs = [
        "$object_subdir/{{source_name_part}}.o",
      ]
    }

    tool("alink") {
      rspfile = "{{output}}.rsp"
      whitelist_flag = " "
      if (enable_resource_whitelist_generation) {
        whitelist_flag = " --resource-whitelist=\"{{output}}.whitelist\""
      }

      # This needs a Python script to avoid using simple sh features in this
      # command, in case the host does not use a POSIX shell (e.g. compiling
      # POSIX-like toolchains such as NaCl on Windows).
      ar_wrapper =
          rebase_path("//build/toolchain/gcc_ar_wrapper.py", root_build_dir)
      command = "$python_path \"$ar_wrapper\"$whitelist_flag --output={{output}} --ar=\"$ar\" {{arflags}} rcsD @\"$rspfile\""
      description = "AR {{output}}"
      rspfile_content = "{{inputs}}"
      outputs = [
        "{{output_dir}}/{{target_output_name}}{{output_extension}}",
      ]

      # Shared libraries go in the target out directory by default so we can
      # generate different targets with the same name and not have them collide.
      default_output_dir = "{{target_out_dir}}"
      default_output_extension = ".a"
      output_prefix = "lib"
    }

    tool("solink") {
      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
      sofile = "{{output_dir}}/$soname"  # Possibly including toolchain dir.
      rspfile = sofile + ".rsp"
      pool = "//build/toolchain:link_pool($default_toolchain)"
      whitelist_flag = " "
      if (enable_resource_whitelist_generation) {
        whitelist_file = "$sofile.whitelist"
        whitelist_flag = " --resource-whitelist=\"$whitelist_file\""
      }

      if (defined(invoker.strip)) {
        unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
      } else {
        unstripped_sofile = sofile
      }

      # These variables are not built into GN but are helpers that
      # implement (1) linking to produce a .so, (2) extracting the symbols
      # from that file (3) if the extracted list differs from the existing
      # .TOC file, overwrite it, otherwise, don't change it.
      tocfile = sofile + ".TOC"

      link_command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""

      assert(defined(readelf), "to solink you must have a readelf")
      assert(defined(nm), "to solink you must have an nm")
      strip_switch = ""
      if (defined(invoker.strip)) {
        strip_switch = "--strip=${invoker.strip}"
      }

      # This needs a Python script to avoid using a complex shell command
      # requiring sh control structures, pipelines, and POSIX utilities.
      # The host might not have a POSIX shell and utilities (e.g. Windows).
      solink_wrapper = rebase_path("//build/toolchain/gcc_solink_wrapper.py")
      command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch --sofile=\"$unstripped_sofile\" --tocfile=\"$tocfile\" --output=\"$sofile\"$whitelist_flag -- $link_command"

      rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"

      description = "SOLINK $sofile"

      # Use this for {{output_extension}} expansions unless a target manually
      # overrides it (in which case {{output_extension}} will be what the target
      # specifies).
      default_output_extension = default_shlib_extension

      default_output_dir = "{{root_out_dir}}"
      if (shlib_subdir != ".") {
        default_output_dir += "/$shlib_subdir"
      }

      output_prefix = "lib"

      # Since the above commands only updates the .TOC file when it changes, ask
      # Ninja to check if the timestamp actually changed to know if downstream
      # dependencies should be recompiled.
      restat = true

      # Tell GN about the output files. It will link to the sofile but use the
      # tocfile for dependency management.
      outputs = [
        sofile,
        tocfile,
      ]
      if (enable_resource_whitelist_generation) {
        outputs += [ whitelist_file ]
      }
      if (sofile != unstripped_sofile) {
        outputs += [ unstripped_sofile ]
      }
      link_output = sofile
      depend_output = tocfile
    }

    tool("solink_module") {
      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
      sofile = "{{output_dir}}/$soname"
      rspfile = sofile + ".rsp"
      pool = "//build/toolchain:link_pool($default_toolchain)"

      if (defined(invoker.strip)) {
        unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$soname"
      } else {
        unstripped_sofile = sofile
      }

      command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""

      if (defined(invoker.strip)) {
        strip_command = "${invoker.strip} --strip-unneeded -o \"$sofile\" \"$unstripped_sofile\""
        command += " && " + strip_command
      }
      rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"

      description = "SOLINK_MODULE $sofile"

      # Use this for {{output_extension}} expansions unless a target manually
      # overrides it (in which case {{output_extension}} will be what the target
      # specifies).
      if (defined(invoker.loadable_module_extension)) {
        default_output_extension = invoker.loadable_module_extension
      } else {
        default_output_extension = default_shlib_extension
      }

      default_output_dir = "{{root_out_dir}}"
      if (shlib_subdir != ".") {
        default_output_dir += "/$shlib_subdir"
      }

      output_prefix = "lib"

      outputs = [
        sofile,
      ]
      if (sofile != unstripped_sofile) {
        outputs += [ unstripped_sofile ]
      }
    }

    tool("link") {
      exename = "{{target_output_name}}{{output_extension}}"
      outfile = "{{output_dir}}/$exename"
      rspfile = "$outfile.rsp"
      unstripped_outfile = outfile
      pool = "//build/toolchain:link_pool($default_toolchain)"

      # Use this for {{output_extension}} expansions unless a target manually
      # overrides it (in which case {{output_extension}} will be what the target
      # specifies).
      default_output_extension = default_executable_extension

      default_output_dir = "{{root_out_dir}}"

      if (defined(invoker.strip)) {
        unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename"
      }

      command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" -Wl,--start-group @\"$rspfile\" {{solibs}} -Wl,--end-group $libs_section_prefix {{libs}} $libs_section_postfix"
      if (defined(invoker.strip)) {
        link_wrapper =
            rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
        command = "$python_path \"$link_wrapper\" --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\" --output=\"$outfile\" -- $command"
      }
      description = "LINK $outfile"
      rspfile_content = "{{inputs}}"
      outputs = [
        outfile,
      ]
      if (outfile != unstripped_outfile) {
        outputs += [ unstripped_outfile ]
      }
      if (defined(invoker.link_outputs)) {
        outputs += invoker.link_outputs
      }
    }

    # These two are really entirely generic, but have to be repeated in
    # each toolchain because GN doesn't allow a template to be used here.
    # See //build/toolchain/toolchain.gni for details.
    tool("stamp") {
      command = stamp_command
      description = stamp_description
    }
    tool("copy") {
      command = copy_command
      description = copy_description
    }

    forward_variables_from(invoker, [ "deps" ])
  }
}

# This is a shorthand for gcc_toolchain instances based on the Chromium-built
# version of Clang. Only the toolchain_cpu and toolchain_os variables need to
# be specified by the invoker, and optionally toolprefix if it's a
# cross-compile case. Note that for a cross-compile case this toolchain
# requires a config to pass the appropriate -target option, or else it will
# actually just be doing a native compile. The invoker can optionally override
# use_gold too.
template("clang_toolchain") {
  if (defined(invoker.toolprefix)) {
    toolprefix = invoker.toolprefix
  } else {
    toolprefix = ""
  }

  gcc_toolchain(target_name) {
    prefix = rebase_path("$clang_base_path/bin", root_build_dir)
    cc = "$prefix/clang"
    cxx = "$prefix/clang++"
    ld = cxx

    readelf = "${toolprefix}readelf"
    ar = "${toolprefix}ar"
    nm = "${toolprefix}nm"

    forward_variables_from(invoker, [ "strip" ])

    toolchain_args = {
      if (defined(invoker.toolchain_args)) {
        forward_variables_from(invoker.toolchain_args, "*")
      }
      is_clang = true
    }
  }
}

										235
									
								
								samples/GN/icu.gn
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										235
									
								
								samples/GN/icu.gn
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,235 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//build/config/linux/pkg_config.gni")
import("//build/shim_headers.gni")

group("icu") {
  public_deps = [
    ":icui18n",
    ":icuuc",
  ]
}

config("icu_config") {
  defines = [
    "USING_SYSTEM_ICU=1",
    "ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC",
  ]
}

pkg_config("system_icui18n") {
  packages = [ "icu-i18n" ]
}

pkg_config("system_icuuc") {
  packages = [ "icu-uc" ]
}

source_set("icui18n") {
  deps = [
    ":icui18n_shim",
  ]
  public_configs = [
    ":icu_config",
    ":system_icui18n",
  ]
}

source_set("icuuc") {
  deps = [
    ":icuuc_shim",
  ]
  public_configs = [
    ":icu_config",
    ":system_icuuc",
  ]
}

shim_headers("icui18n_shim") {
  root_path = "source/i18n"
  headers = [
    # This list can easily be updated using the command below:
    # find third_party/icu/source/i18n/unicode \
    # -iname '*.h' -printf '"%p",\n' | \
    # sed -e 's|third_party/icu/i18n/common/||' | sort -u
    "unicode/alphaindex.h",
    "unicode/basictz.h",
    "unicode/calendar.h",
    "unicode/choicfmt.h",
    "unicode/coleitr.h",
    "unicode/coll.h",
    "unicode/compactdecimalformat.h",
    "unicode/curramt.h",
    "unicode/currpinf.h",
    "unicode/currunit.h",
    "unicode/datefmt.h",
    "unicode/dcfmtsym.h",
    "unicode/decimfmt.h",
    "unicode/dtfmtsym.h",
    "unicode/dtitvfmt.h",
    "unicode/dtitvinf.h",
    "unicode/dtptngen.h",
    "unicode/dtrule.h",
    "unicode/fieldpos.h",
    "unicode/fmtable.h",
    "unicode/format.h",
    "unicode/fpositer.h",
    "unicode/gender.h",
    "unicode/gregocal.h",
    "unicode/locdspnm.h",
    "unicode/measfmt.h",
    "unicode/measunit.h",
    "unicode/measure.h",
    "unicode/msgfmt.h",
    "unicode/numfmt.h",
    "unicode/numsys.h",
    "unicode/plurfmt.h",
    "unicode/plurrule.h",
    "unicode/rbnf.h",
    "unicode/rbtz.h",
    "unicode/regex.h",
    "unicode/region.h",
    "unicode/reldatefmt.h",
    "unicode/scientificnumberformatter.h",
    "unicode/search.h",
    "unicode/selfmt.h",
    "unicode/simpletz.h",
    "unicode/smpdtfmt.h",
    "unicode/sortkey.h",
    "unicode/stsearch.h",
    "unicode/tblcoll.h",
    "unicode/timezone.h",
    "unicode/tmunit.h",
    "unicode/tmutamt.h",
    "unicode/tmutfmt.h",
    "unicode/translit.h",
    "unicode/tzfmt.h",
    "unicode/tznames.h",
    "unicode/tzrule.h",
    "unicode/tztrans.h",
    "unicode/ucal.h",
    "unicode/ucol.h",
    "unicode/ucoleitr.h",
    "unicode/ucsdet.h",
    "unicode/ucurr.h",
    "unicode/udat.h",
    "unicode/udateintervalformat.h",
    "unicode/udatpg.h",
    "unicode/udisplaycontext.h",
    "unicode/ufieldpositer.h",
    "unicode/uformattable.h",
    "unicode/ugender.h",
    "unicode/uldnames.h",
    "unicode/ulocdata.h",
    "unicode/umsg.h",
    "unicode/unirepl.h",
    "unicode/unum.h",
    "unicode/unumsys.h",
    "unicode/upluralrules.h",
    "unicode/uregex.h",
    "unicode/uregion.h",
    "unicode/usearch.h",
    "unicode/uspoof.h",
    "unicode/utmscale.h",
    "unicode/utrans.h",
    "unicode/vtzone.h",
  ]
}

shim_headers("icuuc_shim") {
  root_path = "source/common"
  headers = [
    # This list can easily be updated using the command below:
    # find third_party/icu/source/common/unicode \
    # -iname '*.h' -printf '"%p",\n' | \
    # sed -e 's|third_party/icu/source/common/||' | sort -u
    "unicode/appendable.h",
    "unicode/brkiter.h",
    "unicode/bytestream.h",
    "unicode/bytestrie.h",
    "unicode/bytestriebuilder.h",
    "unicode/caniter.h",
    "unicode/chariter.h",
    "unicode/dbbi.h",
    "unicode/docmain.h",
    "unicode/dtintrv.h",
    "unicode/enumset.h",
    "unicode/errorcode.h",
    "unicode/filteredbrk.h",
    "unicode/icudataver.h",
    "unicode/icuplug.h",
    "unicode/idna.h",
    "unicode/listformatter.h",
    "unicode/localpointer.h",
    "unicode/locid.h",
    "unicode/messagepattern.h",
    "unicode/normalizer2.h",
    "unicode/normlzr.h",
    "unicode/parseerr.h",
    "unicode/parsepos.h",
    "unicode/platform.h",
    "unicode/ptypes.h",
    "unicode/putil.h",
    "unicode/rbbi.h",
    "unicode/rep.h",
    "unicode/resbund.h",
    "unicode/schriter.h",
    "unicode/std_string.h",
    "unicode/strenum.h",
    "unicode/stringpiece.h",
    "unicode/stringtriebuilder.h",
    "unicode/symtable.h",
    "unicode/ubidi.h",
    "unicode/ubrk.h",
    "unicode/ucasemap.h",
    "unicode/ucat.h",
    "unicode/uchar.h",
    "unicode/ucharstrie.h",
    "unicode/ucharstriebuilder.h",
    "unicode/uchriter.h",
    "unicode/uclean.h",
    "unicode/ucnv.h",
    "unicode/ucnv_cb.h",
    "unicode/ucnv_err.h",
    "unicode/ucnvsel.h",
    "unicode/uconfig.h",
    "unicode/udata.h",
    "unicode/uenum.h",
    "unicode/uidna.h",
    "unicode/uiter.h",
    "unicode/ulistformatter.h",
    "unicode/uloc.h",
    "unicode/umachine.h",
    "unicode/umisc.h",
    "unicode/unifilt.h",
    "unicode/unifunct.h",
    "unicode/unimatch.h",
    "unicode/uniset.h",
    "unicode/unistr.h",
    "unicode/unorm.h",
    "unicode/unorm2.h",
    "unicode/uobject.h",
    "unicode/urename.h",
    "unicode/urep.h",
    "unicode/ures.h",
    "unicode/uscript.h",
    "unicode/uset.h",
    "unicode/usetiter.h",
    "unicode/ushape.h",
    "unicode/usprep.h",
    "unicode/ustring.h",
    "unicode/ustringtrie.h",
    "unicode/utext.h",
    "unicode/utf.h",
    "unicode/utf16.h",
    "unicode/utf32.h",
    "unicode/utf8.h",
    "unicode/utf_old.h",
    "unicode/utrace.h",
    "unicode/utypes.h",
    "unicode/uvernum.h",
    "unicode/uversion.h",
  ]
}

samples/GN/internal_rules.gni (new file, 2788 lines; file diff suppressed because it is too large)
samples/GN/ios-rules.gni (new file, 1422 lines; file diff suppressed because it is too large)

samples/GN/isolate.gni (new file, 193 lines)
@@ -0,0 +1,193 @@
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//build/config/sanitizers/sanitizers.gni")
import("//third_party/icu/config.gni")
import("v8.gni")

declare_args() {
  # Sets the test isolation mode (noop|prepare|check).
  v8_test_isolation_mode = "noop"
}

template("v8_isolate_run") {
  forward_variables_from(invoker,
                         "*",
                         [
                           "deps",
                           "isolate",
                         ])

  # Remember target name as within the action scope the target name will be
  # different.
  name = target_name

  assert(defined(invoker.deps))
  assert(defined(invoker.isolate))

  if (name != "" && v8_test_isolation_mode != "noop") {
    action(name + "_run") {
      testonly = true

      deps = invoker.deps

      script = "//tools/isolate_driver.py"

      sources = [
        invoker.isolate,
      ]

      inputs = [
        # Files that are known to be involved in this step.
        "//tools/swarming_client/isolate.py",
        "//tools/swarming_client/run_isolated.py",
      ]

      if (v8_test_isolation_mode == "prepare") {
        outputs = [
          "$root_out_dir/$name.isolated.gen.json",
        ]
      } else if (v8_test_isolation_mode == "check") {
        outputs = [
          "$root_out_dir/$name.isolated",
          "$root_out_dir/$name.isolated.state",
        ]
      }

      # Translate gn to gyp variables.
      if (is_asan) {
        asan = "1"
      } else {
        asan = "0"
      }
      if (is_msan) {
        msan = "1"
      } else {
        msan = "0"
      }
      if (is_tsan) {
        tsan = "1"
      } else {
        tsan = "0"
      }
      if (is_cfi) {
        cfi_vptr = "1"
      } else {
        cfi_vptr = "0"
      }
      if (target_cpu == "x86") {
        target_arch = "ia32"
      } else {
        target_arch = target_cpu
      }
      if (is_debug) {
        configuration_name = "Debug"
      } else {
        configuration_name = "Release"
      }
      if (is_component_build) {
        component = "shared_library"
      } else {
        component = "static_library"
      }
      if (icu_use_data_file) {
        icu_use_data_file_flag = "1"
      } else {
        icu_use_data_file_flag = "0"
      }
      if (v8_enable_inspector) {
        enable_inspector = "1"
      } else {
        enable_inspector = "0"
      }
      if (v8_use_external_startup_data) {
        use_external_startup_data = "1"
      } else {
        use_external_startup_data = "0"
      }
      if (v8_use_snapshot) {
        use_snapshot = "true"
      } else {
        use_snapshot = "false"
      }
      if (v8_has_valgrind) {
        has_valgrind = "1"
      } else {
        has_valgrind = "0"
      }
      if (v8_gcmole) {
        gcmole = "1"
      } else {
        gcmole = "0"
      }

      # Note, all paths will be rebased in isolate_driver.py to be relative to
      # the isolate file.
      args = [
        v8_test_isolation_mode,
        "--isolated",
        rebase_path("$root_out_dir/$name.isolated", root_build_dir),
        "--isolate",
        rebase_path(invoker.isolate, root_build_dir),

        # Path variables are used to replace file paths when loading a .isolate
        # file
        "--path-variable",
        "DEPTH",
        rebase_path("//", root_build_dir),
        "--path-variable",
        "PRODUCT_DIR",
        rebase_path(root_out_dir, root_build_dir),

        # TODO(machenbach): Set variables for remaining features.
        "--config-variable",
        "CONFIGURATION_NAME=$configuration_name",
        "--config-variable",
        "OS=$target_os",
        "--config-variable",
        "asan=$asan",
        "--config-variable",
        "cfi_vptr=$cfi_vptr",
        "--config-variable",
        "gcmole=$gcmole",
        "--config-variable",
        "has_valgrind=$has_valgrind",
        "--config-variable",
        "icu_use_data_file_flag=$icu_use_data_file_flag",
        "--config-variable",
        "is_gn=1",
        "--config-variable",
        "msan=$msan",
        "--config-variable",
        "tsan=$tsan",
        "--config-variable",
        "coverage=0",
        "--config-variable",
        "sanitizer_coverage=0",
        "--config-variable",
        "component=$component",
        "--config-variable",
        "target_arch=$target_arch",
        "--config-variable",
        "v8_enable_inspector=$enable_inspector",
        "--config-variable",
        "v8_use_external_startup_data=$use_external_startup_data",
        "--config-variable",
        "v8_use_snapshot=$use_snapshot",
      ]

      if (is_win) {
        args += [
          "--config-variable",
          "msvs_version=2015",
        ]
      } else {
        args += [
          "--config-variable",
          "msvs_version=0",
        ]
      }
    }
  }
}

samples/Genie/Class.gs (new file, 12 lines)
@@ -0,0 +1,12 @@
init
	new Demo( "Demonstration class" ).run()

class Demo
	_message:string = ""

	construct ( message:string = "Optional argument - no message passed in constructor" )
		_message = message

	def run()
		print( _message )

samples/Genie/Hello.gs (new file, 2 lines)
@@ -0,0 +1,2 @@
init
	print( "Hello, World!" )

samples/HCL/main.tf (new file, 135 lines)
@@ -0,0 +1,135 @@
resource "aws_security_group" "elb_sec_group" {
 | 
			
		||||
  description = "Allow traffic from the internet to ELB port 80"
 | 
			
		||||
  vpc_id = "${var.vpc_id}"
 | 
			
		||||
 | 
			
		||||
  ingress {
 | 
			
		||||
      from_port = 80
 | 
			
		||||
      to_port = 80
 | 
			
		||||
      protocol = "tcp"
 | 
			
		||||
      cidr_blocks = ["${split(",", var.allowed_cidr_blocks)}"]
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  egress {
 | 
			
		||||
      from_port = 0
 | 
			
		||||
      to_port = 0
 | 
			
		||||
      protocol = "-1"
 | 
			
		||||
      cidr_blocks = ["0.0.0.0/0"]
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
resource "aws_security_group" "dokku_allow_ssh_from_internal" {
 | 
			
		||||
  description = "Allow git access over ssh from the private subnet"
 | 
			
		||||
  vpc_id = "${var.vpc_id}"
 | 
			
		||||
 | 
			
		||||
  ingress {
 | 
			
		||||
      from_port = 22
 | 
			
		||||
      to_port = 22
 | 
			
		||||
      protocol = "tcp"
 | 
			
		||||
      cidr_blocks = ["${var.private_subnet_cidr}"]
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  egress {
 | 
			
		||||
      from_port = 0
 | 
			
		||||
      to_port = 0
 | 
			
		||||
      protocol = "-1"
 | 
			
		||||
      cidr_blocks = ["0.0.0.0/0"]
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
resource "aws_security_group" "allow_from_elb_to_instance" {
 | 
			
		||||
  description = "Allow traffic from the ELB to the private instance"
 | 
			
		||||
  vpc_id = "${var.vpc_id}"
 | 
			
		||||
 | 
			
		||||
  ingress {
 | 
			
		||||
      security_groups = ["${aws_security_group.elb_sec_group.id}"]
 | 
			
		||||
      from_port = 80
 | 
			
		||||
      to_port = 80
 | 
			
		||||
      protocol = "tcp"
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  egress {
 | 
			
		||||
      from_port = 0
 | 
			
		||||
      to_port = 0
 | 
			
		||||
      protocol = "-1"
 | 
			
		||||
      cidr_blocks = ["0.0.0.0/0"]
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
resource "aws_instance" "dokku" {
 | 
			
		||||
  ami = "ami-47a23a30"
 | 
			
		||||
  instance_type = "${var.instance_type}"
 | 
			
		||||
  associate_public_ip_address = false
 | 
			
		||||
  key_name = "${var.key_name}"
 | 
			
		||||
  subnet_id = "${var.private_subnet_id}"
 | 
			
		||||
  vpc_security_group_ids = [
 | 
			
		||||
    "${var.bastion_sec_group_id}",
 | 
			
		||||
    "${aws_security_group.allow_from_elb_to_instance.id}",
 | 
			
		||||
    "${aws_security_group.dokku_allow_ssh_from_internal.id}"
 | 
			
		||||
  ]
 | 
			
		||||
  tags {
 | 
			
		||||
    Name = "${var.name}"
 | 
			
		||||
  }
 | 
			
		||||
  connection {
 | 
			
		||||
    user = "ubuntu"
 | 
			
		||||
    private_key = "${var.private_key}"
 | 
			
		||||
    bastion_host = "${var.bastion_host}"
 | 
			
		||||
    bastion_port = "${var.bastion_port}"
 | 
			
		||||
    bastion_user = "${var.bastion_user}"
 | 
			
		||||
    bastion_private_key = "${var.bastion_private_key}"
 | 
			
		||||
  }
 | 
			
		||||
  provisioner "file" {
 | 
			
		||||
    source = "${path.module}/../scripts/install-dokku.sh"
 | 
			
		||||
    destination = "/home/ubuntu/install-dokku.sh"
 | 
			
		||||
  }
 | 
			
		||||
  provisioner "remote-exec" {
 | 
			
		||||
    inline = [
 | 
			
		||||
      "chmod +x /home/ubuntu/install-dokku.sh",
 | 
			
		||||
      "HOSTNAME=${var.hostname} /home/ubuntu/install-dokku.sh"
 | 
			
		||||
    ]
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
resource "aws_elb" "elb_dokku" {
 | 
			
		||||
  name = "elb-dokku-${var.name}"
 | 
			
		||||
  subnets = ["${var.public_subnet_id}"]
 | 
			
		||||
  security_groups = ["${aws_security_group.elb_sec_group.id}"]
 | 
			
		||||
 | 
			
		||||
  listener {
 | 
			
		||||
    instance_port = 80
 | 
			
		||||
    instance_protocol = "http"
 | 
			
		||||
    lb_port = 80
 | 
			
		||||
    lb_protocol = "http"
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  health_check {
 | 
			
		||||
    healthy_threshold = 2
 | 
			
		||||
    unhealthy_threshold = 2
 | 
			
		||||
    timeout = 3
 | 
			
		||||
    target = "HTTP:80/"
 | 
			
		||||
    interval = 30
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  instances = ["${aws_instance.dokku.id}"]
 | 
			
		||||
  cross_zone_load_balancing = false
 | 
			
		||||
  idle_timeout = 400
 | 
			
		||||
 | 
			
		||||
  tags {
 | 
			
		||||
    Name = "elb-dokku-${var.name}"
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
resource "aws_route53_record" "dokku-deploy" {
 | 
			
		||||
   zone_id = "${var.zone_id}"
 | 
			
		||||
   name = "deploy.${var.hostname}"
 | 
			
		||||
   type = "A"
 | 
			
		||||
   ttl = "300"
 | 
			
		||||
   records = ["${aws_instance.dokku.private_ip}"]
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
resource "aws_route53_record" "dokku-wildcard" {
 | 
			
		||||
   zone_id = "${var.zone_id}"
 | 
			
		||||
   name = "*.${var.hostname}"
 | 
			
		||||
   type = "CNAME"
 | 
			
		||||
   ttl = "300"
 | 
			
		||||
   records = ["${aws_elb.elb_dokku.dns_name}"]
 | 
			
		||||
}
 | 
			
		||||
							
								
								
									
										48
									
								
								samples/HTML+Django/nunjucks.njk
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										48
									
								
								samples/HTML+Django/nunjucks.njk
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,48 @@
 | 
			
		||||
{% from "forms.html" import label as description %}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
{% macro field(name, value='', type='text') %}
 | 
			
		||||
    <div class="field">
 | 
			
		||||
        <input type="{{ type }}" name="{{ name }}"
 | 
			
		||||
                value="{{ value | escape }}" />
 | 
			
		||||
    </div>
 | 
			
		||||
{% endmacro %}
 | 
			
		||||
 | 
			
		||||
<html>
 | 
			
		||||
<head>
 | 
			
		||||
    {% extends "head.html" %}
 | 
			
		||||
</head>
 | 
			
		||||
<body>
 | 
			
		||||
{% if horse %}
 | 
			
		||||
    Chuck Norris once kicked a horse in the chin. Its descendants are known today as Giraffes.
 | 
			
		||||
{% elif optimus %}
 | 
			
		||||
    Chuck Norris once urinated in a semi truck's gas tank as a joke....that truck is now known as Optimus Prime.
 | 
			
		||||
{% else %}
 | 
			
		||||
    Chuck Norris threw a grenade and killed 50 people, then the grenade exploded.
 | 
			
		||||
{% endif %}
 | 
			
		||||
 | 
			
		||||
{% block left %}
 | 
			
		||||
    This is the left side!
 | 
			
		||||
{% endblock %}
 | 
			
		||||
 | 
			
		||||
{% block right %}
 | 
			
		||||
    This is the right side!
 | 
			
		||||
{% endblock %}
 | 
			
		||||
 | 
			
		||||
{{ description('Username') }}
 | 
			
		||||
{{ field('user') }}
 | 
			
		||||
{{ field('pass', type='password') }}
 | 
			
		||||
 | 
			
		||||
<h1>Posts</h1>
 | 
			
		||||
<ul>
 | 
			
		||||
    {% for item in items %}
 | 
			
		||||
        <li>{{ item.title }}</li>
 | 
			
		||||
    {% else %}
 | 
			
		||||
        <li>This would display if the 'item' collection were empty</li>
 | 
			
		||||
    {% endfor %}
 | 
			
		||||
</ul>
 | 
			
		||||
 | 
			
		||||
{# Don't escape foo #}
 | 
			
		||||
{{ foo | safe }}
 | 
			
		||||
</body>
 | 
			
		||||
</html>
 | 
			
		||||
							
								
								
									
										6
									
								
								samples/JSON5/filenames/.babelrc
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								samples/JSON5/filenames/.babelrc
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,6 @@
 | 
			
		||||
{
  "presets": [
    "es2015",
    "es2016"
  ]
}

samples/JavaScript/ccalc-lex.js (new file, 923 lines)
@@ -0,0 +1,923 @@
/* generated by jison-lex 0.3.4-159 */
var ccalcLex = (function () {
// See also:
// http://stackoverflow.com/questions/1382107/whats-a-good-way-to-extend-error-in-javascript/#35881508
// but we keep the prototype.constructor and prototype.name assignment lines too for compatibility
// with userland code which might access the derived class in a 'classic' way.
function JisonLexerError(msg, hash) {
    Object.defineProperty(this, 'name', {
        enumerable: false,
        writable: false,
        value: 'JisonLexerError'
    });

    if (msg == null) msg = '???';

    Object.defineProperty(this, 'message', {
        enumerable: false,
        writable: true,
        value: msg
    });

    this.hash = hash;

    var stacktrace;
    if (hash && hash.exception instanceof Error) {
        var ex2 = hash.exception;
        this.message = ex2.message || msg;
        stacktrace = ex2.stack;
    }
    if (!stacktrace) {
        if (Error.hasOwnProperty('captureStackTrace')) { // V8
            Error.captureStackTrace(this, this.constructor);
        } else {
            stacktrace = (new Error(msg)).stack;
        }
    }
    if (stacktrace) {
        Object.defineProperty(this, 'stack', {
            enumerable: false,
            writable: false,
            value: stacktrace
        });
    }
}

if (typeof Object.setPrototypeOf === 'function') {
    Object.setPrototypeOf(JisonLexerError.prototype, Error.prototype);
} else {
    JisonLexerError.prototype = Object.create(Error.prototype);
}
JisonLexerError.prototype.constructor = JisonLexerError;
JisonLexerError.prototype.name = 'JisonLexerError';


var lexer = {
    EOF: 1,
    ERROR: 2,

    // JisonLexerError: JisonLexerError,        // <-- injected by the code generator

    // options: {},                             // <-- injected by the code generator

    // yy: ...,                                 // <-- injected by setInput()

    __currentRuleSet__: null,                   // <-- internal rule set cache for the current lexer state

    __error_infos: [],                          // INTERNAL USE ONLY: the set of lexErrorInfo objects created since the last cleanup

    __decompressed: false,                      // INTERNAL USE ONLY: mark whether the lexer instance has been 'unfolded' completely and is now ready for use

    done: false,                                // INTERNAL USE ONLY
    _backtrack: false,                          // INTERNAL USE ONLY
    _input: '',                                 // INTERNAL USE ONLY
    _more: false,                               // INTERNAL USE ONLY
    _signaled_error_token: false,               // INTERNAL USE ONLY

    conditionStack: [],                         // INTERNAL USE ONLY; managed via `pushState()`, `popState()`, `topState()` and `stateStackSize()`

    match: '',                                  // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks input which has been matched so far for the lexer token under construction. `match` is identical to `yytext` except that this one still contains the matched input string after `lexer.performAction()` has been invoked, where userland code MAY have changed/replaced the `yytext` value entirely!
    matched: '',                                // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks entire input which has been matched so far
    matches: false,                             // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks RE match result for last (successful) match attempt
    yytext: '',                                 // ADVANCED USE ONLY: tracks input which has been matched so far for the lexer token under construction; this value is transferred to the parser as the 'token value' when the parser consumes the lexer token produced through a call to the `lex()` API.
    offset: 0,                                  // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks the 'cursor position' in the input string, i.e. the number of characters matched so far
    yyleng: 0,                                  // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: length of matched input for the token under construction (`yytext`)
    yylineno: 0,                                // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: 'line number' at which the token under construction is located
    yylloc: null,                               // READ-ONLY EXTERNAL ACCESS - ADVANCED USE ONLY: tracks location info (lines + columns) for the token under construction

    // INTERNAL USE: construct a suitable error info hash object instance for `parseError`.
    constructLexErrorInfo: function lexer_constructLexErrorInfo(msg, recoverable) {
        var pei = {
            errStr: msg,
            recoverable: !!recoverable,
            text: this.match,           // This one MAY be empty; userland code should use the `upcomingInput` API to obtain more text which follows the 'lexer cursor position'...
            token: null,
            line: this.yylineno,
            loc: this.yylloc,
            yy: this.yy,
            lexer: this,

            // and make sure the error info doesn't stay due to potential
            // ref cycle via userland code manipulations.
            // These would otherwise all be memory leak opportunities!
            //
            // Note that only array and object references are nuked as those
            // constitute the set of elements which can produce a cyclic ref.
            // The rest of the members is kept intact as they are harmless.
            destroy: function destructLexErrorInfo() {
                // remove cyclic references added to error info:
                // info.yy = null;
                // info.lexer = null;
                // ...
                var rec = !!this.recoverable;
                for (var key in this) {
                    if (this.hasOwnProperty(key) && typeof key === 'object') {
                        this[key] = undefined;
                    }
                }
                this.recoverable = rec;
            }
        };
        // track this instance so we can `destroy()` it once we deem it superfluous and ready for garbage collection!
        this.__error_infos.push(pei);
        return pei;
    },

    parseError: function lexer_parseError(str, hash) {
        if (this.yy.parser && typeof this.yy.parser.parseError === 'function') {
            return this.yy.parser.parseError(str, hash) || this.ERROR;
        } else if (typeof this.yy.parseError === 'function') {
            return this.yy.parseError.call(this, str, hash) || this.ERROR;
        } else {
            throw new this.JisonLexerError(str);
        }
    },

    // final cleanup function for when we have completed lexing the input; 
    // make it an API so that external code can use this one once userland
    // code has decided it's time to destroy any lingering lexer error
    // hash object instances and the like: this function helps to clean
    // up these constructs, which *may* carry cyclic references which would
    // otherwise prevent the instances from being properly and timely
    // garbage-collected, i.e. this function helps prevent memory leaks!
    cleanupAfterLex: function lexer_cleanupAfterLex(do_not_nuke_errorinfos) {
        var rv;

        // prevent lingering circular references from causing memory leaks:
        this.setInput('', {});

        // nuke the error hash info instances created during this run.
        // Userland code must COPY any data/references
        // in the error hash instance(s) it is more permanently interested in.
        if (!do_not_nuke_errorinfos) {
            for (var i = this.__error_infos.length - 1; i >= 0; i--) {
                var el = this.__error_infos[i];
                if (el && typeof el.destroy === 'function') {
                    el.destroy();
                }
            }
            this.__error_infos.length = 0;
        }

        return this;
    },

    // clear the lexer token context; intended for internal use only
    clear: function lexer_clear() {
        this.yytext = '';
        this.yyleng = 0;
        this.match = '';
        this.matches = false;
        this._more = false;
        this._backtrack = false;
    },

    // resets the lexer, sets new input
    setInput: function lexer_setInput(input, yy) {
        this.yy = yy || this.yy || {};

        // also check if we've fully initialized the lexer instance,
        // including expansion work to be done to go from a loaded
        // lexer to a usable lexer:
        if (!this.__decompressed) {
          // step 1: decompress the regex list:
          var rules = this.rules;
          for (var i = 0, len = rules.length; i < len; i++) {
            var rule_re = rules[i];

            // compression: is the RE an xref to another RE slot in the rules[] table?
            if (typeof rule_re === 'number') {
              rules[i] = rules[rule_re];
            }
          }

          // step 2: unfold the conditions[] set to make these ready for use:
          var conditions = this.conditions;
          for (var k in conditions) {
            var spec = conditions[k];

            var rule_ids = spec.rules;

            var len = rule_ids.length;
            var rule_regexes = new Array(len + 1);            // slot 0 is unused; we use a 1-based index approach here to keep the hottest code in `lexer_next()` fast and simple!
            var rule_new_ids = new Array(len + 1);

            if (this.rules_prefix1) {
                var rule_prefixes = new Array(65536);
                var first_catch_all_index = 0;

                for (var i = 0; i < len; i++) {
                  var idx = rule_ids[i];
                  var rule_re = rules[idx];
                  rule_regexes[i + 1] = rule_re;
                  rule_new_ids[i + 1] = idx;

                  var prefix = this.rules_prefix1[idx];
                  // compression: is the PREFIX-STRING an xref to another PREFIX-STRING slot in the rules_prefix1[] table?
                  if (typeof prefix === 'number') {
                    prefix = this.rules_prefix1[prefix];
                  }
                  // init the prefix lookup table: first come, first serve...
                  if (!prefix) {
                    if (!first_catch_all_index) {
                      first_catch_all_index = i + 1;
                    }
                  } else {
                    for (var j = 0, pfxlen = prefix.length; j < pfxlen; j++) {
                      var pfxch = prefix.charCodeAt(j);
                      // first come, first serve:
                      if (!rule_prefixes[pfxch]) {
                        rule_prefixes[pfxch] = i + 1;
                      }  
                    }
                  }
                }

                // if no catch-all prefix has been encountered yet, it means all
                // rules have limited prefix sets and it MAY be that particular
                // input characters won't be recognized by any rule in this 
                // condition state.
                // 
                // To speed up their discovery at run-time while keeping the
                // remainder of the lexer kernel code very simple (and fast),
                // we point these to an 'illegal' rule set index *beyond*
                // the end of the rule set.
                if (!first_catch_all_index) {
                  first_catch_all_index = len + 1;
                }

                for (var i = 0; i < 65536; i++) {
                  if (!rule_prefixes[i]) {
                    rule_prefixes[i] = first_catch_all_index; 
                  }
                }

                spec.__dispatch_lut = rule_prefixes;
            } else {
                for (var i = 0; i < len; i++) {
                  var idx = rule_ids[i];
                  var rule_re = rules[idx];
                  rule_regexes[i + 1] = rule_re;
                  rule_new_ids[i + 1] = idx;
                }
            }

            spec.rules = rule_new_ids;
            spec.__rule_regexes = rule_regexes;
            spec.__rule_count = len;
          }

          this.__decompressed = true;
        }

        this._input = input || '';
        this.clear();
        this._signaled_error_token = false;
        this.done = false;
        this.yylineno = 0;
        this.matched = '';
        this.conditionStack = ['INITIAL'];
        this.__currentRuleSet__ = null;
        this.yylloc = {
            first_line: 1,
            first_column: 0,
            last_line: 1,
            last_column: 0
        };
        if (this.options.ranges) {
            this.yylloc.range = [0, 0];
        }
        this.offset = 0;
        return this;
    },

    // consumes and returns one char from the input
    input: function lexer_input() {
        if (!this._input) {
            this.done = true;
            return null;
        }
        var ch = this._input[0];
        this.yytext += ch;
        this.yyleng++;
        this.offset++;
        this.match += ch;
        this.matched += ch;
        // Count the linenumber up when we hit the LF (or a stand-alone CR).
        // On CRLF, the linenumber is incremented when you fetch the CR or the CRLF combo
        // and we advance immediately past the LF as well, returning both together as if
        // it was all a single 'character' only.
        var slice_len = 1;
        var lines = false;
        if (ch === '\n') {
            lines = true;
        } else if (ch === '\r') {
            lines = true;
            var ch2 = this._input[1];
            if (ch2 === '\n') {
                slice_len++;
                ch += ch2;
                this.yytext += ch2;
                this.yyleng++;
                this.offset++;
                this.match += ch2;
                this.matched += ch2;
                if (this.options.ranges) {
                    this.yylloc.range[1]++;
                }
            }
        }
        if (lines) {
            this.yylineno++;
            this.yylloc.last_line++;
        } else {
            this.yylloc.last_column++;
        }
        if (this.options.ranges) {
            this.yylloc.range[1]++;
        }

        this._input = this._input.slice(slice_len);
        return ch;
    },

    // unshifts one char (or a string) into the input
    unput: function lexer_unput(ch) {
        var len = ch.length;
        var lines = ch.split(/(?:\r\n?|\n)/g);

        this._input = ch + this._input;
        this.yytext = this.yytext.substr(0, this.yytext.length - len);
        //this.yyleng -= len;
        this.offset -= len;
        var oldLines = this.match.split(/(?:\r\n?|\n)/g);
        this.match = this.match.substr(0, this.match.length - len);
        this.matched = this.matched.substr(0, this.matched.length - len);

        if (lines.length - 1) {
            this.yylineno -= lines.length - 1;
        }

        this.yylloc.last_line = this.yylineno + 1;
        this.yylloc.last_column = (lines ?
                (lines.length === oldLines.length ? this.yylloc.first_column : 0)
                + oldLines[oldLines.length - lines.length].length - lines[0].length :
                this.yylloc.first_column - len);

        if (this.options.ranges) {
            this.yylloc.range[1] = this.yylloc.range[0] + this.yyleng - len;
        }
        this.yyleng = this.yytext.length;
        this.done = false;
        return this;
    },

    // When called from action, caches matched text and appends it on next action
    more: function lexer_more() {
        this._more = true;
        return this;
    },

    // When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
    reject: function lexer_reject() {
        if (this.options.backtrack_lexer) {
            this._backtrack = true;
        } else {
            // when the parseError() call returns, we MUST ensure that the error is registered.
            // We accomplish this by signaling an 'error' token to be produced for the current
            // .lex() run.
            var p = this.constructLexErrorInfo('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), false);
            this._signaled_error_token = (this.parseError(p.errStr, p) || this.ERROR);
        }
        return this;
    },

    // retain first n characters of the match
    less: function lexer_less(n) {
        return this.unput(this.match.slice(n));
    },

    // return (part of the) already matched input, i.e. for error messages.
    // Limit the returned string length to `maxSize` (default: 20).
    // Limit the returned string to the `maxLines` number of lines of input (default: 1).
    // Negative limit values equal *unlimited*.
    pastInput: function lexer_pastInput(maxSize, maxLines) {
        var past = this.matched.substring(0, this.matched.length - this.match.length);
        if (maxSize < 0)
            maxSize = past.length;
        else if (!maxSize)
            maxSize = 20;
        if (maxLines < 0)
            maxLines = past.length;         // can't ever have more input lines than this!
        else if (!maxLines)
            maxLines = 1;
        // `substr` anticipation: treat \r\n as a single character and take a little
        // more than necessary so that we can still properly check against maxSize
        // after we've transformed and limited the newLines in here:
        past = past.substr(-maxSize * 2 - 2);
        // now that we have a significantly reduced string to process, transform the newlines
        // and chop them, then limit them:
        var a = past.replace(/\r\n|\r/g, '\n').split('\n');
        a = a.slice(-maxLines);
        past = a.join('\n');
        // When, after limiting to maxLines, we still have too much to return, 
        // do add an ellipsis prefix...
        if (past.length > maxSize) {
            past = '...' + past.substr(-maxSize);
        }
        return past;
    },

    // return (part of the) upcoming input, i.e. for error messages.
    // Limit the returned string length to `maxSize` (default: 20).
    // Limit the returned string to the `maxLines` number of lines of input (default: 1).
    // Negative limit values equal *unlimited*.
    upcomingInput: function lexer_upcomingInput(maxSize, maxLines) {
        var next = this.match;
        if (maxSize < 0)
            maxSize = next.length + this._input.length;
        else if (!maxSize)
            maxSize = 20;
        if (maxLines < 0)
            maxLines = maxSize;         // can't ever have more input lines than this!
        else if (!maxLines)
            maxLines = 1;
        // `substring` anticipation: treat \r\n as a single character and take a little
        // more than necessary so that we can still properly check against maxSize
        // after we've transformed and limited the newLines in here:
        if (next.length < maxSize * 2 + 2) {
            next += this._input.substring(0, maxSize * 2 + 2);  // substring is faster on Chrome/V8
        }
        // now that we have a significantly reduced string to process, transform the newlines
        // and chop them, then limit them:
        var a = next.replace(/\r\n|\r/g, '\n').split('\n');
        a = a.slice(0, maxLines);
        next = a.join('\n');
        // When, after limiting to maxLines, we still have too much to return, 
        // do add an ellipsis postfix...
        if (next.length > maxSize) {
            next = next.substring(0, maxSize) + '...';
        }
        return next;
    },

    // return a string which displays the character position where the lexing error occurred, i.e. for error messages
    showPosition: function lexer_showPosition(maxPrefix, maxPostfix) {
        var pre = this.pastInput(maxPrefix).replace(/\s/g, ' ');
        var c = new Array(pre.length + 1).join('-');
        return pre + this.upcomingInput(maxPostfix).replace(/\s/g, ' ') + '\n' + c + '^';
    },

    // helper function, used to produce a human readable description as a string, given
    // the input `yylloc` location object. 
    // Set `display_range_too` to TRUE to include the string character index position(s)
    // in the description if the `yylloc.range` is available. 
    describeYYLLOC: function lexer_describe_yylloc(yylloc, display_range_too) {
        var l1 = yylloc.first_line;
        var l2 = yylloc.last_line;
        var o1 = yylloc.first_column;
        var o2 = yylloc.last_column - 1;
        var dl = l2 - l1;
        var d_o = (dl === 0 ? o2 - o1 : 1000);
        var rv;
        if (dl === 0) {
            rv = 'line ' + l1 + ', ';
            if (d_o === 0) {
                rv += 'column ' + o1;
            } else {
                rv += 'columns ' + o1 + ' .. ' + o2;
            }
        } else {
            rv = 'lines ' + l1 + '(column ' + o1 + ') .. ' + l2 + '(column ' + o2 + ')';
        }
        if (yylloc.range && display_range_too) {
            var r1 = yylloc.range[0];
            var r2 = yylloc.range[1] - 1;
            if (r2 === r1) {
                rv += ' {String Offset: ' + r1 + '}';
            } else {
                rv += ' {String Offset range: ' + r1 + ' .. ' + r2 + '}';
            }
        }
        return rv;
        // return JSON.stringify(yylloc);
    },

    // test the lexed token: return FALSE when not a match, otherwise return token.
    //
    // `match` is supposed to be an array coming out of a regex match, i.e. `match[0]`
    // contains the actually matched text string.
    //
    // Also move the input cursor forward and update the match collectors:
    // - yytext
    // - yyleng
    // - match
    // - matches
    // - yylloc
    // - offset
    test_match: function lexer_test_match(match, indexed_rule) {
        var token,
            lines,
            backup,
            match_str;

        if (this.options.backtrack_lexer) {
            // save context
            backup = {
                yylineno: this.yylineno,
                yylloc: {
                    first_line: this.yylloc.first_line,
                    last_line: this.last_line,
                    first_column: this.yylloc.first_column,
                    last_column: this.yylloc.last_column
                },
                yytext: this.yytext,
                match: this.match,
                matches: this.matches,
                matched: this.matched,
                yyleng: this.yyleng,
                offset: this.offset,
                _more: this._more,
                _input: this._input,
                yy: this.yy,
 | 
			
		||||
                conditionStack: this.conditionStack.slice(0),
 | 
			
		||||
                done: this.done
 | 
			
		||||
            };
 | 
			
		||||
            if (this.options.ranges) {
 | 
			
		||||
                backup.yylloc.range = this.yylloc.range.slice(0);
 | 
			
		||||
            }
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        match_str = match[0];
 | 
			
		||||
        lines = match_str.match(/(?:\r\n?|\n).*/g);
 | 
			
		||||
        if (lines) {
 | 
			
		||||
            this.yylineno += lines.length;
 | 
			
		||||
        }
 | 
			
		||||
        this.yylloc = {
 | 
			
		||||
            first_line: this.yylloc.last_line,
 | 
			
		||||
            last_line: this.yylineno + 1,
 | 
			
		||||
            first_column: this.yylloc.last_column,
 | 
			
		||||
            last_column: lines ?
 | 
			
		||||
                         lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length :
 | 
			
		||||
                         this.yylloc.last_column + match_str.length
 | 
			
		||||
        };
 | 
			
		||||
        this.yytext += match_str;
 | 
			
		||||
        this.match += match_str;
 | 
			
		||||
        this.matches = match;
 | 
			
		||||
        this.yyleng = this.yytext.length;
 | 
			
		||||
        if (this.options.ranges) {
 | 
			
		||||
            this.yylloc.range = [this.offset, this.offset + this.yyleng];
 | 
			
		||||
        }
 | 
			
		||||
        // previous lex rules MAY have invoked the `more()` API rather than producing a token:
 | 
			
		||||
        // those rules will already have moved this `offset` forward matching their match lengths,
 | 
			
		||||
        // hence we must only add our own match length now:
 | 
			
		||||
        this.offset += match_str.length;
 | 
			
		||||
        this._more = false;
 | 
			
		||||
        this._backtrack = false;
 | 
			
		||||
        this._input = this._input.slice(match_str.length);
 | 
			
		||||
        this.matched += match_str;
 | 
			
		||||
 | 
			
		||||
        // calling this method: 
 | 
			
		||||
        //
 | 
			
		||||
        //   function lexer__performAction(yy, yy_, $avoiding_name_collisions, YY_START) {...}
 | 
			
		||||
        token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1] /* = YY_START */);
 | 
			
		||||
        // otherwise, when the action codes are all simple return token statements:
 | 
			
		||||
        //token = this.simpleCaseActionClusters[indexed_rule];
 | 
			
		||||
 | 
			
		||||
        if (this.done && this._input) {
 | 
			
		||||
            this.done = false;
 | 
			
		||||
        }
 | 
			
		||||
        if (token) {
 | 
			
		||||
            return token;
 | 
			
		||||
        } else if (this._backtrack) {
 | 
			
		||||
            // recover context
 | 
			
		||||
            for (var k in backup) {
 | 
			
		||||
                this[k] = backup[k];
 | 
			
		||||
            }
 | 
			
		||||
            this.__currentRuleSet__ = null;
 | 
			
		||||
            return false; // rule action called reject() implying the next rule should be tested instead.
 | 
			
		||||
        } else if (this._signaled_error_token) {
 | 
			
		||||
            // produce one 'error' token as .parseError() in reject() did not guarantee a failure signal by throwing an exception!
 | 
			
		||||
            token = this._signaled_error_token;
 | 
			
		||||
            this._signaled_error_token = false;
 | 
			
		||||
            return token;
 | 
			
		||||
        }
 | 
			
		||||
        return false;
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    // return next match in input
 | 
			
		||||
    next: function lexer_next() {
 | 
			
		||||
        if (this.done) {
 | 
			
		||||
            this.clear();
 | 
			
		||||
            return this.EOF;
 | 
			
		||||
        }
 | 
			
		||||
        if (!this._input) {
 | 
			
		||||
            this.done = true;
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        var token,
 | 
			
		||||
            match,
 | 
			
		||||
            tempMatch,
 | 
			
		||||
            index;
 | 
			
		||||
        if (!this._more) {
 | 
			
		||||
            this.clear();
 | 
			
		||||
        }
 | 
			
		||||
        var spec = this.__currentRuleSet__;
 | 
			
		||||
        if (!spec) {
 | 
			
		||||
            // Update the ruleset cache as we apparently encountered a state change or just started lexing.
 | 
			
		||||
            // The cache is set up for fast lookup -- we assume a lexer will switch states much less often than it will
 | 
			
		||||
            // invoke the `lex()` token-producing API and related APIs, hence caching the set for direct access helps
 | 
			
		||||
            // speed up those activities a tiny bit.
 | 
			
		||||
            spec = this.__currentRuleSet__ = this._currentRules();
 | 
			
		||||
        }
 | 
			
		||||
 | 
			
		||||
        var rule_ids = spec.rules;
 | 
			
		||||
//        var dispatch = spec.__dispatch_lut;
 | 
			
		||||
        var regexes = spec.__rule_regexes;
 | 
			
		||||
        var len = spec.__rule_count;
 | 
			
		||||
 | 
			
		||||
//        var c0 = this._input[0];
 | 
			
		||||
 | 
			
		||||
        // Note: the arrays are 1-based, while `len` itself is a valid index, 
 | 
			
		||||
        // hence the non-standard less-or-equal check in the next loop condition!
 | 
			
		||||
        // 
 | 
			
		||||
        // `dispatch` is a lookup table which lists the *first* rule which matches the 1-char *prefix* of the rule-to-match.
 | 
			
		||||
        // By using that array as a jumpstart, we can cut down on the otherwise O(n*m) behaviour of this lexer, down to
 | 
			
		||||
        // O(n) ideally, where:
 | 
			
		||||
        // 
 | 
			
		||||
        // - N is the number of input particles -- which is not precisely characters 
 | 
			
		||||
        //   as we progress on a per-regex-match basis rather than on a per-character basis
 | 
			
		||||
        //   
 | 
			
		||||
        // - M is the number of rules (regexes) to test in the active condition state.
 | 
			
		||||
        //  
 | 
			
		||||
        for (var i = 1 /* (dispatch[c0] || 1) */ ; i <= len; i++) {
 | 
			
		||||
            tempMatch = this._input.match(regexes[i]);
 | 
			
		||||
            if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
 | 
			
		||||
                match = tempMatch;
 | 
			
		||||
                index = i;
 | 
			
		||||
                if (this.options.backtrack_lexer) {
 | 
			
		||||
                    token = this.test_match(tempMatch, rule_ids[i]);
 | 
			
		||||
                    if (token !== false) {
 | 
			
		||||
                        return token;
 | 
			
		||||
                    } else if (this._backtrack) {
 | 
			
		||||
                        match = undefined;
 | 
			
		||||
                        continue; // rule action called reject() implying a rule MISmatch.
 | 
			
		||||
                    } else {
 | 
			
		||||
                        // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
 | 
			
		||||
                        return false;
 | 
			
		||||
                    }
 | 
			
		||||
                } else if (!this.options.flex) {
 | 
			
		||||
                    break;
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
        }
 | 
			
		||||
        if (match) {
 | 
			
		||||
            token = this.test_match(match, rule_ids[index]);
 | 
			
		||||
            if (token !== false) {
 | 
			
		||||
                return token;
 | 
			
		||||
            }
 | 
			
		||||
            // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
 | 
			
		||||
            return false;
 | 
			
		||||
        }
 | 
			
		||||
        if (this._input === '') {
 | 
			
		||||
            this.done = true;
 | 
			
		||||
            return this.EOF;
 | 
			
		||||
        } else {
 | 
			
		||||
            var p = this.constructLexErrorInfo('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), this.options.lexer_errors_are_recoverable);
 | 
			
		||||
            token = (this.parseError(p.errStr, p) || this.ERROR);
 | 
			
		||||
            if (token === this.ERROR) {
 | 
			
		||||
                // we can try to recover from a lexer error that parseError() did not 'recover' for us, by moving forward at least one character at a time:
 | 
			
		||||
                if (!this.match.length) {
 | 
			
		||||
                    this.input();
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
            return token;
 | 
			
		||||
        }
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    // return next match that has a token
 | 
			
		||||
    lex: function lexer_lex() {
 | 
			
		||||
        var r;
 | 
			
		||||
        // allow the PRE/POST handlers set/modify the return token for maximum flexibility of the generated lexer:
 | 
			
		||||
        if (typeof this.options.pre_lex === 'function') {
 | 
			
		||||
            r = this.options.pre_lex.call(this);
 | 
			
		||||
        }
 | 
			
		||||
        while (!r) {
 | 
			
		||||
            r = this.next();
 | 
			
		||||
        }
 | 
			
		||||
        if (typeof this.options.post_lex === 'function') {
 | 
			
		||||
            // (also account for a userdef function which does not return any value: keep the token as is)
 | 
			
		||||
            r = this.options.post_lex.call(this, r) || r;
 | 
			
		||||
        }
 | 
			
		||||
        return r;
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    // backwards compatible alias for `pushState()`;
 | 
			
		||||
    // the latter is symmetrical with `popState()` and we advise to use
 | 
			
		||||
    // those APIs in any modern lexer code, rather than `begin()`.
 | 
			
		||||
    begin: function lexer_begin(condition) {
 | 
			
		||||
        return this.pushState(condition);
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    // activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
 | 
			
		||||
    pushState: function lexer_pushState(condition) {
 | 
			
		||||
        this.conditionStack.push(condition);
 | 
			
		||||
        this.__currentRuleSet__ = null;
 | 
			
		||||
        return this;
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    // pop the previously active lexer condition state off the condition stack
 | 
			
		||||
    popState: function lexer_popState() {
 | 
			
		||||
        var n = this.conditionStack.length - 1;
 | 
			
		||||
        if (n > 0) {
 | 
			
		||||
            this.__currentRuleSet__ = null;
 | 
			
		||||
            return this.conditionStack.pop();
 | 
			
		||||
        } else {
 | 
			
		||||
            return this.conditionStack[0];
 | 
			
		||||
        }
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    // return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
 | 
			
		||||
    topState: function lexer_topState(n) {
 | 
			
		||||
        n = this.conditionStack.length - 1 - Math.abs(n || 0);
 | 
			
		||||
        if (n >= 0) {
 | 
			
		||||
            return this.conditionStack[n];
 | 
			
		||||
        } else {
 | 
			
		||||
            return 'INITIAL';
 | 
			
		||||
        }
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    // (internal) determine the lexer rule set which is active for the currently active lexer condition state
 | 
			
		||||
    _currentRules: function lexer__currentRules() {
 | 
			
		||||
        if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
 | 
			
		||||
            return this.conditions[this.conditionStack[this.conditionStack.length - 1]];
 | 
			
		||||
        } else {
 | 
			
		||||
            return this.conditions['INITIAL'];
 | 
			
		||||
        }
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    // return the number of states currently on the stack
 | 
			
		||||
    stateStackSize: function lexer_stateStackSize() {
 | 
			
		||||
        return this.conditionStack.length;
 | 
			
		||||
    },
 | 
			
		||||
options: {},
 | 
			
		||||
JisonLexerError: JisonLexerError,
 | 
			
		||||
performAction: function lexer__performAction(yy, yy_, $avoiding_name_collisions, YY_START) {
 | 
			
		||||
 | 
			
		||||
var YYSTATE = YY_START;
 | 
			
		||||
switch($avoiding_name_collisions) {
 | 
			
		||||
case 0 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       [ \t\r\n]+ */ 
 | 
			
		||||
 
 | 
			
		||||
    /* eat up whitespace */
 | 
			
		||||
    BeginToken(yy_.yytext); 
 | 
			
		||||
     
 | 
			
		||||
break;
 | 
			
		||||
case 1 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       {DIGIT}+ */ 
 | 
			
		||||
 
 | 
			
		||||
    BeginToken(yy_.yytext); 
 | 
			
		||||
    yylval.value = atof(yy_.yytext);
 | 
			
		||||
    return VALUE;
 | 
			
		||||
     
 | 
			
		||||
break;
 | 
			
		||||
case 2 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       {DIGIT}+\.{DIGIT}* */ 
 | 
			
		||||
 
 | 
			
		||||
    BeginToken(yy_.yytext);
 | 
			
		||||
    yylval.value = atof(yy_.yytext);
 | 
			
		||||
    return VALUE;
 | 
			
		||||
     
 | 
			
		||||
break;
 | 
			
		||||
case 3 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       {DIGIT}+[eE]["+""-"]?{DIGIT}* */ 
 | 
			
		||||
 
 | 
			
		||||
    BeginToken(yy_.yytext);
 | 
			
		||||
    yylval.value = atof(yy_.yytext);
 | 
			
		||||
    return VALUE;
 | 
			
		||||
     
 | 
			
		||||
break;
 | 
			
		||||
case 4 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       {DIGIT}+\.{DIGIT}*[eE]["+""-"]?{DIGIT}* */ 
 | 
			
		||||
 
 | 
			
		||||
    BeginToken(yy_.yytext);
 | 
			
		||||
    yylval.value = atof(yy_.yytext);
 | 
			
		||||
    return VALUE;
 | 
			
		||||
     
 | 
			
		||||
break;
 | 
			
		||||
case 5 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       {ID} */ 
 | 
			
		||||
 
 | 
			
		||||
    BeginToken(yy_.yytext);
 | 
			
		||||
    yylval.string = malloc(strlen(yy_.yytext)+1);
 | 
			
		||||
    strcpy(yylval.string, yy_.yytext);
 | 
			
		||||
    return IDENTIFIER;
 | 
			
		||||
     
 | 
			
		||||
break;
 | 
			
		||||
case 6 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       \+ */ 
 | 
			
		||||
  BeginToken(yy_.yytext); return ADD;  
 | 
			
		||||
break;
 | 
			
		||||
case 7 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       - */ 
 | 
			
		||||
  BeginToken(yy_.yytext); return SUB;  
 | 
			
		||||
break;
 | 
			
		||||
case 8 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       \* */ 
 | 
			
		||||
  BeginToken(yy_.yytext); return MULT;  
 | 
			
		||||
break;
 | 
			
		||||
case 9 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       \/ */ 
 | 
			
		||||
  BeginToken(yy_.yytext); return DIV;  
 | 
			
		||||
break;
 | 
			
		||||
case 10 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       \( */ 
 | 
			
		||||
  BeginToken(yy_.yytext); return LBRACE;  
 | 
			
		||||
break;
 | 
			
		||||
case 11 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       \) */ 
 | 
			
		||||
  BeginToken(yy_.yytext); return RBRACE;  
 | 
			
		||||
break;
 | 
			
		||||
case 12 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       ; */ 
 | 
			
		||||
  BeginToken(yy_.yytext); return SEMICOLON;  
 | 
			
		||||
break;
 | 
			
		||||
case 13 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       = */ 
 | 
			
		||||
  BeginToken(yy_.yytext); return ASSIGN;  
 | 
			
		||||
break;
 | 
			
		||||
case 14 : 
 | 
			
		||||
/*! Conditions:: INITIAL */ 
 | 
			
		||||
/*! Rule::       . */ 
 | 
			
		||||
 
 | 
			
		||||
    BeginToken(yy_.yytext);
 | 
			
		||||
    return yy_.yytext[0];
 | 
			
		||||
     
 | 
			
		||||
break;
 | 
			
		||||
default:
 | 
			
		||||
  return this.simpleCaseActionClusters[$avoiding_name_collisions];
 | 
			
		||||
}
 | 
			
		||||
},
 | 
			
		||||
simpleCaseActionClusters: {
 | 
			
		||||
 | 
			
		||||
},
 | 
			
		||||
rules: [
/^(?:[ \t\r\n]+)/,
/^(?:(\d)+)/,
/^(?:(\d)+\.(\d)*)/,
/^(?:(\d)+[Ee]["+]?(\d)*)/,
/^(?:(\d)+\.(\d)*[Ee]["+]?(\d)*)/,
/^(?:([^\W\d]\w*))/,
/^(?:\+)/,
/^(?:-)/,
/^(?:\*)/,
/^(?:\/)/,
/^(?:\()/,
/^(?:\))/,
/^(?:;)/,
/^(?:=)/,
/^(?:.)/
],
conditions: {
  "INITIAL": {
    rules: [
      0,
      1,
      2,
      3,
      4,
      5,
      6,
      7,
      8,
      9,
      10,
      11,
      12,
      13,
      14
    ],
    inclusive: true
  }
}
};

/*--------------------------------------------------------------------
 * lex.l
 *------------------------------------------------------------------*/;
return lexer;
})();

samples/JavaScript/ccalc-parse.js | 2145 (new file)
(File diff suppressed because it is too large)

samples/JavaScript/proto.js | 31 (new file)
@@ -0,0 +1,31 @@
/**
 * @fileoverview
 * @enhanceable
 * @public
 */
// GENERATED CODE -- DO NOT EDIT!

goog.provide('proto.google.protobuf.Timestamp');

goog.require('jspb.Message');

/**
 * Generated by JsPbCodeGenerator.
 * @param {Array=} opt_data Optional initial data array, typically from a
 * server response, or constructed directly in Javascript. The array is used
 * in place and becomes part of the constructed object. It is not cloned.
 * If no data is provided, the constructed object will be empty, but still
 * valid.
 * @extends {jspb.Message}
 * @constructor
 */
proto.google.protobuf.Timestamp = function(opt_data) {
  jspb.Message.initialize(this, opt_data, 0, -1, null, null);
};
goog.inherits(proto.google.protobuf.Timestamp, jspb.Message);
if (goog.DEBUG && !COMPILED) {
  proto.google.protobuf.Timestamp.displayName = 'proto.google.protobuf.Timestamp';
}


// Remainder elided

samples/Jison Lex/classy.jisonlex | 39 (new file)
@@ -0,0 +1,39 @@
digit                       [0-9]
id                          [a-zA-Z][a-zA-Z0-9]*

%%
"//".*                      /* ignore comment */
"main"                      return 'MAIN';
"class"                     return 'CLASS';
"extends"                   return 'EXTENDS';
"nat"                       return 'NATTYPE';
"if"                        return 'IF';
"else"                      return 'ELSE';
"for"                       return 'FOR';
"printNat"                  return 'PRINTNAT';
"readNat"                   return 'READNAT';
"this"                      return 'THIS';
"new"                       return 'NEW';
"var"                       return 'VAR';
"null"                      return 'NUL';
{digit}+                    return 'NATLITERAL';
{id}                        return 'ID';
"=="                        return 'EQUALITY';
"="                         return 'ASSIGN';
"+"                         return 'PLUS';
"-"                         return 'MINUS';
"*"                         return 'TIMES';
">"                         return 'GREATER';
"||"                        return 'OR';
"!"                         return 'NOT';
"."                         return 'DOT';
"{"                         return 'LBRACE';
"}"                         return 'RBRACE';
"("                         return 'LPAREN';
")"                         return 'RPAREN';
";"                         return 'SEMICOLON';
\s+                         /* skip whitespace */
"."                         throw 'Illegal character';
<<EOF>>                     return 'ENDOFFILE';

samples/Jison Lex/lex_grammar.jisonlex | 29 (new file)
@@ -0,0 +1,29 @@

%%
\n+                         {yy.freshLine = true;}
\s+                         {yy.freshLine = false;}
"y{"[^}]*"}"                {yytext = yytext.substr(2, yyleng - 3); return 'ACTION';}
[a-zA-Z_][a-zA-Z0-9_-]*     {return 'NAME';}
'"'([^"]|'\"')*'"'          {return 'STRING_LIT';}
"'"([^']|"\'")*"'"          {return 'STRING_LIT';}
"|"                         {return '|';}
"["("\]"|[^\]])*"]"         {return 'ANY_GROUP_REGEX';}
"("                         {return '(';}
")"                         {return ')';}
"+"                         {return '+';}
"*"                         {return '*';}
"?"                         {return '?';}
"^"                         {return '^';}
"/"                         {return '/';}
"\\"[a-zA-Z0]               {return 'ESCAPE_CHAR';}
"$"                         {return '$';}
"<<EOF>>"                   {return '$';}
"."                         {return '.';}
"%%"                        {return '%%';}
"{"\d+(","\s?\d+|",")?"}"   {return 'RANGE_REGEX';}
/"{"                        %{if (yy.freshLine) { this.input('{'); return '{'; } else { this.unput('y'); }%}
"}"                         %{return '}';%}
"%{"(.|\n)*?"}%"            {yytext = yytext.substr(2, yyleng - 4); return 'ACTION';}
.                           {/* ignore bad characters */}
<<EOF>>                     {return 'EOF';}

samples/Jison/ansic.jison | 418 (new file)
@@ -0,0 +1,418 @@
%token IDENTIFIER CONSTANT STRING_LITERAL SIZEOF
%token PTR_OP INC_OP DEC_OP LEFT_OP RIGHT_OP LE_OP GE_OP EQ_OP NE_OP
%token AND_OP OR_OP MUL_ASSIGN DIV_ASSIGN MOD_ASSIGN ADD_ASSIGN
%token SUB_ASSIGN LEFT_ASSIGN RIGHT_ASSIGN AND_ASSIGN
%token XOR_ASSIGN OR_ASSIGN TYPE_NAME

%token TYPEDEF EXTERN STATIC AUTO REGISTER
%token CHAR SHORT INT LONG SIGNED UNSIGNED FLOAT DOUBLE CONST VOLATILE VOID
%token STRUCT UNION ENUM ELLIPSIS

%token CASE DEFAULT IF ELSE SWITCH WHILE DO FOR GOTO CONTINUE BREAK RETURN

%nonassoc IF_WITHOUT_ELSE
%nonassoc ELSE

%start translation_unit
%%

primary_expression
    : IDENTIFIER
    | CONSTANT
    | STRING_LITERAL
    | '(' expression ')'
    ;

postfix_expression
    : primary_expression
    | postfix_expression '[' expression ']'
    | postfix_expression '(' ')'
    | postfix_expression '(' argument_expression_list ')'
    | postfix_expression '.' IDENTIFIER
    | postfix_expression PTR_OP IDENTIFIER
    | postfix_expression INC_OP
    | postfix_expression DEC_OP
    ;

argument_expression_list
    : assignment_expression
    | argument_expression_list ',' assignment_expression
    ;

unary_expression
    : postfix_expression
    | INC_OP unary_expression
    | DEC_OP unary_expression
    | unary_operator cast_expression
    | SIZEOF unary_expression
    | SIZEOF '(' type_name ')'
    ;

unary_operator
    : '&'
    | '*'
    | '+'
    | '-'
    | '~'
    | '!'
    ;

cast_expression
    : unary_expression
    | '(' type_name ')' cast_expression
    ;

multiplicative_expression
    : cast_expression
    | multiplicative_expression '*' cast_expression
    | multiplicative_expression '/' cast_expression
    | multiplicative_expression '%' cast_expression
    ;

additive_expression
    : multiplicative_expression
    | additive_expression '+' multiplicative_expression
    | additive_expression '-' multiplicative_expression
    ;

shift_expression
    : additive_expression
    | shift_expression LEFT_OP additive_expression
    | shift_expression RIGHT_OP additive_expression
    ;

relational_expression
    : shift_expression
    | relational_expression '<' shift_expression
    | relational_expression '>' shift_expression
    | relational_expression LE_OP shift_expression
    | relational_expression GE_OP shift_expression
    ;

equality_expression
    : relational_expression
    | equality_expression EQ_OP relational_expression
    | equality_expression NE_OP relational_expression
    ;

and_expression
    : equality_expression
    | and_expression '&' equality_expression
    ;

exclusive_or_expression
    : and_expression
    | exclusive_or_expression '^' and_expression
    ;

inclusive_or_expression
    : exclusive_or_expression
    | inclusive_or_expression '|' exclusive_or_expression
    ;

logical_and_expression
    : inclusive_or_expression
    | logical_and_expression AND_OP inclusive_or_expression
    ;

logical_or_expression
    : logical_and_expression
    | logical_or_expression OR_OP logical_and_expression
    ;

conditional_expression
    : logical_or_expression
    | logical_or_expression '?' expression ':' conditional_expression
    ;

assignment_expression
    : conditional_expression
    | unary_expression assignment_operator assignment_expression
    ;

assignment_operator
    : '='
    | MUL_ASSIGN
    | DIV_ASSIGN
    | MOD_ASSIGN
    | ADD_ASSIGN
    | SUB_ASSIGN
    | LEFT_ASSIGN
    | RIGHT_ASSIGN
    | AND_ASSIGN
    | XOR_ASSIGN
    | OR_ASSIGN
    ;

expression
    : assignment_expression
    | expression ',' assignment_expression
    ;

constant_expression
    : conditional_expression
    ;

declaration
    : declaration_specifiers ';'
    | declaration_specifiers init_declarator_list ';'
    ;

declaration_specifiers
    : storage_class_specifier
    | storage_class_specifier declaration_specifiers
    | type_specifier
    | type_specifier declaration_specifiers
    | type_qualifier
    | type_qualifier declaration_specifiers
    ;

init_declarator_list
    : init_declarator
    | init_declarator_list ',' init_declarator
    ;

init_declarator
    : declarator
    | declarator '=' initializer
    ;

storage_class_specifier
    : TYPEDEF
    | EXTERN
    | STATIC
    | AUTO
    | REGISTER
    ;

type_specifier
    : VOID
    | CHAR
    | SHORT
    | INT
    | LONG
    | FLOAT
    | DOUBLE
    | SIGNED
    | UNSIGNED
    | struct_or_union_specifier
    | enum_specifier
    | TYPE_NAME
    ;

struct_or_union_specifier
    : struct_or_union IDENTIFIER '{' struct_declaration_list '}'
    | struct_or_union '{' struct_declaration_list '}'
    | struct_or_union IDENTIFIER
    ;

struct_or_union
    : STRUCT
    | UNION
    ;

struct_declaration_list
    : struct_declaration
    | struct_declaration_list struct_declaration
    ;

struct_declaration
    : specifier_qualifier_list struct_declarator_list ';'
    ;

specifier_qualifier_list
    : type_specifier specifier_qualifier_list
    | type_specifier
    | type_qualifier specifier_qualifier_list
    | type_qualifier
    ;

struct_declarator_list
    : struct_declarator
    | struct_declarator_list ',' struct_declarator
    ;

struct_declarator
    : declarator
    | ':' constant_expression
    | declarator ':' constant_expression
    ;

enum_specifier
    : ENUM '{' enumerator_list '}'
    | ENUM IDENTIFIER '{' enumerator_list '}'
    | ENUM IDENTIFIER
    ;

enumerator_list
    : enumerator
    | enumerator_list ',' enumerator
    ;

enumerator
    : IDENTIFIER
    | IDENTIFIER '=' constant_expression
    ;

type_qualifier
    : CONST
    | VOLATILE
    ;

declarator
    : pointer direct_declarator
    | direct_declarator
    ;

direct_declarator
    : IDENTIFIER
    | '(' declarator ')'
    | direct_declarator '[' constant_expression ']'
    | direct_declarator '[' ']'
    | direct_declarator '(' parameter_type_list ')'
    | direct_declarator '(' identifier_list ')'
    | direct_declarator '(' ')'
    ;

pointer
    : '*'
    | '*' type_qualifier_list
    | '*' pointer
    | '*' type_qualifier_list pointer
    ;

type_qualifier_list
    : type_qualifier
    | type_qualifier_list type_qualifier
    ;


parameter_type_list
    : parameter_list
    | parameter_list ',' ELLIPSIS
    ;

parameter_list
    : parameter_declaration
    | parameter_list ',' parameter_declaration
    ;

parameter_declaration
    : declaration_specifiers declarator
    | declaration_specifiers abstract_declarator
    | declaration_specifiers
    ;

identifier_list
    : IDENTIFIER
    | identifier_list ',' IDENTIFIER
    ;

type_name
    : specifier_qualifier_list
    | specifier_qualifier_list abstract_declarator
    ;

abstract_declarator
    : pointer
    | direct_abstract_declarator
    | pointer direct_abstract_declarator
    ;

direct_abstract_declarator
    : '(' abstract_declarator ')'
    | '[' ']'
    | '[' constant_expression ']'
    | direct_abstract_declarator '[' ']'
    | direct_abstract_declarator '[' constant_expression ']'
    | '(' ')'
    | '(' parameter_type_list ')'
    | direct_abstract_declarator '(' ')'
    | direct_abstract_declarator '(' parameter_type_list ')'
    ;

initializer
    : assignment_expression
    | '{' initializer_list '}'
    | '{' initializer_list ',' '}'
    ;

initializer_list
    : initializer
    | initializer_list ',' initializer
    ;

statement
    : labeled_statement
    | compound_statement
    | expression_statement
    | selection_statement
    | iteration_statement
    | jump_statement
    ;

labeled_statement
    : IDENTIFIER ':' statement
    | CASE constant_expression ':' statement
    | DEFAULT ':' statement
    ;

compound_statement
    : '{' '}'
    | '{' statement_list '}'
    | '{' declaration_list '}'
    | '{' declaration_list statement_list '}'
    ;

declaration_list
    : declaration
    | declaration_list declaration
    ;

statement_list
    : statement
    | statement_list statement
    ;

expression_statement
    : ';'
    | expression ';'
    ;

selection_statement
    : IF '(' expression ')' statement %prec IF_WITHOUT_ELSE
    | IF '(' expression ')' statement ELSE statement
    | SWITCH '(' expression ')' statement
    ;

iteration_statement
    : WHILE '(' expression ')' statement
    | DO statement WHILE '(' expression ')' ';'
    | FOR '(' expression_statement expression_statement ')' statement
    | FOR '(' expression_statement expression_statement expression ')' statement
    ;

jump_statement
    : GOTO IDENTIFIER ';'
    | CONTINUE ';'
    | BREAK ';'
    | RETURN ';'
    | RETURN expression ';'
    ;

translation_unit
    : external_declaration
    | translation_unit external_declaration
    ;

external_declaration
    : function_definition
    | declaration
    ;

function_definition
    : declaration_specifiers declarator declaration_list compound_statement
    | declaration_specifiers declarator compound_statement
    | declarator declaration_list compound_statement
    | declarator compound_statement
    ;

samples/Jison/classy.jison | 84 (new file)
@@ -0,0 +1,84 @@

/* description: ClassyLang grammar. Very classy. */
/*
  To build parser:

    $ ./bin/jison examples/classy.jison examples/classy.jisonlex

*/


/* author: Zach Carter */

%right ASSIGN
%left OR
%nonassoc EQUALITY GREATER
%left PLUS MINUS
%left TIMES
%right NOT
%left DOT

%%

pgm
    : cdl MAIN LBRACE vdl el RBRACE ENDOFFILE
    ;

cdl
    : c cdl
    |
    ;

c
    : CLASS id EXTENDS id LBRACE vdl mdl RBRACE
    ;

vdl
    : VAR t id SEMICOLON vdl
    |
    ;

mdl
    : t id LPAREN t id RPAREN LBRACE vdl el RBRACE mdl
    |
    ;

t
    : NATTYPE
    | id
    ;

id
    : ID
    ;

el
    : e SEMICOLON el
    | e SEMICOLON
    ;

e
    : NATLITERAL
    | NUL
    | id
    | NEW id
    | THIS
    | IF LPAREN e RPAREN LBRACE el RBRACE ELSE LBRACE el RBRACE
    | FOR LPAREN e SEMICOLON e SEMICOLON e RPAREN LBRACE el RBRACE
    | READNAT LPAREN RPAREN
    | PRINTNAT LPAREN e RPAREN
    | e PLUS e
    | e MINUS e
    | e TIMES e
    | e EQUALITY e
    | e GREATER e
    | NOT e
    | e OR e
    | e DOT id
    | id ASSIGN e
    | e DOT id ASSIGN e
    | id LPAREN e RPAREN
    | e DOT id LPAREN e RPAREN
    | LPAREN e RPAREN
    ;

samples/Jison/lex.jison | 145 (new file)
@@ -0,0 +1,145 @@

// `%nonassoc` tells the parser compiler (JISON) that these tokens cannot occur more than once,
// i.e. input like '//a' (tokens '/', '/' and 'a') is not a legal input while '/a' (tokens '/' and 'a')
// *is* legal input for this grammar.

%nonassoc '/' '/!'

// Likewise for `%left`: this informs the LALR(1) grammar compiler (JISON) that these tokens
// *can* occur repeatedly, e.g. 'a?*' and even 'a**' are considered legal inputs given this
// grammar!
//
// Token `RANGE_REGEX` may seem the odd one out here but really isn't: given the `regex_base`
// choice/rule `regex_base range_regex`, which is recursive, this grammar tells JISON that
// any input matching a sequence like `regex_base range_regex range_regex` *is* legal.
// If you do not want that to be legal, you MUST adjust the grammar rule set to match your
// actual intent.

%left '*' '+' '?' RANGE_REGEX


%%

lex
    : definitions include '%%' rules '%%' EOF
        {{ $$ = {macros: $1, rules: $4};
          if ($2) $$.actionInclude = $2;
          return $$; }}
    | definitions include '%%' rules EOF
        {{ $$ = {macros: $1, rules: $4};
          if ($2) $$.actionInclude = $2;
          return $$; }}
    ;

include
    : action
    |
    ;

definitions
    : definitions definition
        { $$ = $1; $$.concat($2); }
    | definition
        { $$ = [$1]; }
    ;

definition
    : name regex
        { $$ = [$1, $2]; }
    ;

name
    : NAME
        { $$ = yytext; }
    ;

rules
    : rules rule
        { $$ = $1; $$.push($2); }
    | rule
        { $$ = [$1]; }
    ;

rule
    : regex action
        { $$ = [$1, $2]; }
    ;

action
    : ACTION
        { $$ = yytext; }
    ;

regex
    : start_caret regex_list end_dollar
        { $$ = $1+$2+$3; }
    ;

start_caret
    : '^'
        { $$ = '^'; }
    |
        { $$ = ''; }
    ;

end_dollar
    : '$'
        { $$ = '$'; }
    |
        { $$ = ''; }
    ;

regex_list
    : regex_list '|' regex_chain
        { $$ = $1+'|'+$3; }
    | regex_chain
    ;

regex_chain
    : regex_chain regex_base
        { $$ = $1+$2;}
    | regex_base
        { $$ = $1;}
    ;

regex_base
    : '(' regex_list ')'
        { $$ = '('+$2+')'; }
    | regex_base '+'
        { $$ = $1+'+'; }
    | regex_base '*'
        { $$ = $1+'*'; }
    | regex_base '?'
        { $$ = $1+'?'; }
    | '/' regex_base
        { $$ = '(?=' + $regex_base + ')'; }
    | '/!' regex_base
        { $$ = '(?!' + $regex_base + ')'; }
    | name_expansion
    | regex_base range_regex
        { $$ = $1+$2; }
    | any_group_regex
    | '.'
        { $$ = '.'; }
    | string
    ;

name_expansion
    : '{' name '}'
        {{ $$ = '{'+$2+'}'; }}
    ;

any_group_regex
    : ANY_GROUP_REGEX
        { $$ = yytext; }
    ;

range_regex
    : RANGE_REGEX
        { $$ = yytext; }
    ;

string
    : STRING_LIT
        { $$ = yy.prepareString(yytext.substr(1, yyleng-2)); }
    ;

samples/Julia/julia | 60 (new file)
@@ -0,0 +1,60 @@
#!/usr/bin/env julia

# From https://github.com/JoshCheek/language-sampler-for-fullpath/blob/b766dcdbd249ec63516f491390a75315e78cba95/julia/fullpath
help_screen = """
usage: fullpath *[relative-paths] [-c]

  Prints the fullpath of the paths
  If no paths are given as args, it will read them from stdin

  If there is only one path, the trailing newline is omitted

  The -c flag will copy the results into your pasteboard
"""

help  = false
copy  = false
dir   = pwd()
paths = []

for arg = ARGS
  if arg == "-h" || arg == "--help"
    help = true
  elseif arg == "-c" || arg == "--copy"
    copy = true
  elseif arg != ""
    push!(paths, arg)
  end
end

if help
  print(help_screen)
  exit()
end

function notempty(string)
  return !isempty(string)
end

if length(paths) == 0
  paths = filter(notempty, map(chomp, readlines()))
end

function print_paths(stream, paths)
  if length(paths) == 1
    path = paths[1]
    print(stream, "$dir/$path")
  else
    for path = paths
      println(stream, "$dir/$path")
    end
  end
end

if copy
  read, write, process = readandwrite(`pbcopy`)
  print_paths(write, paths)
  close(write)
end

print_paths(STDOUT, paths)

samples/LookML/example.model.lkml | 49 (new file)
@@ -0,0 +1,49 @@
- label:          'desired label name'
 | 
			
		||||
- connection:     connection_name
 | 
			
		||||
- include:        filename_or_pattern
 | 
			
		||||
  # Possibly more include declarations
 | 
			
		||||
- persist_for:    N (seconds | minutes | hours)
 | 
			
		||||
- case_sensitive: true | false
 | 
			
		||||
- week_start_day: monday | tuesday | wednesday | thursday | friday | saturday | sunday
 | 
			
		||||
- value_formats:
 | 
			
		||||
  - name: desired_format_name
 | 
			
		||||
    value_format: 'excel-style formatting string'
 | 
			
		||||
  # Possibly more value formats
 | 
			
		||||
 | 
			
		||||
- explore: view_name
 | 
			
		||||
  label:  'desired label name'
 | 
			
		||||
  description: 'description string'
 | 
			
		||||
  symmetric_aggregates: true | false
 | 
			
		||||
  hidden: true | false
 | 
			
		||||
  fields: [field_or_set, field_or_set, …]
 | 
			
		||||
 | 
			
		||||
  sql_always_where: SQL WHERE condition
 | 
			
		||||
  always_filter:
 | 
			
		||||
    field_name: 'looker filter expression'
 | 
			
		||||
  conditionally_filter:
 | 
			
		||||
    field_name: 'looker filter expression'
 | 
			
		||||
    unless: [field_or_set, field_or_set, …]
 | 
			
		||||
  access_filter_fields: [fully_scoped_field, fully_scoped_field, …]
 | 
			
		||||
 | 
			
		||||
  always_join: [view_name, view_name, …]
 | 
			
		||||
  joins:
 | 
			
		||||
    - join: view_name
 | 
			
		||||
      type: left_outer | full_outer | inner | cross
 | 
			
		||||
      relationship: one_to_one | many_to_one | one_to_many | many_to_many
 | 
			
		||||
      from: view_name
 | 
			
		||||
      sql_table_name: table_name
 | 
			
		||||
      view_label: 'desired label name'
 | 
			
		||||
      fields: [field_or_set, field_or_set, …]
 | 
			
		||||
      required_joins: [view_name, view_name, …]
 | 
			
		||||
      foreign_key: dimension_name
 | 
			
		||||
      sql_on: SQL ON clause
 | 
			
		||||
    # Possibly more join declarations
 | 
			
		||||
 | 
			
		||||
  persist_for: N (seconds | minutes | hours)
 | 
			
		||||
  from: view_name
 | 
			
		||||
  view: view_name
 | 
			
		||||
  case_sensitive: true | false
 | 
			
		||||
  sql_table_name: table_name
 | 
			
		||||
  cancel_grouping_fields: [fully_scoped_field, fully_scoped_field, …]
 | 
			
		||||
 | 
			
		||||
# Possibly more explore declarations
 | 
			
		||||
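For reference, the model template above enumerates the parameters that the YAML-style LookML model format accepts. A minimal concrete model built from a handful of those parameters might look like the sketch below; the connection name (thelook) and the view names (orders, users) are hypothetical placeholders, not part of this sample set:

- connection: thelook          # hypothetical connection name
- include: "*.view.lkml"       # pull in all view files

- explore: orders
  label: 'Orders'
  joins:
    - join: users
      type: left_outer
      relationship: many_to_one
      sql_on: orders.user_id = users.id   # plain SQL ON clause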
							
								
								
									
90  samples/LookML/example.view.lkml  Normal file
@@ -0,0 +1,90 @@
- view: view_name
  sql_table_name: table_name
  suggestions: true | false

  derived_table:
    sql: SQL query
    persist_for: N (seconds | minutes | hours)
    sql_trigger_value: SQL query
    distribution: column_name
    distribution_style: ALL | EVEN
    sortkeys: [column_name, column_name, …]
    indexes: [column_name, column_name, …]

  sets:
    set_name:
      - field_or_set
      - field_or_set
      - …
    # Possibly more set declarations

  fields:
  - (dimension | dimension_group | measure | filter): field_name
    label: 'desired label name'
    view_label: 'desired label name'
    group_label: 'desired label name'
    description: 'description string'
    hidden: true | false
    alias: [old_field_name, old_field_name, …]
    value_format: 'excel-style formatting string'
    value_format_name: format_name
    html: HTML expression using Liquid template elements
    sql: SQL expression to generate the field value
    required_fields: [field_name, field_name, …]
    drill_fields: [field_or_set, field_or_set, …]
    can_filter: true | false
    fanout_on: repeated_record_name

    # DIMENSION SPECIFIC PARAMETERS

    type: dimension_field_type
    primary_key: true | false
    sql_case:
      value: SQL condition
      value: SQL condition
      # Possibly more sql_case statements
    alpha_sort: true | false
    tiers: [N, N, …]
    style: classic | interval | integer | relational
    sql_latitude: SQL expression to generate a latitude
    sql_longitude: SQL expression to generate a longitude
    suggestable: true | false
    suggest_persist_for: N (seconds | minutes | hours)
    suggest_dimension: dimension_name
    suggest_explore: explore_name
    suggestions: ['suggestion string', 'suggestion string', …]
    bypass_suggest_restrictions: true | false
    full_suggestions: true | false
    skip_drill_filter: true | false
    case_sensitive: true | false
    order_by_field: dimension_name
    map_layer: name_of_map_layer
    links:
      - label: 'desired label name'
        url: desired_url
        icon_url: url_of_an_ico_file
      # Possibly more links

    # DIMENSION GROUP SPECIFIC PARAMETERS

    timeframes: [timeframe, timeframe, …]
    convert_tz: true | false
    datatype: epoch | timestamp | datetime | date | yyyymmdd

    # MEASURE SPECIFIC PARAMETERS

    type: measure_field_type
    direction: row | column
    approximate: true | false
    approximate_threshold: N
    sql_distinct_key: SQL expression to define repeated entities
    list_field: dimension_name
    filters:
      dimension_name: 'looker filter expression'
      # Possibly more filters statements

    # FILTER SPECIFIC PARAMETERS

    default_value: 'desired default value'

  # Possibly more dimension or measure declarations
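Likewise, a minimal concrete view drawing only on parameters listed in the template above could look like the following sketch; the table name (public.orders), the field names, and the field types chosen (int, time, count) are assumptions for illustration rather than part of this sample:

- view: orders
  sql_table_name: public.orders

  fields:
  - dimension: id
    type: int
    primary_key: true
    sql: orders.id               # SQL expression for the field value

  - dimension_group: created
    type: time
    timeframes: [date, week, month]
    sql: orders.created_at

  - measure: order_count
    type: count
    drill_fields: [id, created_date]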
							
								
								
									
47  samples/MQL4/header-sample.mqh  Normal file
@@ -0,0 +1,47 @@
//+------------------------------------------------------------------+
//|                                                header-sample.mqh |
//|                                   Copyright 2016, Andrey Osorgin |
//+------------------------------------------------------------------+
//|                     The MIT License (MIT)                        |
//|                                                                  |
//| Permission is hereby granted, free of charge, to any person      |
//| obtaining a copy of this software and associated documentation   |
//| files (the "Software"), to deal in the Software without          |
//| restriction, including without limitation the rights to use,     |
//| copy, modify, merge, publish, distribute, sublicense, and/or sell|
//| copies of the Software, and to permit persons to whom the        |
//| Software is furnished to do so, subject to the following         |
//| conditions:                                                      |
//|                                                                  |
//| The above copyright notice and this permission notice shall be   |
//| included in all copies or substantial portions of the Software.  |
//|                                                                  |
//| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,  |
//| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES  |
//| OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND         |
//| NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT      |
//| HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,     |
//| WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING     |
//| FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR    |
//| OTHER DEALINGS IN THE SOFTWARE.                                  |
//|                                                                  |
//| A copy of the MIT License (MIT) is available at                  |
//| https://opensource.org/licenses/MIT                              |
//+------------------------------------------------------------------+
#property strict
//+------------------------------------------------------------------+
//|                                                                  |
//+------------------------------------------------------------------+
class CSomeObject
  {
protected:
   int               m_someproperty;
private:
   bool              SomeFunction() {return true;}
public:
                     CSomeObject(void): m_someproperty(0) {}
                    ~CSomeObject(void)                    {}
   void              SetName(int n){m_someproperty=n;}// sets somepropery
   int               GetName(){return(m_someproperty);} // returns someproperty
  };
//+------------------------------------------------------------------+
							
								
								
									
61  samples/MQL4/indicator-sample.mq4  Normal file
@@ -0,0 +1,61 @@
//+------------------------------------------------------------------+
//|                                             indicator-sample.mq4 |
//|                                   Copyright 2016, Andrey Osorgin |
//+------------------------------------------------------------------+
//|                     The MIT License (MIT)                        |
//|                                                                  |
//| Permission is hereby granted, free of charge, to any person      |
//| obtaining a copy of this software and associated documentation   |
//| files (the "Software"), to deal in the Software without          |
//| restriction, including without limitation the rights to use,     |
//| copy, modify, merge, publish, distribute, sublicense, and/or sell|
//| copies of the Software, and to permit persons to whom the        |
//| Software is furnished to do so, subject to the following         |
//| conditions:                                                      |
//|                                                                  |
//| The above copyright notice and this permission notice shall be   |
//| included in all copies or substantial portions of the Software.  |
//|                                                                  |
//| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,  |
//| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES  |
//| OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND         |
//| NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT      |
//| HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,     |
//| WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING     |
//| FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR    |
//| OTHER DEALINGS IN THE SOFTWARE.                                  |
//|                                                                  |
//| A copy of the MIT License (MIT) is available at                  |
//| https://opensource.org/licenses/MIT                              |
//+------------------------------------------------------------------+
#property version   "1.00"
#property strict

#property indicator_chart_window
#property indicator_plots 0
//+------------------------------------------------------------------+
//| Custom indicator initialization function                         |
//+------------------------------------------------------------------+
void OnInit(void)
  {
 //---
   }
//+------------------------------------------------------------------+
//| Bears Power                                                      |
//+------------------------------------------------------------------+
int OnCalculate(const int rates_total,
                const int prev_calculated,
                const datetime &time[],
                const double &open[],
                const double &high[],
                const double &low[],
                const double &close[],
                const long &tick_volume[],
                const long &volume[],
                const int &spread[])
  {
  Print("The number of bars on the current chart: ",iBars(Symbol(),Period()));
//---
   return(rates_total);
  }
//+------------------------------------------------------------------+
							
								
								
									
51  samples/MQL4/script-sample.mq4  Normal file
@@ -0,0 +1,51 @@
//+------------------------------------------------------------------+
//|                                                script-sample.mq4 |
//|                                   Copyright 2016, Andrey Osorgin |
//+------------------------------------------------------------------+
//|                     The MIT License (MIT)                        |
//|                                                                  |
//| Permission is hereby granted, free of charge, to any person      |
//| obtaining a copy of this software and associated documentation   |
//| files (the "Software"), to deal in the Software without          |
//| restriction, including without limitation the rights to use,     |
//| copy, modify, merge, publish, distribute, sublicense, and/or sell|
//| copies of the Software, and to permit persons to whom the        |
//| Software is furnished to do so, subject to the following         |
//| conditions:                                                      |
//|                                                                  |
//| The above copyright notice and this permission notice shall be   |
//| included in all copies or substantial portions of the Software.  |
//|                                                                  |
//| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,  |
//| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES  |
//| OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND         |
//| NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT      |
//| HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,     |
//| WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING     |
//| FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR    |
//| OTHER DEALINGS IN THE SOFTWARE.                                  |
//|                                                                  |
//| A copy of the MIT License (MIT) is available at                  |
//| https://opensource.org/licenses/MIT                              |
//+------------------------------------------------------------------+
#property version   "1.00"
#property strict
#property script_show_inputs

input int StopLoss=100; //Stop Loss
input int TakeProfit=100; //Take Profit
//+------------------------------------------------------------------+
//| Script program start function                                    |
//+------------------------------------------------------------------+
void OnStart()
  {
   double minstoplevel=MarketInfo(Symbol(),MODE_STOPLEVEL);
   Print("Minimum Stop Level=",minstoplevel," points");
//---
   double sl=NormalizeDouble(Bid-StopLoss*Point,Digits);
   double tp=NormalizeDouble(Ask+TakeProfit*Point,Digits);
//---
   int result=OrderSend(Symbol(),OP_BUY,0.01,Ask,1,sl,tp,"Test",0,0,clrNONE);
   Print("Success? ",result);
  }
//+------------------------------------------------------------------+
							
								
								
									
1390  samples/MQL5/Regex.mqh  Normal file
File diff suppressed because it is too large
							
								
								
									
64  samples/MQL5/indicator-sample.mq5  Normal file
@@ -0,0 +1,64 @@
//+------------------------------------------------------------------+
//|                                             indicator-sample.mq5 |
//|                                   Copyright 2016, Andrey Osorgin |
//+------------------------------------------------------------------+
//|                     The MIT License (MIT)                        |
//|                                                                  |
//| Permission is hereby granted, free of charge, to any person      |
//| obtaining a copy of this software and associated documentation   |
//| files (the "Software"), to deal in the Software without          |
//| restriction, including without limitation the rights to use,     |
//| copy, modify, merge, publish, distribute, sublicense, and/or sell|
//| copies of the Software, and to permit persons to whom the        |
//| Software is furnished to do so, subject to the following         |
//| conditions:                                                      |
//|                                                                  |
//| The above copyright notice and this permission notice shall be   |
//| included in all copies or substantial portions of the Software.  |
//|                                                                  |
//| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,  |
//| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES  |
//| OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND         |
//| NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT      |
//| HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,     |
//| WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING     |
//| FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR    |
//| OTHER DEALINGS IN THE SOFTWARE.                                  |
//|                                                                  |
//| A copy of the MIT License (MIT) is available at                  |
//| https://opensource.org/licenses/MIT                              |
//+------------------------------------------------------------------+
#property version   "1.00"

#property indicator_chart_window
#property indicator_plots   0
//+------------------------------------------------------------------+
//| Custom indicator initialization function                         |
//+------------------------------------------------------------------+
int OnInit()
  {
//---
   return(INIT_SUCCEEDED);
  }
//+------------------------------------------------------------------+
//| Custom indicator iteration function                              |
//+------------------------------------------------------------------+
int OnCalculate(const int rates_total,
                const int prev_calculated,
                const datetime &time[],
                const double &open[],
                const double &high[],
                const double &low[],
                const double &close[],
                const long &tick_volume[],
                const long &volume[],
                const int &spread[])
  {
//---
   int bars=Bars(Symbol(),0);
   Print("Bars = ",bars,", rates_total = ",rates_total,",  prev_calculated = ",prev_calculated);
   Print("time[0] = ",time[0]," time[rates_total-1] = ",time[rates_total-1]);
//--- return value of prev_calculated for next call
   return(rates_total);
  }
//+------------------------------------------------------------------+
							
								
								
									
56  samples/MQL5/script-sample.mq5  Normal file
@@ -0,0 +1,56 @@
//+------------------------------------------------------------------+
//|                                                script-sample.mq5 |
//|                                   Copyright 2016, Andrey Osorgin |
//+------------------------------------------------------------------+
//|                     The MIT License (MIT)                        |
//|                                                                  |
//| Permission is hereby granted, free of charge, to any person      |
//| obtaining a copy of this software and associated documentation   |
//| files (the "Software"), to deal in the Software without          |
//| restriction, including without limitation the rights to use,     |
//| copy, modify, merge, publish, distribute, sublicense, and/or sell|
//| copies of the Software, and to permit persons to whom the        |
//| Software is furnished to do so, subject to the following         |
//| conditions:                                                      |
//|                                                                  |
//| The above copyright notice and this permission notice shall be   |
//| included in all copies or substantial portions of the Software.  |
//|                                                                  |
//| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,  |
//| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES  |
//| OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND         |
//| NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT      |
//| HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,     |
//| WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING     |
//| FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR    |
//| OTHER DEALINGS IN THE SOFTWARE.                                  |
//|                                                                  |
//| A copy of the MIT License (MIT) is available at                  |
//| https://opensource.org/licenses/MIT                              |
//+------------------------------------------------------------------+
#property version   "1.00"
#property script_show_inputs

#include <Trade\Trade.mqh>

input int StopLoss=100; // Stop Loss
input int TakeProfit=100; // Take Profit
//+------------------------------------------------------------------+
//| Script program start function                                    |
//+------------------------------------------------------------------+
void OnStart()
  {
   CTrade trade;
//---
   long stoplevel=SymbolInfoInteger(Symbol(),SYMBOL_TRADE_STOPS_LEVEL);
   Print("Minimum stop level is: ",stoplevel);
   double ask=SymbolInfoDouble(Symbol(),SYMBOL_ASK);
   double bid=SymbolInfoDouble(Symbol(),SYMBOL_BID);
   double sl = NormalizeDouble(bid - StopLoss*Point(),Digits());
   double tp = NormalizeDouble(ask + TakeProfit*Point(),Digits());
//---
   bool result=trade.Buy(0.01,Symbol(),ask,sl,tp,"test");
//---
   Print("Success? ",result);
  }
//+------------------------------------------------------------------+
							
								
								
									
134  samples/Makefile/file-icons.make  Normal file
@@ -0,0 +1,134 @@
charmap     := charmap.md
font-name   := file-icons
font-folder := dist
font-config := icomoon.json
icon-size   := 34
icon-folder := svg
repo-name   := Alhadis/FileIcons
svg         := $(wildcard $(icon-folder)/*.svg)
last-commit  = $(shell git log -1 --oneline --no-abbrev | cut -d' ' -f1)


all: unpack $(font-folder)/$(font-name).woff2 charmap


# Aliases
unpack:  $(font-folder)/$(font-name).ttf
charmap: $(charmap)


# Extract a downloaded IcoMoon folder
$(font-folder)/%.ttf: %.zip
	@rm -rf $(font-folder) tmp $(font-config)
	@unzip -qd tmp $^
	@mv tmp/fonts $(font-folder)
	@mv tmp/selection.json $(font-config)
	@rm -rf tmp $^
	@perl -pi -e 's|^( {2})+|"\t" x (length($$&)/2)|ge' $(font-config)
	@echo "" >> $(font-config) # Ensure trailing newline
	@echo "Files extracted."


# Generate a WOFF2 file from a TTF
%.woff2: %.ttf
	@[ ! -f $@ ] && { \
		hash woff2_compress 2>/dev/null || { \
			echo >&2 "WOFF2 conversion tools not found. Consult the readme file."; \
			exit 2; \
		}; \
		woff2_compress $^ >/dev/null; \
		echo "WOFF2 file generated."; \
	};


# Clean up SVG source
lint: $(svg)
	@perl -0777 -pi -e '\
		s/\r\n/\n/g; \
		s/<g id="icomoon-ignore">\s*<\/g>//gmi; \
		s/<g\s*>\s*<\/g>//gmi; \
		s/\s+(id|viewBox|xml:space)="[^"]*"/ /gmi; \
		s/<!DOCTYPE[^>]*>//gi; \
		s/<\?xml.*?\?>//gi; \
		s/<!--.*?-->//gm; \
		s/ style="enable-background:.*?;"//gmi; \
		s/"\s+>/">/g; \
		s/\x20{2,}/ /g; \
		s/[\t\n]+//gm;' $^


# Generate/update character map
$(charmap):
	@./create-map.pl -r=$(repo-name) -i=$(icon-folder) --size=$(icon-size) $(font-folder)/$(font-name).svg $@


# POSIX systems only: reattach hard links to File-Icons package
relink:
	@$(call need-var,ATOM_FILE_ICONS,ERROR_NO_PKG)
	@ln -f $(font-folder)/$(font-name).woff2 $(wildcard $(ATOM_FILE_ICONS)/fonts/file-icons-*.woff2)


# Force an icon's preview to be refreshed on GitHub
cachebust:
	@$(call need-var,icon,ERROR_NO_ICON)
	@base="https://cdn.rawgit.com/Alhadis/FileIcons/"; \
	perl -pi -e 's{$$base\K\w+(?=/svg/$(icon:%.svg=%)\.svg")}{$(last-commit)}ig;' $(charmap)


# Dummy task to improve feedback if `cachebust` is mistyped
icon:
	$(call need-var,,ERROR_UNDEF_ICON)


# Reset unstaged changes/additions in object directories
clean:
	@git clean -fd $(font-folder)
	@git checkout -- $(font-folder) 2>/dev/null || true


# Delete extracted and generated files
distclean:
	@rm -rf $(font-folder)


.PHONY: clean distclean $(charmap) cachebust icon
.ONESHELL:


# Error message shown to users attempting to run `make relink` without a link
ERROR_NO_PKG := Environment variable ATOM_FILE_ICONS not found. \
	| \
	| Try this instead:\
	| \
	| \	make relink ATOM_FILE_ICONS=/path/to/your/file-icons/installation |


# Error message shown when running `make cachebust` without an icon
ERROR_NO_ICON := No icon specified. Task aborted.| \
	| Usage: \
	| \	make icon=file[.svg] cachebust \
	| \
	| Examples: \
	| \	make icon=Manpage cachebust \
	| \	make icon=APL.svg cachebust |


# Shown if user tries running `make icon NAME cachebust` by mistake
ERROR_UNDEF_ICON := No task named \"icon\". \
	| \
	| Did you mean this? \
	| \	make icon=NAME cachebust |


# If the given value is empty, die with an error message
need = @$(if $(1),,echo $(subst | ,$$'\n',$(2)); exit 2)

# Like `need`, but uses variable names instead of string values
need-var = @$(call need,$($(1)),$($(2)))
Some files were not shown because too many files have changed in this diff.