Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 09:40:21 +00:00
Merge branch 'master' into revert-2014-revert-1976-path-for-fileblob
Conflicts: lib/linguist/version.rb
.gitmodules (vendored): 92 changed lines
@@ -121,9 +121,9 @@
[submodule "vendor/grammars/Handlebars"]
	path = vendor/grammars/Handlebars
	url = https://github.com/daaain/Handlebars
[submodule "vendor/grammars/powershell.tmbundle"]
	path = vendor/grammars/powershell.tmbundle
	url = https://github.com/davidpeckham/powershell.tmbundle
[submodule "vendor/grammars/powershell"]
	path = vendor/grammars/powershell
	url = https://github.com/SublimeText/PowerShell
[submodule "vendor/grammars/jade-tmbundle"]
	path = vendor/grammars/jade-tmbundle
	url = https://github.com/davidrios/jade-tmbundle
@@ -249,7 +249,7 @@
	url = https://github.com/shellderp/sublime-robot-plugin
[submodule "vendor/grammars/actionscript3-tmbundle"]
	path = vendor/grammars/actionscript3-tmbundle
	url = https://github.com/simongregory/actionscript3-tmbundle
	url = https://github.com/honzabrecka/actionscript3-tmbundle
[submodule "vendor/grammars/Sublime-QML"]
	path = vendor/grammars/Sublime-QML
	url = https://github.com/skozlovf/Sublime-QML
@@ -337,6 +337,9 @@
[submodule "vendor/grammars/ini.tmbundle"]
	path = vendor/grammars/ini.tmbundle
	url = https://github.com/textmate/ini.tmbundle
[submodule "vendor/grammars/desktop.tmbundle"]
	path = vendor/grammars/desktop.tmbundle
	url = https://github.com/Mailaender/desktop.tmbundle.git
[submodule "vendor/grammars/io.tmbundle"]
	path = vendor/grammars/io.tmbundle
	url = https://github.com/textmate/io.tmbundle
@@ -525,9 +528,6 @@
[submodule "vendor/grammars/sublime-bsv"]
	path = vendor/grammars/sublime-bsv
	url = https://github.com/thotypous/sublime-bsv
[submodule "vendor/grammars/AutoHotkey"]
	path = vendor/grammars/AutoHotkey
	url = https://github.com/robertcollier4/AutoHotkey
[submodule "vendor/grammars/Sublime-HTTP"]
	path = vendor/grammars/Sublime-HTTP
	url = https://github.com/httpspec/sublime-highlighting
@@ -549,3 +549,81 @@
|
||||
[submodule "vendor/grammars/turtle.tmbundle"]
|
||||
path = vendor/grammars/turtle.tmbundle
|
||||
url = https://github.com/peta/turtle.tmbundle
|
||||
[submodule "vendor/grammars/liquid.tmbundle"]
|
||||
path = vendor/grammars/liquid.tmbundle
|
||||
url = https://github.com/bastilian/validcode-textmate-bundles
|
||||
[submodule "vendor/grammars/ats.sublime"]
|
||||
path = vendor/grammars/ats.sublime
|
||||
url = https://github.com/steinwaywhw/ats-mode-sublimetext
|
||||
[submodule "vendor/grammars/Modelica"]
|
||||
path = vendor/grammars/Modelica
|
||||
url = https://github.com/BorisChumichev/modelicaSublimeTextPackage
|
||||
[submodule "vendor/grammars/sublime-apl"]
|
||||
path = vendor/grammars/sublime-apl
|
||||
url = https://github.com/StoneCypher/sublime-apl
|
||||
[submodule "vendor/grammars/CLIPS-sublime"]
|
||||
path = vendor/grammars/CLIPS-sublime
|
||||
url = https://github.com/psicomante/CLIPS-sublime
|
||||
[submodule "vendor/grammars/Creole"]
|
||||
path = vendor/grammars/Creole
|
||||
url = https://github.com/Siddley/Creole
|
||||
[submodule "vendor/grammars/GDScript-sublime"]
|
||||
path = vendor/grammars/GDScript-sublime
|
||||
url = https://github.com/beefsack/GDScript-sublime
|
||||
[submodule "vendor/grammars/sublime-golo"]
|
||||
path = vendor/grammars/sublime-golo
|
||||
url = https://github.com/TypeUnsafe/sublime-golo
|
||||
[submodule "vendor/grammars/JSyntax"]
|
||||
path = vendor/grammars/JSyntax
|
||||
url = https://github.com/bcj/JSyntax
|
||||
[submodule "vendor/grammars/TXL"]
|
||||
path = vendor/grammars/TXL
|
||||
url = https://github.com/MikeHoffert/Sublime-Text-TXL-syntax
|
||||
[submodule "vendor/grammars/G-Code"]
|
||||
path = vendor/grammars/G-Code
|
||||
url = https://github.com/robotmaster/sublime-text-syntax-highlighting
|
||||
[submodule "vendor/grammars/grace-tmbundle"]
|
||||
path = vendor/grammars/grace-tmbundle
|
||||
url = https://github.com/zmthy/grace-tmbundle
|
||||
[submodule "vendor/grammars/sublime-text-ox"]
|
||||
path = vendor/grammars/sublime-text-ox
|
||||
url = https://github.com/andreashetland/sublime-text-ox
|
||||
[submodule "vendor/grammars/AutoHotkey"]
|
||||
path = vendor/grammars/AutoHotkey
|
||||
url = https://github.com/ahkscript/SublimeAutoHotkey
|
||||
[submodule "vendor/grammars/ec.tmbundle"]
|
||||
path = vendor/grammars/ec.tmbundle
|
||||
url = https://github.com/ecere/ec.tmbundle
|
||||
[submodule "vendor/grammars/InnoSetup"]
|
||||
path = vendor/grammars/InnoSetup
|
||||
url = https://github.com/idleberg/InnoSetup-Sublime-Text
|
||||
[submodule "vendor/grammars/gap-tmbundle"]
|
||||
path = vendor/grammars/gap-tmbundle
|
||||
url = https://github.com/dhowden/gap-tmbundle
|
||||
[submodule "vendor/grammars/SublimePapyrus"]
|
||||
path = vendor/grammars/SublimePapyrus
|
||||
url = https://github.com/Kapiainen/SublimePapyrus
|
||||
[submodule "vendor/grammars/sublime-spintools"]
|
||||
path = vendor/grammars/sublime-spintools
|
||||
url = https://github.com/bitbased/sublime-spintools
|
||||
[submodule "vendor/grammars/PogoScript.tmbundle"]
|
||||
path = vendor/grammars/PogoScript.tmbundle
|
||||
url = https://github.com/featurist/PogoScript.tmbundle
|
||||
[submodule "vendor/grammars/sublime-opal"]
|
||||
path = vendor/grammars/sublime-opal
|
||||
url = https://github.com/artifactz/sublime-opal
|
||||
[submodule "vendor/grammars/mediawiki.tmbundle"]
|
||||
path = vendor/grammars/mediawiki.tmbundle
|
||||
url = https://github.com/textmate/mediawiki.tmbundle
|
||||
[submodule "vendor/grammars/BrightScript.tmbundle"]
|
||||
path = vendor/grammars/BrightScript.tmbundle
|
||||
url = https://github.com/cmink/BrightScript.tmbundle
|
||||
[submodule "vendor/grammars/Stylus"]
|
||||
path = vendor/grammars/Stylus
|
||||
url = https://github.com/billymoon/Stylus
|
||||
[submodule "vendor/grammars/asciidoc.tmbundle"]
|
||||
path = vendor/grammars/asciidoc.tmbundle
|
||||
url = https://github.com/zuckschwerdt/asciidoc.tmbundle
|
||||
[submodule "vendor/grammars/sublime-text-pig-latin"]
|
||||
path = vendor/grammars/sublime-text-pig-latin
|
||||
url = https://github.com/goblindegook/sublime-text-pig-latin
|
||||
|
||||
CONTRIBUTING.md
@@ -1,41 +1,81 @@
## Contributing
# Contributing

The majority of contributions won't need to touch any Ruby code at all. The [master language list][languages] is just a YAML configuration file.
Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. The majority of contributions won't need to touch any Ruby code at all.

Almost all bug fixes or new language additions should come with some additional code samples. Just drop them under [`samples/`][samples] in the correct subdirectory and our test suite will automatically test them. In most cases you shouldn't need to add any new assertions.
## Adding a language

### My code is detected as the wrong language
We try only to add languages once they have some usage on GitHub. In most cases we prefer that languages be in use in hundreds of repositories before supporting them in Linguist.

This can usually be solved either by adding a new filename or file name extension to the language's entry in [`languages.yml`][languages] or adding more [samples][samples] for your language to the repository to make Linguist's classifier smarter.

### Syntax highlighting looks wrong

Assuming your code is being detected as the right language (see above), in most cases this is due to a bug in the language grammar rather than a bug in Linguist. [`grammars.yml`][grammars] lists all the grammars we use for syntax highlighting on github.com. Find the one corresponding to your code's programming language and submit a bug report upstream. If you can, try to reproduce the highlighting problem in the text editor that the grammar is designed for (TextMate, Sublime Text, or Atom) and include that information in your bug report.

You can also try to fix the bug yourself and submit a Pull Request. [This piece from TextMate's documentation](http://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://lightshow.githubapp.com).

Once the bug has been fixed upstream, please let us know and we'll pick it up for GitHub.

### I want to add support for the `X` programming language

Great! You'll need to:
To add support for a new language:

0. Add an entry for your language to [`languages.yml`][languages].
0. Add a grammar for your language. Please only add grammars that have a license that permits redistribution.
0. Add your grammar as a submodule: `git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar`.
0. Add your grammar to [`grammars.yml`][grammars] by running `script/convert-grammars --add vendor/grammars/MyGrammar`.
0. Add samples for your language to the [samples directory][samples].
0. Add your grammar as a submodule: `git submodule add https://github.com/JaneSmith/MyGrammar vendor/grammars/MyGrammar`.
0. Add your grammar to [`grammars.yml`][grammars] by running `script/convert-grammars --add vendor/grammars/MyGrammar`.
0. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:

0. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
0. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
0. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files (ping @arfon or @bkeepers for help with this) to ensure we're not misclassifying files.
0. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.

Remember, the goal here is to try to avoid false positives!

We try only to add languages once they have some usage on GitHub, so please note in-the-wild usage examples in your pull request. In most cases we prefer that languages already be in use in hundreds of repositories before supporting them in Linguist.
## Fixing a misclassified language

Most languages are detected by their file extension defined in [languages.yml][languages]. For disambiguating between files with common extensions, Linguist applies some [heuristics](/lib/linguist/heuristics.rb) and a [statistical classifier](lib/linguist/classifier.rb). This process can help differentiate between, for example, `.h` files which could be either C, C++, or Obj-C.

Misclassifications can often be solved by either adding a new filename or extension for the language or adding more [samples][samples] to make the classifier smarter.

## Fixing syntax highlighting

Syntax highlighting in GitHub is performed using TextMate-compatible grammars. These are the same grammars that TextMate, Sublime Text and Atom use. Every language in [languages.yml][languages] is mapped to its corresponding TM `scope`. This scope will be used when picking up a grammar for highlighting.

Assuming your code is being detected as the right language, in most cases this is due to a bug in the language grammar rather than a bug in Linguist. [`grammars.yml`][grammars] lists all the grammars we use for syntax highlighting on github.com. Find the one corresponding to your code's programming language and submit a bug report upstream. If you can, try to reproduce the highlighting problem in the text editor that the grammar is designed for (TextMate, Sublime Text, or Atom) and include that information in your bug report.

You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](http://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).

Once the bug has been fixed upstream, please let us know and we'll pick it up for GitHub.

## Testing

For development you are going to want to check out the source. To get it, clone the repo and run [Bundler](http://gembundler.com/) to install its dependencies.

    git clone https://github.com/github/linguist.git
    cd linguist/
    script/bootstrap

To run the tests:

    bundle exec rake test

Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.

Here's our current build status: [Build Status](http://travis-ci.org/github/linguist)

## Releasing

If you are the current maintainer of this gem:

0. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx`
0. Make sure your local dependencies are up to date: `script/bootstrap`
0. If grammar submodules have not been updated recently, update them: `git submodule update --remote && git commit -a`
0. Ensure that samples are updated: `bundle exec rake samples`
0. Ensure that tests are green: `bundle exec rake test`
0. Bump gem version in `lib/linguist/version.rb`, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
0. Make a PR to github/linguist, [like this](https://github.com/github/linguist/pull/1238).
0. Build a local gem: `bundle exec rake build_gem`
0. Test the gem:
  0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
  0. Install the new gem locally
  0. Test behavior locally, branch deploy, whatever needs to happen
0. Merge github/linguist PR
0. Tag and push: `git tag vx.xx.xx; git push --tags`
0. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem`

[grammars]: /grammars.yml
[languages]: /lib/linguist/languages.yml
[samples]: /samples
[new-issue]: https://github.com/github/linguist/issues/new
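One practical note on the language-addition checklist above: you can sanity-check what Linguist detects for a single file before opening a pull request. A minimal sketch, assuming the `github-linguist` gem and its dependencies are installed and the command is run from a checkout of this repository (the path is only illustrative):

```ruby
require 'linguist'

# Ask Linguist which language it detects for one file in the working tree.
blob = Linguist::FileBlob.new("lib/linguist.rb", Dir.pwd)
puts blob.language && blob.language.name  # => "Ruby"
```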
LICENSE: 2 changed lines
@@ -1,4 +1,4 @@
Copyright (c) 2011-2014 GitHub, Inc.
Copyright (c) 2011-2015 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
README.md: 225 changed lines
@@ -1,45 +1,80 @@
|
||||
# Linguist
|
||||
|
||||
We use this library at GitHub to detect blob languages, ignore binary files, suppress generated files in diffs, and generate language breakdown graphs.
|
||||
[issues]: https://github.com/github/linguist/issues
|
||||
[new-issue]: https://github.com/github/linguist/issues/new
|
||||
|
||||
Tips for filing issues and creating pull requests can be found in [`CONTRIBUTING.md`](/CONTRIBUTING.md).
|
||||
This library is used on GitHub.com to detect blob languages, ignore binary or vendored files, suppress generated files in diffs, and generate language breakdown graphs.
|
||||
|
||||
## Features
|
||||
See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md) before filing an issue or creating a pull request.
|
||||
|
||||
### Language detection
|
||||
## Troubleshooting
|
||||
|
||||
Linguist defines a list of all languages known to GitHub in a [yaml file](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml).
|
||||
### My repository is detected as the wrong language
|
||||
|
||||
Most languages are detected by their file extension. For disambiguating between files with common extensions, we first apply some common-sense heuristics to pick out obvious languages. After that, we use a [statistical classifier](https://github.com/github/linguist/blob/master/lib/linguist/classifier.rb).
This process can help us tell the difference between, for example, `.h` files which could be either C, C++, or Obj-C.
|
||||

|
||||
|
||||
```ruby
|
||||
The Language stats bar is built by aggregating the languages of each file in that repository. If it is reporting a language that you don't expect:
|
||||
|
||||
Linguist::FileBlob.new("lib/linguist.rb").language.name #=> "Ruby"
|
||||
0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
|
||||
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
|
||||
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
|
||||
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.
|
||||
|
||||
Linguist::FileBlob.new("bin/linguist").language.name #=> "Ruby"
|
||||
## Overrides
|
||||
|
||||
Linguist supports a number of different custom overrides strategies for language definitions and vendored paths.
|
||||
|
||||
### Using gitattributes
|
||||
|
||||
Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, and `linguist-vendored`.
|
||||
|
||||
```
|
||||
$ cat .gitattributes
|
||||
*.rb linguist-language=Java
|
||||
```
|
||||
|
||||
See [lib/linguist/language.rb](https://github.com/github/linguist/blob/master/lib/linguist/language.rb) and [lib/linguist/languages.yml](https://github.com/github/linguist/blob/master/lib/linguist/languages.yml).
|
||||
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Vendored files are also hidden by default in diffs on github.com.
|
||||
|
||||
### Syntax Highlighting
|
||||
Use the `linguist-vendored` attribute to vendor or un-vendor paths.
|
||||
|
||||
Syntax highlighting in GitHub is performed using TextMate-compatible grammars. These are the same grammars that TextMate, Sublime Text and Atom use.
|
||||
```
|
||||
$ cat .gitattributes
|
||||
special-vendored-path/* linguist-vendored
|
||||
jquery.js linguist-vendored=false
|
||||
```
|
||||
|
||||
Every language in `languages.yml` is mapped to its corresponding TM `scope`. This scope will be used when picking up a grammar for highlighting. **When adding a new language to Linguist, please add its corresponding scope too (assuming there's an existing TextMate bundle, Sublime Text package, or Atom package) so syntax highlighting works for it**.
|
||||
Similar to vendored files, Linguist excludes documentation files from your project's language stats. (Unlike vendored files, documentation files are displayed in diffs on github.com.) [lib/linguist/documentation.yml](lib/linguist/documentation.yml) lists common documentation paths and excludes them from the language statistics for your repository.
|
||||
|
||||
### Stats
|
||||
Use the `linguist-documentation` attribute to mark or unmark paths as documentation.
|
||||
|
||||
The Language stats bar that you see on every repository is built by aggregating the languages of each file in that repository. The top language in the graph determines the project's primary language.
|
||||
```
|
||||
$ cat .gitattributes
|
||||
project-docs/* linguist-documentation
|
||||
docs/formatter.rb linguist-documentation=false
|
||||
```
|
||||
|
||||
The repository stats API, accessed through `#languages`, can be used on a directory:
|
||||
### Using Emacs and Vim modelines
|
||||
|
||||
***API UPDATE***
|
||||
Alternatively, you can use Vim and Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com.
|
||||
|
||||
Since [Version 3.0.0](https://github.com/github/linguist/releases/tag/v3.0.0) Linguist expects a git repository (in the form of a [Rugged::Repository](https://github.com/libgit2/rugged#repositories)) to be passed when initializing `Linguist::Repository`.
|
||||
```
|
||||
Vim
|
||||
vim: set filetype=prolog:
|
||||
vim: set ft=cpp:
|
||||
|
||||
Emacs
|
||||
-*- mode: php;-*-
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Install the gem:
|
||||
|
||||
```
|
||||
$ gem install github-linguist
|
||||
```
|
||||
|
||||
Then use it in your application:
|
||||
|
||||
```ruby
|
||||
require 'rugged'
|
||||
@@ -51,147 +86,27 @@ project.language #=> "Ruby"
|
||||
project.languages #=> { "Ruby" => 119387 }
|
||||
```
|
||||
|
||||
These stats are also printed out by the `linguist` binary. You can use the
|
||||
These stats are also printed out by the `linguist` executable. You can use the
|
||||
`--breakdown` flag, and the binary will also output the breakdown of files by language.
|
||||
|
||||
You can try running `linguist` on the root directory in this repository itself:
|
||||
|
||||
$ bundle exec linguist --breakdown
|
||||
```
|
||||
$ bundle exec linguist --breakdown
|
||||
|
||||
100.00% Ruby
|
||||
100.00% Ruby
|
||||
|
||||
Ruby:
|
||||
Gemfile
|
||||
Rakefile
|
||||
bin/linguist
|
||||
github-linguist.gemspec
|
||||
lib/linguist.rb
|
||||
lib/linguist/blob_helper.rb
|
||||
lib/linguist/classifier.rb
|
||||
lib/linguist/file_blob.rb
|
||||
lib/linguist/generated.rb
|
||||
lib/linguist/heuristics.rb
|
||||
lib/linguist/language.rb
|
||||
lib/linguist/lazy_blob.rb
|
||||
lib/linguist/md5.rb
|
||||
lib/linguist/repository.rb
|
||||
lib/linguist/samples.rb
|
||||
lib/linguist/tokenizer.rb
|
||||
lib/linguist/version.rb
|
||||
test/test_blob.rb
|
||||
test/test_classifier.rb
|
||||
test/test_heuristics.rb
|
||||
test/test_language.rb
|
||||
test/test_md5.rb
|
||||
test/test_pedantic.rb
|
||||
test/test_repository.rb
|
||||
test/test_samples.rb
|
||||
test/test_tokenizer.rb
|
||||
|
||||
#### Ignore vendored files
|
||||
|
||||
Checking other code into your git repo is a common practice. But this often inflates your project's language stats and may even cause your project to be labeled as another language. We are able to identify some of these files and directories and exclude them.
|
||||
|
||||
```ruby
|
||||
Linguist::FileBlob.new("vendor/plugins/foo.rb").vendored? # => true
|
||||
Ruby:
|
||||
Gemfile
|
||||
Rakefile
|
||||
bin/linguist
|
||||
github-linguist.gemspec
|
||||
lib/linguist.rb
|
||||
…
|
||||
```
|
||||
|
||||
See [Linguist::BlobHelper#vendored?](https://github.com/github/linguist/blob/master/lib/linguist/blob_helper.rb) and [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml).
|
||||
## Contributing
|
||||
|
||||
#### Generated file detection
|
||||
Please check out our [contributing guidelines](CONTRIBUTING.md).
|
||||
|
||||
Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an extra bonus, these files are suppressed in diffs.
|
||||
|
||||
```ruby
|
||||
Linguist::FileBlob.new("underscore.min.js").generated? # => true
|
||||
```
|
||||
|
||||
See [Linguist::Generated#generated?](https://github.com/github/linguist/blob/master/lib/linguist/generated.rb).
|
||||
|
||||
## Overrides
|
||||
|
||||
Linguist supports custom overrides for language definitions and vendored paths. Add a `.gitattributes` file to your project using the keys `linguist-language` and `linguist-vendored` with the standard git-style path matchers for the files you want to override.
|
||||
|
||||
Please note that the overrides currently only affect the language statistics for a repository and not the syntax-highlighting of files.
|
||||
|
||||
```
|
||||
$ cat .gitattributes
|
||||
*.rb linguist-language=Java
|
||||
|
||||
$ linguist --breakdown
|
||||
100.00% Java
|
||||
|
||||
Java:
|
||||
ruby_file.rb
|
||||
```
|
||||
|
||||
By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Use the `linguist-vendored` attribute to vendor or un-vendor paths.
|
||||
|
||||
```
|
||||
$ cat .gitattributes
|
||||
special-vendored-path/* linguist-vendored
|
||||
jquery.js linguist-vendored=false
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
Github.com is usually running the latest version of the `github-linguist` gem that is released on [RubyGems.org](http://rubygems.org/gems/github-linguist).
|
||||
|
||||
But for development you are going to want to check out the source. To get it, clone the repo and run [Bundler](http://gembundler.com/) to install its dependencies.
|
||||
|
||||
git clone https://github.com/github/linguist.git
|
||||
cd linguist/
|
||||
script/bootstrap
|
||||
|
||||
To run the tests:
|
||||
|
||||
bundle exec rake test
|
||||
|
||||
### A note on language extensions
|
||||
|
||||
Linguist has a number of methods available to it for identifying the language of a particular file. The initial lookup is based upon the extension of the file; possible file extensions are defined in an array called `extensions`. Take a look at this example for `Perl`:
|
||||
|
||||
```
|
||||
Perl:
|
||||
type: programming
|
||||
ace_mode: perl
|
||||
color: "#0298c3"
|
||||
extensions:
|
||||
- .pl
|
||||
- .PL
|
||||
- .perl
|
||||
- .ph
|
||||
- .plx
|
||||
- .pm
|
||||
- .pod
|
||||
- .psgi
|
||||
interpreters:
|
||||
- perl
|
||||
```
|
||||
Any of the extensions defined are valid but the first in this array should be the most popular.
|
||||
|
||||
### Testing
|
||||
|
||||
Sometimes getting the tests running can be too much work, especially if you don't have much Ruby experience. It's okay: be lazy and let our build bot [Travis](http://travis-ci.org/#!/github/linguist) run the tests for you. Just open a pull request and the bot will start cranking away.
|
||||
|
||||
Here's our current build status, which is hopefully green: [Build Status](http://travis-ci.org/github/linguist)
|
||||
|
||||
### Releasing
|
||||
|
||||
If you are the current maintainer of this gem:
|
||||
|
||||
0. Create a branch for the release: `git checkout -b cut-release-vxx.xx.xx`
|
||||
0. Make sure your local dependencies are up to date: `script/bootstrap`
|
||||
0. If grammar submodules have not been updated recently, update them: `git submodule update --remote && git commit -a`
|
||||
0. Ensure that samples are updated: `bundle exec rake samples`
|
||||
0. Ensure that tests are green: `bundle exec rake test`
|
||||
0. Bump gem version in `lib/linguist/version.rb`. For example, [like this](https://github.com/github/linguist/commit/8d2ea90a5ba3b2fe6e1508b7155aa4632eea2985).
|
||||
0. Make a PR to github/linguist. For example, [#1238](https://github.com/github/linguist/pull/1238).
|
||||
0. Build a local gem: `bundle exec rake build_gem`
|
||||
0. Testing:
|
||||
0. Bump the Gemfile and Gemfile.lock versions for an app which relies on this gem
|
||||
0. Install the new gem locally
|
||||
0. Test behavior locally, branch deploy, whatever needs to happen
|
||||
0. Merge github/linguist PR
|
||||
0. Tag and push: `git tag vx.xx.xx; git push --tags`
|
||||
0. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem`
|
||||
##
|
||||
|
||||
grammars.yml: 65 changed lines
@@ -24,28 +24,49 @@ vendor/grammars/Agda.tmbundle:
|
||||
- source.agda
|
||||
vendor/grammars/Alloy.tmbundle:
|
||||
- source.alloy
|
||||
vendor/grammars/AutoHotkey:
|
||||
vendor/grammars/AutoHotkey/:
|
||||
- source.ahk
|
||||
vendor/grammars/BrightScript.tmbundle/:
|
||||
- source.brightauthorproject
|
||||
- source.brightscript
|
||||
vendor/grammars/CLIPS-sublime:
|
||||
- source.clips
|
||||
vendor/grammars/ColdFusion:
|
||||
- source.cfscript
|
||||
- source.cfscript.cfc
|
||||
- text.cfml.basic
|
||||
- text.html.cfm
|
||||
vendor/grammars/Creole:
|
||||
- text.html.creole
|
||||
vendor/grammars/Docker.tmbundle:
|
||||
- source.dockerfile
|
||||
vendor/grammars/Elm.tmLanguage:
|
||||
- source.elm
|
||||
vendor/grammars/G-Code/:
|
||||
- source.LS
|
||||
- source.MCPOST
|
||||
- source.MOD
|
||||
- source.apt
|
||||
- source.gcode
|
||||
vendor/grammars/GDScript-sublime/:
|
||||
- source.gdscript
|
||||
vendor/grammars/Handlebars:
|
||||
- text.html.handlebars
|
||||
vendor/grammars/IDL-Syntax:
|
||||
- source.webidl
|
||||
vendor/grammars/InnoSetup/:
|
||||
- source.inno
|
||||
vendor/grammars/Isabelle.tmbundle:
|
||||
- source.isabelle.root
|
||||
- source.isabelle.theory
|
||||
vendor/grammars/JSyntax/:
|
||||
- source.j
|
||||
vendor/grammars/Julia.tmbundle:
|
||||
- source.julia
|
||||
vendor/grammars/LiveScript.tmbundle:
|
||||
- source.livescript
|
||||
vendor/grammars/Modelica/:
|
||||
- source.modelica
|
||||
vendor/grammars/NSIS:
|
||||
- source.nsis
|
||||
vendor/grammars/NimLime:
|
||||
@@ -54,6 +75,8 @@ vendor/grammars/NimLime:
|
||||
- source.nimcfg
|
||||
vendor/grammars/PHP-Twig.tmbundle:
|
||||
- text.html.twig
|
||||
vendor/grammars/PogoScript.tmbundle/:
|
||||
- source.pogoscript
|
||||
vendor/grammars/RDoc.tmbundle:
|
||||
- text.rdoc
|
||||
vendor/grammars/Racket:
|
||||
@@ -68,6 +91,8 @@ vendor/grammars/Slash.tmbundle:
|
||||
vendor/grammars/Stata.tmbundle:
|
||||
- source.mata
|
||||
- source.stata
|
||||
vendor/grammars/Stylus/:
|
||||
- source.stylus
|
||||
vendor/grammars/Sublime-Coq:
|
||||
- source.coq
|
||||
vendor/grammars/Sublime-HTTP:
|
||||
@@ -94,8 +119,14 @@ vendor/grammars/Sublime-VimL:
|
||||
- source.viml
|
||||
vendor/grammars/SublimeBrainfuck:
|
||||
- source.bf
|
||||
vendor/grammars/SublimePapyrus/:
|
||||
- source.compiled-papyrus
|
||||
- source.papyrus
|
||||
- source.papyrus-assembly
|
||||
vendor/grammars/SublimeXtend:
|
||||
- source.xtend
|
||||
vendor/grammars/TXL/:
|
||||
- source.txl
|
||||
vendor/grammars/Textmate-Gosu-Bundle:
|
||||
- source.gosu.2
|
||||
vendor/grammars/VBDotNetSyntax:
|
||||
@@ -119,6 +150,8 @@ vendor/grammars/apache.tmbundle:
|
||||
- source.apache-config.mod_perl
|
||||
vendor/grammars/applescript.tmbundle:
|
||||
- source.applescript
|
||||
vendor/grammars/asciidoc.tmbundle/:
|
||||
- text.html.asciidoc
|
||||
vendor/grammars/asp.tmbundle:
|
||||
- source.asp
|
||||
- text.html.asp
|
||||
@@ -128,6 +161,8 @@ vendor/grammars/assembly.tmbundle:
|
||||
vendor/grammars/atom-salt:
|
||||
- source.python.salt
|
||||
- source.yaml.salt
|
||||
vendor/grammars/ats.sublime:
|
||||
- source.ats
|
||||
vendor/grammars/autoitv3-tmbundle:
|
||||
- source.autoit.3
|
||||
vendor/grammars/awk-sublime:
|
||||
@@ -170,6 +205,8 @@ vendor/grammars/dart-sublime-bundle:
|
||||
- source.dart
|
||||
- source.pubspec
|
||||
- text.dart-doccomments
|
||||
vendor/grammars/desktop.tmbundle:
|
||||
- source.desktop
|
||||
vendor/grammars/diff.tmbundle:
|
||||
- source.diff
|
||||
vendor/grammars/dylan.tmbundle:
|
||||
@@ -178,6 +215,8 @@ vendor/grammars/dylan.tmbundle:
|
||||
- source.makegen
|
||||
vendor/grammars/ebundles/Bundles/MSDOS batch file.tmbundle:
|
||||
- source.dosbatch
|
||||
vendor/grammars/ec.tmbundle/:
|
||||
- source.c.ec
|
||||
vendor/grammars/eiffel.tmbundle:
|
||||
- source.eiffel
|
||||
vendor/grammars/elixir-tmbundle:
|
||||
@@ -199,12 +238,16 @@ vendor/grammars/fortran.tmbundle:
|
||||
- source.fortran.modern
|
||||
vendor/grammars/fsharpbinding:
|
||||
- source.fsharp
|
||||
vendor/grammars/gap-tmbundle/:
|
||||
- source.gap
|
||||
vendor/grammars/gettext.tmbundle:
|
||||
- source.po
|
||||
vendor/grammars/gnuplot-tmbundle:
|
||||
- source.gnuplot
|
||||
vendor/grammars/go-tmbundle:
|
||||
- source.go
|
||||
vendor/grammars/grace-tmbundle/:
|
||||
- source.grace
|
||||
vendor/grammars/gradle.tmbundle:
|
||||
- source.groovy.gradle
|
||||
vendor/grammars/graphviz.tmbundle:
|
||||
@@ -289,6 +332,8 @@ vendor/grammars/less.tmbundle:
|
||||
- source.css.less
|
||||
vendor/grammars/lilypond.tmbundle:
|
||||
- source.lilypond
|
||||
vendor/grammars/liquid.tmbundle:
|
||||
- text.html.liquid
|
||||
vendor/grammars/lisp.tmbundle:
|
||||
- source.lisp
|
||||
vendor/grammars/llvm.tmbundle:
|
||||
@@ -308,6 +353,8 @@ vendor/grammars/matlab.tmbundle:
|
||||
- source.octave
|
||||
vendor/grammars/maven.tmbundle:
|
||||
- text.xml.pom
|
||||
vendor/grammars/mediawiki.tmbundle/:
|
||||
- text.html.mediawiki
|
||||
vendor/grammars/mercury-tmlanguage:
|
||||
- source.mercury
|
||||
vendor/grammars/monkey.tmbundle:
|
||||
@@ -348,7 +395,7 @@ vendor/grammars/pike-textmate:
|
||||
- source.pike
|
||||
vendor/grammars/postscript.tmbundle:
|
||||
- source.postscript
|
||||
vendor/grammars/powershell.tmbundle:
|
||||
vendor/grammars/powershell:
|
||||
- source.powershell
|
||||
vendor/grammars/processing.tmbundle:
|
||||
- source.processing
|
||||
@@ -400,6 +447,8 @@ vendor/grammars/standard-ml.tmbundle:
|
||||
- source.ml
|
||||
vendor/grammars/sublime-MuPAD:
|
||||
- source.mupad
|
||||
vendor/grammars/sublime-apl/:
|
||||
- source.apl
|
||||
vendor/grammars/sublime-befunge:
|
||||
- source.befunge
|
||||
vendor/grammars/sublime-better-typescript:
|
||||
@@ -411,6 +460,8 @@ vendor/grammars/sublime-cirru:
|
||||
vendor/grammars/sublime-glsl:
|
||||
- source.essl
|
||||
- source.glsl
|
||||
vendor/grammars/sublime-golo/:
|
||||
- source.golo
|
||||
vendor/grammars/sublime-idris:
|
||||
- source.idris
|
||||
vendor/grammars/sublime-mask:
|
||||
@@ -419,14 +470,24 @@ vendor/grammars/sublime-nginx:
|
||||
- source.nginx
|
||||
vendor/grammars/sublime-nix:
|
||||
- source.nix
|
||||
vendor/grammars/sublime-opal/:
|
||||
- source.opal
|
||||
- source.opalsysdefs
|
||||
vendor/grammars/sublime-robot-plugin:
|
||||
- text.robot
|
||||
vendor/grammars/sublime-rust:
|
||||
- source.rust
|
||||
vendor/grammars/sublime-sourcepawn:
|
||||
- source.sp
|
||||
vendor/grammars/sublime-spintools/:
|
||||
- source.regexp.spin
|
||||
- source.spin
|
||||
vendor/grammars/sublime-tea:
|
||||
- source.tea
|
||||
vendor/grammars/sublime-text-ox/:
|
||||
- source.ox
|
||||
vendor/grammars/sublime-text-pig-latin/:
|
||||
- source.pig_latin
|
||||
vendor/grammars/sublime_cobol:
|
||||
- source.acucobol
|
||||
- source.cobol
|
||||
|
||||
lib/linguist/blob_helper.rb
@@ -236,6 +236,21 @@ module Linguist
      path =~ VendoredRegexp ? true : false
    end

    documentation_paths = YAML.load_file(File.expand_path("../documentation.yml", __FILE__))
    DocumentationRegexp = Regexp.new(documentation_paths.join('|'))

    # Public: Is the blob in a documentation directory?
    #
    # Documentation files are ignored by language statistics.
    #
    # See "documentation.yml" for a list of documentation conventions that match
    # this pattern.
    #
    # Return true or false
    def documentation?
      name =~ DocumentationRegexp ? true : false
    end

    # Public: Get each line of data
    #
    # Requires Blob#data
@@ -317,5 +332,15 @@ module Linguist
    def tm_scope
      language && language.tm_scope
    end

    DETECTABLE_TYPES = [:programming, :markup].freeze

    # Internal: Should this blob be included in repository language statistics?
    def include_in_language_stats?
      !vendored? &&
        !documentation? &&
        !generated? &&
        language && DETECTABLE_TYPES.include?(language.type)
    end
  end
end
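A short sketch of how the new predicates above combine, assuming the gem from this branch is installed and the snippet is run from a checkout of the repository (the exact return values depend on the checked-out tree):

```ruby
require 'linguist'

readme = Linguist::FileBlob.new("README.md", Dir.pwd)
readme.documentation?              # => true, "(^|/)README(\.|$)" is listed in documentation.yml

source = Linguist::FileBlob.new("lib/linguist.rb", Dir.pwd)
source.include_in_language_stats?  # => true: not vendored, documentation or generated, and Ruby is a :programming type
```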
lib/linguist/documentation.yml (new file, 23 lines)
@@ -0,0 +1,23 @@
# Documentation files and directories are excluded from language
# statistics.
#
# Lines in this file are Regexps that are matched against the file
# pathname.
#
# Please add additional test coverage to
# `test/test_blob.rb#test_documentation` if you make any changes.

## Documentation directories ##

- ^docs?/
- (^|/)[Dd]ocumentation/
- (^|/)javadoc/
- ^man/

## Documentation files ##

- (^|/)CONTRIBUTING(\.|$)
- (^|/)COPYING(\.|$)
- (^|/)INSTALL(\.|$)
- (^|/)LICEN[CS]E(\.|$)
- (^|/)README(\.|$)
lib/linguist/generated.rb
@@ -62,6 +62,7 @@ module Linguist
        generated_parser? ||
        generated_net_docfile? ||
        generated_postscript? ||
        generated_protocol_buffer_go? ||
        generated_protocol_buffer? ||
        generated_jni_header? ||
        vcr_cassette?
@@ -202,6 +203,13 @@ module Linguist
      creator.include?("ImageMagick")
    end

    def generated_protocol_buffer_go?
      return false unless extname == '.go'
      return false unless lines.count > 1

      return lines[0].include?("Code generated by protoc-gen-go")
    end

    # Internal: Is the blob a C++, Java or Python source file generated by the
    # Protocol Buffer compiler?
    #
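To illustrate the new check, here is a minimal sketch of how a protoc-generated Go file would be flagged; the file name and contents are invented for the example:

```ruby
require 'linguist'

# First line carries the protoc-gen-go marker, so the blob counts as generated.
data = "// Code generated by protoc-gen-go.\n// source: example.proto\npackage example\n"
Linguist::Generated.generated?("example.pb.go", data)  # => true
```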
||||
@@ -61,6 +61,9 @@ module Linguist
|
||||
@heuristic.call(data)
|
||||
end
|
||||
|
||||
# Common heuristics
|
||||
ObjectiveCRegex = /^[ \t]*@(interface|class|protocol|property|end|synchronised|selector|implementation)\b/
|
||||
|
||||
disambiguate "BitBake", "BlitzBasic" do |data|
|
||||
if /^\s*; /.match(data) || data.include?("End Function")
|
||||
Language["BlitzBasic"]
|
||||
@@ -78,7 +81,7 @@ module Linguist
|
||||
end
|
||||
|
||||
disambiguate "Objective-C", "C++", "C" do |data|
|
||||
if (/^[ \t]*@(interface|class|protocol|property|end|synchronised|selector|implementation)\b/.match(data))
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
@@ -89,7 +92,7 @@ module Linguist
|
||||
disambiguate "Perl", "Perl6", "Prolog" do |data|
|
||||
if data.include?("use v6")
|
||||
Language["Perl6"]
|
||||
elsif data.include?("use strict")
|
||||
elsif data.match(/use strict|use\s+v?5\./)
|
||||
Language["Perl"]
|
||||
elsif data.include?(":-")
|
||||
Language["Prolog"]
|
||||
@@ -112,6 +115,15 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "GAP", "Scilab" do |data|
|
||||
if (data.include?("gap> "))
|
||||
Language["GAP"]
|
||||
# Heads up - we don't usually write heuristics like this (with no regex match)
|
||||
else
|
||||
Language["Scilab"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Common Lisp", "OpenCL", "Cool" do |data|
|
||||
if data.include?("(defun ")
|
||||
Language["Common Lisp"]
|
||||
@@ -138,14 +150,20 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "AsciiDoc", "AGS Script" do |data|
|
||||
Language["AsciiDoc"] if /^=+(\s|\n)/.match(data)
|
||||
disambiguate "AsciiDoc", "AGS Script", "Public Key" do |data|
|
||||
if /^[=-]+(\s|\n)|{{[A-Za-z]/.match(data)
|
||||
Language["AsciiDoc"]
|
||||
elsif /^(\/\/.+|((import|export)\s+)?(function|int|float|char)\s+((room|repeatedly|on|game)_)?([A-Za-z]+[A-Za-z_0-9]+)\s*[;\(])/.match(data)
|
||||
Language["AGS Script"]
|
||||
elsif /^-----BEGIN/.match(data)
|
||||
Language["Public Key"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "FORTRAN", "Forth" do |data|
|
||||
if /^: /.match(data)
|
||||
Language["Forth"]
|
||||
elsif /^([c*][^a-z]| (subroutine|program)\s|!)/i.match(data)
|
||||
elsif /^([c*][^a-z]| (subroutine|program)\s|\s*!)/i.match(data)
|
||||
Language["FORTRAN"]
|
||||
end
|
||||
end
|
||||
@@ -160,6 +178,20 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "M", "Mathematica", "Matlab", "Mercury", "Objective-C" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif data.include?(":- module")
|
||||
Language["Mercury"]
|
||||
elsif /^\s*;/.match(data)
|
||||
Language["M"]
|
||||
elsif /^\s*\(\*/.match(data)
|
||||
Language["Mathematica"]
|
||||
elsif /^\s*%/.match(data)
|
||||
Language["Matlab"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Gosu", "JavaScript" do |data|
|
||||
Language["Gosu"] if /^uses java\./.match(data)
|
||||
end
|
||||
@@ -172,6 +204,14 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "Common Lisp", "NewLisp" do |data|
|
||||
if /^\s*\((defun|in-package|defpackage) /.match(data)
|
||||
Language["Common Lisp"]
|
||||
elsif /^\s*\(define /.match(data)
|
||||
Language["NewLisp"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate "TypeScript", "XML" do |data|
|
||||
if data.include?("<TS ")
|
||||
Language["XML"]
|
||||
|
||||
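The heuristics above run as one strategy in the detection pipeline. A rough sketch of exercising the Perl rule directly, assuming the gem from this branch and its internal `Heuristics.call(blob, candidates)` interface (a throwaway file is written purely for the example):

```ruby
require 'linguist'

File.write("hello.pl", "use strict;\nprint \"hi\\n\";\n")
blob = Linguist::FileBlob.new("hello.pl", Dir.pwd)
candidates = [Linguist::Language["Perl"], Linguist::Language["Perl6"], Linguist::Language["Prolog"]]

# "use strict" matches /use strict|use\s+v?5\./, so Perl wins the disambiguation.
Linguist::Heuristics.call(blob, candidates).map(&:name)  # => ["Perl"]
```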
@@ -11,6 +11,7 @@ require 'linguist/samples'
|
||||
require 'linguist/file_blob'
|
||||
require 'linguist/blob_helper'
|
||||
require 'linguist/strategy/filename'
|
||||
require 'linguist/strategy/modeline'
|
||||
require 'linguist/shebang'
|
||||
|
||||
module Linguist
|
||||
@@ -31,13 +32,6 @@ module Linguist
|
||||
# Valid Languages types
|
||||
TYPES = [:data, :markup, :programming, :prose]
|
||||
|
||||
# Names of non-programming languages that we will still detect
|
||||
#
|
||||
# Returns an array
|
||||
def self.detectable_markup
|
||||
["CSS", "Less", "Sass", "SCSS", "Stylus", "TeX"]
|
||||
end
|
||||
|
||||
# Detect languages by a specific type
|
||||
#
|
||||
# type - A symbol that exists within TYPES
|
||||
@@ -94,8 +88,9 @@ module Linguist
|
||||
end
|
||||
|
||||
STRATEGIES = [
|
||||
Linguist::Strategy::Filename,
|
||||
Linguist::Strategy::Modeline,
|
||||
Linguist::Shebang,
|
||||
Linguist::Strategy::Filename,
|
||||
Linguist::Heuristics,
|
||||
Linguist::Classifier
|
||||
]
|
||||
@@ -155,7 +150,7 @@ module Linguist
|
||||
# Language.find_by_alias('cpp')
|
||||
# # => #<Language name="C++">
|
||||
#
|
||||
# Returns the Lexer or nil if none was found.
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_alias(name)
|
||||
name && @alias_index[name.downcase]
|
||||
end
|
||||
@@ -219,7 +214,7 @@ module Linguist
|
||||
end
|
||||
|
||||
|
||||
# Public: Look up Language by its name or lexer.
|
||||
# Public: Look up Language by its name.
|
||||
#
|
||||
# name - The String name of the Language
|
||||
#
|
||||
@@ -243,7 +238,7 @@ module Linguist
|
||||
#
|
||||
# This list is configured in "popular.yml".
|
||||
#
|
||||
# Returns an Array of Lexers.
|
||||
# Returns an Array of Languages.
|
||||
def self.popular
|
||||
@popular ||= all.select(&:popular?).sort_by { |lang| lang.name.downcase }
|
||||
end
|
||||
@@ -255,7 +250,7 @@ module Linguist
|
||||
#
|
||||
# This list is created from all the languages not listed in "popular.yml".
|
||||
#
|
||||
# Returns an Array of Lexers.
|
||||
# Returns an Array of Languages.
|
||||
def self.unpopular
|
||||
@unpopular ||= all.select(&:unpopular?).sort_by { |lang| lang.name.downcase }
|
||||
end
|
||||
@@ -375,11 +370,6 @@ module Linguist
|
||||
# Returns the name String
|
||||
attr_reader :search_term
|
||||
|
||||
# Public: Get Lexer
|
||||
#
|
||||
# Returns the Lexer
|
||||
attr_reader :lexer
|
||||
|
||||
# Public: Get the name of a TextMate-compatible scope
|
||||
#
|
||||
# Returns the scope
|
||||
@@ -495,16 +485,6 @@ module Linguist
|
||||
@searchable
|
||||
end
|
||||
|
||||
# Public: Highlight syntax of text
|
||||
#
|
||||
# text - String of code to be highlighted
|
||||
# options - A Hash of options (defaults to {})
|
||||
#
|
||||
# Returns html String
|
||||
def colorize(text, options = {})
|
||||
lexer.highlight(text, options)
|
||||
end
|
||||
|
||||
# Public: Return name as String representation
|
||||
def to_s
|
||||
name
|
||||
@@ -580,7 +560,6 @@ module Linguist
|
||||
:color => options['color'],
|
||||
:type => options['type'],
|
||||
:aliases => options['aliases'],
|
||||
:lexer => options['lexer'],
|
||||
:tm_scope => options['tm_scope'],
|
||||
:ace_mode => options['ace_mode'],
|
||||
:wrap => options['wrap'],
|
||||
|
||||
@@ -54,13 +54,14 @@ APL:
|
||||
extensions:
|
||||
- .apl
|
||||
- .dyalog
|
||||
tm_scope: none
|
||||
tm_scope: source.apl
|
||||
ace_mode: text
|
||||
|
||||
ASP:
|
||||
type: programming
|
||||
color: "#6a40fd"
|
||||
search_term: aspx-vb
|
||||
tm_scope: text.html.asp
|
||||
aliases:
|
||||
- aspx
|
||||
- aspx-vb
|
||||
@@ -81,10 +82,9 @@ ATS:
|
||||
- ats2
|
||||
extensions:
|
||||
- .dats
|
||||
- .atxt
|
||||
- .hats
|
||||
- .sats
|
||||
tm_scope: source.ocaml
|
||||
tm_scope: source.ats
|
||||
ace_mode: ocaml
|
||||
|
||||
ActionScript:
|
||||
@@ -186,7 +186,7 @@ AsciiDoc:
|
||||
- .asciidoc
|
||||
- .adoc
|
||||
- .asc
|
||||
tm_scope: none
|
||||
tm_scope: text.html.asciidoc
|
||||
|
||||
AspectJ:
|
||||
type: programming
|
||||
@@ -198,7 +198,7 @@ AspectJ:
|
||||
|
||||
Assembly:
|
||||
type: programming
|
||||
color: "#a67219"
|
||||
color: "#6E4C13"
|
||||
search_term: nasm
|
||||
aliases:
|
||||
- nasm
|
||||
@@ -206,6 +206,7 @@ Assembly:
|
||||
- .asm
|
||||
- .ASM
|
||||
- .a51
|
||||
- .nasm
|
||||
tm_scope: source.asm.x86
|
||||
ace_mode: assembly_x86
|
||||
|
||||
@@ -335,7 +336,7 @@ Brightscript:
|
||||
type: programming
|
||||
extensions:
|
||||
- .brs
|
||||
tm_scope: none
|
||||
tm_scope: source.brightscript
|
||||
ace_mode: text
|
||||
|
||||
Bro:
|
||||
@@ -355,6 +356,8 @@ C:
|
||||
- .h
|
||||
- .idc
|
||||
- .w
|
||||
interpreters:
|
||||
- tcc
|
||||
ace_mode: c_cpp
|
||||
|
||||
C#:
|
||||
@@ -381,6 +384,7 @@ C++:
|
||||
- .cpp
|
||||
- .c++
|
||||
- .cc
|
||||
- .cp
|
||||
- .cxx
|
||||
- .h
|
||||
- .h++
|
||||
@@ -413,7 +417,7 @@ CLIPS:
|
||||
type: programming
|
||||
extensions:
|
||||
- .clp
|
||||
tm_scope: none
|
||||
tm_scope: source.clips
|
||||
ace_mode: text
|
||||
|
||||
CMake:
|
||||
@@ -437,6 +441,8 @@ COBOL:
|
||||
ace_mode: cobol
|
||||
|
||||
CSS:
|
||||
type: markup
|
||||
tm_scope: source.css
|
||||
ace_mode: css
|
||||
color: "#563d7c"
|
||||
extensions:
|
||||
@@ -622,7 +628,7 @@ Creole:
|
||||
wrap: true
|
||||
extensions:
|
||||
- .creole
|
||||
tm_scope: none
|
||||
tm_scope: text.html.creole
|
||||
ace_mode: text
|
||||
|
||||
Crystal:
|
||||
@@ -811,9 +817,11 @@ Emacs Lisp:
|
||||
- emacs
|
||||
filenames:
|
||||
- .emacs
|
||||
- .emacs.desktop
|
||||
extensions:
|
||||
- .el
|
||||
- .emacs
|
||||
- .emacs.desktop
|
||||
ace_mode: lisp
|
||||
|
||||
EmberScript:
|
||||
@@ -915,6 +923,7 @@ Forth:
|
||||
color: "#341708"
|
||||
extensions:
|
||||
- .fth
|
||||
- .4TH
|
||||
- .4th
|
||||
- .F
|
||||
- .f
|
||||
@@ -939,7 +948,7 @@ G-code:
|
||||
- .g
|
||||
- .gco
|
||||
- .gcode
|
||||
tm_scope: none
|
||||
tm_scope: source.gcode
|
||||
ace_mode: gcode
|
||||
|
||||
GAMS:
|
||||
@@ -956,7 +965,8 @@ GAP:
|
||||
- .gap
|
||||
- .gd
|
||||
- .gi
|
||||
tm_scope: none
|
||||
- .tst
|
||||
tm_scope: source.gap
|
||||
ace_mode: text
|
||||
|
||||
GAS:
|
||||
@@ -972,7 +982,7 @@ GDScript:
|
||||
type: programming
|
||||
extensions:
|
||||
- .gd
|
||||
tm_scope: none
|
||||
tm_scope: source.gdscript
|
||||
ace_mode: text
|
||||
|
||||
GLSL:
|
||||
@@ -1070,7 +1080,7 @@ Golo:
|
||||
color: "#f6a51f"
|
||||
extensions:
|
||||
- .golo
|
||||
tm_scope: none
|
||||
tm_scope: source.golo
|
||||
ace_mode: text
|
||||
|
||||
Gosu:
|
||||
@@ -1088,7 +1098,7 @@ Grace:
|
||||
type: programming
|
||||
extensions:
|
||||
- .grace
|
||||
tm_scope: none
|
||||
tm_scope: source.grace
|
||||
ace_mode: text
|
||||
|
||||
Gradle:
|
||||
@@ -1167,6 +1177,7 @@ HTML:
|
||||
type: markup
|
||||
tm_scope: text.html.basic
|
||||
ace_mode: html
|
||||
color: "#e44b23"
|
||||
aliases:
|
||||
- xhtml
|
||||
extensions:
|
||||
@@ -1236,6 +1247,7 @@ Handlebars:
|
||||
type: markup
|
||||
aliases:
|
||||
- hbs
|
||||
- htmlbars
|
||||
extensions:
|
||||
- .handlebars
|
||||
- .hbs
|
||||
@@ -1340,7 +1352,7 @@ Inform 7:
|
||||
Inno Setup:
|
||||
extensions:
|
||||
- .iss
|
||||
tm_scope: none
|
||||
tm_scope: source.inno
|
||||
ace_mode: text
|
||||
|
||||
Io:
|
||||
@@ -1369,9 +1381,10 @@ Isabelle:
|
||||
|
||||
J:
|
||||
type: programming
|
||||
color: "#2d8abd"
|
||||
extensions:
|
||||
- .ijs
|
||||
tm_scope: none
|
||||
tm_scope: source.j
|
||||
ace_mode: text
|
||||
|
||||
JSON:
|
||||
@@ -1596,7 +1609,7 @@ Liquid:
|
||||
type: markup
|
||||
extensions:
|
||||
- .liquid
|
||||
tm_scope: none
|
||||
tm_scope: text.html.liquid
|
||||
ace_mode: liquid
|
||||
|
||||
Literate Agda:
|
||||
@@ -1801,7 +1814,7 @@ MediaWiki:
|
||||
wrap: true
|
||||
extensions:
|
||||
- .mediawiki
|
||||
tm_scope: none
|
||||
tm_scope: text.html.mediawiki
|
||||
ace_mode: text
|
||||
|
||||
Mercury:
|
||||
@@ -1835,6 +1848,13 @@ Mirah:
|
||||
tm_scope: source.ruby
|
||||
ace_mode: ruby
|
||||
|
||||
Modelica:
|
||||
type: programming
|
||||
extensions:
|
||||
- .mo
|
||||
tm_scope: source.modelica
|
||||
ace_mode: text
|
||||
|
||||
Monkey:
|
||||
type: programming
|
||||
extensions:
|
||||
@@ -1883,6 +1903,19 @@ NetLogo:
|
||||
tm_scope: source.lisp
|
||||
ace_mode: lisp
|
||||
|
||||
NewLisp:
|
||||
type: programming
|
||||
lexer: NewLisp
|
||||
color: "#eedd66"
|
||||
extensions:
|
||||
- .nl
|
||||
- .lisp
|
||||
- .lsp
|
||||
interpreters:
|
||||
- newlisp
|
||||
tm_scope: source.lisp
|
||||
ace_mode: lisp
|
||||
|
||||
Nginx:
|
||||
type: markup
|
||||
extensions:
|
||||
@@ -2026,7 +2059,7 @@ Opal:
|
||||
color: "#f7ede0"
|
||||
extensions:
|
||||
- .opal
|
||||
tm_scope: none
|
||||
tm_scope: source.opal
|
||||
ace_mode: text
|
||||
|
||||
OpenCL:
|
||||
@@ -2055,7 +2088,7 @@ OpenSCAD:
|
||||
extensions:
|
||||
- .scad
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
ace_mode: scad
|
||||
|
||||
Org:
|
||||
type: prose
|
||||
@@ -2071,7 +2104,7 @@ Ox:
|
||||
- .ox
|
||||
- .oxh
|
||||
- .oxo
|
||||
tm_scope: none
|
||||
tm_scope: source.ox
|
||||
ace_mode: text
|
||||
|
||||
Oxygene:
|
||||
@@ -2132,7 +2165,7 @@ Papyrus:
|
||||
color: "#6600cc"
|
||||
extensions:
|
||||
- .psc
|
||||
tm_scope: none
|
||||
tm_scope: source.papyrus
|
||||
ace_mode: text
|
||||
|
||||
Parrot:
|
||||
@@ -2224,7 +2257,7 @@ PigLatin:
|
||||
color: "#fcd7de"
|
||||
extensions:
|
||||
- .pig
|
||||
tm_scope: none
|
||||
tm_scope: source.pig_latin
|
||||
ace_mode: text
|
||||
|
||||
Pike:
|
||||
@@ -2250,7 +2283,7 @@ PogoScript:
|
||||
color: "#d80074"
|
||||
extensions:
|
||||
- .pogo
|
||||
tm_scope: none
|
||||
tm_scope: source.pogoscript
|
||||
ace_mode: text
|
||||
|
||||
PostScript:
|
||||
@@ -2297,7 +2330,7 @@ Propeller Spin:
|
||||
color: "#2b446d"
|
||||
extensions:
|
||||
- .spin
|
||||
tm_scope: none
|
||||
tm_scope: source.spin
|
||||
ace_mode: text
|
||||
|
||||
Protocol Buffer:
|
||||
@@ -2379,6 +2412,8 @@ Python:
|
||||
- python
|
||||
- python2
|
||||
- python3
|
||||
aliases:
|
||||
- rusthon
|
||||
|
||||
Python traceback:
|
||||
type: data
|
||||
@@ -2426,7 +2461,6 @@ R:
|
||||
|
||||
RAML:
|
||||
type: data
|
||||
lexer: YAML
|
||||
ace_mode: yaml
|
||||
tm_scope: source.yaml
|
||||
color: "#77d9fb"
|
||||
@@ -2656,6 +2690,13 @@ STON:
|
||||
tm_scope: source.smalltalk
|
||||
ace_mode: text
|
||||
|
||||
SVG:
|
||||
type: data
|
||||
extensions:
|
||||
- .svg
|
||||
tm_scope: text.xml
|
||||
ace_mode: xml
|
||||
|
||||
Sage:
|
||||
type: programming
|
||||
group: Python
|
||||
@@ -2856,7 +2897,7 @@ Stylus:
|
||||
group: CSS
|
||||
extensions:
|
||||
- .styl
|
||||
tm_scope: none
|
||||
tm_scope: source.stylus
|
||||
ace_mode: stylus
|
||||
|
||||
SuperCollider:
|
||||
@@ -2895,7 +2936,7 @@ TXL:
|
||||
type: programming
|
||||
extensions:
|
||||
- .txl
|
||||
tm_scope: none
|
||||
tm_scope: source.txl
|
||||
ace_mode: text
|
||||
|
||||
Tcl:
|
||||
@@ -3127,7 +3168,7 @@ XC:
|
||||
ace_mode: c_cpp
|
||||
|
||||
XML:
|
||||
type: markup
|
||||
type: data
|
||||
ace_mode: xml
|
||||
aliases:
|
||||
- rss
|
||||
@@ -3171,7 +3212,6 @@ XML:
|
||||
- .srdf
|
||||
- .stTheme
|
||||
- .sublime-snippet
|
||||
- .svg
|
||||
- .targets
|
||||
- .tmCommand
|
||||
- .tmLanguage
|
||||
@@ -3291,13 +3331,21 @@ Zimpl:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
desktop:
|
||||
type: data
|
||||
extensions:
|
||||
- .desktop
|
||||
- .desktop.in
|
||||
tm_scope: source.desktop
|
||||
ace_mode: text
|
||||
|
||||
eC:
|
||||
type: programming
|
||||
search_term: ec
|
||||
extensions:
|
||||
- .ec
|
||||
- .eh
|
||||
tm_scope: none
|
||||
tm_scope: source.c.ec
|
||||
ace_mode: text
|
||||
|
||||
edn:
|
||||
|
||||
lib/linguist/lazy_blob.rb
@@ -4,7 +4,7 @@ require 'rugged'

module Linguist
  class LazyBlob
    GIT_ATTR = ['linguist-language', 'linguist-vendored']
    GIT_ATTR = ['linguist-documentation', 'linguist-language', 'linguist-vendored']
    GIT_ATTR_OPTS = { :priority => [:index], :skip_system => true }
    GIT_ATTR_FLAGS = Rugged::Repository::Attributes.parse_opts(GIT_ATTR_OPTS)

@@ -39,11 +39,19 @@ module Linguist
      end
    end

    def documentation?
      if attr = git_attributes['linguist-documentation']
        boolean_attribute(attr)
      else
        super
      end
    end

    def language
      return @language if defined?(@language)

      @language = if lang = git_attributes['linguist-language']
        Language.find_by_name(lang)
        Language.find_by_alias(lang)
      else
        super
      end

lib/linguist/repository.rb
@@ -156,13 +156,8 @@ module Linguist

        blob = Linguist::LazyBlob.new(repository, delta.new_file[:oid], new, mode.to_s(8))

        # Skip vendored or generated blobs
        next if blob.vendored? || blob.generated? || blob.language.nil?

        # Only include programming languages and acceptable markup languages
        if blob.language.type == :programming || Language.detectable_markup.include?(blob.language.name)
          file_map[new] = [blob.language.group.name, blob.size]
        end
        next unless blob.include_in_language_stats?
        file_map[new] = [blob.language.group.name, blob.size]
      end
    end
lib/linguist/strategy/modeline.rb (new file, 30 lines)
@@ -0,0 +1,30 @@
module Linguist
  module Strategy
    class Modeline
      EmacsModeline = /-\*-\s*mode:\s*(\w+);?\s*-\*-/i
      VimModeline = /\/\*\s*vim:\s*set\s*(?:ft|filetype)=(\w+):\s*\*\//i

      # Public: Detects language based on Vim and Emacs modelines
      #
      # blob - An object that quacks like a blob.
      #
      # Examples
      #
      #   Modeline.call(FileBlob.new("path/to/file"))
      #
      # Returns an Array with one Language if the blob has a Vim or Emacs modeline
      # that matches a Language name or alias. Returns an empty array if no match.
      def self.call(blob, _ = nil)
        Array(Language.find_by_alias(modeline(blob.data)))
      end

      # Public: Get the modeline from the first n-lines of the file
      #
      # Returns a String or nil
      def self.modeline(data)
        match = data.match(EmacsModeline) || data.match(VimModeline)
        match[1] if match
      end
    end
  end
end
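A quick sketch of the new strategy in isolation, assuming the gem from this branch; the source fragment is invented for the example:

```ruby
require 'linguist'

# A C++ fragment carrying a Vim modeline in a block comment.
data = "/* vim: set filetype=cpp: */\n#include <vector>\n"

Linguist::Strategy::Modeline.modeline(data)   # => "cpp", captured by VimModeline
Linguist::Language.find_by_alias("cpp").name  # => "C++"
```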
lib/linguist/tokenizer.rb
@@ -33,7 +33,8 @@ module Linguist
      ['<!--', '-->'], # XML
      ['{-', '-}'],    # Haskell
      ['(*', '*)'],    # Coq
      ['"""', '"""'] # Python
      ['"""', '"""'], # Python
      ["'''", "'''"]  # Python
    ]

    START_SINGLE_LINE_COMMENT = Regexp.compile(SINGLE_LINE_COMMENTS.map { |c|
@@ -40,7 +40,7 @@
|
||||
# Minified JavaScript and CSS
|
||||
- (\.|-)min\.(js|css)$
|
||||
|
||||
#Stylesheets imported from packages
|
||||
# Stylesheets imported from packages
|
||||
- ([^\s]*)import\.(css|less|scss|styl)$
|
||||
|
||||
# Bootstrap css and js
|
||||
@@ -251,3 +251,6 @@
|
||||
# ProGuard
|
||||
- proguard.pro
|
||||
- proguard-rules.pro
|
||||
|
||||
# Android Google APIs
|
||||
- (^|/)\.google_apis/
|
||||
|
||||
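These vendor entries behave like ordinary Ruby regular expressions applied to file paths, so an individual pattern can be checked in isolation (a sketch, not part of this commit):

    min = Regexp.new('(\.|-)min\.(js|css)$')
    min.match?("assets/jquery.min.js")  # => true
    min.match?("app/application.js")    # => false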
@@ -1,3 +1,3 @@
|
||||
module Linguist
|
||||
VERSION = "4.3.0b1"
|
||||
VERSION = "4.5.0b1"
|
||||
end
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"repository": "https://github.com/github/linguist",
|
||||
"dependencies": {
|
||||
"season": "~>3.0"
|
||||
"season": "~>5.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,215 +0,0 @@
|
||||
%{
|
||||
#include "./../ATEXT/atextfun.hats"
|
||||
%}
|
||||
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
|
||||
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
|
||||
<head>
|
||||
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
|
||||
<title>EFFECTIVATS-DiningPhil2</title>
|
||||
#patscode_style()
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<h1>
|
||||
Effective ATS: Dining Philosophers
|
||||
</h1>
|
||||
|
||||
In this article, I present an implementation of a slight variant of the
|
||||
famous problem of 5-Dining-Philosophers by Dijkstra that makes simple but
|
||||
convincing use of linear types.
|
||||
|
||||
<h2>
|
||||
The Original Problem
|
||||
</h2>
|
||||
|
||||
There are five philosophers sitting around a table and there are also 5
|
||||
forks placed on the table such that each fork is located between the left
|
||||
hand of a philosopher and the right hand of another philosopher. Each
|
||||
philosopher does the following routine repeatedly: thinking and dining. In
|
||||
order to dine, a philosopher needs to first acquire two forks: one located
|
||||
on his left-hand side and the other on his right-hand side. After
|
||||
finishing dining, a philosopher puts the two acquired forks onto the table:
|
||||
one on his left-hand side and the other on his right-hand side.
|
||||
|
||||
<h2>
|
||||
A Variant of the Original Problem
|
||||
</h2>
|
||||
|
||||
The following twist is added to the original version:
|
||||
|
||||
<p>
|
||||
|
||||
After a fork is used, it becomes a "dirty" fork and needs to be put in a
|
||||
tray for dirty forks. There is a cleaner who cleans dirty forks and then
|
||||
puts them back on the table.
|
||||
|
||||
<h2>
|
||||
Channels for Communication
|
||||
</h2>
|
||||
|
||||
A channel is just a shared queue of fixed capacity. The following two
|
||||
functions are for inserting an element into and taking an element out of a
|
||||
given channel:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_sats("\
|
||||
fun{a:vt0p} channel_insert (channel (a), a): void
|
||||
fun{a:vt0p} channel_takeout (chan: channel (a)): (a)
|
||||
")</pre>
|
||||
|
||||
If [channel_insert] is called on a channel that is full, then the caller is
|
||||
blocked until an element is taken out of the channel. If [channel_takeout]
|
||||
is called on a channel that is empty, then the caller is blocked until an
|
||||
element is inserted into the channel.
|
||||
|
||||
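For readers unfamiliar with ATS, the blocking behaviour described above is that of a bounded blocking queue. A rough Ruby analogy (not part of the original article) using the standard-library SizedQueue:

    require 'thread'

    ch = SizedQueue.new(2)  # a channel of capacity 2
    ch.push(:fork)          # blocks while the queue is full
    f = ch.pop              # blocks while the queue is empty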
<h2>
|
||||
A Channel for Each Fork
|
||||
</h2>
|
||||
|
||||
Forks are resources given a linear type. Each fork is initially stored in a
|
||||
channel, which can be obtained by calling the following function:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_sats("\
|
||||
fun fork_changet (n: nphil): channel(fork)
|
||||
")</pre>
|
||||
|
||||
where the type [nphil] is defined to be [natLt(5)] (for natural numbers
|
||||
less than 5). The channels for storing forks are chosen to be of capacity
|
||||
2. The reason that channels of capacity 2 are chosen to store at most one
|
||||
element (in each of them) is to guarantee that these channels can never be
|
||||
full (so that there is no attempt made to send signals to awake callers
|
||||
supposedly being blocked due to channels being full).
|
||||
|
||||
|
||||
<h2>
|
||||
A Channel for the Fork Tray
|
||||
</h2>
|
||||
|
||||
A tray for storing "dirty" forks is also a channel, which can be obtained
|
||||
by calling the following function:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_sats("\
|
||||
fun forktray_changet ((*void*)): channel(fork)
|
||||
")</pre>
|
||||
|
||||
The capacity chosen for the channel is 6 (instead of 5) so that it can
|
||||
never become full (as there are only 5 forks in total).
|
||||
|
||||
<h2>
|
||||
Philosopher Loop
|
||||
</h2>
|
||||
|
||||
Each philosopher is implemented as a loop:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_dats('\
|
||||
implement
|
||||
phil_loop (n) = let
|
||||
//
|
||||
val () = phil_think (n)
|
||||
//
|
||||
val nl = phil_left (n) // = n
|
||||
val nr = phil_right (n) // = (n+1) % 5
|
||||
//
|
||||
val ch_lfork = fork_changet (nl)
|
||||
val ch_rfork = fork_changet (nr)
|
||||
//
|
||||
val lf = channel_takeout (ch_lfork)
|
||||
val () = println! ("phil_loop(", n, ") picks left fork")
|
||||
//
|
||||
val () = randsleep (2) // sleep up to 2 seconds
|
||||
//
|
||||
val rf = channel_takeout (ch_rfork)
|
||||
val () = println! ("phil_loop(", n, ") picks right fork")
|
||||
//
|
||||
val () = phil_dine (n, lf, rf)
|
||||
//
|
||||
val ch_forktray = forktray_changet ()
|
||||
val () = channel_insert (ch_forktray, lf) // left fork to dirty tray
|
||||
val () = channel_insert (ch_forktray, rf) // right fork to dirty tray
|
||||
//
|
||||
in
|
||||
phil_loop (n)
|
||||
end // end of [phil_loop]
|
||||
')</pre>
|
||||
|
||||
It should be straightforward to follow the code for [phil_loop].
|
||||
|
||||
<h2>
|
||||
Fork Cleaner Loop
|
||||
</h2>
|
||||
|
||||
A cleaner is implemented as a loop:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_dats('\
|
||||
implement
|
||||
cleaner_loop () = let
|
||||
//
|
||||
val ch = forktray_changet ()
|
||||
val f0 = channel_takeout (ch) // [f0] is dirty
|
||||
//
|
||||
val () = cleaner_wash (f0) // washes dirty [f0]
|
||||
val () = cleaner_return (f0) // puts back cleaned [f0]
|
||||
//
|
||||
in
|
||||
cleaner_loop ()
|
||||
end // end of [cleaner_loop]
|
||||
')</pre>
|
||||
|
||||
The function [cleaner_return] first finds out the number of a given fork
|
||||
and then uses the number to locate the channel for storing the fork. Its
|
||||
actual implementation is given as follows:
|
||||
|
||||
<pre
|
||||
class="patsyntax">
|
||||
#pats2xhtml_dats('\
|
||||
implement
|
||||
cleaner_return (f) =
|
||||
{
|
||||
val n = fork_get_num (f)
|
||||
val ch = fork_changet (n)
|
||||
val () = channel_insert (ch, f)
|
||||
}
|
||||
')</pre>
|
||||
|
||||
It should now be straightforward to follow the code for [cleaner_loop].
|
||||
|
||||
<h2>
|
||||
Testing
|
||||
</h2>
|
||||
|
||||
The entire code of this implementation is stored in the following files:
|
||||
|
||||
<pre>
|
||||
DiningPhil2.sats
|
||||
DiningPhil2.dats
|
||||
DiningPhil2_fork.dats
|
||||
DiningPhil2_thread.dats
|
||||
</pre>
|
||||
|
||||
There is also a Makefile available for compiling the ATS source code into
|
||||
an executable for testing. One should be able to encounter a deadlock after
|
||||
running the simulation for a while.
|
||||
|
||||
<hr size="2">
|
||||
|
||||
This article is written by <a href="http://www.cs.bu.edu/~hwxi/">Hongwei Xi</a>.
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
%{
|
||||
implement main () = fprint_filsub (stdout_ref, "main_atxt.txt")
|
||||
%}
|
||||
2841
samples/Assembly/forth.nasm
Normal file
File diff suppressed because it is too large
116
samples/C++/qsciprinter.cp
Normal file
@@ -0,0 +1,116 @@
|
||||
// This module defines interface to the QsciPrinter class.
|
||||
//
|
||||
// Copyright (c) 2011 Riverbank Computing Limited <info@riverbankcomputing.com>
|
||||
//
|
||||
// This file is part of QScintilla.
|
||||
//
|
||||
// This file may be used under the terms of the GNU General Public
|
||||
// License versions 2.0 or 3.0 as published by the Free Software
|
||||
// Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
|
||||
// included in the packaging of this file. Alternatively you may (at
|
||||
// your option) use any later version of the GNU General Public
|
||||
// License if such license has been publicly approved by Riverbank
|
||||
// Computing Limited (or its successors, if any) and the KDE Free Qt
|
||||
// Foundation. In addition, as a special exception, Riverbank gives you
|
||||
// certain additional rights. These rights are described in the Riverbank
|
||||
// GPL Exception version 1.1, which can be found in the file
|
||||
// GPL_EXCEPTION.txt in this package.
|
||||
//
|
||||
// If you are unsure which license is appropriate for your use, please
|
||||
// contact the sales department at sales@riverbankcomputing.com.
|
||||
//
|
||||
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
|
||||
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
|
||||
|
||||
|
||||
#ifndef QSCIPRINTER_H
|
||||
#define QSCIPRINTER_H
|
||||
|
||||
#ifdef __APPLE__
|
||||
extern "C++" {
|
||||
#endif
|
||||
|
||||
#include <qprinter.h>
|
||||
|
||||
#include <Qsci/qsciglobal.h>
|
||||
#include <Qsci/qsciscintilla.h>
|
||||
|
||||
|
||||
QT_BEGIN_NAMESPACE
|
||||
class QRect;
|
||||
class QPainter;
|
||||
QT_END_NAMESPACE
|
||||
|
||||
class QsciScintillaBase;
|
||||
|
||||
|
||||
//! \brief The QsciPrinter class is a sub-class of the Qt QPrinter class that
|
||||
//! is able to print the text of a Scintilla document.
|
||||
//!
|
||||
//! The class can be further sub-classed to alter to layout of the text, adding
|
||||
//! headers and footers for example.
|
||||
class QSCINTILLA_EXPORT QsciPrinter : public QPrinter
|
||||
{
|
||||
public:
|
||||
//! Constructs a printer paint device with mode \a mode.
|
||||
QsciPrinter(PrinterMode mode = ScreenResolution);
|
||||
|
||||
//! Destroys the QsciPrinter instance.
|
||||
virtual ~QsciPrinter();
|
||||
|
||||
//! Format a page, by adding headers and footers for example, before the
|
||||
//! document text is drawn on it. \a painter is the painter to be used to
|
||||
//! add customised text and graphics. \a drawing is true if the page is
|
||||
//! actually being drawn rather than being sized. \a painter drawing
|
||||
//! methods must only be called when \a drawing is true. \a area is the
|
||||
//! area of the page that will be used to draw the text. This should be
|
||||
//! modified if it is necessary to reserve space for any customised text or
|
||||
//! graphics. By default the area is relative to the printable area of the
|
||||
// page. Use QPrinter::setFullPage() before calling printRange() if you
|
||||
//! want to try and print over the whole page. \a pagenr is the number of
|
||||
//! the page. The first page is numbered 1.
|
||||
virtual void formatPage(QPainter &painter, bool drawing, QRect &area,
|
||||
int pagenr);
|
||||
|
||||
//! Return the number of points to add to each font when printing.
|
||||
//!
|
||||
//! \sa setMagnification()
|
||||
int magnification() const {return mag;}
|
||||
|
||||
//! Sets the number of points to add to each font when printing to \a
|
||||
//! magnification.
|
||||
//!
|
||||
//! \sa magnification()
|
||||
virtual void setMagnification(int magnification);
|
||||
|
||||
//! Print a range of lines from the Scintilla instance \a qsb. \a from is
|
||||
//! the first line to print and a negative value signifies the first line
|
||||
//! of text. \a to is the last line to print and a negative value
|
||||
//! signifies the last line of text. true is returned if there was no
|
||||
//! error.
|
||||
virtual int printRange(QsciScintillaBase *qsb, int from = -1, int to = -1);
|
||||
|
||||
//! Return the line wrap mode used when printing. The default is
|
||||
//! QsciScintilla::WrapWord.
|
||||
//!
|
||||
//! \sa setWrapMode()
|
||||
QsciScintilla::WrapMode wrapMode() const {return wrap;}
|
||||
|
||||
//! Sets the line wrap mode used when printing to \a wmode.
|
||||
//!
|
||||
//! \sa wrapMode()
|
||||
virtual void setWrapMode(QsciScintilla::WrapMode wmode);
|
||||
|
||||
private:
|
||||
int mag;
|
||||
QsciScintilla::WrapMode wrap;
|
||||
|
||||
QsciPrinter(const QsciPrinter &);
|
||||
QsciPrinter &operator=(const QsciPrinter &);
|
||||
};
|
||||
|
||||
#ifdef __APPLE__
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
2310
samples/C/filenames/script
Executable file
File diff suppressed because it is too large
343
samples/CLIPS/demo.clp
Normal file
@@ -0,0 +1,343 @@
|
||||
;;;***************************
|
||||
;;;* DEFFACTS KNOWLEDGE BASE *
|
||||
;;;***************************
|
||||
|
||||
(deffacts MAIN::knowledge-base
|
||||
(welcome (message WelcomeMessage))
|
||||
(goal (variable type.animal))
|
||||
(legalanswers (values yes no))
|
||||
(displayanswers (values "Yes" "No"))
|
||||
(rule (if backbone is yes)
|
||||
(then superphylum is backbone))
|
||||
(rule (if backbone is no)
|
||||
(then superphylum is jellyback))
|
||||
(question (variable backbone)
|
||||
(query backbone.query))
|
||||
(rule (if superphylum is backbone and
|
||||
warm.blooded is yes)
|
||||
(then phylum is warm))
|
||||
(rule (if superphylum is backbone and
|
||||
warm.blooded is no)
|
||||
(then phylum is cold))
|
||||
(question (variable warm.blooded)
|
||||
(query warm.blooded.query))
|
||||
(rule (if superphylum is jellyback and
|
||||
live.prime.in.soil is yes)
|
||||
(then phylum is soil))
|
||||
(rule (if superphylum is jellyback and
|
||||
live.prime.in.soil is no)
|
||||
(then phylum is elsewhere))
|
||||
(question (variable live.prime.in.soil)
|
||||
(query live.prime.in.soil.query))
|
||||
(rule (if phylum is warm and
|
||||
has.breasts is yes)
|
||||
(then class is breasts))
|
||||
(rule (if phylum is warm and
|
||||
has.breasts is no)
|
||||
(then type.animal is bird))
|
||||
(question (variable has.breasts)
|
||||
(query has.breasts.query))
|
||||
(rule (if phylum is cold and
|
||||
always.in.water is yes)
|
||||
(then class is water))
|
||||
(rule (if phylum is cold and
|
||||
always.in.water is no)
|
||||
(then class is dry))
|
||||
(question (variable always.in.water)
|
||||
(query always.in.water.query))
|
||||
(rule (if phylum is soil and
|
||||
flat.bodied is yes)
|
||||
(then type.animal is flatworm))
|
||||
(rule (if phylum is soil and
|
||||
flat.bodied is no)
|
||||
(then type.animal is worm.leech))
|
||||
(question (variable flat.bodied)
|
||||
(query flat.bodied.query))
|
||||
(rule (if phylum is elsewhere and
|
||||
body.in.segments is yes)
|
||||
(then class is segments))
|
||||
(rule (if phylum is elsewhere and
|
||||
body.in.segments is no)
|
||||
(then class is unified))
|
||||
(question (variable body.in.segments)
|
||||
(query body.in.segments.query))
|
||||
(rule (if class is breasts and
|
||||
can.eat.meat is yes)
|
||||
(then order is meat))
|
||||
(rule (if class is breasts and
|
||||
can.eat.meat is no)
|
||||
(then order is vegy))
|
||||
(question (variable can.eat.meat)
|
||||
(query can.eat.meat.query))
|
||||
(rule (if class is water and
|
||||
boney is yes)
|
||||
(then type.animal is fish))
|
||||
(rule (if class is water and
|
||||
boney is no)
|
||||
(then type.animal is shark.ray))
|
||||
(question (variable boney)
|
||||
(query boney.query))
|
||||
(rule (if class is dry and
|
||||
scaly is yes)
|
||||
(then order is scales))
|
||||
(rule (if class is dry and
|
||||
scaly is no)
|
||||
(then order is soft))
|
||||
(question (variable scaly)
|
||||
(query scaly.query))
|
||||
(rule (if class is segments and
|
||||
shell is yes)
|
||||
(then order is shell))
|
||||
(rule (if class is segments and
|
||||
shell is no)
|
||||
(then type.animal is centipede.millipede.insect))
|
||||
(question (variable shell)
|
||||
(query shell.query))
|
||||
(rule (if class is unified and
|
||||
digest.cells is yes)
|
||||
(then order is cells))
|
||||
(rule (if class is unified and
|
||||
digest.cells is no)
|
||||
(then order is stomach))
|
||||
(question (variable digest.cells)
|
||||
(query digest.cells.query))
|
||||
(rule (if order is meat and
|
||||
fly is yes)
|
||||
(then type.animal is bat))
|
||||
(rule (if order is meat and
|
||||
fly is no)
|
||||
(then family is nowings))
|
||||
(question (variable fly)
|
||||
(query fly.query))
|
||||
(rule (if order is vegy and
|
||||
hooves is yes)
|
||||
(then family is hooves))
|
||||
(rule (if order is vegy and
|
||||
hooves is no)
|
||||
(then family is feet))
|
||||
(question (variable hooves)
|
||||
(query hooves.query))
|
||||
(rule (if order is scales and
|
||||
rounded.shell is yes)
|
||||
(then type.animal is turtle))
|
||||
(rule (if order is scales and
|
||||
rounded.shell is no)
|
||||
(then family is noshell))
|
||||
(question (variable rounded.shell)
|
||||
(query rounded.shell.query))
|
||||
(rule (if order is soft and
|
||||
jump is yes)
|
||||
(then type.animal is frog))
|
||||
(rule (if order is soft and
|
||||
jump is no)
|
||||
(then type.animal is salamander))
|
||||
(question (variable jump)
|
||||
(query jump.query))
|
||||
(rule (if order is shell and
|
||||
tail is yes)
|
||||
(then type.animal is lobster))
|
||||
(rule (if order is shell and
|
||||
tail is no)
|
||||
(then type.animal is crab))
|
||||
(question (variable tail)
|
||||
(query tail.query))
|
||||
(rule (if order is cells and
|
||||
stationary is yes)
|
||||
(then family is stationary))
|
||||
(rule (if order is cells and
|
||||
stationary is no)
|
||||
(then type.animal is jellyfish))
|
||||
(question (variable stationary)
|
||||
(query stationary.query))
|
||||
(rule (if order is stomach and
|
||||
multicelled is yes)
|
||||
(then family is multicelled))
|
||||
(rule (if order is stomach and
|
||||
multicelled is no)
|
||||
(then type.animal is protozoa))
|
||||
(question (variable multicelled)
|
||||
(query multicelled.query))
|
||||
(rule (if family is nowings and
|
||||
opposing.thumb is yes)
|
||||
(then genus is thumb))
|
||||
(rule (if family is nowings and
|
||||
opposing.thumb is no)
|
||||
(then genus is nothumb))
|
||||
(question (variable opposing.thumb)
|
||||
(query opposing.thumb.query))
|
||||
(rule (if family is hooves and
|
||||
two.toes is yes)
|
||||
(then genus is twotoes))
|
||||
(rule (if family is hooves and
|
||||
two.toes is no)
|
||||
(then genus is onetoe))
|
||||
(question (variable two.toes)
|
||||
(query two.toes.query))
|
||||
(rule (if family is feet and
|
||||
live.in.water is yes)
|
||||
(then genus is water))
|
||||
(rule (if family is feet and
|
||||
live.in.water is no)
|
||||
(then genus is dry))
|
||||
(question (variable live.in.water)
|
||||
(query live.in.water.query))
|
||||
(rule (if family is noshell and
|
||||
limbs is yes)
|
||||
(then type.animal is crocodile.alligator))
|
||||
(rule (if family is noshell and
|
||||
limbs is no)
|
||||
(then type.animal is snake))
|
||||
(question (variable limbs)
|
||||
(query limbs.query))
|
||||
(rule (if family is stationary and
|
||||
spikes is yes)
|
||||
(then type.animal is sea.anemone))
|
||||
(rule (if family is stationary and
|
||||
spikes is no)
|
||||
(then type.animal is coral.sponge))
|
||||
(question (variable spikes)
|
||||
(query spikes.query))
|
||||
(rule (if family is multicelled and
|
||||
spiral.shell is yes)
|
||||
(then type.animal is snail))
|
||||
(rule (if family is multicelled and
|
||||
spiral.shell is no)
|
||||
(then genus is noshell))
|
||||
(question (variable spiral.shell)
|
||||
(query spiral.shell.query))
|
||||
(rule (if genus is thumb and
|
||||
prehensile.tail is yes)
|
||||
(then type.animal is monkey))
|
||||
(rule (if genus is thumb and
|
||||
prehensile.tail is no)
|
||||
(then species is notail))
|
||||
(question (variable prehensile.tail)
|
||||
(query prehensile.tail.query))
|
||||
(rule (if genus is nothumb and
|
||||
over.400 is yes)
|
||||
(then species is 400))
|
||||
(rule (if genus is nothumb and
|
||||
over.400 is no)
|
||||
(then species is under400))
|
||||
(question (variable over.400)
|
||||
(query over.400.query))
|
||||
(rule (if genus is twotoes and
|
||||
horns is yes)
|
||||
(then species is horns))
|
||||
(rule (if genus is twotoes and
|
||||
horns is no)
|
||||
(then species is nohorns))
|
||||
(question (variable horns)
|
||||
(query horns.query))
|
||||
(rule (if genus is onetoe and
|
||||
plating is yes)
|
||||
(then type.animal is rhinoceros))
|
||||
(rule (if genus is onetoe and
|
||||
plating is no)
|
||||
(then type.animal is horse.zebra))
|
||||
(question (variable plating)
|
||||
(query plating.query))
|
||||
(rule (if genus is water and
|
||||
hunted is yes)
|
||||
(then type.animal is whale))
|
||||
(rule (if genus is water and
|
||||
hunted is no)
|
||||
(then type.animal is dolphin.porpoise))
|
||||
(question (variable hunted)
|
||||
(query hunted.query))
|
||||
(rule (if genus is dry and
|
||||
front.teeth is yes)
|
||||
(then species is teeth))
|
||||
(rule (if genus is dry and
|
||||
front.teeth is no)
|
||||
(then species is noteeth))
|
||||
(question (variable front.teeth)
|
||||
(query front.teeth.query))
|
||||
(rule (if genus is noshell and
|
||||
bivalve is yes)
|
||||
(then type.animal is clam.oyster))
|
||||
(rule (if genus is noshell and
|
||||
bivalve is no)
|
||||
(then type.animal is squid.octopus))
|
||||
(question (variable bivalve)
|
||||
(query bivalve.query))
|
||||
(rule (if species is notail and
|
||||
nearly.hairless is yes)
|
||||
(then type.animal is man))
|
||||
(rule (if species is notail and
|
||||
nearly.hairless is no)
|
||||
(then subspecies is hair))
|
||||
(question (variable nearly.hairless)
|
||||
(query nearly.hairless.query))
|
||||
(rule (if species is 400 and
|
||||
land.based is yes)
|
||||
(then type.animal is bear.tiger.lion))
|
||||
(rule (if species is 400 and
|
||||
land.based is no)
|
||||
(then type.animal is walrus))
|
||||
(question (variable land.based)
|
||||
(query land.based.query))
|
||||
(rule (if species is under400 and
|
||||
thintail is yes)
|
||||
(then type.animal is cat))
|
||||
(rule (if species is under400 and
|
||||
thintail is no)
|
||||
(then type.animal is coyote.wolf.fox.dog))
|
||||
(question (variable thintail)
|
||||
(query thintail.query))
|
||||
(rule (if species is nohorns and
|
||||
lives.in.desert is yes)
|
||||
(then type.animal is camel))
|
||||
(rule (if species is nohorns and
|
||||
lives.in.desert is no and
|
||||
semi.aquatic is no)
|
||||
(then type.animal is giraffe))
|
||||
(rule (if species is nohorns and
|
||||
lives.in.desert is no and
|
||||
semi.aquatic is yes)
|
||||
(then type.animal is hippopotamus))
|
||||
(question (variable lives.in.desert)
|
||||
(query lives.in.desert.query))
|
||||
(question (variable semi.aquatic)
|
||||
(query semi.aquatic.query))
|
||||
(rule (if species is teeth and
|
||||
large.ears is yes)
|
||||
(then type.animal is rabbit))
|
||||
(rule (if species is teeth and
|
||||
large.ears is no)
|
||||
(then type.animal is rat.mouse.squirrel.beaver.porcupine))
|
||||
(question (variable large.ears)
|
||||
(query large.ears.query))
|
||||
(rule (if species is noteeth and
|
||||
pouch is yes)
|
||||
(then type.animal is kangaroo.koala.bear))
|
||||
(rule (if species is noteeth and
|
||||
pouch is no)
|
||||
(then type.animal is mole.shrew.elephant))
|
||||
(question (variable pouch)
|
||||
(query pouch.query))
|
||||
(rule (if subspecies is hair and
|
||||
long.powerful.arms is yes)
|
||||
(then type.animal is orangutan.gorilla.chimpanzee))
|
||||
(rule (if subspecies is hair and
|
||||
long.powerful.arms is no)
|
||||
(then type.animal is baboon))
|
||||
(question (variable long.powerful.arms)
|
||||
(query long.powerful.arms.query))
|
||||
(rule (if species is horns and
|
||||
fleece is yes)
|
||||
(then type.animal is sheep.goat))
|
||||
(rule (if species is horns and
|
||||
fleece is no)
|
||||
(then subsubspecies is nofleece))
|
||||
(question (variable fleece)
|
||||
(query fleece.query))
|
||||
(rule (if subsubspecies is nofleece and
|
||||
domesticated is yes)
|
||||
(then type.animal is cow))
|
||||
(rule (if subsubspecies is nofleece and
|
||||
domesticated is no)
|
||||
(then type.animal is deer.moose.antelope))
|
||||
(question (variable domesticated)
|
||||
(query domesticated.query))
|
||||
(answer (prefix "I think your animal is a ") (variable type.animal) (postfix ".")))
|
||||
281
samples/CLIPS/sudoku.clp
Normal file
@@ -0,0 +1,281 @@
|
||||
;;; http://www.angusj.com/sudoku/hints
|
||||
;;; http://www.scanraid.com/BasicStrategies.htm
|
||||
;;; http://www.sudokuoftheday.com/pages/techniques-overview
|
||||
;;; http://www.sudokuonline.us/sudoku_solving_techniques
|
||||
;;; http://www.sadmansoftware.com/sudoku/techniques.htm
|
||||
;;; http://www.krazydad.com/blog/2005/09/29/an-index-of-sudoku-strategies/
|
||||
|
||||
;;; #######################
|
||||
;;; DEFTEMPLATES & DEFFACTS
|
||||
;;; #######################
|
||||
|
||||
(deftemplate possible
|
||||
(slot row)
|
||||
(slot column)
|
||||
(slot value)
|
||||
(slot group)
|
||||
(slot id))
|
||||
|
||||
(deftemplate impossible
|
||||
(slot id)
|
||||
(slot value)
|
||||
(slot priority)
|
||||
(slot reason))
|
||||
|
||||
(deftemplate technique-employed
|
||||
(slot reason)
|
||||
(slot priority))
|
||||
|
||||
(deftemplate technique
|
||||
(slot name)
|
||||
(slot priority))
|
||||
|
||||
(deffacts startup
|
||||
(phase grid-values))
|
||||
|
||||
(deftemplate size-value
|
||||
(slot size)
|
||||
(slot value))
|
||||
|
||||
(deffacts values
|
||||
(size-value (size 1) (value 1))
|
||||
(size-value (size 2) (value 2))
|
||||
(size-value (size 2) (value 3))
|
||||
(size-value (size 2) (value 4))
|
||||
(size-value (size 3) (value 5))
|
||||
(size-value (size 3) (value 6))
|
||||
(size-value (size 3) (value 7))
|
||||
(size-value (size 3) (value 8))
|
||||
(size-value (size 3) (value 9))
|
||||
(size-value (size 4) (value 10))
|
||||
(size-value (size 4) (value 11))
|
||||
(size-value (size 4) (value 12))
|
||||
(size-value (size 4) (value 13))
|
||||
(size-value (size 4) (value 14))
|
||||
(size-value (size 4) (value 15))
|
||||
(size-value (size 4) (value 16))
|
||||
(size-value (size 5) (value 17))
|
||||
(size-value (size 5) (value 18))
|
||||
(size-value (size 5) (value 19))
|
||||
(size-value (size 5) (value 20))
|
||||
(size-value (size 5) (value 21))
|
||||
(size-value (size 5) (value 22))
|
||||
(size-value (size 5) (value 23))
|
||||
(size-value (size 5) (value 24))
|
||||
(size-value (size 5) (value 25)))
|
||||
|
||||
;;; ###########
|
||||
;;; SETUP RULES
|
||||
;;; ###########
|
||||
|
||||
;;; ***********
|
||||
;;; stress-test
|
||||
;;; ***********
|
||||
|
||||
(defrule stress-test
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
(phase match)
|
||||
|
||||
(stress-test)
|
||||
|
||||
(priority ?last)
|
||||
|
||||
(not (priority ?p&:(> ?p ?last)))
|
||||
|
||||
(technique (priority ?next&:(> ?next ?last)))
|
||||
|
||||
(not (technique (priority ?p&:(> ?p ?last)&:(< ?p ?next))))
|
||||
|
||||
=>
|
||||
|
||||
(assert (priority ?next)))
|
||||
|
||||
;;; *****************
|
||||
;;; enable-techniques
|
||||
;;; *****************
|
||||
|
||||
(defrule enable-techniques
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
(phase match)
|
||||
|
||||
(size ?)
|
||||
|
||||
(not (possible (value any)))
|
||||
|
||||
=>
|
||||
|
||||
(assert (priority 1)))
|
||||
|
||||
;;; **********
|
||||
;;; expand-any
|
||||
;;; **********
|
||||
|
||||
(defrule expand-any
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
(phase expand-any)
|
||||
|
||||
?f <- (possible (row ?r) (column ?c) (value any) (group ?g) (id ?id))
|
||||
|
||||
(not (possible (value any) (id ?id2&:(< ?id2 ?id))))
|
||||
|
||||
(size ?s)
|
||||
|
||||
(size-value (size ?as&:(<= ?as ?s)) (value ?v))
|
||||
|
||||
(not (possible (row ?r) (column ?c) (value ?v)))
|
||||
|
||||
(not (and (size-value (value ?v2&:(< ?v2 ?v)))
|
||||
|
||||
(not (possible (row ?r) (column ?c) (value ?v2)))))
|
||||
|
||||
=>
|
||||
|
||||
(assert (possible (row ?r) (column ?c) (value ?v) (group ?g) (id ?id))))
|
||||
|
||||
;;; *****************
|
||||
;;; position-expanded
|
||||
;;; *****************
|
||||
|
||||
(defrule position-expanded
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
(phase expand-any)
|
||||
|
||||
?f <- (possible (row ?r) (column ?c) (value any) (group ?g) (id ?id))
|
||||
|
||||
(size ?s)
|
||||
|
||||
(not (and (size-value (size ?as&:(<= ?as ?s)) (value ?v))
|
||||
|
||||
(not (possible (row ?r) (column ?c) (value ?v)))))
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f))
|
||||
|
||||
;;; ###########
|
||||
;;; PHASE RULES
|
||||
;;; ###########
|
||||
|
||||
;;; ***************
|
||||
;;; expand-any-done
|
||||
;;; ***************
|
||||
|
||||
(defrule expand-any-done
|
||||
|
||||
(declare (salience 10))
|
||||
|
||||
?f <- (phase expand-any)
|
||||
|
||||
(not (possible (value any)))
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f)
|
||||
|
||||
(assert (phase initial-output))
|
||||
(assert (print-position 1 1)))
|
||||
|
||||
;;; ***********
|
||||
;;; begin-match
|
||||
;;; ***********
|
||||
|
||||
(defrule begin-match
|
||||
|
||||
(declare (salience -20))
|
||||
|
||||
?f <- (phase initial-output)
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f)
|
||||
|
||||
(assert (phase match)))
|
||||
|
||||
;;; *****************
|
||||
;;; begin-elimination
|
||||
;;; *****************
|
||||
|
||||
(defrule begin-elimination
|
||||
|
||||
(declare (salience -20))
|
||||
|
||||
?f <- (phase match)
|
||||
|
||||
(not (not (impossible)))
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f)
|
||||
|
||||
(assert (phase elimination)))
|
||||
|
||||
;;; *************
|
||||
;;; next-priority
|
||||
;;; *************
|
||||
|
||||
(defrule next-priority
|
||||
|
||||
(declare (salience -20))
|
||||
|
||||
(phase match)
|
||||
|
||||
(not (impossible))
|
||||
|
||||
(priority ?last)
|
||||
|
||||
(not (priority ?p&:(> ?p ?last)))
|
||||
|
||||
(technique (priority ?next&:(> ?next ?last)))
|
||||
|
||||
(not (technique (priority ?p&:(> ?p ?last)&:(< ?p ?next))))
|
||||
|
||||
=>
|
||||
|
||||
(assert (priority ?next)))
|
||||
|
||||
;;; ************
|
||||
;;; begin-output
|
||||
;;; ************
|
||||
|
||||
(defrule begin-output
|
||||
|
||||
(declare (salience -20))
|
||||
|
||||
?f <- (phase match)
|
||||
|
||||
(not (impossible))
|
||||
|
||||
(priority ?last)
|
||||
|
||||
(not (priority ?p&:(> ?p ?last)))
|
||||
|
||||
(not (technique (priority ?next&:(> ?next ?last))))
|
||||
|
||||
=>
|
||||
|
||||
(retract ?f)
|
||||
|
||||
(assert (phase final-output))
|
||||
(assert (print-position 1 1)))
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
21
samples/Common Lisp/sample.lsp
Normal file
@@ -0,0 +1,21 @@
|
||||
;;;; -*- lisp -*-
|
||||
|
||||
(in-package :foo)
|
||||
|
||||
;;; Header comment.
|
||||
(defvar *foo*)
|
||||
|
||||
(eval-when (:execute :compile-toplevel :load-toplevel)
|
||||
(defun add (x &optional y &key z)
|
||||
(declare (ignore z))
|
||||
;; Inline comment.
|
||||
(+ x (or y 1))))
|
||||
|
||||
#|
|
||||
Multi-line comment.
|
||||
|#
|
||||
|
||||
(defmacro foo (x &body b)
|
||||
(if x
|
||||
`(1+ ,x) ;After-line comment.
|
||||
42))
|
||||
29
samples/Emacs Lisp/.emacs.desktop
Normal file
@@ -0,0 +1,29 @@
|
||||
;; -*- mode: emacs-lisp; coding: emacs-mule; -*-
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Desktop File for Emacs
|
||||
;; --------------------------------------------------------------------------
|
||||
;; Created Sat Jan 3 12:46:35 2015
|
||||
;; Desktop file format version 206
|
||||
;; Emacs version 24.3.1
|
||||
|
||||
;; Global section:
|
||||
(setq desktop-missing-file-warning nil)
|
||||
(setq tags-file-name nil)
|
||||
(setq tags-table-list nil)
|
||||
(setq search-ring nil)
|
||||
(setq regexp-search-ring nil)
|
||||
(setq register-alist nil)
|
||||
(setq file-name-history nil)
|
||||
|
||||
;; Buffer section -- buffers listed in same order as in buffer list:
|
||||
(desktop-create-buffer 206
|
||||
"/home/foo/bar"
|
||||
"bar"
|
||||
'fundamental-mode
|
||||
nil
|
||||
11572
|
||||
'(11554 nil)
|
||||
nil
|
||||
nil
|
||||
'((buffer-file-coding-system . undecided-unix)))
|
||||
|
||||
133
samples/Forth/tools.4TH
Normal file
@@ -0,0 +1,133 @@
|
||||
\ -*- forth -*- Copyright 2004, 2013 Lars Brinkhoff
|
||||
|
||||
( Tools words. )
|
||||
|
||||
: .s ( -- )
|
||||
[char] < emit depth (.) ." > "
|
||||
'SP @ >r r@ depth 1- cells +
|
||||
begin
|
||||
dup r@ <>
|
||||
while
|
||||
dup @ .
|
||||
/cell -
|
||||
repeat r> 2drop ;
|
||||
|
||||
: ? @ . ;
|
||||
|
||||
: c? c@ . ;
|
||||
|
||||
: dump bounds do i ? /cell +loop cr ;
|
||||
|
||||
: cdump bounds do i c? loop cr ;
|
||||
|
||||
: again postpone branch , ; immediate
|
||||
|
||||
: see-find ( caddr -- end xt )
|
||||
>r here lastxt @
|
||||
begin
|
||||
dup 0= abort" Undefined word"
|
||||
dup r@ word= if r> drop exit then
|
||||
nip dup >nextxt
|
||||
again ;
|
||||
|
||||
: cabs ( char -- |char| ) dup 127 > if 256 swap - then ;
|
||||
|
||||
: xt. ( xt -- )
|
||||
( >name ) count cabs type ;
|
||||
|
||||
: xt? ( xt -- flag )
|
||||
>r lastxt @ begin
|
||||
?dup
|
||||
while
|
||||
dup r@ = if r> 2drop -1 exit then
|
||||
>nextxt
|
||||
repeat r> drop 0 ;
|
||||
|
||||
: disassemble ( x -- )
|
||||
dup xt? if
|
||||
( >name ) count
|
||||
dup 127 > if ." postpone " then
|
||||
cabs type
|
||||
else
|
||||
.
|
||||
then ;
|
||||
|
||||
: .addr dup . ;
|
||||
|
||||
: see-line ( addr -- )
|
||||
cr ." ( " .addr ." ) " @ disassemble ;
|
||||
|
||||
: see-word ( end xt -- )
|
||||
>r ." : " r@ xt.
|
||||
r@ >body do i see-line /cell +loop
|
||||
." ;" r> c@ 127 > if ." immediate" then ;
|
||||
|
||||
: see bl word see-find see-word cr ;
|
||||
|
||||
: #body bl word see-find >body - ;
|
||||
|
||||
: type-word ( end xt -- flag )
|
||||
xt. space drop 0 ;
|
||||
|
||||
: traverse-dictionary ( in.. xt -- out.. )
|
||||
\ xt execution: ( in.. end xt2 -- in.. 0 | in.. end xt2 -- out.. true )
|
||||
>r here lastxt @ begin
|
||||
?dup
|
||||
while
|
||||
r> 2dup >r >r execute
|
||||
if r> r> 2drop exit then
|
||||
r> dup >nextxt
|
||||
repeat r> 2drop ;
|
||||
|
||||
: words ( -- )
|
||||
['] type-word traverse-dictionary cr ;
|
||||
|
||||
\ ----------------------------------------------------------------------
|
||||
|
||||
( Tools extension words. )
|
||||
|
||||
\ ;code
|
||||
|
||||
\ assembler
|
||||
|
||||
\ in kernel: bye
|
||||
|
||||
\ code
|
||||
|
||||
\ cs-pick
|
||||
|
||||
\ cs-roll
|
||||
|
||||
\ editor
|
||||
|
||||
: forget ' dup >nextxt lastxt ! 'here ! reveal ;
|
||||
|
||||
\ Kernel: state
|
||||
|
||||
\ [else]
|
||||
|
||||
\ [if]
|
||||
|
||||
\ [then]
|
||||
|
||||
\ ----------------------------------------------------------------------
|
||||
|
||||
( Forth2012 tools extension words. )
|
||||
|
||||
\ TODO: n>r
|
||||
|
||||
\ TODO: nr>
|
||||
|
||||
\ TODO: synonym
|
||||
|
||||
: [undefined] bl-word find nip 0= ; immediate
|
||||
|
||||
: [defined] postpone [undefined] invert ; immediate
|
||||
|
||||
\ ----------------------------------------------------------------------
|
||||
|
||||
: @+ ( addr -- addr+/cell x ) dup cell+ swap @ ;
|
||||
|
||||
: !+ ( x addr -- addr+/cell ) tuck ! cell+ ;
|
||||
|
||||
: -rot swap >r swap r> ;
|
||||
161
samples/GAP/bugfix.tst
Normal file
@@ -0,0 +1,161 @@
|
||||
gap> START_TEST("Test for various former bugs");
|
||||
|
||||
|
||||
gap> # The following used to trigger an error starting with:
|
||||
gap> # "SolutionMat: matrix and vector incompatible called from"
|
||||
gap> K:=AbelianPcpGroup([3,3,3]);;
|
||||
gap> A:=Subgroup(K,[K.1]);;
|
||||
gap> cr:=CRRecordBySubgroup(K,A);;
|
||||
gap> ExtensionsCR(cr);;
|
||||
|
||||
|
||||
# Comparing homomorphisms used to be broken
|
||||
gap> K:=AbelianPcpGroup(1,[3]);;
|
||||
gap> hom1:=GroupHomomorphismByImages(K,K,[K.1],[K.1]);;
|
||||
gap> hom2:=GroupHomomorphismByImages(K,K,[K.1^2],[K.1^2]);;
|
||||
gap> hom1=hom2;
|
||||
true
|
||||
gap> hom1=IdentityMapping(K);
|
||||
true
|
||||
gap> hom2=IdentityMapping(K);
|
||||
true
|
||||
|
||||
|
||||
gap> # The following incorrectly triggered an error at some point
|
||||
gap> IsTorsionFree(ExamplesOfSomePcpGroups(5));
|
||||
true
|
||||
|
||||
|
||||
gap> # Verify IsGeneratorsOfMagmaWithInverses warnings are silenced
|
||||
gap> IsGeneratorsOfMagmaWithInverses(GeneratorsOfGroup(ExamplesOfSomePcpGroups(5)));
|
||||
true
|
||||
|
||||
|
||||
gap> # Check for a bug reported 2012-01-19 by Robert Morse
|
||||
gap> g := PcGroupToPcpGroup(SmallGroup(48,1));
|
||||
Pcp-group with orders [ 2, 2, 2, 2, 3 ]
|
||||
gap> # The next two commands used to trigger errors
|
||||
gap> NonAbelianTensorSquare(Centre(g));
|
||||
Pcp-group with orders [ 8 ]
|
||||
gap> NonAbelianExteriorSquare(Centre(g));
|
||||
Pcp-group with orders [ ]
|
||||
|
||||
|
||||
gap> # Check for a bug reported 2012-01-19 by Robert Morse
|
||||
gap> F := FreeGroup("x","y");
|
||||
<free group on the generators [ x, y ]>
|
||||
gap> x := F.1;; y := F.2;;
|
||||
gap> G := F/[x^2/y^24, y^24, y^x/y^23];
|
||||
<fp group on the generators [ x, y ]>
|
||||
gap> iso := IsomorphismPcGroup(G);
|
||||
[ x, y ] -> [ f1, f2*f5 ]
|
||||
gap> iso1 := IsomorphismPcpGroup(Image(iso));
|
||||
[ f1, f2, f3, f4, f5 ] -> [ g1, g2, g3, g4, g5 ]
|
||||
gap> G := Image(iso*iso1);
|
||||
Pcp-group with orders [ 2, 2, 2, 2, 3 ]
|
||||
gap> # The next command used to trigger an error
|
||||
gap> NonAbelianTensorSquare(Image(iso*iso1));
|
||||
Pcp-group with orders [ 2, 2, 3, 2, 2, 2, 2 ]
|
||||
|
||||
|
||||
gap> # The problem with the previous example is/was that Igs(G)
|
||||
gap> # is set to a non-standard value:
|
||||
gap> Igs(G);
|
||||
[ g1, g2*g5, g3*g4*g5^2, g4*g5, g5 ]
|
||||
gap> # Unfortunately, it seems that a lot of code that
|
||||
gap> # really should be using Ngs or Cgs is using Igs incorrectly.
|
||||
gap> # For example, direct products could return *invalid* embeddings:
|
||||
gap> D := DirectProduct(G, G);
|
||||
Pcp-group with orders [ 2, 2, 2, 2, 3, 2, 2, 2, 2, 3 ]
|
||||
gap> hom:=Embedding(D,1);;
|
||||
gap> mapi:=MappingGeneratorsImages(hom);;
|
||||
gap> GroupHomomorphismByImages(Source(hom),Range(hom),mapi[1],mapi[2]) <> fail;
|
||||
true
|
||||
gap> hom:=Projection(D,1);;
|
||||
gap> mapi:=MappingGeneratorsImages(hom);;
|
||||
gap> GroupHomomorphismByImages(Source(hom),Range(hom),mapi[1],mapi[2]) <> fail;
|
||||
true
|
||||
|
||||
|
||||
gap> # Check for bug computing Schur extension of infinite cyclic groups,
|
||||
gap> # found by Max Horn 2012-05-25
|
||||
gap> G:=AbelianPcpGroup(1,[0]);
|
||||
Pcp-group with orders [ 0 ]
|
||||
gap> # The next command used to trigger an error
|
||||
gap> SchurExtension(G);
|
||||
Pcp-group with orders [ 0 ]
|
||||
|
||||
|
||||
gap> # Check for bug computing Schur extensions of subgroups, found by MH 2012-05-25.
|
||||
gap> G:=HeisenbergPcpGroup(2);
|
||||
Pcp-group with orders [ 0, 0, 0, 0, 0 ]
|
||||
gap> H:=Subgroup(G,[G.2^3*G.3^2, G.1^9]);
|
||||
Pcp-group with orders [ 0, 0, 0 ]
|
||||
gap> # The next command used to trigger an error
|
||||
gap> SchurExtension(H);
|
||||
Pcp-group with orders [ 0, 0, 0, 0, 0, 0 ]
|
||||
|
||||
|
||||
gap> # Check for bug computing Schur extensions of subgroups, found by MH 2012-05-25.
|
||||
gap> G:=HeisenbergPcpGroup(2);
|
||||
Pcp-group with orders [ 0, 0, 0, 0, 0 ]
|
||||
gap> H:=Subgroup(G,[G.1, G.2]);
|
||||
Pcp-group with orders [ 0, 0 ]
|
||||
gap> # The next command used to trigger an error
|
||||
gap> SchurExtension(H);
|
||||
Pcp-group with orders [ 0, 0, 0 ]
|
||||
|
||||
|
||||
gap> # Check for bug computing normalizer of two subgroups, found by MH 2012-05-30.
|
||||
gap> # The problem was caused by incorrect resp. overly restrictive use of Parent().
|
||||
gap> G:=HeisenbergPcpGroup(2);
|
||||
Pcp-group with orders [ 0, 0, 0, 0, 0 ]
|
||||
gap> A:=Subgroup(Subgroup(G,[G.2,G.3,G.4,G.5]), [G.3]);
|
||||
Pcp-group with orders [ 0 ]
|
||||
gap> B:=Subgroup(Subgroup(G,[G.1,G.4,G.5]), [G.4]);
|
||||
Pcp-group with orders [ 0 ]
|
||||
gap> Normalizer(A,B);
|
||||
Pcp-group with orders [ 0 ]
|
||||
gap> # The following used to trigger the error "arguments must have a common parent group"
|
||||
gap> Normalizer(B,A);
|
||||
Pcp-group with orders [ 0 ]
|
||||
|
||||
|
||||
gap> # In polycyclic 2.9 and 2.10, the code for 2-cohomology computations was broken.
|
||||
gap> G := UnitriangularPcpGroup(3,0);
|
||||
Pcp-group with orders [ 0, 0, 0 ]
|
||||
gap> mats := G!.mats;
|
||||
[ [ [ 1, 1, 0 ], [ 0, 1, 0 ], [ 0, 0, 1 ] ],
|
||||
[ [ 1, 0, 0 ], [ 0, 1, 1 ], [ 0, 0, 1 ] ],
|
||||
[ [ 1, 0, 1 ], [ 0, 1, 0 ], [ 0, 0, 1 ] ] ]
|
||||
gap> C := CRRecordByMats(G,mats);;
|
||||
gap> cc := TwoCohomologyCR(C);;
|
||||
gap> cc.factor.rels;
|
||||
[ 2, 0, 0 ]
|
||||
gap> c := cc.factor.prei[2];
|
||||
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, -1, 1 ]
|
||||
gap> cc.gcb;
|
||||
[ [ 0, 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
|
||||
[ 0, 0, -1, 0, 0, 0, 0, 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 0 ],
|
||||
[ 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, -1 ],
|
||||
[ -1, 0, 1, 1, 0, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 ],
|
||||
[ 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 1 ] ]
|
||||
gap> cc.gcc;
|
||||
[ [ 1, 0, 0, 0, 0, -2, -1, 0, 1, 1, -1, -1, 0, 0, 0, 0, 0, 0 ],
|
||||
[ 0, 1, 0, 0, -1, -1, 0, 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 0 ],
|
||||
[ 0, 0, 1, 0, 0, -2, 0, 0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 0 ],
|
||||
[ 0, 0, 0, 1, 0, 0, -1, -1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 ],
|
||||
[ 0, 0, 0, 0, 0, 1, 0, 0, -1, 0, 0, 1, 0, 0, 0, 0, 0, 0 ],
|
||||
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, -1, 1 ],
|
||||
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, -1 ] ]
|
||||
|
||||
|
||||
gap> # LowerCentralSeriesOfGroup for non-nilpotent pcp-groups used to trigger
|
||||
gap> # an infinite recursion
|
||||
gap> G := PcGroupToPcpGroup(SmallGroup(6,1));
|
||||
Pcp-group with orders [ 2, 3 ]
|
||||
gap> LowerCentralSeriesOfGroup(G);
|
||||
[ Pcp-group with orders [ 2, 3 ], Pcp-group with orders [ 3 ] ]
|
||||
|
||||
|
||||
gap> STOP_TEST( "bugfix.tst", 10000000);
|
||||
21
samples/GAP/factor.tst
Normal file
@@ -0,0 +1,21 @@
|
||||
gap> START_TEST("Test of factor groups and natural homomorphisms");
|
||||
|
||||
gap> G:=HeisenbergPcpGroup(2);
|
||||
Pcp-group with orders [ 0, 0, 0, 0, 0 ]
|
||||
|
||||
gap> H:=Subgroup(G,[G.2,G.3,G.4,G.5]);
|
||||
gap> K:=G/H;
|
||||
gap> NaturalHomomorphism(K);
|
||||
|
||||
gap> A:=Subgroup(H, [G.3]);
|
||||
Pcp-group with orders [ 0 ]
|
||||
gap> B:=Subgroup(Subgroup(G,[G.1,G.4,G.5]), [G.4]);
|
||||
Pcp-group with orders [ 0 ]
|
||||
gap> Normalizer(A,B);
|
||||
Pcp-group with orders [ 0 ]
|
||||
gap> # The following used to trigger the error "arguments must have a common parent group"
|
||||
gap> Normalizer(B,A);
|
||||
Pcp-group with orders [ 0 ]
|
||||
|
||||
|
||||
gap> STOP_TEST( "factor.tst", 10000000);
|
||||
1157
samples/Go/api.pb.go
Normal file
File diff suppressed because it is too large
69
samples/Inno Setup/expat.iss
Normal file
@@ -0,0 +1,69 @@
|
||||
; Basic setup script for the Inno Setup installer builder. For more
|
||||
; information on the free installer builder, see www.jrsoftware.org.
|
||||
;
|
||||
; This script was contributed by Tim Peters.
|
||||
; It was designed for Inno Setup 2.0.19 but works with later versions as well.
|
||||
|
||||
[Setup]
|
||||
AppName=Expat
|
||||
AppId=expat
|
||||
AppVersion=2.1.0
|
||||
AppVerName=Expat 2.1.0
|
||||
AppCopyright=Copyright 1998-2012 Thai Open Source Software Center, Clark Cooper, and the Expat maintainers
|
||||
AppPublisher=The Expat Developers
|
||||
AppPublisherURL=http://www.libexpat.org/
|
||||
AppSupportURL=http://www.libexpat.org/
|
||||
AppUpdatesURL=http://www.libexpat.org/
|
||||
UninstallDisplayName=Expat XML Parser 2.1.0
|
||||
VersionInfoVersion=2.1.0
|
||||
|
||||
DefaultDirName={pf}\Expat 2.1.0
|
||||
UninstallFilesDir={app}\Uninstall
|
||||
|
||||
Compression=lzma
|
||||
SolidCompression=yes
|
||||
SourceDir=..
|
||||
OutputDir=win32
|
||||
DisableStartupPrompt=yes
|
||||
AllowNoIcons=yes
|
||||
DisableProgramGroupPage=yes
|
||||
DisableReadyPage=yes
|
||||
|
||||
[Files]
|
||||
Flags: ignoreversion; Source: win32\bin\Release\xmlwf.exe; DestDir: "{app}\Bin"
|
||||
Flags: ignoreversion; Source: win32\MANIFEST.txt; DestDir: "{app}"
|
||||
Flags: ignoreversion; Source: Changes; DestDir: "{app}"; DestName: Changes.txt
|
||||
Flags: ignoreversion; Source: COPYING; DestDir: "{app}"; DestName: COPYING.txt
|
||||
Flags: ignoreversion; Source: README; DestDir: "{app}"; DestName: README.txt
|
||||
Flags: ignoreversion; Source: doc\*.html; DestDir: "{app}\Doc"
|
||||
Flags: ignoreversion; Source: doc\*.css; DestDir: "{app}\Doc"
|
||||
Flags: ignoreversion; Source: doc\*.png; DestDir: "{app}\Doc"
|
||||
Flags: ignoreversion; Source: win32\bin\Release\*.dll; DestDir: "{app}\Bin"
|
||||
Flags: ignoreversion; Source: win32\bin\Release\*.lib; DestDir: "{app}\Bin"
|
||||
Flags: ignoreversion; Source: expat.dsw; DestDir: "{app}\Source"
|
||||
Flags: ignoreversion; Source: win32\README.txt; DestDir: "{app}\Source"
|
||||
Flags: ignoreversion; Source: bcb5\*.bp*; DestDir: "{app}\Source\bcb5"
|
||||
Flags: ignoreversion; Source: bcb5\*.mak; DestDir: "{app}\Source\bcb5"
|
||||
Flags: ignoreversion; Source: bcb5\*.def; DestDir: "{app}\Source\bcb5"
|
||||
Flags: ignoreversion; Source: bcb5\*.txt; DestDir: "{app}\Source\bcb5"
|
||||
Flags: ignoreversion; Source: bcb5\*.bat; DestDir: "{app}\Source\bcb5"
|
||||
Flags: ignoreversion; Source: lib\*.c; DestDir: "{app}\Source\lib"
|
||||
Flags: ignoreversion; Source: lib\*.h; DestDir: "{app}\Source\lib"
|
||||
Flags: ignoreversion; Source: lib\*.def; DestDir: "{app}\Source\lib"
|
||||
Flags: ignoreversion; Source: lib\*.dsp; DestDir: "{app}\Source\lib"
|
||||
Flags: ignoreversion; Source: examples\*.c; DestDir: "{app}\Source\examples"
|
||||
Flags: ignoreversion; Source: examples\*.dsp; DestDir: "{app}\Source\examples"
|
||||
Flags: ignoreversion; Source: tests\*.c; DestDir: "{app}\Source\tests"
|
||||
Flags: ignoreversion; Source: tests\*.cpp; DestDir: "{app}\Source\tests"
|
||||
Flags: ignoreversion; Source: tests\*.h; DestDir: "{app}\Source\tests"
|
||||
Flags: ignoreversion; Source: tests\README.txt; DestDir: "{app}\Source\tests"
|
||||
Flags: ignoreversion; Source: tests\benchmark\*.c; DestDir: "{app}\Source\tests\benchmark"
|
||||
Flags: ignoreversion; Source: tests\benchmark\*.ds*; DestDir: "{app}\Source\tests\benchmark"
|
||||
Flags: ignoreversion; Source: tests\benchmark\README.txt; DestDir: "{app}\Source\tests\benchmark"
|
||||
Flags: ignoreversion; Source: xmlwf\*.c*; DestDir: "{app}\Source\xmlwf"
|
||||
Flags: ignoreversion; Source: xmlwf\*.h; DestDir: "{app}\Source\xmlwf"
|
||||
Flags: ignoreversion; Source: xmlwf\*.dsp; DestDir: "{app}\Source\xmlwf"
|
||||
|
||||
[Messages]
|
||||
WelcomeLabel1=Welcome to the Expat XML Parser Setup Wizard
|
||||
WelcomeLabel2=This will install [name/ver] on your computer.%n%nExpat is an XML parser with a C-language API, and is primarily made available to allow developers to build applications which use XML using a portable API and fast implementation.%n%nIt is strongly recommended that you close all other applications you have running before continuing. This will help prevent any conflicts during the installation process.
|
||||
73
samples/J/stwij.ijs
Normal file
@@ -0,0 +1,73 @@
|
||||
NB. From "Continuing to write in J".
|
||||
NB. See http://www.jsoftware.com/help/jforc/continuing_to_write_in_j.htm
|
||||
|
||||
empno=: 316 317 319 320
|
||||
payrate=: 60 42 44 54
|
||||
billrate=: 120 90 90 108
|
||||
clientlist=: 10011 10012 10025
|
||||
emp_client=: 10012 10025 10012 10025
|
||||
hoursworked=: 4 31 $ 8 0 3 10 9 8 8 9 4 0 8 7 10 10 12 9 0 6 8 9 9 9 0 0 10 11 9 7 10 2 0 8 0 0 9 9 8 9 10 0 0 8 8 10 7 10 0 0 7 8 9 8 9 0 4 9 8 9 8 9 0 0 5 0 0 8 9 9 9 9 0 0 8 7 0 0 9 0 2 10 10 9 11 8 0 0 8 9 10 8 9 0 0 9 0 0 9 10 8 6 6 8 0 9 8 10 6 9 7 0 6 8 8 8 9 0 5 8 9 8 8 12 0 0
|
||||
|
||||
NB. Finds the number of hours each employee worked in the given month.
|
||||
emphours=: 3 : '+/"1 hoursworked'
|
||||
|
||||
NB. Determines the wages earned by each employee in the given month.
|
||||
empearnings=: 3 : 'payrate * +/"1 hoursworked'
|
||||
|
||||
NB. Determines the profit brought in by each employee.
|
||||
empprofit=: 3 : 0
|
||||
(billrate - payrate) * +/"1 hoursworked
|
||||
)
|
||||
|
||||
NB. Returns the amount to bill a given client.
|
||||
billclient=: 3 : 0
|
||||
mask=. emp_client = y
|
||||
+/ (mask # billrate) * +/"1 mask # hoursworked
|
||||
)
|
||||
|
||||
NB. Finds for each day of the month the employee who billed the most hours.
|
||||
dailydrudge=: 3 : 0
|
||||
((|: hoursworked) i."1 0 >./ hoursworked) { empno
|
||||
)
|
||||
|
||||
NB. Returns the employees, in descending order of the profit brought in by each.
|
||||
producers=: 3 : 'empno \: empprofit 0'
|
||||
|
||||
NB. Returns the clients, in descending order of the profit generated by each.
|
||||
custbyprofit=: 3 : 0
|
||||
clientlist \: +/ (clientlist ="1 0 emp_client) * empprofit 0
|
||||
)
|
||||
|
||||
NB. Calculates withholding tax on each employee's earnings.
|
||||
renderuntocaesar=: 3 : 0
|
||||
bktmin=. 0 6000 10000 20000 NB. Four brackets, 0..6000..10000..20000.._
|
||||
bktrate=. 0.05 0.10 0.20 0.30
|
||||
bktearns=. 0 >. ((1 |.!._ bktmin) <."1 0 empearnings'') -"1 bktmin
|
||||
+/"1 bktrate *"1 bktearns
|
||||
)
|
||||
|
||||
NB. Main
|
||||
|
||||
echo 'Problem 1'
|
||||
echo emphours''
|
||||
|
||||
echo 'Problem 2'
|
||||
echo empearnings''
|
||||
|
||||
echo 'Problem 3'
|
||||
echo empprofit''
|
||||
|
||||
echo 'Problem 4'
|
||||
echo billclient 10025
|
||||
|
||||
echo 'Problem 5'
|
||||
echo dailydrudge''
|
||||
|
||||
echo 'Problem 6'
|
||||
echo producers''
|
||||
|
||||
echo 'Problem 7'
|
||||
echo custbyprofit''
|
||||
|
||||
echo 'Problem 8'
|
||||
echo 0j2 ": renderuntocaesar''
|
||||
344
samples/Mathematica/HeyexImport.m
Normal file
@@ -0,0 +1,344 @@
|
||||
(* Mathematica Package *)
|
||||
(* Created with IntelliJ IDEA and the Mathematica Language plugin *)
|
||||
|
||||
(* :Title: Importer for the RAW data-format of the Heidelberg Eye Explorer (known as HEYEX) *)
|
||||
|
||||
(* :Context: HeyexImport` *)
|
||||
|
||||
(* :Author: Patrick Scheibe pscheibe@trm.uni-leipzig.de *)
|
||||
|
||||
(* :Package Version: 1.0 *)
|
||||
|
||||
(* :Mathematica Version: 8.0 *)
|
||||
|
||||
(* :Copyright: Patrick Scheibe, 2013-2015 *)
|
||||
|
||||
(* :Discussion: This package registers a new importer which can load the RAW data-format exported by a
|
||||
Heidelberg Spectralis OCT. The import-functionality can access different information contained
|
||||
in a file:
|
||||
1. The file header which contains meta data like when the patient was scanned etc
|
||||
2. The scanned volume data
|
||||
3. Images which represent slices of the scanned volume
|
||||
4. The Scanning laser ophthalmoscopy (SLO) image which is taken with every scanned patient
|
||||
5. The segmentation data for different retina layers provided by the software
|
||||
|
||||
*)
|
||||
|
||||
(* :Keywords: Import, Heyex, OCT, Spectralis, Heidelberg Engineering *)
|
||||
|
||||
BeginPackage[ "HeyexImport`" ]
|
||||
|
||||
HeyexEyePosition::usage = "HeyexEyePosition[file] tries to extract which eye was scanned, left or right.";
|
||||
|
||||
HeyexImport::wrongHdr = "Error importing OCT data. Broken/Wrong file?";
|
||||
|
||||
|
||||
Begin[ "`Private`" ];
|
||||
|
||||
(*
|
||||
Registration of all import possibilities for the Heidelberg OCT.
|
||||
*)
|
||||
|
||||
ImportExport`RegisterImport[
|
||||
"Heyex" ,
|
||||
{
|
||||
"FileHeader" :> importHeader,
|
||||
{ "Data" , n_Integer} :> (importData[n][##]&),
|
||||
"Data" :> importData,
|
||||
{ "Images" , n_Integer} :> (importImages[n][##]&),
|
||||
"Images" :> importImages,
|
||||
"SLOImage" :> importSLOImage,
|
||||
"SegmentationData" :> importSegmentation,
|
||||
{ "SegmentationData" , n_Integer} :> (importSegmentation[n][##]&),
|
||||
"DataSize" :> importDataSize,
|
||||
importData
|
||||
},
|
||||
|
||||
{
|
||||
"Image3D" :> (Image3D["Data" /. #1]&)
|
||||
},
|
||||
|
||||
"AvailableElements" -> {"FileHeader", "Data", "DataSize", "Images", "SLOImage", "SegmentationData", "Image3D"}
|
||||
];
|
||||
|
||||
|
||||
If[Quiet[Check[TrueQ[Compile[{}, 0, CompilationTarget -> "C"][] == 0], False]],
|
||||
$compileTarget = CompilationTarget -> "C",
|
||||
$compileTarget = CompilationTarget -> "MVM"
|
||||
];
|
||||
|
||||
|
||||
(*
|
||||
Helper function which reads data from a stream. This is
|
||||
only a unification so I can map the read function over a
|
||||
list.
|
||||
*)
|
||||
read[{id_String, type_String}, str_] :=
|
||||
id -> BinaryRead[str, type];
|
||||
read[{type_String, n_Integer}, str_] := BinaryReadList[str, type, n];
|
||||
read[{id_String, {type_String, n_Integer}}, str_] := id -> BinaryReadList[str, type, n];
|
||||
(*
|
||||
Note that when reading bytes explicitly I convert them to
|
||||
a string and remove any zeroes at the end.
|
||||
*)
|
||||
read[{id_String, { "Byte" , n_Integer}}, str_] :=
|
||||
id -> StringJoin[
|
||||
FromCharacterCode /@ (Rest[
|
||||
NestList[BinaryRead[str, "Byte" ] &, Null,
|
||||
n]] /. {chars___Integer, Longest[0 ...]} :> {chars})];
|
||||
|
||||
(*
|
||||
The layout of a file exported with "Raw Export"
|
||||
|
||||
*****************
|
||||
* File Header *
|
||||
*****************
|
||||
* SLO Image *
|
||||
*****************
|
||||
* B-Scan #0 *
|
||||
*****************
|
||||
* ..... *
|
||||
*****************
|
||||
* B-Scan #n-1 *
|
||||
*****************
|
||||
*)
|
||||
|
||||
With[{i = "Integer32", f = "Real32", d = "Real64", b = "Byte"},
|
||||
|
||||
$fileHeaderInfo = Transpose[{
|
||||
{
|
||||
"Version" , "SizeX" , "NumBScans" , "SizeZ" , "ScaleX" , "Distance" ,
|
||||
"ScaleZ" , "SizeXSlo" , "SizeYSlo" , "ScaleXSlo" , "ScaleYSlo" ,
|
||||
"FieldSizeSlo" , "ScanFocus" , "ScanPosition" , "ExamTime" ,
|
||||
"ScanPattern" , "BScanHdrSize" , "ID" , "ReferenceID" , "PID" ,
|
||||
"PatientID" , "Padding" , "DOB" , "VID" , "VisitID" , "VisitDate" ,
|
||||
"Spare"
|
||||
},
|
||||
{
|
||||
{b, 12}, i, i, i, d, d, d, i, i, d, d, i, d, {b, 4}, {i, 2}, i, i,
|
||||
{b, 16}, {b, 16}, i, {b, 21}, {b, 3}, d, i, {b, 24}, d, {b, 1840}
|
||||
}
|
||||
}];
|
||||
|
||||
$bScanHeaderInfo = Transpose[{
|
||||
{
|
||||
"Version" , "BScanHdrSize" , "StartX" , "StartY" , "EndX" , "EndY" ,
|
||||
"NumSeg" , "OffSeg" , "Quality" , "Spare"
|
||||
},
|
||||
{{b, 12}, i, d, d, d, d, i, i, f, {b, 196}}
|
||||
}];
|
||||
];
|
||||
|
||||
|
||||
isHeyexRawFormat[{"Version" -> version_String, "SizeX" -> _Integer, "NumBScans" -> _Integer, _Rule..}] /; StringMatchQ[version, "HSF-OCT" ~~__] := True ;
|
||||
isHeyexRawFormat[___] := False;
|
||||
|
||||
readFileHeader[str_InputStream] := With[{hdr = Quiet[read[#, str]] & /@ $fileHeaderInfo},
|
||||
hdr /; TrueQ[isHeyexRawFormat[hdr]]
|
||||
];
|
||||
readFileHeader[___] := (Message[HeyexImport::wrongHdr]; Throw[$Failed]);
|
||||
|
||||
|
||||
(* Reads the camera image of the retina. Note that you must have the
|
||||
information from the fileheader and you must be at the right position
|
||||
of the file stream for this.*)
|
||||
readSLOImage[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Image[Partition[
|
||||
BinaryReadList[str, "Byte" , "SizeXSlo" * "SizeYSlo" /. fileHdr],
|
||||
"SizeXSlo" /. fileHdr], "Byte" ];
|
||||
|
||||
skipSLOImage[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Skip[str, "Byte" , "SizeXSlo" * "SizeYSlo" /. fileHdr];
|
||||
|
||||
|
||||
(* One single BScan consists itself again of a header and a data part *)
|
||||
readBScanHeader[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Module[{i = "Integer32", f = "Real32", d = "Real64", b = "Byte",
|
||||
bScanHdr},
|
||||
bScanHdr = read[#, str] & /@ Transpose[{
|
||||
{ "Version" , "BScanHdrSize" , "StartX" , "StartY" , "EndX" , "EndY" ,
|
||||
"NumSeg" , "OffSeg" , "Quality" , "Spare" },
|
||||
{{b, 12}, i, d, d, d, d, i, i, f, {b, 196}}}
|
||||
];
|
||||
AppendTo[bScanHdr,
|
||||
read[{ "SegArray" , { "Real32" ,
|
||||
"NumSeg" * "SizeX" /. bScanHdr /. fileHdr}}, str]
|
||||
];
|
||||
(*
|
||||
This is horribly slow, therefore I just skip the fill bytes
|
||||
|
||||
AppendTo[bScanHdr,
|
||||
read[{"Fillbytes", {"Byte",
|
||||
"BScanHdrSize" - 256 - "NumSeg"*"SizeX"*4 /. bScanHdr /.
|
||||
fileHdr}}, str]
|
||||
]
|
||||
*)
|
||||
Skip[str, "Byte" , "BScanHdrSize" - 256 - "NumSeg" * "SizeX" * 4 /. bScanHdr /. fileHdr];
|
||||
AppendTo[bScanHdr, "FillBytes" -> None]
|
||||
]
|
||||
|
||||
skipBScanHeader[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Skip[str, "Byte" , "BScanHdrSize" /. fileHdr];
|
||||
|
||||
readBScanData[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Module[{},
|
||||
Developer`ToPackedArray[
|
||||
Partition[read[{ "Real32" , "SizeX" * "SizeZ" /. fileHdr}, str],
|
||||
"SizeX" /. fileHdr]]
|
||||
];
|
||||
|
||||
skipBScanData[str_InputStream, fileHdr : {(_String -> _) ..}] :=
|
||||
Skip[str, "Byte" , "SizeX" * "SizeZ" * 4 /. fileHdr];
|
||||
|
||||
skipBScanBlocks[str_InputStream, fileHdr : {(_String -> _) ..}, n_Integer] :=
|
||||
Skip[str, "Byte" , n * ("BScanHdrSize" + "SizeX" * "SizeZ" * 4) /. fileHdr];
|
||||
|
||||
|
||||
importHeader[filename_String, ___] := Module[
|
||||
{str, header},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
Close[str];
|
||||
"FileHeader" -> header
|
||||
];
|
||||
|
||||
|
||||
(* Imports the dimension of the scanned volume. *)
|
||||
importDataSize[filename_String, r___] := Module[{header = importHeader[filename]},
|
||||
"DataSize" -> ({"NumBScans", "SizeZ", "SizeXSlo"} /. ("FileHeader" /. header))
|
||||
]
|
||||
|
||||
importSLOImage[filename_String, ___] := Module[
|
||||
{str, header, slo},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
slo = readSLOImage[str, header];
|
||||
Close[str];
|
||||
"SLOImage" -> slo
|
||||
]
|
||||
|
||||
importData[filename_String, ___] := Module[
|
||||
{str, header, nx, n, data},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
{nx, n} = { "SizeX" , "SizeX" * "SizeZ"} /. header;
|
||||
skipSLOImage[str, header];
|
||||
data = Table[
|
||||
skipBScanHeader[str, header];
|
||||
Partition[read[{ "Real32" , n}, str], nx],
|
||||
{"NumBScans" /. header}
|
||||
];
|
||||
Close[str];
|
||||
"Data" -> Developer`ToPackedArray[data]
|
||||
];
|
||||
|
||||
importData[num_Integer][filename_String, ___] := Module[
|
||||
{str, header, nx, n, data},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
{nx, n} = { "SizeX" , "SizeX" * "SizeZ"} /. header;
|
||||
skipSLOImage[str, header];
|
||||
skipBScanBlocks[str, header, Max[Min["NumBScans" /. header, num - 1], 0] ];
|
||||
skipBScanHeader[str, header];
|
||||
data = Partition[read[{ "Real32" , n}, str], nx];
|
||||
Close[str];
|
||||
{"Data" -> {num -> Developer`ToPackedArray[data]}}
|
||||
];
|
||||
|
||||
(*
|
||||
As suggested in the Heidelberg OCT Manual, the importer will adjust
|
||||
the graylevels when importing images. Since this is very time-consuming
|
||||
for the whole scanned volume, I use an optimized version of this function.
|
||||
*)
|
||||
With[{$compileTarget = $compileTarget}, $adjustGraylevelFunc := ($adjustGraylevelFunc = Compile[{{values, _Real, 2}},
|
||||
Map[Floor[255.0 * Min[Max[0.0, #], 1.0]^(0.25) + 0.5] &, values, {2}],
|
||||
RuntimeAttributes -> {Listable},
|
||||
Parallelization -> True,
|
||||
RuntimeOptions -> "Speed",
|
||||
$compileTarget
|
||||
])];
|
||||
|
||||
importImages[filename_String, ___] := Module[
|
||||
{data},
|
||||
data = "Data" /. importData[filename];
|
||||
"Images" -> (Image[#, "Byte" ]& /@ $adjustGraylevelFunc[data])
|
||||
]
|
||||
|
||||
importImages[imageNumber_Integer][filename_String, ___] := Module[
|
||||
{data},
|
||||
data = {imageNumber /. ("Data" /. importData[imageNumber][filename])};
|
||||
{"Images" -> {imageNumber -> (Image[#, "Byte" ]& @@ $adjustGraylevelFunc[data])}}
|
||||
];
|
||||
|
||||
importSegmentation[filename_String, ___] := Module[
|
||||
{str, header, data},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
skipSLOImage[str, header];
|
||||
data = Table[
|
||||
Module[{bScanHeader, t},
|
||||
{t, bScanHeader} = Timing@readBScanHeader[str, header];
|
||||
skipBScanData[str, header];
|
||||
bScanHeader
|
||||
], {"NumBScans" /. header}
|
||||
];
|
||||
Close[str];
|
||||
(*
|
||||
The BScanHeaderData contain the segmentation vectors as a single list
|
||||
of numbers. Before returning the result, I check how many segmentations
|
||||
there are inside the BScan and I transform the segmentation value list
|
||||
into separate vectors and call them "ILM", "RPE" and "NFL" as described
|
||||
in the manual
|
||||
*)
|
||||
"SegmentationData" -> Function[{bhdr},
|
||||
Block[{numVecs = "NumSeg" /. bhdr, vecNames, nx = "SizeX" /. header},
|
||||
If[numVecs > 0,
|
||||
vecNames = Take[{ "ILM" , "RPE" , "NFL" }, numVecs];
|
||||
bhdr /. ("SegArray" -> vec_) :> Sequence @@ (Rule @@@ Transpose[{vecNames, Partition[vec, nx]} ]),
|
||||
bhdr
|
||||
]
|
||||
]] /@ data
|
||||
]
|
||||
|
||||
importSegmentation[num_Integer][filename_String, ___] := Module[
|
||||
{str, header, bhdr},
|
||||
str = OpenRead[filename, BinaryFormat -> True];
|
||||
header = readFileHeader[str];
|
||||
skipSLOImage[str, header];
|
||||
skipBScanBlocks[str, header, Max[Min["NumBScans" /. header, num - 1], 0] ];
|
||||
bhdr = readBScanHeader[str, header];
|
||||
Close[str];
|
||||
(* See doc above *)
|
||||
{"SegmentationData" -> {num -> Block[
|
||||
{numVecs = "NumSeg" /. bhdr, vecNames, nx = "SizeX" /. header},
|
||||
If[ numVecs > 0,
|
||||
vecNames = Take[{ "ILM" , "RPE" , "NFL" }, numVecs];
|
||||
bhdr /. ("SegArray" -> vec_) :> Sequence @@ (Rule @@@ Transpose[{vecNames, Partition[vec, nx]} ]),
|
||||
bhdr
|
||||
]
|
||||
]
|
||||
}}
|
||||
]
|
||||
|
||||
(* Extracts which eye was scanned. This is stored in the header of the file *)
|
||||
(* OD stands for oculus dexter, which is Latin for "right eye", and OS stands
for oculus sinister, which is Latin for "left eye". *)
|
||||
HeyexEyePosition[file_String /; FileExistsQ[file]] := Module[{position},
|
||||
Check[
|
||||
position = "ScanPosition" /. Import[file, { "Heyex" , "FileHeader" }];
|
||||
Switch[
|
||||
position,
|
||||
"OD" ,
|
||||
Right,
|
||||
"OS" ,
|
||||
Left,
|
||||
_,
|
||||
$Failed
|
||||
],
|
||||
$Failed
|
||||
]
|
||||
];
|
||||
|
||||
End[]
|
||||
|
||||
EndPackage[]
|
||||
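A minimal usage sketch for the Heyex importer above (not part of the sample file): assuming the surrounding ImportExport registration, which is only partially visible in this excerpt, exposes the elements listed under "AvailableElements" as the format "Heyex", and with "scan.vol" as a purely hypothetical file name, the elements would be driven roughly like this:

(* hypothetical usage sketch; "scan.vol" is an assumed file name *)
hdr = Import["scan.vol", {"Heyex", "FileHeader"}];        (* list of header rules, e.g. "NumBScans" *)
slo = Import["scan.vol", {"Heyex", "SLOImage"}];          (* camera (SLO) image of the retina *)
vol = Import["scan.vol", {"Heyex", "Data"}];              (* packed array with one matrix per B-scan *)
seg = Import["scan.vol", {"Heyex", "SegmentationData"}];  (* per-B-scan headers incl. "ILM"/"RPE"/"NFL" vectors *)
HeyexEyePosition["scan.vol"]                              (* Right for "OD", Left for "OS" *)

This mirrors the call Import[file, {"Heyex", "FileHeader"}] that HeyexEyePosition itself makes near the end of the sample.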
46
samples/Mercury/switch_detection_bug.m
Normal file
@@ -0,0 +1,46 @@
|
||||
% This is a regression test for a bug in switch detection
|
||||
% where it was preferring incomplete switches to complete
|
||||
% one-case switches, and hence inferring the wrong determinism.
|
||||
|
||||
%------------------------------------------------------------------------------%
|
||||
|
||||
:- module switch_detection_bug.
|
||||
|
||||
:- interface.
|
||||
|
||||
:- type note ---> note(rank, modifier, octave).
|
||||
|
||||
:- type rank ---> c ; d ; e ; f ; g ; a ; b .
|
||||
|
||||
:- type modifier ---> natural ; sharp ; flat .
|
||||
|
||||
:- type octave == int.
|
||||
|
||||
:- type qualifier ---> maj ; min .
|
||||
|
||||
:- pred next_topnote(note, qualifier, note).
|
||||
:- mode next_topnote(in, in, out) is multi.
|
||||
|
||||
%------------------------------------------------------------------------------%
|
||||
|
||||
:- implementation.
|
||||
|
||||
next_topnote(note(c, _, Oct), _, note(d, natural, Oct)).
|
||||
next_topnote(note(d, _, Oct), _, note(c, natural, Oct)).
|
||||
next_topnote(note(d, _, Oct), maj, note(e, natural, Oct)).
|
||||
next_topnote(note(d, _, Oct), min, note(e, flat, Oct)).
|
||||
next_topnote(note(e, _, Oct), _, note(d, natural, Oct)).
|
||||
next_topnote(note(e, _, Oct), _, note(f, natural, Oct)).
|
||||
next_topnote(note(f, _, Oct), maj, note(e, natural, Oct)).
|
||||
next_topnote(note(f, _, Oct), min, note(e, flat, Oct)).
|
||||
next_topnote(note(g, _, Oct), _, note(f, natural, Oct)).
|
||||
next_topnote(note(g, _, Oct), min, note(a, flat, Oct)).
|
||||
next_topnote(note(g, _, Oct), maj, note(a, natural, Oct)).
|
||||
next_topnote(note(a, _, Oct), _, note(g, natural, Oct)).
|
||||
next_topnote(note(a, _, Oct), min, note(b, flat, Oct)).
|
||||
next_topnote(note(a, _, Oct), maj, note(b, natural, Oct)).
|
||||
next_topnote(note(b, _, Oct), maj, note(a, natural, Oct)).
|
||||
next_topnote(note(b, _, Oct), min, note(a, flat, Oct)).
|
||||
|
||||
%------------------------------------------------------------------------------%
|
||||
|
||||
5281
samples/Modelica/Translational.mo
Normal file
File diff suppressed because it is too large
285
samples/Modelica/modelica.mo
Normal file
@@ -0,0 +1,285 @@
|
||||
within Modelica.Electrical.Analog;
|
||||
package Sensors "Potential, voltage, current, and power sensors"
|
||||
|
||||
extends Modelica.Icons.SensorsPackage;
|
||||
|
||||
model PotentialSensor "Sensor to measure the potential"
|
||||
extends Modelica.Icons.RotationalSensor;
|
||||
|
||||
Interfaces.PositivePin p "pin to be measured" annotation (Placement(
|
||||
transformation(extent={{-110,-10},{-90,10}}, rotation=0)));
|
||||
Modelica.Blocks.Interfaces.RealOutput phi
|
||||
"Absolute voltage potential as output signal"
|
||||
annotation (Placement(transformation(extent={{100,-10},{120,10}},
|
||||
rotation=0)));
|
||||
equation
|
||||
p.i = 0;
|
||||
phi = p.v;
|
||||
annotation (
|
||||
Icon(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Text(
|
||||
extent={{-29,-11},{30,-70}},
|
||||
lineColor={0,0,0},
|
||||
textString="V"),
|
||||
Line(points={{-70,0},{-90,0}}, color={0,0,0}),
|
||||
Line(points={{100,0},{70,0}}, color={0,0,255}),
|
||||
Text(
|
||||
extent={{-150,80},{150,120}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255})}),
|
||||
Diagram(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={Line(points={{-70,0},{-96,0}}, color={0,0,0}),
|
||||
Line(points={{100,0},{70,0}}, color={0,0,255})}),
|
||||
Documentation(revisions="<html>
|
||||
<ul>
|
||||
<li><i> 1998 </i>
|
||||
by Christoph Clauss<br> initially implemented<br>
|
||||
</li>
|
||||
</ul>
|
||||
</html>", info="<html>
|
||||
<p>The potential sensor converts the voltage of a node (with respect to the ground node) into a real valued signal. It does not influence the current sum at the node whose voltage is measured; therefore, the electrical behavior is not influenced by the sensor.</p>
|
||||
</html>"));
|
||||
end PotentialSensor;
|
||||
|
||||
model VoltageSensor "Sensor to measure the voltage between two pins"
|
||||
extends Modelica.Icons.RotationalSensor;
|
||||
|
||||
Interfaces.PositivePin p "positive pin" annotation (Placement(
|
||||
transformation(extent={{-110,-10},{-90,10}}, rotation=0)));
|
||||
Interfaces.NegativePin n "negative pin" annotation (Placement(
|
||||
transformation(extent={{90,-10},{110,10}}, rotation=0)));
|
||||
Modelica.Blocks.Interfaces.RealOutput v
|
||||
"Voltage between pin p and n (= p.v - n.v) as output signal"
|
||||
annotation (Placement(transformation(
|
||||
origin={0,-100},
|
||||
extent={{10,-10},{-10,10}},
|
||||
rotation=90)));
|
||||
|
||||
equation
|
||||
p.i = 0;
|
||||
n.i = 0;
|
||||
v = p.v - n.v;
|
||||
annotation (
|
||||
Icon(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Text(
|
||||
extent={{-29,-11},{30,-70}},
|
||||
lineColor={0,0,0},
|
||||
textString="V"),
|
||||
Line(points={{-70,0},{-90,0}}, color={0,0,0}),
|
||||
Line(points={{70,0},{90,0}}, color={0,0,0}),
|
||||
Line(points={{0,-90},{0,-70}}, color={0,0,255}),
|
||||
Text(
|
||||
extent={{-150,80},{150,120}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255})}),
|
||||
Diagram(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Line(points={{-70,0},{-96,0}}, color={0,0,0}),
|
||||
Line(points={{70,0},{96,0}}, color={0,0,0}),
|
||||
Line(points={{0,-90},{0,-70}}, color={0,0,255})}),
|
||||
Documentation(revisions="<html>
|
||||
<ul>
|
||||
<li><i> 1998 </i>
|
||||
by Christoph Clauss<br> initially implemented<br>
|
||||
</li>
|
||||
</ul>
|
||||
</html>", info="<html>
|
||||
<p>The voltage sensor converts the voltage between the two connectors into a real valued signal. It does not influence the current sum at the nodes between which the voltage is measured; therefore, the electrical behavior is not influenced by the sensor.</p>
|
||||
</html>"));
|
||||
end VoltageSensor;
|
||||
|
||||
model CurrentSensor "Sensor to measure the current in a branch"
|
||||
extends Modelica.Icons.RotationalSensor;
|
||||
|
||||
Interfaces.PositivePin p "positive pin" annotation (Placement(
|
||||
transformation(extent={{-110,-10},{-90,10}}, rotation=0)));
|
||||
Interfaces.NegativePin n "negative pin" annotation (Placement(
|
||||
transformation(extent={{90,-10},{110,10}}, rotation=0)));
|
||||
Modelica.Blocks.Interfaces.RealOutput i
|
||||
"current in the branch from p to n as output signal"
|
||||
annotation (Placement(transformation(
|
||||
origin={0,-100},
|
||||
extent={{10,-10},{-10,10}},
|
||||
rotation=90)));
|
||||
|
||||
equation
|
||||
p.v = n.v;
|
||||
p.i = i;
|
||||
n.i = -i;
|
||||
annotation (
|
||||
Icon(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Text(
|
||||
extent={{-29,-11},{30,-70}},
|
||||
lineColor={0,0,0},
|
||||
textString="A"),
|
||||
Line(points={{-70,0},{-90,0}}, color={0,0,0}),
|
||||
Text(
|
||||
extent={{-150,80},{150,120}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255}),
|
||||
Line(points={{70,0},{90,0}}, color={0,0,0}),
|
||||
Line(points={{0,-90},{0,-70}}, color={0,0,255})}),
|
||||
Diagram(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={1,1}), graphics={
|
||||
Text(
|
||||
extent={{-153,79},{147,119}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255}),
|
||||
Line(points={{-70,0},{-96,0}}, color={0,0,0}),
|
||||
Line(points={{70,0},{96,0}}, color={0,0,0}),
|
||||
Line(points={{0,-90},{0,-70}}, color={0,0,255})}),
|
||||
Documentation(revisions="<html>
|
||||
<ul>
|
||||
<li><i> 1998 </i>
|
||||
by Christoph Clauss<br> initially implemented<br>
|
||||
</li>
|
||||
</ul>
|
||||
</html>", info="<html>
|
||||
<p>The current sensor converts the current flowing between the two connectors into a real valued signal. The two connectors are connected inside the sensor like a short circuit. The sensor has to be placed in series within an electrical connection. It does not influence the current sum at the connected nodes; therefore, the electrical behavior is not influenced by the sensor.</p>
|
||||
</html>"));
|
||||
end CurrentSensor;
|
||||
|
||||
model PowerSensor "Sensor to measure the power"
|
||||
|
||||
Modelica.Electrical.Analog.Interfaces.PositivePin pc
|
||||
"Positive pin, current path"
|
||||
annotation (Placement(transformation(extent={{-90,-10},{-110,10}}, rotation=
|
||||
0)));
|
||||
Modelica.Electrical.Analog.Interfaces.NegativePin nc
|
||||
"Negative pin, current path"
|
||||
annotation (Placement(transformation(extent={{110,-10},{90,10}}, rotation=0)));
|
||||
Modelica.Electrical.Analog.Interfaces.PositivePin pv
|
||||
"Positive pin, voltage path"
|
||||
annotation (Placement(transformation(extent={{-10,110},{10,90}}, rotation=0)));
|
||||
Modelica.Electrical.Analog.Interfaces.NegativePin nv
|
||||
"Negative pin, voltage path"
|
||||
annotation (Placement(transformation(extent={{10,-110},{-10,-90}}, rotation=
|
||||
0)));
|
||||
Modelica.Blocks.Interfaces.RealOutput power
|
||||
annotation (Placement(transformation(
|
||||
origin={-80,-110},
|
||||
extent={{-10,10},{10,-10}},
|
||||
rotation=270)));
|
||||
Modelica.Electrical.Analog.Sensors.VoltageSensor voltageSensor
|
||||
annotation (Placement(transformation(
|
||||
origin={0,-30},
|
||||
extent={{10,-10},{-10,10}},
|
||||
rotation=90)));
|
||||
Modelica.Electrical.Analog.Sensors.CurrentSensor currentSensor
|
||||
annotation (Placement(transformation(extent={{-50,-10},{-30,10}}, rotation=
|
||||
0)));
|
||||
Modelica.Blocks.Math.Product product
|
||||
annotation (Placement(transformation(
|
||||
origin={-30,-50},
|
||||
extent={{-10,-10},{10,10}},
|
||||
rotation=270)));
|
||||
|
||||
equation
|
||||
connect(pv, voltageSensor.p) annotation (Line(points={{0,100},{0,-20},{
|
||||
6.12323e-016,-20}}, color={0,0,255}));
|
||||
connect(voltageSensor.n, nv) annotation (Line(points={{-6.12323e-016,-40},{
|
||||
-6.12323e-016,-63},{0,-63},{0,-100}}, color={0,0,255}));
|
||||
connect(pc, currentSensor.p)
|
||||
annotation (Line(points={{-100,0},{-50,0}}, color={0,0,255}));
|
||||
connect(currentSensor.n, nc)
|
||||
annotation (Line(points={{-30,0},{100,0}}, color={0,0,255}));
|
||||
connect(currentSensor.i, product.u2) annotation (Line(points={{-40,-10},{-40,
|
||||
-30},{-36,-30},{-36,-38}}, color={0,0,127}));
|
||||
connect(voltageSensor.v, product.u1) annotation (Line(points={{10,-30},{-24,
|
||||
-30},{-24,-38}}, color={0,0,127}));
|
||||
connect(product.y, power) annotation (Line(points={{-30,-61},{-30,-80},{-80,
|
||||
-80},{-80,-110}}, color={0,0,127}));
|
||||
annotation (Icon(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={2,2}), graphics={
|
||||
Ellipse(
|
||||
extent={{-70,70},{70,-70}},
|
||||
lineColor={0,0,0},
|
||||
fillColor={255,255,255},
|
||||
fillPattern=FillPattern.Solid),
|
||||
Line(points={{0,100},{0,70}}, color={0,0,255}),
|
||||
Line(points={{0,-70},{0,-100}}, color={0,0,255}),
|
||||
Line(points={{-80,-100},{-80,0}}, color={0,0,255}),
|
||||
Line(points={{-100,0},{100,0}}, color={0,0,255}),
|
||||
Text(
|
||||
extent={{150,120},{-150,160}},
|
||||
textString="%name",
|
||||
lineColor={0,0,255}),
|
||||
Line(points={{0,70},{0,40}}, color={0,0,0}),
|
||||
Line(points={{22.9,32.8},{40.2,57.3}}, color={0,0,0}),
|
||||
Line(points={{-22.9,32.8},{-40.2,57.3}}, color={0,0,0}),
|
||||
Line(points={{37.6,13.7},{65.8,23.9}}, color={0,0,0}),
|
||||
Line(points={{-37.6,13.7},{-65.8,23.9}}, color={0,0,0}),
|
||||
Line(points={{0,0},{9.02,28.6}}, color={0,0,0}),
|
||||
Polygon(
|
||||
points={{-0.48,31.6},{18,26},{18,57.2},{-0.48,31.6}},
|
||||
lineColor={0,0,0},
|
||||
fillColor={0,0,0},
|
||||
fillPattern=FillPattern.Solid),
|
||||
Ellipse(
|
||||
extent={{-5,5},{5,-5}},
|
||||
lineColor={0,0,0},
|
||||
fillColor={0,0,0},
|
||||
fillPattern=FillPattern.Solid),
|
||||
Text(
|
||||
extent={{-29,-11},{30,-70}},
|
||||
lineColor={0,0,0},
|
||||
textString="P")}),
|
||||
Diagram(coordinateSystem(
|
||||
preserveAspectRatio=true,
|
||||
extent={{-100,-100},{100,100}},
|
||||
grid={2,2}), graphics),
|
||||
Documentation(info="<html>
|
||||
<p>This power sensor measures the instantaneous electrical power of a single-phase system and has separate voltage and current paths. The pins of the voltage path are pv and nv, the pins of the current path are pc and nc. The internal resistance of the current path is zero, the internal resistance of the voltage path is infinite.</p>
|
||||
</html>", revisions="<html>
|
||||
<ul>
|
||||
<li><i>January 12, 2006</i> by Anton Haumer implemented</li>
|
||||
</ul>
|
||||
</html>"));
|
||||
end PowerSensor;
|
||||
annotation (
|
||||
Documentation(info="<html>
|
||||
<p>This package contains potential, voltage, and current sensors. The sensors can be used to convert voltages or currents into real signal values to be connected to components of the Blocks package. The sensors are designed in such a way that they do not influence the electrical behavior.</p>
|
||||
</html>",
|
||||
revisions="<html>
|
||||
<dl>
|
||||
<dt>
|
||||
<b>Main Authors:</b>
|
||||
<dd>
|
||||
Christoph Clauß
|
||||
<<a href=\"mailto:Christoph.Clauss@eas.iis.fraunhofer.de\">Christoph.Clauss@eas.iis.fraunhofer.de</a>><br>
|
||||
André Schneider
|
||||
<<a href=\"mailto:Andre.Schneider@eas.iis.fraunhofer.de\">Andre.Schneider@eas.iis.fraunhofer.de</a>><br>
|
||||
Fraunhofer Institute for Integrated Circuits<br>
|
||||
Design Automation Department<br>
|
||||
Zeunerstraße 38<br>
|
||||
D-01069 Dresden<br>
|
||||
<p>
|
||||
<dt>
|
||||
<b>Copyright:</b>
|
||||
<dd>
|
||||
Copyright © 1998-2010, Modelica Association and Fraunhofer-Gesellschaft.<br>
|
||||
<i>The Modelica package is <b>free</b> software; it can be redistributed and/or modified
|
||||
under the terms of the <b>Modelica license</b>, see the license conditions
|
||||
and the accompanying <b>disclaimer</b> in the documentation of package
|
||||
Modelica in file \"Modelica/package.mo\".</i><br>
|
||||
<p>
|
||||
</dl>
|
||||
</html>"));
|
||||
end Sensors;
|
||||
239
samples/NewLisp/irc.lsp
Normal file
@@ -0,0 +1,239 @@
|
||||
#!/usr/bin/env newlisp
|
||||
|
||||
;; @module IRC
|
||||
;; @description a basic irc library
|
||||
;; @version early alpha! 0.1 2013-01-02 20:11:22
|
||||
;; @author cormullion
|
||||
;; Usage:
|
||||
;; (IRC:init "newlithper") ; a username/nick (not that one obviously :-)
|
||||
;; (IRC:connect "irc.freenode.net" 6667) ; irc/server
|
||||
;; (IRC:join-channel {#newlisp}) ; join a room
|
||||
;; either (IRC:read-irc-loop) ; loop - monitor only, no input
|
||||
;; or (IRC:session) ; a command-line session, end with /QUIT
|
||||
|
||||
(context 'IRC)
|
||||
(define Inickname)
|
||||
(define Ichannels)
|
||||
(define Iserver)
|
||||
(define Iconnected)
|
||||
(define Icallbacks '())
|
||||
(define Idle-time 400) ; seconds
|
||||
(define Itime-stamp) ; time since last message was processed
|
||||
|
||||
(define (register-callback callback-name callback-function)
|
||||
(println {registering callback for } callback-name { : } (sym (term callback-function) (prefix callback-function)))
|
||||
(push (list callback-name (sym (term callback-function) (prefix callback-function))) Icallbacks))
|
||||
|
||||
(define (deregister-callback callback-name)
|
||||
(println {deregistering callback for } callback-name)
|
||||
(setf (assoc "idle-event" Icallbacks) nil)
|
||||
(println {current callbacks: } Icallbacks))
|
||||
|
||||
(define (do-callback callback-name data)
|
||||
(when (set 'func (lookup callback-name Icallbacks)) ; find first callback
|
||||
(if-not (catch (apply func (list data)) 'error)
|
||||
(println {error in callback } callback-name {: } error))))
|
||||
|
||||
(define (do-callbacks callback-name data)
|
||||
(dolist (rf (ref-all callback-name Icallbacks))
|
||||
(set 'callback-entry (Icallbacks (first rf)))
|
||||
(when (set 'func (last callback-entry))
|
||||
(if-not (catch (apply func (list data)) 'error)
|
||||
(println {error in callback } callback-name {: } error)))))
|
||||
|
||||
(define (init str)
|
||||
(set 'Inickname str)
|
||||
(set 'Iconnected nil)
|
||||
(set 'Ichannels '())
|
||||
(set 'Itime-stamp (time-of-day)))
|
||||
|
||||
(define (connect server port)
|
||||
(set 'Iserver (net-connect server port))
|
||||
(net-send Iserver (format "USER %s %s %s :%s\r\n" Inickname Inickname Inickname Inickname))
|
||||
(net-send Iserver (format "NICK %s \r\n" Inickname))
|
||||
(set 'Iconnected true)
|
||||
(do-callbacks "connect" (list (list "server" server) (list "port" port))))
|
||||
|
||||
(define (identify password)
|
||||
(net-send Iserver (format "PRIVMSG nickserv :identify %s\r\n" password)))
|
||||
|
||||
(define (join-channel channel)
|
||||
(when (net-send Iserver (format "JOIN %s \r\n" channel))
|
||||
(push channel Ichannels)
|
||||
(do-callbacks "join-channel" (list (list "channel" channel) (list "nickname" Inickname)))))
|
||||
|
||||
(define (part chan)
|
||||
(if-not (empty? chan)
|
||||
; leave specified
|
||||
(begin
|
||||
(net-send Iserver (format "PART %s\r\n" chan))
|
||||
(replace channel Ichannels)
|
||||
(do-callbacks "part" (list (list "channel" channel))))
|
||||
; leave all
|
||||
(begin
|
||||
(dolist (channel Ichannels)
|
||||
(net-send Iserver (format "PART %s\r\n" channel))
|
||||
(replace channel Ichannels)
|
||||
(do-callbacks "part" (list (list "channel" channel)))))))
|
||||
|
||||
(define (do-quit message)
|
||||
(do-callbacks "quit" '()) ; chance to do stuff before quit...
|
||||
(net-send Iserver (format "QUIT :%s\r\n" message))
|
||||
(sleep 1000)
|
||||
(set 'Ichannels '())
|
||||
(close Iserver)
|
||||
(set 'Iconnected nil))
|
||||
|
||||
(define (privmsg user message)
|
||||
(net-send Iserver (format "PRIVMSG %s :%s\r\n" user message)))
|
||||
|
||||
(define (notice user message)
|
||||
(net-send Iserver (format "NOTICE %s :%s\r\n" user message)))
|
||||
|
||||
(define (send-to-server message (channel nil))
|
||||
(cond
|
||||
((starts-with message {/}) ; default command character
|
||||
(set 'the-message (replace "^/" (copy message) {} 0)) ; keep original
|
||||
(net-send Iserver (format "%s \r\n" the-message)) ; send it
|
||||
; do a quit
|
||||
(if (starts-with (lower-case the-message) "quit")
|
||||
(do-quit { enough})))
|
||||
(true
|
||||
(if (nil? channel)
|
||||
; say to all channels
|
||||
(dolist (c Ichannels)
|
||||
(net-send Iserver (format "PRIVMSG %s :%s\r\n" c message)))
|
||||
; say to specified channel
|
||||
(if (find channel Ichannels)
|
||||
(net-send Iserver (format "PRIVMSG %s :%s\r\n" channel message))))))
|
||||
(do-callbacks "send-to-server" (list (list "channel" channel) (list "message" message))))
|
||||
|
||||
(define (process-command sender command text)
|
||||
(cond
|
||||
((= sender "PING")
|
||||
(net-send Iserver (format "PONG %s\r\n" command)))
|
||||
((or (= command "NOTICE") (= command "PRIVMSG"))
|
||||
(process-message sender command text))
|
||||
((= command "JOIN")
|
||||
(set 'username (first (clean empty? (parse sender {!|:} 0))))
|
||||
(set 'channel (last (clean empty? (parse sender {!|:} 0))))
|
||||
(println {username } username { joined } channel)
|
||||
(do-callbacks "join" (list (list "channel" channel) (list "username" username))))
|
||||
(true
|
||||
nil)))
|
||||
|
||||
(define (process-message sender command text)
|
||||
(let ((username {} target {} message {}))
|
||||
(set 'username (first (clean empty? (parse sender {!|:} 0))))
|
||||
(set 'target (trim (first (clean empty? (parse text {!|:} 0)))))
|
||||
(set 'message (slice text (+ (find {:} text) 1)))
|
||||
(cond
|
||||
((starts-with message "\001")
|
||||
(process-ctcp username target message))
|
||||
((find target Ichannels)
|
||||
(cond
|
||||
((= command {PRIVMSG})
|
||||
(do-callbacks "channel-message" (list (list "channel" target) (list "username" username) (list "message" message))))
|
||||
((= command {NOTICE})
|
||||
(do-callbacks "channel-notice" (list (list "channel" target) (list "username" username) (list "message" message))))))
|
||||
((= target Inickname)
|
||||
(cond
|
||||
((= command {PRIVMSG})
|
||||
(do-callbacks "private-message" (list (list "username" username) (list "message" message))))
|
||||
((= command {NOTICE})
|
||||
(do-callbacks "private-notice" (list (list "username" username) (list "message" message))))))
|
||||
(true
|
||||
nil))))
|
||||
|
||||
(define (process-ctcp username target message)
|
||||
(cond
|
||||
((starts-with message "\001VERSION\001")
|
||||
(net-send Iserver (format "NOTICE %s :\001VERSION %s\001\r\n" username message)))
|
||||
((starts-with message "\001PING")
|
||||
(set 'data (first (rest (clean empty? (parse message { } 0)))))
|
||||
(set 'data (trim data "\001" "\001"))
|
||||
(net-send Iserver (format "NOTICE %s :\001PING %s\001\r\n" username data)))
|
||||
((starts-with message "\001ACTION")
|
||||
; (set 'data (first (rest (clean empty? (parse message { } 0)))))
|
||||
; (set 'data (join data { }))
|
||||
; (set 'data (trim data "\001" "\001"))
|
||||
(if (find target Ichannels)
|
||||
(do-callbacks "channel-action" (list (list "username" username) (list "message" message))))
|
||||
(if (= target Inickname)
|
||||
(do-callbacks "private-action" (list (list "username" username) (list "message" message)))))
|
||||
((starts-with message "\001TIME\001")
|
||||
(net-send Iserver (format "NOTICE %s:\001TIME :%s\001\r\n" username (date))))))
|
||||
|
||||
(define (parse-buffer raw-buffer)
|
||||
(let ((messages (clean empty? (parse raw-buffer "\r\n" 0)))
|
||||
(sender {} command {} text {}))
|
||||
; check for elapsed time since last activity
|
||||
(when (> (sub (time-of-day) Itime-stamp) (mul Idle-time 1000))
|
||||
(do-callbacks "idle-event")
|
||||
(set 'Itime-stamp (time-of-day)))
|
||||
(dolist (message messages)
|
||||
(set 'message-parts (parse message { }))
|
||||
(unless (empty? message-parts)
|
||||
(set 'sender (first message-parts))
|
||||
(catch (set 'command (first (rest message-parts))) 'error)
|
||||
(catch (set 'text (join (rest (rest message-parts)) { })) 'error))
|
||||
(process-command sender command text))))
|
||||
|
||||
(define (read-irc)
|
||||
(let ((buffer {}))
|
||||
(when (!= (net-peek Iserver) 0)
|
||||
(net-receive Iserver buffer 8192 "\n")
|
||||
(unless (empty? buffer)
|
||||
(parse-buffer buffer)))))
|
||||
|
||||
(define (read-irc-loop) ; monitoring
|
||||
(let ((buffer {}))
|
||||
(while Iconnected
|
||||
(read-irc)
|
||||
(sleep 1000))))
|
||||
|
||||
(define (print-raw-message data) ; example of using a callback
|
||||
(set 'raw-data (lookup "message" data))
|
||||
(set 'channel (lookup "channel" data))
|
||||
(set 'message-text raw-data)
|
||||
(println (date (date-value) 0 {%H:%M:%S }) username {> } message-text))
|
||||
|
||||
(define (print-outgoing-message data)
|
||||
(set 'raw-data (lookup "message" data))
|
||||
(set 'channel (lookup "channel" data))
|
||||
(set 'message-text raw-data)
|
||||
(println (date (date-value) 0 {%H:%M:%S }) Inickname {> } message-text))
|
||||
|
||||
(define (session); interactive terminal
|
||||
; must add callbacks to display messages
|
||||
(register-callback "channel-message" 'print-raw-message)
|
||||
(register-callback "send-to-server" 'print-outgoing-message)
|
||||
(while Iconnected
|
||||
(while (zero? (peek 0))
|
||||
(read-irc)
|
||||
(sleep 1000))
|
||||
(send-to-server (string (read-line 0))))
|
||||
(println {finished session } (date))
|
||||
(exit))
|
||||
|
||||
; end of IRC code
|
||||
|
||||
[text]
|
||||
|
||||
simple bot code:
|
||||
(load (string (env {HOME}) {/projects/programming/newlisp-projects/irc.lsp}))
|
||||
(context 'BOT)
|
||||
(define bot-name "bot")
|
||||
(define (join-channel data)
|
||||
(println {in BOT:join-channel with data: } data))
|
||||
(define (process-message data)
|
||||
????)
|
||||
(IRC:register-callback "join-channel" 'join-channel)
|
||||
(IRC:register-callback "channel-message" 'process-message)
|
||||
(IRC:register-callback "idle-event" 'do-idle-event)
|
||||
(IRC:register-callback "send-to-server" 'do-send-event)
|
||||
(IRC:init bot-name)
|
||||
(IRC:connect "irc.freenode.net" 6667)
|
||||
(IRC:join-channel {#newlisp})
|
||||
(IRC:read-irc-loop)
|
||||
[/text]
|
||||
195
samples/NewLisp/log-to-database.lisp
Normal file
@@ -0,0 +1,195 @@
|
||||
(module "sqlite3.lsp") ; loads the SQLite3 database module
|
||||
|
||||
; FUNCTIONS-------------------------------------------------
|
||||
|
||||
(define (displayln str-to-display)
|
||||
(println str-to-display)
|
||||
)
|
||||
|
||||
(define (open-database sql-db-to-open)
|
||||
(if (sql3:open (string sql-db-to-open ".db"))
|
||||
(displayln "")
|
||||
(displayln "There was a problem opening the database " sql-db-to-open ": " (sql3:error))))
|
||||
|
||||
(define (close-database)
|
||||
(if (sql3:close)
|
||||
(displayln "")
|
||||
(displayln "There was a problem closing the database: " (sql3:error))))
|
||||
|
||||
;====== SAFE-FOR-SQL ===============================================================
|
||||
; this function makes strings safe for inserting into SQL statements
|
||||
; to avoid SQL injection issues
|
||||
; it's simple right now but I will add to it later
|
||||
;===================================================================================
|
||||
(define (safe-for-sql str-sql-query)
|
||||
(if (string? str-sql-query) (begin
|
||||
(replace "&" str-sql-query "&")
|
||||
(replace "'" str-sql-query "'")
|
||||
(replace "\"" str-sql-query """)
|
||||
))
|
||||
(set 'result str-sql-query))
|
||||
|
||||
(define (query sql-text)
|
||||
(set 'sqlarray (sql3:sql sql-text)) ; results of query
|
||||
(if sqlarray
|
||||
(setq query-return sqlarray)
|
||||
(if (sql3:error)
|
||||
(displayln (sql3:error) " query problem ")
|
||||
(setq query-return nil))))
|
||||
|
||||
(define-macro (create-record)
|
||||
; first save the values
|
||||
(set 'temp-record-values nil)
|
||||
(set 'temp-table-name (first (args)))
|
||||
;(displayln "<BR>Arguments: " (args))
|
||||
(dolist (s (rest (args))) (push (eval s) temp-record-values -1))
|
||||
; now save the arguments as symbols under the context "DB"
|
||||
(dolist (s (rest (args)))
|
||||
(set 'temp-index-num (string $idx)) ; we need to number the symbols to keep them in the correct order
|
||||
(if (= (length temp-index-num) 1) (set 'temp-index-num (string "0" temp-index-num))) ; leading 0 keeps the max at 100.
|
||||
(sym (string temp-index-num s) 'DB))
|
||||
; now create the sql query
|
||||
(set 'temp-sql-query (string "INSERT INTO " temp-table-name " ("))
|
||||
;(displayln "<P>TABLE NAME: " temp-table-name)
|
||||
;(displayln "<P>SYMBOLS: " (symbols DB))
|
||||
;(displayln "<BR>VALUES: " temp-record-values)
|
||||
(dolist (d (symbols DB)) (extend temp-sql-query (rest (rest (rest (rest (rest (string d)))))) ", "))
|
||||
(set 'temp-sql-query (chop (chop temp-sql-query)))
|
||||
(extend temp-sql-query ") VALUES (")
|
||||
(dolist (q temp-record-values)
|
||||
(if (string? q) (extend temp-sql-query "'")) ; only quote if value is non-numeric
|
||||
(extend temp-sql-query (string (safe-for-sql q)))
|
||||
(if (string? q) (extend temp-sql-query "'")) ; close quote if value is non-numeric
|
||||
(extend temp-sql-query ", ")) ; all values are sanitized to avoid SQL injection
|
||||
(set 'temp-sql-query (chop (chop temp-sql-query)))
|
||||
(extend temp-sql-query ");")
|
||||
;(displayln "<p>***** SQL QUERY: " temp-sql-query)
|
||||
(displayln (query temp-sql-query)) ; actually run the query against the database
|
||||
(delete 'DB) ; we're done, so delete all symbols in the DB context.
|
||||
)
|
||||
|
||||
(define-macro (update-record)
|
||||
; first save the values
|
||||
(set 'temp-record-values nil)
|
||||
(set 'temp-table-name (first (args)))
|
||||
(set 'continue true) ; debugging
|
||||
(dolist (s (rest (args))) (push (eval s) temp-record-values -1))
|
||||
; now save the arguments as symbols under the context "D2"
|
||||
(dolist (st (rest (args)))
|
||||
(set 'temp-index-num (string $idx)) ; we need to number the symbols to keep them in the correct order
|
||||
(if (= (length temp-index-num) 1) (set 'temp-index-num (string "0" temp-index-num))) ; leading 0 keeps the max at 100.
|
||||
;(displayln "<br>SYMBOL>>>>" (string temp-index-num st) "<<<") ; debugging
|
||||
(sym (string temp-index-num st) 'D2)
|
||||
)
|
||||
(if continue (begin ; --- temporary debugging
|
||||
; now create the sql query
|
||||
(set 'temp-sql-query (string "UPDATE " temp-table-name " SET "))
|
||||
;(displayln "<P>TABLE NAME: " temp-table-name)
|
||||
;(displayln "<P>SYMBOLS: " (symbols D2))
|
||||
;(displayln "<BR>VALUES: " temp-record-values)
|
||||
(dolist (d (rest (symbols D2))) ; ignore the first argument, as it will be the ConditionColumn for later
|
||||
(extend temp-sql-query (rest (rest (rest (rest (rest (string d)))))) "=")
|
||||
(set 'q (temp-record-values (+ $idx 1)))
|
||||
(if (string? q) (extend temp-sql-query "'")) ; only quote if value is non-numeric
|
||||
(extend temp-sql-query (string (safe-for-sql q)))
|
||||
(if (string? q) (extend temp-sql-query "'")) ; close quote if value is non-numeric
|
||||
(extend temp-sql-query ", ") ; all values are sanitized to avoid SQL injection
|
||||
)
|
||||
(set 'temp-sql-query (chop (chop temp-sql-query)))
|
||||
; okay now add the ConditionColumn value
|
||||
(extend temp-sql-query (string " WHERE " (rest (rest (rest (rest (rest (string (first (symbols D2)))))))) "="))
|
||||
(if (string? (first temp-record-values)) (extend temp-sql-query "'"))
|
||||
(extend temp-sql-query (string (safe-for-sql (first temp-record-values))))
|
||||
(if (string? (first temp-record-values)) (extend temp-sql-query "'"))
|
||||
(extend temp-sql-query ";")
|
||||
;(displayln "<p>***** SQL QUERY: " temp-sql-query)
|
||||
(query temp-sql-query) ; actually run the query against the database
|
||||
(delete 'D2) ; we're done, so delete all symbols in the DB context.
|
||||
)) ; --- end temporary debugging
|
||||
)
|
||||
|
||||
(define-macro (delete-record)
|
||||
(set 'temp-table-name (first (args)))
|
||||
(set 'temp-record-values nil)
|
||||
(dolist (s (rest (args))) (push (eval s) temp-record-values -1)) ; only one value for NOW...
|
||||
(sym (first (rest (args))) 'DB) ; put the second argument (for now) into a symbol in the DB context
|
||||
; this will have to be in a dolist loop of (rest (args)) when I add more
|
||||
(set 'temp-sql-query (string "DELETE FROM " temp-table-name " WHERE "))
|
||||
(dolist (d (symbols DB)) (extend temp-sql-query (rest (rest (rest (string d))))))
|
||||
(extend temp-sql-query "=")
|
||||
; why am I doing a loop here? There should be only one value, right? But maybe for future extension...
|
||||
(dolist (q temp-record-values)
|
||||
(if (string? q) (extend temp-sql-query "'")) ; only quote if value is non-numeric
|
||||
(extend temp-sql-query (string (safe-for-sql q)))
|
||||
(if (string? q) (extend temp-sql-query "'"))) ; close quote if value is non-numeric
|
||||
(extend temp-sql-query ";")
|
||||
;(displayln "TEMP-DELETE-QUERY: " temp-sql-query)
|
||||
(query temp-sql-query)
|
||||
(delete 'DB) ; we're done, so delete all symbols in the DB context.
|
||||
)
|
||||
|
||||
(define-macro (get-record)
|
||||
(set 'temp-table-name (first (args)))
|
||||
; if you have more arguments than just the table name, they become the elements of the WHERE clause
|
||||
(if (> (length (args)) 1) (begin
|
||||
(set 'temp-record-values nil)
|
||||
(dolist (s (rest (args))) (push (eval s) temp-record-values -1)) ; only one value for NOW...
|
||||
(sym (first (rest (args))) 'DB) ; put the second argument (for now) into a symbol in the DB context
|
||||
; this will have to be in a dolist loop of (rest (args)) when I add more
|
||||
(set 'temp-sql-query (string "SELECT * FROM " temp-table-name " WHERE "))
|
||||
(dolist (d (symbols DB)) (extend temp-sql-query (rest (rest (rest (string d))))))
|
||||
(extend temp-sql-query "=")
|
||||
; why am I doing a loop here? There should be only one value, right? But maybe for future extension...
|
||||
(dolist (q temp-record-values)
|
||||
(if (string? q) (extend temp-sql-query "'")) ; only quote if value is non-numeric
|
||||
(extend temp-sql-query (string (safe-for-sql q)))
|
||||
(if (string? q) (extend temp-sql-query "'"))) ; close quote if value is non-numeric
|
||||
(extend temp-sql-query ";")
|
||||
)
|
||||
; otherwise, just get everything in that table
|
||||
(set 'temp-sql-query (string "SELECT * FROM " temp-table-name ";"))
|
||||
)
|
||||
;(displayln "TEMP-GET-QUERY: " temp-sql-query)
|
||||
(delete 'DB) ; we're done, so delete all symbols in the DB context.
|
||||
(set 'return-value (query temp-sql-query)) ; this returns a list of everything in the record
|
||||
)
|
||||
|
||||
; END FUNCTIONS ===================
|
||||
|
||||
|
||||
(open-database "SERVER-LOGS")
|
||||
(query "CREATE TABLE Logs (Id INTEGER PRIMARY KEY, IP TEXT, UserId TEXT, UserName TEXT, Date DATE, Request TEXT, Result TEXT, Size INTEGER, Referrer TEXT, UserAgent TEXT)")
|
||||
;(print (query "SELECT * from SQLITE_MASTER;"))
|
||||
(set 'access-log (read-file "/var/log/apache2/access.log"))
|
||||
(set 'access-list (parse access-log "\n"))
|
||||
(set 'max-items (integer (first (first (query "select count(*) from Logs")))))
|
||||
(println "Number of items in database: " max-items)
|
||||
(println "Number of lines in log: " (length access-list))
|
||||
(dolist (line access-list)
|
||||
(set 'line-list (parse line))
|
||||
;(println "Line# " $idx " - " line-list)
|
||||
;(println "Length of line: " (length line-list))
|
||||
(if (> (length line-list) 0) (begin
|
||||
(++ max-items)
|
||||
(set 'Id max-items) (print $idx "/" (length access-list))
|
||||
(set 'IP (string (line-list 0) (line-list 1) (line-list 2)))
|
||||
(set 'UserId (line-list 3))
|
||||
(set 'UserName (line-list 4))
|
||||
(set 'Date (line-list 5))
|
||||
(set 'Date (trim Date "["))
|
||||
(set 'Date (trim Date "]"))
|
||||
;(println "DATE: " Date)
|
||||
(set 'date-parsed (date-parse Date "%d/%b/%Y:%H:%M:%S -0700"))
|
||||
;(println "DATE-PARSED: " date-parsed)
|
||||
(set 'Date (date date-parsed 0 "%Y-%m-%dT%H:%M:%S"))
|
||||
(println " " Date)
|
||||
(set 'Request (line-list 6))
|
||||
(set 'Result (line-list 7))
|
||||
(set 'Size (line-list 8))
|
||||
(set 'Referrer (line-list 9))
|
||||
(set 'UserAgent (line-list 10))
|
||||
(create-record "Logs" Id IP UserId UserName Date Request Result Size Referrer UserAgent)
|
||||
))
|
||||
)
|
||||
(close-database)
|
||||
(exit)
|
||||
121
samples/Pascal/cwindirs.pp
Normal file
@@ -0,0 +1,121 @@
|
||||
|
||||
unit cwindirs;
|
||||
|
||||
|
||||
|
||||
|
||||
interface
|
||||
|
||||
uses
|
||||
windows,
|
||||
strings;
|
||||
|
||||
Const
|
||||
CSIDL_PROGRAMS = $0002;
|
||||
CSIDL_PERSONAL = $0005;
|
||||
CSIDL_FAVORITES = $0006;
|
||||
CSIDL_STARTUP = $0007;
|
||||
CSIDL_RECENT = $0008;
|
||||
CSIDL_SENDTO = $0009;
|
||||
CSIDL_STARTMENU = $000B;
|
||||
CSIDL_MYMUSIC = $000D;
|
||||
CSIDL_MYVIDEO = $000E;
|
||||
CSIDL_DESKTOPDIRECTORY = $0010;
|
||||
CSIDL_NETHOOD = $0013;
|
||||
CSIDL_TEMPLATES = $0015;
|
||||
CSIDL_COMMON_STARTMENU = $0016;
|
||||
CSIDL_COMMON_PROGRAMS = $0017;
|
||||
CSIDL_COMMON_STARTUP = $0018;
|
||||
CSIDL_COMMON_DESKTOPDIRECTORY = $0019;
|
||||
CSIDL_APPDATA = $001A;
|
||||
CSIDL_PRINTHOOD = $001B;
|
||||
CSIDL_LOCAL_APPDATA = $001C;
|
||||
CSIDL_COMMON_FAVORITES = $001F;
|
||||
CSIDL_INTERNET_CACHE = $0020;
|
||||
CSIDL_COOKIES = $0021;
|
||||
CSIDL_HISTORY = $0022;
|
||||
CSIDL_COMMON_APPDATA = $0023;
|
||||
CSIDL_WINDOWS = $0024;
|
||||
CSIDL_SYSTEM = $0025;
|
||||
CSIDL_PROGRAM_FILES = $0026;
|
||||
CSIDL_MYPICTURES = $0027;
|
||||
CSIDL_PROFILE = $0028;
|
||||
CSIDL_PROGRAM_FILES_COMMON = $002B;
|
||||
CSIDL_COMMON_TEMPLATES = $002D;
|
||||
CSIDL_COMMON_DOCUMENTS = $002E;
|
||||
CSIDL_COMMON_ADMINTOOLS = $002F;
|
||||
CSIDL_ADMINTOOLS = $0030;
|
||||
CSIDL_COMMON_MUSIC = $0035;
|
||||
CSIDL_COMMON_PICTURES = $0036;
|
||||
CSIDL_COMMON_VIDEO = $0037;
|
||||
CSIDL_CDBURN_AREA = $003B;
|
||||
CSIDL_PROFILES = $003E;
|
||||
|
||||
CSIDL_FLAG_CREATE = $8000;
|
||||
|
||||
Function GetWindowsSpecialDir(ID : Integer) : String;
|
||||
|
||||
implementation
|
||||
|
||||
uses
|
||||
sysutils;
|
||||
|
||||
Type
|
||||
PFNSHGetFolderPath = Function(Ahwnd: HWND; Csidl: Integer; Token: THandle; Flags: DWord; Path: PChar): HRESULT; stdcall;
|
||||
|
||||
|
||||
var
|
||||
SHGetFolderPath : PFNSHGetFolderPath = Nil;
|
||||
CFGDLLHandle : THandle = 0;
|
||||
|
||||
Procedure InitDLL;
|
||||
|
||||
Var
|
||||
pathBuf: array[0..MAX_PATH-1] of char;
|
||||
pathLength: Integer;
|
||||
begin
|
||||
{ Load shfolder.dll using a full path, in order to prevent spoofing (Mantis #18185)
|
||||
Don't bother loading shell32.dll because shfolder.dll itself redirects SHGetFolderPath
|
||||
to shell32.dll whenever possible. }
|
||||
pathLength:=GetSystemDirectory(pathBuf, MAX_PATH);
|
||||
if (pathLength>0) and (pathLength<MAX_PATH-14) then
|
||||
begin
|
||||
StrLCopy(@pathBuf[pathLength],'\shfolder.dll',MAX_PATH-pathLength-1);
|
||||
CFGDLLHandle:=LoadLibrary(pathBuf);
|
||||
|
||||
if (CFGDLLHandle<>0) then
|
||||
begin
|
||||
Pointer(ShGetFolderPath):=GetProcAddress(CFGDLLHandle,'SHGetFolderPathA');
|
||||
If @ShGetFolderPath=nil then
|
||||
begin
|
||||
FreeLibrary(CFGDLLHandle);
|
||||
CFGDllHandle:=0;
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
If (@ShGetFolderPath=Nil) then
|
||||
Raise Exception.Create('Could not determine SHGetFolderPath Function');
|
||||
end;
|
||||
|
||||
Function GetWindowsSpecialDir(ID : Integer) : String;
|
||||
|
||||
Var
|
||||
APath : Array[0..MAX_PATH] of char;
|
||||
|
||||
begin
|
||||
Result:='';
|
||||
if (CFGDLLHandle=0) then
|
||||
InitDLL;
|
||||
If (SHGetFolderPath<>Nil) then
|
||||
begin
|
||||
if SHGetFolderPath(0,ID or CSIDL_FLAG_CREATE,0,0,@APATH[0])=S_OK then
|
||||
Result:=IncludeTrailingPathDelimiter(StrPas(@APath[0]));
|
||||
end;
|
||||
end;
|
||||
|
||||
Initialization
|
||||
Finalization
|
||||
if CFGDLLHandle<>0 then
|
||||
FreeLibrary(CFGDllHandle);
|
||||
end.
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
{ $Id$ }
|
||||
{
|
||||
---------------------------------------------------------------------------
|
||||
gtkextra.pp - GTK(2) widgetset - additional gdk/gtk functions
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
This unit contains missing gdk/gtk functions and defines for certain
|
||||
versions of gtk or fpc.
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
@created(Sun Jan 28th WET 2006)
|
||||
@lastmod($Date$)
|
||||
@author(Marc Weustink <marc@@dommelstein.nl>)
|
||||
|
||||
*****************************************************************************
|
||||
This file is part of the Lazarus Component Library (LCL)
|
||||
|
||||
See the file COPYING.modifiedLGPL.txt, included in this distribution,
|
||||
for details about the license.
|
||||
*****************************************************************************
|
||||
}
|
||||
|
||||
unit GtkExtra;
|
||||
|
||||
{$mode objfpc}{$H+}
|
||||
|
||||
interface
|
||||
|
||||
{$I gtkdefines.inc}
|
||||
|
||||
{$ifdef gtk1}
|
||||
{$I gtk1extrah.inc}
|
||||
{$endif}
|
||||
|
||||
{$ifdef gtk2}
|
||||
{$I gtk2extrah.inc}
|
||||
{$endif}
|
||||
|
||||
|
||||
implementation
|
||||
|
||||
{$ifdef gtk1}
|
||||
{$I gtk1extra.inc}
|
||||
{$endif}
|
||||
|
||||
{$ifdef gtk2}
|
||||
{$I gtk2extra.inc}
|
||||
{$endif}
|
||||
|
||||
end.
|
||||
22
samples/Pascal/large.pp
Normal file
@@ -0,0 +1,22 @@
|
||||
program large;
|
||||
|
||||
const
|
||||
max = 100000000;
|
||||
|
||||
type
|
||||
tlist = array[1..max] of longint;
|
||||
|
||||
var
|
||||
data : tlist;
|
||||
i : integer;
|
||||
|
||||
begin
|
||||
|
||||
i := 0;
|
||||
while(i < max)
|
||||
do
|
||||
begin
|
||||
data[i] := 0;
|
||||
Writeln(data[i])
|
||||
end
|
||||
end.
|
||||
26
samples/Pascal/tw27294.pp
Normal file
@@ -0,0 +1,26 @@
|
||||
uses
|
||||
uw27294;
|
||||
|
||||
var
|
||||
p : procedure;
|
||||
|
||||
procedure test;
|
||||
|
||||
begin
|
||||
p:=@test;
|
||||
writeln('OK');
|
||||
end;
|
||||
|
||||
procedure global;
|
||||
begin
|
||||
p:=nil;
|
||||
test;
|
||||
p();
|
||||
end;
|
||||
|
||||
begin
|
||||
global;
|
||||
uw27294.global;
|
||||
end.
|
||||
|
||||
|
||||
3
samples/Perl/use5.pl
Normal file
@@ -0,0 +1,3 @@
|
||||
use Mojolicious::Lite;
|
||||
use 5.20.0;
|
||||
use experimental 'signatures';
|
||||
116
samples/PowerShell/ZLocation.psd1
Normal file
@@ -0,0 +1,116 @@
|
||||
#
|
||||
# Module manifest for module 'ZLocation'
|
||||
#
|
||||
# Generated by: sevoroby
|
||||
#
|
||||
# Generated on: 12/10/2014
|
||||
#
|
||||
|
||||
@{
|
||||
|
||||
# Script module or binary module file associated with this manifest.
|
||||
RootModule = 'ZLocation.psm1'
|
||||
|
||||
# Version number of this module.
|
||||
ModuleVersion = '0.1'
|
||||
|
||||
# ID used to uniquely identify this module
|
||||
GUID = '18e8ca17-7f67-4f1c-85ff-159373bf66f5'
|
||||
|
||||
# Author of this module
|
||||
Author = 'Sergei Vorobev'
|
||||
|
||||
# Company or vendor of this module
|
||||
CompanyName = 'Microsoft'
|
||||
|
||||
# Copyright statement for this module
|
||||
Copyright = '(c) 2014 Sergei Vorobev. All rights reserved.'
|
||||
|
||||
# Description of the functionality provided by this module
|
||||
# Description = ''
|
||||
|
||||
# Minimum version of the Windows PowerShell engine required by this module
|
||||
# PowerShellVersion = ''
|
||||
|
||||
# Name of the Windows PowerShell host required by this module
|
||||
# PowerShellHostName = ''
|
||||
|
||||
# Minimum version of the Windows PowerShell host required by this module
|
||||
# PowerShellHostVersion = ''
|
||||
|
||||
# Minimum version of Microsoft .NET Framework required by this module
|
||||
# DotNetFrameworkVersion = ''
|
||||
|
||||
# Minimum version of the common language runtime (CLR) required by this module
|
||||
# CLRVersion = ''
|
||||
|
||||
# Processor architecture (None, X86, Amd64) required by this module
|
||||
# ProcessorArchitecture = ''
|
||||
|
||||
# Modules that must be imported into the global environment prior to importing this module
|
||||
# RequiredModules = @()
|
||||
|
||||
# Assemblies that must be loaded prior to importing this module
|
||||
# RequiredAssemblies = @()
|
||||
|
||||
# Script files (.ps1) that are run in the caller's environment prior to importing this module.
|
||||
# ScriptsToProcess = @()
|
||||
|
||||
# Type files (.ps1xml) to be loaded when importing this module
|
||||
# TypesToProcess = @()
|
||||
|
||||
# Format files (.ps1xml) to be loaded when importing this module
|
||||
# FormatsToProcess = @()
|
||||
|
||||
# Modules to import as nested modules of the module specified in RootModule/ModuleToProcess
|
||||
NestedModules = @("ZLocation.Storage.psm1", "ZLocation.Search.psm1")
|
||||
|
||||
# Functions to export from this module
|
||||
FunctionsToExport = '*'
|
||||
|
||||
# Cmdlets to export from this module
|
||||
CmdletsToExport = '*'
|
||||
|
||||
# Variables to export from this module
|
||||
VariablesToExport = '*'
|
||||
|
||||
# Aliases to export from this module
|
||||
AliasesToExport = '*'
|
||||
|
||||
# List of all modules packaged with this module
|
||||
# ModuleList = @()
|
||||
|
||||
# List of all files packaged with this module
|
||||
# FileList = @()
|
||||
|
||||
# Private data to pass to the module specified in RootModule/ModuleToProcess. This may also contain a PSData hashtable with additional module metadata used by PowerShell.
|
||||
PrivateData = @{
|
||||
|
||||
PSData = @{
|
||||
|
||||
# Tags applied to this module. These help with module discovery in online galleries.
|
||||
# Tags = @()
|
||||
|
||||
# A URL to the license for this module.
|
||||
# LicenseUri = ''
|
||||
|
||||
# A URL to the main website for this project.
|
||||
# ProjectUri = ''
|
||||
|
||||
# A URL to an icon representing this module.
|
||||
# IconUri = ''
|
||||
|
||||
# ReleaseNotes of this module
|
||||
# ReleaseNotes = ''
|
||||
|
||||
} # End of PSData hashtable
|
||||
|
||||
} # End of PrivateData hashtable
|
||||
|
||||
# HelpInfo URI of this module
|
||||
# HelpInfoURI = ''
|
||||
|
||||
# Default prefix for commands exported from this module. Override the default prefix using Import-Module -Prefix.
|
||||
# DefaultCommandPrefix = ''
|
||||
|
||||
}
|
||||
91
samples/PowerShell/ZLocation.psm1
Normal file
@@ -0,0 +1,91 @@
|
||||
#
|
||||
# Weight function.
|
||||
#
|
||||
function Update-ZLocation([string]$path)
|
||||
{
|
||||
$now = [datetime]::Now
|
||||
if (Test-Path variable:global:__zlocation_current)
|
||||
{
|
||||
$prev = $global:__zlocation_current
|
||||
$weight = $now.Subtract($prev.Time).TotalSeconds
|
||||
Add-ZWeight ($prev.Location) $weight
|
||||
}
|
||||
|
||||
$global:__zlocation_current = @{
|
||||
Location = $path
|
||||
Time = [datetime]::Now
|
||||
}
|
||||
|
||||
# populate folder immediately after the first cd
|
||||
Add-ZWeight $path 0
|
||||
}
|
||||
|
||||
# this approach hurts `cd` performance (0.0008 sec vs 0.025 sec).
|
||||
# Consider replacing it with an OnIdle event.
|
||||
(Get-Variable pwd).attributes.Add((new-object ValidateScript { Update-ZLocation $_.Path; return $true }))
|
||||
#
|
||||
# End of weight function.
|
||||
#
|
||||
|
||||
|
||||
#
|
||||
# Tab completion.
|
||||
#
|
||||
if (Test-Path Function:\TabExpansion) {
|
||||
Rename-Item Function:\TabExpansion PreZTabExpansion
|
||||
}
|
||||
|
||||
function Get-EscapedPath
|
||||
{
|
||||
param(
|
||||
[Parameter(
|
||||
Position=0,
|
||||
Mandatory=$true,
|
||||
ValueFromPipeline=$true,
|
||||
ValueFromPipelineByPropertyName=$true)
|
||||
]
|
||||
[string]$path
|
||||
)
|
||||
|
||||
process {
|
||||
if ($path.Contains(' '))
|
||||
{
|
||||
return '"' + $path + '"'
|
||||
}
|
||||
return $path
|
||||
}
|
||||
}
|
||||
|
||||
function global:TabExpansion($line, $lastWord) {
|
||||
switch -regex ($line) {
|
||||
"^(Set-ZLocation|z) .*" {
|
||||
$arguments = $line -split ' ' | Where { $_.length -gt 0 } | select -Skip 1
|
||||
Find-Matches (Get-ZLocation) $arguments | Get-EscapedPath
|
||||
}
|
||||
default {
|
||||
if (Test-Path Function:\PreZTabExpansion) {
|
||||
PreZTabExpansion $line $lastWord
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
#
|
||||
# End of tab completion.
|
||||
#
|
||||
|
||||
function Set-ZLocation()
|
||||
{
|
||||
if (-not $args) {
|
||||
$args = @()
|
||||
}
|
||||
$matches = Find-Matches (Get-ZLocation) $args
|
||||
if ($matches) {
|
||||
Push-Location ($matches | Select-Object -First 1)
|
||||
} else {
|
||||
Write-Warning "Cannot find matching location"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Set-Alias -Name z -Value Set-ZLocation
|
||||
Export-ModuleMember -Function Set-ZLocation, Get-ZLocation -Alias z
|
||||
@@ -1,2 +0,0 @@
|
||||
# Hello world in powershell
|
||||
Write-Host 'Hello World'
|
||||
@@ -1,5 +0,0 @@
|
||||
# Hello World powershell module
|
||||
|
||||
function hello() {
|
||||
Write-Host 'Hello World'
|
||||
}
|
||||
65
samples/PowerShell/history.ps1
Normal file
@@ -0,0 +1,65 @@
|
||||
function Save-HistoryAll() {
|
||||
$history = Get-History -Count $MaximumHistoryCount
|
||||
[array]::Reverse($history)
|
||||
$history = $history | Group CommandLine | Foreach {$_.Group[0]}
|
||||
[array]::Reverse($history)
|
||||
$history | Export-Csv $historyPath
|
||||
}
|
||||
|
||||
function Save-HistoryIncremental() {
|
||||
# Get-History -Count $MaximumHistoryCount | Group CommandLine | Foreach {$_.Group[0]} | Export-Csv $historyPath
|
||||
Get-History -Count 1 | Export-Csv -Append $historyPath
|
||||
}
|
||||
|
||||
# hook powershell's exiting event & hide the registration with -supportevent.
|
||||
#Register-EngineEvent -SourceIdentifier powershell.exiting -SupportEvent -Action { Save-History }
|
||||
|
||||
$oldPrompt = Get-Content function:\prompt
|
||||
|
||||
if( $oldPrompt -notlike '*Save-HistoryIncremental*' )
|
||||
{
|
||||
$newPrompt = @'
|
||||
Save-HistoryIncremental
|
||||
|
||||
'@
|
||||
$newPrompt += $oldPrompt
|
||||
$function:prompt = [ScriptBlock]::Create($newPrompt)
|
||||
}
|
||||
|
||||
# load previous history, if it exists
|
||||
if ((Test-Path $historyPath)) {
|
||||
$loadTime =
|
||||
(
|
||||
Measure-Command {
|
||||
Import-Csv $historyPath | Add-History
|
||||
Save-HistoryAll
|
||||
Clear-History
|
||||
Import-Csv $historyPath | ? {$count++;$true} | Add-History
|
||||
}
|
||||
).totalseconds
|
||||
Write-Host -Fore Green "`nLoaded $count history item(s) in $loadTime seconds.`n"
|
||||
}
|
||||
|
||||
|
||||
function Search-History()
|
||||
{
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Retrieve and filter history based on a query
|
||||
.DESCRIPTION
|
||||
.PARAMETER Name
|
||||
.EXAMPLE
|
||||
.LINK
|
||||
#>
|
||||
|
||||
param(
|
||||
[string[]] $query
|
||||
)
|
||||
|
||||
$history = Get-History -Count $MaximumHistoryCount
|
||||
foreach ($item in $query){
|
||||
$item = $item.ToLower()
|
||||
$history = $history | where {$_.CommandLine.ToLower().Contains($item)}
|
||||
}
|
||||
$history
|
||||
}
|
||||
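A short usage sketch for the history helpers above (hypothetical commands; $historyPath and $MaximumHistoryCount are assumed to be defined elsewhere in the profile, as the excerpt implies):
# Hypothetical session built only on the functions defined above.
PS> Search-History 'git','push'   # keeps history entries whose CommandLine contains both terms
PS> Save-HistoryAll               # de-duplicates in-memory history and rewrites $historyPath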
21
samples/desktop/example.desktop
Normal file
@@ -0,0 +1,21 @@
|
||||
# http://standards.freedesktop.org/desktop-entry-spec/latest/apa.html
|
||||
|
||||
[Desktop Entry]
|
||||
Version=1.0
|
||||
Type=Application
|
||||
Name=Foo Viewer
|
||||
Comment=The best viewer for Foo objects available!
|
||||
TryExec=fooview
|
||||
Exec=fooview %F
|
||||
Icon=fooview
|
||||
MimeType=image/x-foo;
|
||||
Actions=Gallery;Create;
|
||||
|
||||
[Desktop Action Gallery]
|
||||
Exec=fooview --gallery
|
||||
Name=Browse Gallery
|
||||
|
||||
[Desktop Action Create]
|
||||
Exec=fooview --create-new
|
||||
Name=Create a new Foo!
|
||||
Icon=fooview-new
|
||||
337
samples/eC/Designer.ec
Normal file
@@ -0,0 +1,337 @@
|
||||
import "ide"
|
||||
|
||||
class Designer : DesignerBase
|
||||
{
|
||||
~Designer()
|
||||
{
|
||||
if(GetActiveDesigner() == this)
|
||||
{
|
||||
SetActiveDesigner(null);
|
||||
}
|
||||
if(classDesigner)
|
||||
delete classDesigner;
|
||||
}
|
||||
|
||||
// *** DesignerBase Implementation ***
|
||||
|
||||
void ModifyCode()
|
||||
{
|
||||
codeEditor.ModifyCode();
|
||||
}
|
||||
|
||||
void UpdateProperties()
|
||||
{
|
||||
codeEditor.DesignerModifiedObject();
|
||||
}
|
||||
|
||||
void CodeAddObject(Instance instance, ObjectInfo * object)
|
||||
{
|
||||
codeEditor.AddObject(instance, object);
|
||||
}
|
||||
|
||||
void SheetAddObject(ObjectInfo object)
|
||||
{
|
||||
codeEditor.sheet.AddObject(object, object.name, typeData, true); //className, true);
|
||||
}
|
||||
|
||||
void AddToolBoxClass(Class _class)
|
||||
{
|
||||
((IDEWorkSpace)master).toolBox.AddControl(_class);
|
||||
}
|
||||
|
||||
void AddDefaultMethod(Instance instance, Instance classInstance)
|
||||
{
|
||||
Class _class = instance._class;
|
||||
Method defaultMethod = null;
|
||||
|
||||
for( ; _class; _class = _class.base)
|
||||
{
|
||||
Method method;
|
||||
int minID = MAXINT;
|
||||
for(method = (Method)_class.methods.first; method; method = (Method)((BTNode)method).next)
|
||||
{
|
||||
if(method.type == virtualMethod)
|
||||
{
|
||||
if(!method.dataType)
|
||||
method.dataType = ProcessTypeString(method.dataTypeString, false);
|
||||
if(method.vid < minID && (instance == classInstance || (method.dataType.thisClass && eClass_IsDerived(classInstance._class, method.dataType.thisClass.registered))))
|
||||
{
|
||||
defaultMethod = method;
|
||||
minID = method.vid;
|
||||
}
|
||||
}
|
||||
}
|
||||
if(defaultMethod)
|
||||
break;
|
||||
}
|
||||
codeEditor.AddMethod(defaultMethod);
|
||||
}
|
||||
|
||||
bool ObjectContainsCode(ObjectInfo object)
|
||||
{
|
||||
// Confirmation if control contains code
|
||||
if(object.instCode)
|
||||
{
|
||||
MembersInit members;
|
||||
if(object.instCode.members)
|
||||
{
|
||||
for(members = object.instCode.members->first; members; members = members.next)
|
||||
{
|
||||
if(members.type == methodMembersInit)
|
||||
{
|
||||
//if(!Code_IsFunctionEmpty(members.function))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void DeleteObject(ObjectInfo object)
|
||||
{
|
||||
if(codeEditor)
|
||||
codeEditor.DeleteObject(object);
|
||||
}
|
||||
|
||||
void RenameObject(ObjectInfo object, const char * name)
|
||||
{
|
||||
if(object && (name || !object.classDefinition))
|
||||
codeEditor.RenameObject(object, name);
|
||||
}
|
||||
|
||||
bool FindObject(Instance * object, const char * string)
|
||||
{
|
||||
ObjectInfo classObject;
|
||||
for(classObject = codeEditor.classes.first; classObject; classObject = classObject.next)
|
||||
{
|
||||
ObjectInfo check;
|
||||
if(classObject.name && !strcmp(string, classObject.name))
|
||||
{
|
||||
*object = classObject.instance;
|
||||
break;
|
||||
}
|
||||
for(check = classObject.instances.first; check; check = check.next)
|
||||
{
|
||||
if(check.name && !strcmp(string, check.name))
|
||||
{
|
||||
*object = check.instance;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if(check)
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void SelectObjectFromDesigner(ObjectInfo object)
|
||||
{
|
||||
codeEditor.SelectObjectFromDesigner(object);
|
||||
}
|
||||
|
||||
borderStyle = sizable;
|
||||
isActiveClient = true;
|
||||
hasVertScroll = true;
|
||||
hasHorzScroll = true;
|
||||
hasClose = true;
|
||||
hasMaximize = true;
|
||||
hasMinimize = true;
|
||||
text = $"Designer";
|
||||
menu = Menu { };
|
||||
anchor = Anchor { left = 300, right = 150, top = 0, bottom = 0 };
|
||||
|
||||
ToolBox toolBox;
|
||||
CodeEditor codeEditor;
|
||||
|
||||
Menu fileMenu { menu, $"File", f };
|
||||
MenuItem fileSaveItem
|
||||
{
|
||||
fileMenu, $"Save", s, ctrlS;
|
||||
bool NotifySelect(MenuItem selection, Modifiers mods)
|
||||
{
|
||||
return codeEditor.MenuFileSave(selection, mods);
|
||||
}
|
||||
};
|
||||
MenuItem fileSaveAsItem
|
||||
{
|
||||
fileMenu, $"Save As...", a;
|
||||
bool NotifySelect(MenuItem selection, Modifiers mods)
|
||||
{
|
||||
return codeEditor.MenuFileSaveAs(selection, mods);
|
||||
}
|
||||
};
|
||||
bool debugClosing;
|
||||
|
||||
bool OnClose(bool parentClosing)
|
||||
{
|
||||
if(!parentClosing)
|
||||
{
|
||||
if(codeEditor && codeEditor.inUseDebug && !debugClosing)
|
||||
{
|
||||
debugClosing = true;
|
||||
closing = false;
|
||||
if(CloseConfirmation(false))
|
||||
{
|
||||
visible = false;
|
||||
if(modifiedDocument)
|
||||
OnFileModified({ modified = true }, null);
|
||||
}
|
||||
debugClosing = false;
|
||||
return false;
|
||||
}
|
||||
if(codeEditor && !codeEditor.closing && !debugClosing)
|
||||
{
|
||||
if(!codeEditor.visible)
|
||||
{
|
||||
if(!codeEditor.Destroy(0))
|
||||
return false;
|
||||
else
|
||||
codeEditor = null;
|
||||
}
|
||||
else
|
||||
{
|
||||
visible = false;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OnActivate(bool active, Window previous, bool * goOnWithActivation, bool direct)
|
||||
{
|
||||
if(active)
|
||||
{
|
||||
codeEditor.EnsureUpToDate();
|
||||
codeEditor.fixCaret = true;
|
||||
/*
|
||||
if(classDesigner)
|
||||
classDesigner.Activate();
|
||||
*/
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OnKeyHit(Key key, unichar ch)
|
||||
{
|
||||
return codeEditor.sheet.OnKeyHit(key, ch);
|
||||
}
|
||||
|
||||
watch(modifiedDocument)
|
||||
{
|
||||
fileSaveItem.disabled = !modifiedDocument && codeEditor.fileName;
|
||||
};
|
||||
|
||||
// *** METHODS ACCESSED FROM PROPERTY SHEET/TOOLBOX/CODE EDITOR ***
|
||||
void Reset()
|
||||
{
|
||||
if(classDesigner)
|
||||
{
|
||||
classDesigner.Reset();
|
||||
classDesigner.SelectObject(null, null);
|
||||
classDesigner.Destroy(0);
|
||||
delete classDesigner;
|
||||
}
|
||||
}
|
||||
|
||||
void FillToolBox()
|
||||
{
|
||||
if(this && classDesigner)
|
||||
classDesigner.ListToolBoxClasses(this);
|
||||
}
|
||||
|
||||
void SelectObject(ObjectInfo object, Instance instance)
|
||||
{
|
||||
ClassDesignerBase classDesigner = this.classDesigner;
|
||||
#ifdef _DEBUG
|
||||
if(instance && instance._class.module.application != codeEditor.privateModule)
|
||||
printf("warning: SelectObject: instance._class.module.application != codeEditor.privateModule\n");
|
||||
#endif
|
||||
if(!classDesigner || !instance || classDesigner._class != (Class)eInstance_GetDesigner(instance))
|
||||
{
|
||||
if(classDesigner)
|
||||
{
|
||||
classDesigner.SelectObject(null, null);
|
||||
classDesigner.Destroy(0);
|
||||
classDesigner = null;
|
||||
delete this.classDesigner;
|
||||
}
|
||||
if(instance)
|
||||
{
|
||||
this.classDesigner = classDesigner = eInstance_New(eInstance_GetDesigner(instance));
|
||||
incref classDesigner;
|
||||
//if(!classDesigner.parent)
|
||||
{
|
||||
classDesigner.parent = this;
|
||||
classDesigner.anchor = Anchor { left = 0, right = 0, top = 0, bottom = 0 };
|
||||
}
|
||||
classDesigner.Create();
|
||||
}
|
||||
}
|
||||
// Call class editor SelectObject
|
||||
if(classDesigner)
|
||||
classDesigner.SelectObject(object, instance);
|
||||
}
|
||||
|
||||
void AddObject()
|
||||
{
|
||||
// Call class editor AddObject
|
||||
if(classDesigner)
|
||||
classDesigner.AddObject();
|
||||
if(visible)
|
||||
Activate();
|
||||
else
|
||||
codeEditor.Activate();
|
||||
}
|
||||
|
||||
void CreateObject(Instance instance, ObjectInfo object, bool isClass, Instance iclass)
|
||||
{
|
||||
subclass(ClassDesignerBase) designerClass = eInstance_GetDesigner(instance);
|
||||
|
||||
// Call class editor CreateObject
|
||||
if(designerClass)
|
||||
designerClass.CreateObject(this, instance, object, isClass, iclass);
|
||||
}
|
||||
|
||||
void ::PostCreateObject(Instance instance, ObjectInfo object, bool isClass, Instance iclass)
|
||||
{
|
||||
subclass(ClassDesignerBase) designerClass = eInstance_GetDesigner(instance);
|
||||
|
||||
// Call class editor PostCreateObject
|
||||
if(designerClass)
|
||||
designerClass.PostCreateObject(instance, object, isClass, iclass);
|
||||
}
|
||||
|
||||
void ::DroppedObject(Instance instance, ObjectInfo object, bool isClass, Instance iclass)
|
||||
{
|
||||
subclass(ClassDesignerBase) designerClass = eInstance_GetDesigner(instance);
|
||||
|
||||
// Call class editor PostCreateObject
|
||||
if(designerClass)
|
||||
designerClass.DroppedObject(instance, object, isClass, iclass);
|
||||
}
|
||||
|
||||
void PrepareTestObject(Instance instance)
|
||||
{
|
||||
subclass(ClassDesignerBase) designerClass = eInstance_GetDesigner(instance);
|
||||
if(designerClass)
|
||||
designerClass.PrepareTestObject(this, instance);
|
||||
}
|
||||
|
||||
void ::DestroyObject(Instance instance)
|
||||
{
|
||||
subclass(ClassDesignerBase) designerClass = eInstance_GetDesigner(instance);
|
||||
if(designerClass)
|
||||
designerClass.DestroyObject(instance);
|
||||
}
|
||||
|
||||
void ::FixProperty(Property prop, Instance instance)
|
||||
{
|
||||
subclass(ClassDesignerBase) designerClass = eInstance_GetDesigner(instance);
|
||||
if(designerClass)
|
||||
designerClass.FixProperty(prop, instance);
|
||||
}
|
||||
}
|
||||
1
test/fixtures/Data/Modelines/example_smalltalk.md
vendored
Normal file
@@ -0,0 +1 @@
|
||||
; -*-mode:Smalltalk-*-
|
||||
1
test/fixtures/Data/Modelines/iamphp.inc
vendored
Normal file
@@ -0,0 +1 @@
|
||||
; -*- MoDe: PhP;-*-
|
||||
3
test/fixtures/Data/Modelines/not_perl.pl
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
/* vim: set filEtype=pRoloG: */
|
||||
|
||||
# I am Prolog
|
||||
3
test/fixtures/Data/Modelines/ruby
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
/* vim: set filetype=ruby: */
|
||||
|
||||
# I am Ruby
|
||||
3
test/fixtures/Data/Modelines/seeplusplus
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
/* vim: set ft=cpp: */
|
||||
|
||||
I would like to be C++ please.
|
||||
57
test/fixtures/Shell/crossbuild_liblua5.1
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
#! /bin/sh
|
||||
#
|
||||
# Builds and installs liblua5.1 for the cross toolchain.
|
||||
# Executed by build-uqm-dependencies.chroot
|
||||
|
||||
# Include our common functions
|
||||
. /usr/lib/crossbuild/crossbuild.subr
|
||||
|
||||
# envvar LIBLUA51_URL
|
||||
#
|
||||
# Specifies the URL of the liblua5.1 source tarball you want to use.
|
||||
export LIBLUA51_URL="http://www.lua.org/ftp/lua-5.1.5.tar.gz"
|
||||
|
||||
# envvar INSTALL_TOP
|
||||
#
|
||||
# This determines where lua's makefiles install everything (we don't want to use
|
||||
# /usr/local!).
|
||||
export INSTALL_TOP="/usr/${HOST_TRIPLET}"
|
||||
|
||||
# envvar TO_BIN
|
||||
#
|
||||
# Names of the binary files to install (that's right, lua's makefiles don't
|
||||
# determine this automatically, and since we end up with files named according
|
||||
# to Windows conventions the install chokes without these)
|
||||
export TO_BIN="lua.exe luac.exe"
|
||||
|
||||
# envvar TO_LIB
|
||||
#
|
||||
# Names of the libraries to install, see TO_BIN
|
||||
export TO_LIB="liblua.a lua51.dll"
|
||||
|
||||
|
||||
# liblua5.1 uses custom makefiles and does not natively support cross-building.
|
||||
# However, with our cross toolchain in its PATH it successfully builds the mingw
|
||||
# target.
|
||||
export PATH=/usr/${HOST_TRIPLET}/bin:${PATH}
|
||||
|
||||
echo "*************************************************************************"
|
||||
echo "--- BEGIN: crossbuild_liblua5.1 ---"
|
||||
|
||||
get_tarball "liblua5.1" "${LIBLUA51_URL}" gz
|
||||
|
||||
cd ${SRC_ROOT_DIR}/liblua5.1/*
|
||||
|
||||
if [ -f Makefile ]; then
|
||||
make clean
|
||||
make --environment-overrides mingw install
|
||||
|
||||
|
||||
else
|
||||
echo "crossbuild_liblua5.1 failed: Could not find Makefile"
|
||||
echo "(is the liblua5.1 source tarball sane?)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "--- END: crossbuild_liblua5.1 ---"
|
||||
echo "*************************************************************************"
|
||||
99
test/fixtures/Shell/graylog2-server.init.d
vendored
Executable file
@@ -0,0 +1,99 @@
|
||||
#!/bin/bash
|
||||
|
||||
### BEGIN INIT INFO
|
||||
# Provides: graylog2-server
|
||||
# Required-Start: $network
|
||||
# Required-Stop: $network
|
||||
# Default-Start: 2 3 4 5
|
||||
# Default-Stop: 1
|
||||
# Short-Description: Start Graylog2 server
|
||||
### END INIT INFO
|
||||
|
||||
# Written by Lital Natan <litaln@gmail.com>
|
||||
|
||||
PREFIX=/usr
|
||||
SHAREDIR=$PREFIX/share/graylog2-server
|
||||
SERVER_JAR=$SHAREDIR/graylog2-server.jar
|
||||
SYSLOG4J_JAR=$SHAREDIR/syslog4j-0.9.46-bin.jar
|
||||
SVCNAME="graylog2-server"
|
||||
|
||||
CONFIG="/etc/graylog2.conf"
|
||||
LOGFILE="/var/log/graylog2.log"
|
||||
PIDFILE="/var/run/graylog2.pid"
|
||||
|
||||
start() {
|
||||
if [ ! -e $CONFIG ]; then
|
||||
echo "Config file $CONFIG does not exist"
|
||||
return 1
|
||||
fi
|
||||
|
||||
echo "Starting ${SVCNAME}"
|
||||
nohup `which java` -cp $SERVER_JAR:$SYSLOG4J_JAR org.graylog2.Main \
|
||||
-p ${PIDFILE} -f ${CONFIG} > $LOGFILE 2>&1 &
|
||||
|
||||
# Sleep before testing the service
|
||||
sleep 2
|
||||
|
||||
graylog2_test || return 1
|
||||
}
|
||||
|
||||
stop() {
|
||||
pid=`< $PIDFILE`
|
||||
kill $pid
|
||||
rm -f ${PIDFILE} # just in case
|
||||
}
|
||||
|
||||
graylog2_test() {
|
||||
# Graylog2 only deletes its PID file if it hits a config error
|
||||
if [ ! -e ${PIDFILE} ]; then
|
||||
echo "Configuration error, check ${CONFIG}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
local pid=`cat ${PIDFILE}`
|
||||
|
||||
# Graylog2 isn't running, so that means there was a problem
|
||||
if [ ! -e /proc/$pid ]; then
|
||||
echo "Something went wrong, check ${LOGFILE}"
|
||||
rm -f ${PIDFILE}
|
||||
return 1
|
||||
else
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
status() {
|
||||
graylog2_test > /dev/null 2>&1
|
||||
if [ "$?" == "0" ]; then
|
||||
echo "Graylog2 server is up"
|
||||
return 0
|
||||
else
|
||||
echo "Graylog2 server is down"
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
restart() {
|
||||
stop
|
||||
start
|
||||
}
|
||||
|
||||
case "$1" in
|
||||
start)
|
||||
start
|
||||
;;
|
||||
stop)
|
||||
stop
|
||||
;;
|
||||
status)
|
||||
status
|
||||
;;
|
||||
restart)
|
||||
restart
|
||||
;;
|
||||
*)
|
||||
echo "Usage $0 {start|stop|restart|status}"
|
||||
RETVAL=1
|
||||
esac
|
||||
@@ -2,3 +2,21 @@ require "bundler/setup"
|
||||
require "minitest/autorun"
|
||||
require "mocha/setup"
|
||||
require "linguist"
|
||||
|
||||
def fixtures_path
|
||||
File.expand_path("../fixtures", __FILE__)
|
||||
end
|
||||
|
||||
def fixture_blob(name)
|
||||
name = File.join(fixtures_path, name) unless name =~ /^\//
|
||||
Linguist::FileBlob.new(name, fixtures_path)
|
||||
end
|
||||
|
||||
def samples_path
|
||||
File.expand_path("../../samples", __FILE__)
|
||||
end
|
||||
|
||||
def sample_blob(name)
|
||||
name = File.join(samples_path, name) unless name =~ /^\//
|
||||
Linguist::FileBlob.new(name, samples_path)
|
||||
end
|
||||
|
||||
@@ -14,24 +14,6 @@ class TestBlob < Minitest::Test
|
||||
Encoding.default_external = @original_external
|
||||
end
|
||||
|
||||
def samples_path
|
||||
File.expand_path("../../samples", __FILE__)
|
||||
end
|
||||
|
||||
def fixtures_path
|
||||
File.expand_path("../fixtures", __FILE__)
|
||||
end
|
||||
|
||||
def sample_blob(name)
|
||||
name = File.join(samples_path, name) unless name =~ /^\//
|
||||
FileBlob.new(name, samples_path)
|
||||
end
|
||||
|
||||
def fixture_blob(name)
|
||||
name = File.join(fixtures_path, name) unless name =~ /^\//
|
||||
FileBlob.new(name, fixtures_path)
|
||||
end
|
||||
|
||||
def script_blob(name)
|
||||
blob = sample_blob(name)
|
||||
blob.instance_variable_set(:@name, 'script')
|
||||
@@ -234,6 +216,7 @@ class TestBlob < Minitest::Test
|
||||
assert sample_blob("C++/protocol-buffer.pb.cc").generated?
|
||||
assert sample_blob("Java/ProtocolBuffer.java").generated?
|
||||
assert sample_blob("Python/protocol_buffer_pb2.py").generated?
|
||||
assert sample_blob("Go/api.pb.go").generated?
|
||||
|
||||
# Generated JNI
|
||||
assert sample_blob("C/jni_layer.h").generated?
|
||||
@@ -467,6 +450,54 @@ class TestBlob < Minitest::Test
|
||||
assert sample_blob("activator.bat").vendored?
|
||||
assert sample_blob("subproject/activator").vendored?
|
||||
assert sample_blob("subproject/activator.bat").vendored?
|
||||
|
||||
assert_predicate fixture_blob(".google_apis/bar.jar"), :vendored?
|
||||
assert_predicate fixture_blob("foo/.google_apis/bar.jar"), :vendored?
|
||||
end
|
||||
|
||||
def test_documentation
|
||||
assert_predicate fixture_blob("doc/foo.html"), :documentation?
|
||||
assert_predicate fixture_blob("docs/foo.html"), :documentation?
|
||||
refute_predicate fixture_blob("project/doc/foo.html"), :documentation?
|
||||
refute_predicate fixture_blob("project/docs/foo.html"), :documentation?
|
||||
|
||||
assert_predicate fixture_blob("Documentation/foo.md"), :documentation?
|
||||
assert_predicate fixture_blob("documentation/foo.md"), :documentation?
|
||||
assert_predicate fixture_blob("project/Documentation/foo.md"), :documentation?
|
||||
assert_predicate fixture_blob("project/documentation/foo.md"), :documentation?
|
||||
|
||||
assert_predicate fixture_blob("javadoc/foo.html"), :documentation?
|
||||
assert_predicate fixture_blob("project/javadoc/foo.html"), :documentation?
|
||||
|
||||
assert_predicate fixture_blob("man/foo.html"), :documentation?
|
||||
refute_predicate fixture_blob("project/man/foo.html"), :documentation?
|
||||
|
||||
assert_predicate fixture_blob("README"), :documentation?
|
||||
assert_predicate fixture_blob("README.md"), :documentation?
|
||||
assert_predicate fixture_blob("README.txt"), :documentation?
|
||||
assert_predicate fixture_blob("foo/README"), :documentation?
|
||||
|
||||
assert_predicate fixture_blob("CONTRIBUTING"), :documentation?
|
||||
assert_predicate fixture_blob("CONTRIBUTING.md"), :documentation?
|
||||
assert_predicate fixture_blob("CONTRIBUTING.txt"), :documentation?
|
||||
assert_predicate fixture_blob("foo/CONTRIBUTING"), :documentation?
|
||||
|
||||
assert_predicate fixture_blob("LICENSE"), :documentation?
|
||||
assert_predicate fixture_blob("LICENCE.md"), :documentation?
|
||||
assert_predicate fixture_blob("LICENSE.txt"), :documentation?
|
||||
assert_predicate fixture_blob("foo/LICENSE"), :documentation?
|
||||
|
||||
assert_predicate fixture_blob("COPYING"), :documentation?
|
||||
assert_predicate fixture_blob("COPYING.md"), :documentation?
|
||||
assert_predicate fixture_blob("COPYING.txt"), :documentation?
|
||||
assert_predicate fixture_blob("foo/COPYING"), :documentation?
|
||||
|
||||
assert_predicate fixture_blob("INSTALL"), :documentation?
|
||||
assert_predicate fixture_blob("INSTALL.md"), :documentation?
|
||||
assert_predicate fixture_blob("INSTALL.txt"), :documentation?
|
||||
assert_predicate fixture_blob("foo/INSTALL"), :documentation?
|
||||
|
||||
refute_predicate fixture_blob("foo.md"), :documentation?
|
||||
end
|
||||
|
||||
def test_language
|
||||
@@ -513,4 +544,29 @@ class TestBlob < Minitest::Test
|
||||
refute blob.new(" ").empty?
|
||||
refute blob.new("nope").empty?
|
||||
end
|
||||
|
||||
def test_include_in_language_stats
|
||||
vendored = sample_blob("bower_components/custom/custom.js")
|
||||
assert_predicate vendored, :vendored?
|
||||
refute_predicate vendored, :include_in_language_stats?
|
||||
|
||||
documentation = fixture_blob("README")
|
||||
assert_predicate documentation, :documentation?
|
||||
refute_predicate documentation, :include_in_language_stats?
|
||||
|
||||
generated = sample_blob("CSS/bootstrap.min.css")
|
||||
assert_predicate generated, :generated?
|
||||
refute_predicate generated, :include_in_language_stats?
|
||||
|
||||
data = sample_blob("Ant Build System/filenames/ant.xml")
|
||||
assert_equal :data, data.language.type
|
||||
refute_predicate data, :include_in_language_stats?
|
||||
|
||||
prose = sample_blob("Markdown/tender.md")
|
||||
assert_equal :prose, prose.language.type
|
||||
refute_predicate prose, :include_in_language_stats?
|
||||
|
||||
included = sample_blob("HTML/pages.html")
|
||||
assert_predicate included, :include_in_language_stats?
|
||||
end
|
||||
end
|
||||
|
||||
@@ -3,10 +3,6 @@ require_relative "./helper"
|
||||
class TestClassifier < Minitest::Test
|
||||
include Linguist
|
||||
|
||||
def samples_path
|
||||
File.expand_path("../../samples", __FILE__)
|
||||
end
|
||||
|
||||
def fixture(name)
|
||||
File.read(File.join(samples_path, name))
|
||||
end
|
||||
|
||||
@@ -3,10 +3,6 @@ require_relative "./helper"
|
||||
class TestGenerated < Minitest::Test
|
||||
include Linguist
|
||||
|
||||
def samples_path
|
||||
File.expand_path("../../samples", __FILE__)
|
||||
end
|
||||
|
||||
class DataLoadedError < StandardError; end
|
||||
|
||||
def generated_without_loading_data(name)
|
||||
|
||||
@@ -3,12 +3,18 @@ require_relative "./helper"
|
||||
class TestGrammars < Minitest::Test
|
||||
ROOT = File.expand_path("../..", __FILE__)
|
||||
|
||||
# These grammars have no license but have been grandfathered in. New grammars
|
||||
# must have a license that allows redistribution.
|
||||
UNLICENSED_GRAMMARS_WHITELIST = %w[
|
||||
vendor/grammars/Sublime-Lasso
|
||||
vendor/grammars/Sublime-REBOL
|
||||
vendor/grammars/x86-assembly-textmate-bundle
|
||||
LICENSE_WHITELIST = [
|
||||
# This grammar's MIT license is inside a subdirectory.
|
||||
"vendor/grammars/SublimePapyrus",
|
||||
|
||||
# This grammar has a nonstandard but acceptable license.
|
||||
"vendor/grammars/gap-tmbundle",
|
||||
|
||||
# These grammars have no license but have been grandfathered in. New grammars
|
||||
# must have a license that allows redistribution.
|
||||
"vendor/grammars/Sublime-Lasso",
|
||||
"vendor/grammars/Sublime-REBOL",
|
||||
"vendor/grammars/x86-assembly-textmate-bundle",
|
||||
].freeze
|
||||
|
||||
def setup
|
||||
@@ -77,9 +83,9 @@ class TestGrammars < Minitest::Test
|
||||
|
||||
unlicensed = categories[:unlicensed] || []
|
||||
unrecognized = categories[:unrecognized] || []
|
||||
disallowed_unlicensed = unlicensed - UNLICENSED_GRAMMARS_WHITELIST
|
||||
disallowed_unrecognized = unrecognized - UNLICENSED_GRAMMARS_WHITELIST
|
||||
extra_whitelist_entries = UNLICENSED_GRAMMARS_WHITELIST - (unlicensed | unrecognized)
|
||||
disallowed_unlicensed = unlicensed - LICENSE_WHITELIST
|
||||
disallowed_unrecognized = unrecognized - LICENSE_WHITELIST
|
||||
extra_whitelist_entries = LICENSE_WHITELIST - (unlicensed | unrecognized)
|
||||
|
||||
message = ""
|
||||
if disallowed_unlicensed.any?
|
||||
@@ -93,7 +99,7 @@ class TestGrammars < Minitest::Test
|
||||
end
|
||||
if extra_whitelist_entries.any?
|
||||
message << "\n\n" unless message.empty?
|
||||
message << "The following grammar submodules are listed in UNLICENSED_GRAMMARS_WHITELIST but either have a license (yay!)\n"
|
||||
message << "The following grammar submodules are listed in LICENSE_WHITELIST but either have a license (yay!)\n"
|
||||
message << "or have been removed from the repository. Please remove them from the whitelist.\n"
|
||||
message << extra_whitelist_entries.sort.join("\n")
|
||||
end
|
||||
@@ -131,6 +137,8 @@ class TestGrammars < Minitest::Test
|
||||
"unlicense"
|
||||
elsif content.include?("http://www.wtfpl.net/txt/copying/")
|
||||
"WTFPL"
|
||||
elsif content.include?("zlib") && content.include?("license") && content.include?("2. Altered source versions must be plainly marked as such")
|
||||
"zlib"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -3,10 +3,6 @@ require_relative "./helper"
|
||||
class TestHeuristcs < Minitest::Test
|
||||
include Linguist
|
||||
|
||||
def samples_path
|
||||
File.expand_path("../../samples", __FILE__)
|
||||
end
|
||||
|
||||
def fixture(name)
|
||||
File.read(File.join(samples_path, name))
|
||||
end
|
||||
@@ -52,7 +48,7 @@ class TestHeuristcs < Minitest::Test
|
||||
def test_pl_prolog_perl_by_heuristics
|
||||
assert_heuristics({
|
||||
"Prolog" => "Prolog/turing.pl",
|
||||
"Perl" => "Perl/perl-test.t",
|
||||
"Perl" => ["Perl/perl-test.t", "Perl/use5.pl"]
|
||||
})
|
||||
end
|
||||
|
||||
@@ -72,11 +68,12 @@ class TestHeuristcs < Minitest::Test
|
||||
})
|
||||
end
|
||||
|
||||
# Candidate languages = ["AGS Script", "AsciiDoc"]
|
||||
def test_asc_asciidoc_by_heuristics
|
||||
# Candidate languages = ["AGS Script", "AsciiDoc", "Public Key"]
|
||||
def test_asc_by_heuristics
|
||||
assert_heuristics({
|
||||
"AsciiDoc" => "AsciiDoc/list.asc",
|
||||
"AGS Script" => nil
|
||||
"AGS Script" => "AGS Script/GlobalScript.asc",
|
||||
"Public Key" => "Public Key/sunCert.asc"
|
||||
})
|
||||
end
|
||||
|
||||
@@ -133,6 +130,13 @@ class TestHeuristcs < Minitest::Test
|
||||
})
|
||||
end
|
||||
|
||||
def test_lsp_by_heuristics
|
||||
assert_heuristics({
|
||||
"Common Lisp" => all_fixtures("Common Lisp"),
|
||||
"NewLisp" => all_fixtures("NewLisp")
|
||||
})
|
||||
end
|
||||
|
||||
def test_cs_by_heuristics
|
||||
assert_heuristics({
|
||||
"C#" => all_fixtures("C#", "*.cs"),
|
||||
|
||||
25
test/test_modelines.rb
Normal file
@@ -0,0 +1,25 @@
|
||||
require_relative "./helper"
|
||||
|
||||
class TestModelines < Minitest::Test
|
||||
include Linguist
|
||||
|
||||
def assert_modeline(language, blob)
|
||||
assert_equal language, Linguist::Strategy::Modeline.call(blob).first
|
||||
end
|
||||
|
||||
def test_modeline_strategy
|
||||
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby")
|
||||
assert_modeline Language["C++"], fixture_blob("Data/Modelines/seeplusplus")
|
||||
assert_modeline Language["Prolog"], fixture_blob("Data/Modelines/not_perl.pl")
|
||||
assert_modeline Language["Smalltalk"], fixture_blob("Data/Modelines/example_smalltalk.md")
|
||||
assert_modeline Language["PHP"], fixture_blob("Data/Modelines/iamphp.inc")
|
||||
end
|
||||
|
||||
def test_modeline_languages
|
||||
assert_equal Language["Ruby"], fixture_blob("Data/Modelines/ruby").language
|
||||
assert_equal Language["C++"], fixture_blob("Data/Modelines/seeplusplus").language
|
||||
assert_equal Language["Prolog"], fixture_blob("Data/Modelines/not_perl.pl").language
|
||||
assert_equal Language["Smalltalk"], fixture_blob("Data/Modelines/example_smalltalk.md").language
|
||||
assert_equal Language["PHP"], fixture_blob("Data/Modelines/iamphp.inc").language
|
||||
end
|
||||
end
|
||||
@@ -99,4 +99,16 @@ class TestRepository < Minitest::Test
|
||||
# overridden .gitattributes
|
||||
assert !override_unvendored.vendored?
|
||||
end
|
||||
|
||||
def test_linguist_override_documentation?
|
||||
attr_commit = "d4c8fb8a28e91f97a7e53428a365c0abbac36d3d"
|
||||
repo = linguist_repo(attr_commit).read_index
|
||||
|
||||
readme = Linguist::LazyBlob.new(rugged_repository, attr_commit, "README.md")
|
||||
arduino = Linguist::LazyBlob.new(rugged_repository, attr_commit, "samples/Arduino/hello.ino")
|
||||
|
||||
# overridden by .gitattributes
|
||||
refute_predicate readme, :documentation?
|
||||
assert_predicate arduino, :documentation?
|
||||
end
|
||||
end
|
||||
|
||||
@@ -3,10 +3,6 @@ require_relative "./helper"
|
||||
class TestTokenizer < Minitest::Test
|
||||
include Linguist
|
||||
|
||||
def samples_path
|
||||
File.expand_path("../../samples", __FILE__)
|
||||
end
|
||||
|
||||
def tokenize(data)
|
||||
data = File.read(File.join(samples_path, data.to_s)) if data.is_a?(Symbol)
|
||||
Tokenizer.tokenize(data)
|
||||
@@ -41,6 +37,8 @@ class TestTokenizer < Minitest::Test
|
||||
assert_equal %w(foo), tokenize("foo {- Comment -}")
|
||||
assert_equal %w(foo), tokenize("foo (* Comment *)")
|
||||
assert_equal %w(%), tokenize("2 % 10\n% Comment")
|
||||
assert_equal %w(foo bar), tokenize("foo\n\"\"\"\nComment\n\"\"\"\nbar")
|
||||
assert_equal %w(foo bar), tokenize("foo\n'''\nComment\n'''\nbar")
|
||||
end
|
||||
|
||||
def test_sgml_tags
|
||||
|
||||
2
vendor/grammars/AutoHotkey
vendored
Submodule vendor/grammars/AutoHotkey updated: a220735978...9b42c86e75
1
vendor/grammars/BrightScript.tmbundle
vendored
Submodule
Submodule vendor/grammars/BrightScript.tmbundle added at 905791b02b
1
vendor/grammars/CLIPS-sublime
vendored
Submodule
Submodule vendor/grammars/CLIPS-sublime added at f6904baa78
1
vendor/grammars/Creole
vendored
Submodule
Submodule vendor/grammars/Creole added at bac4656c8d
1
vendor/grammars/G-Code
vendored
Submodule
Submodule vendor/grammars/G-Code added at 81e8b03e3d
1
vendor/grammars/GDScript-sublime
vendored
Submodule
Submodule vendor/grammars/GDScript-sublime added at 44ac5c4af2
2
vendor/grammars/Handlebars
vendored
Submodule vendor/grammars/Handlebars updated: 87669eb08d...7bbedb0258
1
vendor/grammars/InnoSetup
vendored
Submodule
Submodule vendor/grammars/InnoSetup added at 875ba96c32
1
vendor/grammars/JSyntax
vendored
Submodule
Submodule vendor/grammars/JSyntax added at 74971149b5
1
vendor/grammars/Modelica
vendored
Submodule
Submodule vendor/grammars/Modelica added at e1fd853290
2
vendor/grammars/NimLime
vendored
Submodule vendor/grammars/NimLime updated: 3aab3b3841...fac6b182e8
1
vendor/grammars/PogoScript.tmbundle
vendored
Submodule
Submodule vendor/grammars/PogoScript.tmbundle added at 2255586f9e
2
vendor/grammars/SCSS.tmbundle
vendored
Submodule vendor/grammars/SCSS.tmbundle updated: 8ef6283dcb...49a74571e7
1
vendor/grammars/Stylus
vendored
Submodule
Submodule vendor/grammars/Stylus added at b9214d1ffd
2
vendor/grammars/Sublime-SQF-Language
vendored
Submodule vendor/grammars/Sublime-SQF-Language updated: 708c78a0ba...0313fbe6fb
1
vendor/grammars/SublimePapyrus
vendored
Submodule
Submodule vendor/grammars/SublimePapyrus added at 27313007d9
1
vendor/grammars/TXL
vendored
Submodule
Submodule vendor/grammars/TXL added at c1c98dfa86
2
vendor/grammars/actionscript3-tmbundle
vendored
Submodule vendor/grammars/actionscript3-tmbundle updated: d69fcc8884...d24ad7dec9
1
vendor/grammars/asciidoc.tmbundle
vendored
Submodule
Submodule vendor/grammars/asciidoc.tmbundle added at 28063ea46c
2
vendor/grammars/asp.tmbundle
vendored
Submodule vendor/grammars/asp.tmbundle updated: e2c7290317...144b21081a
1
vendor/grammars/ats.sublime
vendored
Submodule
Submodule vendor/grammars/ats.sublime added at 2565468fd4
2
vendor/grammars/c.tmbundle
vendored
Submodule vendor/grammars/c.tmbundle updated: f825425262...f6048afe69
2
vendor/grammars/ceylon-sublimetext
vendored
Submodule vendor/grammars/ceylon-sublimetext updated: a81ad702b4...070298013e
2
vendor/grammars/dart-sublime-bundle
vendored
Submodule vendor/grammars/dart-sublime-bundle updated: 3b97e1691e...d55b1d4278
1
vendor/grammars/desktop.tmbundle
vendored
Submodule
Submodule vendor/grammars/desktop.tmbundle added at 34f9b8ab98
1
vendor/grammars/ec.tmbundle
vendored
Submodule
Submodule vendor/grammars/ec.tmbundle added at b8ec2d32af
2
vendor/grammars/elixir-tmbundle
vendored
Submodule vendor/grammars/elixir-tmbundle updated: dcf1fc125c...9c63ff09bd
2
vendor/grammars/factor
vendored
Submodule vendor/grammars/factor updated: 208f01416d...ec896cd5ad
2
vendor/grammars/fsharpbinding
vendored
Submodule vendor/grammars/fsharpbinding updated: 9ea14cbe3c...0cd6439b51
1
vendor/grammars/gap-tmbundle
vendored
Submodule
Submodule vendor/grammars/gap-tmbundle added at 291a0469dd
1
vendor/grammars/grace-tmbundle
vendored
Submodule
Submodule vendor/grammars/grace-tmbundle added at acbf9a247c
2
vendor/grammars/haxe-sublime-bundle
vendored
Submodule vendor/grammars/haxe-sublime-bundle updated: 6359431d88...50c5aa0e10
2
vendor/grammars/java.tmbundle
vendored
Submodule vendor/grammars/java.tmbundle updated: a74cb835b8...ccdebdf888
2
vendor/grammars/javadoc.tmbundle
vendored
Submodule vendor/grammars/javadoc.tmbundle updated: 484d468f47...5276d7a93f
Some files were not shown because too many files have changed in this diff