Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00
Compare commits
125 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 14a7cb2d1b | |
| | 54ae7e7b4d | |
| | 5363e045bb | |
| | cc4da98616 | |
| | a9ff59aef5 | |
| | 7b9ec3d1b3 | |
| | 51d3711faf | |
| | 14fcd75773 | |
| | 34c623eaba | |
| | 324bc83914 | |
| | ecc62784ca | |
| | f452612666 | |
| | 0bf4b8a482 | |
| | 718c9efaca | |
| | 49593a6a6d | |
| | ba1cf12776 | |
| | 03f394626d | |
| | cf385d9e77 | |
| | dd3b1eec91 | |
| | 2b65318a61 | |
| | 1b3cdda4f7 | |
| | 50d46eed38 | |
| | 1bbcfa5683 | |
| | c2d3170064 | |
| | fe3981ff03 | |
| | 3769216c7a | |
| | 052c048fb5 | |
| | cec3a26496 | |
| | 4f0f9bd51d | |
| | 04e7956407 | |
| | 2abf488e65 | |
| | 812797b51d | |
| | dc32876113 | |
| | a18ad1d489 | |
| | 25ac140d58 | |
| | f7835f7119 | |
| | a7f835a653 | |
| | 6220286f42 | |
| | 15e2b74dec | |
| | 969333610c | |
| | 8438c6cd3e | |
| | 60f748d47b | |
| | 8da6ddf9d9 | |
| | fef7a12c85 | |
| | b80ca35b75 | |
| | c8171322f5 | |
| | 4c1e61892a | |
| | 4db659dede | |
| | ed73a72cbe | |
| | 512f077da8 | |
| | 3260b06241 | |
| | ef3b0b6af3 | |
| | 434023460e | |
| | 8e628ecc36 | |
| | ca714340e8 | |
| | a4e6fc78c8 | |
| | db1d4f7893 | |
| | bee7e55618 | |
| | 5fbe9c0902 | |
| | a840668599 | |
| | 38cb8871ba | |
| | d0b906f128 | |
| | d4c2d83af9 | |
| | 0b81b21983 | |
| | 1a769c4665 | |
| | e7e64bf39a | |
| | e4b9430024 | |
| | a76805e40d | |
| | 8d27845f8c | |
| | 9a8ab45b6f | |
| | e335d48625 | |
| | 4f46155c05 | |
| | 38901d51d2 | |
| | ded0dc74e0 | |
| | c5d1bb5370 | |
| | c8ca48856b | |
| | 7be6fb0138 | |
| | 8c516655bc | |
| | 9dceffce2f | |
| | 33be70eb28 | |
| | 9c4dc3047c | |
| | d8e5f3c965 | |
| | 71bf640a47 | |
| | c9b3d19c6f | |
| | 0f4955e5d5 | |
| | d968b0e9ee | |
| | 1f5ed3b3fe | |
| | 297be948d1 | |
| | b4492e7205 | |
| | c05bc99004 | |
| | 99eaf5faf9 | |
| | 21babbceb1 | |
| | 15885701cd | |
| | 9b942086f7 | |
| | 93cd47822f | |
| | ea3e79a631 | |
| | 0af9a35ff1 | |
| | 44048c9ba8 | |
| | e51b5ec9b7 | |
| | a47008ea00 | |
| | a0b38e8207 | |
| | 10dfe9f296 | |
| | 0b9c05f989 | |
| | 95dca67e2b | |
| | e98728595b | |
| | 4cd558c374 | |
| | adf6206ef5 | |
| | c2d558b71d | |
| | 78c58f956e | |
| | fc1404985a | |
| | 5d48ccd757 | |
| | 3530a18e46 | |
| | ae8f4f9228 | |
| | 7c34d38786 | |
| | 38bc5fd336 | |
| | 6b06e47c67 | |
| | 061712ff78 | |
| | 7707585d5e | |
| | fa7d433886 | |
| | 998e24cf36 | |
| | 63ff51e2ed | |
| | b541b53b78 | |
| | a878620a8e | |
| | 5633fd3668 | |
| | 9d0af0da40 | |
.github/ISSUE_TEMPLATE.md (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
<!--- Provide a general summary of the issue in the Title above -->

## Preliminary Steps

Please confirm you have...
- [ ] reviewed [How Linguist Works](https://github.com/github/linguist#how-linguist-works),
- [ ] reviewed the [Troubleshooting](https://github.com/github/linguist#troubleshooting) docs,
- [ ] considered implementing an [override](https://github.com/github/linguist#overrides),
- [ ] verified an issue has not already been logged for your issue ([linguist issues](https://github.com/issues?utf8=%E2%9C%93&q=is%3Aissue+repo%3Agithub/linguist)).

<!-- Please review these preliminary steps before logging your issue. You may find the information referenced may answer or explain the behaviour you are seeing. It'll help us to know you've reviewed this information. -->

## Problem Description

<!--- Provide a more detailed introduction to the issue itself, and why you consider it to be a bug -->

### URL of the affected repository:

### Last modified on:
<!-- YYYY-MM-DD -->

### Expected language:
<!-- expected language -->

### Detected language:
<!-- detected language -->
.github/PULL_REQUEST_TEMPLATE.md (vendored, new file, 46 lines)
@@ -0,0 +1,46 @@
<!--- Briefly describe what you're changing. -->

## Description
<!--- If necessary, go into depth of what this pull request is doing. -->

## Checklist:
<!--- Go over all the following points, and put an `x` in all the boxes that apply. -->
<!--- If you're unsure about any of these, don't hesitate to ask. We're here to help! -->
- [ ] **I am associating a language with a new file extension.**
  - [ ] The new extension is used in hundreds of repositories on GitHub.com
    - Search results for each extension:
      <!-- Replace FOOBAR with the new extension, and KEYWORDS with keywords unique to the language. Repeat for each extension added. -->
      - https://github.com/search?utf8=%E2%9C%93&type=Code&ref=searchresults&q=extension%3AFOOBAR+KEYWORDS+NOT+nothack
  - [ ] I have included a real-world usage sample for all extensions added in this PR:
    - Sample source(s):
      - [URL to each sample source, if applicable]
    - Sample license(s):
  - [ ] I have included a change to the heuristics to distinguish my language from others using the same extension.

- [ ] **I am adding a new language.**
  - [ ] The extension of the new language is used in hundreds of repositories on GitHub.com.
    - Search results for each extension:
      <!-- Replace FOOBAR with the new extension, and KEYWORDS with keywords unique to the language. Repeat for each extension added. -->
      - https://github.com/search?utf8=%E2%9C%93&type=Code&ref=searchresults&q=extension%3AFOOBAR+KEYWORDS+NOT+nothack
  - [ ] I have included a real-world usage sample for all extensions added in this PR:
    - Sample source(s):
      - [URL to each sample source, if applicable]
    - Sample license(s):
  - [ ] I have included a syntax highlighting grammar.
  - [ ] I have included a change to the heuristics to distinguish my language from others using the same extension.

- [ ] **I am fixing a misclassified language**
  - [ ] I have included a new sample for the misclassified language:
    - Sample source(s):
      - [URL to each sample source, if applicable]
    - Sample license(s):
  - [ ] I have included a change to the heuristics to distinguish my language from others using the same extension.

- [ ] **I am changing the source of a syntax highlighting grammar**
  <!-- Update the Lightshow URLs below to show the new and old grammars in action. -->
  - Old: https://github-lightshow.herokuapp.com/
  - New: https://github-lightshow.herokuapp.com/

- [ ] **I am adding new or changing current functionality**
  <!-- This includes modifying the vendor, documentation, and generated lists. -->
  - [ ] I have added or updated the tests for the new or changed functionality.
.gitignore (vendored, 3 changes)
@@ -8,3 +8,6 @@ lib/linguist/samples.json
/node_modules
test/fixtures/ace_modes.json
/vendor/gems/
/tmp
*.bundle
*.so
.gitmodules (vendored, 1432 changes)
File diff suppressed because it is too large
.travis.yml (15 changes)
@@ -5,23 +5,18 @@ addons:
apt:
packages:
- libicu-dev
- libicu48
- libicu52

before_install: script/travis/before_install

script:
- bundle exec rake
- script/licensed verify
- script/licensed status

rvm:
- 2.1
- 2.2
- 2.3.3
- 2.4.0

matrix:
allow_failures:
- rvm: 2.4.0
- 2.5.0

notifications:
disabled: true

@@ -31,4 +26,6 @@ git:
depth: 3

cache: bundler
dist: precise
dist: trusty

bundler_args: --without debug
CONTRIBUTING.md (158 changes)
@@ -1,67 +1,16 @@
# Contributing

Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great. This project adheres to the [Contributor Covenant Code of Conduct](http://contributor-covenant.org/). By participating, you are expected to uphold this code.
Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great.

Contributions to this project are [released](https://help.github.com/articles/github-terms-of-service/#6-contributions-under-repository-license) to the public under the [project's open source license](LICENSE).

This project adheres to the [Contributor Covenant Code of Conduct](http://contributor-covenant.org/). By participating, you are expected to uphold this code.

The majority of contributions won't need to touch any Ruby code at all.

## Adding an extension to a language
## Getting started

We try only to add new extensions once they have some usage on GitHub. In most cases we prefer that extensions be in use in hundreds of repositories before supporting them in Linguist.

To add support for a new extension:

1. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
1. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
1. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

In addition, if this extension is already listed in [`languages.yml`][languages] then sometimes a few more steps will need to be taken:

1. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
1. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.


## Adding a language

We try only to add languages once they have some usage on GitHub. In most cases we prefer that each new file extension be in use in hundreds of repositories before supporting them in Linguist.

To add support for a new language:

1. Add an entry for your language to [`languages.yml`][languages]. Omit the `language_id` field for now.
1. Add a grammar for your language: `script/add-grammar https://github.com/JaneSmith/MyGrammar`. Please only add grammars that have [one of these licenses][licenses].
1. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
1. Add a `language_id` for your language using `script/set-language-ids`. **You should only ever need to run `script/set-language-ids --update`. Anything other than this risks breaking GitHub search :cry:**
1. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.

In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:

1. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@bkeepers** to help with this) to ensure we're not misclassifying files.
1. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.

Remember, the goal here is to try and avoid false positives!


## Fixing a misclassified language

Most languages are detected by their file extension defined in [languages.yml][languages]. For disambiguating between files with common extensions, linguist applies some [heuristics](/lib/linguist/heuristics.rb) and a [statistical classifier](lib/linguist/classifier.rb). This process can help differentiate between, for example, `.h` files which could be either C, C++, or Obj-C.

Misclassifications can often be solved by either adding a new filename or extension for the language or adding more [samples][samples] to make the classifier smarter.


## Fixing syntax highlighting

Syntax highlighting in GitHub is performed using TextMate-compatible grammars. These are the same grammars that TextMate, Sublime Text and Atom use. Every language in [languages.yml][languages] is mapped to its corresponding TM `scope`. This scope will be used when picking up a grammar for highlighting.

Assuming your code is being detected as the right language, in most cases this is due to a bug in the language grammar rather than a bug in Linguist. [`grammars.yml`][grammars] lists all the grammars we use for syntax highlighting on github.com. Find the one corresponding to your code's programming language and submit a bug report upstream. If you can, try to reproduce the highlighting problem in the text editor that the grammar is designed for (TextMate, Sublime Text, or Atom) and include that information in your bug report.

You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](https://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).

Once the bug has been fixed upstream, we'll pick it up for GitHub in the next release of Linguist.

## Testing

For development you are going to want to check out the source. To get it, clone the repo and run [Bundler](http://gembundler.com/) to install its dependencies.
Before you can start contributing to Linguist, you'll need to set up your environment first. Clone the repo and run `script/bootstrap` to install its dependencies.

    git clone https://github.com/github/linguist.git
    cd linguist/
@@ -77,7 +26,91 @@ To run Linguist from the cloned repository:

    bundle exec bin/linguist --breakdown

To run the tests:
### Dependencies

Linguist uses the [`charlock_holmes`](https://github.com/brianmario/charlock_holmes) character encoding detection library which in turn uses [ICU](http://site.icu-project.org/), and the libgit2 bindings for Ruby provided by [`rugged`](https://github.com/libgit2/rugged). [Docker](https://www.docker.com/) is also required when adding or updating grammars. These components have their own dependencies - `icu4c`, and `cmake` and `pkg-config` respectively - which you may need to install before you can install Linguist.

For example, on macOS with [Homebrew](http://brew.sh/): `brew install cmake pkg-config icu4c docker` and on Ubuntu: `apt-get install cmake pkg-config libicu-dev docker-ce`.

## Adding an extension to a language

We try only to add new extensions once they have some usage on GitHub. In most cases we prefer that extensions be in use in hundreds of repositories before supporting them in Linguist.

To add support for a new extension:

1. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical and case-sensitive (uppercase before lowercase) order, with the exception of the primary extension; the primary extension should be first.
1. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory. We'd prefer examples of real-world code showing common usage. The more representative of the structure of the language, the better.
1. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
   If you are adding a sample, please state clearly the license covering the code in the sample, and if possible, link to the original source of the sample.

Additionally, if this extension is already listed in [`languages.yml`][languages] and associated with another language, then sometimes a few more steps will need to be taken:

1. Make sure that example `.yourextension` files are present in the [samples directory][samples] for each language that uses `.yourextension`.
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.yourextension` files. (ping **@lildude** to help with this) to ensure we're not misclassifying files.
1. If the Bayesian classifier does a bad job with the sample `.yourextension` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.


## Adding a language

We try only to add languages once they have some usage on GitHub. In most cases we prefer that each new file extension be in use in hundreds of repositories before supporting them in Linguist.

To add support for a new language:

1. Add an entry for your language to [`languages.yml`][languages]. Omit the `language_id` field for now.
1. Add a syntax-highlighting grammar for your language using: `script/add-grammar https://github.com/JaneSmith/MyGrammar`
   This command will analyze the grammar and, if no problems are found, add it to the repository. If problems are found, please report them to the grammar maintainer as you will not be able to add the grammar if problems are found.
   **Please only add grammars that have [one of these licenses][licenses].**
1. Add samples for your language to the [samples directory][samples] in the correct subdirectory.
1. Add a `language_id` for your language using `script/set-language-ids`.
   **You should only ever need to run `script/set-language-ids --update`. Anything other than this risks breaking GitHub search :cry:**
1. Open a pull request, linking to a [GitHub search results](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
   Please state clearly the license covering the code in the samples. Link directly to the original source if possible.

In addition, if your new language defines an extension that's already listed in [`languages.yml`][languages] (such as `.foo`) then sometimes a few more steps will need to be taken:

1. Make sure that example `.foo` files are present in the [samples directory][samples] for each language that uses `.foo`.
1. Test the performance of the Bayesian classifier with a relatively large number (1000s) of sample `.foo` files. (ping **@lildude** to help with this) to ensure we're not misclassifying files.
1. If the Bayesian classifier does a bad job with the sample `.foo` files then a [heuristic](https://github.com/github/linguist/blob/master/lib/linguist/heuristics.rb) may need to be written to help.

Remember, the goal here is to try and avoid false positives!


## Fixing a misclassified language

Most languages are detected by their file extension defined in [`languages.yml`][languages]. For disambiguating between files with common extensions, Linguist applies some [heuristics](/lib/linguist/heuristics.rb) and a [statistical classifier](lib/linguist/classifier.rb). This process can help differentiate between, for example, `.h` files which could be either C, C++, or Obj-C.

Misclassifications can often be solved by either adding a new filename or extension for the language or adding more [samples][samples] to make the classifier smarter.
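When a heuristic is needed, it is typically a small Ruby rule added to `heuristics.rb`. As a rough sketch of the shape such a rule takes (the `.foo` extension and the patterns here are invented for illustration and are not part of this change):

    disambiguate ".foo" do |data|
      if data.include?("#include")
        Language["C++"]
      elsif /^\s*(def|module)\s/.match(data)
        Language["Ruby"]
      end
    end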

## Fixing syntax highlighting

Syntax highlighting in GitHub is performed using TextMate-compatible grammars. These are the same grammars that TextMate, Sublime Text and Atom use. Every language in [`languages.yml`][languages] is mapped to its corresponding TextMate `scopeName`. This scope name will be used when picking up a grammar for highlighting.

Assuming your code is being detected as the right language, in most cases syntax highlighting problems are due to a bug in the language grammar rather than a bug in Linguist. [`vendor/README.md`][grammars] lists all the grammars we use for syntax highlighting on GitHub.com. Find the one corresponding to your code's programming language and submit a bug report upstream. If you can, try to reproduce the highlighting problem in the text editor that the grammar is designed for (TextMate, Sublime Text, or Atom) and include that information in your bug report.

You can also try to fix the bug yourself and submit a Pull Request. [TextMate's documentation](https://manual.macromates.com/en/language_grammars) offers a good introduction on how to work with TextMate-compatible grammars. You can test grammars using [Lightshow](https://github-lightshow.herokuapp.com).

Once the bug has been fixed upstream, we'll pick it up for GitHub in the next release of Linguist.


## Changing the source of a syntax highlighting grammar

We'd like to ensure Linguist and GitHub.com are using the latest and greatest grammars that are consistent with the current usage but understand that sometimes a grammar can lag behind the evolution of a language or even stop being developed. This often results in someone grasping the opportunity to create a newer and better and more actively maintained grammar, and we'd love to use it and pass on its functionality to our users.

Switching the source of a grammar is really easy:

    script/add-grammar --replace MyGrammar https://github.com/PeterPan/MyGrammar

This command will analyze the grammar and, if no problems are found, add it to the repository. If problems are found, please report these problems to the grammar maintainer as you will not be able to add the grammar if problems are found.

**Please only add grammars that have [one of these licenses][licenses].**

Please then open a pull request for the updated grammar.


## Testing

You can run the tests locally with:

    bundle exec rake test

@@ -93,6 +126,7 @@ Linguist is maintained with :heart: by:
- **@BenEddy** (GitHub staff)
- **@Caged** (GitHub staff)
- **@grantr** (GitHub staff)
- **@kivikakk** (GitHub staff)
- **@larsbrinkhoff**
- **@lildude** (GitHub staff)
- **@pchaigno**
@@ -101,7 +135,7 @@ Linguist is maintained with :heart: by:

As Linguist is a production dependency for GitHub we have a couple of workflow restrictions:

- Anyone with commit rights can merge Pull Requests provided that there is a :+1: from a GitHub member of staff
- Anyone with commit rights can merge Pull Requests provided that there is a :+1: from a GitHub staff member.
- Releases are performed by GitHub staff so we can ensure GitHub.com always stays up to date with the latest release of Linguist and there are no regressions in production.

### Releasing
@@ -122,9 +156,11 @@ If you are the current maintainer of this gem:
1. Test behavior locally, branch deploy, whatever needs to happen
1. Merge github/linguist PR
1. Tag and push: `git tag vx.xx.xx; git push --tags`
1. Create a GitHub release with the pushed tag (https://github.com/github/linguist/releases/new)
1. Build a grammars tarball (`./script/build-grammars-tarball`) and attach it to the GitHub release
1. Push to rubygems.org -- `gem push github-linguist-3.0.0.gem`

[grammars]: /grammars.yml
[grammars]: /vendor/README.md
[languages]: /lib/linguist/languages.yml
[licenses]: https://github.com/github/linguist/blob/257425141d4e2a5232786bf0b13c901ada075f93/vendor/licenses/config.yml#L2-L11
[samples]: /samples
Gemfile (5 changes)
@@ -1,3 +1,6 @@
source 'https://rubygems.org'
gemspec :name => "github-linguist"
gem 'byebug' if RUBY_VERSION >= '2.0'

group :debug do
  gem 'byebug' if RUBY_VERSION >= '2.2'
end
README.md (176 changes)
@@ -1,38 +1,130 @@
# Linguist

[](https://travis-ci.org/github/linguist)

[issues]: https://github.com/github/linguist/issues
[new-issue]: https://github.com/github/linguist/issues/new

This library is used on GitHub.com to detect blob languages, ignore binary or vendored files, suppress generated files in diffs, and generate language breakdown graphs.

See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md) before filing an issue or creating a pull request.
See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](CONTRIBUTING.md) before filing an issue or creating a pull request.

## How Linguist works

Linguist takes the list of languages it knows from [`languages.yml`](/lib/linguist/languages.yml) and uses a number of methods to try and determine the language used by each file, and the overall repository breakdown.

Linguist starts by going through all the files in a repository and excludes all files that it determines to be binary data, [vendored code](#vendored-code), [generated code](#generated-code), [documentation](#documentation), or are defined as `data` (e.g. SQL) or `prose` (e.g. Markdown) languages, whilst taking into account any [overrides](#overrides).

If an [explicit language override](#using-gitattributes) has been used, that language is used for the matching files. The language of each remaining file is then determined using the following strategies, in order, with each step either identifying the precise language or reducing the number of likely languages passed down to the next strategy:

- Vim or Emacs modeline,
- commonly used filename,
- shell shebang,
- file extension,
- heuristics,
- naïve Bayesian classification

The result of this analysis is used to produce the language stats bar which displays the languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API.
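The same per-file detection can also be exercised directly from Ruby. A minimal sketch using `Linguist::FileBlob`, the class the library uses for files on disk (the path below is only an example):

```ruby
require 'linguist'

# Wrap a file on disk and run the detection strategies described above.
blob = Linguist::FileBlob.new('lib/linguist.rb', Dir.pwd)
language = blob.language          # nil when no language can be determined
puts language.name if language    #=> "Ruby"
```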
### How Linguist works on GitHub.com

When you push changes to a repository on GitHub.com, a low priority background job is enqueued to analyze your repository as explained above. The results of this analysis are cached for the lifetime of your repository and are only updated when the repository is updated. As this analysis is performed by a low priority background job, it can take a while, particularly during busy periods, for your language statistics bar to reflect your changes.


## Usage

Install the gem:

    $ gem install github-linguist

#### Dependencies

Linguist uses the [`charlock_holmes`](https://github.com/brianmario/charlock_holmes) character encoding detection library which in turn uses [ICU](http://site.icu-project.org/), and the libgit2 bindings for Ruby provided by [`rugged`](https://github.com/libgit2/rugged). These components have their own dependencies - `icu4c`, and `cmake` and `pkg-config` respectively - which you may need to install before you can install Linguist.

For example, on macOS with [Homebrew](http://brew.sh/): `brew install cmake pkg-config icu4c` and on Ubuntu: `apt-get install cmake pkg-config libicu-dev`.

### Application usage

Linguist can be used in your application as follows:

```ruby
require 'rugged'
require 'linguist'

repo = Rugged::Repository.new('.')
project = Linguist::Repository.new(repo, repo.head.target_id)
project.language #=> "Ruby"
project.languages #=> { "Ruby" => 119387 }
```
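The `project` object can also report results per file, which is what the `--breakdown` flag described below is built on. A small sketch; the `breakdown_by_file` method name is an assumption based on the CLI behaviour and is not shown in the snippet above:

```ruby
# Language name => list of file paths, mirroring `linguist --breakdown`.
project.breakdown_by_file.each do |language, files|
  puts language
  files.each { |path| puts "  #{path}" }
end
```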
### Command line usage

A repository's languages stats can also be assessed from the command line using the `linguist` executable. Without any options, `linguist` will output the breakdown that correlates to what is shown in the language stats bar. The `--breakdown` flag will additionally show the breakdown of files by language.

You can try running `linguist` on the root directory in this repository itself:

```console
$ bundle exec bin/linguist --breakdown
68.57% Ruby
22.90% C
6.93% Go
1.21% Lex
0.39% Shell

Ruby:
Gemfile
Rakefile
bin/git-linguist
bin/linguist
ext/linguist/extconf.rb
github-linguist.gemspec
lib/linguist.rb
…
```


## Troubleshooting

### My repository is detected as the wrong language

If the language stats bar is reporting a language that you don't expect:

The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect:

1. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
1. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
1. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
1. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
   Keep in mind this performs a search so the [code search restrictions](https://help.github.com/articles/searching-code/#considerations-for-code-search) may result in files identified in the language statistics not appearing in the search results. [Installing Linguist locally](#usage) and running it from the [command line](#command-line-usage) will give you accurate results.
1. If you see files that you didn't write in the search results, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
1. If the files are misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful. You can also use the [manual overrides](#overrides) feature to correctly classify them in your repository.
1. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.

Keep in mind that the repository language stats are only [updated when you push changes](#how-linguist-works-on-githubcom), and the results are cached for the lifetime of your repository. If you have not made any changes to your repository in a while, you may find pushing another change will correct the stats.

### My repository isn't showing my language

Linguist does not consider [vendored code](#vendored-code), [generated code](#generated-code), [documentation](#documentation), or `data` (e.g. SQL) or `prose` (e.g. Markdown) languages (as defined by the `type` attribute in [`languages.yml`](/lib/linguist/languages.yml)) when calculating the repository language statistics.

If the language statistics bar is not showing your language at all, it could be for a few reasons:

1. Linguist doesn't know about your language.
1. The extension you have chosen is not associated with your language in [`languages.yml`](/lib/linguist/languages.yml).
1. All the files in your repository fall into one of the categories listed above that Linguist excludes by default.

If Linguist doesn't know about the language or the extension you're using, consider [contributing](CONTRIBUTING.md) to Linguist by opening a pull request to add support for your language or extension. For everything else, you can use the [manual overrides](#overrides) feature to tell Linguist to include your files in the language statistics.

### There's a problem with the syntax highlighting of a file

Linguist detects the language of a file but the actual syntax-highlighting is powered by a set of language grammars which are included in this project as a set of submodules [and may be found here](https://github.com/github/linguist/blob/master/vendor/README.md).
Linguist detects the language of a file but the actual syntax-highlighting is powered by a set of language grammars which are included in this project as a set of submodules [as listed here](/vendor/README.md).

If you experience an issue with the syntax-highlighting on GitHub, **please report the issue to the upstream grammar repository, not here.** Grammars are updated every time we build the Linguist gem so upstream bug fixes are automatically incorporated as they are fixed.

If you experience an issue with the syntax-highlighting on GitHub, **please report the issue to the upstream grammar repository, not here.** Grammars are updated every time we build the Linguist gem and so upstream bug fixes are automatically incorporated as they are fixed.

## Overrides

Linguist supports a number of different custom overrides strategies for language definitions and vendored paths.
Linguist supports a number of different custom override strategies for language definitions and file paths.

### Using gitattributes

Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override to set `linguist-documentation`, `linguist-language`, `linguist-vendored`, and `linguist-generated`. `.gitattributes` will be used to determine language statistics and will be used to syntax highlight files. You can also manually set syntax highlighting using [Vim or Emacs modelines](#using-emacs-or-vim-modelines).
Add a `.gitattributes` file to your project and use standard git-style path matchers for the files you want to override using the `linguist-documentation`, `linguist-language`, `linguist-vendored`, `linguist-generated` and `linguist-detectable` attributes. `.gitattributes` will be used to determine language statistics and will be used to syntax highlight files. You can also manually set syntax highlighting using [Vim or Emacs modelines](#using-emacs-or-vim-modelines).

```
$ cat .gitattributes
@@ -41,7 +133,7 @@ $ cat .gitattributes

#### Vendored code

Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [`vendor.yml`](/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.

Use the `linguist-vendored` attribute to vendor or un-vendor paths.

@@ -53,7 +145,7 @@ jquery.js linguist-vendored=false

#### Documentation

Just like vendored files, Linguist excludes documentation files from your project's language stats. [lib/linguist/documentation.yml](lib/linguist/documentation.yml) lists common documentation paths and excludes them from the language statistics for your repository.
Just like vendored files, Linguist excludes documentation files from your project's language stats. [`documentation.yml`](/lib/linguist/documentation.yml) lists common documentation paths and excludes them from the language statistics for your repository.

Use the `linguist-documentation` attribute to mark or unmark paths as documentation.

@@ -65,13 +157,28 @@ docs/formatter.rb linguist-documentation=false

#### Generated code

Not all plain text files are true source files. Generated files like minified js and compiled CoffeeScript can be detected and excluded from language stats. As an added bonus, unlike vendored and documentation files, these files are suppressed in diffs.
Not all plain text files are true source files. Generated files like minified JavaScript and compiled CoffeeScript can be detected and excluded from language stats. As an added bonus, unlike vendored and documentation files, these files are suppressed in diffs. [`generated.rb`](/lib/linguist/generated.rb) lists common generated paths and excludes them from the language statistics of your repository.

Use the `linguist-generated` attribute to mark or unmark paths as generated.

```
$ cat .gitattributes
Api.elm linguist-generated=true
```

#### Detectable

Only programming languages are included in the language statistics. Languages of a different type (as defined in [`languages.yml`](/lib/linguist/languages.yml)) are not "detectable" causing them not to be included in the language statistics.

Use the `linguist-detectable` attribute to mark or unmark paths as detectable.

```
$ cat .gitattributes
*.kicad_pcb linguist-detectable=true
*.sch linguist-detectable=true
tools/export_bom.py linguist-detectable=false
```

### Using Emacs or Vim modelines

If you do not want to use `.gitattributes` to override the syntax highlighting used on GitHub.com, you can use Vim or Emacs style modelines to set the language for a single file. Modelines can be placed anywhere within a file and are respected when determining how to syntax-highlight a file on GitHub.com

@@ -90,52 +197,15 @@ vim: set ft=cpp:
-*- mode: php;-*-
```

## Usage

Install the gem:

```
$ gem install github-linguist
```

Then use it in your application:

```ruby
require 'rugged'
require 'linguist'

repo = Rugged::Repository.new('.')
project = Linguist::Repository.new(repo, repo.head.target_id)
project.language #=> "Ruby"
project.languages #=> { "Ruby" => 119387 }
```

These stats are also printed out by the `linguist` executable. You can use the
`--breakdown` flag, and the binary will also output the breakdown of files by language.

You can try running `linguist` on the root directory in this repository itself:

```
$ bundle exec linguist --breakdown

100.00% Ruby

Ruby:
Gemfile
Rakefile
bin/linguist
github-linguist.gemspec
lib/linguist.rb
…
```

## Contributing

Please check out our [contributing guidelines](CONTRIBUTING.md).


## License

The language grammars included in this gem are covered by their repositories'
respective licenses. `grammars.yml` specifies the repository for each grammar.
respective licenses. [`vendor/README.md`](/vendor/README.md) lists the repository for each grammar.

All other files are covered by the MIT license, see `LICENSE`.
Rakefile (20 changes)
@@ -1,6 +1,7 @@
require 'bundler/setup'
require 'rake/clean'
require 'rake/testtask'
require 'rake/extensiontask'
require 'yaml'
require 'yajl'
require 'open-uri'
@@ -10,8 +11,14 @@ task :default => :test

Rake::TestTask.new

gem_spec = Gem::Specification.load('github-linguist.gemspec')

Rake::ExtensionTask.new('linguist', gem_spec) do |ext|
  ext.lib_dir = File.join('lib', 'linguist')
end

# Extend test task to check for samples and fetch latest Ace modes
task :test => [:check_samples, :fetch_ace_modes]
task :test => [:compile, :check_samples, :fetch_ace_modes]

desc "Check that we have samples.json generated"
task :check_samples do
@@ -34,15 +41,22 @@ task :fetch_ace_modes do
end
end

task :samples do
task :samples => :compile do
  require 'linguist/samples'
  json = Yajl.dump(Linguist::Samples.data, :pretty => true)
  File.write 'lib/linguist/samples.json', json
end

task :flex do
  if `flex -V` !~ /^flex \d+\.\d+\.\d+/
    fail "flex not detected"
  end
  system "cd ext/linguist && flex tokenizer.l"
end

task :build_gem => :samples do
  rm_rf "grammars"
  sh "script/convert-grammars"
  sh "script/grammar-compiler compile -o grammars || true"
  languages = YAML.load_file("lib/linguist/languages.yml")
  File.write("lib/linguist/languages.json", Yajl.dump(languages))
  `gem build github-linguist.gemspec`

@@ -117,9 +117,8 @@ def git_linguist(args)
  end

  parser.parse!(args)

  git_dir = `git rev-parse --git-dir`.strip
  raise "git-linguist must be run in a Git repository (#{Dir.pwd})" unless $?.success?
  raise "git-linguist must be run in a Git repository" unless $?.success?
  wrapper = GitLinguist.new(git_dir, commit, incremental)

  case args.pop
@@ -141,6 +140,10 @@ def git_linguist(args)
    $stderr.print(parser.help)
    exit 1
  end
rescue Exception => e
  $stderr.puts e.message
  $stderr.puts e.backtrace
  exit 1
end

git_linguist(ARGV)
ext/linguist/extconf.rb (new file, 3 lines)
@@ -0,0 +1,3 @@
require 'mkmf'
dir_config('linguist')
create_makefile('linguist/linguist')
ext/linguist/lex.linguist_yy.c (new file, 2226 lines)
File diff suppressed because it is too large
ext/linguist/lex.linguist_yy.h (new file, 336 lines)
@@ -0,0 +1,336 @@
#ifndef linguist_yyHEADER_H
#define linguist_yyHEADER_H 1
#define linguist_yyIN_HEADER 1

#line 6 "lex.linguist_yy.h"

#define YY_INT_ALIGNED short int

/* A lexical scanner generated by flex */

#define FLEX_SCANNER
#define YY_FLEX_MAJOR_VERSION 2
#define YY_FLEX_MINOR_VERSION 5
#define YY_FLEX_SUBMINOR_VERSION 35
#if YY_FLEX_SUBMINOR_VERSION > 0
#define FLEX_BETA
#endif

/* First, we deal with platform-specific or compiler-specific issues. */

/* begin standard C headers. */
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <stdlib.h>

/* end standard C headers. */

/* flex integer type definitions */

#ifndef FLEXINT_H
#define FLEXINT_H

/* C99 systems have <inttypes.h>. Non-C99 systems may or may not. */

#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L

/* C99 says to define __STDC_LIMIT_MACROS before including stdint.h,
 * if you want the limit (max/min) macros for int types.
 */
#ifndef __STDC_LIMIT_MACROS
#define __STDC_LIMIT_MACROS 1
#endif

#include <inttypes.h>
typedef int8_t flex_int8_t;
typedef uint8_t flex_uint8_t;
typedef int16_t flex_int16_t;
typedef uint16_t flex_uint16_t;
typedef int32_t flex_int32_t;
typedef uint32_t flex_uint32_t;
typedef uint64_t flex_uint64_t;
#else
typedef signed char flex_int8_t;
typedef short int flex_int16_t;
typedef int flex_int32_t;
typedef unsigned char flex_uint8_t;
typedef unsigned short int flex_uint16_t;
typedef unsigned int flex_uint32_t;
#endif /* ! C99 */

/* Limits of integral types. */
#ifndef INT8_MIN
#define INT8_MIN (-128)
#endif
#ifndef INT16_MIN
#define INT16_MIN (-32767-1)
#endif
#ifndef INT32_MIN
#define INT32_MIN (-2147483647-1)
#endif
#ifndef INT8_MAX
#define INT8_MAX (127)
#endif
#ifndef INT16_MAX
#define INT16_MAX (32767)
#endif
#ifndef INT32_MAX
#define INT32_MAX (2147483647)
#endif
#ifndef UINT8_MAX
#define UINT8_MAX (255U)
#endif
#ifndef UINT16_MAX
#define UINT16_MAX (65535U)
#endif
#ifndef UINT32_MAX
#define UINT32_MAX (4294967295U)
#endif

#endif /* ! FLEXINT_H */

#ifdef __cplusplus

/* The "const" storage-class-modifier is valid. */
#define YY_USE_CONST

#else /* ! __cplusplus */

/* C99 requires __STDC__ to be defined as 1. */
#if defined (__STDC__)

#define YY_USE_CONST

#endif /* defined (__STDC__) */
#endif /* ! __cplusplus */

#ifdef YY_USE_CONST
#define yyconst const
#else
#define yyconst
#endif

/* An opaque pointer. */
#ifndef YY_TYPEDEF_YY_SCANNER_T
#define YY_TYPEDEF_YY_SCANNER_T
typedef void* yyscan_t;
#endif

/* For convenience, these vars (plus the bison vars far below)
   are macros in the reentrant scanner. */
#define yyin yyg->yyin_r
#define yyout yyg->yyout_r
#define yyextra yyg->yyextra_r
#define yyleng yyg->yyleng_r
#define yytext yyg->yytext_r
#define yylineno (YY_CURRENT_BUFFER_LVALUE->yy_bs_lineno)
#define yycolumn (YY_CURRENT_BUFFER_LVALUE->yy_bs_column)
#define yy_flex_debug yyg->yy_flex_debug_r

/* Size of default input buffer. */
#ifndef YY_BUF_SIZE
#define YY_BUF_SIZE 16384
#endif

#ifndef YY_TYPEDEF_YY_BUFFER_STATE
#define YY_TYPEDEF_YY_BUFFER_STATE
typedef struct yy_buffer_state *YY_BUFFER_STATE;
#endif

#ifndef YY_TYPEDEF_YY_SIZE_T
#define YY_TYPEDEF_YY_SIZE_T
typedef size_t yy_size_t;
#endif

#ifndef YY_STRUCT_YY_BUFFER_STATE
#define YY_STRUCT_YY_BUFFER_STATE
struct yy_buffer_state
{
    FILE *yy_input_file;

    char *yy_ch_buf; /* input buffer */
    char *yy_buf_pos; /* current position in input buffer */

    /* Size of input buffer in bytes, not including room for EOB
     * characters.
     */
    yy_size_t yy_buf_size;

    /* Number of characters read into yy_ch_buf, not including EOB
     * characters.
     */
    yy_size_t yy_n_chars;

    /* Whether we "own" the buffer - i.e., we know we created it,
     * and can realloc() it to grow it, and should free() it to
     * delete it.
     */
    int yy_is_our_buffer;

    /* Whether this is an "interactive" input source; if so, and
     * if we're using stdio for input, then we want to use getc()
     * instead of fread(), to make sure we stop fetching input after
     * each newline.
     */
    int yy_is_interactive;

    /* Whether we're considered to be at the beginning of a line.
     * If so, '^' rules will be active on the next match, otherwise
     * not.
     */
    int yy_at_bol;

    int yy_bs_lineno; /**< The line count. */
    int yy_bs_column; /**< The column count. */

    /* Whether to try to fill the input buffer when we reach the
     * end of it.
     */
    int yy_fill_buffer;

    int yy_buffer_status;

};
#endif /* !YY_STRUCT_YY_BUFFER_STATE */

void linguist_yyrestart (FILE *input_file ,yyscan_t yyscanner );
void linguist_yy_switch_to_buffer (YY_BUFFER_STATE new_buffer ,yyscan_t yyscanner );
YY_BUFFER_STATE linguist_yy_create_buffer (FILE *file,int size ,yyscan_t yyscanner );
void linguist_yy_delete_buffer (YY_BUFFER_STATE b ,yyscan_t yyscanner );
void linguist_yy_flush_buffer (YY_BUFFER_STATE b ,yyscan_t yyscanner );
void linguist_yypush_buffer_state (YY_BUFFER_STATE new_buffer ,yyscan_t yyscanner );
void linguist_yypop_buffer_state (yyscan_t yyscanner );

YY_BUFFER_STATE linguist_yy_scan_buffer (char *base,yy_size_t size ,yyscan_t yyscanner );
YY_BUFFER_STATE linguist_yy_scan_string (yyconst char *yy_str ,yyscan_t yyscanner );
YY_BUFFER_STATE linguist_yy_scan_bytes (yyconst char *bytes,yy_size_t len ,yyscan_t yyscanner );

void *linguist_yyalloc (yy_size_t ,yyscan_t yyscanner );
void *linguist_yyrealloc (void *,yy_size_t ,yyscan_t yyscanner );
void linguist_yyfree (void * ,yyscan_t yyscanner );

/* Begin user sect3 */

#define yytext_ptr yytext_r

#ifdef YY_HEADER_EXPORT_START_CONDITIONS
#define INITIAL 0
#define sgml 1
#define c_comment 2
#define xml_comment 3
#define haskell_comment 4
#define ocaml_comment 5
#define python_dcomment 6
#define python_scomment 7

#endif

#ifndef YY_NO_UNISTD_H
/* Special case for "unistd.h", since it is non-ANSI. We include it way
 * down here because we want the user's section 1 to have been scanned first.
 * The user has a chance to override it with an option.
 */
#include <unistd.h>
#endif

#define YY_EXTRA_TYPE struct tokenizer_extra *

int linguist_yylex_init (yyscan_t* scanner);

int linguist_yylex_init_extra (YY_EXTRA_TYPE user_defined,yyscan_t* scanner);

/* Accessor methods to globals.
   These are made visible to non-reentrant scanners for convenience. */

int linguist_yylex_destroy (yyscan_t yyscanner );

int linguist_yyget_debug (yyscan_t yyscanner );

void linguist_yyset_debug (int debug_flag ,yyscan_t yyscanner );

YY_EXTRA_TYPE linguist_yyget_extra (yyscan_t yyscanner );

void linguist_yyset_extra (YY_EXTRA_TYPE user_defined ,yyscan_t yyscanner );

FILE *linguist_yyget_in (yyscan_t yyscanner );

void linguist_yyset_in (FILE * in_str ,yyscan_t yyscanner );

FILE *linguist_yyget_out (yyscan_t yyscanner );

void linguist_yyset_out (FILE * out_str ,yyscan_t yyscanner );

yy_size_t linguist_yyget_leng (yyscan_t yyscanner );

char *linguist_yyget_text (yyscan_t yyscanner );

int linguist_yyget_lineno (yyscan_t yyscanner );

void linguist_yyset_lineno (int line_number ,yyscan_t yyscanner );

/* Macros after this point can all be overridden by user definitions in
 * section 1.
 */

#ifndef YY_SKIP_YYWRAP
#ifdef __cplusplus
extern "C" int linguist_yywrap (yyscan_t yyscanner );
#else
extern int linguist_yywrap (yyscan_t yyscanner );
#endif
#endif

#ifndef yytext_ptr
static void yy_flex_strncpy (char *,yyconst char *,int ,yyscan_t yyscanner);
#endif

#ifdef YY_NEED_STRLEN
static int yy_flex_strlen (yyconst char * ,yyscan_t yyscanner);
#endif

#ifndef YY_NO_INPUT

#endif

/* Amount of stuff to slurp up with each read. */
#ifndef YY_READ_BUF_SIZE
#define YY_READ_BUF_SIZE 8192
#endif

/* Number of entries by which start-condition stack grows. */
#ifndef YY_START_STACK_INCR
#define YY_START_STACK_INCR 25
#endif

/* Default declaration of generated scanner - a define so the user can
 * easily add parameters.
 */
#ifndef YY_DECL
#define YY_DECL_IS_OURS 1

extern int linguist_yylex (yyscan_t yyscanner);

#define YY_DECL int linguist_yylex (yyscan_t yyscanner)
#endif /* !YY_DECL */

/* yy_get_previous_state - get the state just before the EOB char was reached */

#undef YY_NEW_FILE
#undef YY_FLUSH_BUFFER
#undef yy_set_bol
#undef yy_new_buffer
#undef yy_set_interactive
#undef YY_DO_BEFORE_ACTION

#ifdef YY_DECL_IS_OURS
#undef YY_DECL_IS_OURS
#undef YY_DECL
#endif

#line 118 "tokenizer.l"


#line 335 "lex.linguist_yy.h"
#undef linguist_yyIN_HEADER
#endif /* linguist_yyHEADER_H */
ext/linguist/linguist.c (new file, 75 lines)
@@ -0,0 +1,75 @@
|
||||
#include "ruby.h"
|
||||
#include "linguist.h"
|
||||
#include "lex.linguist_yy.h"
|
||||
|
||||
// Anything longer is unlikely to be useful.
|
||||
#define MAX_TOKEN_LEN 32
|
||||
|
||||
int linguist_yywrap(yyscan_t yyscanner) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
static VALUE rb_tokenizer_extract_tokens(VALUE self, VALUE rb_data) {
|
||||
YY_BUFFER_STATE buf;
|
||||
yyscan_t scanner;
|
||||
struct tokenizer_extra extra;
|
||||
VALUE ary, s;
|
||||
long len;
|
||||
int r;
|
||||
|
||||
Check_Type(rb_data, T_STRING);
|
||||
|
||||
len = RSTRING_LEN(rb_data);
|
||||
if (len > 100000)
|
||||
len = 100000;
|
||||
|
||||
linguist_yylex_init_extra(&extra, &scanner);
|
||||
buf = linguist_yy_scan_bytes(RSTRING_PTR(rb_data), (int) len, scanner);
|
||||
|
||||
ary = rb_ary_new();
|
||||
do {
|
||||
extra.type = NO_ACTION;
|
||||
extra.token = NULL;
|
||||
r = linguist_yylex(scanner);
|
||||
switch (extra.type) {
|
||||
case NO_ACTION:
|
||||
break;
|
||||
case REGULAR_TOKEN:
|
||||
len = strlen(extra.token);
|
||||
if (len <= MAX_TOKEN_LEN)
|
||||
rb_ary_push(ary, rb_str_new(extra.token, len));
|
||||
free(extra.token);
|
||||
break;
|
||||
case SHEBANG_TOKEN:
|
||||
len = strlen(extra.token);
|
||||
if (len <= MAX_TOKEN_LEN) {
|
||||
s = rb_str_new2("SHEBANG#!");
|
||||
rb_str_cat(s, extra.token, len);
|
||||
rb_ary_push(ary, s);
|
||||
}
|
||||
free(extra.token);
|
||||
break;
|
||||
case SGML_TOKEN:
|
||||
len = strlen(extra.token);
|
||||
if (len <= MAX_TOKEN_LEN) {
|
||||
s = rb_str_new(extra.token, len);
|
||||
rb_str_cat2(s, ">");
|
||||
rb_ary_push(ary, s);
|
||||
}
|
||||
free(extra.token);
|
||||
break;
|
||||
}
|
||||
} while (r);
|
||||
|
||||
linguist_yy_delete_buffer(buf, scanner);
|
||||
linguist_yylex_destroy(scanner);
|
||||
|
||||
return ary;
|
||||
}
|
||||
|
||||
__attribute__((visibility("default"))) void Init_linguist() {
|
||||
VALUE rb_mLinguist = rb_define_module("Linguist");
|
||||
VALUE rb_cTokenizer = rb_define_class_under(rb_mLinguist, "Tokenizer", rb_cObject);
|
||||
|
||||
rb_define_method(rb_cTokenizer, "extract_tokens", rb_tokenizer_extract_tokens, 1);
|
||||
}
|
||||
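For orientation, a hedged sketch of how this extension is driven from Ruby once the gem is compiled. `Linguist::Tokenizer#extract_tokens` is the method registered by `Init_linguist` above; the inputs and the token lists shown here are illustrative.

```ruby
require 'linguist'

tokenizer = Linguist::Tokenizer.new

# Identifiers, punctuation and operators survive; comments, strings and number
# literals are skipped by the flex rules, input is capped at 100,000 bytes and
# tokens longer than MAX_TOKEN_LEN (32) are dropped.
tokenizer.extract_tokens("int main() { return 0; }")
# => ["int", "main", "(", ")", "{", "return", ";", "}"]

# Shebang lines collapse to a single normalised SHEBANG#! token.
tokenizer.extract_tokens("#!/usr/bin/env ruby\nputs 42\n")
# => ["SHEBANG#!ruby", "puts"]
```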
11
ext/linguist/linguist.h
Normal file
@@ -0,0 +1,11 @@
|
||||
enum tokenizer_type {
|
||||
NO_ACTION,
|
||||
REGULAR_TOKEN,
|
||||
SHEBANG_TOKEN,
|
||||
SGML_TOKEN,
|
||||
};
|
||||
|
||||
struct tokenizer_extra {
|
||||
char *token;
|
||||
enum tokenizer_type type;
|
||||
};
|
||||
119
ext/linguist/tokenizer.l
Normal file
@@ -0,0 +1,119 @@
|
||||
%{
|
||||
|
||||
#include "linguist.h"
|
||||
|
||||
#define feed_token(tok, typ) do { \
|
||||
yyextra->token = (tok); \
|
||||
yyextra->type = (typ); \
|
||||
} while (0)
|
||||
|
||||
#define eat_until_eol() do { \
|
||||
int c; \
|
||||
while ((c = input(yyscanner)) != '\n' && c != EOF && c); \
|
||||
if (c == EOF || !c) \
|
||||
return 0; \
|
||||
} while (0)
|
||||
|
||||
#define eat_until_unescaped(q) do { \
|
||||
int c; \
|
||||
while ((c = input(yyscanner)) != EOF && c) { \
|
||||
if (c == '\n') \
|
||||
break; \
|
||||
if (c == '\\') { \
|
||||
c = input(yyscanner); \
|
||||
if (c == EOF || !c) \
|
||||
return 0; \
|
||||
} else if (c == q) \
|
||||
break; \
|
||||
} \
|
||||
if (c == EOF || !c) \
|
||||
return 0; \
|
||||
} while (0)
|
||||
|
||||
%}
|
||||
|
||||
%option never-interactive yywrap reentrant nounput warn nodefault header-file="lex.linguist_yy.h" extra-type="struct tokenizer_extra *" prefix="linguist_yy"
|
||||
%x sgml c_comment xml_comment haskell_comment ocaml_comment python_dcomment python_scomment
|
||||
|
||||
%%
|
||||
|
||||
^#![ \t]*([[:alnum:]_\/]*\/)?env([ \t]+([^ \t=]*=[^ \t]*))*[ \t]+[[:alpha:]_]+ {
|
||||
const char *off = strrchr(yytext, ' ');
|
||||
if (!off)
|
||||
off = yytext;
|
||||
else
|
||||
++off;
|
||||
feed_token(strdup(off), SHEBANG_TOKEN);
|
||||
eat_until_eol();
|
||||
return 1;
|
||||
}
|
||||
|
||||
^#![ \t]*[[:alpha:]_\/]+ {
|
||||
const char *off = strrchr(yytext, '/');
|
||||
if (!off)
|
||||
off = yytext;
|
||||
else
|
||||
++off;
|
||||
if (strcmp(off, "env") == 0) {
|
||||
eat_until_eol();
|
||||
} else {
|
||||
feed_token(strdup(off), SHEBANG_TOKEN);
|
||||
eat_until_eol();
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
^[ \t]*(\/\/|--|\#|%|\")" ".* { /* nothing */ }
|
||||
|
||||
"/*" { BEGIN(c_comment); }
|
||||
/* See below for xml_comment start. */
|
||||
"{-" { BEGIN(haskell_comment); }
|
||||
"(*" { BEGIN(ocaml_comment); }
|
||||
"\"\"\"" { BEGIN(python_dcomment); }
|
||||
"'''" { BEGIN(python_scomment); }
|
||||
|
||||
<c_comment,xml_comment,haskell_comment,ocaml_comment,python_dcomment,python_scomment>.|\n { /* nothing */ }
|
||||
<c_comment>"*/" { BEGIN(INITIAL); }
|
||||
<xml_comment>"-->" { BEGIN(INITIAL); }
|
||||
<haskell_comment>"-}" { BEGIN(INITIAL); }
|
||||
<ocaml_comment>"*)" { BEGIN(INITIAL); }
|
||||
<python_dcomment>"\"\"\"" { BEGIN(INITIAL); }
|
||||
<python_scomment>"'''" { BEGIN(INITIAL); }
|
||||
|
||||
\"\"|'' { /* nothing */ }
|
||||
\" { eat_until_unescaped('"'); }
|
||||
' { eat_until_unescaped('\''); }
|
||||
(0x[0-9a-fA-F]([0-9a-fA-F]|\.)*|[0-9]([0-9]|\.)*)([uU][lL]{0,2}|([eE][-+][0-9]*)?[fFlL]*) { /* nothing */ }
|
||||
\<[[:alnum:]_!./?-]+ {
|
||||
if (strcmp(yytext, "<!--") == 0) {
|
||||
BEGIN(xml_comment);
|
||||
} else {
|
||||
feed_token(strdup(yytext), SGML_TOKEN);
|
||||
BEGIN(sgml);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
<sgml>[[:alnum:]_]+=\" { feed_token(strndup(yytext, strlen(yytext) - 1), REGULAR_TOKEN); eat_until_unescaped('"'); return 1; }
|
||||
<sgml>[[:alnum:]_]+=' { feed_token(strndup(yytext, strlen(yytext) - 1), REGULAR_TOKEN); eat_until_unescaped('\''); return 1; }
|
||||
<sgml>[[:alnum:]_]+=[[:alnum:]_]* { feed_token(strdup(yytext), REGULAR_TOKEN); *(strchr(yyextra->token, '=') + 1) = 0; return 1; }
|
||||
<sgml>[[:alnum:]_]+ { feed_token(strdup(yytext), REGULAR_TOKEN); return 1; }
|
||||
<sgml>\> { BEGIN(INITIAL); }
|
||||
<sgml>.|\n { /* nothing */ }
|
||||
;|\{|\}|\(|\)|\[|\] { feed_token(strdup(yytext), REGULAR_TOKEN); return 1; }
|
||||
[[:alnum:]_.@#/*]+ {
|
||||
if (strncmp(yytext, "/*", 2) == 0) {
|
||||
if (strlen(yytext) >= 4 && strcmp(yytext + strlen(yytext) - 2, "*/") == 0) {
|
||||
/* nothing */
|
||||
} else {
|
||||
BEGIN(c_comment);
|
||||
}
|
||||
} else {
|
||||
feed_token(strdup(yytext), REGULAR_TOKEN);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
\<\<?|\+|\-|\*|\/|%|&&?|\|\|? { feed_token(strdup(yytext), REGULAR_TOKEN); return 1; }
|
||||
.|\n { /* nothing */ }
|
||||
|
||||
%%
|
||||
|
||||
@@ -2,7 +2,7 @@ require File.expand_path('../lib/linguist/version', __FILE__)
|
||||
|
||||
Gem::Specification.new do |s|
|
||||
s.name = 'github-linguist'
|
||||
s.version = Linguist::VERSION
|
||||
s.version = ENV['GEM_VERSION'] || Linguist::VERSION
|
||||
s.summary = "GitHub Language detection"
|
||||
s.description = 'We use this library at GitHub to detect blob languages, highlight code, ignore binary files, suppress generated files in diffs, and generate language breakdown graphs.'
|
||||
|
||||
@@ -10,21 +10,23 @@ Gem::Specification.new do |s|
|
||||
s.homepage = "https://github.com/github/linguist"
|
||||
s.license = "MIT"
|
||||
|
||||
s.files = Dir['lib/**/*'] + Dir['grammars/*'] + ['LICENSE']
|
||||
s.files = Dir['lib/**/*'] + Dir['ext/**/*'] + Dir['grammars/*'] + ['LICENSE']
|
||||
s.executables = ['linguist', 'git-linguist']
|
||||
s.extensions = ['ext/linguist/extconf.rb']
|
||||
|
||||
s.add_dependency 'charlock_holmes', '~> 0.7.3'
|
||||
s.add_dependency 'escape_utils', '~> 1.1.0'
|
||||
s.add_dependency 'charlock_holmes', '~> 0.7.6'
|
||||
s.add_dependency 'escape_utils', '~> 1.2.0'
|
||||
s.add_dependency 'mime-types', '>= 1.19'
|
||||
s.add_dependency 'rugged', '>= 0.25.1'
|
||||
|
||||
s.add_development_dependency 'minitest', '>= 5.0'
|
||||
s.add_development_dependency 'rake-compiler', '~> 0.9'
|
||||
s.add_development_dependency 'mocha'
|
||||
s.add_development_dependency 'plist', '~>3.1'
|
||||
s.add_development_dependency 'pry'
|
||||
s.add_development_dependency 'rake'
|
||||
s.add_development_dependency 'yajl-ruby'
|
||||
s.add_development_dependency 'color-proximity', '~> 0.2.1'
|
||||
s.add_development_dependency 'licensed'
|
||||
s.add_development_dependency 'licensee', '~> 8.8.0'
|
||||
s.add_development_dependency 'licensed', '~> 1.0.0'
|
||||
s.add_development_dependency 'licensee'
|
||||
end
|
||||
|
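The gemspec changes above ship `ext/` and declare `ext/linguist/extconf.rb` as a native extension. That file is not part of this diff, so the following is only a plausible minimal sketch of what it might contain, given that `tokenizer.rb` loads the compiled library via `require 'linguist/linguist'`.

```ruby
# ext/linguist/extconf.rb -- hypothetical sketch; the real file is not shown in this diff.
require 'mkmf'

# Compiles ext/linguist/*.c (linguist.c plus the flex-generated scanner)
# into linguist/linguist.{so,bundle}, loadable as `require 'linguist/linguist'`.
create_makefile('linguist/linguist')
```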
||||
77
grammars.yml
@@ -1,4 +1,3 @@
|
||||
---
|
||||
https://bitbucket.org/Clams/sublimesystemverilog/get/default.tar.gz:
|
||||
- source.systemverilog
|
||||
- source.ucfconstraints
|
||||
@@ -10,6 +9,8 @@ vendor/grammars/Agda.tmbundle:
|
||||
- source.agda
|
||||
vendor/grammars/Alloy.tmbundle:
|
||||
- source.alloy
|
||||
vendor/grammars/Assembly-Syntax-Definition:
|
||||
- source.assembly.unix
|
||||
vendor/grammars/AutoHotkey:
|
||||
- source.ahk
|
||||
vendor/grammars/BrightScript.tmbundle:
|
||||
@@ -45,12 +46,12 @@ vendor/grammars/Isabelle.tmbundle:
|
||||
- source.isabelle.theory
|
||||
vendor/grammars/JSyntax:
|
||||
- source.j
|
||||
vendor/grammars/Julia.tmbundle:
|
||||
- source.julia
|
||||
vendor/grammars/Lean.tmbundle:
|
||||
- source.lean
|
||||
vendor/grammars/LiveScript.tmbundle:
|
||||
- source.livescript
|
||||
vendor/grammars/MATLAB-Language-grammar:
|
||||
- source.matlab
|
||||
vendor/grammars/MQL5-sublime:
|
||||
- source.mql5
|
||||
vendor/grammars/MagicPython:
|
||||
@@ -130,6 +131,9 @@ vendor/grammars/SublimePuppet:
|
||||
- source.puppet
|
||||
vendor/grammars/SublimeXtend:
|
||||
- source.xtend
|
||||
vendor/grammars/Syntax-highlighting-for-PostCSS:
|
||||
- source.css.postcss.sugarss
|
||||
- source.postcss
|
||||
vendor/grammars/TLA:
|
||||
- source.tla
|
||||
vendor/grammars/TXL:
|
||||
@@ -138,6 +142,11 @@ vendor/grammars/Terraform.tmLanguage:
|
||||
- source.terraform
|
||||
vendor/grammars/Textmate-Gosu-Bundle:
|
||||
- source.gosu.2
|
||||
vendor/grammars/TypeScript-TmLanguage:
|
||||
- source.ts
|
||||
- source.tsx
|
||||
- text.error-list
|
||||
- text.find-refs
|
||||
vendor/grammars/UrWeb-Language-Definition:
|
||||
- source.ur
|
||||
vendor/grammars/VBDotNetSyntax:
|
||||
@@ -187,6 +196,12 @@ vendor/grammars/atom-language-1c-bsl:
|
||||
vendor/grammars/atom-language-clean:
|
||||
- source.clean
|
||||
- text.restructuredtext.clean
|
||||
vendor/grammars/atom-language-julia:
|
||||
- source.julia
|
||||
- source.julia.console
|
||||
vendor/grammars/atom-language-nextflow:
|
||||
- source.nextflow
|
||||
- source.nextflow-groovy
|
||||
vendor/grammars/atom-language-p4:
|
||||
- source.p4
|
||||
vendor/grammars/atom-language-perl6:
|
||||
@@ -235,6 +250,8 @@ vendor/grammars/chapel-tmbundle:
|
||||
vendor/grammars/cmake.tmbundle:
|
||||
- source.cache.cmake
|
||||
- source.cmake
|
||||
vendor/grammars/conllu-linguist-grammar:
|
||||
- text.conllu
|
||||
vendor/grammars/cool-tmbundle:
|
||||
- source.cool
|
||||
vendor/grammars/cpp-qt.tmbundle:
|
||||
@@ -252,6 +269,8 @@ vendor/grammars/d.tmbundle:
|
||||
vendor/grammars/dartlang:
|
||||
- source.dart
|
||||
- source.yaml-ext
|
||||
vendor/grammars/data-weave-tmLanguage:
|
||||
- source.data-weave
|
||||
vendor/grammars/desktop.tmbundle:
|
||||
- source.desktop
|
||||
vendor/grammars/diff.tmbundle:
|
||||
@@ -303,12 +322,9 @@ vendor/grammars/graphviz.tmbundle:
|
||||
- source.dot
|
||||
vendor/grammars/groovy.tmbundle:
|
||||
- source.groovy
|
||||
vendor/grammars/haxe-sublime-bundle:
|
||||
- source.erazor
|
||||
- source.haxe.2
|
||||
- source.hss.1
|
||||
vendor/grammars/haxe-TmLanguage:
|
||||
- source.hx
|
||||
- source.hxml
|
||||
- source.nmml
|
||||
vendor/grammars/html.tmbundle:
|
||||
- text.html.basic
|
||||
vendor/grammars/idl.tmbundle:
|
||||
@@ -333,14 +349,14 @@ vendor/grammars/java.tmbundle:
|
||||
- source.java-properties
|
||||
- text.html.jsp
|
||||
- text.junit-test-report
|
||||
vendor/grammars/javadoc.tmbundle:
|
||||
- text.html.javadoc
|
||||
vendor/grammars/javascript-objective-j.tmbundle:
|
||||
- source.js.objj
|
||||
vendor/grammars/jflex.tmbundle:
|
||||
- source.jflex
|
||||
vendor/grammars/json.tmbundle:
|
||||
- source.json
|
||||
vendor/grammars/kotlin-sublime-package:
|
||||
- source.Kotlin
|
||||
vendor/grammars/language-agc:
|
||||
- source.agc
|
||||
vendor/grammars/language-apl:
|
||||
@@ -350,6 +366,8 @@ vendor/grammars/language-asn1:
|
||||
vendor/grammars/language-babel:
|
||||
- source.js.jsx
|
||||
- source.regexp.babel
|
||||
vendor/grammars/language-ballerina:
|
||||
- source.ballerina
|
||||
vendor/grammars/language-batchfile:
|
||||
- source.batchfile
|
||||
vendor/grammars/language-blade:
|
||||
@@ -370,13 +388,14 @@ vendor/grammars/language-csharp:
|
||||
- source.cake
|
||||
- source.cs
|
||||
- source.csx
|
||||
- source.nant-build
|
||||
vendor/grammars/language-csound:
|
||||
- source.csound
|
||||
- source.csound-document
|
||||
- source.csound-score
|
||||
vendor/grammars/language-css:
|
||||
- source.css
|
||||
vendor/grammars/language-cwl:
|
||||
- source.cwl
|
||||
vendor/grammars/language-emacs-lisp:
|
||||
- source.emacs.lisp
|
||||
vendor/grammars/language-fontforge:
|
||||
@@ -394,6 +413,7 @@ vendor/grammars/language-haml:
|
||||
- text.haml
|
||||
- text.hamlc
|
||||
vendor/grammars/language-haskell:
|
||||
- annotation.liquidhaskell.haskell
|
||||
- hint.haskell
|
||||
- hint.message.haskell
|
||||
- hint.type.haskell
|
||||
@@ -401,6 +421,7 @@ vendor/grammars/language-haskell:
|
||||
- source.cabal
|
||||
- source.haskell
|
||||
- source.hsc2hs
|
||||
- source.hsig
|
||||
- text.tex.latex.haskell
|
||||
vendor/grammars/language-inform7:
|
||||
- source.inform7
|
||||
@@ -418,6 +439,8 @@ vendor/grammars/language-jolie:
|
||||
vendor/grammars/language-jsoniq:
|
||||
- source.jq
|
||||
- source.xq
|
||||
vendor/grammars/language-kotlin:
|
||||
- source.kotlin
|
||||
vendor/grammars/language-less:
|
||||
- source.css.less
|
||||
vendor/grammars/language-maxscript:
|
||||
@@ -459,6 +482,12 @@ vendor/grammars/language-roff:
|
||||
vendor/grammars/language-rpm-spec:
|
||||
- source.changelogs.rpm-spec
|
||||
- source.rpm-spec
|
||||
vendor/grammars/language-ruby:
|
||||
- source.ruby
|
||||
- source.ruby.gemfile
|
||||
- text.html.erb
|
||||
vendor/grammars/language-sed:
|
||||
- source.sed
|
||||
vendor/grammars/language-shellscript:
|
||||
- source.shell
|
||||
- text.shell-session
|
||||
@@ -485,6 +514,8 @@ vendor/grammars/language-yaml:
|
||||
- source.yaml
|
||||
vendor/grammars/language-yang:
|
||||
- source.yang
|
||||
vendor/grammars/language-yara:
|
||||
- source.yara
|
||||
vendor/grammars/latex.tmbundle:
|
||||
- text.bibtex
|
||||
- text.log.latex
|
||||
@@ -514,9 +545,6 @@ vendor/grammars/marko-tmbundle:
|
||||
- text.marko
|
||||
vendor/grammars/mathematica-tmbundle:
|
||||
- source.mathematica
|
||||
vendor/grammars/matlab.tmbundle:
|
||||
- source.matlab
|
||||
- source.octave
|
||||
vendor/grammars/maven.tmbundle:
|
||||
- text.xml.pom
|
||||
vendor/grammars/mediawiki.tmbundle:
|
||||
@@ -551,7 +579,7 @@ vendor/grammars/opa.tmbundle:
|
||||
- source.opa
|
||||
vendor/grammars/openscad.tmbundle:
|
||||
- source.scad
|
||||
vendor/grammars/oz-tmbundle/Syntaxes/Oz.tmLanguage:
|
||||
vendor/grammars/oz-tmbundle:
|
||||
- source.oz
|
||||
vendor/grammars/parrot:
|
||||
- source.parrot.pir
|
||||
@@ -588,9 +616,6 @@ vendor/grammars/rascal-syntax-highlighting:
|
||||
- source.rascal
|
||||
vendor/grammars/ruby-slim.tmbundle:
|
||||
- text.slim
|
||||
vendor/grammars/ruby.tmbundle:
|
||||
- source.ruby
|
||||
- text.html.erb
|
||||
vendor/grammars/sas.tmbundle:
|
||||
- source.SASLog
|
||||
- source.sas
|
||||
@@ -598,7 +623,6 @@ vendor/grammars/sass-textmate-bundle:
|
||||
- source.sass
|
||||
vendor/grammars/scala.tmbundle:
|
||||
- source.sbt
|
||||
- source.scala
|
||||
vendor/grammars/scheme.tmbundle:
|
||||
- source.scheme
|
||||
vendor/grammars/scilab.tmbundle:
|
||||
@@ -616,6 +640,8 @@ vendor/grammars/sourcepawn:
|
||||
- source.sp
|
||||
vendor/grammars/sql.tmbundle:
|
||||
- source.sql
|
||||
vendor/grammars/squirrel-language:
|
||||
- source.nut
|
||||
vendor/grammars/st2-zonefile:
|
||||
- text.zone_file
|
||||
vendor/grammars/standard-ml.tmbundle:
|
||||
@@ -623,6 +649,8 @@ vendor/grammars/standard-ml.tmbundle:
|
||||
- source.ml
|
||||
vendor/grammars/sublime-MuPAD:
|
||||
- source.mupad
|
||||
vendor/grammars/sublime-angelscript:
|
||||
- source.angelscript
|
||||
vendor/grammars/sublime-aspectj:
|
||||
- source.aspectj
|
||||
vendor/grammars/sublime-autoit:
|
||||
@@ -644,6 +672,8 @@ vendor/grammars/sublime-golo:
|
||||
- source.golo
|
||||
vendor/grammars/sublime-mask:
|
||||
- source.mask
|
||||
vendor/grammars/sublime-nearley:
|
||||
- source.ne
|
||||
vendor/grammars/sublime-netlinx:
|
||||
- source.netlinx
|
||||
- source.netlinx.erb
|
||||
@@ -669,11 +699,6 @@ vendor/grammars/sublime-terra:
|
||||
- source.terra
|
||||
vendor/grammars/sublime-text-ox:
|
||||
- source.ox
|
||||
vendor/grammars/sublime-typescript:
|
||||
- source.ts
|
||||
- source.tsx
|
||||
- text.error-list
|
||||
- text.find-refs
|
||||
vendor/grammars/sublime-varnish:
|
||||
- source.varnish.vcl
|
||||
vendor/grammars/sublime_cobol:
|
||||
@@ -704,8 +729,12 @@ vendor/grammars/verilog.tmbundle:
|
||||
- source.verilog
|
||||
vendor/grammars/vhdl:
|
||||
- source.vhdl
|
||||
vendor/grammars/vscode-scala-syntax:
|
||||
- source.scala
|
||||
vendor/grammars/vue-syntax-highlight:
|
||||
- text.html.vue
|
||||
vendor/grammars/wdl-sublime-syntax-highlighter:
|
||||
- source.wdl
|
||||
vendor/grammars/xc.tmbundle:
|
||||
- source.xc
|
||||
vendor/grammars/xml.tmbundle:
|
||||
|
||||
@@ -11,11 +11,13 @@ module Linguist
|
||||
#
|
||||
# path - A path String (does not necessarily exist on the file system).
|
||||
# content - Content of the file.
|
||||
# symlink - Whether the file is a symlink.
|
||||
#
|
||||
# Returns a Blob.
|
||||
def initialize(path, content)
|
||||
def initialize(path, content, symlink: false)
|
||||
@path = path
|
||||
@content = content
|
||||
@symlink = symlink
|
||||
end
|
||||
|
||||
# Public: Filename
|
||||
@@ -69,5 +71,12 @@ module Linguist
|
||||
"." + segments[index..-1].join(".")
|
||||
end
|
||||
end
|
||||
|
||||
# Public: Is this a symlink?
|
||||
#
|
||||
# Returns true or false.
|
||||
def symlink?
|
||||
@symlink
|
||||
end
|
||||
end
|
||||
end
|
||||
|
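A small hedged sketch of the widened constructor (file names and contents invented): an in-memory `Linguist::Blob` can now be flagged as a symlink, which the detection strategies use to bail out early.

```ruby
require 'linguist'

blob = Linguist::Blob.new("app/script.rb", "#!/usr/bin/env ruby\nputs 1\n")
blob.symlink?   # => false (the default)

link = Linguist::Blob.new("bin/script", "../app/script.rb", symlink: true)
link.symlink?   # => true; Shebang, Modeline and Heuristics return [] for such blobs
```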
||||
@@ -275,10 +275,8 @@ module Linguist
|
||||
# also--importantly--without having to duplicate many (potentially
|
||||
# large) strings.
|
||||
begin
|
||||
encoded_newlines = ["\r\n", "\r", "\n"].
|
||||
map { |nl| nl.encode(ruby_encoding, "ASCII-8BIT").force_encoding(data.encoding) }
|
||||
|
||||
data.split(Regexp.union(encoded_newlines), -1)
|
||||
|
||||
data.split(encoded_newlines_re, -1)
|
||||
rescue Encoding::ConverterNotFoundError
|
||||
# The data is not splittable in the detected encoding. Assume it's
|
||||
# one big line.
|
||||
@@ -289,6 +287,51 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
def encoded_newlines_re
|
||||
@encoded_newlines_re ||= Regexp.union(["\r\n", "\r", "\n"].
|
||||
map { |nl| nl.encode(ruby_encoding, "ASCII-8BIT").force_encoding(data.encoding) })
|
||||
|
||||
end
|
||||
|
||||
def first_lines(n)
|
||||
return lines[0...n] if defined? @lines
|
||||
return [] unless viewable? && data
|
||||
|
||||
i, c = 0, 0
|
||||
while c < n && j = data.index(encoded_newlines_re, i)
|
||||
i = j + $&.length
|
||||
c += 1
|
||||
end
|
||||
data[0...i].split(encoded_newlines_re, -1)
|
||||
end
|
||||
|
||||
def last_lines(n)
|
||||
if defined? @lines
|
||||
if n >= @lines.length
|
||||
@lines
|
||||
else
|
||||
lines[-n..-1]
|
||||
end
|
||||
end
|
||||
return [] unless viewable? && data
|
||||
|
||||
no_eol = true
|
||||
i, c = data.length, 0
|
||||
k = i
|
||||
while c < n && j = data.rindex(encoded_newlines_re, i - 1)
|
||||
if c == 0 && j + $&.length == i
|
||||
no_eol = false
|
||||
n += 1
|
||||
end
|
||||
i = j
|
||||
k = j + $&.length
|
||||
c += 1
|
||||
end
|
||||
r = data[k..-1].split(encoded_newlines_re, -1)
|
||||
r.pop if !no_eol
|
||||
r
|
||||
end
|
||||
|
||||
# Public: Get number of lines of code
|
||||
#
|
||||
# Requires Blob#data
|
||||
@@ -340,7 +383,10 @@ module Linguist
|
||||
!vendored? &&
|
||||
!documentation? &&
|
||||
!generated? &&
|
||||
language && DETECTABLE_TYPES.include?(language.type)
|
||||
language && ( defined?(detectable?) && !detectable?.nil? ?
|
||||
detectable? :
|
||||
DETECTABLE_TYPES.include?(language.type)
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -3,6 +3,8 @@ require 'linguist/tokenizer'
|
||||
module Linguist
|
||||
# Language bayesian classifier.
|
||||
class Classifier
|
||||
CLASSIFIER_CONSIDER_BYTES = 50 * 1024
|
||||
|
||||
# Public: Use the classifier to detect language of the blob.
|
||||
#
|
||||
# blob - An object that quacks like a blob.
|
||||
@@ -17,7 +19,7 @@ module Linguist
|
||||
# Returns an Array of Language objects, most probable first.
|
||||
def self.call(blob, possible_languages)
|
||||
language_names = possible_languages.map(&:name)
|
||||
classify(Samples.cache, blob.data, language_names).map do |name, _|
|
||||
classify(Samples.cache, blob.data[0...CLASSIFIER_CONSIDER_BYTES], language_names).map do |name, _|
|
||||
Language[name] # Return the actual Language objects
|
||||
end
|
||||
end
|
||||
|
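A usage sketch for the capped classifier call above (path and candidate set hypothetical): only the first `CLASSIFIER_CONSIDER_BYTES` (50 KiB) of the blob are tokenized now.

```ruby
require 'linguist'

blob = Linguist::FileBlob.new("src/matrix.h")
candidates = [Linguist::Language["C"], Linguist::Language["C++"], Linguist::Language["Objective-C"]]

# Ranks the candidates by token probability, most probable first, reading at
# most the first 50 KiB of blob.data.
Linguist::Classifier.call(blob, candidates)
# => e.g. [#<Linguist::Language name=C++>, #<Linguist::Language name=C>, ...]
```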
||||
@@ -15,6 +15,7 @@
|
||||
- ^[Mm]an/
|
||||
- ^[Ee]xamples/
|
||||
- ^[Dd]emos?/
|
||||
- (^|/)inst/doc/
|
||||
|
||||
## Documentation files ##
|
||||
|
||||
|
||||
@@ -23,21 +23,26 @@ module Linguist
|
||||
#
|
||||
# Returns a String like '100644'
|
||||
def mode
|
||||
File.stat(@fullpath).mode.to_s(8)
|
||||
@mode ||= File.stat(@fullpath).mode.to_s(8)
|
||||
end
|
||||
|
||||
def symlink?
|
||||
return @symlink if defined? @symlink
|
||||
@symlink = (File.symlink?(@fullpath) rescue false)
|
||||
end
|
||||
|
||||
# Public: Read file contents.
|
||||
#
|
||||
# Returns a String.
|
||||
def data
|
||||
File.read(@fullpath)
|
||||
@data ||= File.read(@fullpath)
|
||||
end
|
||||
|
||||
# Public: Get byte size
|
||||
#
|
||||
# Returns an Integer.
|
||||
def size
|
||||
File.size(@fullpath)
|
||||
@size ||= File.size(@fullpath)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -52,9 +52,12 @@ module Linguist
|
||||
# Return true or false
|
||||
def generated?
|
||||
xcode_file? ||
|
||||
cocoapods? ||
|
||||
carthage_build? ||
|
||||
generated_net_designer_file? ||
|
||||
generated_net_specflow_feature_file? ||
|
||||
composer_lock? ||
|
||||
cargo_lock? ||
|
||||
node_modules? ||
|
||||
go_vendor? ||
|
||||
npm_shrinkwrap_or_package_lock? ||
|
||||
@@ -95,6 +98,20 @@ module Linguist
|
||||
['.nib', '.xcworkspacedata', '.xcuserstate'].include?(extname)
|
||||
end
|
||||
|
||||
# Internal: Is the blob part of Pods/, which contains dependencies not meant for humans in pull requests.
|
||||
#
|
||||
# Returns true or false.
|
||||
def cocoapods?
|
||||
!!name.match(/(^Pods|\/Pods)\//)
|
||||
end
|
||||
|
||||
# Internal: Is the blob part of Carthage/Build/, which contains dependencies not meant for humans in pull requests.
|
||||
#
|
||||
# Returns true or false.
|
||||
def carthage_build?
|
||||
!!name.match(/(^|\/)Carthage\/Build\//)
|
||||
end
|
||||
|
||||
# Internal: Is the blob minified files?
|
||||
#
|
||||
# Consider a file minified if the average line length is
|
||||
@@ -206,7 +223,7 @@ module Linguist
|
||||
#
|
||||
# Returns true or false
|
||||
def generated_net_designer_file?
|
||||
name.downcase =~ /\.designer\.cs$/
|
||||
name.downcase =~ /\.designer\.(cs|vb)$/
|
||||
end
|
||||
|
||||
# Internal: Is this a codegen file for Specflow feature file?
|
||||
@@ -362,6 +379,13 @@ module Linguist
|
||||
!!name.match(/.\.zep\.(?:c|h|php)$/)
|
||||
end
|
||||
|
||||
# Internal: Is the blob a generated Rust Cargo lock file?
|
||||
#
|
||||
# Returns true or false.
|
||||
def cargo_lock?
|
||||
!!name.match(/Cargo\.lock/)
|
||||
end
|
||||
|
||||
# Is the blob a VCR Cassette file?
|
||||
#
|
||||
# Returns true or false
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
module Linguist
|
||||
# A collection of simple heuristics that can be used to better analyze languages.
|
||||
class Heuristics
|
||||
HEURISTICS_CONSIDER_BYTES = 50 * 1024
|
||||
|
||||
# Public: Use heuristics to detect language of the blob.
|
||||
#
|
||||
# blob - An object that quacks like a blob.
|
||||
@@ -14,7 +16,9 @@ module Linguist
|
||||
#
|
||||
# Returns an Array of languages, or empty if none matched or were inconclusive.
|
||||
def self.call(blob, candidates)
|
||||
data = blob.data
|
||||
return [] if blob.symlink?
|
||||
|
||||
data = blob.data[0...HEURISTICS_CONSIDER_BYTES]
|
||||
|
||||
@heuristics.each do |heuristic|
|
||||
if heuristic.matches?(blob.name, candidates)
|
||||
@@ -71,7 +75,25 @@ module Linguist
|
||||
end
|
||||
|
||||
# Common heuristics
|
||||
CPlusPlusRegex = Regexp.union(
|
||||
/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/,
|
||||
/^\s*template\s*</,
|
||||
/^[ \t]*try/,
|
||||
/^[ \t]*catch\s*\(/,
|
||||
/^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/,
|
||||
/^[ \t]*(private|public|protected):$/,
|
||||
/std::\w+/)
|
||||
ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/
|
||||
Perl5Regex = /\buse\s+(?:strict\b|v?5\.)/
|
||||
Perl6Regex = /^\s*(?:use\s+v6\b|\bmodule\b|\b(?:my\s+)?class\b)/
|
||||
|
||||
disambiguate ".as" do |data|
|
||||
if /^\s*(package\s+[a-z0-9_\.]+|import\s+[a-zA-Z0-9_\.]+;|class\s+[A-Za-z0-9_]+\s+extends\s+[A-Za-z0-9_]+)/.match(data)
|
||||
Language["ActionScript"]
|
||||
else
|
||||
Language["AngelScript"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".asc" do |data|
|
||||
if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
|
||||
@@ -211,8 +233,7 @@ module Linguist
|
||||
disambiguate ".h" do |data|
|
||||
if ObjectiveCRegex.match(data)
|
||||
Language["Objective-C"]
|
||||
elsif (/^\s*#\s*include <(cstdint|string|vector|map|list|array|bitset|queue|stack|forward_list|unordered_map|unordered_set|(i|o|io)stream)>/.match(data) ||
|
||||
/^\s*template\s*</.match(data) || /^[ \t]*try/.match(data) || /^[ \t]*catch\s*\(/.match(data) || /^[ \t]*(class|(using[ \t]+)?namespace)\s+\w+/.match(data) || /^[ \t]*(private|public|protected):$/.match(data) || /std::\w+/.match(data))
|
||||
elsif CPlusPlusRegex.match(data)
|
||||
Language["C++"]
|
||||
end
|
||||
end
|
||||
@@ -342,33 +363,25 @@ module Linguist
|
||||
disambiguate ".pl" do |data|
|
||||
if /^[^#]*:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif /use strict|use\s+v?5\./.match(data)
|
||||
elsif Perl5Regex.match(data)
|
||||
Language["Perl"]
|
||||
elsif /^(use v6|(my )?class|module)/.match(data)
|
||||
elsif Perl6Regex.match(data)
|
||||
Language["Perl 6"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pm" do |data|
|
||||
if /^\s*(?:use\s+v6\s*;|(?:\bmy\s+)?class|module)\b/.match(data)
|
||||
Language["Perl 6"]
|
||||
elsif /\buse\s+(?:strict\b|v?5\.)/.match(data)
|
||||
if Perl5Regex.match(data)
|
||||
Language["Perl"]
|
||||
elsif Perl6Regex.match(data)
|
||||
Language["Perl 6"]
|
||||
elsif /^\s*\/\* XPM \*\//.match(data)
|
||||
Language["XPM"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pod", "Pod", "Perl" do |data|
|
||||
if /^=\w+\b/.match(data)
|
||||
Language["Pod"]
|
||||
else
|
||||
Language["Perl"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".pro" do |data|
|
||||
if /^[^#]+:-/.match(data)
|
||||
if /^[^\[#]+:-/.match(data)
|
||||
Language["Prolog"]
|
||||
elsif data.include?("last_client=")
|
||||
Language["INI"]
|
||||
@@ -450,12 +463,12 @@ module Linguist
|
||||
end
|
||||
|
||||
disambiguate ".t" do |data|
|
||||
if /^\s*%[ \t]+|^\s*var\s+\w+\s*:=\s*\w+/.match(data)
|
||||
Language["Turing"]
|
||||
elsif /^\s*(?:use\s+v6\s*;|\bmodule\b|\b(?:my\s+)?class\b)/.match(data)
|
||||
Language["Perl 6"]
|
||||
elsif /\buse\s+(?:strict\b|v?5\.)/.match(data)
|
||||
if Perl5Regex.match(data)
|
||||
Language["Perl"]
|
||||
elsif Perl6Regex.match(data)
|
||||
Language["Perl 6"]
|
||||
elsif /^\s*%[ \t]+|^\s*var\s+\w+\s*:=\s*\w+/.match(data)
|
||||
Language["Turing"]
|
||||
end
|
||||
end
|
||||
|
||||
@@ -468,7 +481,7 @@ module Linguist
|
||||
end
|
||||
|
||||
disambiguate ".ts" do |data|
|
||||
if data.include?("<TS")
|
||||
if /<TS\b/.match(data)
|
||||
Language["XML"]
|
||||
else
|
||||
Language["TypeScript"]
|
||||
@@ -491,5 +504,24 @@ module Linguist
|
||||
Language["XML"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".w" do |data|
|
||||
if (data.include?("&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CUSTOM _DEFINITIONS"))
|
||||
Language["OpenEdge ABL"]
|
||||
elsif /^@(<|\w+\.)/.match(data)
|
||||
Language["CWeb"]
|
||||
end
|
||||
end
|
||||
|
||||
disambiguate ".x" do |data|
|
||||
if /\b(program|version)\s+\w+\s*{|\bunion\s+\w+\s+switch\s*\(/.match(data)
|
||||
Language["RPC"]
|
||||
elsif /^%(end|ctor|hook|group)\b/.match(data)
|
||||
Language["Logos"]
|
||||
elsif /OUTPUT_ARCH\(|OUTPUT_FORMAT\(|SECTIONS/.match(data)
|
||||
Language["Linker Script"]
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
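Similarly, a hedged sketch of the heuristics entry point after this change (path invented): symlinks are skipped outright and only the first 50 KiB of data reaches the disambiguation blocks, such as the consolidated `CPlusPlusRegex` used for `.h` files.

```ruby
require 'linguist'

blob = Linguist::FileBlob.new("include/widget.h")
candidates = Linguist::Language.find_by_extension("widget.h")  # roughly C, C++, Objective-C

Linguist::Heuristics.call(blob, candidates)
# => [Linguist::Language["Objective-C"]] when ObjectiveCRegex matches,
#    [Linguist::Language["C++"]]         when CPlusPlusRegex matches,
#    []                                  when the heuristic is inconclusive
```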
||||
@@ -110,7 +110,7 @@ module Linguist
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_name(name)
|
||||
return nil if !name.is_a?(String) || name.to_s.empty?
|
||||
name && (@name_index[name.downcase] || @name_index[name.split(',').first.downcase])
|
||||
name && (@name_index[name.downcase] || @name_index[name.split(',', 2).first.downcase])
|
||||
end
|
||||
|
||||
# Public: Look up Language by one of its aliases.
|
||||
@@ -125,7 +125,7 @@ module Linguist
|
||||
# Returns the Language or nil if none was found.
|
||||
def self.find_by_alias(name)
|
||||
return nil if !name.is_a?(String) || name.to_s.empty?
|
||||
name && (@alias_index[name.downcase] || @alias_index[name.split(',').first.downcase])
|
||||
name && (@alias_index[name.downcase] || @alias_index[name.split(',', 2).first.downcase])
|
||||
end
|
||||
|
||||
# Public: Look up Languages by filename.
|
||||
@@ -219,10 +219,7 @@ module Linguist
|
||||
lang = @index[name.downcase]
|
||||
return lang if lang
|
||||
|
||||
name = name.split(',').first
|
||||
return nil if name.to_s.empty?
|
||||
|
||||
@index[name.downcase]
|
||||
@index[name.split(',', 2).first.downcase]
|
||||
end
|
||||
|
||||
# Public: A List of popular languages
|
||||
@@ -542,14 +539,6 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
if fns = filenames[name]
|
||||
fns.each do |filename|
|
||||
if !options['filenames'].include?(filename)
|
||||
options['filenames'] << filename
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
Language.create(
|
||||
:name => name,
|
||||
:color => options['color'],
|
||||
|
||||
@@ -210,6 +210,17 @@ Alpine Abuild:
|
||||
codemirror_mode: shell
|
||||
codemirror_mime_type: text/x-sh
|
||||
language_id: 14
|
||||
AngelScript:
|
||||
type: programming
|
||||
color: "#C7D7DC"
|
||||
extensions:
|
||||
- ".as"
|
||||
- ".angelscript"
|
||||
tm_scope: source.angelscript
|
||||
ace_mode: text
|
||||
codemirror_mode: clike
|
||||
codemirror_mime_type: text/x-c++src
|
||||
language_id: 389477596
|
||||
Ant Build System:
|
||||
type: data
|
||||
tm_scope: text.xml.ant
|
||||
@@ -221,13 +232,17 @@ Ant Build System:
|
||||
codemirror_mime_type: application/xml
|
||||
language_id: 15
|
||||
ApacheConf:
|
||||
type: markup
|
||||
type: data
|
||||
aliases:
|
||||
- aconf
|
||||
- apache
|
||||
extensions:
|
||||
- ".apacheconf"
|
||||
- ".vhost"
|
||||
filenames:
|
||||
- ".htaccess"
|
||||
- apache2.conf
|
||||
- httpd.conf
|
||||
tm_scope: source.apache-config
|
||||
ace_mode: apache_conf
|
||||
language_id: 16
|
||||
@@ -268,16 +283,6 @@ Arc:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
language_id: 20
|
||||
Arduino:
|
||||
type: programming
|
||||
color: "#bd79d1"
|
||||
extensions:
|
||||
- ".ino"
|
||||
tm_scope: source.c++
|
||||
ace_mode: c_cpp
|
||||
codemirror_mode: clike
|
||||
codemirror_mime_type: text/x-c++src
|
||||
language_id: 21
|
||||
AsciiDoc:
|
||||
type: prose
|
||||
ace_mode: asciidoc
|
||||
@@ -300,6 +305,7 @@ Assembly:
|
||||
type: programming
|
||||
color: "#6E4C13"
|
||||
aliases:
|
||||
- asm
|
||||
- nasm
|
||||
extensions:
|
||||
- ".asm"
|
||||
@@ -354,6 +360,14 @@ Awk:
|
||||
- nawk
|
||||
ace_mode: text
|
||||
language_id: 28
|
||||
Ballerina:
|
||||
type: programming
|
||||
extensions:
|
||||
- ".bal"
|
||||
tm_scope: source.ballerina
|
||||
ace_mode: text
|
||||
color: "#FF5000"
|
||||
language_id: 720859680
|
||||
Batchfile:
|
||||
type: programming
|
||||
aliases:
|
||||
@@ -509,6 +523,7 @@ C++:
|
||||
- ".hxx"
|
||||
- ".inc"
|
||||
- ".inl"
|
||||
- ".ino"
|
||||
- ".ipp"
|
||||
- ".re"
|
||||
- ".tcc"
|
||||
@@ -625,8 +640,10 @@ CartoCSS:
|
||||
language_id: 53
|
||||
Ceylon:
|
||||
type: programming
|
||||
color: "#dfa535"
|
||||
extensions:
|
||||
- ".ceylon"
|
||||
tm_scope: source.ceylon
|
||||
ace_mode: text
|
||||
language_id: 54
|
||||
Chapel:
|
||||
@@ -717,6 +734,17 @@ Closure Templates:
|
||||
- ".soy"
|
||||
tm_scope: text.html.soy
|
||||
language_id: 357046146
|
||||
CoNLL-U:
|
||||
type: data
|
||||
extensions:
|
||||
- ".conllu"
|
||||
- ".conll"
|
||||
tm_scope: text.conllu
|
||||
ace_mode: text
|
||||
aliases:
|
||||
- CoNLL
|
||||
- CoNLL-X
|
||||
language_id: 421026389
|
||||
CoffeeScript:
|
||||
type: programming
|
||||
tm_scope: source.coffee
|
||||
@@ -786,6 +814,19 @@ Common Lisp:
|
||||
codemirror_mode: commonlisp
|
||||
codemirror_mime_type: text/x-common-lisp
|
||||
language_id: 66
|
||||
Common Workflow Language:
|
||||
alias: cwl
|
||||
type: programming
|
||||
ace_mode: yaml
|
||||
codemirror_mode: yaml
|
||||
codemirror_mime_type: text/x-yaml
|
||||
extensions:
|
||||
- ".cwl"
|
||||
interpreters:
|
||||
- cwl-runner
|
||||
color: "#B5314C"
|
||||
tm_scope: source.cwl
|
||||
language_id: 988547172
|
||||
Component Pascal:
|
||||
type: programming
|
||||
color: "#B0CE4E"
|
||||
@@ -855,7 +896,7 @@ Csound:
|
||||
- ".orc"
|
||||
- ".udo"
|
||||
tm_scope: source.csound
|
||||
ace_mode: text
|
||||
ace_mode: csound_orchestra
|
||||
language_id: 73
|
||||
Csound Document:
|
||||
type: programming
|
||||
@@ -864,7 +905,7 @@ Csound Document:
|
||||
extensions:
|
||||
- ".csd"
|
||||
tm_scope: source.csound-document
|
||||
ace_mode: text
|
||||
ace_mode: csound_document
|
||||
language_id: 74
|
||||
Csound Score:
|
||||
type: programming
|
||||
@@ -873,7 +914,7 @@ Csound Score:
|
||||
extensions:
|
||||
- ".sco"
|
||||
tm_scope: source.csound-score
|
||||
ace_mode: text
|
||||
ace_mode: csound_score
|
||||
language_id: 75
|
||||
Cuda:
|
||||
type: programming
|
||||
@@ -986,6 +1027,14 @@ Dart:
|
||||
codemirror_mode: dart
|
||||
codemirror_mime_type: application/dart
|
||||
language_id: 87
|
||||
DataWeave:
|
||||
type: programming
|
||||
color: "#003a52"
|
||||
extensions:
|
||||
- ".dwl"
|
||||
ace_mode: text
|
||||
tm_scope: source.data-weave
|
||||
language_id: 974514097
|
||||
Diff:
|
||||
type: data
|
||||
extensions:
|
||||
@@ -1086,8 +1135,7 @@ EQ:
|
||||
codemirror_mime_type: text/x-csharp
|
||||
language_id: 96
|
||||
Eagle:
|
||||
type: markup
|
||||
color: "#814C05"
|
||||
type: data
|
||||
extensions:
|
||||
- ".sch"
|
||||
- ".brd"
|
||||
@@ -1116,6 +1164,15 @@ Ecere Projects:
|
||||
codemirror_mode: javascript
|
||||
codemirror_mime_type: application/json
|
||||
language_id: 98
|
||||
Edje Data Collection:
|
||||
type: data
|
||||
extensions:
|
||||
- ".edc"
|
||||
tm_scope: source.json
|
||||
ace_mode: json
|
||||
codemirror_mode: javascript
|
||||
codemirror_mime_type: application/json
|
||||
language_id: 342840478
|
||||
Eiffel:
|
||||
type: programming
|
||||
color: "#946d57"
|
||||
@@ -1414,6 +1471,8 @@ GN:
|
||||
- ".gni"
|
||||
interpreters:
|
||||
- gn
|
||||
filenames:
|
||||
- ".gn"
|
||||
tm_scope: source.gn
|
||||
ace_mode: python
|
||||
codemirror_mode: python
|
||||
@@ -1487,8 +1546,8 @@ Gerber Image:
|
||||
- ".gtp"
|
||||
- ".gts"
|
||||
interpreters:
|
||||
- "gerbv"
|
||||
- "gerbview"
|
||||
- gerbv
|
||||
- gerbview
|
||||
tm_scope: source.gerber
|
||||
ace_mode: text
|
||||
language_id: 404627610
|
||||
@@ -1605,6 +1664,7 @@ GraphQL:
|
||||
type: data
|
||||
extensions:
|
||||
- ".graphql"
|
||||
- ".gql"
|
||||
tm_scope: source.graphql
|
||||
ace_mode: text
|
||||
language_id: 139
|
||||
@@ -1650,6 +1710,7 @@ HCL:
|
||||
extensions:
|
||||
- ".hcl"
|
||||
- ".tf"
|
||||
- ".tfvars"
|
||||
ace_mode: ruby
|
||||
codemirror_mode: ruby
|
||||
codemirror_mime_type: text/x-ruby
|
||||
@@ -1690,6 +1751,7 @@ HTML+Django:
|
||||
group: HTML
|
||||
extensions:
|
||||
- ".jinja"
|
||||
- ".jinja2"
|
||||
- ".mustache"
|
||||
- ".njk"
|
||||
aliases:
|
||||
@@ -1759,6 +1821,13 @@ HTTP:
|
||||
codemirror_mode: http
|
||||
codemirror_mime_type: message/http
|
||||
language_id: 152
|
||||
HXML:
|
||||
type: data
|
||||
ace_mode: text
|
||||
extensions:
|
||||
- ".hxml"
|
||||
tm_scope: source.hxml
|
||||
language_id: 786683730
|
||||
Hack:
|
||||
type: programming
|
||||
ace_mode: php
|
||||
@@ -1821,7 +1890,7 @@ Haxe:
|
||||
extensions:
|
||||
- ".hx"
|
||||
- ".hxsl"
|
||||
tm_scope: source.haxe.2
|
||||
tm_scope: source.hx
|
||||
language_id: 158
|
||||
Hy:
|
||||
type: programming
|
||||
@@ -1868,6 +1937,10 @@ INI:
|
||||
- ".prefs"
|
||||
- ".pro"
|
||||
- ".properties"
|
||||
filenames:
|
||||
- ".editorconfig"
|
||||
- ".gitconfig"
|
||||
- buildozer.spec
|
||||
tm_scope: source.ini
|
||||
aliases:
|
||||
- dosini
|
||||
@@ -1890,6 +1963,7 @@ IRC log:
|
||||
language_id: 164
|
||||
Idris:
|
||||
type: programming
|
||||
color: "#b30000"
|
||||
extensions:
|
||||
- ".idr"
|
||||
- ".lidr"
|
||||
@@ -1978,12 +2052,23 @@ JSON:
|
||||
searchable: false
|
||||
extensions:
|
||||
- ".json"
|
||||
- ".avsc"
|
||||
- ".geojson"
|
||||
- ".gltf"
|
||||
- ".JSON-tmLanguage"
|
||||
- ".jsonl"
|
||||
- ".tfstate"
|
||||
- ".tfstate.backup"
|
||||
- ".topojson"
|
||||
- ".webapp"
|
||||
- ".webmanifest"
|
||||
filenames:
|
||||
- ".arcconfig"
|
||||
- ".htmlhintrc"
|
||||
- ".jscsrc"
|
||||
- ".jshintrc"
|
||||
- ".tern-config"
|
||||
- ".tern-project"
|
||||
- composer.lock
|
||||
- mcmod.info
|
||||
language_id: 174
|
||||
@@ -1993,6 +2078,7 @@ JSON5:
|
||||
- ".json5"
|
||||
filenames:
|
||||
- ".babelrc"
|
||||
- ".jslintrc"
|
||||
tm_scope: source.js
|
||||
ace_mode: javascript
|
||||
codemirror_mode: javascript
|
||||
@@ -2078,6 +2164,7 @@ JavaScript:
|
||||
- ".jsfl"
|
||||
- ".jsm"
|
||||
- ".jss"
|
||||
- ".mjs"
|
||||
- ".njs"
|
||||
- ".pac"
|
||||
- ".sjs"
|
||||
@@ -2149,13 +2236,6 @@ KRL:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
language_id: 186
|
||||
KiCad Board:
|
||||
type: data
|
||||
extensions:
|
||||
- ".brd"
|
||||
tm_scope: source.pcb.board
|
||||
ace_mode: text
|
||||
language_id: 140848857
|
||||
KiCad Layout:
|
||||
type: data
|
||||
aliases:
|
||||
@@ -2171,6 +2251,13 @@ KiCad Layout:
|
||||
codemirror_mode: commonlisp
|
||||
codemirror_mime_type: text/x-common-lisp
|
||||
language_id: 187
|
||||
KiCad Legacy Layout:
|
||||
type: data
|
||||
extensions:
|
||||
- ".brd"
|
||||
tm_scope: source.pcb.board
|
||||
ace_mode: text
|
||||
language_id: 140848857
|
||||
KiCad Schematic:
|
||||
type: data
|
||||
aliases:
|
||||
@@ -2196,16 +2283,16 @@ Kotlin:
|
||||
- ".kt"
|
||||
- ".ktm"
|
||||
- ".kts"
|
||||
tm_scope: source.Kotlin
|
||||
tm_scope: source.kotlin
|
||||
ace_mode: text
|
||||
codemirror_mode: clike
|
||||
codemirror_mime_type: text/x-kotlin
|
||||
language_id: 189
|
||||
LFE:
|
||||
type: programming
|
||||
color: "#4C3023"
|
||||
extensions:
|
||||
- ".lfe"
|
||||
group: Erlang
|
||||
tm_scope: source.lisp
|
||||
ace_mode: lisp
|
||||
codemirror_mode: commonlisp
|
||||
@@ -2317,6 +2404,7 @@ Linker Script:
|
||||
extensions:
|
||||
- ".ld"
|
||||
- ".lds"
|
||||
- ".x"
|
||||
filenames:
|
||||
- ld.script
|
||||
tm_scope: none
|
||||
@@ -2428,6 +2516,7 @@ Lua:
|
||||
- ".lua"
|
||||
- ".fcgi"
|
||||
- ".nse"
|
||||
- ".p8"
|
||||
- ".pd_lua"
|
||||
- ".rbxs"
|
||||
- ".wlua"
|
||||
@@ -2614,7 +2703,7 @@ Mathematica:
|
||||
language_id: 224
|
||||
Matlab:
|
||||
type: programming
|
||||
color: "#bb92ac"
|
||||
color: "#e16737"
|
||||
aliases:
|
||||
- octave
|
||||
extensions:
|
||||
@@ -2741,6 +2830,7 @@ Monkey:
|
||||
type: programming
|
||||
extensions:
|
||||
- ".monkey"
|
||||
- ".monkey2"
|
||||
ace_mode: text
|
||||
tm_scope: source.monkey
|
||||
language_id: 236
|
||||
@@ -2790,6 +2880,15 @@ NSIS:
|
||||
codemirror_mode: nsis
|
||||
codemirror_mime_type: text/x-nsis
|
||||
language_id: 242
|
||||
Nearley:
|
||||
type: programming
|
||||
ace_mode: text
|
||||
color: "#990000"
|
||||
extensions:
|
||||
- ".ne"
|
||||
- ".nearley"
|
||||
tm_scope: source.ne
|
||||
language_id: 521429430
|
||||
Nemerle:
|
||||
type: programming
|
||||
color: "#3d3c6e"
|
||||
@@ -2840,8 +2939,20 @@ NewLisp:
|
||||
codemirror_mode: commonlisp
|
||||
codemirror_mime_type: text/x-common-lisp
|
||||
language_id: 247
|
||||
Nextflow:
|
||||
type: programming
|
||||
ace_mode: groovy
|
||||
tm_scope: source.nextflow
|
||||
color: "#3ac486"
|
||||
extensions:
|
||||
- ".nf"
|
||||
filenames:
|
||||
- nextflow.config
|
||||
interpreters:
|
||||
- nextflow
|
||||
language_id: 506780613
|
||||
Nginx:
|
||||
type: markup
|
||||
type: data
|
||||
extensions:
|
||||
- ".nginxconf"
|
||||
- ".vhost"
|
||||
@@ -2853,7 +2964,6 @@ Nginx:
|
||||
ace_mode: text
|
||||
codemirror_mode: nginx
|
||||
codemirror_mime_type: text/x-nginx-conf
|
||||
color: "#9469E9"
|
||||
language_id: 248
|
||||
Nim:
|
||||
type: programming
|
||||
@@ -3028,6 +3138,7 @@ OpenEdge ABL:
|
||||
extensions:
|
||||
- ".p"
|
||||
- ".cls"
|
||||
- ".w"
|
||||
tm_scope: source.abl
|
||||
ace_mode: text
|
||||
language_id: 264
|
||||
@@ -3130,6 +3241,7 @@ PHP:
|
||||
- ".phps"
|
||||
- ".phpt"
|
||||
filenames:
|
||||
- ".php"
|
||||
- ".php_cs"
|
||||
- ".php_cs.dist"
|
||||
- Phakefile
|
||||
@@ -3271,13 +3383,18 @@ Perl:
|
||||
- ".ph"
|
||||
- ".plx"
|
||||
- ".pm"
|
||||
- ".pod"
|
||||
- ".psgi"
|
||||
- ".t"
|
||||
filenames:
|
||||
- Makefile.PL
|
||||
- Rexfile
|
||||
- ack
|
||||
- cpanfile
|
||||
interpreters:
|
||||
- cperl
|
||||
- perl
|
||||
aliases:
|
||||
- cperl
|
||||
language_id: 282
|
||||
Perl 6:
|
||||
type: programming
|
||||
@@ -3294,10 +3411,10 @@ Perl 6:
|
||||
- ".pm"
|
||||
- ".pm6"
|
||||
- ".t"
|
||||
filenames:
|
||||
- Rexfile
|
||||
interpreters:
|
||||
- perl6
|
||||
aliases:
|
||||
- perl6
|
||||
tm_scope: source.perl6fe
|
||||
ace_mode: perl
|
||||
codemirror_mode: perl
|
||||
@@ -3376,6 +3493,14 @@ Pony:
|
||||
tm_scope: source.pony
|
||||
ace_mode: text
|
||||
language_id: 290
|
||||
PostCSS:
|
||||
type: markup
|
||||
tm_scope: source.postcss
|
||||
group: CSS
|
||||
extensions:
|
||||
- ".pcss"
|
||||
ace_mode: text
|
||||
language_id: 262764437
|
||||
PostScript:
|
||||
type: markup
|
||||
color: "#da291c"
|
||||
@@ -3411,6 +3536,8 @@ PowerShell:
|
||||
- ".ps1"
|
||||
- ".psd1"
|
||||
- ".psm1"
|
||||
interpreters:
|
||||
- pwsh
|
||||
language_id: 293
|
||||
Processing:
|
||||
type: programming
|
||||
@@ -3442,7 +3569,7 @@ Propeller Spin:
|
||||
ace_mode: text
|
||||
language_id: 296
|
||||
Protocol Buffer:
|
||||
type: markup
|
||||
type: data
|
||||
aliases:
|
||||
- protobuf
|
||||
- Protocol Buffers
|
||||
@@ -3487,8 +3614,7 @@ Puppet:
|
||||
tm_scope: source.puppet
|
||||
language_id: 299
|
||||
Pure Data:
|
||||
type: programming
|
||||
color: "#91de79"
|
||||
type: data
|
||||
extensions:
|
||||
- ".pd"
|
||||
tm_scope: none
|
||||
@@ -3542,6 +3668,7 @@ Python:
|
||||
- ".gclient"
|
||||
- BUCK
|
||||
- BUILD
|
||||
- BUILD.bazel
|
||||
- SConscript
|
||||
- SConstruct
|
||||
- Snakefile
|
||||
@@ -3553,6 +3680,7 @@ Python:
|
||||
- python3
|
||||
aliases:
|
||||
- rusthon
|
||||
- python3
|
||||
language_id: 303
|
||||
Python console:
|
||||
type: programming
|
||||
@@ -3603,6 +3731,7 @@ R:
|
||||
- ".rsx"
|
||||
filenames:
|
||||
- ".Rprofile"
|
||||
- expr-dist
|
||||
interpreters:
|
||||
- Rscript
|
||||
ace_mode: r
|
||||
@@ -3675,6 +3804,17 @@ RMarkdown:
|
||||
- ".rmd"
|
||||
tm_scope: source.gfm
|
||||
language_id: 313
|
||||
RPC:
|
||||
type: programming
|
||||
aliases:
|
||||
- rpcgen
|
||||
- oncrpc
|
||||
- xdr
|
||||
ace_mode: c_cpp
|
||||
extensions:
|
||||
- ".x"
|
||||
tm_scope: source.c
|
||||
language_id: 1031374237
|
||||
RPM Spec:
|
||||
type: data
|
||||
tm_scope: source.rpm-spec
|
||||
@@ -3918,6 +4058,7 @@ Ruby:
|
||||
- Berksfile
|
||||
- Brewfile
|
||||
- Buildfile
|
||||
- Capfile
|
||||
- Dangerfile
|
||||
- Deliverfile
|
||||
- Fastfile
|
||||
@@ -4098,6 +4239,7 @@ Scala:
|
||||
color: "#c22d40"
|
||||
extensions:
|
||||
- ".scala"
|
||||
- ".kojo"
|
||||
- ".sbt"
|
||||
- ".sc"
|
||||
interpreters:
|
||||
@@ -4180,8 +4322,29 @@ Shell:
|
||||
- ".bash_logout"
|
||||
- ".bash_profile"
|
||||
- ".bashrc"
|
||||
- ".cshrc"
|
||||
- ".login"
|
||||
- ".profile"
|
||||
- ".zlogin"
|
||||
- ".zlogout"
|
||||
- ".zprofile"
|
||||
- ".zshenv"
|
||||
- ".zshrc"
|
||||
- 9fs
|
||||
- PKGBUILD
|
||||
- bash_logout
|
||||
- bash_profile
|
||||
- bashrc
|
||||
- cshrc
|
||||
- gradlew
|
||||
- login
|
||||
- man
|
||||
- profile
|
||||
- zlogin
|
||||
- zlogout
|
||||
- zprofile
|
||||
- zshenv
|
||||
- zshrc
|
||||
interpreters:
|
||||
- ash
|
||||
- bash
|
||||
@@ -4262,6 +4425,12 @@ Smarty:
|
||||
codemirror_mime_type: text/x-smarty
|
||||
tm_scope: text.html.smarty
|
||||
language_id: 353
|
||||
Solidity:
|
||||
type: programming
|
||||
color: "#AA6746"
|
||||
ace_mode: text
|
||||
tm_scope: source.solidity
|
||||
language_id: 237469032
|
||||
SourcePawn:
|
||||
type: programming
|
||||
color: "#5c7611"
|
||||
@@ -4363,6 +4532,14 @@ Sublime Text Config:
|
||||
- ".sublime_metrics"
|
||||
- ".sublime_session"
|
||||
language_id: 423
|
||||
SugarSS:
|
||||
type: markup
|
||||
tm_scope: source.css.postcss.sugarss
|
||||
group: CSS
|
||||
extensions:
|
||||
- ".sss"
|
||||
ace_mode: text
|
||||
language_id: 826404698
|
||||
SuperCollider:
|
||||
type: programming
|
||||
color: "#46390b"
|
||||
@@ -4417,6 +4594,8 @@ TOML:
|
||||
type: data
|
||||
extensions:
|
||||
- ".toml"
|
||||
filenames:
|
||||
- Cargo.lock
|
||||
tm_scope: source.toml
|
||||
ace_mode: toml
|
||||
codemirror_mode: toml
|
||||
@@ -4436,6 +4615,9 @@ Tcl:
|
||||
- ".tcl"
|
||||
- ".adp"
|
||||
- ".tm"
|
||||
filenames:
|
||||
- owh
|
||||
- starfield
|
||||
interpreters:
|
||||
- tclsh
|
||||
- wish
|
||||
@@ -4511,6 +4693,7 @@ Text:
|
||||
- ".no"
|
||||
filenames:
|
||||
- COPYING
|
||||
- COPYING.regex
|
||||
- COPYRIGHT.regex
|
||||
- FONTLOG
|
||||
- INSTALL
|
||||
@@ -4525,6 +4708,7 @@ Text:
|
||||
- delete.me
|
||||
- keep.me
|
||||
- read.me
|
||||
- readme.1st
|
||||
- test.me
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
@@ -4626,7 +4810,7 @@ Unix Assembly:
|
||||
extensions:
|
||||
- ".s"
|
||||
- ".ms"
|
||||
tm_scope: source.assembly
|
||||
tm_scope: source.assembly.unix
|
||||
ace_mode: assembly_x86
|
||||
language_id: 120
|
||||
Uno:
|
||||
@@ -4660,8 +4844,8 @@ UrWeb:
|
||||
ace_mode: text
|
||||
language_id: 383
|
||||
VCL:
|
||||
group: Perl
|
||||
type: programming
|
||||
color: "#0298c3"
|
||||
extensions:
|
||||
- ".vcl"
|
||||
tm_scope: source.varnish.vcl
|
||||
@@ -4712,6 +4896,7 @@ Vim script:
|
||||
extensions:
|
||||
- ".vim"
|
||||
filenames:
|
||||
- ".gvimrc"
|
||||
- ".nvimrc"
|
||||
- ".vimrc"
|
||||
- _vimrc
|
||||
@@ -4773,8 +4958,7 @@ Wavefront Object:
|
||||
ace_mode: text
|
||||
language_id: 393
|
||||
Web Ontology Language:
|
||||
type: markup
|
||||
color: "#9cc9dd"
|
||||
type: data
|
||||
extensions:
|
||||
- ".owl"
|
||||
tm_scope: text.xml
|
||||
@@ -4855,12 +5039,16 @@ XML:
|
||||
- ".ant"
|
||||
- ".axml"
|
||||
- ".builds"
|
||||
- ".ccproj"
|
||||
- ".ccxml"
|
||||
- ".clixml"
|
||||
- ".cproject"
|
||||
- ".cscfg"
|
||||
- ".csdef"
|
||||
- ".csl"
|
||||
- ".csproj"
|
||||
- ".ct"
|
||||
- ".depproj"
|
||||
- ".dita"
|
||||
- ".ditamap"
|
||||
- ".ditaval"
|
||||
@@ -4883,6 +5071,8 @@ XML:
|
||||
- ".mm"
|
||||
- ".mod"
|
||||
- ".mxml"
|
||||
- ".natvis"
|
||||
- ".ndproj"
|
||||
- ".nproj"
|
||||
- ".nuspec"
|
||||
- ".odd"
|
||||
@@ -4890,6 +5080,7 @@ XML:
|
||||
- ".pkgproj"
|
||||
- ".plist"
|
||||
- ".pluginspec"
|
||||
- ".proj"
|
||||
- ".props"
|
||||
- ".ps1xml"
|
||||
- ".psc1"
|
||||
@@ -4900,6 +5091,7 @@ XML:
|
||||
- ".sch"
|
||||
- ".scxml"
|
||||
- ".sfproj"
|
||||
- ".shproj"
|
||||
- ".srdf"
|
||||
- ".storyboard"
|
||||
- ".stTheme"
|
||||
@@ -4943,6 +5135,7 @@ XML:
|
||||
- ".zcml"
|
||||
filenames:
|
||||
- ".classpath"
|
||||
- ".cproject"
|
||||
- ".project"
|
||||
- App.config
|
||||
- NuGet.config
|
||||
@@ -4961,11 +5154,11 @@ XPM:
|
||||
tm_scope: source.c
|
||||
language_id: 781846279
|
||||
XPages:
|
||||
type: programming
|
||||
type: data
|
||||
extensions:
|
||||
- ".xsp-config"
|
||||
- ".xsp.metadata"
|
||||
tm_scope: none
|
||||
tm_scope: text.xml
|
||||
ace_mode: xml
|
||||
codemirror_mode: xml
|
||||
codemirror_mime_type: text/xml
|
||||
@@ -5050,6 +5243,8 @@ YAML:
|
||||
- ".yml.mysql"
|
||||
filenames:
|
||||
- ".clang-format"
|
||||
- ".clang-tidy"
|
||||
- ".gemrc"
|
||||
ace_mode: yaml
|
||||
codemirror_mode: yaml
|
||||
codemirror_mime_type: text/x-yaml
|
||||
@@ -5061,6 +5256,14 @@ YANG:
|
||||
tm_scope: source.yang
|
||||
ace_mode: text
|
||||
language_id: 408
|
||||
YARA:
|
||||
type: data
|
||||
ace_mode: text
|
||||
extensions:
|
||||
- ".yar"
|
||||
- ".yara"
|
||||
tm_scope: source.yara
|
||||
language_id: 805122868
|
||||
Yacc:
|
||||
type: programming
|
||||
extensions:
|
||||
@@ -5159,6 +5362,27 @@ reStructuredText:
|
||||
codemirror_mode: rst
|
||||
codemirror_mime_type: text/x-rst
|
||||
language_id: 419
|
||||
sed:
|
||||
type: programming
|
||||
color: "#64b970"
|
||||
extensions:
|
||||
- ".sed"
|
||||
interpreters:
|
||||
- gsed
|
||||
- minised
|
||||
- sed
|
||||
- ssed
|
||||
ace_mode: text
|
||||
tm_scope: source.sed
|
||||
language_id: 847830017
|
||||
wdl:
|
||||
type: programming
|
||||
color: "#42f1f4"
|
||||
extensions:
|
||||
- ".wdl"
|
||||
tm_scope: source.wdl
|
||||
ace_mode: text
|
||||
language_id: 374521672
|
||||
wisp:
|
||||
type: programming
|
||||
ace_mode: clojure
|
||||
|
||||
@@ -7,7 +7,8 @@ module Linguist
|
||||
GIT_ATTR = ['linguist-documentation',
|
||||
'linguist-language',
|
||||
'linguist-vendored',
|
||||
'linguist-generated']
|
||||
'linguist-generated',
|
||||
'linguist-detectable']
|
||||
|
||||
GIT_ATTR_OPTS = { :priority => [:index], :skip_system => true }
|
||||
GIT_ATTR_FLAGS = Rugged::Repository::Attributes.parse_opts(GIT_ATTR_OPTS)
|
||||
@@ -70,6 +71,14 @@ module Linguist
|
||||
end
|
||||
end
|
||||
|
||||
def detectable?
|
||||
if attr = git_attributes['linguist-detectable']
|
||||
return boolean_attribute(attr)
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def data
|
||||
load_blob!
|
||||
@data
|
||||
@@ -80,6 +89,11 @@ module Linguist
|
||||
@size
|
||||
end
|
||||
|
||||
def symlink?
|
||||
# We don't create LazyBlobs for symlinks.
|
||||
false
|
||||
end
|
||||
|
||||
def cleanup!
|
||||
@data.clear if @data
|
||||
end
|
||||
|
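To show where the new `linguist-detectable` attribute is read, a hedged end-to-end sketch (repository path and attribute line are hypothetical): `LazyBlob#detectable?` above returns the parsed boolean, or nil when the attribute is absent, in which case the stats check in blob_helper falls back to `DETECTABLE_TYPES`.

```ruby
require 'rugged'
require 'linguist'

# .gitattributes in the repository is assumed to contain a line such as:
#   *.sql linguist-detectable=true
repo    = Rugged::Repository.new("/path/to/repo")
project = Linguist::Repository.new(repo, repo.head.target_id)

# SQL is a data language and is normally excluded from language statistics;
# with the attribute set, its blobs are now counted.
project.languages
```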
||||
@@ -3,15 +3,20 @@ module Linguist
|
||||
# Public: Use shebang to detect language of the blob.
|
||||
#
|
||||
# blob - An object that quacks like a blob.
|
||||
# candidates - A list of candidate languages.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# Shebang.call(FileBlob.new("path/to/file"))
|
||||
#
|
||||
# Returns an Array with one Language if the blob has a shebang with a valid
|
||||
# interpreter, or empty if there is no shebang.
|
||||
def self.call(blob, _ = nil)
|
||||
Language.find_by_interpreter interpreter(blob.data)
|
||||
# Returns an array of languages from the candidate list for which the
|
||||
# blob's shebang is valid. Returns an empty list if there is no shebang.
|
||||
# If the candidate list is empty, any language is a valid candidate.
|
||||
def self.call(blob, candidates)
|
||||
return [] if blob.symlink?
|
||||
|
||||
languages = Language.find_by_interpreter interpreter(blob.data)
|
||||
candidates.any? ? candidates & languages : languages
|
||||
end
|
||||
|
||||
# Public: Get the interpreter from the shebang
|
||||
|
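A short sketch of the reworked strategy contract (interpreter and content invented): `Shebang.call` now intersects the interpreter's languages with the candidates handed down from earlier strategies instead of returning a single language.

```ruby
require 'linguist'

blob = Linguist::Blob.new("run", "#!/usr/bin/env perl\nprint 1;\n")

# With no prior candidates, every language claiming the interpreter is returned.
Linguist::Shebang.call(blob, [])
# => e.g. [Linguist::Language["Perl"]]

# With candidates, only those that also claim the interpreter survive.
Linguist::Shebang.call(blob, [Linguist::Language["Perl 6"], Linguist::Language["Prolog"]])
# => []
```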
||||
@@ -2,8 +2,21 @@ module Linguist
|
||||
module Strategy
|
||||
# Detects language based on extension
|
||||
class Extension
|
||||
def self.call(blob, _)
|
||||
Language.find_by_extension(blob.name.to_s)
|
||||
# Public: Use the file extension to detect the blob's language.
|
||||
#
|
||||
# blob - An object that quacks like a blob.
|
||||
# candidates - A list of candidate languages.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# Extension.call(FileBlob.new("path/to/file"))
|
||||
#
|
||||
# Returns an array of languages associated with a blob's file extension.
|
||||
# Selected languages must be in the candidate list, except if it's empty,
|
||||
# in which case any language is a valid candidate.
|
||||
def self.call(blob, candidates)
|
||||
languages = Language.find_by_extension(blob.name.to_s)
|
||||
candidates.any? ? candidates & languages : languages
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -2,9 +2,22 @@ module Linguist
|
||||
module Strategy
|
||||
# Detects language based on filename
|
||||
class Filename
|
||||
def self.call(blob, _)
|
||||
# Public: Use the filename to detect the blob's language.
|
||||
#
|
||||
# blob - An object that quacks like a blob.
|
||||
# candidates - A list of candidate languages.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# Filename.call(FileBlob.new("path/to/file"))
|
||||
#
|
||||
# Returns an array of languages associated with the blob's filename.
|
||||
# Selected languages must be in the candidate list, except if it's empty,
|
||||
# in which case any language is a valid candidate.
|
||||
def self.call(blob, candidates)
|
||||
name = blob.name.to_s
|
||||
Language.find_by_filename(name)
|
||||
languages = Language.find_by_filename(name)
|
||||
candidates.any? ? candidates & languages : languages
|
||||
end
|
||||
end
|
||||
end
|
||||
|
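The same candidate-filtering contract applies to the two path-based strategies documented above; a brief sketch with invented file names:

```ruby
require 'linguist'

header = Linguist::Blob.new("parser.h", "")
Linguist::Strategy::Extension.call(header, [])
# => roughly [Linguist::Language["C"], Linguist::Language["C++"], Linguist::Language["Objective-C"]];
#    a later strategy (heuristics, classifier) narrows this list further.

makefile = Linguist::Blob.new("Makefile", "all:\n\ttrue\n")
Linguist::Strategy::Filename.call(makefile, [])
# => [Linguist::Language["Makefile"]]
```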
||||
@@ -109,8 +109,10 @@ module Linguist
|
||||
# Returns an Array with one Language if the blob has a Vim or Emacs modeline
|
||||
# that matches a Language name or alias. Returns an empty array if no match.
|
||||
def self.call(blob, _ = nil)
|
||||
header = blob.lines.first(SEARCH_SCOPE).join("\n")
|
||||
footer = blob.lines.last(SEARCH_SCOPE).join("\n")
|
||||
return [] if blob.symlink?
|
||||
|
||||
header = blob.first_lines(SEARCH_SCOPE).join("\n")
|
||||
footer = blob.last_lines(SEARCH_SCOPE).join("\n")
|
||||
Array(Language.find_by_alias(modeline(header + footer)))
|
||||
end
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
require 'strscan'
|
||||
require 'linguist/linguist'
|
||||
|
||||
module Linguist
|
||||
# Generic programming language tokenizer.
|
||||
@@ -15,191 +16,5 @@ module Linguist
|
||||
def self.tokenize(data)
|
||||
new.extract_tokens(data)
|
||||
end
|
||||
|
||||
# Read up to 100KB
|
||||
BYTE_LIMIT = 100_000
|
||||
|
||||
# Start state on token, ignore anything till the next newline
|
||||
SINGLE_LINE_COMMENTS = [
|
||||
'//', # C
|
||||
'--', # Ada, Haskell, AppleScript
|
||||
'#', # Ruby
|
||||
'%', # Tex
|
||||
'"', # Vim
|
||||
]
|
||||
|
||||
# Start state on opening token, ignore anything until the closing
|
||||
# token is reached.
|
||||
MULTI_LINE_COMMENTS = [
|
||||
['/*', '*/'], # C
|
||||
['<!--', '-->'], # XML
|
||||
['{-', '-}'], # Haskell
|
||||
['(*', '*)'], # Coq
|
||||
['"""', '"""'], # Python
|
||||
["'''", "'''"] # Python
|
||||
]
|
||||
|
||||
START_SINGLE_LINE_COMMENT = Regexp.compile(SINGLE_LINE_COMMENTS.map { |c|
|
||||
"\s*#{Regexp.escape(c)} "
|
||||
}.join("|"))
|
||||
|
||||
START_MULTI_LINE_COMMENT = Regexp.compile(MULTI_LINE_COMMENTS.map { |c|
|
||||
Regexp.escape(c[0])
|
||||
}.join("|"))
|
||||
|
||||
# Internal: Extract generic tokens from data.
|
||||
#
|
||||
# data - String to scan.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# extract_tokens("printf('Hello')")
|
||||
# # => ['printf', '(', ')']
|
||||
#
|
||||
# Returns Array of token Strings.
|
||||
def extract_tokens(data)
|
||||
s = StringScanner.new(data)
|
||||
|
||||
tokens = []
|
||||
until s.eos?
|
||||
break if s.pos >= BYTE_LIMIT
|
||||
|
||||
if token = s.scan(/^#!.+$/)
|
||||
if name = extract_shebang(token)
|
||||
tokens << "SHEBANG#!#{name}"
|
||||
end
|
||||
|
||||
# Single line comment
|
||||
elsif s.beginning_of_line? && token = s.scan(START_SINGLE_LINE_COMMENT)
|
||||
# tokens << token.strip
|
||||
s.skip_until(/\n|\Z/)
|
||||
|
||||
# Multiline comments
|
||||
elsif token = s.scan(START_MULTI_LINE_COMMENT)
|
||||
# tokens << token
|
||||
close_token = MULTI_LINE_COMMENTS.assoc(token)[1]
|
||||
s.skip_until(Regexp.compile(Regexp.escape(close_token)))
|
||||
# tokens << close_token
|
||||
|
||||
# Skip single or double quoted strings
|
||||
elsif s.scan(/"/)
|
||||
if s.peek(1) == "\""
|
||||
s.getch
|
||||
else
|
||||
s.skip_until(/(?<!\\)"/)
|
||||
end
|
||||
elsif s.scan(/'/)
|
||||
if s.peek(1) == "'"
|
||||
s.getch
|
||||
else
|
||||
s.skip_until(/(?<!\\)'/)
|
||||
end
|
||||
|
||||
# Skip number literals
|
||||
elsif s.scan(/(0x\h(\h|\.)*|\d(\d|\.)*)([uU][lL]{0,2}|([eE][-+]\d*)?[fFlL]*)/)
|
||||
|
||||
# SGML style brackets
|
||||
elsif token = s.scan(/<[^\s<>][^<>]*>/)
|
||||
extract_sgml_tokens(token).each { |t| tokens << t }
|
||||
|
||||
# Common programming punctuation
|
||||
elsif token = s.scan(/;|\{|\}|\(|\)|\[|\]/)
|
||||
tokens << token
|
||||
|
||||
# Regular token
|
||||
elsif token = s.scan(/[\w\.@#\/\*]+/)
|
||||
tokens << token
|
||||
|
||||
# Common operators
|
||||
elsif token = s.scan(/<<?|\+|\-|\*|\/|%|&&?|\|\|?/)
|
||||
tokens << token
|
||||
|
||||
else
|
||||
s.getch
|
||||
end
|
||||
end
|
||||
|
||||
tokens
|
||||
end
|
||||
|
||||
# Internal: Extract normalized shebang command token.
|
||||
#
|
||||
# Examples
|
||||
#
|
||||
# extract_shebang("#!/usr/bin/ruby")
|
||||
# # => "ruby"
|
||||
#
|
||||
# extract_shebang("#!/usr/bin/env node")
|
||||
# # => "node"
|
||||
#
|
||||
# extract_shebang("#!/usr/bin/env A=B foo=bar awk -f")
|
||||
# # => "awk"
|
||||
#
|
||||
# Returns String token or nil it couldn't be parsed.
|
||||
def extract_shebang(data)
|
||||
s = StringScanner.new(data)
|
||||
|
||||
if path = s.scan(/^#!\s*\S+/)
|
||||
script = path.split('/').last
|
||||
if script == 'env'
|
||||
s.scan(/\s+/)
|
||||
s.scan(/.*=[^\s]+\s+/)
|
||||
script = s.scan(/\S+/)
|
||||
end
|
||||
script = script[/[^\d]+/, 0] if script
|
||||
return script
|
||||
end
|
||||
|
||||
nil
|
||||
end
|
||||
|
||||
    # Internal: Extract tokens from inside SGML tag.
    #
    # data - SGML tag String.
    #
    # Examples
    #
    #   extract_sgml_tokens("<a href='' class=foo>")
    #   # => ["<a>", "href="]
    #
    # Returns Array of token Strings.
    def extract_sgml_tokens(data)
      s = StringScanner.new(data)

      tokens = []

      until s.eos?
        # Emit start token
        if token = s.scan(/<\/?[^\s>]+/)
          tokens << "#{token}>"

        # Emit attributes with trailing =
        elsif token = s.scan(/\w+=/)
          tokens << token

          # Then skip over attribute value
          if s.scan(/"/)
            s.skip_until(/[^\\]"/)
          elsif s.scan(/'/)
            s.skip_until(/[^\\]'/)
          else
            s.skip_until(/\w+/)
          end

        # Emit lone attributes
        elsif token = s.scan(/\w+/)
          tokens << token

        # Stop at the end of the tag
        elsif s.scan(/>/)
          s.terminate

        else
          s.getch
        end
      end

      tokens
    end
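    # Added illustration (not in the original source): tag names are emitted as
    # "<name>" and attribute names with a trailing "=", while attribute values
    # are skipped:
    #
    #   extract_sgml_tokens(%q(<div id="main" hidden>))
    #   # => ["<div>", "id=", "hidden"]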
  end
end

@@ -19,9 +19,7 @@
- (^|/)dist/

# C deps
# https://github.com/joyent/node
- ^deps/
- ^tools/
- (^|/)configure$
- (^|/)config.guess$
- (^|/)config.sub$
@@ -65,6 +63,7 @@

# Font Awesome
- (^|/)font-awesome\.(css|less|scss|styl)$
- (^|/)font-awesome/.*\.(css|less|scss|styl)$

# Foundation css
- (^|/)foundation\.(css|less|scss|styl)$
@@ -81,6 +80,12 @@
# Animate.css
- (^|/)animate\.(css|less|scss|styl)$

# Materialize.css
- (^|/)materialize\.(css|less|scss|styl|js)$

# Select2
- (^|/)select2/.*\.(css|scss|js)$

# Vendored dependencies
- third[-_]?party/
- 3rd[-_]?party/
@@ -119,6 +124,15 @@
# jQuery File Upload
- (^|/)jquery\.fileupload(-\w+)?\.js$

# jQuery dataTables
- jquery.dataTables.js

# bootboxjs
- bootbox.js

# pdf-worker
- pdf.worker.js

# Slick
- (^|/)slick\.\w+.js$

@@ -135,6 +149,9 @@
- .sublime-project
- .sublime-workspace

# VS Code workspace files
- .vscode

# Prototype
- (^|/)prototype(.*)\.js$
- (^|/)effects\.js$
@@ -227,10 +244,7 @@
- \.imageset/

# Carthage
- ^Carthage/

# Cocoapods
- ^Pods/
- (^|/)Carthage/

# Sparkle
- (^|/)Sparkle/
@@ -262,6 +276,13 @@
- (^|/)gradlew\.bat$
- (^|/)gradle/wrapper/

## Java ##

# Maven
- (^|/)mvnw$
- (^|/)mvnw\.cmd$
- (^|/)\.mvn/wrapper/

## .NET ##

# Visual Studio IntelliSense

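A quick, hedged illustration of how entries in this list behave (the pattern string below is copied verbatim from the Materialize.css entry above; Linguist treats each entry as a regular expression matched against repository paths):

    pattern = Regexp.new('(^|/)materialize\.(css|less|scss|styl|js)$')
    pattern.match?("assets/materialize.css")    # => true  (flagged as vendored)
    pattern.match?("lib/my-materialize.css")    # => false (left alone)
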
@@ -1,3 +1,3 @@
 module Linguist
-  VERSION = "5.2.0"
+  VERSION = "6.0.1"
 end

@@ -1,7 +0,0 @@
-{
-  "repository": "https://github.com/github/linguist",
-  "dependencies": {
-    "season": "~>5.4"
-  },
-  "license": "MIT"
-}
samples/ActionScript/FooBar.as (Normal file, 35 lines)
@@ -0,0 +1,35 @@
// A sample for Actionscript.

package foobar
{
    import flash.display.MovieClip;

    class Bar
    {
        public function getNumber():Number
        {
            return 10;
        }
    }

    class Foo extends Bar
    {
        private var ourNumber:Number = 25;

        override public function getNumber():Number
        {
            return ourNumber;
        }
    }

    class Main extends MovieClip
    {
        public function Main()
        {
            var x:Bar = new Bar();
            var y:Foo = new Foo();
            trace(x.getNumber());
            trace(y.getNumber());
        }
    }
}
samples/ActionScript/HelloWorld.as (Normal file, 13 lines)
@@ -0,0 +1,13 @@
package mypackage
{
    public class Hello
    {
        /* Let's say hello!
         * This is just a test script for Linguist's Actionscript detection.
         */
        public function sayHello():void
        {
            trace("Hello, world");
        }
    }
}
samples/AngelScript/botmanager.as (Normal file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
/*
|
||||
* This is a sample script.
|
||||
*/
|
||||
|
||||
#include "BotManagerInterface.acs"
|
||||
|
||||
BotManager::BotManager g_BotManager( @CreateDumbBot );
|
||||
|
||||
CConCommand@ m_pAddBot;
|
||||
|
||||
void PluginInit()
|
||||
{
|
||||
g_BotManager.PluginInit();
|
||||
|
||||
@m_pAddBot = @CConCommand( "addbot", "Adds a new bot with the given name", @AddBotCallback );
|
||||
}
|
||||
|
||||
void AddBotCallback( const CCommand@ args )
|
||||
{
|
||||
if( args.ArgC() < 2 )
|
||||
{
|
||||
g_Game.AlertMessage( at_console, "Usage: addbot <name>" );
|
||||
return;
|
||||
}
|
||||
|
||||
BotManager::BaseBot@ pBot = g_BotManager.CreateBot( args[ 1 ] );
|
||||
|
||||
if( pBot !is null )
|
||||
{
|
||||
g_Game.AlertMessage( at_console, "Created bot " + args[ 1 ] + "\n" );
|
||||
}
|
||||
else
|
||||
{
|
||||
g_Game.AlertMessage( at_console, "Could not create bot\n" );
|
||||
}
|
||||
}
|
||||
|
||||
final class DumbBot : BotManager::BaseBot
|
||||
{
|
||||
DumbBot( CBasePlayer@ pPlayer )
|
||||
{
|
||||
super( pPlayer );
|
||||
}
|
||||
|
||||
void Think()
|
||||
{
|
||||
BotManager::BaseBot::Think();
|
||||
|
||||
// If the bot is dead and can be respawned, send a button press
|
||||
if( Player.pev.deadflag >= DEAD_RESPAWNABLE )
|
||||
{
|
||||
Player.pev.button |= IN_ATTACK;
|
||||
}
|
||||
else
|
||||
Player.pev.button &= ~IN_ATTACK;
|
||||
|
||||
KeyValueBuffer@ pInfoBuffer = g_EngineFuncs.GetInfoKeyBuffer( Player.edict() );
|
||||
|
||||
pInfoBuffer.SetValue( "topcolor", Math.RandomLong( 0, 255 ) );
|
||||
pInfoBuffer.SetValue( "bottomcolor", Math.RandomLong( 0, 255 ) );
|
||||
|
||||
if( Math.RandomLong( 0, 100 ) > 10 )
|
||||
Player.pev.button |= IN_ATTACK;
|
||||
else
|
||||
Player.pev.button &= ~IN_ATTACK;
|
||||
|
||||
for( uint uiIndex = 0; uiIndex < 3; ++uiIndex )
|
||||
{
|
||||
m_vecVelocity[ uiIndex ] = Math.RandomLong( -50, 50 );
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
BotManager::BaseBot@ CreateDumbBot( CBasePlayer@ pPlayer )
|
||||
{
|
||||
return @DumbBot( pPlayer );
|
||||
}
|
||||
samples/AngelScript/payload.as (Normal file, 396 lines)
@@ -0,0 +1,396 @@
|
||||
// Sample script.
|
||||
// Source: https://github.com/codecat/ssbd-payload
|
||||
|
||||
array<WorldScript::PayloadBeginTrigger@> g_payloadBeginTriggers;
|
||||
array<WorldScript::PayloadTeamForcefield@> g_teamForceFields;
|
||||
|
||||
[GameMode]
|
||||
class Payload : TeamVersusGameMode
|
||||
{
|
||||
[Editable]
|
||||
UnitFeed PayloadUnit;
|
||||
|
||||
[Editable]
|
||||
UnitFeed FirstNode;
|
||||
|
||||
[Editable default=10]
|
||||
int PrepareTime;
|
||||
|
||||
[Editable default=300]
|
||||
int TimeLimit;
|
||||
|
||||
[Editable default=90]
|
||||
int TimeAddCheckpoint;
|
||||
|
||||
[Editable default=2]
|
||||
float TimeOvertime;
|
||||
|
||||
[Editable default=1000]
|
||||
int TimePayloadHeal;
|
||||
|
||||
[Editable default=1]
|
||||
int PayloadHeal;
|
||||
|
||||
PayloadBehavior@ m_payload;
|
||||
|
||||
int m_tmStarting;
|
||||
int m_tmStarted;
|
||||
int m_tmLimitCustom;
|
||||
int m_tmOvertime;
|
||||
int m_tmInOvertime;
|
||||
|
||||
PayloadHUD@ m_payloadHUD;
|
||||
PayloadClassSwitchWindow@ m_switchClass;
|
||||
|
||||
array<SValue@>@ m_switchedSidesData;
|
||||
|
||||
Payload(Scene@ scene)
|
||||
{
|
||||
super(scene);
|
||||
|
||||
m_tmRespawnCountdown = 5000;
|
||||
|
||||
@m_payloadHUD = PayloadHUD(m_guiBuilder);
|
||||
@m_switchTeam = PayloadTeamSwitchWindow(m_guiBuilder);
|
||||
@m_switchClass = PayloadClassSwitchWindow(m_guiBuilder);
|
||||
}
|
||||
|
||||
void UpdateFrame(int ms, GameInput& gameInput, MenuInput& menuInput) override
|
||||
{
|
||||
TeamVersusGameMode::UpdateFrame(ms, gameInput, menuInput);
|
||||
|
||||
m_payloadHUD.Update(ms);
|
||||
|
||||
if (Network::IsServer())
|
||||
{
|
||||
uint64 tmNow = CurrPlaytimeLevel();
|
||||
|
||||
if (m_tmStarting == 0)
|
||||
{
|
||||
if (GetPlayersInTeam(0) > 0 && GetPlayersInTeam(1) > 0)
|
||||
{
|
||||
m_tmStarting = tmNow;
|
||||
(Network::Message("GameStarting") << m_tmStarting).SendToAll();
|
||||
}
|
||||
}
|
||||
|
||||
if (m_tmStarting > 0 && m_tmStarted == 0 && tmNow - m_tmStarting > PrepareTime * 1000)
|
||||
{
|
||||
m_tmStarted = tmNow;
|
||||
(Network::Message("GameStarted") << m_tmStarted).SendToAll();
|
||||
|
||||
for (uint i = 0; i < g_payloadBeginTriggers.length(); i++)
|
||||
{
|
||||
WorldScript@ ws = WorldScript::GetWorldScript(g_scene, g_payloadBeginTriggers[i]);
|
||||
ws.Execute();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!m_ended && m_tmStarted > 0)
|
||||
CheckTimeReached(ms);
|
||||
}
|
||||
|
||||
string NameForTeam(int index) override
|
||||
{
|
||||
if (index == 0)
|
||||
return "Defenders";
|
||||
else if (index == 1)
|
||||
return "Attackers";
|
||||
|
||||
return "Unknown";
|
||||
}
|
||||
|
||||
void CheckTimeReached(int dt)
|
||||
{
|
||||
// Check if time limit is not reached yet
|
||||
if (m_tmLimitCustom - (CurrPlaytimeLevel() - m_tmStarted) > 0)
|
||||
{
|
||||
// Don't need to continue checking
|
||||
m_tmOvertime = 0;
|
||||
m_tmInOvertime = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
// Count how long we're in overtime for later time limit fixing when we reach a checkpoint
|
||||
if (m_tmOvertime > 0)
|
||||
m_tmInOvertime += dt;
|
||||
|
||||
// Check if there are any attackers still inside
|
||||
if (m_payload.AttackersInside() > 0)
|
||||
{
|
||||
// We have overtime
|
||||
m_tmOvertime = int(TimeOvertime * 1000);
|
||||
return;
|
||||
}
|
||||
|
||||
// If we have overtime
|
||||
if (m_tmOvertime > 0)
|
||||
{
|
||||
// Decrease timer
|
||||
m_tmOvertime -= dt;
|
||||
if (m_tmOvertime <= 0)
|
||||
{
|
||||
// Overtime countdown reached, time limit reached
|
||||
TimeReached();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// No overtime, so time limit is reached
|
||||
TimeReached();
|
||||
}
|
||||
}
|
||||
|
||||
void TimeReached()
|
||||
{
|
||||
if (!Network::IsServer())
|
||||
return;
|
||||
|
||||
(Network::Message("TimeReached")).SendToAll();
|
||||
SetWinner(false);
|
||||
}
|
||||
|
||||
bool ShouldFreezeControls() override
|
||||
{
|
||||
return m_switchClass.m_visible
|
||||
|| TeamVersusGameMode::ShouldFreezeControls();
|
||||
}
|
||||
|
||||
bool ShouldDisplayCursor() override
|
||||
{
|
||||
return m_switchClass.m_visible
|
||||
|| TeamVersusGameMode::ShouldDisplayCursor();
|
||||
}
|
||||
|
||||
bool CanSwitchTeams() override
|
||||
{
|
||||
return m_tmStarted == 0;
|
||||
}
|
||||
|
||||
PlayerRecord@ CreatePlayerRecord() override
|
||||
{
|
||||
return PayloadPlayerRecord();
|
||||
}
|
||||
|
||||
int GetPlayerClassCount(PlayerClass playerClass, TeamVersusScore@ team)
|
||||
{
|
||||
if (team is null)
|
||||
return 0;
|
||||
|
||||
int ret = 0;
|
||||
for (uint i = 0; i < team.m_players.length(); i++)
|
||||
{
|
||||
if (team.m_players[i].peer == 255)
|
||||
continue;
|
||||
auto record = cast<PayloadPlayerRecord>(team.m_players[i]);
|
||||
if (record.playerClass == playerClass)
|
||||
ret++;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
void PlayerClassesUpdated()
|
||||
{
|
||||
m_switchClass.PlayerClassesUpdated();
|
||||
}
|
||||
|
||||
void SetWinner(bool attackers)
|
||||
{
|
||||
if (attackers)
|
||||
print("Attackers win!");
|
||||
else
|
||||
print("Defenders win!");
|
||||
|
||||
m_payloadHUD.Winner(attackers);
|
||||
EndMatch();
|
||||
}
|
||||
|
||||
void DisplayPlayerName(int idt, SpriteBatch& sb, PlayerRecord@ record, PlayerHusk@ plr, vec2 pos) override
|
||||
{
|
||||
TeamVersusGameMode::DisplayPlayerName(idt, sb, record, plr, pos);
|
||||
|
||||
m_payloadHUD.DisplayPlayerName(idt, sb, cast<PayloadPlayerRecord>(record), plr, pos);
|
||||
}
|
||||
|
||||
void RenderFrame(int idt, SpriteBatch& sb) override
|
||||
{
|
||||
Player@ player = GetLocalPlayer();
|
||||
if (player !is null)
|
||||
{
|
||||
PlayerHealgun@ healgun = cast<PlayerHealgun>(player.m_currWeapon);
|
||||
if (healgun !is null)
|
||||
healgun.RenderMarkers(idt, sb);
|
||||
}
|
||||
|
||||
TeamVersusGameMode::RenderFrame(idt, sb);
|
||||
}
|
||||
|
||||
void RenderWidgets(PlayerRecord@ player, int idt, SpriteBatch& sb) override
|
||||
{
|
||||
m_payloadHUD.Draw(sb, idt);
|
||||
|
||||
TeamVersusGameMode::RenderWidgets(player, idt, sb);
|
||||
|
||||
m_switchClass.Draw(sb, idt);
|
||||
}
|
||||
|
||||
void GoNextMap() override
|
||||
{
|
||||
if (m_switchedSidesData !is null)
|
||||
{
|
||||
TeamVersusGameMode::GoNextMap();
|
||||
return;
|
||||
}
|
||||
|
||||
ChangeLevel(GetCurrentLevelFilename());
|
||||
}
|
||||
|
||||
void SpawnPlayers() override
|
||||
{
|
||||
if (m_switchedSidesData is null)
|
||||
{
|
||||
TeamVersusGameMode::SpawnPlayers();
|
||||
return;
|
||||
}
|
||||
|
||||
if (Network::IsServer())
|
||||
{
|
||||
for (uint i = 0; i < m_switchedSidesData.length(); i += 2)
|
||||
{
|
||||
uint peer = uint(m_switchedSidesData[i].GetInteger());
|
||||
uint team = uint(m_switchedSidesData[i + 1].GetInteger());
|
||||
|
||||
TeamVersusScore@ joinScore = FindTeamScore(team);
|
||||
if (joinScore is m_teamScores[0])
|
||||
@joinScore = m_teamScores[1];
|
||||
else
|
||||
@joinScore = m_teamScores[0];
|
||||
|
||||
for (uint j = 0; j < g_players.length(); j++)
|
||||
{
|
||||
if (g_players[j].peer != peer)
|
||||
continue;
|
||||
SpawnPlayer(j, vec2(), 0, joinScore.m_team);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Save(SValueBuilder& builder) override
|
||||
{
|
||||
if (m_switchedSidesData is null)
|
||||
{
|
||||
builder.PushArray("teams");
|
||||
for (uint i = 0; i < g_players.length(); i++)
|
||||
{
|
||||
if (g_players[i].peer == 255)
|
||||
continue;
|
||||
builder.PushInteger(g_players[i].peer);
|
||||
builder.PushInteger(g_players[i].team);
|
||||
}
|
||||
builder.PopArray();
|
||||
}
|
||||
|
||||
TeamVersusGameMode::Save(builder);
|
||||
}
|
||||
|
||||
void Start(uint8 peer, SValue@ save, StartMode sMode) override
|
||||
{
|
||||
if (save !is null)
|
||||
@m_switchedSidesData = GetParamArray(UnitPtr(), save, "teams", false);
|
||||
|
||||
TeamVersusGameMode::Start(peer, save, sMode);
|
||||
|
||||
m_tmLimit = 0; // infinite time limit as far as VersusGameMode is concerned
|
||||
m_tmLimitCustom = TimeLimit * 1000; // 5 minutes by default
|
||||
|
||||
@m_payload = cast<PayloadBehavior>(PayloadUnit.FetchFirst().GetScriptBehavior());
|
||||
|
||||
if (m_payload is null)
|
||||
PrintError("PayloadUnit is not a PayloadBehavior!");
|
||||
|
||||
UnitPtr unitFirstNode = FirstNode.FetchFirst();
|
||||
if (unitFirstNode.IsValid())
|
||||
{
|
||||
auto node = cast<WorldScript::PayloadNode>(unitFirstNode.GetScriptBehavior());
|
||||
if (node !is null)
|
||||
@m_payload.m_targetNode = node;
|
||||
else
|
||||
PrintError("First target node is not a PayloadNode script!");
|
||||
}
|
||||
else
|
||||
PrintError("First target node was not set!");
|
||||
|
||||
WorldScript::PayloadNode@ prevNode;
|
||||
|
||||
float totalDistance = 0.0f;
|
||||
|
||||
UnitPtr unitNode = unitFirstNode;
|
||||
while (unitNode.IsValid())
|
||||
{
|
||||
auto node = cast<WorldScript::PayloadNode>(unitNode.GetScriptBehavior());
|
||||
if (node is null)
|
||||
break;
|
||||
|
||||
unitNode = node.NextNode.FetchFirst();
|
||||
|
||||
@node.m_prevNode = prevNode;
|
||||
@node.m_nextNode = cast<WorldScript::PayloadNode>(unitNode.GetScriptBehavior());
|
||||
|
||||
if (prevNode !is null)
|
||||
totalDistance += dist(prevNode.Position, node.Position);
|
||||
|
||||
@prevNode = node;
|
||||
}
|
||||
|
||||
float currDistance = 0.0f;
|
||||
|
||||
auto distNode = cast<WorldScript::PayloadNode>(unitFirstNode.GetScriptBehavior());
|
||||
while (distNode !is null)
|
||||
{
|
||||
if (distNode.m_prevNode is null)
|
||||
distNode.m_locationFactor = 0.0f;
|
||||
else
|
||||
{
|
||||
currDistance += dist(distNode.m_prevNode.Position, distNode.Position);
|
||||
distNode.m_locationFactor = currDistance / totalDistance;
|
||||
}
|
||||
|
||||
@distNode = distNode.m_nextNode;
|
||||
}
|
||||
|
||||
m_payloadHUD.AddCheckpoints();
|
||||
}
|
||||
|
||||
void SpawnPlayer(int i, vec2 pos = vec2(), int unitId = 0, uint team = 0) override
|
||||
{
|
||||
TeamVersusGameMode::SpawnPlayer(i, pos, unitId, team);
|
||||
|
||||
PayloadPlayerRecord@ record = cast<PayloadPlayerRecord>(g_players[i]);
|
||||
record.HandlePlayerClass();
|
||||
|
||||
if (g_players[i].local)
|
||||
{
|
||||
//TODO: This doesn't work well
|
||||
bool localAttackers = (team == HashString("player_1"));
|
||||
for (uint j = 0; j < g_teamForceFields.length(); j++)
|
||||
{
|
||||
bool hasCollision = (localAttackers != g_teamForceFields[j].Attackers);
|
||||
|
||||
auto units = g_teamForceFields[j].Units.FetchAll();
|
||||
for (uint k = 0; k < units.length(); k++)
|
||||
{
|
||||
PhysicsBody@ body = units[k].GetPhysicsBody();
|
||||
if (body is null)
|
||||
{
|
||||
PrintError("PhysicsBody for unit " + units[k].GetDebugName() + "is null");
|
||||
continue;
|
||||
}
|
||||
body.SetActive(hasCollision);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
samples/Ballerina/hello-world-service.bal (Normal file, 16 lines)
@@ -0,0 +1,16 @@
import ballerina.lang.messages;
import ballerina.net.http;
import ballerina.doc;

@doc:Description {value:"By default Ballerina assumes that the service is to be exposed via HTTP/1.1 using the system default port and that all requests coming to the HTTP server will be delivered to this service."}
service<http> helloWorld {
    @doc:Description {value:"All resources are invoked with an argument of type message, the built-in reference type representing a network invocation."}
    resource sayHello (message m) {
        // Creates an empty message.
        message response = {};
        // A util method that can be used to set string payload.
        messages:setStringPayload(response, "Hello, World!");
        // Reply keyword sends the response back to the client.
        reply response;
    }
}
samples/Ballerina/hello-world.bal (Normal file, 6 lines)
@@ -0,0 +1,6 @@
import ballerina.lang.system;

function main (string[] args) {
    system:println("Hello, World!");
}

samples/Ballerina/json.bal (Normal file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
import ballerina.lang.system;
|
||||
|
||||
function main (string[] args) {
|
||||
// JSON string value.
|
||||
json j1 = "Apple";
|
||||
system:println(j1);
|
||||
|
||||
// JSON number value.
|
||||
json j2 = 5.36;
|
||||
system:println(j2);
|
||||
|
||||
// JSON true value.
|
||||
json j3 = true;
|
||||
system:println(j3);
|
||||
|
||||
// JSON false value.
|
||||
json j4 = false;
|
||||
system:println(j4);
|
||||
|
||||
// JSON null value.
|
||||
json j5 = null;
|
||||
|
||||
//JSON Objects.
|
||||
json j6 = {name:"apple", color:"red", price:j2};
|
||||
system:println(j6);
|
||||
|
||||
//JSON Arrays. They are arrays of any JSON value.
|
||||
json j7 = [1, false, null, "foo",
|
||||
{first:"John", last:"Pala"}];
|
||||
system:println(j7);
|
||||
}
|
||||
samples/Ballerina/var.bal (Normal file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
import ballerina.lang.system;
|
||||
|
||||
function divideBy10 (int d) (int, int) {
|
||||
return d / 10, d % 10;
|
||||
}
|
||||
|
||||
function main (string[] args) {
|
||||
//Here the variable type is inferred type from the initial value. This is same as "int k = 5";
|
||||
var k = 5;
|
||||
system:println(10 + k);
|
||||
|
||||
//Here the type of the 'strVar' is 'string'.
|
||||
var strVar = "Hello!";
|
||||
system:println(strVar);
|
||||
|
||||
//Multiple assignment with 'var' allows you to define the variable then and there.
|
||||
//Variable type is inferred from the right-hand side.
|
||||
var q, r = divideBy10(6);
|
||||
system:println("06/10: " + "quotient=" + q + " " +
|
||||
"remainder=" + r);
|
||||
|
||||
//To ignore a particular return value in a multiple assignment statement, use '_'.
|
||||
var q1, _ = divideBy10(57);
|
||||
system:println("57/10: " + "quotient=" + q1);
|
||||
|
||||
var _, r1 = divideBy10(9);
|
||||
system:println("09/10: " + "remainder=" + r1);
|
||||
}
|
||||
samples/Ballerina/xml.bal (Normal file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
import ballerina.lang.system;
|
||||
|
||||
function main (string[] args) {
|
||||
|
||||
// XML element. Can only have one root element.
|
||||
xml x1 = xml `<book>The Lost World</book>`;
|
||||
system:println(x1);
|
||||
|
||||
// XML text
|
||||
xml x2 = xml `Hello, world!`;
|
||||
system:println(x2);
|
||||
|
||||
// XML comment
|
||||
xml x3 = xml `<!--I am a comment-->`;
|
||||
system:println(x3);
|
||||
|
||||
// XML processing instruction
|
||||
xml x4 = xml `<?target data?>`;
|
||||
system:println(x4);
|
||||
|
||||
// Multiple XML items can be combined to form a sequence of XML. The resulting sequence is again an XML on its own.
|
||||
xml x5 = x1 + x2 + x3 + x4;
|
||||
system:println("\nResulting XML sequence:");
|
||||
system:println(x5);
|
||||
|
||||
}
|
||||
samples/CoNLL-U/CF1.conllu (Normal file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
# text = PT no governo
|
||||
# source = CETENFolha n=1 cad=Opinião sec=opi sem=94a
|
||||
# sent_id = CF1-1
|
||||
# id = 1
|
||||
1 PT PT PROPN PROP|M|S|@NPHR Gender=Masc|Number=Sing 0 root _ _
|
||||
2-3 no _ _ _ _ _ _ _ _
|
||||
2 em em ADP <sam->|PRP|@N< _ 4 case _ _
|
||||
3 o o DET <-sam>|<artd>|ART|M|S|@>N Definite=Def|Gender=Masc|Number=Sing|PronType=Art 4 det _ _
|
||||
4 governo governo NOUN <np-def>|N|M|S|@P< Gender=Masc|Number=Sing 1 nmod _ _
|
||||
|
||||
# text = BRASÍLIA Pesquisa Datafolha publicada hoje revela um dado supreendente: recusando uma postura radical, a esmagadora maioria (77%) dos eleitores quer o PT participando do Governo Fernando Henrique Cardoso.
|
||||
# source = CETENFolha n=1 cad=Opinião sec=opi sem=94a &W
|
||||
# sent_id = CF1-3
|
||||
# id = 2
|
||||
1 BRASÍLIA Brasília PROPN PROP|F|S|@ADVL> Gender=Fem|Number=Sing 6 dep _ _
|
||||
2 Pesquisa Pesquisa PROPN _ Gender=Fem|Number=Sing 6 nsubj _ ChangedBy=Issue119|MWE=Pesquisa_Datafolha|MWEPOS=PROPN
|
||||
3 Datafolha Datafolha PROPN _ Number=Sing 2 flat:name _ ChangedBy=Issue119
|
||||
4 publicada publicar VERB <mv>|V|PCP|F|S|@ICL-N< Gender=Fem|Number=Sing|VerbForm=Part 2 acl _ _
|
||||
5 hoje hoje ADV ADV|@<ADVL _ 4 advmod _ _
|
||||
6 revela revelar VERB <mv>|V|PR|3S|IND|@FS-STA Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin 0 root _ _
|
||||
7 um um DET <arti>|ART|M|S|@>N Definite=Ind|Gender=Masc|Number=Sing|PronType=Art 8 det _ _
|
||||
8 dado dado NOUN <np-idf>|N|M|S|@<ACC Gender=Masc|Number=Sing 6 obj _ _
|
||||
9 supreendente surpreendente ADJ ADJ|M|S|@N< Gender=Masc|Number=Sing 8 amod _ ChangedBy=Issue165|SpaceAfter=No
|
||||
10 : : PUNCT PU|@PU _ 26 punct _ _
|
||||
11 recusando recusar VERB <mv>|V|GER|@ICL-ADVL> VerbForm=Ger 26 advcl _ _
|
||||
12 uma um DET <arti>|ART|F|S|@>N Definite=Ind|Gender=Fem|Number=Sing|PronType=Art 13 det _ _
|
||||
13 postura postura NOUN <np-idf>|N|F|S|@<ACC Gender=Fem|Number=Sing 11 obj _ _
|
||||
14 radical radical ADJ ADJ|F|S|@N< Gender=Fem|Number=Sing 13 amod _ ChangedBy=Issue165|SpaceAfter=No
|
||||
15 , , PUNCT PU|@PU _ 26 punct _ _
|
||||
16 a o DET <artd>|ART|F|S|@>N Definite=Def|Gender=Fem|Number=Sing|PronType=Art 18 det _ _
|
||||
17 esmagadora esmagador ADJ ADJ|F|S|@>N Gender=Fem|Number=Sing 18 amod _ _
|
||||
18 maioria maioria NOUN <np-def>|N|F|S|@SUBJ> Gender=Fem|Number=Sing 26 nsubj _ _
|
||||
19 ( ( PUNCT PU|@PU _ 21 punct _ ChangedBy=Issue165|SpaceAfter=No
|
||||
20 77 77 NUM <card>|NUM|M|P|@>N NumType=Card 21 nummod _ ChangedBy=Issue165|ChangedBy=Issue168|SpaceAfter=No
|
||||
21 % % SYM <np-def>|N|M|P|@N<PRED Gender=Masc|Number=Plur 18 appos _ ChangedBy=Issue165|SpaceAfter=No
|
||||
22 ) ) PUNCT PU|@PU _ 21 punct _ _
|
||||
23-24 dos _ _ _ _ _ _ _ _
|
||||
23 de de ADP <sam->|PRP|@N< _ 25 case _ _
|
||||
24 os o DET <-sam>|<artd>|ART|M|P|@>N Definite=Def|Gender=Masc|Number=Plur|PronType=Art 25 det _ _
|
||||
25 eleitores eleitor NOUN <np-def>|N|M|P|@P< Gender=Masc|Number=Plur 18 nmod _ _
|
||||
26 quer querer VERB <mv>|V|PR|3S|IND|@FS-N<PRED Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin 8 acl:relcl _ _
|
||||
27 o o DET <artd>|ART|M|S|@>N Definite=Def|Gender=Masc|Number=Sing|PronType=Art 28 det _ _
|
||||
28 PT PT PROPN PROP|M|S|@<ACC Gender=Masc|Number=Sing 26 obj _ _
|
||||
29 participando participar VERB <mv>|V|GER|@ICL-<OC VerbForm=Ger 26 xcomp _ _
|
||||
30-31 do _ _ _ _ _ _ _ _
|
||||
30 de de ADP <sam->|PRP|@<PIV _ 32 case _ _
|
||||
31 o o DET <-sam>|<artd>|ART|M|S|@>N Definite=Def|Gender=Masc|Number=Sing|PronType=Art 32 det _ _
|
||||
32 Governo governo NOUN <prop>|<np-def>|N|M|S|@P< Gender=Masc|Number=Sing 29 obl _ _
|
||||
33 Fernando Fernando PROPN _ Gender=Masc|Number=Sing 32 nmod _ ChangedBy=Issue119|MWE=Fernando_Henrique_Cardoso|MWEPOS=PROPN
|
||||
34 Henrique Henrique PROPN _ Number=Sing 33 flat:name _ ChangedBy=Issue119
|
||||
35 Cardoso Cardoso PROPN _ Number=Sing 33 flat:name _ SpaceAfter=No
|
||||
36 . . PUNCT PU|@PU _ 6 punct _ _
|
||||
|
||||
# text = Tem sentido -- aliás, muitíssimo sentido.
|
||||
# source = CETENFolha n=1 cad=Opinião sec=opi sem=94a &D
|
||||
# sent_id = CF1-4
|
||||
# id = 3
|
||||
1 Tem ter VERB <mv>|V|PR|3S|IND|@FS-STA Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin 0 root _ _
|
||||
2 sentido sentido NOUN <np-idf>|N|M|S|@<ACC Gender=Masc|Number=Sing 1 obj _ _
|
||||
3 -- -- PUNCT PU|@PU _ 1 punct _ _
|
||||
4 aliás aliás ADV <kc>|ADV|@<ADVL _ 1 advmod _ ChangedBy=Issue165|SpaceAfter=No
|
||||
5 , , PUNCT PU|@PU _ 7 punct _ _
|
||||
6 muitíssimo muitíssimo DET <quant>|<SUP>|DET|M|S|@>N Gender=Masc|Number=Sing|PronType=Ind 7 det _ _
|
||||
7 sentido sentido NOUN <np-idf>|N|M|S|@N<PRED Gender=Masc|Number=Sing 2 appos _ ChangedBy=Issue165|SpaceAfter=No
|
||||
8 . . PUNCT PU|@PU _ 1 punct _ _
|
||||
|
||||
# text = Muito mais do que nos tempos na ditadura, a solidez do PT está, agora, ameaçada.
|
||||
# source = CETENFolha n=1 cad=Opinião sec=opi sem=94a
|
||||
# sent_id = CF1-5
|
||||
# id = 4
|
||||
1 Muito muito ADV <quant>|ADV|@>A _ 2 advmod _ _
|
||||
2 mais mais ADV <quant>|<KOMP>|<COMP>|ADV|@ADVL> _ 22 advmod _ _
|
||||
3-4 do _ _ _ _ _ _ _ _
|
||||
3 de de ADP <sam->|PRP|@COM _ 8 case _ _
|
||||
4 o o PRON <dem>|<-sam>|DET|M|S|@P< Gender=Masc|Number=Sing|PronType=Dem 3 fixed _ _
|
||||
5 que que PRON <rel>|INDP|M|S|@N< Gender=Masc|Number=Sing|PronType=Rel 3 fixed _ _
|
||||
6-7 nos _ _ _ _ _ _ _ _
|
||||
6 em em ADP <sam->|<first-cjt>|PRP|@KOMP< _ 8 case _ _
|
||||
7 os o DET <-sam>|<artd>|ART|M|P|@>N Definite=Def|Gender=Masc|Number=Plur|PronType=Art 8 det _ _
|
||||
8 tempos tempo NOUN <first-cjt>|<np-def>|N|M|P|@P< Gender=Masc|Number=Plur 2 obl _ _
|
||||
9-10 na _ _ _ _ _ _ _ _
|
||||
9 em em ADP <sam->|PRP|@N< _ 11 case _ _
|
||||
10 a o DET <-sam>|<artd>|ART|F|S|@>N Definite=Def|Gender=Fem|Number=Sing|PronType=Art 11 det _ _
|
||||
11 ditadura ditadura NOUN <np-def>|N|F|S|@P< Gender=Fem|Number=Sing 8 nmod _ ChangedBy=Issue165|SpaceAfter=No
|
||||
12 , , PUNCT PU|@PU _ 2 punct _ _
|
||||
13 a o DET <artd>|ART|F|S|@>N Definite=Def|Gender=Fem|Number=Sing|PronType=Art 14 det _ _
|
||||
14 solidez solidez NOUN <np-def>|N|F|S|@SUBJ> Gender=Fem|Number=Sing 22 nsubj _ _
|
||||
15-16 do _ _ _ _ _ _ _ _
|
||||
15 de de ADP <sam->|PRP|@N< _ 17 case _ _
|
||||
16 o o DET <-sam>|<artd>|ART|M|S|@>N Definite=Def|Gender=Masc|Number=Sing|PronType=Art 17 det _ _
|
||||
17 PT PT PROPN PROP|M|S|@P< Gender=Masc|Number=Sing 14 nmod _ _
|
||||
18 está estar AUX <mv>|V|PR|3S|IND|@FS-STA Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin 22 cop _ ChangedBy=Issue165|ChangedBy=Issue167|SpaceAfter=No
|
||||
19 , , PUNCT PU|@PU _ 20 punct _ _
|
||||
20 agora agora ADV <kc>|ADV|@<ADVL _ 22 advmod _ ChangedBy=Issue165|SpaceAfter=No
|
||||
21 , , PUNCT PU|@PU _ 20 punct _ _
|
||||
22 ameaçada ameaçar VERB <mv>|V|PCP|F|S|@ICL-<SC Gender=Fem|Number=Sing|VerbForm=Part 0 root _ ChangedBy=Issue165|SpaceAfter=No
|
||||
23 . . PUNCT PU|@PU _ 22 punct _ _
|
||||
|
||||
# text = Nem Lula nem o partido ainda encontraram um discurso para se diferenciar.
|
||||
# source = CETENFolha n=1 cad=Opinião sec=opi sem=94a
|
||||
# sent_id = CF1-6
|
||||
# id = 5
|
||||
1 Nem nem CCONJ <parkc-1>|KC|@CO _ 2 cc _ _
|
||||
2 Lula Lula PROPN <first-cjt>|PROP|M|S|@SUBJ> Gender=Masc|Number=Sing 7 nsubj _ _
|
||||
3 nem nem CCONJ <co-subj>|<parkc-2>|KC|@CO _ 5 cc _ _
|
||||
4 o o DET <artd>|ART|M|S|@>N Definite=Def|Gender=Masc|Number=Sing|PronType=Art 5 det _ _
|
||||
5 partido partido NOUN <cjt>|<np-def>|N|M|S|@SUBJ> Gender=Masc|Number=Sing 2 conj _ _
|
||||
6 ainda ainda ADV ADV|@ADVL> _ 7 advmod _ _
|
||||
7 encontraram encontrar VERB <mv>|V|PS/MQP|3P|IND|@FS-STA Mood=Ind|Number=Plur|Person=3|VerbForm=Fin 0 root _ _
|
||||
8 um um DET _ Definite=Ind|Gender=Masc|Number=Sing|PronType=Art 9 det _ _
|
||||
9 discurso discurso NOUN <np-idf>|N|M|S|@<ACC Gender=Masc|Number=Sing 7 obj _ _
|
||||
10 para para ADP _ _ 12 case _ _
|
||||
11 se se PRON PERS|M|3S|ACC|@ACC>-PASS Case=Acc|Gender=Masc|Number=Sing|Person=3|PronType=Prs 12 expl _ ChangedBy=Issue135
|
||||
12 diferenciar diferenciar VERB _ VerbForm=Inf 9 acl _ ChangedBy=Issue165|SpaceAfter=No
|
||||
13 . . PUNCT PU|@PU _ 7 punct _ _
|
||||
|
||||
# text = Eles se dizem oposição, mas ainda não informaram o que vão combater.
|
||||
# source = CETENFolha n=1 cad=Opinião sec=opi sem=94a
|
||||
# sent_id = CF1-7
|
||||
# id = 6
|
||||
1 Eles eles PRON PERS|M|3P|NOM|@SUBJ> Case=Nom|Gender=Masc|Number=Plur|Person=3|PronType=Prs 3 nsubj _ _
|
||||
2 se se PRON PERS|M|3P|ACC|@ACC>-PASS Case=Acc|Gender=Masc|Number=Plur|Person=3|PronType=Prs 3 expl _ ChangedBy=Issue135
|
||||
3 dizem dizer VERB <first-cjt>|<mv>|<se-passive>|V|PR|3P|IND|@FS-STA Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin 0 root _ _
|
||||
4 oposição oposição NOUN <np-idf>|N|F|S|@<OC Gender=Fem|Number=Sing 3 xcomp _ ChangedBy=Issue165|SpaceAfter=No
|
||||
5 , , PUNCT PU|@PU _ 9 punct _ _
|
||||
6 mas mas CCONJ <co-fcl>|KC|@CO _ 9 cc _ _
|
||||
7 ainda ainda ADV ADV|@>A _ 8 advmod _ _
|
||||
8 não não ADV _ Polarity=Neg 9 advmod _ _
|
||||
9 informaram informar VERB <cjt>|<mv>|V|PS/MQP|3P|IND|@FS-STA Mood=Ind|Number=Plur|Person=3|VerbForm=Fin 3 conj _ _
|
||||
10 o o PRON _ Gender=Masc|Number=Sing|PronType=Dem 11 det _ _
|
||||
11 que que PRON <interr>|INDP|M|S|@ACC> Gender=Masc|Number=Sing|PronType=Int 13 obj _ _
|
||||
12 vão ir AUX <aux>|V|PR|3P|IND|@FS-<ACC Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin 13 aux _ _
|
||||
13 combater combater VERB <mv>|V|INF|@ICL-AUX< VerbForm=Inf 9 ccomp _ ChangedBy=Issue165|SpaceAfter=No
|
||||
14 . . PUNCT PU|@PU _ 3 punct _ _
|
||||
|
||||
# text = Muitas das prioridades do novo governo coincidem com as prioridades do PT.
|
||||
# source = CETENFolha n=1 cad=Opinião sec=opi sem=94a
|
||||
# sent_id = CF1-8
|
||||
# id = 7
|
||||
1 Muitas muito PRON <quant>|DET|F|P|@SUBJ> Gender=Fem|Number=Plur|PronType=Ind 9 nsubj _ _
|
||||
2-3 das _ _ _ _ _ _ _ _
|
||||
2 de de ADP <sam->|PRP|@N< _ 4 case _ _
|
||||
3 as o DET <-sam>|<artd>|ART|F|P|@>N Definite=Def|Gender=Fem|Number=Plur|PronType=Art 4 det _ _
|
||||
4 prioridades prioridade NOUN <np-def>|N|F|P|@P< Gender=Fem|Number=Plur 1 nmod _ _
|
||||
5-6 do _ _ _ _ _ _ _ _
|
||||
5 de de ADP <sam->|PRP|@N< _ 8 case _ _
|
||||
6 o o DET <-sam>|<artd>|ART|M|S|@>N Definite=Def|Gender=Masc|Number=Sing|PronType=Art 8 det _ _
|
||||
7 novo novo ADJ ADJ|M|S|@>N Gender=Masc|Number=Sing 8 amod _ _
|
||||
8 governo governo NOUN <np-def>|N|M|S|@P< Gender=Masc|Number=Sing 4 nmod _ _
|
||||
9 coincidem coincidir VERB <mv>|V|PR|3P|IND|@FS-STA Mood=Ind|Number=Plur|Person=3|Tense=Pres|VerbForm=Fin 0 root _ _
|
||||
10 com com ADP PRP|@<PIV _ 12 case _ _
|
||||
11 as o DET <artd>|ART|F|P|@>N Definite=Def|Gender=Fem|Number=Plur|PronType=Art 12 det _ _
|
||||
12 prioridades prioridade NOUN <np-def>|N|F|P|@P< Gender=Fem|Number=Plur 9 obj _ _
|
||||
13-14 do _ _ _ _ _ _ _ _
|
||||
13 de de ADP <sam->|PRP|@N< _ 15 case _ _
|
||||
14 o o DET <-sam>|<artd>|ART|M|S|@>N Definite=Def|Gender=Masc|Number=Sing|PronType=Art 15 det _ _
|
||||
15 PT PT PROPN PROP|M|S|@P< Gender=Masc|Number=Sing 12 nmod _ ChangedBy=Issue165|SpaceAfter=No
|
||||
16 . . PUNCT PU|@PU _ 9 punct _ _
|
||||
|
||||
samples/CoNLL-U/en-ud-test-abridged.conllu (Normal file, 122 lines)
@@ -0,0 +1,122 @@
|
||||
# newdoc id = weblog-blogspot.com_zentelligence_20040423000200_ENG_20040423_000200
|
||||
# sent_id = weblog-blogspot.com_zentelligence_20040423000200_ENG_20040423_000200-0001
|
||||
# text = What if Google Morphed Into GoogleOS?
|
||||
1 What what PRON WP PronType=Int 0 root 0:root _
|
||||
2 if if SCONJ IN _ 4 mark 4:mark _
|
||||
3 Google Google PROPN NNP Number=Sing 4 nsubj 4:nsubj _
|
||||
4 Morphed morph VERB VBD Mood=Ind|Tense=Past|VerbForm=Fin 1 advcl 1:advcl _
|
||||
5 Into into ADP IN _ 6 case 6:case _
|
||||
6 GoogleOS GoogleOS PROPN NNP Number=Sing 4 obl 4:obl SpaceAfter=No
|
||||
7 ? ? PUNCT . _ 4 punct 4:punct _
|
||||
|
||||
# sent_id = weblog-blogspot.com_zentelligence_20040423000200_ENG_20040423_000200-0002
|
||||
# text = What if Google expanded on its search-engine (and now e-mail) wares into a full-fledged operating system?
|
||||
1 What what PRON WP PronType=Int 0 root 0:root _
|
||||
2 if if SCONJ IN _ 4 mark 4:mark _
|
||||
3 Google Google PROPN NNP Number=Sing 4 nsubj 4:nsubj _
|
||||
4 expanded expand VERB VBD Mood=Ind|Tense=Past|VerbForm=Fin 1 advcl 1:advcl _
|
||||
5 on on ADP IN _ 15 case 15:case _
|
||||
6 its its PRON PRP$ Gender=Neut|Number=Sing|Person=3|Poss=Yes|PronType=Prs 15 nmod:poss 15:nmod:poss _
|
||||
7 search search NOUN NN Number=Sing 9 compound 9:compound SpaceAfter=No
|
||||
8 - - PUNCT HYPH _ 9 punct 9:punct SpaceAfter=No
|
||||
9 engine engine NOUN NN Number=Sing 15 compound 15:compound _
|
||||
10 ( ( PUNCT -LRB- _ 9 punct 9:punct SpaceAfter=No
|
||||
11 and and CCONJ CC _ 13 cc 13:cc _
|
||||
12 now now ADV RB _ 13 advmod 13:advmod _
|
||||
13 e-mail e-mail NOUN NN Number=Sing 9 conj 9:conj SpaceAfter=No
|
||||
14 ) ) PUNCT -RRB- _ 15 punct 15:punct _
|
||||
15 wares wares NOUN NNS Number=Plur 4 obl 4:obl _
|
||||
16 into into ADP IN _ 22 case 22:case _
|
||||
17 a a DET DT Definite=Ind|PronType=Art 22 det 22:det _
|
||||
18 full full ADV RB _ 20 advmod 20:advmod SpaceAfter=No
|
||||
19 - - PUNCT HYPH _ 20 punct 20:punct SpaceAfter=No
|
||||
20 fledged fledged ADJ JJ Degree=Pos 22 amod 22:amod _
|
||||
21 operating operating NOUN NN Number=Sing 22 compound 22:compound _
|
||||
22 system system NOUN NN Number=Sing 4 obl 4:obl SpaceAfter=No
|
||||
23 ? ? PUNCT . _ 4 punct 4:punct _
|
||||
|
||||
# sent_id = weblog-blogspot.com_zentelligence_20040423000200_ENG_20040423_000200-0003
|
||||
# text = [via Microsoft Watch from Mary Jo Foley ]
|
||||
1 [ [ PUNCT -LRB- _ 4 punct 4:punct SpaceAfter=No
|
||||
2 via via ADP IN _ 4 case 4:case _
|
||||
3 Microsoft Microsoft PROPN NNP Number=Sing 4 compound 4:compound _
|
||||
4 Watch Watch PROPN NNP Number=Sing 0 root 0:root _
|
||||
5 from from ADP IN _ 6 case 6:case _
|
||||
6 Mary Mary PROPN NNP Number=Sing 4 nmod 4:nmod _
|
||||
7 Jo Jo PROPN NNP Number=Sing 6 flat 6:flat _
|
||||
8 Foley Foley PROPN NNP Number=Sing 6 flat 6:flat _
|
||||
9 ] ] PUNCT -RRB- _ 4 punct 4:punct _
|
||||
|
||||
# newdoc id = weblog-blogspot.com_marketview_20050511222700_ENG_20050511_222700
|
||||
# sent_id = weblog-blogspot.com_marketview_20050511222700_ENG_20050511_222700-0001
|
||||
# text = (And, by the way, is anybody else just a little nostalgic for the days when that was a good thing?)
|
||||
1 ( ( PUNCT -LRB- _ 14 punct 14:punct SpaceAfter=No
|
||||
2 And and CCONJ CC _ 14 cc 14:cc SpaceAfter=No
|
||||
3 , , PUNCT , _ 14 punct 14:punct _
|
||||
4 by by ADP IN _ 6 case 6:case _
|
||||
5 the the DET DT Definite=Def|PronType=Art 6 det 6:det _
|
||||
6 way way NOUN NN Number=Sing 14 obl 14:obl SpaceAfter=No
|
||||
7 , , PUNCT , _ 14 punct 14:punct _
|
||||
8 is be AUX VBZ Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin 14 cop 14:cop _
|
||||
9 anybody anybody PRON NN Number=Sing 14 nsubj 14:nsubj _
|
||||
10 else else ADJ JJ Degree=Pos 9 amod 9:amod _
|
||||
11 just just ADV RB _ 13 advmod 13:advmod _
|
||||
12 a a DET DT Definite=Ind|PronType=Art 13 det 13:det _
|
||||
13 little little ADJ JJ Degree=Pos 14 obl:npmod 14:obl:npmod _
|
||||
14 nostalgic nostalgic NOUN NN Number=Sing 0 root 0:root _
|
||||
15 for for ADP IN _ 17 case 17:case _
|
||||
16 the the DET DT Definite=Def|PronType=Art 17 det 17:det _
|
||||
17 days day NOUN NNS Number=Plur 14 nmod 14:nmod _
|
||||
18 when when ADV WRB PronType=Rel 23 advmod 23:advmod _
|
||||
19 that that PRON DT Number=Sing|PronType=Dem 23 nsubj 23:nsubj _
|
||||
20 was be AUX VBD Mood=Ind|Number=Sing|Person=3|Tense=Past|VerbForm=Fin 23 cop 23:cop _
|
||||
21 a a DET DT Definite=Ind|PronType=Art 23 det 23:det _
|
||||
22 good good ADJ JJ Degree=Pos 23 amod 23:amod _
|
||||
23 thing thing NOUN NN Number=Sing 17 acl:relcl 17:acl:relcl SpaceAfter=No
|
||||
24 ? ? PUNCT . _ 14 punct 14:punct SpaceAfter=No
|
||||
25 ) ) PUNCT -RRB- _ 14 punct 14:punct _
|
||||
|
||||
# sent_id = weblog-blogspot.com_marketview_20050511222700_ENG_20050511_222700-0002
|
||||
# text = This BuzzMachine post argues that Google's rush toward ubiquity might backfire -- which we've all heard before, but it's particularly well-put in this post.
|
||||
1 This this DET DT Number=Sing|PronType=Dem 3 det 3:det _
|
||||
2 BuzzMachine BuzzMachine PROPN NNP Number=Sing 3 compound 3:compound _
|
||||
3 post post NOUN NN Number=Sing 4 nsubj 4:nsubj _
|
||||
4 argues argue VERB VBZ Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin 0 root 0:root _
|
||||
5 that that SCONJ IN _ 12 mark 12:mark _
|
||||
6 Google Google PROPN NNP Number=Sing 8 nmod:poss 8:nmod:poss SpaceAfter=No
|
||||
7 's 's PART POS _ 6 case 6:case _
|
||||
8 rush rush NOUN NN Number=Sing 12 nsubj 12:nsubj _
|
||||
9 toward toward ADP IN _ 10 case 10:case _
|
||||
10 ubiquity ubiquity NOUN NN Number=Sing 8 nmod 8:nmod _
|
||||
11 might might AUX MD VerbForm=Fin 12 aux 12:aux _
|
||||
12 backfire backfire VERB VB VerbForm=Inf 4 ccomp 4:ccomp _
|
||||
13 -- -- PUNCT , _ 12 punct 12:punct _
|
||||
14 which which PRON WDT PronType=Rel 18 obj 18:obj _
|
||||
15 we we PRON PRP Case=Nom|Number=Plur|Person=1|PronType=Prs 18 nsubj 18:nsubj SpaceAfter=No
|
||||
16 've have AUX VBP Mood=Ind|Tense=Pres|VerbForm=Fin 18 aux 18:aux _
|
||||
17 all all ADV RB _ 18 advmod 18:advmod _
|
||||
18 heard hear VERB VBN Tense=Past|VerbForm=Part 12 acl:relcl 12:acl:relcl _
|
||||
19 before before ADV RB _ 18 advmod 18:advmod SpaceAfter=No
|
||||
20 , , PUNCT , _ 27 punct 27:punct _
|
||||
21 but but CCONJ CC _ 27 cc 27:cc _
|
||||
22 it it PRON PRP Case=Nom|Gender=Neut|Number=Sing|Person=3|PronType=Prs 27 nsubj:pass 27:nsubj:pass SpaceAfter=No
|
||||
23 's be VERB VBZ Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin 27 aux:pass 27:aux:pass _
|
||||
24 particularly particularly ADV RB _ 27 advmod 27:advmod _
|
||||
25 well well ADV RB Degree=Pos 27 advmod 27:advmod SpaceAfter=No
|
||||
26 - - PUNCT HYPH _ 27 punct 27:punct SpaceAfter=No
|
||||
27 put put VERB VBN Tense=Past|VerbForm=Part 4 conj 4:conj _
|
||||
28 in in ADP IN _ 30 case 30:case _
|
||||
29 this this DET DT Number=Sing|PronType=Dem 30 det 30:det _
|
||||
30 post post NOUN NN Number=Sing 27 obl 27:obl SpaceAfter=No
|
||||
31 . . PUNCT . _ 4 punct 4:punct _
|
||||
|
||||
# sent_id = weblog-blogspot.com_marketview_20050511222700_ENG_20050511_222700-0003
|
||||
# text = Google is a nice search engine.
|
||||
1 Google Google PROPN NNP Number=Sing 6 nsubj 6:nsubj _
|
||||
2 is be AUX VBZ Mood=Ind|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin 6 cop 6:cop _
|
||||
3 a a DET DT Definite=Ind|PronType=Art 6 det 6:det _
|
||||
4 nice nice ADJ JJ Degree=Pos 6 amod 6:amod _
|
||||
5 search search NOUN NN Number=Sing 6 compound 6:compound _
|
||||
6 engine engine NOUN NN Number=Sing 0 root 0:root SpaceAfter=No
|
||||
7 . . PUNCT . _ 6 punct 6:punct _
|
||||
|
||||
samples/CoNLL-U/ug-ud-test-abridged.conllu (Normal file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
# sent_id = s1
|
||||
# text = ئاسماننى كۆپكۈك، دەريا، كۆل سۇلىرىنى سۈپسۈزۈك تۇرۇشقا، دەل - دەرەخلەرنى بۈك - باراقسان بولۇشقا، ھايۋانلارنى ئەركىن ئازادە ياشاشقا ئىگە قىلىش... بىزنىڭ ئورتاق ئارزۇيىمىز.
|
||||
1 ئاسماننى _ NOUN N _ 30 csubj _ Translit=asmanni
|
||||
2 كۆپكۈك _ VERB V _ 1 orphan _ SpaceAfter=No|Translit=köpkük
|
||||
3 ، _ PUNCT Y _ 2 punct _ Translit=,
|
||||
4 دەريا _ NOUN N _ 7 nmod:poss _ SpaceAfter=No|Translit=derya
|
||||
5 ، _ PUNCT Y _ 4 punct _ Translit=,
|
||||
6 كۆل _ NOUN N _ 4 conj _ Translit=köl
|
||||
7 سۇلىرىنى _ NOUN N _ 9 obj _ Translit=sulirini
|
||||
8 سۈپسۈزۈك _ ADJ A _ 9 advmod _ Translit=süpsüzük
|
||||
9 تۇرۇشقا _ VERB V _ 1 conj _ SpaceAfter=No|Translit=turushqa
|
||||
10 ، _ PUNCT Y _ 1 punct _ Translit=,
|
||||
11 دەل _ ADV D _ 13 compound:redup _ Translit=del
|
||||
12 - _ PUNCT Y _ 11 punct _ Translit=-
|
||||
13 دەرەخلەرنى _ NOUN N _ 17 obj _ Translit=derexlerni
|
||||
14 بۈك _ ADJ A _ 16 compound _ Translit=bük
|
||||
15 - _ PUNCT Y _ 14 punct _ Translit=-
|
||||
16 باراقسان _ ADJ A _ 17 advmod _ Translit=baraqsan
|
||||
17 بولۇشقا _ VERB V _ 9 orphan _ SpaceAfter=No|Translit=bolushqa
|
||||
18 ، _ PUNCT Y _ 17 punct _ Translit=,
|
||||
19 ھايۋانلارنى _ NOUN N _ 24 obj _ Translit=haywanlarni
|
||||
20 ئەركىن _ ADJ A _ 21 compound:redup _ Translit=erkin
|
||||
21 ئازادە _ ADJ A _ 22 advmod _ Translit=azade
|
||||
22 ياشاشقا _ NOUN N _ 24 advcl _ Translit=yashashqa
|
||||
23 ئىگە _ NOUN N _ 24 compound _ Translit=ige
|
||||
24 قىلىش _ VERB V _ 1 conj _ SpaceAfter=No|Translit=qilish
|
||||
25 . _ PUNCT Y _ 1 punct _ SpaceAfter=No|Translit=.
|
||||
26 . _ PUNCT Y _ 1 punct _ SpaceAfter=No|Translit=.
|
||||
27 . _ PUNCT Y _ 1 punct _ Translit=.
|
||||
28 بىزنىڭ _ PRON P _ 30 nmod:poss _ Translit=bizning
|
||||
29 ئورتاق _ ADJ A _ 30 amod _ Translit=ortaq
|
||||
30 ئارزۇيىمىز _ NOUN N _ 0 root _ SpaceAfter=No|Translit=arzuyimiz
|
||||
31 . _ PUNCT Y _ 30 punct _ Translit=.
|
||||
|
||||
# sent_id = s2
|
||||
# text = بۇ بۆلەكتىكى تېكىستلەرنى ئوقۇش ئارقىلىق، كىشىلەرنىڭ ھايۋانلار ۋە ئۆسۈملۈكلەرگە قانداق مۇئامىلە قىلغانلىقى، ئاقىۋىتىنىڭ قانداق بولغانلىقىنى كۆرۈپ باقايلى،
|
||||
1 بۇ _ PRON P _ 2 det _ Translit=bu
|
||||
2 بۆلەكتىكى _ NOUN N _ 3 nmod _ Translit=bölektiki
|
||||
3 تېكىستلەرنى _ NOUN N _ 4 obj _ Translit=tëkistlerni
|
||||
4 ئوقۇش _ VERB V _ 18 advcl _ Translit=oqush
|
||||
5 ئارقىلىق _ ADP R _ 4 case _ SpaceAfter=No|Translit=arqiliq
|
||||
6 ، _ PUNCT Y _ 5 punct _ Translit=,
|
||||
7 كىشىلەرنىڭ _ NOUN N _ 13 nsubj _ Translit=kishilerning
|
||||
8 ھايۋانلار _ NOUN N _ 13 obl _ Translit=haywanlar
|
||||
9 ۋە _ CCONJ C _ 10 cc _ Translit=we
|
||||
10 ئۆسۈملۈكلەرگە _ NOUN N _ 8 conj _ Translit=ösümlüklerge
|
||||
11 قانداق _ PRON P _ 13 advmod _ Translit=qandaq
|
||||
12 مۇئامىلە _ NOUN N _ 13 compound _ Translit=muamile
|
||||
13 قىلغانلىقى _ VERB V _ 18 conj _ SpaceAfter=No|Translit=qilghanliqi
|
||||
14 ، _ PUNCT Y _ 13 punct _ Translit=,
|
||||
15 ئاقىۋىتىنىڭ _ NOUN N _ 17 nsubj _ Translit=aqiwitining
|
||||
16 قانداق _ PRON P _ 17 advmod _ Translit=qandaq
|
||||
17 بولغانلىقىنى _ VERB V _ 18 obj _ Translit=bolghanliqini
|
||||
18 كۆرۈپ _ VERB V _ 0 root _ Translit=körüp
|
||||
19 باقايلى _ VERB V _ 18 aux _ SpaceAfter=No|Translit=baqayli
|
||||
20 ، _ PUNCT Y _ 19 punct _ Translit=,
|
||||
|
||||
# sent_id = s3
|
||||
# text = يەنە ئەتراپىمىزدىكى مۇھىتنى ياخشى كۆزىتىپ، مۇھىتنى قوغداش ئۈچۈن نېمىلەرنى قىلالايدىغانلىقىمىز توغرۇلۇق ئويلىنىپ باقايلى.
|
||||
1 يەنە _ ADV D _ 13 cc _ Translit=yene
|
||||
2 ئەتراپىمىزدىكى _ NOUN N _ 3 amod _ Translit=etrapimizdiki
|
||||
3 مۇھىتنى _ NOUN N _ 5 obj _ Translit=muhitni
|
||||
4 ياخشى _ ADJ A _ 5 advmod _ Translit=yaxshi
|
||||
5 كۆزىتىپ _ VERB V _ 13 advcl _ SpaceAfter=No|Translit=közitip
|
||||
6 ، _ PUNCT Y _ 5 punct _ Translit=,
|
||||
7 مۇھىتنى _ NOUN N _ 8 obj _ Translit=muhitni
|
||||
8 قوغداش _ VERB V _ 11 advcl _ Translit=qoghdash
|
||||
9 ئۈچۈن _ CCONJ C _ 8 case _ Translit=üchün
|
||||
10 نېمىلەرنى _ PRON P _ 11 obj _ Translit=nëmilerni
|
||||
11 قىلالايدىغانلىقىمىز _ VERB V _ 13 obj _ Translit=qilalaydighanliqimiz
|
||||
12 توغرۇلۇق _ ADP R _ 11 case _ Translit=toghruluq
|
||||
13 ئويلىنىپ _ VERB V _ 0 root _ Translit=oylinip
|
||||
14 باقايلى _ VERB V _ 13 aux _ SpaceAfter=No|Translit=baqayli
|
||||
15 . _ PUNCT Y _ 14 punct _ Translit=.
|
||||
|
||||
# sent_id = s4
|
||||
# text = بىر يىلى باھار كۈنلىرىنىڭ بىرىدە، شىۋېتسارىيىنىڭ بىر ۋوگزالىدا ھاۋا تەڭشىگۈچ ئورنىتىلغان چىرايلىق، ئازادە بىر پويىز قوزغىلىش ئالدىدا تۇراتتى.
|
||||
1 بىر _ NUM M _ 2 nummod _ Translit=bir
|
||||
2 يىلى _ NOUN N _ 20 nmod:tmod _ Translit=yili
|
||||
3 باھار _ NOUN N _ 4 nmod:poss _ Translit=bahar
|
||||
4 كۈنلىرىنىڭ _ NOUN N _ 5 nmod:part _ Translit=künlirining
|
||||
5 بىرىدە _ NUM M _ 20 nmod:tmod _ SpaceAfter=No|Translit=biride
|
||||
6 ، _ PUNCT Y _ 5 punct _ Translit=,
|
||||
7 شىۋېتسارىيىنىڭ _ NOUN N _ 9 nmod:poss _ Translit=shiwëtsariyining
|
||||
8 بىر _ NUM M _ 9 det _ Translit=bir
|
||||
9 ۋوگزالىدا _ NOUN N _ 20 obl _ Translit=wogzalida
|
||||
10 ھاۋا _ NOUN N _ 11 compound _ Translit=hawa
|
||||
11 تەڭشىگۈچ _ NOUN N _ 12 nsubj _ Translit=tengshigüch
|
||||
12 ئورنىتىلغان _ NOUN N _ 17 amod _ Translit=ornitilghan
|
||||
13 چىرايلىق _ ADJ A _ 17 amod _ SpaceAfter=No|Translit=chirayliq
|
||||
14 ، _ PUNCT Y _ 13 punct _ Translit=,
|
||||
15 ئازادە _ ADJ A _ 13 conj _ Translit=azade
|
||||
16 بىر _ NUM M _ 17 det _ Translit=bir
|
||||
17 پويىز _ NOUN N _ 20 nsubj _ Translit=poyiz
|
||||
18 قوزغىلىش _ VERB V _ 19 nmod:poss _ Translit=qozghilish
|
||||
19 ئالدىدا _ NOUN N _ 20 obl _ Translit=aldida
|
||||
20 تۇراتتى _ VERB V _ 0 root _ SpaceAfter=No|Translit=turatti
|
||||
21 . _ PUNCT Y _ 20 punct _ Translit=.
|
||||
|
||||
# sent_id = s5
|
||||
# text = ۋوگزال سۇپىسى ئۇزاتقۇچىلار بىلەن تولۇپ كەتكەنىدى.
|
||||
1 ۋوگزال _ NOUN N _ 2 nmod:poss _ Translit=wogzal
|
||||
2 سۇپىسى _ NOUN N _ 5 nsubj _ Translit=supisi
|
||||
3 ئۇزاتقۇچىلار _ NOUN N _ 5 obl _ Translit=uzatquchilar
|
||||
4 بىلەن _ ADP R _ 3 case _ Translit=bilen
|
||||
5 تولۇپ _ VERB V _ 0 root _ Translit=tolup
|
||||
6 كەتكەنىدى _ VERB V _ 5 aux _ SpaceAfter=No|Translit=ketkenidi
|
||||
7 . _ PUNCT Y _ 6 punct _ Translit=.
|
||||
|
||||
# sent_id = s6
|
||||
# text = ئۇلارنىڭ ئۇزاتماقچى بولغىنى ئۆزگىچە مىھمان - قارلىغاچلار ئىدى.
|
||||
1 ئۇلارنىڭ _ PRON P _ 2 nsubj _ Translit=ularning
|
||||
2 ئۇزاتماقچى _ NOUN N _ 5 acl _ Translit=uzatmaqchi
|
||||
3 بولغىنى _ AUX V _ 2 cop _ Translit=bolghini
|
||||
4 ئۆزگىچە _ ADJ A _ 5 amod _ Translit=özgiche
|
||||
5 مىھمان _ NOUN N _ 7 appos _ Translit=mihman
|
||||
6 - _ PUNCT Y _ 5 punct _ Translit=-
|
||||
7 قارلىغاچلار _ NOUN N _ 0 root _ Translit=qarlighachlar
|
||||
8 ئىدى _ AUX V _ 7 cop _ SpaceAfter=No|Translit=idi
|
||||
9 . _ PUNCT Y _ 8 punct _ Translit=.
|
||||
|
||||
samples/Common Workflow Language/trunk-peak-score.cwl (Normal file, 36 lines)
@@ -0,0 +1,36 @@
#!/usr/bin/env cwl-runner
# Originally from
# https://github.com/Duke-GCB/GGR-cwl/blob/54e897263a702ff1074c8ac814b4bf7205d140dd/utils/trunk-peak-score.cwl
# Released under the MIT License:
# https://github.com/Duke-GCB/GGR-cwl/blob/54e897263a702ff1074c8ac814b4bf7205d140dd/LICENSE
# Converted to CWL v1.0 syntax using
# https://github.com/common-workflow-language/cwl-upgrader
# and polished by Michael R. Crusoe <mrc@commonwl.org>
# All modifications also released under the MIT License
cwlVersion: v1.0
class: CommandLineTool
doc: Trunk scores in ENCODE bed6+4 files

hints:
  DockerRequirement:
    dockerPull: dukegcb/workflow-utils

inputs:
  peaks:
    type: File
  sep:
    type: string
    default: \t

outputs:
  trunked_scores_peaks:
    type: stdout

baseCommand: awk

arguments:
  - -F $(inputs.sep)
  - BEGIN{OFS=FS}$5>1000{$5=1000}{print}
  - $(inputs.peaks.path)

stdout: $(inputs.peaks.nameroot).trunked_scores$(inputs.peaks.nameext)
samples/DataWeave/customInterpolator.dwl (Normal file, 12 lines)
@@ -0,0 +1,12 @@
fun SQL(literals, parts) = ''
---
[
    SQL `SELECT * FROM table WHERE id = $(1) AND name = $('a')`,
    SQL `$('p')`,
    SQL `$('a')$('b')`,
    SQL `$('a')---$('b')`,
    SQL `---$('a')---$('b')---`,
    SQL `$('p')bbb`,
    SQL `aaa$('p')`,
    SQL `aaa$('p')bbb`
]
samples/DataWeave/directives.dwl (Normal file, 9 lines)
@@ -0,0 +1,9 @@
%dw 2.0
var number = 1234
fun foo(func,name="Mariano") = func(name)
input payload application/test arg="value"
output application/json
---
{
    foo: "bar"
}
samples/DataWeave/functions.dwl (Normal file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
%dw 2.0
|
||||
var x=(param1, param2) -> { "$param1": param2 }
|
||||
var y=(param1, param2 = "c") -> { "$param1": param2 }
|
||||
var toUser = (user) -> { name: user.name, lastName: user.lastName }
|
||||
fun z(param1, param2) = { "$param1": param2 }
|
||||
var a = { name: "Mariano" , toUser: ((param1, param2) -> { "$param1": param2 }) }
|
||||
var applyFirst = (array, func) -> (func(array[0]) ++ array[1 to -1])
|
||||
|
||||
var nested = (array, func) -> (a) -> (b) -> (c) -> array map func(a ++ b ++ c)
|
||||
|
||||
|
||||
fun f2(a1, a2) = ""
|
||||
fun f3(a1:String, a2:Number):String = a1
|
||||
fun f4(a1:String, a2:(a:Number) -> Number):String = a1
|
||||
---
|
||||
result: {
|
||||
a: x("a", "b"),
|
||||
b: y("a"),
|
||||
c: y("a", "b"),
|
||||
users: { (in1 map ((user) -> { user: (toUser(user) ++ user) })) },
|
||||
d: z("a", "b"),
|
||||
e: a.toUser("name","Mariano"),
|
||||
f: a.toUser("name","Mariano").name,
|
||||
f: applyFirst("mariano", (s) -> upper(s) ),
|
||||
g: [] map (s) -> upper(s),
|
||||
h: 1 f2 2
|
||||
}
|
||||
samples/DataWeave/literals.dwl (Normal file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
%dw 2.0
|
||||
---
|
||||
{
|
||||
"boolean":{
|
||||
"true" : true,
|
||||
"false": false
|
||||
},
|
||||
"Number": {
|
||||
"int": 123,
|
||||
"decimal": 123.23
|
||||
},
|
||||
"string": {
|
||||
"singleQuote" : 'A String',
|
||||
"doubleQuote" : "A String"
|
||||
},
|
||||
"regex": /foo/,
|
||||
"date": {
|
||||
a: |2003-10-01|,
|
||||
b: |2005-045|,
|
||||
c: |2003-W14-3|,
|
||||
d: |23:57:59|,
|
||||
e: |23:57:30.700|,
|
||||
f: |23:50:30Z|,
|
||||
g: |+13:00|,
|
||||
h: |Z|,
|
||||
i: |-02:00|,
|
||||
j: |2005-06-02T15:10:16|,
|
||||
k: |2005-06-02T15:10:16Z|,
|
||||
l: |2005-06-02T15:10:16+03:00|,
|
||||
m: |P12Y7M11D|,
|
||||
n: |P12Y5M|,
|
||||
o: |P45DT9H20M8S|,
|
||||
p: |PT9H20M8S|
|
||||
}
|
||||
}
|
||||
|
||||
samples/DataWeave/match.dwl (Normal file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
{
|
||||
// Regex Pattern Matching (Can be named or unnamed)
|
||||
a: in0.phones map $ match {
|
||||
case matches /\+(\d+)\s\((\d+)\)\s(\d+\-\d+)/ -> { country: $[0], area: $[1], number: $[2] }
|
||||
case matches /\((\d+)\)\s(\d+\-\d+)/ -> { area: $[1], number: $[2] }
|
||||
case phone matches /\((\d+)\)\s(\d+\-\d+)/ -> { area: phone[1], number: phone[2] }
|
||||
},
|
||||
// Type Pattern Matching (Can be named or unnamed)
|
||||
b: in0.object match {
|
||||
case is Object -> { object: $ }
|
||||
case is Number -> { number: $ }
|
||||
// This is how you name variables if needed
|
||||
case y is Boolean -> { boolean: y }
|
||||
},
|
||||
// Literal Pattern Matching (Can be named or unnamed)
|
||||
c: in0.value match {
|
||||
case "Emiliano" -> { string: $ }
|
||||
case 123 -> { number: $ }
|
||||
// This is how you name variables if needed
|
||||
case value: "Mariano" -> { name: value }
|
||||
},
|
||||
// Boolean Expression Pattern Matching (Always named)
|
||||
d: in0.value match {
|
||||
case x if x > 30 -> { biggerThan30: x }
|
||||
case x if x == 9 -> { nine: x }
|
||||
},
|
||||
// Default matches
|
||||
e: in0.value match {
|
||||
case "Emiliano" -> "string"
|
||||
case 3.14 -> number
|
||||
else -> "1234"
|
||||
}
|
||||
}
|
||||
samples/Edje Data Collection/mild.edc (Normal file, 2061 lines; diff suppressed because it is too large)
samples/HCL/terraform.tfvars (Normal file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
# Terragrunt is a thin wrapper for Terraform that provides extra tools for working with multiple Terraform modules,
|
||||
# remote state, and locking: https://github.com/gruntwork-io/terragrunt
|
||||
terragrunt = {
|
||||
# Configure Terragrunt to automatically store tfstate files in an S3 bucket
|
||||
remote_state {
|
||||
backend = "s3"
|
||||
config {
|
||||
encrypt = true
|
||||
bucket = "acme-main-terraform-state"
|
||||
key = "${path_relative_to_include()}/terraform.tfstate"
|
||||
region = "us-east-1"
|
||||
dynamodb_table = "terraform-locks"
|
||||
}
|
||||
}
|
||||
|
||||
# Configure Terragrunt to use common var files to help you keep often-repeated variables (e.g., account ID) DRY.
|
||||
# Note that even though Terraform automatically pulls in terraform.tfvars, we include it explicitly at the end of the
|
||||
# list to make sure its variables override anything in the common var files.
|
||||
terraform {
|
||||
extra_arguments "common_vars" {
|
||||
commands = ["${get_terraform_commands_that_need_vars()}"]
|
||||
|
||||
optional_var_files = [
|
||||
"${get_tfvars_dir()}/${find_in_parent_folders("account.tfvars", "skip-account-if-does-not-exist")}",
|
||||
"${get_tfvars_dir()}/${find_in_parent_folders("region.tfvars", "skip-region-if-does-not-exist")}",
|
||||
"${get_tfvars_dir()}/${find_in_parent_folders("env.tfvars", "skip-env-if-does-not-exist")}",
|
||||
"${get_tfvars_dir()}/terraform.tfvars"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
key1 = "val1"
|
||||
key2 = 0
|
||||
key3 = 1
|
||||
key4 = true
|
||||
|
||||
# Sample comments
|
||||
key5 = false
|
||||
|
||||
key6 = ["hello", "from", "gruntwork.io"]
|
||||
|
||||
key7 = {
|
||||
key1 = "hello"
|
||||
key2 = "from"
|
||||
key3 = "gruntwork.io"
|
||||
}
|
||||
|
||||
key8 = [
|
||||
{
|
||||
keyA = "hello"
|
||||
keyB = "there"
|
||||
},
|
||||
{
|
||||
keyA = "hello"
|
||||
keyB = "there"
|
||||
}
|
||||
]
|
||||
samples/HTML+Django/_worker.jinja2 (Normal file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
<h1>Workers</h1>
|
||||
|
||||
<table class="workers">
|
||||
<tr>
|
||||
<th>Job server</th>
|
||||
<th>IP</th>
|
||||
<th>File descriptor</th>
|
||||
<th>Client ID</th>
|
||||
<th>Functions</th>
|
||||
</tr>
|
||||
|
||||
{% for server_info in server_infos %}
|
||||
<tr {% if server_info['failed'] %} class="failure" {% endif %} >
|
||||
|
||||
<th>{{ server_info['hostport'][0] }}:{{ server_info['hostport'][1] }}</th>
|
||||
<th>
|
||||
{%- if server_info['failed'] -%} Not responding! {%- endif -%}
|
||||
</th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
<th></th>
|
||||
</tr>
|
||||
|
||||
{% if not server_info['failed'] %}
|
||||
{% for worker in server_info['workers'] %}
|
||||
<tr>
|
||||
<td class="server"></td>
|
||||
<td class="ip">{{ worker['ip'] }}</td>
|
||||
<td class="file_descriptor">{{ worker['file_descriptor'] }}</td>
|
||||
<td class="client_id">{{ worker['client_id'] }}</td>
|
||||
<td class="functions">
|
||||
{{ worker['tasks']|join(', ') }}
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</table>
|
||||
samples/HXML/checkstyle.hxml (Normal file, 10 lines)
@@ -0,0 +1,10 @@
buildGlobal.hxml
-lib mcover:2.1.1
-D unittest
-x TestMain
--macro mcover.MCover.coverage(['checkstyle'], ['src'], ['checkstyle.reporter', 'checkstyle.Main'])

--next
-cmd neko run -s src -s test -p resources/static-analysis.txt
-cmd neko run --default-config resources/default-config.json
-cmd neko run -c resources/default-config.json
31
samples/HXML/vshaxe.hxml
Normal file
@@ -0,0 +1,31 @@
# This file is generated with vshaxe-build - DO NOT EDIT MANUALLY!
-cp vscode-extern/src
-cp src-api
-cp src
-cp server/src
-cp server/protocol/src
-cp server/formatter/src
-cp server/test
-cp server/formatter/test
-cp syntaxes/src
-D analyzer-optimize
-D js_unflatten
-D hxnodejs-no-version-warning
-D JSTACK_MAIN=vshaxe.Main.main
-D JSTACK_ASYNC_ENTRY
-D JSTACK_FORMAT=vscode
-lib hxnodejs
-lib jstack
-lib haxe-hxparser
-lib compiletime
-lib mockatoo
-lib mconsole
-lib hx3compat
-lib hxargs
-lib json2object
-lib yaml
-lib plist
-debug
-js bin/build.js
--no-inline
-main Build
227
samples/INI/filenames/buildozer.spec
Normal file
@@ -0,0 +1,227 @@
|
||||
[app]
|
||||
|
||||
# (str) Title of your application
|
||||
title = Kivy Kazam
|
||||
|
||||
# (str) Package name
|
||||
package.name = kivykazam
|
||||
|
||||
# (str) Package domain (needed for android/ios packaging)
|
||||
package.domain = org.test
|
||||
|
||||
# (str) Source code where the main.py live
|
||||
source.dir = .
|
||||
|
||||
# (list) Source files to include (let empty to include all the files)
|
||||
source.include_exts = py,png,jpg,kv,atlas
|
||||
|
||||
# (list) List of inclusions using pattern matching
|
||||
#source.include_patterns = assets/*,images/*.png
|
||||
|
||||
# (list) Source files to exclude (let empty to not exclude anything)
|
||||
#source.exclude_exts = spec
|
||||
|
||||
# (list) List of directory to exclude (let empty to not exclude anything)
|
||||
#source.exclude_dirs = tests, bin
|
||||
|
||||
# (list) List of exclusions using pattern matching
|
||||
#source.exclude_patterns = license,images/*/*.jpg
|
||||
|
||||
# (str) Application versioning (method 1)
|
||||
version = 0.1
|
||||
|
||||
# (str) Application versioning (method 2)
|
||||
# version.regex = __version__ = ['"](.*)['"]
|
||||
# version.filename = %(source.dir)s/main.py
|
||||
|
||||
# (list) Application requirements
|
||||
# comma separated e.g. requirements = sqlite3,kivy
|
||||
requirements = kivy
|
||||
|
||||
# (str) Custom source folders for requirements
|
||||
# Sets custom source for any requirements with recipes
|
||||
# requirements.source.kivy = ../../kivy
|
||||
|
||||
# (list) Garden requirements
|
||||
#garden_requirements =
|
||||
|
||||
# (str) Presplash of the application
|
||||
#presplash.filename = %(source.dir)s/data/presplash.png
|
||||
|
||||
# (str) Icon of the application
|
||||
#icon.filename = %(source.dir)s/data/icon.png
|
||||
|
||||
# (str) Supported orientation (one of landscape, portrait or all)
|
||||
orientation = all
|
||||
|
||||
# (list) List of service to declare
|
||||
#services = NAME:ENTRYPOINT_TO_PY,NAME2:ENTRYPOINT2_TO_PY
|
||||
|
||||
#
|
||||
# OSX Specific
|
||||
#
|
||||
|
||||
#
|
||||
# author = © Copyright Info
|
||||
|
||||
#
|
||||
# Android specific
|
||||
#
|
||||
|
||||
# (bool) Indicate if the application should be fullscreen or not
|
||||
fullscreen = 1
|
||||
|
||||
# (list) Permissions
|
||||
#android.permissions = INTERNET
|
||||
|
||||
# (int) Android API to use
|
||||
#android.api = 19
|
||||
|
||||
# (int) Minimum API required
|
||||
android.minapi = 13
|
||||
|
||||
# (int) Android SDK version to use
|
||||
#android.sdk = 20
|
||||
|
||||
# (str) Android NDK version to use
|
||||
#android.ndk = 9c
|
||||
|
||||
# (bool) Use --private data storage (True) or --dir public storage (False)
|
||||
#android.private_storage = True
|
||||
|
||||
# (str) Android NDK directory (if empty, it will be automatically downloaded.)
|
||||
#android.ndk_path =
|
||||
|
||||
# (str) Android SDK directory (if empty, it will be automatically downloaded.)
|
||||
#android.sdk_path =
|
||||
|
||||
# (str) ANT directory (if empty, it will be automatically downloaded.)
|
||||
#android.ant_path =
|
||||
|
||||
# (str) python-for-android git clone directory (if empty, it will be automatically cloned from github)
|
||||
#android.p4a_dir =
|
||||
|
||||
# (list) python-for-android whitelist
|
||||
#android.p4a_whitelist =
|
||||
|
||||
# (bool) If True, then skip trying to update the Android sdk
|
||||
# This can be useful to avoid excess Internet downloads or save time
|
||||
# when an update is due and you just want to test/build your package
|
||||
# android.skip_update = False
|
||||
|
||||
# (str) Android entry point, default is ok for Kivy-based app
|
||||
#android.entrypoint = org.renpy.android.PythonActivity
|
||||
|
||||
# (list) List of Java .jar files to add to the libs so that pyjnius can access
|
||||
# their classes. Don't add jars that you do not need, since extra jars can slow
|
||||
# down the build process. Allows wildcards matching, for example:
|
||||
# OUYA-ODK/libs/*.jar
|
||||
#android.add_jars = foo.jar,bar.jar,path/to/more/*.jar
|
||||
|
||||
# (list) List of Java files to add to the android project (can be java or a
|
||||
# directory containing the files)
|
||||
#android.add_src =
|
||||
|
||||
# (str) python-for-android branch to use, if not master, useful to try
|
||||
# not yet merged features.
|
||||
#android.branch = master
|
||||
|
||||
# (str) OUYA Console category. Should be one of GAME or APP
|
||||
# If you leave this blank, OUYA support will not be enabled
|
||||
#android.ouya.category = GAME
|
||||
|
||||
# (str) Filename of OUYA Console icon. It must be a 732x412 png image.
|
||||
#android.ouya.icon.filename = %(source.dir)s/data/ouya_icon.png
|
||||
|
||||
# (str) XML file to include as an intent filters in <activity> tag
|
||||
#android.manifest.intent_filters =
|
||||
|
||||
# (list) Android additional libraries to copy into libs/armeabi
|
||||
#android.add_libs_armeabi = libs/android/*.so
|
||||
#android.add_libs_armeabi_v7a = libs/android-v7/*.so
|
||||
#android.add_libs_x86 = libs/android-x86/*.so
|
||||
#android.add_libs_mips = libs/android-mips/*.so
|
||||
|
||||
# (bool) Indicate whether the screen should stay on
|
||||
# Don't forget to add the WAKE_LOCK permission if you set this to True
|
||||
#android.wakelock = False
|
||||
|
||||
# (list) Android application meta-data to set (key=value format)
|
||||
#android.meta_data =
|
||||
|
||||
# (list) Android library project to add (will be added in the
|
||||
# project.properties automatically.)
|
||||
#android.library_references =
|
||||
|
||||
# (str) Android logcat filters to use
|
||||
#android.logcat_filters = *:S python:D
|
||||
|
||||
# (bool) Copy library instead of making a libpymodules.so
|
||||
#android.copy_libs = 1
|
||||
|
||||
#
|
||||
# iOS specific
|
||||
#
|
||||
|
||||
# (str) Path to a custom kivy-ios folder
|
||||
#ios.kivy_ios_dir = ../kivy-ios
|
||||
|
||||
# (str) Name of the certificate to use for signing the debug version
|
||||
# Get a list of available identities: buildozer ios list_identities
|
||||
#ios.codesign.debug = "iPhone Developer: <lastname> <firstname> (<hexstring>)"
|
||||
|
||||
# (str) Name of the certificate to use for signing the release version
|
||||
#ios.codesign.release = %(ios.codesign.debug)s
|
||||
|
||||
|
||||
[buildozer]
|
||||
|
||||
# (int) Log level (0 = error only, 1 = info, 2 = debug (with command output))
|
||||
log_level = 1
|
||||
|
||||
# (int) Display warning if buildozer is run as root (0 = False, 1 = True)
|
||||
warn_on_root = 1
|
||||
|
||||
# (str) Path to build artifact storage, absolute or relative to spec file
|
||||
# build_dir = ./.buildozer
|
||||
|
||||
# (str) Path to build output (i.e. .apk, .ipa) storage
|
||||
# bin_dir = ./bin
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# List as sections
|
||||
#
|
||||
# You can define all the "list" as [section:key].
|
||||
# Each line will be considered as an option of the list.
|
||||
# Let's take [app] / source.exclude_patterns.
|
||||
# Instead of doing:
|
||||
#
|
||||
#[app]
|
||||
#source.exclude_patterns = license,data/audio/*.wav,data/images/original/*
|
||||
#
|
||||
# This can be translated into:
|
||||
#
|
||||
#[app:source.exclude_patterns]
|
||||
#license
|
||||
#data/audio/*.wav
|
||||
#data/images/original/*
|
||||
#
|
||||
|
||||
|
||||
# -----------------------------------------------------------------------------
|
||||
# Profiles
|
||||
#
|
||||
# You can extend section / key with a profile
|
||||
# For example, you want to deploy a demo version of your application without
|
||||
# HD content. You could first change the title to add "(demo)" in the name
|
||||
# and extend the excluded directories to remove the HD content.
|
||||
#
|
||||
#[app@demo]
|
||||
#title = My Application (demo)
|
||||
#
|
||||
#[app:source.exclude_patterns@demo]
|
||||
#images/hd/*
|
||||
#
|
||||
# Then, invoke the command line with the "demo" profile:
|
||||
#
|
||||
#buildozer --profile demo android debug
|
||||
136
samples/JSON/Material_Alpha_01.gltf
Normal file
@@ -0,0 +1,136 @@
|
||||
{
|
||||
"accessors": [
|
||||
{
|
||||
"bufferView": 0,
|
||||
"componentType": 5126,
|
||||
"count": 4,
|
||||
"type": "VEC3",
|
||||
"max": [
|
||||
0.5,
|
||||
0.5,
|
||||
0.0
|
||||
],
|
||||
"min": [
|
||||
-0.5,
|
||||
-0.5,
|
||||
0.0
|
||||
],
|
||||
"name": "Positions Accessor"
|
||||
},
|
||||
{
|
||||
"bufferView": 1,
|
||||
"componentType": 5126,
|
||||
"count": 4,
|
||||
"type": "VEC4",
|
||||
"name": "Colors Accessor"
|
||||
},
|
||||
{
|
||||
"bufferView": 2,
|
||||
"componentType": 5126,
|
||||
"count": 4,
|
||||
"type": "VEC2",
|
||||
"name": "UV Accessor 0"
|
||||
},
|
||||
{
|
||||
"bufferView": 3,
|
||||
"componentType": 5125,
|
||||
"count": 6,
|
||||
"type": "SCALAR",
|
||||
"name": "Indices Accessor"
|
||||
}
|
||||
],
|
||||
"asset": {
|
||||
"generator": "glTF Asset Generator",
|
||||
"version": "2.0",
|
||||
"extras": {
|
||||
"Attributes": "VertexColor_Vector4_Float - AlphaMode_Mask - AlphaCutoff - DoubleSided - BaseColorFactor - BaseColorTexture"
|
||||
}
|
||||
},
|
||||
"buffers": [
|
||||
{
|
||||
"uri": "Material_Alpha_01.bin",
|
||||
"byteLength": 168
|
||||
}
|
||||
],
|
||||
"bufferViews": [
|
||||
{
|
||||
"buffer": 0,
|
||||
"byteLength": 48,
|
||||
"name": "Positions"
|
||||
},
|
||||
{
|
||||
"buffer": 0,
|
||||
"byteOffset": 48,
|
||||
"byteLength": 64,
|
||||
"name": "Colors"
|
||||
},
|
||||
{
|
||||
"buffer": 0,
|
||||
"byteOffset": 112,
|
||||
"byteLength": 32,
|
||||
"name": "Texture Coords 0"
|
||||
},
|
||||
{
|
||||
"buffer": 0,
|
||||
"byteOffset": 144,
|
||||
"byteLength": 24,
|
||||
"name": "Indices"
|
||||
}
|
||||
],
|
||||
"images": [
|
||||
{
|
||||
"uri": "Texture_baseColor.png"
|
||||
}
|
||||
],
|
||||
"materials": [
|
||||
{
|
||||
"pbrMetallicRoughness": {
|
||||
"baseColorFactor": [
|
||||
1.0,
|
||||
1.0,
|
||||
1.0,
|
||||
0.6
|
||||
],
|
||||
"baseColorTexture": {
|
||||
"index": 0
|
||||
}
|
||||
},
|
||||
"alphaMode": "MASK",
|
||||
"alphaCutoff": 0.7,
|
||||
"doubleSided": true
|
||||
}
|
||||
],
|
||||
"meshes": [
|
||||
{
|
||||
"primitives": [
|
||||
{
|
||||
"attributes": {
|
||||
"POSITION": 0,
|
||||
"COLOR_0": 1,
|
||||
"TEXCOORD_0": 2
|
||||
},
|
||||
"indices": 3,
|
||||
"material": 0
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"nodes": [
|
||||
{
|
||||
"mesh": 0
|
||||
}
|
||||
],
|
||||
"scene": 0,
|
||||
"scenes": [
|
||||
{
|
||||
"nodes": [
|
||||
0
|
||||
]
|
||||
}
|
||||
],
|
||||
"textures": [
|
||||
{
|
||||
"source": 0
|
||||
}
|
||||
]
|
||||
}
|
||||
25
samples/JSON/filenames/.htmlhintrc
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"alt-require": true,
|
||||
"attr-lowercase": true,
|
||||
"attr-no-duplication": true,
|
||||
"attr-unsafe-chars": true,
|
||||
"attr-value-double-quotes": true,
|
||||
"attr-value-not-empty": false,
|
||||
"doctype-first": true,
|
||||
"doctype-html5": true,
|
||||
"head-script-disabled": false,
|
||||
"href-abs-or-rel": false,
|
||||
"id-class-ad-disabled": true,
|
||||
"id-class-value": false,
|
||||
"id-unique": true,
|
||||
"inline-script-disabled": true,
|
||||
"inline-style-disabled": true,
|
||||
"space-tab-mixed-disabled": "space",
|
||||
"spec-char-escape": true,
|
||||
"src-not-empty": true,
|
||||
"style-disabled": false,
|
||||
"tag-pair": true,
|
||||
"tag-self-close": false,
|
||||
"tagname-lowercase": true,
|
||||
"title-require": true
|
||||
}
|
||||
88
samples/JSON/filenames/.jscsrc
Normal file
@@ -0,0 +1,88 @@
|
||||
{
|
||||
"requireCurlyBraces": [
|
||||
"if",
|
||||
"else",
|
||||
"for",
|
||||
"while",
|
||||
"do",
|
||||
"try",
|
||||
"catch"
|
||||
],
|
||||
"requireSpaceAfterKeywords": [
|
||||
"if",
|
||||
"else",
|
||||
"for",
|
||||
"while",
|
||||
"do",
|
||||
"switch",
|
||||
"case",
|
||||
"return",
|
||||
"try",
|
||||
"catch",
|
||||
"typeof"
|
||||
],
|
||||
"requireSpaceBeforeBlockStatements": true,
|
||||
"requireParenthesesAroundIIFE": true,
|
||||
"requireSpacesInConditionalExpression": true,
|
||||
"disallowSpacesInNamedFunctionExpression": {
|
||||
"beforeOpeningRoundBrace": true
|
||||
},
|
||||
"disallowSpacesInFunctionDeclaration": {
|
||||
"beforeOpeningRoundBrace": true
|
||||
},
|
||||
"requireSpaceBetweenArguments": true,
|
||||
"requireBlocksOnNewline": true,
|
||||
"disallowEmptyBlocks": true,
|
||||
"disallowSpacesInsideArrayBrackets": true,
|
||||
"disallowSpacesInsideParentheses": true,
|
||||
"disallowDanglingUnderscores": true,
|
||||
"requireCommaBeforeLineBreak": true,
|
||||
"disallowSpacesInCallExpression": true,
|
||||
"disallowSpaceAfterPrefixUnaryOperators": true,
|
||||
"disallowSpaceBeforePostfixUnaryOperators": true,
|
||||
"disallowSpaceBeforeBinaryOperators": [
|
||||
","
|
||||
],
|
||||
"requireSpacesInForStatement": true,
|
||||
"requireSpaceBeforeBinaryOperators": true,
|
||||
"requireSpaceAfterBinaryOperators": true,
|
||||
"disallowKeywords": [
|
||||
"with"
|
||||
],
|
||||
"disallowMixedSpacesAndTabs": true,
|
||||
"disallowTrailingWhitespace": true,
|
||||
"disallowKeywordsOnNewLine": [
|
||||
"else"
|
||||
],
|
||||
"requireLineFeedAtFileEnd": true,
|
||||
"requireCapitalizedConstructors": true,
|
||||
"requireDotNotation": true,
|
||||
"disallowNewlineBeforeBlockStatements": true,
|
||||
"disallowMultipleLineStrings": true,
|
||||
"requireSpaceBeforeObjectValues": true,
|
||||
"validateQuoteMarks": "'",
|
||||
"requireSpaceAfterLineComment": true,
|
||||
"validateIndentation": 2,
|
||||
"validateLineBreaks": "LF",
|
||||
"disallowSpacesInFunction": {
|
||||
"beforeOpeningRoundBrace": true
|
||||
},
|
||||
"requireSpacesInFunction": {
|
||||
"beforeOpeningCurlyBrace": true
|
||||
},
|
||||
"disallowMultipleLineBreaks": true,
|
||||
"disallowYodaConditions": true,
|
||||
"disallowFunctionDeclarations": true,
|
||||
"disallowMultipleVarDecl": "exceptUndefined",
|
||||
"requirePaddingNewlinesBeforeKeywords": [
|
||||
"do",
|
||||
"for",
|
||||
"if",
|
||||
"switch",
|
||||
"try",
|
||||
"void",
|
||||
"while",
|
||||
"return"
|
||||
],
|
||||
"excludeFiles": ["**/node_modules/**", "**/min/**", "**/*.min.js"]
|
||||
}
|
||||
19
samples/JSON/filenames/.tern-config
Normal file
@@ -0,0 +1,19 @@
{
  "ecmaVersion": 6,
  "libs": [
    "browser",
    "jquery"
  ],
  "dontLoad": [
    "node_modules/**"
  ],
  "plugins": {
    "es_modules": {},
    "node": {},
    "angular": {},
    "doc_comment": {
      "fullDocs": true,
      "strong": true
    }
  }
}
15
samples/JSON/filenames/.tern-project
Normal file
@@ -0,0 +1,15 @@
{
  "ecmaVersion": 6,
  "libs": [],
  "loadEagerly": [
    "src/app/**/*.js"
  ],
  "dontLoad": [
    "node_modules"
  ],
  "plugins": {
    "requirejs": {
      "baseURL": "src"
    }
  }
}
18
samples/JSON/http_response.avsc
Normal file
@@ -0,0 +1,18 @@
{
  "type": "record",
  "name": "Response",
  "namespace": "org.rflow.message.data.http",
  "aliases": [],
  "fields": [
    {"name": "client_ip", "type": ["string", "null"]},
    {"name": "client_port", "type": ["int", "null"]},
    {"name": "server_ip", "type": ["string", "null"]},
    {"name": "server_port", "type": ["int", "null"]},

    {"name": "protocol", "type": "string"},
    {"name": "status_code", "type": "int"},
    {"name": "status_reason_phrase", "type": "string"},
    {"name": "headers", "type": {"type": "map", "values": "string"}},
    {"name": "content", "type": "bytes"}
  ]
}
19
samples/JSON/manifest.webapp
Normal file
@@ -0,0 +1,19 @@
{
  "version": "1.0",
  "name": "demo",
  "description": "demo",
  "launch_path": "/index.html",
  "icons": {
    "128": "/res/icon.png"
  },
  "developer": {
    "name": "Cocos2d-html5",
    "url": "http://cocos2d-x.org/"
  },
  "default_locale": "en",
  "installs_allowed_from": [
    "*"
  ],
  "orientation": "portrait-primary",
  "fullscreen": "true"
}
56
samples/JSON/manifest.webmanifest
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"short_name": "CC Splitter",
|
||||
"name": "Credit Card Splitter",
|
||||
"start_url": "./index.html",
|
||||
"display": "standalone",
|
||||
"theme_color": "#000000",
|
||||
"background_color": "#ffffff",
|
||||
"lang": "en-GB",
|
||||
"icons": [
|
||||
{
|
||||
"src": "logo-16.png",
|
||||
"sizes": "16x16",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "logo-36.png",
|
||||
"sizes": "36x36",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "logo-48.png",
|
||||
"sizes": "48x48",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "logo-72.png",
|
||||
"sizes": "72x72",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "logo-96.png",
|
||||
"sizes": "96x96",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "logo-144.png",
|
||||
"sizes": "144x144",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "logo-192.png",
|
||||
"sizes": "192x192",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "logo-250.png",
|
||||
"sizes": "250x250",
|
||||
"type": "image/png"
|
||||
},
|
||||
{
|
||||
"src": "logo-512.png",
|
||||
"sizes": "512x512",
|
||||
"type": "image/png"
|
||||
}
|
||||
]
|
||||
}
|
||||
122
samples/JSON/small.tfstate
Normal file
@@ -0,0 +1,122 @@
|
||||
{
|
||||
"version": 1,
|
||||
"serial": 12,
|
||||
"modules": [
|
||||
{
|
||||
"path": [
|
||||
"root"
|
||||
],
|
||||
"outputs": {
|
||||
"public_az1_subnet_id": "subnet-d658bba0",
|
||||
"region": "us-west-2",
|
||||
"vpc_cidr": "10.201.0.0/16",
|
||||
"vpc_id": "vpc-65814701"
|
||||
},
|
||||
"resources": {
|
||||
"aws_key_pair.onprem": {
|
||||
"type": "aws_key_pair",
|
||||
"primary": {
|
||||
"id": "onprem",
|
||||
"attributes": {
|
||||
"id": "onprem",
|
||||
"key_name": "onprem",
|
||||
"public_key": "foo"
|
||||
},
|
||||
"meta": {
|
||||
"schema_version": "1"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": [
|
||||
"root",
|
||||
"bootstrap"
|
||||
],
|
||||
"outputs": {
|
||||
"consul_bootstrap_dns": "consul.bootstrap"
|
||||
},
|
||||
"resources": {
|
||||
"aws_route53_record.oasis-consul-bootstrap-a": {
|
||||
"type": "aws_route53_record",
|
||||
"depends_on": [
|
||||
"aws_route53_zone.oasis-consul-bootstrap"
|
||||
],
|
||||
"primary": {
|
||||
"id": "Z68734P5178QN_consul.bootstrap_A",
|
||||
"attributes": {
|
||||
"failover": "",
|
||||
"fqdn": "consul.bootstrap",
|
||||
"health_check_id": "",
|
||||
"id": "Z68734P5178QN_consul.bootstrap_A",
|
||||
"name": "consul.bootstrap",
|
||||
"records.#": "6",
|
||||
"records.1148461392": "10.201.3.8",
|
||||
"records.1169574759": "10.201.2.8",
|
||||
"records.1206973758": "10.201.1.8",
|
||||
"records.1275070284": "10.201.2.4",
|
||||
"records.1304587643": "10.201.3.4",
|
||||
"records.1313257749": "10.201.1.4",
|
||||
"set_identifier": "",
|
||||
"ttl": "300",
|
||||
"type": "A",
|
||||
"weight": "-1",
|
||||
"zone_id": "Z68734P5178QN"
|
||||
}
|
||||
}
|
||||
},
|
||||
"aws_route53_record.oasis-consul-bootstrap-ns": {
|
||||
"type": "aws_route53_record",
|
||||
"depends_on": [
|
||||
"aws_route53_zone.oasis-consul-bootstrap",
|
||||
"aws_route53_zone.oasis-consul-bootstrap",
|
||||
"aws_route53_zone.oasis-consul-bootstrap",
|
||||
"aws_route53_zone.oasis-consul-bootstrap",
|
||||
"aws_route53_zone.oasis-consul-bootstrap"
|
||||
],
|
||||
"primary": {
|
||||
"id": "Z68734P5178QN_consul.bootstrap_NS",
|
||||
"attributes": {
|
||||
"failover": "",
|
||||
"fqdn": "consul.bootstrap",
|
||||
"health_check_id": "",
|
||||
"id": "Z68734P5178QN_consul.bootstrap_NS",
|
||||
"name": "consul.bootstrap",
|
||||
"records.#": "4",
|
||||
"records.1796532126": "ns-512.awsdns-00.net.",
|
||||
"records.2728059479": "ns-1536.awsdns-00.co.uk.",
|
||||
"records.4092160370": "ns-1024.awsdns-00.org.",
|
||||
"records.456007465": "ns-0.awsdns-00.com.",
|
||||
"set_identifier": "",
|
||||
"ttl": "30",
|
||||
"type": "NS",
|
||||
"weight": "-1",
|
||||
"zone_id": "Z68734P5178QN"
|
||||
}
|
||||
}
|
||||
},
|
||||
"aws_route53_zone.oasis-consul-bootstrap": {
|
||||
"type": "aws_route53_zone",
|
||||
"primary": {
|
||||
"id": "Z68734P5178QN",
|
||||
"attributes": {
|
||||
"comment": "Used to bootstrap consul dns",
|
||||
"id": "Z68734P5178QN",
|
||||
"name": "consul.bootstrap",
|
||||
"name_servers.#": "4",
|
||||
"name_servers.0": "ns-0.awsdns-00.com.",
|
||||
"name_servers.1": "ns-1024.awsdns-00.org.",
|
||||
"name_servers.2": "ns-1536.awsdns-00.co.uk.",
|
||||
"name_servers.3": "ns-512.awsdns-00.net.",
|
||||
"tags.#": "0",
|
||||
"vpc_id": "vpc-65814701",
|
||||
"vpc_region": "us-west-2",
|
||||
"zone_id": "Z68734P5178QN"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
77
samples/JSON/terraform.tfstate.backup
Normal file
@@ -0,0 +1,77 @@
|
||||
{
|
||||
"version": 3,
|
||||
"terraform_version": "0.11.2",
|
||||
"serial": 5,
|
||||
"lineage": "5ffde9fb-4814-4609-a8a6-f1054f1779c1",
|
||||
"modules": [
|
||||
{
|
||||
"path": [
|
||||
"root"
|
||||
],
|
||||
"outputs": {},
|
||||
"resources": {
|
||||
"aws_iam_role.iam_for_lambda": {
|
||||
"type": "aws_iam_role",
|
||||
"depends_on": [],
|
||||
"primary": {
|
||||
"id": "iam_for_lambda",
|
||||
"attributes": {
|
||||
"arn": "arn:aws:iam::387412527620:role/iam_for_lambda",
|
||||
"assume_role_policy": "{\"Version\":\"2012-10-17\",\"Statement\":[{\"Sid\":\"\",\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"lambda.amazonaws.com\"},\"Action\":\"sts:AssumeRole\"}]}",
|
||||
"create_date": "2018-01-27T04:05:27Z",
|
||||
"force_detach_policies": "false",
|
||||
"id": "iam_for_lambda",
|
||||
"name": "iam_for_lambda",
|
||||
"path": "/",
|
||||
"unique_id": "AROAINXWJF2AIJOZMQXOE"
|
||||
},
|
||||
"meta": {},
|
||||
"tainted": false
|
||||
},
|
||||
"deposed": [],
|
||||
"provider": "provider.aws"
|
||||
},
|
||||
"aws_lambda_function.query-fitbit": {
|
||||
"type": "aws_lambda_function",
|
||||
"depends_on": [
|
||||
"aws_iam_role.iam_for_lambda"
|
||||
],
|
||||
"primary": {
|
||||
"id": "query-fitbit",
|
||||
"attributes": {
|
||||
"arn": "arn:aws:lambda:us-east-1:387412527620:function:query-fitbit",
|
||||
"dead_letter_config.#": "0",
|
||||
"description": "",
|
||||
"environment.#": "0",
|
||||
"filename": "../lambda/query-fitbit.zip",
|
||||
"function_name": "query-fitbit",
|
||||
"handler": "exports.handler",
|
||||
"id": "query-fitbit",
|
||||
"invoke_arn": "arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/arn:aws:lambda:us-east-1:387412527620:function:query-fitbit/invocations",
|
||||
"kms_key_arn": "",
|
||||
"last_modified": "2018-01-27T04:11:31.185+0000",
|
||||
"memory_size": "128",
|
||||
"publish": "false",
|
||||
"qualified_arn": "arn:aws:lambda:us-east-1:387412527620:function:query-fitbit:$LATEST",
|
||||
"reserved_concurrent_executions": "0",
|
||||
"role": "arn:aws:iam::387412527620:role/iam_for_lambda",
|
||||
"runtime": "nodejs6.10",
|
||||
"source_code_hash": "mNFY3lZD4jFsVq/f353zMD9MLSBvoaEbObIB1KBnxq4=",
|
||||
"tags.%": "0",
|
||||
"timeout": "3",
|
||||
"tracing_config.#": "1",
|
||||
"tracing_config.0.mode": "PassThrough",
|
||||
"version": "$LATEST",
|
||||
"vpc_config.#": "0"
|
||||
},
|
||||
"meta": {},
|
||||
"tainted": false
|
||||
},
|
||||
"deposed": [],
|
||||
"provider": "provider.aws"
|
||||
}
|
||||
},
|
||||
"depends_on": []
|
||||
}
|
||||
]
|
||||
}
|
||||
23
samples/JSON5/filenames/.jslintrc
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"indent": 4,
|
||||
"maxlen": 120,
|
||||
"browser": false,
|
||||
"couch": false,
|
||||
"devel": false,
|
||||
"node": false,
|
||||
"rhino": false,
|
||||
"white": true,
|
||||
"plusplus":true,
|
||||
"stupid":true,
|
||||
|
||||
"predef": [
|
||||
"setTimeout",
|
||||
"module",
|
||||
"exports",
|
||||
"define",
|
||||
"require",
|
||||
"window",
|
||||
"buster",
|
||||
"sinon"
|
||||
]
|
||||
}
|
||||
955
samples/JavaScript/constant_fold.mjs
Normal file
@@ -0,0 +1,955 @@
|
||||
// consumes <stdin> and performs constant folding
|
||||
// echo '"use strict";"_"[0],1+2;' | node constant_fold.js
|
||||
import _NodePath from '../NodePath';
|
||||
const {NodePath} = _NodePath;
|
||||
import _WalkCombinator from '../WalkCombinator';
|
||||
const {WalkCombinator} = _WalkCombinator;
|
||||
|
||||
const $CONSTEXPR = Symbol.for('$CONSTEXPR');
|
||||
const $CONSTVALUE = Symbol.for('$CONSTVALUE');
|
||||
const IS_EMPTY = path => {
|
||||
return (path.node.type === 'BlockStatement' && path.node.body.length === 0) ||
|
||||
path.node.type === 'EmptyStatement';
|
||||
};
|
||||
const IN_PRAGMA_POS = path => {
|
||||
if (path.parent && Array.isArray(path.parent.node)) {
|
||||
const siblings = path.parent.node;
|
||||
for (let i = 0; i < path.key; i++) {
|
||||
// preceded by non-pragma
|
||||
if (
|
||||
siblings[i].type !== 'ExpressionStatement' ||
|
||||
!IS_CONSTEXPR(siblings[i].expression) ||
|
||||
typeof CONSTVALUE(siblings[i].expression) !== 'string'
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
const IS_PRAGMA = path => {
|
||||
if (path.parent && Array.isArray(path.parent.node)) {
|
||||
const siblings = path.parent.node;
|
||||
for (let i = 0; i < path.key + 1; i++) {
|
||||
// preceded by non-pragma
|
||||
if (
|
||||
siblings[i].type !== 'ExpressionStatement' ||
|
||||
!IS_CONSTEXPR(siblings[i].expression) ||
|
||||
typeof CONSTVALUE(siblings[i].expression) !== 'string'
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
// worst case is the completion value
|
||||
const IS_NOT_COMPLETION = path => {
|
||||
while (true) {
|
||||
if (!path.parent) {
|
||||
return true;
|
||||
}
|
||||
if (
|
||||
Array.isArray(path.parent.node) &&
|
||||
path.key !== path.parent.node.length - 1
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
path = path.parent;
|
||||
while (Array.isArray(path.node)) {
|
||||
path = path.parent;
|
||||
}
|
||||
if (/Function/.test(path.node.type)) {
|
||||
return true;
|
||||
} else if (path.node.type === 'Program') {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
};
|
||||
const REMOVE_IF_EMPTY = path => {
|
||||
if (IS_EMPTY(path)) REMOVE(path);
|
||||
return null;
|
||||
};
|
||||
const REPLACE_IF_EMPTY = (path, folded) => {
|
||||
if (IS_EMPTY(path)) return REPLACE(path, folded);
|
||||
return path;
|
||||
};
|
||||
const REMOVE = path => {
|
||||
if (Array.isArray(path.parent.node)) {
|
||||
path.parent.node.splice(path.key, 1);
|
||||
} else {
|
||||
path.parent.node[path.key] = null;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
const REPLACE = (path, folded) => {
|
||||
const replacement = new NodePath(path.parent, folded, path.key);
|
||||
path.parent.node[path.key] = folded;
|
||||
return replacement;
|
||||
};
|
||||
// no mutation, this is an atomic value
|
||||
const NEG_ZERO = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'UnaryExpression',
|
||||
operator: '-',
|
||||
argument: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
});
|
||||
const INFINITY = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'BinaryExpression',
|
||||
operator: '/',
|
||||
left: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 1,
|
||||
}),
|
||||
right: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
});
|
||||
const NEG_INFINITY = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'BinaryExpression',
|
||||
operator: '/',
|
||||
left: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 1,
|
||||
}),
|
||||
right: NEG_ZERO,
|
||||
});
|
||||
const EMPTY = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'EmptyStatement',
|
||||
});
|
||||
const NULL = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: null,
|
||||
});
|
||||
const NAN = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'BinaryExpression',
|
||||
operator: '/',
|
||||
left: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
right: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
});
|
||||
const UNDEFINED = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'UnaryExpression',
|
||||
operator: 'void',
|
||||
argument: Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'Literal',
|
||||
value: 0,
|
||||
}),
|
||||
});
|
||||
// ESTree doesn't like negative numeric literals
|
||||
// this also preserves -0
|
||||
const IS_UNARY_NEGATIVE = node => {
|
||||
if (
|
||||
node.type === 'UnaryExpression' &&
|
||||
node.operator === '-' &&
|
||||
typeof node.argument.value === 'number' &&
|
||||
node.argument.value === node.argument.value &&
|
||||
node.argument.type === 'Literal'
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const IS_CONSTEXPR = node => {
|
||||
if (typeof node !== 'object' || node === null) {
|
||||
return false;
|
||||
}
|
||||
// DONT CALCULATE THINGS MULTIPLE TIMES!!@!@#
|
||||
if (node[$CONSTEXPR]) return true;
|
||||
if (node.type === 'ArrayExpression') {
|
||||
for (let i = 0; i < node.elements.length; i++) {
|
||||
const element = node.elements[i];
|
||||
// hole == null
|
||||
if (element !== null && !IS_CONSTEXPR(element)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (node.type === 'ObjectExpression') {
|
||||
for (let i = 0; i < node.properties.length; i++) {
|
||||
const element = node.properties[i];
|
||||
if (element.kind !== 'init') return false;
|
||||
if (element.method) return false;
|
||||
let key;
|
||||
if (element.computed) {
|
||||
// be sure {["y"]:1} works
|
||||
if (!IS_CONSTEXPR(element.key)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (!IS_CONSTEXPR(element.value)) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
if (node.type === 'Literal' || IS_UNDEFINED(node) || IS_NAN(node)) {
|
||||
return true;
|
||||
}
|
||||
if (IS_UNARY_NEGATIVE(node)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
const IS_NAN = node => {
|
||||
return node === NAN;
|
||||
};
|
||||
const IS_UNDEFINED = node => {
|
||||
return node === UNDEFINED;
|
||||
};
|
||||
const CONSTVALUE = node => {
|
||||
if (node[$CONSTVALUE]) {
|
||||
return node[$CONSTVALUE];
|
||||
}
|
||||
if (IS_UNDEFINED(node)) return void 0;
|
||||
if (IS_NAN(node)) return +'_';
|
||||
if (!IS_CONSTEXPR(node)) throw new Error('Not a CONSTEXPR');
|
||||
if (node.type === 'ArrayExpression') {
|
||||
let ret = [];
|
||||
ret.length = node.elements.length;
|
||||
for (let i = 0; i < node.elements.length; i++) {
|
||||
if (node.elements[i] !== null) {
|
||||
ret[i] = CONSTVALUE(node.elements[i]);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
if (node.type === 'ObjectExpression') {
|
||||
let ret = Object.create(null);
|
||||
for (let i = 0; i < node.properties.length; i++) {
|
||||
const element = node.properties[i];
|
||||
let key;
|
||||
if (element.computed) {
|
||||
key = `${CONSTVALUE(element.key)}`;
|
||||
}
|
||||
else {
|
||||
key = element.key.name;
|
||||
}
|
||||
Object.defineProperty(ret, key, {
|
||||
// duplicate keys...
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: CONSTVALUE(element.value),
|
||||
enumerable: true
|
||||
});
|
||||
}
|
||||
Object.freeze(ret);
|
||||
return ret;
|
||||
}
|
||||
if (IS_UNARY_NEGATIVE(node)) {
|
||||
return -node.argument.value;
|
||||
}
|
||||
if (node.regex !== void 0) {
|
||||
return new RegExp(node.regex.pattern, node.regex.flags);
|
||||
}
|
||||
return node.value;
|
||||
};
|
||||
const CONSTEXPRS = new Map();
|
||||
CONSTEXPRS.set(void 0, UNDEFINED);
|
||||
CONSTEXPRS.set(+'_', NAN);
|
||||
CONSTEXPRS.set(null, NULL);
|
||||
const TO_CONSTEXPR = value => {
|
||||
if (value === -Infinity) {
|
||||
return NEG_INFINITY;
|
||||
}
|
||||
if (value === Infinity) {
|
||||
return INFINITY;
|
||||
}
|
||||
let is_neg_zero = 1 / value === -Infinity;
|
||||
if (is_neg_zero) return NEG_ZERO;
|
||||
if (CONSTEXPRS.has(value)) {
|
||||
return CONSTEXPRS.get(value);
|
||||
}
|
||||
if (typeof value === 'number') {
|
||||
if (value < 0) {
|
||||
const CONSTEXPR = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
[$CONSTVALUE]: value,
|
||||
type: 'UnaryExpression',
|
||||
operator: '-',
|
||||
argument: Object.freeze({ type: 'Literal', value: -value }),
|
||||
});
|
||||
CONSTEXPRS.set(value, CONSTEXPR);
|
||||
return CONSTEXPR;
|
||||
}
|
||||
}
|
||||
if (
|
||||
value === null ||
|
||||
typeof value === 'number' ||
|
||||
typeof value === 'boolean' ||
|
||||
typeof value === 'string'
|
||||
) {
|
||||
const CONSTEXPR = Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
[$CONSTVALUE]: value,
|
||||
type: 'Literal',
|
||||
value,
|
||||
});
|
||||
CONSTEXPRS.set(value, CONSTEXPR);
|
||||
return CONSTEXPR;
|
||||
}
|
||||
// have to generate new one every time :-/
|
||||
if (Array.isArray(value)) {
|
||||
return Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'ArrayExpression',
|
||||
elements: Object.freeze(value.map(TO_CONSTEXPR)),
|
||||
});
|
||||
}
|
||||
if (typeof value === 'object' && Object.getPrototypeOf(value) === Object.getPrototypeOf({}) && [...Object.getOwnPropertySymbols(value)].length === 0) {
|
||||
return Object.freeze({
|
||||
[$CONSTEXPR]: true,
|
||||
type: 'ObjectExpression',
|
||||
properties: Object.freeze(
|
||||
[...Object.getOwnPropertyNames(value)].map(key => {
|
||||
if (!('value' in Object.getOwnPropertyDescriptor(value, key))) {
|
||||
throw Error('Not a CONSTVALUE (found a setter or getter?)');
|
||||
}
|
||||
return {
|
||||
type: 'Property',
|
||||
kind: 'init',
|
||||
method: false,
|
||||
shorthand: false,
|
||||
computed: true,
|
||||
key: {
|
||||
type: 'Literal',
|
||||
value: key
|
||||
},
|
||||
value: TO_CONSTEXPR(value[key])
|
||||
}
|
||||
})),
|
||||
});
|
||||
}
|
||||
throw Error('Not a CONSTVALUE (did you pass a RegExp?)');
|
||||
};
|
||||
|
||||
// THIS DOES NOT HANDLE NODE SPECIFIC CASES LIKE IfStatement
|
||||
const FOLD_EMPTY = function*(path) {
|
||||
if (
|
||||
path &&
|
||||
path.node &&
|
||||
path.parent &&
|
||||
Array.isArray(path.parent.node) &&
|
||||
IS_EMPTY(path)
|
||||
) {
|
||||
REMOVE(path);
|
||||
return yield;
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
|
||||
// THIS DOES NOT HANDLE NODE SPECIFIC CASES LIKE IfStatement
|
||||
const FOLD_TEMPLATE = function*(path) {
|
||||
if (
|
||||
path &&
|
||||
path.node &&
|
||||
path.type === 'TemplateLiteral'
|
||||
) {
|
||||
let updated = false;
|
||||
for (let i = 0; i < path.node.expressions.length; i++) {
|
||||
if (IS_CONSTEXPR(path.node.expressions[i])) {
|
||||
//let
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_EXPR_STMT = function*(path) {
|
||||
// TODO: enforce completion value checking
|
||||
if (path && path.node && path.node.type === 'ExpressionStatement') {
|
||||
// merge all the adjacent expression statements into sequences
|
||||
if (Array.isArray(path.parent.node)) {
|
||||
// could have nodes after it
|
||||
const siblings = path.parent.node;
|
||||
if (!IS_PRAGMA(path)) {
|
||||
if (path.key < siblings.length - 1) {
|
||||
const mergeable = [path.node];
|
||||
for (let needle = path.key + 1; needle < siblings.length; needle++) {
|
||||
if (siblings[needle].type !== 'ExpressionStatement') {
|
||||
break;
|
||||
}
|
||||
mergeable.push(siblings[needle]);
|
||||
}
|
||||
if (mergeable.length > 1) {
|
||||
siblings.splice(path.key, mergeable.length, {
|
||||
type: 'ExpressionStatement',
|
||||
expression: {
|
||||
type: 'SequenceExpression',
|
||||
expressions: mergeable.reduce(
|
||||
(acc, es) => {
|
||||
if (es.expression.type == 'SequenceExpression') {
|
||||
return [...acc, ...es.expression.expressions];
|
||||
} else {
|
||||
return [...acc, es.expression];
|
||||
}
|
||||
},
|
||||
[]
|
||||
),
|
||||
},
|
||||
});
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (IS_NOT_COMPLETION(path) && IS_CONSTEXPR(path.node.expression)) {
|
||||
return REPLACE(path, EMPTY);
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_WHILE = function*(path) {
|
||||
if (path && path.node) {
|
||||
if (path.node.type === 'DoWhileStatement') {
|
||||
console.error('FOLD_DOWHILE');
|
||||
REPLACE_IF_EMPTY(path.get(['body']), EMPTY);
|
||||
}
|
||||
if (path.node.type === 'WhileStatement') {
|
||||
console.error('FOLD_WHILE');
|
||||
let { test, consequent, alternate } = path.node;
|
||||
if (IS_CONSTEXPR(test)) {
|
||||
test = CONSTVALUE(test);
|
||||
if (!test) {
|
||||
return REPLACE(path, EMPTY);
|
||||
}
|
||||
}
|
||||
REPLACE_IF_EMPTY(path.get(['body']), EMPTY);
|
||||
}
|
||||
if (path.node.type === 'ForStatement') {
|
||||
console.error('FOLD_FOR');
|
||||
REPLACE_IF_EMPTY(path.get(['body']), EMPTY);
|
||||
let { init, test, update } = path.node;
|
||||
let updated = false;
|
||||
if (init && IS_CONSTEXPR(init)) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['init']), null);
|
||||
}
|
||||
if (test && IS_CONSTEXPR(test)) {
|
||||
let current = CONSTVALUE(test);
|
||||
let coerced = Boolean(current);
|
||||
// remove the test if it is always true
|
||||
if (coerced === true) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['test']), null);
|
||||
} else if (coerced !== current) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['test']), TO_CONSTEXPR(coerced));
|
||||
}
|
||||
}
|
||||
if (update && IS_CONSTEXPR(update)) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['update']), null);
|
||||
}
|
||||
if (updated) {
|
||||
return path;
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_IF = function*(path) {
|
||||
if (path && path.node && path.node.type === 'IfStatement') {
|
||||
let { test, consequent, alternate } = path.node;
|
||||
const is_not_completion = IS_NOT_COMPLETION(path);
|
||||
if (is_not_completion && !alternate) {
|
||||
if (IS_EMPTY(path.get(['consequent']))) {
|
||||
console.error('FOLD_IF_EMPTY_CONSEQUENT');
|
||||
REPLACE(path, {
|
||||
type: 'ExpressionStatement',
|
||||
expression: test,
|
||||
});
|
||||
return path.parent;
|
||||
}
|
||||
}
|
||||
if (alternate) {
|
||||
if (alternate.type === consequent.type) {
|
||||
if (consequent.type === 'ExpressionStatement') {
|
||||
console.error('FOLD_IF_BOTH_EXPRSTMT');
|
||||
REPLACE(path, {
|
||||
type: 'ExpressionStatement', expression:
|
||||
{
|
||||
type: 'ConditionalExpression',
|
||||
test: test,
|
||||
consequent: consequent.expression,
|
||||
alternate: alternate.expression,
|
||||
}});
|
||||
return path.parent;
|
||||
}
|
||||
else if (consequent.type === 'ReturnStatement' ||
|
||||
consequent.type === 'ThrowStatement') {
|
||||
console.error('FOLD_IF_BOTH_COMPLETIONS');
|
||||
REPLACE(path, {
|
||||
type: 'ExpressionStatement', expression:{
|
||||
type: consequent.type,
|
||||
argument: {
|
||||
type: 'ConditionalExpression',
|
||||
test: test,
|
||||
consequent: consequent.argument,
|
||||
alternate: alternate.argument,
|
||||
}}
|
||||
});
|
||||
return path.parent;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (is_not_completion && consequent.type === 'ExpressionStatement') {
|
||||
console.error('FOLD_IF_NON_COMPLETION_TO_&&');
|
||||
REPLACE(path, {
|
||||
type: 'ExpressionStatement',
|
||||
expression: {
|
||||
type: 'BinaryExpression',
|
||||
operator: '&&',
|
||||
left: test,
|
||||
right: consequent.expression,
|
||||
}
|
||||
});
|
||||
return path.parent;
|
||||
}
|
||||
if (IS_CONSTEXPR(test)) {
|
||||
test = CONSTVALUE(test);
|
||||
if (test) {
|
||||
return REPLACE(path, consequent);
|
||||
}
|
||||
if (alternate) {
|
||||
return REPLACE(path, alternate);
|
||||
}
|
||||
return REPLACE(path, EMPTY);
|
||||
}
|
||||
consequent = path.get(['consequent']);
|
||||
let updated;
|
||||
if (consequent.node !== EMPTY) {
|
||||
REPLACE_IF_EMPTY(consequent, EMPTY);
|
||||
if (consequent.parent.node[consequent.key] === EMPTY) {
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
if (alternate) {
|
||||
alternate = path.get(['alternate']);
|
||||
REMOVE_IF_EMPTY(alternate);
|
||||
if (path.node.alternate === null) {
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
if (updated) {
|
||||
return path;
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_SEQUENCE = function*(path) {
|
||||
if (path && path.node && path.node.type === 'SequenceExpression') {
|
||||
console.error('FOLD_SEQUENCE');
|
||||
// never delete the last value
|
||||
for (let i = 0; i < path.node.expressions.length - 1; i++) {
|
||||
if (IS_CONSTEXPR(path.node.expressions[i])) {
|
||||
path.node.expressions.splice(i, 1);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
if (path.node.expressions.length === 1) {
|
||||
return REPLACE(path, path.node.expressions[0]);
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_LOGICAL = function*(path) {
|
||||
if (path && path.node && path.node.type === 'LogicalExpression') {
|
||||
console.error('FOLD_LOGICAL');
|
||||
let { left, right, operator } = path.node;
|
||||
if (IS_CONSTEXPR(left)) {
|
||||
left = CONSTVALUE(left);
|
||||
if (operator === '||') {
|
||||
if (left) {
|
||||
return REPLACE(path, TO_CONSTEXPR(left));
|
||||
}
|
||||
return REPLACE(path, right);
|
||||
} else if (operator === '&&') {
|
||||
if (!left) {
|
||||
return REPLACE(path, TO_CONSTEXPR(left));
|
||||
}
|
||||
return REPLACE(path, right);
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_SWITCH = function*(path) {
|
||||
if (path && path.node && path.node.type === 'SwitchStatement') {
|
||||
let { discriminant, cases } = path.node;
|
||||
// if there are no cases, just become an expression
|
||||
if (cases.length === 0 && IS_NOT_COMPLETION(path)) {
|
||||
return REPLACE(path, {
|
||||
type: 'ExpressionStatement',
|
||||
expression: discriminant
|
||||
});
|
||||
}
|
||||
// if the discriminant is static
|
||||
// remove any preceding non-matching static cases
|
||||
// fold any trailing cases into the matching case
|
||||
if (cases.length > 1 && IS_CONSTEXPR(discriminant)) {
|
||||
const discriminant_value = CONSTVALUE(discriminant);
|
||||
for (var i = 0; i < cases.length; i++) {
|
||||
const test = cases[i].test;
|
||||
if (IS_CONSTEXPR(test)) {
|
||||
let test_value = CONSTVALUE(test);
|
||||
if (discriminant_value === test_value) {
|
||||
let new_consequent = cases[i].consequent;
|
||||
if (i < cases.length - 1) {
|
||||
for (let fallthrough of cases.slice(i+1)) {
|
||||
new_consequent.push(...fallthrough.consequent);
|
||||
}
|
||||
}
|
||||
cases[i].consequent = new_consequent;
|
||||
REPLACE(path.get(['cases']), [cases[i]]);
|
||||
return path;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// we had a dynamic case need to bail
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_UNREACHABLE = function*(path) {
|
||||
if (path && path.node && path.parent && Array.isArray(path.parent.node)) {
|
||||
if (path.node.type === 'ReturnStatement' ||
|
||||
path.node.type === 'ContinueStatement' ||
|
||||
path.node.type === 'BreakStatement' ||
|
||||
path.node.type === 'ThrowStatement') {
|
||||
const next_key = path.key + 1;
|
||||
path.parent.node.splice(next_key, path.parent.node.length - next_key);
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
}
|
||||
const FOLD_CONDITIONAL = function*(path) {
|
||||
if (path && path.node && path.node.type === 'ConditionalExpression') {
|
||||
console.error('FOLD_CONDITIONAL');
|
||||
let { test, consequent, alternate } = path.node;
|
||||
if (IS_CONSTEXPR(test)) {
|
||||
test = CONSTVALUE(test);
|
||||
if (test) {
|
||||
return REPLACE(path, consequent);
|
||||
}
|
||||
return REPLACE(path, alternate);
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_BINARY = function*(path) {
|
||||
if (
|
||||
path &&
|
||||
path.node &&
|
||||
path.node.type === 'BinaryExpression' &&
|
||||
!IS_NAN(path.node)
|
||||
) {
|
||||
console.error('FOLD_BINARY');
|
||||
let { left, right, operator } = path.node;
|
||||
if (operator === '==' || operator === '!=') {
|
||||
let updated = false;
|
||||
if (IS_UNDEFINED(left)) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['left']), NULL);
|
||||
}
|
||||
if (IS_UNDEFINED(right)) {
|
||||
updated = true;
|
||||
REPLACE(path.get(['right']), NULL);
|
||||
}
|
||||
if (updated) {
|
||||
return path;
|
||||
}
|
||||
}
|
||||
if (path.node !== INFINITY && path.node !== NEG_INFINITY && IS_CONSTEXPR(left) && IS_CONSTEXPR(right)) {
|
||||
left = CONSTVALUE(left);
|
||||
right = CONSTVALUE(right);
|
||||
let value;
|
||||
if ((!left || typeof left !== 'object') && (!right || typeof right !== 'object')) {
|
||||
if (operator === '+') {
|
||||
value = left + right;
|
||||
} else if (operator === '-') {
|
||||
value = left - right;
|
||||
} else if (operator === '*') {
|
||||
value = left * right;
|
||||
} else if (operator === '/') {
|
||||
value = left / right;
|
||||
} else if (operator === '%') {
|
||||
value = left % right;
|
||||
} else if (operator === '==') {
|
||||
value = left == right;
|
||||
} else if (operator === '!=') {
|
||||
value = left != right;
|
||||
} else if (operator === '===') {
|
||||
value = left === right;
|
||||
} else if (operator === '!==') {
|
||||
value = left !== right;
|
||||
} else if (operator === '<') {
|
||||
value = left < right;
|
||||
} else if (operator === '<=') {
|
||||
value = left <= right;
|
||||
} else if (operator === '>') {
|
||||
value = left > right;
|
||||
} else if (operator === '>=') {
|
||||
value = left >= right;
|
||||
} else if (operator === '<<') {
|
||||
value = left << right;
|
||||
} else if (operator === '>>') {
|
||||
value = left >> right;
|
||||
} else if (operator === '>>>') {
|
||||
value = left >>> right;
|
||||
} else if (operator === '|') {
|
||||
value = left | right;
|
||||
} else if (operator === '&') {
|
||||
value = left & right;
|
||||
} else if (operator === '^') {
|
||||
value = left ^ right;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (operator === '==') value = false;
|
||||
if (operator === '===') value = false;
|
||||
if (operator === '!=') value = true;
|
||||
if (operator === '!==') value = true;
|
||||
if (operator === 'in' && typeof right === 'object' && right) {
|
||||
value = Boolean(Object.getOwnPropertyDescriptor(right, left));
|
||||
}
|
||||
}
|
||||
if (value !== void 0) {
|
||||
if (typeof value === 'string' || typeof value === 'boolean' || value === null) {
|
||||
return REPLACE(path, TO_CONSTEXPR(value));
|
||||
}
|
||||
if (typeof value === 'number') {
|
||||
return REPLACE(path, TO_CONSTEXPR(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_UNARY = function*(path) {
|
||||
if (path && path.node && path.node.type === 'UnaryExpression') {
|
||||
console.error('FOLD_UNARY');
|
||||
if (IS_CONSTEXPR(path.node)) {
|
||||
return yield path;
|
||||
}
|
||||
let { argument, operator } = path.node;
|
||||
if (IS_CONSTEXPR(argument)) {
|
||||
if (operator === 'void') {
|
||||
return REPLACE(path, UNDEFINED);
|
||||
}
|
||||
let value = CONSTVALUE(argument);
|
||||
if (operator === '-') {
|
||||
value = -value;
|
||||
} else if (operator === '+') {
|
||||
value = +value;
|
||||
} else if (operator === '~') {
|
||||
value = ~value;
|
||||
} else if (operator === '!') {
|
||||
value = !value;
|
||||
} else if (operator === 'typeof') {
|
||||
value = typeof value;
|
||||
} else if (operator === 'delete') {
|
||||
value = true;
|
||||
}
|
||||
return REPLACE(path, TO_CONSTEXPR(value));
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
const FOLD_EVAL = function*(path) {
|
||||
if (path && path.node && path.node.type === 'CallExpression' &&
|
||||
path.node.callee.type === 'Identifier' && path.node.callee.name === 'eval') {
|
||||
console.error('FOLD_EVAL');
|
||||
if (path.node.arguments.length === 1 && path.node.arguments[0].type === 'Literal') {
|
||||
let result = esprima.parse(`${
|
||||
CONSTVALUE(path.node.arguments[0])
|
||||
}`);
|
||||
if (result.body.length === 1 && result.body[0].type === 'ExpressionStatement') {
|
||||
return REPLACE(path, result.body[0].expression);
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
}
|
||||
const FOLD_MEMBER = function*(path) {
|
||||
if (path && path.node && path.node.type === 'MemberExpression') {
|
||||
console.error('FOLD_MEMBER');
|
||||
if (path.node.computed && path.node.property.type === 'Literal') {
|
||||
const current = `${CONSTVALUE(path.node.property)}`;
|
||||
if (typeof current === 'string' && /^[$_a-z][$_a-z\d]*$/i.test(current)) {
|
||||
path.node.computed = false;
|
||||
path.node.property = {
|
||||
type: 'Identifier',
|
||||
name: current,
|
||||
};
|
||||
return path;
|
||||
}
|
||||
}
|
||||
if (IS_CONSTEXPR(path.node.object)) {
|
||||
const value = CONSTVALUE(path.node.object);
|
||||
if (typeof value === 'string' || Array.isArray(value) || (value && typeof value === 'object')) {
|
||||
let key;
|
||||
if (IS_CONSTEXPR(path.node.property)) {
|
||||
key = `${CONSTVALUE(path.node.property)}`;
|
||||
}
|
||||
else if (!path.node.computed) {
|
||||
key = path.node.property.name;
|
||||
}
|
||||
if (key !== void 0) {
|
||||
const desc = Object.getOwnPropertyDescriptor(value, key);
|
||||
if (desc) {
|
||||
const folded = value[key];
|
||||
console.error('FOLDING', JSON.stringify(folded));
|
||||
if (IN_PRAGMA_POS(path) && typeof folded === 'string') {
|
||||
if (value.length > 1) {
|
||||
REPLACE(
|
||||
path.get(['object']),
|
||||
TO_CONSTEXPR(value.slice(key, key + 1))
|
||||
);
|
||||
REPLACE(path.get(['property']), TO_CONSTEXPR(0));
|
||||
return path;
|
||||
}
|
||||
} else {
|
||||
return REPLACE(path, TO_CONSTEXPR(value[key]));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
};
|
||||
|
||||
const $MIN = Symbol();
|
||||
const MIN_TRUE = Object.freeze({
|
||||
[$MIN]: true,
|
||||
type: 'UnaryExpression',
|
||||
operator: '!',
|
||||
argument: Object.freeze({
|
||||
[$MIN]: true,
|
||||
type: 'Literal',
|
||||
value: 0
|
||||
})
|
||||
});
|
||||
const MIN_FALSE = Object.freeze({
|
||||
[$MIN]: true,
|
||||
type: 'UnaryExpression',
|
||||
operator: '!',
|
||||
argument: Object.freeze({
|
||||
[$MIN]: true,
|
||||
type: 'Literal',
|
||||
value: 1
|
||||
})
|
||||
});
|
||||
const MIN_REPLACEMENTS = new Map;
|
||||
MIN_REPLACEMENTS.set(true, MIN_TRUE);
|
||||
MIN_REPLACEMENTS.set(false, MIN_FALSE);
|
||||
const MIN_VALUES = function*(path) {
|
||||
if (path && path.node && !path.node[$MIN] && IS_CONSTEXPR(path.node)) {
|
||||
let value = CONSTVALUE(path.node);
|
||||
if (MIN_REPLACEMENTS.has(value)) {
|
||||
console.error('MIN_VALUE', value)
|
||||
return REPLACE(path, MIN_REPLACEMENTS.get(value));
|
||||
}
|
||||
}
|
||||
return yield path;
|
||||
}
|
||||
|
||||
import esprima from 'esprima';
|
||||
import util from 'util';
|
||||
import escodegen from 'escodegen';
|
||||
const optimize = (src) => {
|
||||
const ROOT = new NodePath(
|
||||
null,
|
||||
esprima.parse(
|
||||
src,
|
||||
{
|
||||
// loc: true,
|
||||
// source: '<stdin>',
|
||||
}
|
||||
),
|
||||
null
|
||||
);
|
||||
// all of these are things that could affect completion value positions
|
||||
const walk_expressions = WalkCombinator.pipe(
|
||||
...[
|
||||
WalkCombinator.DEPTH_FIRST,
|
||||
{
|
||||
// We never work on Arrays
|
||||
*inputs(path) {
|
||||
if (Array.isArray(path)) return;
|
||||
return yield path;
|
||||
},
|
||||
},
|
||||
{ inputs: FOLD_UNREACHABLE },
|
||||
{ inputs: FOLD_IF },
|
||||
{ inputs: FOLD_SWITCH },
|
||||
{ inputs: FOLD_EXPR_STMT },
|
||||
{ inputs: FOLD_CONDITIONAL },
|
||||
{ inputs: FOLD_LOGICAL },
|
||||
{ inputs: FOLD_BINARY },
|
||||
{ inputs: FOLD_UNARY },
|
||||
{ inputs: FOLD_SEQUENCE },
|
||||
{ inputs: FOLD_MEMBER },
|
||||
{ inputs: FOLD_EMPTY },
|
||||
{ inputs: FOLD_WHILE },
|
||||
{ inputs: FOLD_EVAL },
|
||||
]
|
||||
).walk(ROOT);
|
||||
for (const _ of walk_expressions) {
|
||||
}
|
||||
const minify = WalkCombinator.pipe(
|
||||
...[
|
||||
WalkCombinator.DEPTH_FIRST,
|
||||
{
|
||||
// We never work on Arrays
|
||||
*inputs(path) {
|
||||
if (Array.isArray(path)) return;
|
||||
return yield path;
|
||||
},
|
||||
},
|
||||
{ inputs: MIN_VALUES },
|
||||
]
|
||||
).walk(ROOT);
|
||||
for (const _ of minify) {
|
||||
}
|
||||
return ROOT;
|
||||
}
|
||||
import mississippi from 'mississippi';
|
||||
process.stdin.pipe(
|
||||
mississippi.concat(buff => {
|
||||
const ROOT = optimize(`${buff}`)
|
||||
console.error(
|
||||
'%s',
|
||||
util.inspect(ROOT.node, {
|
||||
depth: null,
|
||||
colors: true,
|
||||
})
|
||||
);
|
||||
const out = escodegen.generate(ROOT.node);
|
||||
console.log(out);
|
||||
})
|
||||
);
|
||||
6
samples/JavaScript/entry.mjs
Normal file
@@ -0,0 +1,6 @@
import bar from './module.mjs';
function foo() {
  return "I am foo";
}
export {foo};
console.log(bar);
5
samples/JavaScript/module.mjs
Normal file
@@ -0,0 +1,5 @@
import {foo} from './entry.mjs';
console.log(foo());

const bar = "I am bar.";
export {bar as default};
19
samples/Linker Script/inject.x
Normal file
@@ -0,0 +1,19 @@
/* OUTPUT_FORMAT("elf32-littlearm", "elf32-bigarm", "elf32-littlearm") */
/* OUTPUT_ARCH(arm) */
ENTRY(__adbi$entry)
SECTIONS
{
  . = 0x00000000 + SIZEOF_HEADERS;

  .adbi : {
    *(.rodata)
    *(.rodata.*)
    *(.data) *(.data.*)
    *(.bss) *(.bss.*)
    *(.text)
    *(.text.*)
    *(.adbi)
    *(.adbi.*)
  } = 0

}
57
samples/Logos/NCHax.x
Normal file
@@ -0,0 +1,57 @@
|
||||
#import <UIKit/UIKit.h>
|
||||
#import <BulletinBoard/BBSectionInfo.h>
|
||||
#import <UIKit/UIImage+Private.h>
|
||||
#import <version.h>
|
||||
|
||||
static NSString *const kHBDPWeeAppIdentifier = @"ws.hbang.dailypaperweeapp";
|
||||
|
||||
#pragma mark - Change section header and icon
|
||||
|
||||
// courtesy of benno
|
||||
|
||||
BOOL isDailyPaper = NO;
|
||||
|
||||
%hook SBBulletinObserverViewController
|
||||
|
||||
- (void)_addSection:(BBSectionInfo *)section toCategory:(NSInteger)category widget:(id)widget {
|
||||
if ([section.sectionID isEqualToString:kHBDPWeeAppIdentifier]) {
|
||||
isDailyPaper = YES;
|
||||
%orig;
|
||||
isDailyPaper = NO;
|
||||
} else {
|
||||
%orig;
|
||||
}
|
||||
}
|
||||
|
||||
%end
|
||||
|
||||
%hook SBBulletinListSection
|
||||
|
||||
- (void)setDisplayName:(NSString *)displayName {
|
||||
%orig(isDailyPaper ? @"Current Wallpaper" : displayName);
|
||||
}
|
||||
|
||||
- (void)setIconImage:(UIImage *)iconImage {
|
||||
%orig(isDailyPaper ? [UIImage imageNamed:@"icon" inBundle:[NSBundle bundleWithPath:@"/Library/PreferenceBundles/DailyPaper.bundle"]] : iconImage);
|
||||
}
|
||||
|
||||
%end
|
||||
|
||||
#pragma mark - Enable by default
|
||||
|
||||
%hook SBNotificationCenterDataProviderController
|
||||
|
||||
- (NSArray *)_copyDefaultEnabledWidgetIDs {
|
||||
NSArray *defaultWidgets = %orig;
|
||||
return [[defaultWidgets arrayByAddingObject:kHBDPWeeAppIdentifier] copy];
|
||||
}
|
||||
|
||||
%end
|
||||
|
||||
#pragma mark - Constructor
|
||||
|
||||
%ctor {
|
||||
if (!IS_IOS_OR_NEWER(iOS_8_0)) {
|
||||
%init;
|
||||
}
|
||||
}
|
||||
42  samples/Logos/NoCarrier.x  Normal file
@@ -0,0 +1,42 @@
//
// NoCarrier.x
// NoCarrier
//
// Created by Jonas Gessner on 27.01.2014.
// Copyright (c) 2014 Jonas Gessner. All rights reserved.
//

#import <CoreGraphics/CoreGraphics.h>

#include <substrate.h>

%group main

%hook UIStatusBarServiceItemView

- (id)_serviceContentsImage {
    return nil;
}

- (CGFloat)extraLeftPadding {
    return 0.0f;
}

- (CGFloat)extraRightPadding {
    return 0.0f;
}

- (CGFloat)standardPadding {
    return 2.0f;
}

%end

%end


%ctor {
    @autoreleasepool {
        %init(main);
    }
}
239  samples/Logos/Tweak.x  Normal file
@@ -0,0 +1,239 @@
/*
 * ShadowSocks Per-App Proxy Plugin
 * https://github.com/linusyang/MobileShadowSocks
 *
 * Copyright (c) 2014 Linus Yang <laokongzi@gmail.com>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */

#include <UIKit/UIKit.h>
#include <libfinder/LFFinderController.h>

#define FUNC_NAME SCDynamicStoreCopyProxies
#define ORIG_FUNC original_ ## FUNC_NAME
#define CUST_FUNC custom_ ## FUNC_NAME

#define DECL_FUNC(ret, ...) \
    extern ret FUNC_NAME(__VA_ARGS__); \
    static ret (*ORIG_FUNC)(__VA_ARGS__); \
    ret CUST_FUNC(__VA_ARGS__)

#define HOOK_FUNC() \
    MSHookFunction(FUNC_NAME, (void *) CUST_FUNC, (void **) &ORIG_FUNC)

typedef const struct __SCDynamicStore *SCDynamicStoreRef;
void MSHookFunction(void *symbol, void *replace, void **result);

static BOOL proxyEnabled = YES;
static BOOL spdyDisabled = YES;
static BOOL finderEnabled = YES;

static BOOL getValue(NSDictionary *dict, NSString *key, BOOL defaultVal)
{
    if (dict == nil || key == nil) {
        return defaultVal;
    }
    NSNumber *valObj = [dict objectForKey:key];
    if (valObj == nil) {
        return defaultVal;
    }
    return [valObj boolValue];
}

static void updateSettings(void)
{
    proxyEnabled = YES;
    spdyDisabled = YES;
    finderEnabled = YES;
    NSDictionary *dict = [[NSDictionary alloc] initWithContentsOfFile:@"/var/mobile/Library/Preferences/com.linusyang.ssperapp.plist"];
    if (dict != nil) {
        NSString *bundleName = [[NSBundle mainBundle] bundleIdentifier];
        if (getValue(dict, @"SSPerAppEnabled", NO) && bundleName != nil) {
            NSString *entry = [[NSString alloc] initWithFormat:@"Enabled-%@", bundleName];
            proxyEnabled = getValue(dict, entry, NO);
            if (getValue(dict, @"SSPerAppReversed", NO)) {
                proxyEnabled = !proxyEnabled;
            }
            [entry release];
        }
        spdyDisabled = getValue(dict, @"SSPerAppDisableSPDY", YES);
        finderEnabled = getValue(dict, @"SSPerAppFinder", YES);
        [dict release];
    }
}

DECL_FUNC(CFDictionaryRef, SCDynamicStoreRef store)
{
    if (proxyEnabled) {
        return ORIG_FUNC(store);
    }
    CFMutableDictionaryRef proxyDict = CFDictionaryCreateMutable(kCFAllocatorDefault, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
    int zero = 0;
    CFNumberRef zeroNumber = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &zero);
    CFDictionarySetValue(proxyDict, CFSTR("HTTPEnable"), zeroNumber);
    CFDictionarySetValue(proxyDict, CFSTR("HTTPProxyType"), zeroNumber);
    CFDictionarySetValue(proxyDict, CFSTR("HTTPSEnable"), zeroNumber);
    CFDictionarySetValue(proxyDict, CFSTR("ProxyAutoConfigEnable"), zeroNumber);
    CFRelease(zeroNumber);
    return proxyDict;
}

@interface SettingTableViewController <LFFinderActionDelegate>

- (BOOL)useLibFinder;
- (UIViewController *)allocFinderController;
- (void)finderSelectedFilePath:(NSString *)path checkSanity:(BOOL)check;

@end

%group FinderHook

%hook SettingTableViewController
- (BOOL)useLibFinder
{
    return finderEnabled;
}

- (UIViewController *)allocFinderController
{
    LFFinderController* finder = [[LFFinderController alloc] initWithMode:LFFinderModeDefault];
    finder.actionDelegate = self;
    return finder;
}

%new
-(void)finder:(LFFinderController*)finder didSelectItemAtPath:(NSString*)path
{
    [self finderSelectedFilePath:path checkSanity:NO];
}
%end

%end

%group TwitterHook

%hook T1SPDYConfigurationChangeListener
- (BOOL)_shouldEnableSPDY
{
    if (spdyDisabled) {
        return NO;
    } else {
        return %orig;
    }
}
%end

%end

%group FacebookHook

%hook FBRequester
- (BOOL)allowSPDY
{
    if (spdyDisabled) {
        return NO;
    } else {
        return %orig;
    }
}

- (BOOL)useDNSCache
{
    if (spdyDisabled) {
        return NO;
    } else {
        return %orig;
    }
}
%end

%hook FBNetworkerRequest
- (BOOL)disableSPDY
{
    if (spdyDisabled) {
        return YES;
    } else {
        return %orig;
    }
}
%end

%hook FBRequesterState
- (BOOL)didUseSPDY
{
    if (spdyDisabled) {
        return NO;
    } else {
        return %orig;
    }
}
%end

%hook FBAppConfigService
- (BOOL)disableDNSCache
{
    if (spdyDisabled) {
        return YES;
    } else {
        return %orig;
    }
}
%end

%hook FBNetworker
- (BOOL)_shouldAllowUseOfDNSCache:(id)arg
{
    if (spdyDisabled) {
        return NO;
    } else {
        return %orig;
    }
}
%end

%hook FBAppSessionController
- (BOOL)networkerShouldAllowUseOfDNSCache:(id)arg
{
    if (spdyDisabled) {
        return NO;
    } else {
        return %orig;
    }
}
%end

%end

%ctor
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    NSString *bundleName = [[NSBundle mainBundle] bundleIdentifier];
    if (bundleName != nil && ![bundleName isEqualToString:@"com.apple.springboard"]) {
        updateSettings();
        CFNotificationCenterAddObserver(CFNotificationCenterGetDarwinNotifyCenter(), NULL, (CFNotificationCallback) updateSettings, CFSTR("com.linusyang.ssperapp.settingschanged"), NULL, CFNotificationSuspensionBehaviorCoalesce);
        if ([bundleName isEqualToString:@"com.linusyang.MobileShadowSocks"]) {
            %init(FinderHook);
        } else {
            HOOK_FUNC();
            if ([bundleName isEqualToString:@"com.atebits.Tweetie2"]) {
                %init(TwitterHook);
            } else if ([bundleName isEqualToString:@"com.facebook.Facebook"]) {
                %init(FacebookHook);
            }
        }
    }
    [pool drain];
}
5
samples/Logos/string1.x
Normal file
5
samples/Logos/string1.x
Normal file
@@ -0,0 +1,5 @@
|
||||
# APPLE LOCAL file string workaround 4943900
|
||||
if { [istarget "*-*-darwin\[9123\]*"] } {
|
||||
set additional_flags "-framework Foundation -fconstant-cfstrings"
|
||||
}
|
||||
return 0
|
||||
488  samples/Lua/treegen.p8  Normal file
@@ -0,0 +1,488 @@
pico-8 cartridge // http://www.pico-8.com
version 7
// taken from: https://github.com/lvictorino/pico8/blob/master/treegen.p8
__lua__
-- tree generation
-- basic space colonization algorithm
-- by laurent victorino

tree={} --tree node list
influence={} --influence list
newnodedist=5 -- distance between tree nodes
influencedist=50 -- attraction max distance
influencekilldist=10 -- distance at which an influence node is killed
crownw=64 -- tree crown width
crownh=64 -- tree crown height
crownx=64 -- tree crown x center position
cronwy=64 -- tree crown y center position
generate=false -- has generation started?

function _init()
 -- randomize properties
 newnodedist=rnd(10)+2
 influencedist=rnd(60)+20
 influencekilldist=rnd(20)+8
 crownx=64+rnd(40)*(rnd(1)-rnd(1))
 crowny=64+rnd(40)*(rnd(1)-rnd(1))
 crownw=rnd(60)+30
 crownh=rnd(60)+20
 --
 generate = false

 initialize_root()
 initialize_crown()
end

-- initialize first tree node
function initialize_root()
 tree={}
 add(tree,newnode(rnd(10)+54,127,nil,7))
end

-- initialize crown size and influence
function initialize_crown()
 influence={}
 -- create an eliptic crown composed of 100->400 influence nodes
 for i=0,rnd(100)+300 do
  a = rnd(1)
  x = crownx + rnd(crownw) * cos(a)
  y = crowny + rnd(crownh) * sin(a)
  -- add a new influence to the list
  add(influence,newnode(x,y,nil,5))
 end
end

function _update()
 -- x button: generate a new set
 if btn(4) and btnp(4) == true then
  _init()
 end
 -- c button: start generation
 if btn(5) and btnp(5) == true then
  generate=true
 end

 -- generation loop
 if #influence != 0 and generate==true then
  -- reset all tree nodes influence
  for c in all(tree) do c.resetinfluence(c) end
  -- is there any remaining influence?
  flag=false
  -- for every influence node
  -- check what node they it influenced
  for i in all(influence) do
   closest=nil
   for t in all(tree) do
    if distvector(i,t) < influencedist
    and (closest==nil or abs(distvector(i,t)) < abs(distvector(i,closest))) then
     flag=true
     closest=t
    end
   end
   if closest!=nil then
    closest.addinfluence(closest,i)
   end
  end
  -- if no influence remains stop the generation
  if flag == false then
   influence={}
   generate=false
   return
  end
  -- for every tree node
  -- compute the influence vector
  -- and add a new tree node to the list
  for t in all(tree) do
   if #t.influence != 0 then
    medv={}
    medv.x=0
    medv.y=0
    for i in all(t.influence) do
     dist=distvector(i,t)
     medv.x+=(i.x-t.x)/dist -- closest influence nodes are more powerful
     medv.y+=(i.y-t.y)/dist
     -- destroy influence if too close
     if dist < influencekilldist then
      del(influence,i)
     end
    end
    -- compute the influence vector
    medv.x /= #t.influence
    medv.y /= #t.influence
    -- normalize influence vector
    newn=normalize(medv)
    -- compute new node position
    newn.x=t.x+newnodedist*newn.x
    newn.y=t.y+newnodedist*newn.y
    -- add new node to the list
    add(tree,newnode(newn.x,newn.y,t))
   end
  end
 end
end

function _draw()
 cls()

 -- draw tree lines
 for t in all(tree) do
  if t.parent != nil then
   line(t.x,t.y,t.parent.x,t.parent.y,4)
  end
 end
 -- draw influence
 for i in all(influence) do
  i.draw(i,8)
 end

 -- helpers
 color(13)
 print("c:new set\tx:generate")
 print("nodes\t\t\t\t\tcount:"..#tree.."\tdist:"..flr(newnodedist))
 print("influence\tcount:"..#influence.."\tdist:"..flr(influencedist))
 if #influence==0 then
  print("generation is over.",0,123)
 end
end

-- generate a new node
-- params: xpos,ypos,parent node to be attached to
function newnode(x,y,parent)
 n={}
 n.x=x
 n.y=y
 -- set parent
 n.parent=parent
 -- list of influence node
 n.influence={}
 -- draw node as crosses
 n.draw=function(node,col)
  line(node.x,node.y-1,node.x,node.y+1,col)
  line(node.x-1,node.y,node.x+1,node.y,col)
 end
 -- add an influence node to the list
 n.addinfluence=function(node,influence)
  add(node.influence,influence)
 end
 -- reset the influence list
 n.resetinfluence=function(node)
  node.influence={}
 end
 return n
end

-- return the distance between
-- two vectors
function distvector(v1,v2)
 vx=v1.x-v2.x
 vy=v1.y-v2.y
 return sqrt(vx*vx+vy*vy)
end

-- return the magnitude of a vector
function magnitude(v)
 return sqrt(v.x*v.x+v.y*v.y)
end

-- return a normalized vector
function normalize(v)
 vp={}
 vp.x=v.x/magnitude(v)
 vp.y=v.y/magnitude(v)
 return vp
end
__gfx__
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00700700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00077000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00077000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00700700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
__gff__
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
__map__
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
__sfx__
|
||||
000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
|
||||
__music__
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
00 41424344
|
||||
1  samples/Markdown/symlink.md  Symbolic link
@@ -0,0 +1 @@
README.mdown
83  samples/Monkey/encodeToPng.monkey2  Normal file
@@ -0,0 +1,83 @@

#Import "<std>"
Using std..

'Set your own path here. Defaults to build folder.
Global path:= AppDir() + "encodeToPng.png"

Function Main()

    'Write from PixMap
    Local source := New Pixmap( 100, 100 )
    For Local y := 0 Until source.Width
        For Local x := 0 Until source.Height
            'Generates random pixels
            source.SetPixelARGB( x, y, ARGB( 255, Rnd(0, 255), 0, Rnd(0, 255) ) )
        Next
    Next
    source.Save( path )

    'Read from png to PixMap
    Local dest := Pixmap.Load( path )
    Local a := ""
    Local r := ""
    Local g := ""
    Local b := ""
    For Local y := 0 Until dest.Width
        For Local x := 0 Until source.Height
            Local argb := dest.GetPixelARGB(x,y)
            a += ARGBToAlpha( argb ) + " "
            r += ARGBToRed( argb ) + " "
            g += ARGBToGreen( argb ) + " "
            b += ARGBToBlue( argb ) + " "
        Next
        a += "~n"
        r += "~n"
        g += "~n"
        b += "~n"
    Next

    'Print resulting pixels
    Print( " ~nAlpha:~n" + a )
    Print( " ~nRed:~n" + r )
    Print( " ~nGreen:~n" + g )
    Print( " ~nBlue:~n" + b )

End


'**************** Color Functions ****************


Function ARGB:UInt( a:Float, r:Float, g:Float, b:Float )
    Assert ( a<=1.0, "Alpha max value is 1.0" )
    Assert ( r<=1.0, "Red max value is 1.0" )
    Assert ( g<=1.0, "Green max value is 1.0" )
    Assert ( b<=1.0, "Blue max value is 1.0" )
    Return UInt(a*255) Shl 24 | UInt(r*255) Shl 16 | UInt(g*255) Shl 8 | UInt(b*255)
End

Function ARGB:UInt( a:Int, r:Int, g:Int, b:Int )
    Assert ( a<256, "Alpha can't be higher than 255" )
    Assert ( r<256, "Red can't be higher than 255" )
    Assert ( g<256, "Green can't be higher than 255" )
    Assert ( b<256, "Blue can't be higher than 255" )
    Return( a Shl 24 | r Shl 16 | g Shl 8 | b )
End

Function ARGBToAlpha:Int( argb:UInt )
    Return argb Shr 24 & $ff
End

Function ARGBToRed:Int( argb:UInt )
    Return argb Shr 16 & $ff
End

Function ARGBToGreen:Int( argb:UInt )
    Return argb Shr 8 & $ff
End

Function ARGBToBlue:Int( argb:UInt )
    Return argb & $ff
End

185 samples/Monkey/example.monkey2 Normal file
@@ -0,0 +1,185 @@

Namespace example

#rem
multi
line
comment
#end

#rem
nested
#rem
multi
line
#end
comment
#end

'Importing a module pre-compile in the modules folder
#Import "<mojo>"

'Setting search paths for namespaces
Using mojo..
Using std..

Const ONECONST:Int = 1
Const TWOCONST := 2
Const THREECONST := 3, FOURCONST:Int = 4

Global someVariable:Int = 4

Function Main()
'creating arrays
Local scores:Int[]= New Int[](10,20,30)
Local text:String[]= New String[]( "Hello","There","World" )

' string type
Local string1:String = "Hello world"
Local string2:= "Hello world"

' escape characers in strings
Local string4 := "~qHello World~q"
Local string5 := "~tIndented~n"
Local string6 := "tilde is wavey... ~~"
Print string4
Print string5
Print string6

' string pseudofunctions
Print " Hello World ~n".Trim() ' prints "Hello World" whithout whitespace
Print "Hello World".ToUpper() ' prints "HELLO WORLD"

' preprocessor keywords
#If __TARGET__ = "android"
'DoStuff()
#ElseIf __TARGET__ = "ios"
'DoOtherStuff()
#End

' operators
Local a := 32
Local b := 32 ~ 0
b ~= 16
b |= 16
b &= 16
Local c := a | b
Print c

'Creates a new Window class and starts the main App loop, using the Mojo module
New AppInstance
New GameWindow
App.Run()
End


'------------------------------------------ Class Examples ------------------------------------------


'You can extend the Window class to customize its behavior
Class GameWindow Extends Window

Private
Field _spiral :Float[]
Field _circle :Float[]

Public
Method New()
Super.New( "Test", 800, 600, WindowFlags.Resizable )
End

'Properties can be used to create "read-only" values
Property Spiral:Float[]()
Return _spiral
End

'Or to control what happens to a value when assigned
Property Circle:Float[]()
Return _circle
Setter( values:Float[] )
If( values.Length > 2 ) And ( values.Length Mod 2 = 0 )
_circle = values
Else
Print( "Circle values need to be an even number larger than 1" )
End
End

'Methods require a class instance. The keyword Self is optional when accessing fields and properties
'The method Window.OnRender is virtual, and can be overriden
'Width and Height are Propreties inherited from the Window class
Method OnRender( canvas:Canvas ) Override
RequestRender()
canvas.Clear( Color.DarkGrey )
canvas.Translate( Width/2.0, Height/2.0 )
canvas.Rotate( -Millisecs()/200.0 )
canvas.Color = New Color( 1, 0.8, 0.2 )
DrawLines( canvas, Spiral )
DrawLines( canvas, Circle, True )
End

'This method is called whenever the window layout changes, like when resizing
Method OnLayout() Override
_spiral = CreateSpiral( 0, 0, Height/1.5, Height/1.5, 100 )
Circle = CreateCircle( 0, 0, Height/1.5, Height/1.5, 100 )
End

'Functions can be called without a GameWindow instance, like "Static Functions" in other languages.
Function DrawLines( canvas:Canvas, lines:Float[], closedShape:Bool = False )
For Local n := 0 Until lines.Length Step 2
Local l := lines.Length - 3
Local x0 := lines[n]
Local y0 := lines[n+1]
Local x1 := n<l? lines[n+2] Else (closedShape? lines[0] Else x0 ) 'Conditional assignment, uses the "?" symbol to test a condition
Local y1 := n<l? lines[n+3] Else (closedShape? lines[1] Else y0 )
canvas.DrawLine( x0, y0, x1, y1 )
Next
End

Function CreateSpiral:Float[]( x:Double, y:Double, width:Double, height:Double, sides:Int = 32, turns:Float = 3.0 )
Local stack := New Stack<Float>
Local radStep := (Pi*2.0)/Float(sides)
Local xMult := 0.0
Local yMult := 0.0
Local radiusX:Float = width/2.0
Local radiusY:Float = height/2.0
Local stepX:Float = radiusX / sides
Local stepY:Float = radiusY / sides
For Local a := 0.0 To Pi*2 Step radStep
stack.Push( ( ( Sin( a*turns ) * radiusX )* xMult ) + x )
stack.Push( ( ( Cos( a*turns ) * radiusY )* yMult ) + y )
xMult += stepX/radiusX
yMult += stepY/radiusY
Next
Return stack.ToArray()
End

Function CreateCircle:Float[]( x:Double, y:Double, width:Double, height:Double, sides:Int = 32 )
Local stack := New Stack<Float>
Local radStep := (Pi*2.0)/Float(sides)
Local radiusX:Float = width/2.0
Local radiusY:Float = height/2.0
For Local a := 0.0 To Pi*2 Step radStep
stack.Push( ( Sin( a ) * radiusX ) + x )
stack.Push( ( Cos( a ) * radiusY ) + y )
Next
Return stack.ToArray()
End

End

'--------- extending with generics -----------------------------------------------------------------------------

Class MyList Extends List<Double>
End

'--------- interfaces ------------------------------------------------------------------------------------------

Interface Computer
Method Boot ()
Method Process ()
Method Display ()
End
'
Class PC Implements Computer
End
115 samples/Monkey/gui.monkey2 Normal file
@@ -0,0 +1,115 @@
#Import "<mojo>"
#Import "<mojox>"

Using std..
Using mojo..
Using mojox..

Function Main()
New AppInstance
New TestGui
App.Run()
End


Class TestGui Extends Window
Field mainDock:DockingView
Field rgtDock:ScrollView
Field graphView:GraphView

Const smallFont:Font = Font.Load( "font::DejaVuSans.ttf", 10 )

Method New()
Super.New( "Test", 1024, 640, WindowFlags.Resizable )
mainDock = New MainDock()
rgtDock = New RightDock()
mainDock.AddView( rgtDock, "right", "400", True )
ContentView = mainDock
End
End


Class MainDock Extends DockingView
Method New()
Layout="fill"
Local newStyle := Style.Copy()
newStyle.BackgroundColor = Color.DarkGrey
newStyle.BorderColor = Color.Black
newStyle.Font = TestGui.smallFont
Style = newStyle
End

Method OnRender( canvas:Canvas ) Override
Super.OnRender( canvas )
canvas.Color = New Color( Rnd(), Rnd(), Rnd() )
canvas.DrawCircle( Frame.Width/4, Frame.Height/2, Frame.Height/4 )
canvas.Color = Color.Aluminum
canvas.DrawText( "gameview:" + App.FPS + " fps", 5, 5 )
End
End



Class RightDock Extends ScrollView
Private
Field _panSpeed := 10.0

Public
Method New()
Layout="fill"
ScrollBarsVisible = True

Local newStyle := Style.Copy()
newStyle.BackgroundColor = Color.Grey
newStyle.BorderColor = Color.Black
newStyle.Font = TestGui.smallFont
Style = newStyle

Local graph:=New GraphView
ContentView = graph

Scroll = New Vec2i( graph.Frame.Width/2, graph.Frame.Height/2 ) 'Doesn't work!
End

Method OnRender( canvas:Canvas ) Override
Super.OnRender( canvas )
canvas.Color = Color.Aluminum
canvas.DrawText( "size:" + Frame + " ,scroll:" + Scroll , 5, 5 )
End

Method OnMouseEvent( event:MouseEvent ) Override
Select event.Type
Case EventType.MouseWheel
Scroll = New Vec2i( Scroll.X+(event.Wheel.X*_panSpeed), Scroll.Y-(event.Wheel.Y*_panSpeed) )
App.RequestRender()
End
End
End


Class GraphView Extends View
Private
Field _panSpeed := 5.0
Field _size := New Vec2i( 1024, 1024 )

Public
Method New()
MinSize=New Vec2i( _size.X, _size.Y )
End

Method OnRender( canvas:Canvas ) Override
Local r:= 20.0
For Local x := 1 Until 10
For Local y := 1 Until 10
Local v := (x/10.0) -0.05
canvas.Color = New Color( v, v, v )
canvas.DrawCircle( (x*100)+r, (y*100)+r, r )
Next
Next
End
End
29 samples/Monkey/sorting.monkey2 Normal file
@@ -0,0 +1,29 @@
'Showcases use of Lambda functions and Generics.

#Import "<std>"
Using std..

Function Main()

Local testStack := New Stack< MyObject >

For Local n := 1 To 20
Local newItem := New MyObject
newItem.depth = Rnd( 0, 100 )
testStack.Push( newItem )
Next

testStack.Sort( Lambda:Int( x:MyObject,y:MyObject )
Return x.depth<=>y.depth
End )

For Local n := Eachin testStack
Print( n.depth )
Next

End


Struct MyObject
Field depth := 0
End
106 samples/Nearley/nearley-language-bootstrapped.ne Normal file
@@ -0,0 +1,106 @@
# nearley grammar
@builtin "string.ne"

@{%

function insensitive(sl) {
var s = sl.literal;
result = [];
for (var i=0; i<s.length; i++) {
var c = s.charAt(i);
if (c.toUpperCase() !== c || c.toLowerCase() !== c) {
result.push(new RegExp("[" + c.toLowerCase() + c.toUpperCase() + "]"));
} else {
result.push({literal: c});
}
}
return {subexpression: [{tokens: result, postprocess: function(d) {return d.join(""); }}]};
}

%}

final -> whit? prog whit? {% function(d) { return d[1]; } %}

prog -> prod {% function(d) { return [d[0]]; } %}
| prod whit prog {% function(d) { return [d[0]].concat(d[2]); } %}

prod -> word whit? ("-"|"="):+ ">" whit? expression+ {% function(d) { return {name: d[0], rules: d[5]}; } %}
| word "[" wordlist "]" whit? ("-"|"="):+ ">" whit? expression+ {% function(d) {return {macro: d[0], args: d[2], exprs: d[8]}} %}
| "@" whit? js {% function(d) { return {body: d[2]}; } %}
| "@" word whit word {% function(d) { return {config: d[1], value: d[3]}; } %}
| "@include" whit? string {% function(d) {return {include: d[2].literal, builtin: false}} %}
| "@builtin" whit? string {% function(d) {return {include: d[2].literal, builtin: true }} %}

expression+ -> completeexpression
| expression+ whit? "|" whit? completeexpression {% function(d) { return d[0].concat([d[4]]); } %}

expressionlist -> completeexpression
| expressionlist whit? "," whit? completeexpression {% function(d) { return d[0].concat([d[4]]); } %}

wordlist -> word
| wordlist whit? "," whit? word {% function(d) { return d[0].concat([d[4]]); } %}

completeexpression -> expr {% function(d) { return {tokens: d[0]}; } %}
| expr whit? js {% function(d) { return {tokens: d[0], postprocess: d[2]}; } %}

expr_member ->
word {% id %}
| "$" word {% function(d) {return {mixin: d[1]}} %}
| word "[" expressionlist "]" {% function(d) {return {macrocall: d[0], args: d[2]}} %}
| string "i":? {% function(d) { if (d[1]) {return insensitive(d[0]); } else {return d[0]; } } %}
| "%" word {% function(d) {return {token: d[1]}} %}
| charclass {% id %}
| "(" whit? expression+ whit? ")" {% function(d) {return {'subexpression': d[2]} ;} %}
| expr_member whit? ebnf_modifier {% function(d) {return {'ebnf': d[0], 'modifier': d[2]}; } %}

ebnf_modifier -> ":+" {% id %} | ":*" {% id %} | ":?" {% id %}

expr -> expr_member
| expr whit expr_member {% function(d){ return d[0].concat([d[2]]); } %}

word -> [\w\?\+] {% function(d){ return d[0]; } %}
| word [\w\?\+] {% function(d){ return d[0]+d[1]; } %}

string -> dqstring {% function(d) {return { literal: d[0] }; } %}
#string -> "\"" charset "\"" {% function(d) { return { literal: d[1].join("") }; } %}
#
#charset -> null
# | charset char {% function(d) { return d[0].concat([d[1]]); } %}
#
#char -> [^\\"] {% function(d) { return d[0]; } %}
# | "\\" . {% function(d) { return JSON.parse("\""+"\\"+d[1]+"\""); } %}

charclass -> "." {% function(d) { return new RegExp("."); } %}
| "[" charclassmembers "]" {% function(d) { return new RegExp("[" + d[1].join('') + "]"); } %}

charclassmembers -> null
| charclassmembers charclassmember {% function(d) { return d[0].concat([d[1]]); } %}

charclassmember -> [^\\\]] {% function(d) { return d[0]; } %}
| "\\" . {% function(d) { return d[0] + d[1]; } %}

js -> "{" "%" jscode "%" "}" {% function(d) { return d[2]; } %}

jscode -> null {% function() {return "";} %}
| jscode [^%] {% function(d) {return d[0] + d[1];} %}
| jscode "%" [^}] {% function(d) {return d[0] + d[1] + d[2]; } %}

# Whitespace with a comment
whit -> whitraw
| whitraw? comment whit?

# Optional whitespace with a comment
whit? -> null
| whit

# Literally a string of whitespace
whitraw -> [\s]
| whitraw [\s]

# A string of whitespace OR the empty string
whitraw? -> null
| whitraw

comment -> "#" commentchars "\n"
commentchars -> null
| commentchars [^\n]
67 samples/Nextflow/blast.nf Normal file
@@ -0,0 +1,67 @@
#!/usr/bin/env nextflow
/*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* For more information, please refer to <http://unlicense.org/>
*/

/*
* Author Paolo Di Tommaso <paolo.ditommaso@gmail.com>
*/


params.query = "$HOME/sample.fa"
params.db = "$HOME/tools/blast-db/pdb/pdb"

process blast {
output:
file top_hits

"""
blastp -query ${params.query} -db ${params.db} -outfmt 6 \
| head -n 10 \
| cut -f 2 > top_hits
"""
}

process extract {
input:
file top_hits
output:
file sequences

"""
blastdbcmd -db ${params.db} -entry_batch $top_hits > sequences
"""
}

process align {
input:
file sequences
echo true

"""
t_coffee $sequences 2>&- | tee align_result
"""
}
496 samples/Nextflow/callings.nf Executable file
@@ -0,0 +1,496 @@
#!/usr/bin/env nextflow
/*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* For more information, please refer to <http://unlicense.org/>
*/


/*
* 'CalliNGS-NF' - A Nextflow pipeline for variant calling with NGS data
*
* This pipeline that reproduces steps from the GATK best practics of SNP
* calling with RNAseq data procedure:
* https://software.broadinstitute.org/gatk/guide/article?id=3891
*
* Anna Vlasova
* Emilio Palumbo
* Paolo Di Tommaso
* Evan Floden
*/


/*
* Define the default parameters
*/

params.genome = "$baseDir/data/genome.fa"
params.variants = "$baseDir/data/known_variants.vcf.gz"
params.blacklist = "$baseDir/data/blacklist.bed"
params.reads = "$baseDir/data/reads/rep1_{1,2}.fq.gz"
params.results = "results"
params.gatk = '/usr/local/bin/GenomeAnalysisTK.jar'
params.gatk_launch = "java -jar $params.gatk"

log.info "C A L L I N G S - N F v 1.0"
log.info "================================"
log.info "genome : $params.genome"
log.info "reads : $params.reads"
log.info "variants : $params.variants"
log.info "blacklist: $params.blacklist"
log.info "results : $params.results"
log.info "gatk : $params.gatk"
log.info ""

/*
* Parse the input parameters
*/

GATK = params.gatk_launch
genome_file = file(params.genome)
variants_file = file(params.variants)
blacklist_file = file(params.blacklist)
reads_ch = Channel.fromFilePairs(params.reads)


/**********
* PART 1: Data preparation
*
* Process 1A: Create a FASTA genome index (.fai) with samtools for GATK
*/

process '1A_prepare_genome_samtools' {
tag "$genome.baseName"

input:
file genome from genome_file

output:
file "${genome}.fai" into genome_index_ch

script:
"""
samtools faidx ${genome}
"""
}


/*
* Process 1B: Create a FASTA genome sequence dictionary with Picard for GATK
*/

process '1B_prepare_genome_picard' {
tag "$genome.baseName"

input:
file genome from genome_file
output:
file "${genome.baseName}.dict" into genome_dict_ch

script:
"""
PICARD=`which picard.jar`
java -jar \$PICARD CreateSequenceDictionary R= $genome O= ${genome.baseName}.dict
"""
}


/*
* Process 1C: Create STAR genome index file.
*/

process '1C_prepare_star_genome_index' {
tag "$genome.baseName"

input:
file genome from genome_file
output:
file "genome_dir" into genome_dir_ch

script:
"""
mkdir genome_dir

STAR --runMode genomeGenerate \
--genomeDir genome_dir \
--genomeFastaFiles ${genome} \
--runThreadN ${task.cpus}
"""
}


/*
* Process 1D: Create a file containing the filtered and recoded set of variants
*/

process '1D_prepare_vcf_file' {
tag "$variantsFile.baseName"

input:
file variantsFile from variants_file
file blacklisted from blacklist_file

output:
set file("${variantsFile.baseName}.filtered.recode.vcf.gz"), file("${variantsFile.baseName}.filtered.recode.vcf.gz.tbi") into prepared_vcf_ch

script:
"""
vcftools --gzvcf $variantsFile -c \
--exclude-bed ${blacklisted} \
--recode | bgzip -c \
> ${variantsFile.baseName}.filtered.recode.vcf.gz

tabix ${variantsFile.baseName}.filtered.recode.vcf.gz
"""
}

/*
* END OF PART 1
*********/



/**********
* PART 2: STAR RNA-Seq Mapping
*
* Process 2: Align RNA-Seq reads to the genome with STAR
*/

process '2_rnaseq_mapping_star' {
tag "$replicateId"

input:
file genome from genome_file
file genomeDir from genome_dir_ch
set replicateId, file(reads) from reads_ch

output:
set replicateId, file('Aligned.sortedByCoord.out.bam'), file('Aligned.sortedByCoord.out.bam.bai') into aligned_bam_ch

script:
"""
# ngs-nf-dev Align reads to genome
STAR --genomeDir $genomeDir \
--readFilesIn $reads \
--runThreadN ${task.cpus} \
--readFilesCommand zcat \
--outFilterType BySJout \
--alignSJoverhangMin 8 \
--alignSJDBoverhangMin 1 \
--outFilterMismatchNmax 999

# 2nd pass (improve alignmets using table of splice junctions and create a new index)
mkdir genomeDir
STAR --runMode genomeGenerate \
--genomeDir genomeDir \
--genomeFastaFiles $genome \
--sjdbFileChrStartEnd SJ.out.tab \
--sjdbOverhang 75 \
--runThreadN ${task.cpus}

# Final read alignments
STAR --genomeDir genomeDir \
--readFilesIn $reads \
--runThreadN ${task.cpus} \
--readFilesCommand zcat \
--outFilterType BySJout \
--alignSJoverhangMin 8 \
--alignSJDBoverhangMin 1 \
--outFilterMismatchNmax 999 \
--outSAMtype BAM SortedByCoordinate \
--outSAMattrRGline ID:$replicateId LB:library PL:illumina PU:machine SM:GM12878

# Index the BAM file
samtools index Aligned.sortedByCoord.out.bam
"""
}

/*
* END OF PART 2
******/


/**********
* PART 3: GATK Prepare Mapped Reads
*
* Process 3: Split reads that contain Ns in their CIGAR string.
* Creates k+1 new reads (where k is the number of N cigar elements)
* that correspond to the segments of the original read beside/between
* the splicing events represented by the Ns in the original CIGAR.
*/

process '3_rnaseq_gatk_splitNcigar' {
tag "$replicateId"

input:
file genome from genome_file
file index from genome_index_ch
file genome_dict from genome_dict_ch
set replicateId, file(bam), file(index) from aligned_bam_ch

output:
set replicateId, file('split.bam'), file('split.bai') into splitted_bam_ch

script:
"""
# SplitNCigarReads and reassign mapping qualities
$GATK -T SplitNCigarReads \
-R $genome -I $bam \
-o split.bam \
-rf ReassignOneMappingQuality \
-RMQF 255 -RMQT 60 \
-U ALLOW_N_CIGAR_READS \
--fix_misencoded_quality_scores
"""
}

/*
* END OF PART 3
******/


/***********
* PART 4: GATK Base Quality Score Recalibration Workflow
*
* Process 4: Base recalibrate to detect systematic errors in base quality scores,
* select unique alignments and index
*
*/

process '4_rnaseq_gatk_recalibrate' {
tag "$replicateId"

input:
file genome from genome_file
file index from genome_index_ch
file dict from genome_dict_ch
set replicateId, file(bam), file(index) from splitted_bam_ch
set file(variants_file), file(variants_file_index) from prepared_vcf_ch

output:
set sampleId, file("${replicateId}.final.uniq.bam"), file("${replicateId}.final.uniq.bam.bai") into (final_output_ch, bam_for_ASE_ch)

script:
sampleId = replicateId.replaceAll(/[12]$/,'')
"""
# Indel Realignment and Base Recalibration
$GATK -T BaseRecalibrator \
--default_platform illumina \
-cov ReadGroupCovariate \
-cov QualityScoreCovariate \
-cov CycleCovariate \
-knownSites ${variants_file} \
-cov ContextCovariate \
-R ${genome} -I ${bam} \
--downsampling_type NONE \
-nct ${task.cpus} \
-o final.rnaseq.grp

$GATK -T PrintReads \
-R ${genome} -I ${bam} \
-BQSR final.rnaseq.grp \
-nct ${task.cpus} \
-o final.bam

# Select only unique alignments, no multimaps
(samtools view -H final.bam; samtools view final.bam| grep -w 'NH:i:1') \
|samtools view -Sb - > ${replicateId}.final.uniq.bam

# Index BAM files
samtools index ${replicateId}.final.uniq.bam
"""
}

/*
* END OF PART 4
******/



/***********
* PART 5: GATK Variant Calling
*
* Process 5: Call variants with GATK HaplotypeCaller.
* Calls SNPs and indels simultaneously via local de-novo assembly of
* haplotypes in an active region.
* Filter called variants with GATK VariantFiltration.
*/


process '5_rnaseq_call_variants' {
tag "$sampleId"

input:
file genome from genome_file
file index from genome_index_ch
file dict from genome_dict_ch
set sampleId, file(bam), file(bai) from final_output_ch.groupTuple()

output:
set sampleId, file('final.vcf') into vcf_files

script:
"""
# fix absolute path in dict file
sed -i 's@UR:file:.*${genome}@UR:file:${genome}@g' $dict
echo "${bam.join('\n')}" > bam.list

# Variant calling
$GATK -T HaplotypeCaller \
-R $genome -I bam.list \
-dontUseSoftClippedBases \
-stand_call_conf 20.0 \
-o output.gatk.vcf.gz

# Variant filtering
$GATK -T VariantFiltration \
-R $genome -V output.gatk.vcf.gz \
-window 35 -cluster 3 \
-filterName FS -filter "FS > 30.0" \
-filterName QD -filter "QD < 2.0" \
-o final.vcf
"""
}

/*
* END OF PART 5
******/


/***********
* PART 6: Post-process variants file and prepare for Allele-Specific Expression and RNA Editing Analysis
*
* Process 6A: Post-process the VCF result
*/

process '6A_post_process_vcf' {
tag "$sampleId"
publishDir "$params.results/$sampleId"

input:
set sampleId, file('final.vcf') from vcf_files
set file('filtered.recode.vcf.gz'), file('filtered.recode.vcf.gz.tbi') from prepared_vcf_ch
output:
set sampleId, file('final.vcf'), file('commonSNPs.diff.sites_in_files') into vcf_and_snps_ch

script:
'''
grep -v '#' final.vcf | awk '$7~/PASS/' |perl -ne 'chomp($_); ($dp)=$_=~/DP\\=(\\d+)\\;/; if($dp>=8){print $_."\\n"};' > result.DP8.vcf

vcftools --vcf result.DP8.vcf --gzdiff filtered.recode.vcf.gz --diff-site --out commonSNPs
'''
}

/*
* Process 6B: Prepare variants file for allele specific expression (ASE) analysis
*/

process '6B_prepare_vcf_for_ase' {
tag "$sampleId"
publishDir "$params.results/$sampleId"

input:
set sampleId, file('final.vcf'), file('commonSNPs.diff.sites_in_files') from vcf_and_snps_ch
output:
set sampleId, file('known_snps.vcf') into vcf_for_ASE
file('AF.histogram.pdf') into gghist_pdfs

script:
'''
awk 'BEGIN{OFS="\t"} $4~/B/{print $1,$2,$3}' commonSNPs.diff.sites_in_files > test.bed

vcftools --vcf final.vcf --bed test.bed --recode --keep-INFO-all --stdout > known_snps.vcf

grep -v '#' known_snps.vcf | awk -F '\\t' '{print $10}' \
|awk -F ':' '{print $2}'|perl -ne 'chomp($_); \
@v=split(/\\,/,$_); if($v[0]!=0 ||$v[1] !=0)\
{print $v[1]/($v[1]+$v[0])."\\n"; }' |awk '$1!=1' \
>AF.4R

gghist.R -i AF.4R -o AF.histogram.pdf
'''
}


/*
* Group data for allele-specific expression.
*
* The `bam_for_ASE_ch` emites tuples having the following structure, holding the final BAM/BAI files:
*
* ( sample_id, file_bam, file_bai )
*
* The `vcf_for_ASE` channel emits tuples having the following structure, holding the VCF file:
*
* ( sample_id, output.vcf )
*
* The BAMs are grouped together and merged with VCFs having the same sample id. Finally
* it creates a channel named `grouped_vcf_bam_bai_ch` emitting the following tuples:
*
* ( sample_id, file_vcf, List[file_bam], List[file_bai] )
*/

bam_for_ASE_ch
.groupTuple()
.phase(vcf_for_ASE)
.map{ left, right ->
def sampleId = left[0]
def bam = left[1]
def bai = left[2]
def vcf = right[1]
tuple(sampleId, vcf, bam, bai)
}
.set { grouped_vcf_bam_bai_ch }


/*
* Process 6C: Allele-Specific Expression analysis with GATK ASEReadCounter.
* Calculates allele counts at a set of positions after applying
* filters that are tuned for enabling allele-specific expression
* (ASE) analysis
*/

process '6C_ASE_knownSNPs' {
tag "$sampleId"
publishDir "$params.results/$sampleId"

input:
file genome from genome_file
file index from genome_index_ch
file dict from genome_dict_ch
set sampleId, file(vcf), file(bam), file(bai) from grouped_vcf_bam_bai_ch

output:
file "ASE.tsv"

script:
"""
echo "${bam.join('\n')}" > bam.list

$GATK -R ${genome} \
-T ASEReadCounter \
-o ASE.tsv \
-I bam.list \
-sites ${vcf}
"""
}
50 samples/Nextflow/filenames/nextflow.config Normal file
@@ -0,0 +1,50 @@
aws {
region = 'eu-west-1'
}

cloud {
autoscale {
enabled = true
minInstances = 3
starvingTimeout = '2 min'
terminateWhenIdle = true
}
imageId = 'ami-78ds78d'
instanceProfile = 'MyRole'
instanceType = 'r4.large'
sharedStorageId = 'fs-76ds76s'
spotPrice = 0.06
subnetId = 'subnet-8d98d7s'
}

env {
BAR = 'world'
FOO = 'hola'
}

mail {
from = 'paolo.ditommaso@gmail.com'
smtp {
auth = true
host = 'email-smtp.us-east-1.amazonaws.com'
password = 'my-secret'
port = 587
starttls {
enable = true
required = true
}
user = 'my-name'
}
}

process {
executor = 'slurm'
queue = 'cn-el7'
memory = '16GB'
cpus = 8
container = 'user/rnaseq-nf:latest'
}

trace {
fields = 'task_id,name,status,attempt,exit,queue'
}
135 samples/Nextflow/rnaseq.nf Normal file
@@ -0,0 +1,135 @@
#!/usr/bin/env nextflow
/*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* For more information, please refer to <http://unlicense.org/>
*/


/*
* Proof of concept of a RNAseq pipeline implemented with Nextflow
*
* Authors:
* - Paolo Di Tommaso <paolo.ditommaso@gmail.com>
* - Emilio Palumbo <emiliopalumbo@gmail.com>
* - Evan Floden <evanfloden@gmail.com>
*/


params.reads = "$baseDir/data/ggal/*_{1,2}.fq"
params.transcriptome = "$baseDir/data/ggal/ggal_1_48850000_49020000.Ggal71.500bpflank.fa"
params.outdir = "."
params.multiqc = "$baseDir/multiqc"

log.info """\
R N A S E Q - N F P I P E L I N E
===================================
transcriptome: ${params.transcriptome}
reads : ${params.reads}
outdir : ${params.outdir}
"""
.stripIndent()


transcriptome_file = file(params.transcriptome)
multiqc_file = file(params.multiqc)


Channel
.fromFilePairs( params.reads )
.ifEmpty { error "Cannot find any reads matching: ${params.reads}" }
.into { read_pairs_ch; read_pairs2_ch }


process index {
tag "$transcriptome_file.simpleName"

input:
file transcriptome from transcriptome_file

output:
file 'index' into index_ch

script:
"""
salmon index --threads $task.cpus -t $transcriptome -i index
"""
}


process quant {
tag "$pair_id"

input:
file index from index_ch
set pair_id, file(reads) from read_pairs_ch

output:
file(pair_id) into quant_ch

script:
"""
salmon quant --threads $task.cpus --libType=U -i index -1 ${reads[0]} -2 ${reads[1]} -o $pair_id
"""
}

process fastqc {
tag "FASTQC on $sample_id"

input:
set sample_id, file(reads) from read_pairs2_ch

output:
file("fastqc_${sample_id}_logs") into fastqc_ch


script:
"""
mkdir fastqc_${sample_id}_logs
fastqc -o fastqc_${sample_id}_logs -f fastq -q ${reads}
"""
}


process multiqc {
publishDir params.outdir, mode:'copy'

input:
file('*') from quant_ch.mix(fastqc_ch).collect()
file(config) from multiqc_file

output:
file('multiqc_report.html')

script:
"""
cp $config/* .
echo "custom_logo: \$PWD/logo.png" >> multiqc_config.yaml
multiqc .
"""
}

workflow.onComplete {
println ( workflow.success ? "\nDone! Open the following report in your browser --> $params.outdir/multiqc_report.html\n" : "Oops .. something went wrong" )
}
230 samples/OpenEdge ABL/test-win.w Normal file
@@ -0,0 +1,230 @@
&ANALYZE-SUSPEND _VERSION-NUMBER AB_v10r12 GUI
&ANALYZE-RESUME
&Scoped-define WINDOW-NAME C-Win
&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CUSTOM _DEFINITIONS C-Win
/*------------------------------------------------------------------------

File:

Description:

Input Parameters:
<none>

Output Parameters:
<none>

Author:

Created:

------------------------------------------------------------------------*/
/* This .W file was created with the Progress AppBuilder. */
/*----------------------------------------------------------------------*/

/* Create an unnamed pool to store all the widgets created
by this procedure. This is a good default which assures
that this procedure's triggers and internal procedures
will execute in this procedure's storage, and that proper
cleanup will occur on deletion of the procedure. */

CREATE WIDGET-POOL.

/* *************************** Definitions ************************** */

/* Parameters Definitions --- */

/* Local Variable Definitions --- */

/* _UIB-CODE-BLOCK-END */
&ANALYZE-RESUME


&ANALYZE-SUSPEND _UIB-PREPROCESSOR-BLOCK

/* ******************** Preprocessor Definitions ******************** */

&Scoped-define PROCEDURE-TYPE Window
&Scoped-define DB-AWARE no

/* Name of designated FRAME-NAME and/or first browse and/or first query */
&Scoped-define FRAME-NAME DEFAULT-FRAME

/* Custom List Definitions */
/* List-1,List-2,List-3,List-4,List-5,List-6 */

/* _UIB-PREPROCESSOR-BLOCK-END */
&ANALYZE-RESUME



/* *********************** Control Definitions ********************** */

/* Define the widget handle for the window */
DEFINE VAR C-Win AS WIDGET-HANDLE NO-UNDO.

/* ************************ Frame Definitions *********************** */

DEFINE FRAME DEFAULT-FRAME
WITH 1 DOWN NO-BOX KEEP-TAB-ORDER OVERLAY
SIDE-LABELS NO-UNDERLINE THREE-D
AT COL 1 ROW 1
SIZE 80 BY 16 WIDGET-ID 100.


/* *********************** Procedure Settings ************************ */

&ANALYZE-SUSPEND _PROCEDURE-SETTINGS
/* Settings for THIS-PROCEDURE
Type: Window
Allow: Basic,Browse,DB-Fields,Window,Query
Other Settings: COMPILE
*/
&ANALYZE-RESUME _END-PROCEDURE-SETTINGS

/* ************************* Create Window ************************** */

&ANALYZE-SUSPEND _CREATE-WINDOW
IF SESSION:DISPLAY-TYPE = "GUI":U THEN
CREATE WINDOW C-Win ASSIGN
HIDDEN = YES
TITLE = "<insert window title>"
HEIGHT = 16
WIDTH = 80
MAX-HEIGHT = 16
MAX-WIDTH = 80
VIRTUAL-HEIGHT = 16
VIRTUAL-WIDTH = 80
RESIZE = yes
SCROLL-BARS = no
STATUS-AREA = no
BGCOLOR = ?
FGCOLOR = ?
KEEP-FRAME-Z-ORDER = yes
THREE-D = yes
MESSAGE-AREA = no
SENSITIVE = yes.
ELSE {&WINDOW-NAME} = CURRENT-WINDOW.
/* END WINDOW DEFINITION */
&ANALYZE-RESUME



/* *********** Runtime Attributes and AppBuilder Settings *********** */

&ANALYZE-SUSPEND _RUN-TIME-ATTRIBUTES
/* SETTINGS FOR WINDOW C-Win
VISIBLE,,RUN-PERSISTENT */
/* SETTINGS FOR FRAME DEFAULT-FRAME
FRAME-NAME */
IF SESSION:DISPLAY-TYPE = "GUI":U AND VALID-HANDLE(C-Win)
THEN C-Win:HIDDEN = no.

/* _RUN-TIME-ATTRIBUTES-END */
&ANALYZE-RESUME




/* ************************ Control Triggers ************************ */

&Scoped-define SELF-NAME C-Win
&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CONTROL C-Win C-Win
ON END-ERROR OF C-Win /* <insert window title> */
OR ENDKEY OF {&WINDOW-NAME} ANYWHERE DO:
/* This case occurs when the user presses the "Esc" key.
In a persistently run window, just ignore this. If we did not, the
application would exit. */
IF THIS-PROCEDURE:PERSISTENT THEN RETURN NO-APPLY.
END.

/* _UIB-CODE-BLOCK-END */
&ANALYZE-RESUME


&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CONTROL C-Win C-Win
ON WINDOW-CLOSE OF C-Win /* <insert window title> */
DO:
/* This event will close the window and terminate the procedure. */
APPLY "CLOSE":U TO THIS-PROCEDURE.
RETURN NO-APPLY.
END.

/* _UIB-CODE-BLOCK-END */
&ANALYZE-RESUME


&UNDEFINE SELF-NAME

&ANALYZE-SUSPEND _UIB-CODE-BLOCK _CUSTOM _MAIN-BLOCK C-Win


/* *************************** Main Block *************************** */

/* Set CURRENT-WINDOW: this will parent dialog-boxes and frames. */
ASSIGN CURRENT-WINDOW = {&WINDOW-NAME}
THIS-PROCEDURE:CURRENT-WINDOW = {&WINDOW-NAME}.

/* The CLOSE event can be used from inside or outside the procedure to */
/* terminate it. */
ON CLOSE OF THIS-PROCEDURE
RUN disable_UI.

/* Best default for GUI applications is... */
PAUSE 0 BEFORE-HIDE.

/* Now enable the interface and wait for the exit condition. */
/* (NOTE: handle ERROR and END-KEY so cleanup code will always fire. */
MAIN-BLOCK:
DO ON ERROR UNDO MAIN-BLOCK, LEAVE MAIN-BLOCK
ON END-KEY UNDO MAIN-BLOCK, LEAVE MAIN-BLOCK:
RUN enable_UI.
IF NOT THIS-PROCEDURE:PERSISTENT THEN
WAIT-FOR CLOSE OF THIS-PROCEDURE.
END.

/* _UIB-CODE-BLOCK-END */
&ANALYZE-RESUME


/* ********************** Internal Procedures *********************** */

&ANALYZE-SUSPEND _UIB-CODE-BLOCK _PROCEDURE disable_UI C-Win _DEFAULT-DISABLE
PROCEDURE disable_UI :
/*------------------------------------------------------------------------------
Purpose: DISABLE the User Interface
Parameters: <none>
Notes: Here we clean-up the user-interface by deleting
dynamic widgets we have created and/or hide
frames. This procedure is usually called when
we are ready to "clean-up" after running.
------------------------------------------------------------------------------*/
/* Delete the WINDOW we created */
IF SESSION:DISPLAY-TYPE = "GUI":U AND VALID-HANDLE(C-Win)
THEN DELETE WIDGET C-Win.
IF THIS-PROCEDURE:PERSISTENT THEN DELETE PROCEDURE THIS-PROCEDURE.
END PROCEDURE.

/* _UIB-CODE-BLOCK-END */
&ANALYZE-RESUME

&ANALYZE-SUSPEND _UIB-CODE-BLOCK _PROCEDURE enable_UI C-Win _DEFAULT-ENABLE
PROCEDURE enable_UI :
/*------------------------------------------------------------------------------
Purpose: ENABLE the User Interface
Parameters: <none>
Notes: Here we display/view/enable the widgets in the
user-interface. In addition, OPEN all queries
associated with each FRAME and BROWSE.
These statements here are based on the "Other
Settings" section of the widget Property Sheets.
------------------------------------------------------------------------------*/
VIEW FRAME DEFAULT-FRAME IN WINDOW C-Win.
{&OPEN-BROWSERS-IN-QUERY-DEFAULT-FRAME}
VIEW C-Win.
END PROCEDURE.

/* _UIB-CODE-BLOCK-END */
&ANALYZE-RESUME
100 samples/Perl/Any.pm Normal file
@@ -0,0 +1,100 @@
use strict; #-*-cperl-*-
use warnings;

use lib qw( ../../../../lib );

=encoding utf8

=head1 NAME

Algorithm::Evolutionary::Fitness::Any - Façade for any function so that it can be used as fitness

=head1 SYNOPSIS

use Algorithm::Evolutionary::Utils qw( string_decode )

sub squares {
my $chrom = shift;
my @values = string_decode( $chrom, 10, -1, 1 );
return $values[0] * $values[1];
}

my $any_eval = new Algorithm::Evolutionary::Fitness::Any \&squares;


=head1 DESCRIPTION

Turns any subroutine or closure into a fitness function. Useful mainly
if you want results cached; it's not really needed otherwise.

=head1 METHODS

=cut

package Algorithm::Evolutionary::Fitness::Any;

use Carp;

use base 'Algorithm::Evolutionary::Fitness::Base';

our $VERSION = '3.2';

=head2 new( $function )

Assigns default variables

=cut

sub new {
my $class = shift;
my $self = { _function => shift || croak "No functiona rray" };
bless $self, $class;
$self->initialize();
return $self;
}

=head2 apply( $individual )

Applies the instantiated problem to a chromosome. It is actually a
wrapper around C<_apply>.

=cut

sub apply {
my $self = shift;
my $individual = shift || croak "Nobody here!!!";
$self->{'_counter'}++;
return $self->_apply( $individual );
}

=head2 _apply( $individual )

This is the one that really does the stuff. It applies the defined
function to each individual. Itis cached for efficiency.

=cut

sub _apply {
my $self = shift;
my $individual = shift || croak "Nobody here!";
my $chrom = $individual->Chrom();
my $cache = $self->{'_cache'};
if ( $cache->{$chrom} ) {
return $cache->{$chrom};
}
my $result = $self->{'_function'}->($chrom);
if ( (scalar $chrom ) eq $chrom ) {
$cache->{$chrom} = $result;
}
return $result;
}


=head1 Copyright

This file is released under the GPL. See the LICENSE file included in this distribution,
or go to http://www.fsf.org/licenses/gpl.txt

=cut

"What???";
20 samples/Perl/filenames/Makefile.PL Normal file
@@ -0,0 +1,20 @@
use strict;
use warnings;
use ExtUtils::MakeMaker;

WriteMakefile(
NAME => 'Algorithm::Evolutionary::Simple',
AUTHOR => 'JJ Merelo <jj@merelo.net>',
VERSION_FROM => 'lib/Algorithm/Evolutionary/Simple.pm',
ABSTRACT_FROM => 'lib/Algorithm/Evolutionary/Simple.pm',
LICENSE => 'gpl',
EXE_FILES => [ 'script/simple-EA.pl', 'script/maxones.pl'],
PREREQ_PM => {
'Test::More' => 0,
'Carp' => 0,
'Exporter' => 0,
'Sort::Key::Top' => 0
},
dist => { COMPRESS => 'gzip -9f', SUFFIX => 'gz', },
clean => { FILES => 'Algorithm-Evolutionary-Simple-*' },
);
9 samples/Perl/filenames/Rexfile Normal file
@@ -0,0 +1,9 @@
use Rex -feature => ['1.0'];

user "eleccionesugr";
group eleccionesugr => "elecciones-ugr.cloudapp.net";

desc "Install perlbrew";
task "perlbrew", group => "eleccionesugr", sub {
};
Some files were not shown because too many files have changed in this diff.