Merge branch 'master' into go-vendor

* master: (168 commits)
  ruby for example
  Bumping version
  Updating grammars
  Grammar for Less from Atom package
  Remove Less grammar
  Updating to latest perl6 grammar
  Adding Perl6-specific grammar.
  Grammar for YANG from Atom package
  Support for YANG language
  Add detection of GrammarKit-generated files
  Add .xproj to list of XML extensions
  Test submodules are using HTTPS links
  Improved vim modeline detection
  Heuristic for Pod vs. Perl
  Bumping to v4.7.4
  Grammar update
  Support .rs.in as a file extension for Rust files.
  HTTPS links for submodules
  Add the LFE lexer as an example of erlang .xrl
  Add the Elixir parser as an example of erlang .yrl
  ...
Commit by Brandon Keepers, 2016-02-18 20:12:27 -05:00
184 changed files with 27768 additions and 717 deletions

60
.gitmodules vendored

@@ -25,9 +25,6 @@
[submodule "vendor/grammars/Sublime-REBOL"]
path = vendor/grammars/Sublime-REBOL
url = https://github.com/Oldes/Sublime-REBOL
[submodule "vendor/grammars/Sublime-Inform"]
path = vendor/grammars/Sublime-Inform
url = https://github.com/PogiNate/Sublime-Inform
[submodule "vendor/grammars/autoitv3-tmbundle"]
path = vendor/grammars/autoitv3-tmbundle
url = https://github.com/Red-Nova-Technologies/autoitv3-tmbundle
@@ -85,6 +82,9 @@
[submodule "vendor/grammars/language-shellscript"]
path = vendor/grammars/language-shellscript
url = https://github.com/atom/language-shellscript
[submodule "vendor/grammars/language-supercollider"]
path = vendor/grammars/language-supercollider
url = https://github.com/supercollider/language-supercollider
[submodule "vendor/grammars/language-yaml"]
path = vendor/grammars/language-yaml
url = https://github.com/atom/language-yaml
@@ -253,9 +253,6 @@
[submodule "vendor/grammars/SublimeXtend"]
path = vendor/grammars/SublimeXtend
url = https://github.com/staltz/SublimeXtend
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/statatmbundle/Stata.tmbundle
[submodule "vendor/grammars/Vala-TMBundle"]
path = vendor/grammars/Vala-TMBundle
url = https://github.com/technosophos/Vala-TMBundle
@@ -343,9 +340,6 @@
[submodule "vendor/grammars/latex.tmbundle"]
path = vendor/grammars/latex.tmbundle
url = https://github.com/textmate/latex.tmbundle
[submodule "vendor/grammars/less.tmbundle"]
path = vendor/grammars/less.tmbundle
url = https://github.com/textmate/less.tmbundle
[submodule "vendor/grammars/lilypond.tmbundle"]
path = vendor/grammars/lilypond.tmbundle
url = https://github.com/textmate/lilypond.tmbundle
@@ -649,7 +643,7 @@
url = https://github.com/SRI-CSL/SMT.tmbundle.git
[submodule "vendor/grammars/language-crystal"]
path = vendor/grammars/language-crystal
url = https://github.com/k2b6s9j/language-crystal
url = https://github.com/atom-crystal/language-crystal
[submodule "vendor/grammars/language-xbase"]
path = vendor/grammars/language-xbase
url = https://github.com/hernad/atom-language-harbour
@@ -658,7 +652,7 @@
url = https://github.com/rpavlick/language-ncl.git
[submodule "vendor/grammars/atom-language-purescript"]
path = vendor/grammars/atom-language-purescript
url = https://github.com/freebroccolo/atom-language-purescript
url = https://github.com/purescript-contrib/atom-language-purescript
[submodule "vendor/grammars/vue-syntax-highlight"]
path = vendor/grammars/vue-syntax-highlight
url = https://github.com/vuejs/vue-syntax-highlight
@@ -674,6 +668,48 @@
[submodule "vendor/grammars/sublime-typescript"]
path = vendor/grammars/sublime-typescript
url = https://github.com/Microsoft/TypeScript-Sublime-Plugin
[submodule "vendor/grammars/sublime-pony"]
path = vendor/grammars/sublime-pony
url = https://github.com/CausalityLtd/sublime-pony
[submodule "vendor/grammars/X10"]
path = vendor/grammars/X10
url = git@github.com:x10-lang/x10-highlighting.git
url = https://github.com/x10-lang/x10-highlighting
[submodule "vendor/grammars/language-babel"]
path = vendor/grammars/language-babel
url = https://github.com/gandm/language-babel
[submodule "vendor/grammars/UrWeb-Language-Definition"]
path = vendor/grammars/UrWeb-Language-Definition
url = https://github.com/gwalborn/UrWeb-Language-Definition.git
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/pschumm/Stata.tmbundle
[submodule "vendor/grammars/FreeMarker.tmbundle"]
path = vendor/grammars/FreeMarker.tmbundle
url = https://github.com/freemarker/FreeMarker.tmbundle
[submodule "vendor/grammars/MagicPython"]
path = vendor/grammars/MagicPython
url = https://github.com/MagicStack/MagicPython
[submodule "vendor/grammars/language-click"]
path = vendor/grammars/language-click
url = https://github.com/stenverbois/language-click.git
[submodule "vendor/grammars/language-maxscript"]
path = vendor/grammars/language-maxscript
url = https://github.com/Alhadis/language-maxscript
[submodule "vendor/grammars/language-renpy"]
path = vendor/grammars/language-renpy
url = https://github.com/williamd1k0/language-renpy.git
[submodule "vendor/grammars/language-inform7"]
path = vendor/grammars/language-inform7
url = https://github.com/erkyrath/language-inform7
[submodule "vendor/grammars/atom-language-stan"]
path = vendor/grammars/atom-language-stan
url = https://github.com/jrnold/atom-language-stan
[submodule "vendor/grammars/language-yang"]
path = vendor/grammars/language-yang
url = https://github.com/DzonyKalafut/language-yang.git
[submodule "vendor/grammars/perl6fe"]
path = vendor/grammars/perl6fe
url = https://github.com/MadcapJake/language-perl6fe.git
[submodule "vendor/grammars/language-less"]
path = vendor/grammars/language-less
url = https://github.com/atom/language-less.git

.travis.yml

@@ -1,6 +1,12 @@
language: ruby
sudo: false
addons:
apt:
packages:
- libicu-dev
- libicu48
before_install: script/travis/before_install
rvm:
- 1.9.3
- 2.0.0
- 2.1
- 2.2

CONTRIBUTING.md

@@ -12,7 +12,7 @@ We try only to add new extensions once they have some usage on GitHub. In most c
To add support for a new extension:
0. Add your extension to the language entry in [`languages.yml`][languages].
0. Add your extension to the language entry in [`languages.yml`][languages], keeping the extensions in alphabetical order.
0. Add at least one sample for your extension to the [samples directory][samples] in the correct subdirectory.
0. Open a pull request, linking to a [GitHub search result](https://github.com/search?utf8=%E2%9C%93&q=extension%3Aboot+NOT+nothack&type=Code&ref=searchresults) showing in-the-wild usage.
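When adding a sample, it helps to sanity-check detection locally before opening the pull request; assuming a development checkout, something along the lines of `bundle exec bin/linguist samples/<Language>/your-sample.ext` should report the expected language.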

329
HACKING.rst.txt Normal file

@@ -0,0 +1,329 @@
Contributing to SciPy
=====================
This document aims to give an overview of how to contribute to SciPy. It
tries to answer commonly asked questions, and provide some insight into how the
community process works in practice. Readers who are familiar with the SciPy
community and are experienced Python coders may want to jump straight to the
`git workflow`_ documentation.
Contributing new code
---------------------
If you have been working with the scientific Python toolstack for a while, you
probably have some code lying around of which you think "this could be useful
for others too". Perhaps it's a good idea then to contribute it to SciPy or
another open source project. The first question to ask is then, where does
this code belong? That question is hard to answer here, so we start with a
more specific one: *what code is suitable for putting into SciPy?*
Almost all new code added to SciPy has in common that it's potentially
useful in multiple scientific domains and fits within the scope of existing
SciPy submodules. In principle new submodules can be added too, but this is
far less common. For code that is specific to a single application, there may
be an existing project that can use the code. Some scikits (`scikit-learn`_,
`scikits-image`_, `statsmodels`_, etc.) are good examples here; they have a
narrower focus and because of that more domain-specific code than SciPy.
Now if you have code that you would like to see included in SciPy, how do you
go about it? After checking that your code can be distributed in SciPy under a
compatible license (see FAQ for details), the first step is to discuss on the
scipy-dev mailing list. All new features, as well as changes to existing code,
are discussed and decided on there. You can, and probably should, already
start this discussion before your code is finished.
Assuming the outcome of the discussion on the mailing list is positive and you
have a function or piece of code that does what you need it to do, what next?
Before code is added to SciPy, it at least has to have good documentation, unit
tests and correct code style.
1. Unit tests
In principle you should aim to create unit tests that exercise all the code
that you are adding. This gives some degree of confidence that your code
runs correctly, also on Python versions and hardware or OSes that you don't
have available yourself. An extensive description of how to write unit
tests is given in the NumPy `testing guidelines`_.
2. Documentation
Clear and complete documentation is essential in order for users to be able
to find and understand the code. Documentation for individual functions
and classes -- which includes at least a basic description, type and
meaning of all parameters and return values, and usage examples in
`doctest`_ format -- is put in docstrings. Those docstrings can be read
within the interpreter, and are compiled into a reference guide in html and
pdf format. Higher-level documentation for key (areas of) functionality is
provided in tutorial format and/or in module docstrings. A guide on how to
write documentation is given in `how to document`_.
3. Code style
Uniformity of style in which code is written is important to others trying
to understand the code. SciPy follows the standard Python guidelines for
code style, `PEP8`_. In order to check that your code conforms to PEP8,
you can use the `pep8 package`_ style checker. Most IDEs and text editors
have settings that can help you follow PEP8, for example by translating
tabs by four spaces. Using `pyflakes`_ to check your code is also a good
idea.
At the end of this document a checklist is given that may help to check if your
code fulfills all requirements for inclusion in SciPy.
Another question you may have is: *where exactly do I put my code*? To answer
this, it is useful to understand how the SciPy public API (application
programming interface) is defined. For most modules the API is two levels
deep, which means your new function should appear as
``scipy.submodule.my_new_func``. ``my_new_func`` can be put in an existing or
new file under ``/scipy/<submodule>/``, its name is added to the ``__all__``
list in that file (which lists all public functions in the file), and those
public functions are then imported in ``/scipy/<submodule>/__init__.py``. Any
private functions/classes should have a leading underscore (``_``) in their
name. A more detailed description of what the public API of SciPy is, is given
in `SciPy API`_.
Once you think your code is ready for inclusion in SciPy, you can send a pull
request (PR) on Github. We won't go into the details of how to work with git
here, this is described well in the `git workflow`_ section of the NumPy
documentation and in the Github help pages. When you send the PR for a new
feature, be sure to also mention this on the scipy-dev mailing list. This can
prompt interested people to help review your PR. Assuming that you already got
positive feedback before on the general idea of your code/feature, the purpose
of the code review is to ensure that the code is correct, efficient and meets
the requirements outlined above. In many cases the code review happens
relatively quickly, but it's possible that it stalls. If you have addressed
all feedback already given, it's perfectly fine to ask on the mailing list
again for review (after a reasonable amount of time, say a couple of weeks, has
passed). Once the review is completed, the PR is merged into the "master"
branch of SciPy.
The above describes the requirements and process for adding code to SciPy. It
doesn't yet answer the question, though, of how exactly decisions are made. The
basic answer is: decisions are made by consensus, by everyone who chooses to
participate in the discussion on the mailing list. This includes developers,
other users and yourself. Aiming for consensus in the discussion is important
-- SciPy is a project by and for the scientific Python community. In those
rare cases that agreement cannot be reached, the `maintainers`_ of the module
in question can decide the issue.
Contributing by helping maintain existing code
----------------------------------------------
The previous section talked specifically about adding new functionality to
SciPy. A large part of that discussion also applies to maintenance of existing
code. Maintenance means fixing bugs, improving code quality or style,
documenting existing functionality better, adding missing unit tests, keeping
build scripts up-to-date, etc. The SciPy `Trac`_ bug tracker contains all
reported bugs, build/documentation issues, etc. Fixing issues described in
Trac tickets helps improve the overall quality of SciPy, and is also a good way
of getting familiar with the project. You may also want to fix a bug because
you ran into it and need the function in question to work correctly.
The discussion on code style and unit testing above applies equally to bug
fixes. It is usually best to start by writing a unit test that shows the
problem, i.e. it should pass but doesn't. Once you have that, you can fix the
code so that the test does pass. That should be enough to send a PR for this
issue. Unlike when adding new code, discussing this on the mailing list may
not be necessary - if the old behavior of the code is clearly incorrect, no one
will object to having it fixed. It may be necessary to add some warning or
deprecation message for the changed behavior. This should be part of the
review process.
Other ways to contribute
------------------------
There are many ways to contribute other than contributing code. Participating
in discussions on the scipy-user and scipy-dev *mailing lists* is a contribution
in itself. The `scipy.org`_ *website* contains a lot of information on the
SciPy community and can always use a new pair of hands. A redesign of this
website is ongoing, see `scipy.github.com`_. The redesigned website is a
static site based on Sphinx, the sources for it are
also on Github at `scipy.org-new`_.
The SciPy *documentation* is constantly being improved by many developers and
users. You can contribute by sending a PR on Github that improves the
documentation, but there's also a `documentation wiki`_ that is very convenient
for making edits to docstrings (and doesn't require git knowledge). Anyone can
register a username on that wiki, ask on the scipy-dev mailing list for edit
rights and make edits. The documentation there is updated every day with the
latest changes in the SciPy master branch, and wiki edits are regularly
reviewed and merged into master. Another advantage of the documentation wiki
is that you can immediately see how the reStructuredText (reST) of docstrings
and other docs is rendered as html, so you can easily catch formatting errors.
Code that doesn't belong in SciPy itself or in another package but helps users
accomplish a certain task is valuable. `SciPy Central`_ is the place to share
this type of code (snippets, examples, plotting code, etc.).
Useful links, FAQ, checklist
----------------------------
Checklist before submitting a PR
````````````````````````````````
- Are there unit tests with good code coverage?
- Do all public functions have docstrings including examples?
- Is the code style correct (PEP8, pyflakes)?
- Is the new functionality tagged with ``.. versionadded:: X.Y.Z`` (with
X.Y.Z the version number of the next release - can be found in setup.py)?
- Is the new functionality mentioned in the release notes of the next
release?
- Is the new functionality added to the reference guide?
- In case of larger additions, is there a tutorial or more extensive
module-level description?
- In case compiled code is added, is it integrated correctly via setup.py
(and preferably also Bento/Numscons configuration files)?
- If you are a first-time contributor, did you add yourself to THANKS.txt?
Please note that this is perfectly normal and desirable - the aim is to
give every single contributor credit, and if you don't add yourself it's
simply extra work for the reviewer (or worse, the reviewer may forget).
- Did you check that the code can be distributed under a BSD license?
Useful SciPy documents
``````````````````````
- The `how to document`_ guidelines
- NumPy/SciPy `testing guidelines`_
- `SciPy API`_
- SciPy `maintainers`_
- NumPy/SciPy `git workflow`_
FAQ
```
*I based my code on existing Matlab/R/... code I found online, is this OK?*
It depends. SciPy is distributed under a BSD license, so if the code that you
based your code on is also BSD licensed or has a BSD-compatible license (MIT,
Apache, ...) then it's OK. Code which is GPL-licensed, has no clear license,
requires citation or is free for academic use only can't be included in SciPy.
Therefore if you copied existing code with such a license or made a direct
translation to Python of it, your code can't be included. See also `license
compatibility`_.
*How do I set up SciPy so I can edit files, run the tests and make commits?*
The simplest method is setting up an in-place build. To create your local git
repo and do the in-place build::
$ git clone https://github.com/scipy/scipy.git scipy
$ cd scipy
$ python setup.py build_ext -i
Then you need to either set up a symlink in your site-packages or add this
directory to your PYTHONPATH environment variable, so Python can find it. Some
IDEs (Spyder for example) have utilities to manage PYTHONPATH. On Linux and OS
X, you can for example edit your .bash_login file to automatically add this dir
on startup of your terminal. Add the line::
export PYTHONPATH="$HOME/scipy:${PYTHONPATH}"
Alternatively, to set up the symlink, use (prefix only necessary if you want to
use your local instead of global site-packages dir)::
$ python setupegg.py develop --prefix=${HOME}
To test that everything works, start the interpreter (not inside the scipy/
source dir) and run the tests::
$ python
>>> import scipy as sp
>>> sp.test()
Now editing a Python source file in SciPy allows you to immediately test and
use your changes, by simply restarting the interpreter.
Note that while the above procedure is the most straightforward way to get
started, you may want to look into using Bento or numscons for faster and more
flexible building, or virtualenv to maintain development environments for
multiple Python versions.
*How do I set up a development version of SciPy in parallel to a released
version that I use to do my job/research?*
One simple way to achieve this is to install the released version in
site-packages, by using a binary installer or pip for example, and set up the
development version with an in-place build in a virtualenv. First install
`virtualenv`_ and `virtualenvwrapper`_, then create your virtualenv (named
scipy-dev here) with::
$ mkvirtualenv scipy-dev
Now, whenever you want to switch to the virtual environment, you can use the
command ``workon scipy-dev``, while the command ``deactivate`` exits from the
virtual environment and brings back your previous shell. With scipy-dev
activated, follow the in-place build with the symlink install above to actually
install your development version of SciPy.
*Can I use a programming language other than Python to speed up my code?*
Yes. The languages used in SciPy are Python, Cython, C, C++ and Fortran. All
of these have their pros and cons. If Python really doesn't offer enough
performance, one of those languages can be used. Important concerns when
using compiled languages are maintainability and portability. For
maintainability, Cython is clearly preferred over C/C++/Fortran. Cython and C
are more portable than C++/Fortran. A lot of the existing C and Fortran code
in SciPy is older, battle-tested code that was only wrapped in (but not
specifically written for) Python/SciPy. Therefore the basic advice is: use
Cython. If there's specific reasons why C/C++/Fortran should be preferred,
please discuss those reasons first.
*There's overlap between Trac and Github, which do I use for what?*
Trac_ is the bug tracker, Github_ the code repository. Before the SciPy code
repository moved to Github, the preferred way to contribute code was to create
a patch and attach it to a Trac ticket. The overhead of this approach is much
larger than sending a PR on Github, so please don't do this anymore. Use Trac
for bug reports, Github for patches.
.. _scikit-learn: http://scikit-learn.org
.. _scikits-image: http://scikits-image.org/
.. _statsmodels: http://statsmodels.sourceforge.net/
.. _testing guidelines: https://github.com/numpy/numpy/blob/master/doc/TESTS.rst.txt
.. _how to document: https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt
.. _PEP8: http://www.python.org/dev/peps/pep-0008/
.. _pep8 package: http://pypi.python.org/pypi/pep8
.. _pyflakes: http://pypi.python.org/pypi/pyflakes
.. _SciPy API: http://docs.scipy.org/doc/scipy/reference/api.html
.. _git workflow: http://docs.scipy.org/doc/numpy/dev/gitwash/index.html
.. _maintainers: https://github.com/scipy/scipy/blob/master/doc/MAINTAINERS.rst.txt
.. _Trac: http://projects.scipy.org/scipy/timeline
.. _Github: https://github.com/scipy/scipy
.. _scipy.org: http://scipy.org/
.. _scipy.github.com: http://scipy.github.com/
.. _scipy.org-new: https://github.com/scipy/scipy.org-new
.. _documentation wiki: http://docs.scipy.org/scipy/Front%20Page/
.. _SciPy Central: http://scipy-central.org/
.. _license compatibility: http://www.scipy.org/License_Compatibility
.. _doctest: http://www.doughellmann.com/PyMOTW/doctest/
.. _virtualenv: http://www.virtualenv.org/
.. _virtualenvwrapper: http://www.doughellmann.com/projects/virtualenvwrapper/

LICENSE

@@ -1,4 +1,4 @@
Copyright (c) 2011-2015 GitHub, Inc.
Copyright (c) 2011-2016 GitHub, Inc.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation

README.md

@@ -13,11 +13,11 @@ See [Troubleshooting](#troubleshooting) and [`CONTRIBUTING.md`](/CONTRIBUTING.md
![language stats bar](https://cloud.githubusercontent.com/assets/173/5562290/48e24654-8ddf-11e4-8fe7-735b0ce3a0d3.png)
The Language stats bar is built by aggregating the languages of each file in that repository. If it is reporting a language that you don't expect:
The Language stats bar displays languages percentages for the files in the repository. The percentages are calculated based on the bytes of code for each language as reported by the [List Languages](https://developer.github.com/v3/repos/#list-languages) API. If the bar is reporting a language that you don't expect:
0. Click on the name of the language in the stats bar to see a list of the files that are identified as that language.
0. If you see files that you didn't write, consider moving the files into one of the [paths for vendored code](/lib/linguist/vendor.yml), or use the [manual overrides](#overrides) feature to ignore them.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you an add, especially links to public repositories, is helpful.
0. If the files are being misclassified, search for [open issues][issues] to see if anyone else has already reported the issue. Any information you can add, especially links to public repositories, is helpful.
0. If there are no reported issues of this misclassification, [open an issue][new-issue] and include a link to the repository or a sample of the code that is being misclassified.
## Overrides
@@ -33,9 +33,9 @@ $ cat .gitattributes
*.rb linguist-language=Java
```
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Vendored files are also hidden by default in diffs on github.com.
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.
Use the `linguist-vendored` attribute to vendor or un-vendor paths. Please note, overriding the vendored (or un-vendored) status of a file only affects the language statistics for the repository and not the behavior in diffs on github.com.
Use the `linguist-vendored` attribute to vendor or un-vendor paths.
```
$ cat .gitattributes
@@ -59,6 +59,9 @@ Alternatively, you can use Vim or Emacs style modelines to set the language for
##### Vim
```
# Some examples of various styles:
vim: syntax=java
vim: set syntax=ruby:
vim: set filetype=prolog:
vim: set ft=cpp:
```

138
bin/git-linguist Executable file

@@ -0,0 +1,138 @@
#!/usr/bin/env ruby
require 'linguist'
require 'rugged'
require 'optparse'
require 'json'
require 'tmpdir'
require 'zlib'
class GitLinguist
  def initialize(path, commit_oid, incremental = true)
    @repo_path = path
    @commit_oid = commit_oid
    @incremental = incremental
  end

  def linguist
    if @commit_oid.nil?
      raise "git-linguist must be called with a specific commit OID to perform language computation"
    end
    repo = Linguist::Repository.new(rugged, @commit_oid)

    if @incremental && stats = load_language_stats
      old_commit_oid, old_stats = stats

      # A cache with a NULL OID means that we want to freeze
      # these language stats in place and stop computing
      # them (for performance reasons)
      return old_stats if old_commit_oid == NULL_OID
      repo.load_existing_stats(old_commit_oid, old_stats)
    end

    result = yield repo

    save_language_stats(@commit_oid, repo.cache)
    result
  end

  def load_language_stats
    version, oid, stats = load_cache
    if version == LANGUAGE_STATS_CACHE_VERSION && oid && stats
      [oid, stats]
    end
  end

  def save_language_stats(oid, stats)
    cache = [LANGUAGE_STATS_CACHE_VERSION, oid, stats]
    write_cache(cache)
  end

  def clear_language_stats
    File.unlink(cache_file)
  rescue Errno::ENOENT
  end

  def disable_language_stats
    save_language_stats(NULL_OID, {})
  end

  protected

  NULL_OID = ("0" * 40).freeze

  LANGUAGE_STATS_CACHE = 'language-stats.cache'
  LANGUAGE_STATS_CACHE_VERSION = "v3:#{Linguist::VERSION}"

  def rugged
    @rugged ||= Rugged::Repository.bare(@repo_path)
  end

  def cache_file
    File.join(@repo_path, LANGUAGE_STATS_CACHE)
  end

  def write_cache(object)
    return unless File.directory? @repo_path

    begin
      tmp_path = Dir::Tmpname.make_tmpname(cache_file, nil)
      File.open(tmp_path, "wb") do |f|
        marshal = Marshal.dump(object)
        f.write(Zlib::Deflate.deflate(marshal))
      end
      File.rename(tmp_path, cache_file)
    rescue => e
      (File.unlink(tmp_path) rescue nil)
      raise e
    end
  end

  def load_cache
    marshal = File.open(cache_file, "rb") { |f| Zlib::Inflate.inflate(f.read) }
    Marshal.load(marshal)
  rescue SystemCallError, ::Zlib::DataError, ::Zlib::BufError, TypeError
    nil
  end
end

def git_linguist(args)
  incremental = true
  commit = nil

  parser = OptionParser.new do |opts|
    opts.banner = "Usage: git-linguist [OPTIONS] stats|breakdown|dump-cache|clear|disable"

    opts.on("-f", "--force", "Force a full rescan") { incremental = false }
    opts.on("--commit=COMMIT", "Commit to index") { |v| commit = v }
  end
  parser.parse!(args)

  git_dir = `git rev-parse --git-dir`.strip
  raise "git-linguist must be run in a Git repository" unless $?.success?
  wrapper = GitLinguist.new(git_dir, commit, incremental)

  case args.pop
  when "stats"
    wrapper.linguist do |linguist|
      puts JSON.dump(linguist.languages)
    end
  when "breakdown"
    wrapper.linguist do |linguist|
      puts JSON.dump(linguist.breakdown_by_file)
    end
  when "dump-cache"
    puts JSON.dump(wrapper.load_language_stats)
  when "clear"
    wrapper.clear_language_stats
  when "disable"
    wrapper.disable_language_stats
  else
    $stderr.print(parser.help)
    exit 1
  end
end
git_linguist(ARGV)
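A hedged usage sketch for the new executable: run inside a repository, `git-linguist --commit=$(git rev-parse HEAD) stats` prints the language byte counts as JSON, `--force` bypasses the incremental cache, and because the subcommand is read with `args.pop` it must come last on the command line.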

github-linguist.gemspec

@@ -10,8 +10,8 @@ Gem::Specification.new do |s|
s.homepage = "https://github.com/github/linguist"
s.license = "MIT"
s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb']
s.executables << 'linguist'
s.files = Dir['lib/**/*'] - ['lib/linguist/grammars.rb'] + ['LICENSE']
s.executables = ['linguist', 'git-linguist']
s.add_dependency 'charlock_holmes', '~> 0.7.3'
s.add_dependency 'escape_utils', '~> 1.1.0'
@@ -24,6 +24,6 @@ Gem::Specification.new do |s|
s.add_development_dependency 'rake'
s.add_development_dependency 'yajl-ruby'
s.add_development_dependency 'color-proximity', '~> 0.2.1'
s.add_development_dependency 'licensee', '~> 4.7.4'
s.add_development_dependency 'licensee', '6.0.0b1'
end

grammars.yml

@@ -42,6 +42,8 @@ vendor/grammars/Docker.tmbundle:
- source.dockerfile
vendor/grammars/Elm.tmLanguage:
- source.elm
vendor/grammars/FreeMarker.tmbundle:
- text.html.ftl
vendor/grammars/G-Code/:
- source.LS
- source.MCPOST
@@ -67,6 +69,9 @@ vendor/grammars/Lean.tmbundle:
- source.lean
vendor/grammars/LiveScript.tmbundle:
- source.livescript
vendor/grammars/MagicPython:
- source.python
- source.regexp.python
vendor/grammars/Modelica/:
- source.modelica
vendor/grammars/NSIS:
@@ -92,7 +97,7 @@ vendor/grammars/Scalate.tmbundle:
- text.html.ssp
vendor/grammars/Slash.tmbundle:
- text.html.slash
vendor/grammars/Stata.tmbundle:
vendor/grammars/Stata.tmbundle/:
- source.mata
- source.stata
vendor/grammars/Stylus/:
@@ -101,8 +106,6 @@ vendor/grammars/Sublime-Coq:
- source.coq
vendor/grammars/Sublime-HTTP:
- source.httpspec
vendor/grammars/Sublime-Inform:
- source.Inform7
vendor/grammars/Sublime-Lasso:
- file.lasso
vendor/grammars/Sublime-Logos:
@@ -140,6 +143,8 @@ vendor/grammars/TXL/:
- source.txl
vendor/grammars/Textmate-Gosu-Bundle:
- source.gosu.2
vendor/grammars/UrWeb-Language-Definition:
- source.ur
vendor/grammars/VBDotNetSyntax:
- source.vbnet
vendor/grammars/Vala-TMBundle:
@@ -178,8 +183,13 @@ vendor/grammars/assembly.tmbundle:
- source.x86asm
vendor/grammars/atom-fsharp/:
- source.fsharp
- source.fsharp.fsi
- source.fsharp.fsl
- source.fsharp.fsx
vendor/grammars/atom-language-purescript/:
- source.purescript
vendor/grammars/atom-language-stan/:
- source.stan
vendor/grammars/atom-salt:
- source.python.salt
- source.yaml.salt
@@ -297,8 +307,8 @@ vendor/grammars/io.tmbundle:
vendor/grammars/ioke-outdated:
- source.ioke
vendor/grammars/jade-tmbundle:
- source.jade
- source.pyjade
- text.jade
vendor/grammars/jasmin-sublime:
- source.jasmin
vendor/grammars/java.tmbundle:
@@ -314,6 +324,11 @@ vendor/grammars/json.tmbundle:
- source.json
vendor/grammars/kotlin-sublime-package:
- source.Kotlin
vendor/grammars/language-babel/:
- source.js.jsx
- source.regexp.babel
vendor/grammars/language-click/:
- source.click
vendor/grammars/language-clojure:
- source.clojure
vendor/grammars/language-coffee-script:
@@ -330,6 +345,8 @@ vendor/grammars/language-gfm:
- source.gfm
vendor/grammars/language-hy:
- source.hy
vendor/grammars/language-inform7:
- source.inform7
vendor/grammars/language-javascript:
- source.js
- source.js.regexp
@@ -337,20 +354,28 @@ vendor/grammars/language-javascript:
vendor/grammars/language-jsoniq/:
- source.jq
- source.xq
vendor/grammars/language-less/:
- source.css.less
vendor/grammars/language-maxscript:
- source.maxscript
vendor/grammars/language-ncl:
- source.ncl
vendor/grammars/language-python:
- source.python
- source.regexp.python
- text.python.console
- text.python.traceback
vendor/grammars/language-renpy:
- source.renpy
vendor/grammars/language-shellscript:
- source.shell
- text.shell-session
vendor/grammars/language-supercollider:
- source.supercollider
vendor/grammars/language-xbase:
- source.harbour
vendor/grammars/language-yaml:
- source.yaml
vendor/grammars/language-yang/:
- source.yang
vendor/grammars/latex.tmbundle:
- text.bibtex
- text.log.latex
@@ -358,8 +383,6 @@ vendor/grammars/latex.tmbundle:
- text.tex.latex
- text.tex.latex.beamer
- text.tex.latex.memoir
vendor/grammars/less.tmbundle:
- source.css.less
vendor/grammars/lilypond.tmbundle:
- source.lilypond
vendor/grammars/liquid.tmbundle:
@@ -422,6 +445,10 @@ vendor/grammars/pascal.tmbundle:
vendor/grammars/perl.tmbundle/:
- source.perl
- source.perl.6
vendor/grammars/perl6fe:
- source.meta-info
- source.perl6fe
- source.regexp.perl6fe
vendor/grammars/php-smarty.tmbundle:
- text.html.smarty
vendor/grammars/php.tmbundle:
@@ -507,6 +534,8 @@ vendor/grammars/sublime-nix:
vendor/grammars/sublime-opal/:
- source.opal
- source.opalsysdefs
vendor/grammars/sublime-pony:
- source.pony
vendor/grammars/sublime-robot-plugin:
- text.robot
vendor/grammars/sublime-rust:

lib/linguist.rb

@@ -13,8 +13,8 @@ class << Linguist
def instrument(*args, &bk)
if instrumenter
instrumenter.instrument(*args, &bk)
else
yield if block_given?
elsif block_given?
yield
end
end
end
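The change above only reorders the fallback: the block now runs when no instrumenter is set. A minimal sketch of the contract, assuming only that an instrumenter responds to `instrument(name, payload)` with a block (the `StdoutInstrumenter` class and the event name are hypothetical):

```ruby
require 'linguist'

# Hypothetical instrumenter: anything responding to #instrument(name, payload) { ... }.
class StdoutInstrumenter
  def instrument(name, payload = {})
    puts "-> #{name} #{payload.inspect}"
    yield if block_given?  # run the instrumented work and return its result
  end
end

Linguist.instrumenter = StdoutInstrumenter.new
Linguist.instrument("linguist.detection", :path => "foo.rb") { :detected }
# With no instrumenter set, the same call simply yields and returns :detected.
```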

73
lib/linguist/blob.rb Normal file

@@ -0,0 +1,73 @@
require 'linguist/blob_helper'
module Linguist
  # A Blob is a wrapper around the content of a file to make it quack
  # like a Grit::Blob. It provides the basic interface: `name`,
  # `data`, `path` and `size`.
  class Blob
    include BlobHelper

    # Public: Initialize a new Blob.
    #
    # path    - A path String (does not necessarily exist on the file system).
    # content - Content of the file.
    #
    # Returns a Blob.
    def initialize(path, content)
      @path = path
      @content = content
    end

    # Public: Filename
    #
    # Examples
    #
    #   Blob.new("/path/to/linguist/lib/linguist.rb", "").path
    #   # => "/path/to/linguist/lib/linguist.rb"
    #
    # Returns a String
    attr_reader :path

    # Public: File name
    #
    # Returns a String
    def name
      File.basename(@path)
    end

    # Public: File contents.
    #
    # Returns a String.
    def data
      @content
    end

    # Public: Get byte size
    #
    # Returns an Integer.
    def size
      @content.bytesize
    end

    # Public: Get file extension.
    #
    # Returns a String.
    def extension
      extensions.last || ""
    end

    # Public: Return an array of the file extensions
    #
    #   >> Linguist::Blob.new("app/views/things/index.html.erb", "").extensions
    #   => [".html.erb", ".erb"]
    #
    # Returns an Array
    def extensions
      basename, *segments = name.downcase.split(".")

      segments.map.with_index do |segment, index|
        "." + segments[index..-1].join(".")
      end
    end
  end
end
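A hedged usage sketch for the new in-memory blob (assuming `Linguist.detect` from this release accepts anything exposing the blob interface above):

```ruby
require 'linguist'

blob = Linguist::Blob.new("lib/example.rb", "puts 'hello'\n")
blob.name        # => "example.rb"
blob.extensions  # => [".rb"]
blob.size        # => 13

language = Linguist.detect(blob)  # assumption: detection only needs the blob interface
language && language.name         # => "Ruby"
```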

lib/linguist/file_blob.rb

@@ -1,10 +1,11 @@
require 'linguist/blob_helper'
require 'linguist/blob'
module Linguist
# A FileBlob is a wrapper around a File object to make it quack
# like a Grit::Blob. It provides the basic interface: `name`,
# `data`, `path` and `size`.
class FileBlob
class FileBlob < Blob
include BlobHelper
# Public: Initialize a new FileBlob from a path
@@ -18,20 +19,6 @@ module Linguist
@path = base_path ? path.sub("#{base_path}/", '') : path
end
# Public: Filename
#
# Examples
#
# FileBlob.new("/path/to/linguist/lib/linguist.rb").path
# # => "/path/to/linguist/lib/linguist.rb"
#
# FileBlob.new("/path/to/linguist/lib/linguist.rb",
# "/path/to/linguist").path
# # => "lib/linguist.rb"
#
# Returns a String
attr_reader :path
# Public: Read file permissions
#
# Returns a String like '100644'
@@ -39,13 +26,6 @@ module Linguist
File.stat(@fullpath).mode.to_s(8)
end
# Public: File name
#
# Returns a String
def name
File.basename(@fullpath)
end
# Public: Read file contents.
#
# Returns a String.
@@ -59,26 +39,5 @@ module Linguist
def size
File.size(@fullpath)
end
# Public: Get file extension.
#
# Returns a String.
def extension
extensions.last || ""
end
# Public: Return an array of the file extensions
#
# >> Linguist::FileBlob.new("app/views/things/index.html.erb").extensions
# => [".html.erb", ".erb"]
#
# Returns an Array
def extensions
basename, *segments = name.downcase.split(".")
segments.map.with_index do |segment, index|
"." + segments[index..-1].join(".")
end
end
end
end

lib/linguist/generated.rb

@@ -72,7 +72,10 @@ module Linguist
generated_jni_header? ||
vcr_cassette? ||
generated_module? ||
generated_unity3d_meta?
generated_unity3d_meta? ||
generated_racc? ||
generated_jflex? ||
generated_grammarkit?
end
# Internal: Is the blob an Xcode file?
@@ -242,22 +245,26 @@ module Linguist
return lines[0].include?("Code generated by")
end
PROTOBUF_EXTENSIONS = ['.py', '.java', '.h', '.cc', '.cpp']
# Internal: Is the blob a C++, Java or Python source file generated by the
# Protocol Buffer compiler?
#
# Returns true or false.
def generated_protocol_buffer?
return false unless ['.py', '.java', '.h', '.cc', '.cpp'].include?(extname)
return false unless PROTOBUF_EXTENSIONS.include?(extname)
return false unless lines.count > 1
return lines[0].include?("Generated by the protocol buffer compiler. DO NOT EDIT!")
end
APACHE_THRIFT_EXTENSIONS = ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp']
# Internal: Is the blob generated by Apache Thrift compiler?
#
# Returns true or false
def generated_apache_thrift?
return false unless ['.rb', '.py', '.go', '.js', '.m', '.java', '.h', '.cc', '.cpp'].include?(extname)
return false unless APACHE_THRIFT_EXTENSIONS.include?(extname)
return false unless lines.count > 1
return lines[0].include?("Autogenerated by Thrift Compiler") || lines[1].include?("Autogenerated by Thrift Compiler")
@@ -366,5 +373,45 @@ module Linguist
return false unless lines.count > 1
return lines[0].include?("fileFormatVersion: ")
end
# Internal: Is this a Racc-generated file?
#
# A Racc-generated file contains:
# # This file is automatically generated by Racc x.y.z
# on the third line.
#
# Returns true or false
def generated_racc?
return false unless extname == '.rb'
return false unless lines.count > 2
return lines[2].start_with?("# This file is automatically generated by Racc")
end
# Internal: Is this a JFlex-generated file?
#
# A JFlex-generated file contains:
# /* The following code was generated by JFlex x.y.z on d/at/e ti:me */
# on the first line.
#
# Returns true or false
def generated_jflex?
return false unless extname == '.java'
return false unless lines.count > 1
return lines[0].start_with?("/* The following code was generated by JFlex ")
end
# Internal: Is this a GrammarKit-generated file?
#
# A GrammarKit-generated file typically contains:
# // This is a generated file. Not intended for manual editing.
# on the first line. This is not always the case, as it's possible to
# customize the class header.
#
# Returns true or false
def generated_grammarkit?
return false unless extname == '.java'
return false unless lines.count > 1
return lines[0].start_with?("// This is a generated file. Not intended for manual editing.")
end
end
end
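A minimal sketch of how these new predicates are reached, assuming the `Generated.generated?(path, data_proc)` entry point that `BlobHelper#generated?` delegates to:

```ruby
require 'linguist'

racc_output = <<-RUBY
#
# DO NOT MODIFY!!!!
# This file is automatically generated by Racc 1.4.11
#
RUBY

# Line index 2 starts with the Racc banner, so generated_racc? fires.
Linguist::Generated.generated?("parser.rb", lambda { racc_output })  # => true
```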

lib/linguist/heuristics.rb

@@ -56,7 +56,8 @@ module Linguist
# Internal: Check if this heuristic matches the candidate languages.
def matches?(filename)
@extensions.any? { |ext| filename.downcase.end_with?(ext) }
filename = filename.downcase
@extensions.any? { |ext| filename.end_with?(ext) }
end
# Internal: Perform the heuristic
@@ -65,7 +66,7 @@ module Linguist
end
# Common heuristics
ObjectiveCRegex = /^[ \t]*@(interface|class|protocol|property|end|synchronized|selector|implementation)\b/
ObjectiveCRegex = /^\s*(@(interface|class|protocol|property|end|synchronised|selector|implementation)\b|#import\s+.+\.h[">])/
disambiguate ".asc" do |data|
if /^(----[- ]BEGIN|ssh-(rsa|dss)) /.match(data)
@@ -130,7 +131,7 @@ module Linguist
disambiguate ".for", ".f" do |data|
if /^: /.match(data)
Language["Forth"]
elsif /^([c*][^a-z]| (subroutine|program)\s|\s*!)/i.match(data)
elsif /^([c*][^abd-z]| (subroutine|program|end)\s|\s*!)/i.match(data)
Language["FORTRAN"]
end
end
@@ -237,8 +238,10 @@ module Linguist
disambiguate ".ms" do |data|
if /^[.'][a-z][a-z](\s|$)/i.match(data)
Language["Groff"]
elsif /((^|\s)move?[. ])|\.(include|globa?l)\s/.match(data)
elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
Language["GAS"]
else
Language["MAXScript"]
end
end
@@ -273,19 +276,27 @@ module Linguist
end
disambiguate ".pl" do |data|
if /^(use v6|(my )?class|module)/.match(data)
Language["Perl6"]
if /^[^#]+:-/.match(data)
Language["Prolog"]
elsif /use strict|use\s+v?5\./.match(data)
Language["Perl"]
elsif /^[^#]+:-/.match(data)
Language["Prolog"]
elsif /^(use v6|(my )?class|module)/.match(data)
Language["Perl6"]
end
end
disambiguate ".pm", ".t" do |data|
if /^(use v6|(my )?class|module)/.match(data)
if /use strict|use\s+v?5\./.match(data)
Language["Perl"]
elsif /^(use v6|(my )?class|module)/.match(data)
Language["Perl6"]
elsif /use strict|use\s+v?5\./.match(data)
end
end
disambiguate ".pod" do |data|
if /^=\w+$/.match(data)
Language["Pod"]
else
Language["Perl"]
end
end
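A hedged sketch of the strategy interface these rules sit behind, assuming `Heuristics.call(blob, candidates)` returns the disambiguated languages:

```ruby
require 'linguist'

# Prolog clauses look like `head :- body.`, which the reordered .pl rule now checks first.
blob = Linguist::FileBlob.new("facts.pl")  # assumption: this file exists locally
candidates = [Linguist::Language["Perl"], Linguist::Language["Prolog"]]

Linguist::Heuristics.call(blob, candidates)
# => [Linguist::Language["Prolog"]] when the data matches /^[^#]+:-/
```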

lib/linguist/languages.yml

@@ -8,7 +8,8 @@
# Use "text" if a mode does not exist.
# wrap - Boolean wrap to enable line wrapping (default: false)
# extensions - An Array of associated extensions (the first one is
# considered the primary extension)
# considered the primary extension, the others should be
# listed alphabetically)
# interpreters - An Array of associated interpreters
# searchable - Boolean flag to enable searching (defaults to true)
# search_term - Deprecated: Some languages maybe indexed under a
@@ -141,7 +142,7 @@ Agda:
Alloy:
type: programming # 'modeling' would be more appropriate
color: "#cc5c24"
color: "#64C800"
extensions:
- .als
ace_mode: text
@@ -182,6 +183,7 @@ AppleScript:
interpreters:
- osascript
ace_mode: applescript
color: "#101F1F"
Arc:
type: programming
@@ -289,6 +291,7 @@ Batchfile:
- .cmd
tm_scope: source.dosbatch
ace_mode: batchfile
color: "#C1F12E"
Befunge:
type: programming
@@ -303,6 +306,7 @@ Bison:
extensions:
- .bison
ace_mode: text
color: "#6A463F"
BitBake:
type: programming
@@ -392,6 +396,7 @@ C#:
- csharp
extensions:
- .cs
- .cake
- .cshtml
- .csx
@@ -539,6 +544,14 @@ Clean:
tm_scope: none
ace_mode: text
Click:
type: programming
color: "#E4E6F3"
extensions:
- .click
tm_scope: source.click
ace_mode: text
Clojure:
type: programming
ace_mode: clojure
@@ -567,6 +580,7 @@ CoffeeScript:
extensions:
- .coffee
- ._coffee
- .cake
- .cjsx
- .cson
- .iced
@@ -627,7 +641,7 @@ Common Lisp:
Component Pascal:
type: programming
color: "#b0ce4e"
color: "#B0CE4E"
extensions:
- .cp
- .cps
@@ -690,6 +704,7 @@ Cucumber:
aliases:
- gherkin
ace_mode: text
color: "#5B2063"
Cuda:
type: programming
@@ -698,6 +713,7 @@ Cuda:
- .cuh
tm_scope: source.cuda-c++
ace_mode: c_cpp
color: "#3A4E3A"
Cycript:
type: programming
@@ -719,7 +735,7 @@ Cython:
D:
type: programming
color: "#fcd46d"
color: "#ba595e"
extensions:
- .d
- .di
@@ -790,7 +806,6 @@ Dart:
Diff:
type: data
color: "#88dddd"
extensions:
- .diff
- .patch
@@ -884,6 +899,8 @@ Elixir:
ace_mode: elixir
filenames:
- mix.lock
interpreters:
- elixir
Elm:
type: programming
@@ -926,6 +943,8 @@ Erlang:
- .es
- .escript
- .hrl
- .xrl
- .yrl
filenames:
- rebar.config
- rebar.config.lock
@@ -1011,6 +1030,7 @@ Formatted:
type: data
extensions:
- .for
- .eam.fs
tm_scope: none
ace_mode: text
@@ -1028,6 +1048,16 @@ Forth:
- .fs
ace_mode: forth
FreeMarker:
type: programming
color: "#0050b2"
aliases:
- ftl
extensions:
- .ftl
tm_scope: text.html.ftl
ace_mode: ftl
Frege:
type: programming
color: "#00cafe"
@@ -1317,11 +1347,22 @@ HTML+Django:
- .mustache
- .jinja
aliases:
- django
- html+django/jinja
- html+jinja
- htmldjango
ace_mode: django
HTML+EEX:
type: markup
tm_scope: text.html.elixir
group: HTML
aliases:
- eex
extensions:
- .eex
ace_mode: text
HTML+ERB:
type: markup
tm_scope: text.html.erb
@@ -1331,7 +1372,7 @@ HTML+ERB:
extensions:
- .erb
- .erb.deface
ace_mode: html_ruby
ace_mode: text
HTML+PHP:
type: markup
@@ -1355,6 +1396,7 @@ Hack:
- .hh
- .php
tm_scope: text.html.php
color: "#878787"
Haml:
group: HTML
@@ -1363,10 +1405,12 @@ Haml:
- .haml
- .haml.deface
ace_mode: haml
color: "#ECE2A9"
Handlebars:
type: markup
color: "#01a9d6"
group: HTML
aliases:
- hbs
- htmlbars
@@ -1474,7 +1518,7 @@ Inform 7:
extensions:
- .ni
- .i7x
tm_scope: source.Inform7
tm_scope: source.inform7
aliases:
- i7
- inform7
@@ -1575,12 +1619,20 @@ JSONiq:
- .jq
tm_scope: source.jq
JSX:
type: programming
group: JavaScript
extensions:
- .jsx
tm_scope: source.js.jsx
ace_mode: javascript
Jade:
group: HTML
type: markup
extensions:
- .jade
tm_scope: source.jade
tm_scope: text.jade
ace_mode: jade
Jasmin:
@@ -1625,10 +1677,10 @@ JavaScript:
- .gs
- .jake
- .jsb
- .jscad
- .jsfl
- .jsm
- .jss
- .jsx
- .njs
- .pac
- .sjs
@@ -1660,6 +1712,18 @@ Julia:
color: "#a270ba"
ace_mode: julia
Jupyter Notebook:
type: markup
ace_mode: json
tm_scope: source.json
color: "#DA5B0B"
extensions:
- .ipynb
filenames:
- Notebook
aliases:
- IPython Notebook
KRL:
type: programming
color: "#28431f"
@@ -1672,6 +1736,7 @@ KiCad:
type: programming
extensions:
- .sch
- .brd
- .kicad_pcb
tm_scope: none
ace_mode: text
@@ -1707,6 +1772,7 @@ LLVM:
extensions:
- .ll
ace_mode: text
color: "#185619"
LOLCODE:
type: programming
@@ -1770,6 +1836,7 @@ Less:
- .less
tm_scope: source.css.less
ace_mode: less
color: "#A1D9A1"
Lex:
type: programming
@@ -1921,6 +1988,15 @@ M:
tm_scope: source.lisp
ace_mode: lisp
MAXScript:
type: programming
color: "#00a6a6"
extensions:
- .ms
- .mcr
tm_scope: source.maxscript
ace_mode: text
MTML:
type: markup
color: "#b7e1f4"
@@ -1979,6 +2055,7 @@ Markdown:
- .mkdown
- .ron
tm_scope: source.gfm
color: "#083FA1"
Mask:
type: markup
@@ -1995,6 +2072,7 @@ Mathematica:
- .cdf
- .m
- .ma
- .mt
- .nb
- .nbp
- .wl
@@ -2006,6 +2084,8 @@ Mathematica:
Matlab:
type: programming
color: "#bb92ac"
aliases:
- octave
extensions:
- .matlab
- .m
@@ -2039,6 +2119,7 @@ MediaWiki:
wrap: true
extensions:
- .mediawiki
- .wiki
tm_scope: text.html.mediawiki
ace_mode: text
@@ -2054,6 +2135,14 @@ Mercury:
tm_scope: source.mercury
ace_mode: prolog
Metal:
type: programming
color: "#8f14e9"
extensions:
- .metal
tm_scope: source.c++
ace_mode: c_cpp
MiniD: # Legacy
type: programming
searchable: false
@@ -2206,6 +2295,7 @@ Nginx:
aliases:
- nginx configuration file
ace_mode: text
color: "#9469E9"
Nimrod:
type: programming
@@ -2264,6 +2354,7 @@ NumPy:
- .numsc
tm_scope: none
ace_mode: text
color: "#9C8AF9"
OCaml:
type: programming
@@ -2432,6 +2523,7 @@ PHP:
- .php3
- .php4
- .php5
- .phps
- .phpt
filenames:
- Phakefile
@@ -2448,6 +2540,7 @@ PLSQL:
color: "#dad8d8"
extensions:
- .pls
- .pck
- .pkb
- .pks
- .plb
@@ -2512,7 +2605,7 @@ Parrot Internal Representation:
Pascal:
type: programming
color: "#b0ce4e"
color: "#E3F171"
extensions:
- .pas
- .dfm
@@ -2561,9 +2654,16 @@ Perl6:
- Rexfile
interpreters:
- perl6
tm_scope: source.perl.6
tm_scope: source.perl6fe
ace_mode: perl
Pickle:
type: data
extensions:
- .pkl
tm_scope: none
ace_mode: text
PicoLisp:
type: programming
extensions:
@@ -2608,6 +2708,13 @@ PogoScript:
tm_scope: source.pogoscript
ace_mode: text
Pony:
type: programming
extensions:
- .pony
tm_scope: source.pony
ace_mode: text
PostScript:
type: markup
extensions:
@@ -2766,7 +2873,7 @@ QMake:
R:
type: programming
color: "#198ce7"
color: "#198CE7"
aliases:
- R
- Rscript
@@ -2796,6 +2903,7 @@ RDoc:
extensions:
- .rdoc
tm_scope: text.rdoc
color: "#8E84BF"
REALbasic:
type: programming
@@ -2891,6 +2999,17 @@ Redcode:
tm_scope: none
ace_mode: text
Ren'Py:
type: programming
group: Python
aliases:
- renpy
color: "#ff7f7f"
extensions:
- .rpy
tm_scope: source.renpy
ace_mode: python
RenderScript:
type: programming
extensions:
@@ -2976,6 +3095,7 @@ Rust:
color: "#dea584"
extensions:
- .rs
- .rs.in
ace_mode: rust
SAS:
@@ -2993,6 +3113,7 @@ SCSS:
ace_mode: scss
extensions:
- .scss
color: "#CF649A"
SMT:
type: programming
@@ -3095,11 +3216,12 @@ Sass:
extensions:
- .sass
ace_mode: sass
color: "#CF649A"
Scala:
type: programming
ace_mode: scala
color: "#7dd3b0"
color: "#DC322F"
extensions:
- .scala
- .sbt
@@ -3251,6 +3373,14 @@ Squirrel:
tm_scope: source.c++
ace_mode: c_cpp
Stan:
type: programming
color: "#b2011d"
extensions:
- .stan
ace_mode: text
tm_scope: source.stan
Standard ML:
type: programming
color: "#dc566d"
@@ -3288,9 +3418,12 @@ SuperCollider:
type: programming
color: "#46390b"
extensions:
- .scd
- .sc
tm_scope: none
- .scd
interpreters:
- sclang
- scsynth
tm_scope: source.supercollider
ace_mode: text
Swift:
@@ -3433,6 +3566,7 @@ TypeScript:
- ts
extensions:
- .ts
- .tsx
tm_scope: source.ts
ace_mode: typescript
@@ -3448,7 +3582,6 @@ Unified Parallel C:
Unity3D Asset:
type: data
ace_mode: yaml
color: "#ab69a1"
extensions:
- .anim
- .asset
@@ -3466,6 +3599,17 @@ UnrealScript:
tm_scope: source.java
ace_mode: java
UrWeb:
type: programming
aliases:
- Ur/Web
- Ur
extensions:
- .ur
- .urs
tm_scope: source.ur
ace_mode: text
VCL:
group: Perl
type: programming
@@ -3603,6 +3747,7 @@ XML:
- .ccxml
- .clixml
- .cproject
- .csl
- .csproj
- .ct
- .dita
@@ -3649,6 +3794,7 @@ XML:
- .tmSnippet
- .tmTheme
- .ts
- .tsx
- .ui
- .urdf
- .vbproj
@@ -3667,6 +3813,7 @@ XML:
- .xliff
- .xmi
- .xml.dist
- .xproj
- .xsd
- .xul
- .zcml
@@ -3723,6 +3870,7 @@ XSLT:
- .xsl
tm_scope: text.xml.xsl
ace_mode: xml
color: "#EB8CEB"
Xojo:
type: programming
@@ -3756,6 +3904,13 @@ YAML:
- .yaml-tmlanguage
ace_mode: yaml
YANG:
type: data
extensions:
- .yang
tm_scope: source.yang
ace_mode: text
Yacc:
type: programming
extensions:
@@ -3764,6 +3919,7 @@ Yacc:
- .yy
tm_scope: source.bison
ace_mode: text
color: "#4B6C4B"
Zephir:
type: programming
@@ -3803,7 +3959,6 @@ eC:
edn:
type: data
ace_mode: clojure
color: "#db5855"
extensions:
- .edn
tm_scope: source.clojure
@@ -3845,7 +4000,10 @@ reStructuredText:
extensions:
- .rst
- .rest
- .rest.txt
- .rst.txt
ace_mode: text
color: "#B3BCBC"
wisp:
type: programming

lib/linguist/lazy_blob.rb

@@ -79,11 +79,15 @@ module Linguist
@size
end
def cleanup!
@data.clear if @data
end
protected
# Returns true if the attribute is present and not the string "false".
def boolean_attribute(attr)
attr != "false"
def boolean_attribute(attribute)
attribute != "false"
end
def load_blob!
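The renamed helper keeps `git check-attr` semantics: a bare or truthy attribute counts as set, and only the literal string "false" (e.g. `linguist-vendored=false`) switches it off. A sketch of the truth table, assuming the same logic standalone:

```ruby
boolean_attribute = lambda { |attribute| attribute != "false" }

boolean_attribute.call("true")   # => true
boolean_attribute.call("set")    # => true  (bare attribute from .gitattributes)
boolean_attribute.call("false")  # => false
```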

lib/linguist/repository.rb

@@ -126,12 +126,13 @@ module Linguist
end
protected
MAX_TREE_SIZE = 100_000
def compute_stats(old_commit_oid, cache = nil)
return {} if current_tree.count_recursive(MAX_TREE_SIZE) >= MAX_TREE_SIZE
old_tree = old_commit_oid && Rugged::Commit.lookup(repository, old_commit_oid).tree
read_index
diff = Rugged::Tree.diff(repository, old_tree, current_tree)
# Clear file map and fetch full diff if any .gitattributes files are changed
@@ -157,8 +158,11 @@ module Linguist
blob = Linguist::LazyBlob.new(repository, delta.new_file[:oid], new, mode.to_s(8))
next unless blob.include_in_language_stats?
file_map[new] = [blob.language.group.name, blob.size]
if blob.include_in_language_stats?
file_map[new] = [blob.language.group.name, blob.size]
end
blob.cleanup!
end
end
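For scale, the new `MAX_TREE_SIZE` constant caps stats at 100,000 tree entries. A hedged end-to-end sketch of the incremental path (assuming it is run inside a local git checkout):

```ruby
require 'rugged'
require 'linguist'

rugged = Rugged::Repository.new(".")  # assumption: current directory is a git repo
repo = Linguist::Repository.new(rugged, rugged.head.target_id)

repo.languages  # => {"Ruby" => 46319, ...} bytes per language
repo.cache      # serializable stats, reusable via load_existing_stats on the next commit
```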

lib/linguist/strategy/modeline.rb

@@ -1,8 +1,19 @@
module Linguist
module Strategy
class Modeline
EmacsModeline = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
VimModeline = /vim:\s*set.*\s(?:ft|filetype)=(\w+)\s?.*:/i
EMACS_MODELINE = /-\*-\s*(?:(?!mode)[\w-]+\s*:\s*(?:[\w+-]+)\s*;?\s*)*(?:mode\s*:)?\s*([\w+-]+)\s*(?:;\s*(?!mode)[\w-]+\s*:\s*[\w+-]+\s*)*;?\s*-\*-/i
# First form vim modeline
# [text]{white}{vi:|vim:|ex:}[white]{options}
# ex: 'vim: syntax=ruby'
VIM_MODELINE_1 = /(?:vim|vi|ex):\s*(?:ft|filetype|syntax)=(\w+)\s?/i
# Second form vim modeline (compatible with some versions of Vi)
# [text]{white}{vi:|vim:|Vim:|ex:}[white]se[t] {options}:[text]
# ex: 'vim set syntax=ruby:'
VIM_MODELINE_2 = /(?:vim|vi|Vim|ex):\s*se(?:t)?.*\s(?:ft|filetype|syntax)=(\w+)\s?.*:/i
MODELINES = [EMACS_MODELINE, VIM_MODELINE_1, VIM_MODELINE_2]
# Public: Detects language based on Vim and Emacs modelines
#
@@ -22,7 +33,7 @@ module Linguist
#
# Returns a String or nil
def self.modeline(data)
match = data.match(EmacsModeline) || data.match(VimModeline)
match = MODELINES.map { |regex| data.match(regex) }.reject(&:nil?).first
match[1] if match
end
end
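A quick probe of the two Vim forms plus Emacs, assuming the public class method shown above:

```ruby
require 'linguist'

Linguist::Strategy::Modeline.modeline("# vim: syntax=ruby")          # first form  => "ruby"
Linguist::Strategy::Modeline.modeline("/* vim: set filetype=c: */")  # second form => "c"
Linguist::Strategy::Modeline.modeline("; -*- mode: lisp -*-")        # Emacs       => "lisp"
```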

lib/linguist/tokenizer.rb

@@ -86,13 +86,13 @@ module Linguist
if s.peek(1) == "\""
s.getch
else
s.skip_until(/[^\\]"/)
s.skip_until(/(?<!\\)"/)
end
elsif s.scan(/'/)
if s.peek(1) == "'"
s.getch
else
s.skip_until(/[^\\]'/)
s.skip_until(/(?<!\\)'/)
end
# Skip number literals
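The old character-class form `[^\\]"` also consumed the character before the quote and could not match a quote sitting at the scan position itself; the lookbehind matches the bare quote alone. A small standalone probe of the new pattern:

```ruby
require 'strscan'

s = StringScanner.new('she said \\"hi\\" and left" tail')
s.skip_until(/(?<!\\)"/)  # lookbehind: stop at a quote not preceded by a backslash
s.rest                    # => " tail" (both escaped quotes were skipped)
```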

lib/linguist/vendor.yml

@@ -95,7 +95,7 @@
- jquery.fn.gantt.js
# jQuery fancyBox
- jquery.fancybox.js
- jquery.fancybox.(js|css)
# Fuel UX
- fuelux.js

lib/linguist/version.rb

@@ -1,3 +1,3 @@
module Linguist
VERSION = "4.5.15"
VERSION = "4.7.5"
end

86
samples/C#/build.cake Normal file

@@ -0,0 +1,86 @@
///////////////////////////////////////////////////////////////////////////////
// ARGUMENTS
///////////////////////////////////////////////////////////////////////////////
var target = Argument<string>("target", "Default");
var configuration = Argument<string>("configuration", "Release");
///////////////////////////////////////////////////////////////////////////////
// GLOBAL VARIABLES
///////////////////////////////////////////////////////////////////////////////
var solutions = GetFiles("./**/*.sln");
var solutionPaths = solutions.Select(solution => solution.GetDirectory());
///////////////////////////////////////////////////////////////////////////////
// SETUP / TEARDOWN
///////////////////////////////////////////////////////////////////////////////
Setup(() =>
{
// Executed BEFORE the first task.
Information("Running tasks...");
});
Teardown(() =>
{
// Executed AFTER the last task.
Information("Finished running tasks.");
});
///////////////////////////////////////////////////////////////////////////////
// TASK DEFINITIONS
///////////////////////////////////////////////////////////////////////////////
Task("Clean")
.Does(() =>
{
// Clean solution directories.
foreach(var path in solutionPaths)
{
Information("Cleaning {0}", path);
CleanDirectories(path + "/**/bin/" + configuration);
CleanDirectories(path + "/**/obj/" + configuration);
}
});
Task("Restore")
.Does(() =>
{
// Restore all NuGet packages.
foreach(var solution in solutions)
{
Information("Restoring {0}...", solution);
NuGetRestore(solution);
}
});
Task("Build")
.IsDependentOn("Clean")
.IsDependentOn("Restore")
.Does(() =>
{
// Build all solutions.
foreach(var solution in solutions)
{
Information("Building {0}", solution);
MSBuild(solution, settings =>
settings.SetPlatformTarget(PlatformTarget.MSIL)
.WithProperty("TreatWarningsAsErrors","true")
.WithTarget("Build")
.SetConfiguration(configuration));
}
});
///////////////////////////////////////////////////////////////////////////////
// TARGETS
///////////////////////////////////////////////////////////////////////////////
Task("Default")
.IsDependentOn("Build");
///////////////////////////////////////////////////////////////////////////////
// EXECUTION
///////////////////////////////////////////////////////////////////////////////
RunTarget(target);

133
samples/Click/sr2.click Normal file

@@ -0,0 +1,133 @@
rates :: AvailableRates
elementclass sr2 {
$sr2_ip, $sr2_nm, $wireless_mac, $gateway, $probes|
arp :: ARPTable();
lt :: LinkTable(IP $sr2_ip);
gw :: SR2GatewaySelector(ETHTYPE 0x062c,
IP $sr2_ip,
ETH $wireless_mac,
LT lt,
ARP arp,
PERIOD 15,
GW $gateway);
gw -> SR2SetChecksum -> [0] output;
set_gw :: SR2SetGateway(SEL gw);
es :: SR2ETTStat(ETHTYPE 0x0641,
ETH $wireless_mac,
IP $sr2_ip,
PERIOD 30000,
TAU 300000,
ARP arp,
PROBES $probes,
ETT metric,
RT rates);
metric :: SR2ETTMetric(LT lt);
forwarder :: SR2Forwarder(ETHTYPE 0x0643,
IP $sr2_ip,
ETH $wireless_mac,
ARP arp,
LT lt);
querier :: SR2Querier(ETH $wireless_mac,
SR forwarder,
LT lt,
ROUTE_DAMPENING true,
TIME_BEFORE_SWITCH 5,
DEBUG true);
query_forwarder :: SR2MetricFlood(ETHTYPE 0x0644,
IP $sr2_ip,
ETH $wireless_mac,
LT lt,
ARP arp,
DEBUG false);
query_responder :: SR2QueryResponder(ETHTYPE 0x0645,
IP $sr2_ip,
ETH $wireless_mac,
LT lt,
ARP arp,
DEBUG true);
query_responder -> SR2SetChecksum -> [0] output;
query_forwarder -> SR2SetChecksum -> SR2Print(forwarding) -> [0] output;
query_forwarder [1] -> query_responder;
data_ck :: SR2SetChecksum()
input [1]
-> host_cl :: IPClassifier(dst net $sr2_ip mask $sr2_nm,
-)
-> querier
-> data_ck;
host_cl [1] -> [0] set_gw [0] -> querier;
forwarder[0]
-> dt ::DecIPTTL
-> data_ck
-> [2] output;
dt[1]
-> Print(ttl-error)
-> ICMPError($sr2_ip, timeexceeded, 0)
-> querier;
// queries
querier [1] -> [1] query_forwarder;
es -> SetTimestamp() -> [1] output;
forwarder[1] //ip packets to me
-> SR2StripHeader()
-> CheckIPHeader()
-> from_gw_cl :: IPClassifier(src net $sr2_ip mask $sr2_nm,
-)
-> [3] output;
from_gw_cl [1] -> [1] set_gw [1] -> [3] output;
input [0]
-> ncl :: Classifier(
12/0643 , //sr2_forwarder
12/0644 , //sr2
12/0645 , //replies
12/0641 , //sr2_es
12/062c , //sr2_gw
);
ncl[0] -> SR2CheckHeader() -> [0] forwarder;
ncl[1] -> SR2CheckHeader() -> PrintSR(query) -> query_forwarder;
ncl[2] -> SR2CheckHeader() -> query_responder;
ncl[3] -> es;
ncl[4] -> SR2CheckHeader() -> gw;
}
Idle -> s :: sr2(2.0.0.1, 255.0.0.0, 00:00:00:00:00:01, false, "12 60 12 1500") -> Discard;
Idle -> [1] s;
s[1] -> Discard;
s[2] -> Discard;
s[3] -> Discard;


@@ -0,0 +1,142 @@
// This Click configuration implements a firewall and NAT, roughly based on the
// mazu-nat.click example.
//
// This example assumes there is one interface that is IP-aliased. In this
// example, eth0 and eth0:0 have IP addresses 66.58.65.90 and 192.168.1.1,
// respectively. There is a local network, 192.168.1.0/24, and an upstream
// gateway, 66.58.65.89. Traffic from the local network is NATed.
//
// Connections can be initiated from the NAT box itself, also.
//
// For bugs, suggestions, and corrections, please email me.
//
// Author: Thomer M. Gil (click@thomer.com)
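//
// Illustration (not part of the original sample): with the NAT pattern
// declared below, an outbound TCP packet from 192.168.1.10:4000 leaves with
// its source rewritten to 66.58.65.90 and a port from the 50000-65535 range.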
AddressInfo(
eth0-in 192.168.1.1 192.168.1.0/24 00:0d:87:9d:1c:e9,
eth0-ex 66.58.65.90 00:0d:87:9d:1c:e9,
gw-addr 66.58.65.89 00:20:6f:14:54:c2
);
elementclass SniffGatewayDevice {
$device |
from :: FromDevice($device)
-> t1 :: Tee
-> output;
input -> q :: Queue(1024)
-> t2 :: PullTee
-> to :: ToDevice($device);
t1[1] -> ToHostSniffers;
t2[1] -> ToHostSniffers($device);
ScheduleInfo(from .1, to 1);
}
device :: SniffGatewayDevice(eth0);
arpq_in :: ARPQuerier(eth0-in) -> device;
ip_to_extern :: GetIPAddress(16)
-> CheckIPHeader
-> EtherEncap(0x800, eth0-ex, gw-addr)
-> device;
ip_to_host :: EtherEncap(0x800, gw-addr, eth0-ex)
-> ToHost;
ip_to_intern :: GetIPAddress(16)
-> CheckIPHeader
-> arpq_in;
arp_class :: Classifier(
12/0806 20/0001, // [0] ARP requests
12/0806 20/0002, // [1] ARP replies to host
12/0800); // [2] IP packets
device -> arp_class;
// ARP crap
arp_class[0] -> ARPResponder(eth0-in, eth0-ex) -> device;
arp_class[1] -> arp_t :: Tee;
arp_t[0] -> ToHost;
arp_t[1] -> [1]arpq_in;
// IP packets
arp_class[2] -> Strip(14)
-> CheckIPHeader
-> ipclass :: IPClassifier(dst host eth0-ex,
dst host eth0-in,
src net eth0-in);
// Define pattern NAT
iprw :: IPRewriterPatterns(NAT eth0-ex 50000-65535 - -);
// Rewriting rules for UDP/TCP packets
// output[0] rewritten to go into the wild
// output[1] rewritten to come back from the wild or no match
rw :: IPRewriter(pattern NAT 0 1,
pass 1);
// Rewriting rules for ICMP packets
irw :: ICMPPingRewriter(eth0-ex, -);
irw[0] -> ip_to_extern;
irw[1] -> icmp_me_or_intern :: IPClassifier(dst host eth0-ex, -);
icmp_me_or_intern[0] -> ip_to_host;
icmp_me_or_intern[1] -> ip_to_intern;
// Rewriting rules for ICMP error packets
ierw :: ICMPRewriter(rw irw);
ierw[0] -> icmp_me_or_intern;
ierw[1] -> icmp_me_or_intern;
// Packets directed at eth0-ex.
// Send it through IPRewriter(pass). If there was a mapping, it will be
// rewritten such that dst is eth0-in:net, otherwise dst will still be for
// eth0-ex.
ipclass[0] -> [1]rw;
// packets that were rewritten, heading into the wild world.
rw[0] -> ip_to_extern;
// packets that come back from the wild or are not part of an established
// connection.
rw[1] -> established_class :: IPClassifier(dst host eth0-ex,
dst net eth0-in);
// not established yet or returning packets for a connection that was
// established from this host itself.
established_class[0] ->
firewall :: IPClassifier(dst tcp port ssh,
dst tcp port smtp,
dst tcp port domain,
dst udp port domain,
icmp type echo-reply,
proto icmp,
port > 4095,
-);
firewall[0] -> ip_to_host; // ssh
firewall[1] -> ip_to_host; // smtp
firewall[2] -> ip_to_host; // domain (t)
firewall[3] -> ip_to_host; // domain (u)
firewall[4] -> [0]irw; // icmp reply
firewall[5] -> [0]ierw; // other icmp
firewall[6] -> ip_to_host; // port > 4095, probably for connection
// originating from host itself
firewall[7] -> Discard; // don't allow incoming for port <= 4095
// established connection
established_class[1] -> ip_to_intern;
// To eth0-in. Only accept from inside network.
ipclass[1] -> IPClassifier(src net eth0-in) -> ip_to_host;
// Packets from eth0-in:net either stay on local network or go to the wild.
// Those that go into the wild need to go through the appropriate rewriting
// element. (Either UDP/TCP rewriter or ICMP rewriter.)
ipclass[2] -> inter_class :: IPClassifier(dst net eth0-in, -);
inter_class[0] -> ip_to_intern;
inter_class[1] -> ip_udp_class :: IPClassifier(tcp or udp,
icmp type echo);
ip_udp_class[0] -> [0]rw;
ip_udp_class[1] -> [0]irw;


@@ -0,0 +1,17 @@
fs = require 'fs'
{print} = require 'sys'
{spawn} = require 'child_process'
build = (callback) ->
coffee = spawn 'coffee', ['-c', '-o', '.', '.']
coffee.stderr.on 'data', (data) ->
process.stderr.write data.toString()
coffee.stdout.on 'data', (data) ->
print data.toString()
coffee.on 'exit', (code) ->
callback?() if code is 0
task 'build', 'Build from source', ->
build()


@@ -0,0 +1,856 @@
Nonterminals
grammar expr_list
expr container_expr block_expr access_expr
no_parens_expr no_parens_zero_expr no_parens_one_expr no_parens_one_ambig_expr
bracket_expr bracket_at_expr bracket_arg matched_expr unmatched_expr max_expr
unmatched_op_expr matched_op_expr no_parens_op_expr no_parens_many_expr
comp_op_eol at_op_eol unary_op_eol and_op_eol or_op_eol capture_op_eol
add_op_eol mult_op_eol two_op_eol three_op_eol pipe_op_eol stab_op_eol
arrow_op_eol match_op_eol when_op_eol in_op_eol in_match_op_eol
type_op_eol rel_op_eol
open_paren close_paren empty_paren eoe
list list_args open_bracket close_bracket
tuple open_curly close_curly
bit_string open_bit close_bit
map map_op map_close map_args map_expr struct_op
assoc_op_eol assoc_expr assoc_base assoc_update assoc_update_kw assoc
container_args_base container_args
call_args_parens_expr call_args_parens_base call_args_parens parens_call
call_args_no_parens_one call_args_no_parens_ambig call_args_no_parens_expr
call_args_no_parens_comma_expr call_args_no_parens_all call_args_no_parens_many
call_args_no_parens_many_strict
stab stab_eoe stab_expr stab_op_eol_and_expr stab_parens_many
kw_eol kw_base kw call_args_no_parens_kw_expr call_args_no_parens_kw
dot_op dot_alias dot_alias_container
dot_identifier dot_op_identifier dot_do_identifier
dot_paren_identifier dot_bracket_identifier
do_block fn_eoe do_eoe end_eoe block_eoe block_item block_list
.
Terminals
identifier kw_identifier kw_identifier_safe kw_identifier_unsafe bracket_identifier
paren_identifier do_identifier block_identifier
fn 'end' aliases
number atom atom_safe atom_unsafe bin_string list_string sigil
dot_call_op op_identifier
comp_op at_op unary_op and_op or_op arrow_op match_op in_op in_match_op
type_op dual_op add_op mult_op two_op three_op pipe_op stab_op when_op assoc_op
capture_op rel_op
'true' 'false' 'nil' 'do' eol ';' ',' '.'
'(' ')' '[' ']' '{' '}' '<<' '>>' '%{}' '%'
.
Rootsymbol grammar.
%% Two shift/reduce conflicts coming from call_args_parens.
Expect 2.
%% Changes in ops and precedence should be reflected on lib/elixir/lib/macro.ex
%% Note that although the operator => in practice has lower precedence than all
%% others, its entry in the table is only there to support the %{user | foo => bar} syntax.
Left 5 do.
Right 10 stab_op_eol. %% ->
Left 20 ','.
Nonassoc 30 capture_op_eol. %% &
Left 40 in_match_op_eol. %% <-, \\ (allowed in matches along =)
Right 50 when_op_eol. %% when
Right 60 type_op_eol. %% ::
Right 70 pipe_op_eol. %% |
Right 80 assoc_op_eol. %% =>
Right 90 match_op_eol. %% =
Left 130 or_op_eol. %% ||, |||, or
Left 140 and_op_eol. %% &&, &&&, and
Left 150 comp_op_eol. %% ==, !=, =~, ===, !==
Left 160 rel_op_eol. %% <, >, <=, >=
Left 170 arrow_op_eol. %% |>, <<<, >>>, ~>>, <<~, ~>, <~, <~>, <|>
Left 180 in_op_eol. %% in
Left 190 three_op_eol. %% ^^^
Right 200 two_op_eol. %% ++, --, .., <>
Left 210 add_op_eol. %% +, -
Left 220 mult_op_eol. %% *, /
Nonassoc 300 unary_op_eol. %% +, -, !, ^, not, ~~~
Left 310 dot_call_op.
Left 310 dot_op. %% .
Nonassoc 320 at_op_eol. %% @
Nonassoc 330 dot_identifier.
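%% Illustration (not part of the original grammar): the precedences above make
%% mult_op bind tighter than add_op, which is observable from Elixir itself:
%%
%%     iex> Code.string_to_quoted!("1 + 2 * 3")
%%     {:+, [line: 1], [1, {:*, [line: 1], [2, 3]}]}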
%%% MAIN FLOW OF EXPRESSIONS
grammar -> eoe : nil.
grammar -> expr_list : to_block('$1').
grammar -> eoe expr_list : to_block('$2').
grammar -> expr_list eoe : to_block('$1').
grammar -> eoe expr_list eoe : to_block('$2').
grammar -> '$empty' : nil.
% Note expressions are in reverse order
expr_list -> expr : ['$1'].
expr_list -> expr_list eoe expr : ['$3'|'$1'].
expr -> matched_expr : '$1'.
expr -> no_parens_expr : '$1'.
expr -> unmatched_expr : '$1'.
%% In Elixir we have three main call syntaxes: with parentheses,
%% without parentheses and with do blocks. They are represented
%% in the AST as matched, no_parens and unmatched.
%%
%% Calls without parentheses are further divided according to how
%% problematic they are:
%%
%% (a) no_parens_one: a call with one unproblematic argument
%% (e.g. `f a` or `f g a` and similar) (includes unary operators)
%%
%% (b) no_parens_many: a call with several arguments (e.g. `f a, b`)
%%
%% (c) no_parens_one_ambig: a call with one argument which is
%% itself a no_parens_many or no_parens_one_ambig (e.g. `f g a, b`
%% or `f g h a, b` and similar)
%%
%% Note, in particular, that no_parens_one_ambig expressions are
%% ambiguous and are interpreted such that the outer function has
%% arity 1 (e.g. `f g a, b` is interpreted as `f(g(a, b))` rather
%% than `f(g(a), b)`). Hence the name, no_parens_one_ambig.
%%
%% The distinction is required because we can't, for example, have
%% a function call with a do block as argument inside another do
%% block call, unless there are parentheses:
%%
%% if if true do true else false end do #=> invalid
%% if(if true do true else false end) do #=> valid
%%
%% Similarly, it is not possible to nest calls without parentheses
%% if their arity is more than 1:
%%
%% foo a, bar b, c #=> invalid
%% foo(a, bar b, c) #=> invalid
%% foo bar a, b #=> valid
%% foo a, bar(b, c) #=> valid
%%
%% So the different grammar rules need to take into account
%% if calls without parentheses are do blocks in particular
%% segments and act accordingly.
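%% Illustration (not part of the original grammar): the no_parens_one_ambig
%% interpretation can be checked from Elixir, which exposes this parser via
%% Code.string_to_quoted/1:
%%
%%     iex> Code.string_to_quoted!("f g a, b") |> Macro.to_string()
%%     "f(g(a, b))"
%%
%% whereas "foo a, bar b, c" is rejected with the nested-call ambiguity error
%% produced by throw_no_parens_many_strict further down.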
matched_expr -> matched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
matched_expr -> unary_op_eol matched_expr : build_unary_op('$1', '$2').
matched_expr -> at_op_eol matched_expr : build_unary_op('$1', '$2').
matched_expr -> capture_op_eol matched_expr : build_unary_op('$1', '$2').
matched_expr -> no_parens_one_expr : '$1'.
matched_expr -> no_parens_zero_expr : '$1'.
matched_expr -> access_expr : '$1'.
matched_expr -> access_expr kw_identifier : throw_invalid_kw_identifier('$2').
unmatched_expr -> matched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unmatched_expr matched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unmatched_expr unmatched_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unmatched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
unmatched_expr -> unary_op_eol expr : build_unary_op('$1', '$2').
unmatched_expr -> at_op_eol expr : build_unary_op('$1', '$2').
unmatched_expr -> capture_op_eol expr : build_unary_op('$1', '$2').
unmatched_expr -> block_expr : '$1'.
no_parens_expr -> matched_expr no_parens_op_expr : build_op(element(1, '$2'), '$1', element(2, '$2')).
no_parens_expr -> unary_op_eol no_parens_expr : build_unary_op('$1', '$2').
no_parens_expr -> at_op_eol no_parens_expr : build_unary_op('$1', '$2').
no_parens_expr -> capture_op_eol no_parens_expr : build_unary_op('$1', '$2').
no_parens_expr -> no_parens_one_ambig_expr : '$1'.
no_parens_expr -> no_parens_many_expr : '$1'.
block_expr -> parens_call call_args_parens do_block : build_identifier('$1', '$2' ++ '$3').
block_expr -> parens_call call_args_parens call_args_parens do_block : build_nested_parens('$1', '$2', '$3' ++ '$4').
block_expr -> dot_do_identifier do_block : build_identifier('$1', '$2').
block_expr -> dot_identifier call_args_no_parens_all do_block : build_identifier('$1', '$2' ++ '$3').
matched_op_expr -> match_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> add_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> mult_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> two_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> three_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> and_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> or_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> in_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> in_match_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> type_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> when_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> pipe_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> comp_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> rel_op_eol matched_expr : {'$1', '$2'}.
matched_op_expr -> arrow_op_eol matched_expr : {'$1', '$2'}.
%% Warn for no parens subset
matched_op_expr -> arrow_op_eol no_parens_one_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
unmatched_op_expr -> match_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> add_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> mult_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> two_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> three_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> and_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> or_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> in_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> in_match_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> type_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> when_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> pipe_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> comp_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> rel_op_eol unmatched_expr : {'$1', '$2'}.
unmatched_op_expr -> arrow_op_eol unmatched_expr : {'$1', '$2'}.
no_parens_op_expr -> match_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> add_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> mult_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> two_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> three_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> and_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> or_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> in_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> in_match_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> type_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> when_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> pipe_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> comp_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> rel_op_eol no_parens_expr : {'$1', '$2'}.
no_parens_op_expr -> arrow_op_eol no_parens_expr : {'$1', '$2'}.
%% Warn for no parens subset
no_parens_op_expr -> arrow_op_eol no_parens_one_ambig_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
no_parens_op_expr -> arrow_op_eol no_parens_many_expr : warn_pipe('$1', '$2'), {'$1', '$2'}.
%% Allow when (and only when) with keywords
no_parens_op_expr -> when_op_eol call_args_no_parens_kw : {'$1', '$2'}.
no_parens_one_ambig_expr -> dot_op_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').
no_parens_one_ambig_expr -> dot_identifier call_args_no_parens_ambig : build_identifier('$1', '$2').
no_parens_many_expr -> dot_op_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').
no_parens_many_expr -> dot_identifier call_args_no_parens_many_strict : build_identifier('$1', '$2').
no_parens_one_expr -> dot_op_identifier call_args_no_parens_one : build_identifier('$1', '$2').
no_parens_one_expr -> dot_identifier call_args_no_parens_one : build_identifier('$1', '$2').
no_parens_zero_expr -> dot_do_identifier : build_identifier('$1', nil).
no_parens_zero_expr -> dot_identifier : build_identifier('$1', nil).
%% From this point on, we just have constructs that can be
%% used with the access syntax. Notice that (dot_)identifier
%% is not included in this list simply because the tokenizer
%% marks identifiers followed by brackets as bracket_identifier.
access_expr -> bracket_at_expr : '$1'.
access_expr -> bracket_expr : '$1'.
access_expr -> at_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> unary_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> capture_op_eol number : build_unary_op('$1', ?exprs('$2')).
access_expr -> fn_eoe stab end_eoe : build_fn('$1', reverse('$2')).
access_expr -> open_paren stab close_paren : build_stab(reverse('$2')).
access_expr -> open_paren stab ';' close_paren : build_stab(reverse('$2')).
access_expr -> open_paren ';' stab ';' close_paren : build_stab(reverse('$3')).
access_expr -> open_paren ';' stab close_paren : build_stab(reverse('$3')).
access_expr -> open_paren ';' close_paren : build_stab([]).
access_expr -> empty_paren : nil.
access_expr -> number : ?exprs('$1').
access_expr -> list : element(1, '$1').
access_expr -> map : '$1'.
access_expr -> tuple : '$1'.
access_expr -> 'true' : ?id('$1').
access_expr -> 'false' : ?id('$1').
access_expr -> 'nil' : ?id('$1').
access_expr -> bin_string : build_bin_string('$1').
access_expr -> list_string : build_list_string('$1').
access_expr -> bit_string : '$1'.
access_expr -> sigil : build_sigil('$1').
access_expr -> max_expr : '$1'.
%% Aliases and properly formed calls. Used by map_expr.
max_expr -> atom : ?exprs('$1').
max_expr -> atom_safe : build_quoted_atom('$1', true).
max_expr -> atom_unsafe : build_quoted_atom('$1', false).
max_expr -> parens_call call_args_parens : build_identifier('$1', '$2').
max_expr -> parens_call call_args_parens call_args_parens : build_nested_parens('$1', '$2', '$3').
max_expr -> dot_alias : '$1'.
bracket_arg -> open_bracket kw close_bracket : build_list('$1', '$2').
bracket_arg -> open_bracket container_expr close_bracket : build_list('$1', '$2').
bracket_arg -> open_bracket container_expr ',' close_bracket : build_list('$1', '$2').
bracket_expr -> dot_bracket_identifier bracket_arg : build_access(build_identifier('$1', nil), '$2').
bracket_expr -> access_expr bracket_arg : build_access('$1', '$2').
bracket_at_expr -> at_op_eol dot_bracket_identifier bracket_arg :
build_access(build_unary_op('$1', build_identifier('$2', nil)), '$3').
bracket_at_expr -> at_op_eol access_expr bracket_arg :
build_access(build_unary_op('$1', '$2'), '$3').
%% Blocks
do_block -> do_eoe 'end' : [[{do, nil}]].
do_block -> do_eoe stab end_eoe : [[{do, build_stab(reverse('$2'))}]].
do_block -> do_eoe block_list 'end' : [[{do, nil}|'$2']].
do_block -> do_eoe stab_eoe block_list 'end' : [[{do, build_stab(reverse('$2'))}|'$3']].
eoe -> eol : '$1'.
eoe -> ';' : '$1'.
eoe -> eol ';' : '$1'.
fn_eoe -> 'fn' : '$1'.
fn_eoe -> 'fn' eoe : '$1'.
do_eoe -> 'do' : '$1'.
do_eoe -> 'do' eoe : '$1'.
end_eoe -> 'end' : '$1'.
end_eoe -> eoe 'end' : '$2'.
block_eoe -> block_identifier : '$1'.
block_eoe -> block_identifier eoe : '$1'.
stab -> stab_expr : ['$1'].
stab -> stab eoe stab_expr : ['$3'|'$1'].
stab_eoe -> stab : '$1'.
stab_eoe -> stab eoe : '$1'.
%% Here, `element(1, Token)` is the stab operator,
%% while `element(2, Token)` is the expression.
stab_expr -> expr :
'$1'.
stab_expr -> stab_op_eol_and_expr :
build_op(element(1, '$1'), [], element(2, '$1')).
stab_expr -> empty_paren stab_op_eol_and_expr :
build_op(element(1, '$2'), [], element(2, '$2')).
stab_expr -> call_args_no_parens_all stab_op_eol_and_expr :
build_op(element(1, '$2'), unwrap_when(unwrap_splice('$1')), element(2, '$2')).
stab_expr -> stab_parens_many stab_op_eol_and_expr :
build_op(element(1, '$2'), unwrap_splice('$1'), element(2, '$2')).
stab_expr -> stab_parens_many when_op expr stab_op_eol_and_expr :
build_op(element(1, '$4'), [{'when', meta_from_token('$2'), unwrap_splice('$1') ++ ['$3']}], element(2, '$4')).
stab_op_eol_and_expr -> stab_op_eol expr : {'$1', '$2'}.
stab_op_eol_and_expr -> stab_op_eol : warn_empty_stab_clause('$1'), {'$1', nil}.
block_item -> block_eoe stab_eoe : {?exprs('$1'), build_stab(reverse('$2'))}.
block_item -> block_eoe : {?exprs('$1'), nil}.
block_list -> block_item : ['$1'].
block_list -> block_item block_list : ['$1'|'$2'].
%% Helpers
open_paren -> '(' : '$1'.
open_paren -> '(' eol : '$1'.
close_paren -> ')' : '$1'.
close_paren -> eol ')' : '$2'.
empty_paren -> open_paren ')' : '$1'.
open_bracket -> '[' : '$1'.
open_bracket -> '[' eol : '$1'.
close_bracket -> ']' : '$1'.
close_bracket -> eol ']' : '$2'.
open_bit -> '<<' : '$1'.
open_bit -> '<<' eol : '$1'.
close_bit -> '>>' : '$1'.
close_bit -> eol '>>' : '$2'.
open_curly -> '{' : '$1'.
open_curly -> '{' eol : '$1'.
close_curly -> '}' : '$1'.
close_curly -> eol '}' : '$2'.
% Operators
add_op_eol -> add_op : '$1'.
add_op_eol -> add_op eol : '$1'.
add_op_eol -> dual_op : '$1'.
add_op_eol -> dual_op eol : '$1'.
mult_op_eol -> mult_op : '$1'.
mult_op_eol -> mult_op eol : '$1'.
two_op_eol -> two_op : '$1'.
two_op_eol -> two_op eol : '$1'.
three_op_eol -> three_op : '$1'.
three_op_eol -> three_op eol : '$1'.
pipe_op_eol -> pipe_op : '$1'.
pipe_op_eol -> pipe_op eol : '$1'.
capture_op_eol -> capture_op : '$1'.
capture_op_eol -> capture_op eol : '$1'.
unary_op_eol -> unary_op : '$1'.
unary_op_eol -> unary_op eol : '$1'.
unary_op_eol -> dual_op : '$1'.
unary_op_eol -> dual_op eol : '$1'.
match_op_eol -> match_op : '$1'.
match_op_eol -> match_op eol : '$1'.
and_op_eol -> and_op : '$1'.
and_op_eol -> and_op eol : '$1'.
or_op_eol -> or_op : '$1'.
or_op_eol -> or_op eol : '$1'.
in_op_eol -> in_op : '$1'.
in_op_eol -> in_op eol : '$1'.
in_match_op_eol -> in_match_op : '$1'.
in_match_op_eol -> in_match_op eol : '$1'.
type_op_eol -> type_op : '$1'.
type_op_eol -> type_op eol : '$1'.
when_op_eol -> when_op : '$1'.
when_op_eol -> when_op eol : '$1'.
stab_op_eol -> stab_op : '$1'.
stab_op_eol -> stab_op eol : '$1'.
at_op_eol -> at_op : '$1'.
at_op_eol -> at_op eol : '$1'.
comp_op_eol -> comp_op : '$1'.
comp_op_eol -> comp_op eol : '$1'.
rel_op_eol -> rel_op : '$1'.
rel_op_eol -> rel_op eol : '$1'.
arrow_op_eol -> arrow_op : '$1'.
arrow_op_eol -> arrow_op eol : '$1'.
% Dot operator
dot_op -> '.' : '$1'.
dot_op -> '.' eol : '$1'.
dot_identifier -> identifier : '$1'.
dot_identifier -> matched_expr dot_op identifier : build_dot('$2', '$1', '$3').
dot_alias -> aliases : {'__aliases__', meta_from_token('$1', 0), ?exprs('$1')}.
dot_alias -> matched_expr dot_op aliases : build_dot_alias('$2', '$1', '$3').
dot_alias -> matched_expr dot_op dot_alias_container : build_dot_container('$2', '$1', '$3').
dot_alias_container -> open_curly '}' : [].
dot_alias_container -> open_curly container_args close_curly : '$2'.
dot_op_identifier -> op_identifier : '$1'.
dot_op_identifier -> matched_expr dot_op op_identifier : build_dot('$2', '$1', '$3').
dot_do_identifier -> do_identifier : '$1'.
dot_do_identifier -> matched_expr dot_op do_identifier : build_dot('$2', '$1', '$3').
dot_bracket_identifier -> bracket_identifier : '$1'.
dot_bracket_identifier -> matched_expr dot_op bracket_identifier : build_dot('$2', '$1', '$3').
dot_paren_identifier -> paren_identifier : '$1'.
dot_paren_identifier -> matched_expr dot_op paren_identifier : build_dot('$2', '$1', '$3').
parens_call -> dot_paren_identifier : '$1'.
parens_call -> matched_expr dot_call_op : {'.', meta_from_token('$2'), ['$1']}. % Fun/local calls
% Function calls with no parentheses
call_args_no_parens_expr -> matched_expr : '$1'.
call_args_no_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').
call_args_no_parens_comma_expr -> matched_expr ',' call_args_no_parens_expr : ['$3', '$1'].
call_args_no_parens_comma_expr -> call_args_no_parens_comma_expr ',' call_args_no_parens_expr : ['$3'|'$1'].
call_args_no_parens_all -> call_args_no_parens_one : '$1'.
call_args_no_parens_all -> call_args_no_parens_ambig : '$1'.
call_args_no_parens_all -> call_args_no_parens_many : '$1'.
call_args_no_parens_one -> call_args_no_parens_kw : ['$1'].
call_args_no_parens_one -> matched_expr : ['$1'].
call_args_no_parens_ambig -> no_parens_expr : ['$1'].
call_args_no_parens_many -> matched_expr ',' call_args_no_parens_kw : ['$1', '$3'].
call_args_no_parens_many -> call_args_no_parens_comma_expr : reverse('$1').
call_args_no_parens_many -> call_args_no_parens_comma_expr ',' call_args_no_parens_kw : reverse(['$3'|'$1']).
call_args_no_parens_many_strict -> call_args_no_parens_many : '$1'.
call_args_no_parens_many_strict -> open_paren call_args_no_parens_kw close_paren : throw_no_parens_strict('$1').
call_args_no_parens_many_strict -> open_paren call_args_no_parens_many close_paren : throw_no_parens_strict('$1').
stab_parens_many -> open_paren call_args_no_parens_kw close_paren : ['$2'].
stab_parens_many -> open_paren call_args_no_parens_many close_paren : '$2'.
% Containers
container_expr -> matched_expr : '$1'.
container_expr -> unmatched_expr : '$1'.
container_expr -> no_parens_expr : throw_no_parens_container_strict('$1').
container_args_base -> container_expr : ['$1'].
container_args_base -> container_args_base ',' container_expr : ['$3'|'$1'].
container_args -> container_args_base : lists:reverse('$1').
container_args -> container_args_base ',' : lists:reverse('$1').
container_args -> container_args_base ',' kw : lists:reverse(['$3'|'$1']).
% Function calls with parentheses
call_args_parens_expr -> matched_expr : '$1'.
call_args_parens_expr -> unmatched_expr : '$1'.
call_args_parens_expr -> no_parens_expr : throw_no_parens_many_strict('$1').
call_args_parens_base -> call_args_parens_expr : ['$1'].
call_args_parens_base -> call_args_parens_base ',' call_args_parens_expr : ['$3'|'$1'].
call_args_parens -> empty_paren : [].
call_args_parens -> open_paren no_parens_expr close_paren : ['$2'].
call_args_parens -> open_paren kw close_paren : ['$2'].
call_args_parens -> open_paren call_args_parens_base close_paren : reverse('$2').
call_args_parens -> open_paren call_args_parens_base ',' kw close_paren : reverse(['$4'|'$2']).
% KV
kw_eol -> kw_identifier : ?exprs('$1').
kw_eol -> kw_identifier eol : ?exprs('$1').
kw_eol -> kw_identifier_safe : build_quoted_atom('$1', true).
kw_eol -> kw_identifier_safe eol : build_quoted_atom('$1', true).
kw_eol -> kw_identifier_unsafe : build_quoted_atom('$1', false).
kw_eol -> kw_identifier_unsafe eol : build_quoted_atom('$1', false).
kw_base -> kw_eol container_expr : [{'$1', '$2'}].
kw_base -> kw_base ',' kw_eol container_expr : [{'$3', '$4'}|'$1'].
kw -> kw_base : reverse('$1').
kw -> kw_base ',' : reverse('$1').
call_args_no_parens_kw_expr -> kw_eol matched_expr : {'$1', '$2'}.
call_args_no_parens_kw_expr -> kw_eol no_parens_expr : {'$1', '$2'}.
call_args_no_parens_kw -> call_args_no_parens_kw_expr : ['$1'].
call_args_no_parens_kw -> call_args_no_parens_kw_expr ',' call_args_no_parens_kw : ['$1'|'$3'].
% Lists
list_args -> kw : '$1'.
list_args -> container_args_base : reverse('$1').
list_args -> container_args_base ',' : reverse('$1').
list_args -> container_args_base ',' kw : reverse('$1', '$3').
list -> open_bracket ']' : build_list('$1', []).
list -> open_bracket list_args close_bracket : build_list('$1', '$2').
% Tuple
tuple -> open_curly '}' : build_tuple('$1', []).
tuple -> open_curly container_args close_curly : build_tuple('$1', '$2').
% Bitstrings
bit_string -> open_bit '>>' : build_bit('$1', []).
bit_string -> open_bit container_args close_bit : build_bit('$1', '$2').
% Map and structs
%% Allow unquote/@something/aliases inside maps and structs.
map_expr -> max_expr : '$1'.
map_expr -> dot_identifier : build_identifier('$1', nil).
map_expr -> at_op_eol map_expr : build_unary_op('$1', '$2').
assoc_op_eol -> assoc_op : '$1'.
assoc_op_eol -> assoc_op eol : '$1'.
assoc_expr -> matched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
assoc_expr -> unmatched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
assoc_expr -> matched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}.
assoc_expr -> unmatched_expr assoc_op_eol matched_expr : {'$1', '$3'}.
assoc_expr -> map_expr : '$1'.
assoc_update -> matched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.
assoc_update -> unmatched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}.
assoc_update_kw -> matched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
assoc_update_kw -> unmatched_expr pipe_op_eol kw : {'$2', '$1', '$3'}.
assoc_base -> assoc_expr : ['$1'].
assoc_base -> assoc_base ',' assoc_expr : ['$3'|'$1'].
assoc -> assoc_base : reverse('$1').
assoc -> assoc_base ',' : reverse('$1').
map_op -> '%{}' : '$1'.
map_op -> '%{}' eol : '$1'.
map_close -> kw close_curly : '$1'.
map_close -> assoc close_curly : '$1'.
map_close -> assoc_base ',' kw close_curly : reverse('$1', '$3').
map_args -> open_curly '}' : build_map('$1', []).
map_args -> open_curly map_close : build_map('$1', '$2').
map_args -> open_curly assoc_update close_curly : build_map_update('$1', '$2', []).
map_args -> open_curly assoc_update ',' close_curly : build_map_update('$1', '$2', []).
map_args -> open_curly assoc_update ',' map_close : build_map_update('$1', '$2', '$4').
map_args -> open_curly assoc_update_kw close_curly : build_map_update('$1', '$2', []).
struct_op -> '%' : '$1'.
map -> map_op map_args : '$2'.
map -> struct_op map_expr map_args : {'%', meta_from_token('$1'), ['$2', '$3']}.
map -> struct_op map_expr eol map_args : {'%', meta_from_token('$1'), ['$2', '$4']}.
Erlang code.
-define(file(), get(elixir_parser_file)).
-define(id(Token), element(1, Token)).
-define(location(Token), element(2, Token)).
-define(exprs(Token), element(3, Token)).
-define(meta(Node), element(2, Node)).
-define(rearrange_uop(Op), (Op == 'not' orelse Op == '!')).
%% The following directive is needed for (significantly) faster
%% compilation of the generated .erl file by the HiPE compiler
-compile([{hipe, [{regalloc, linear_scan}]}]).
-import(lists, [reverse/1, reverse/2]).
meta_from_token(Token, Counter) -> [{counter, Counter}|meta_from_token(Token)].
meta_from_token(Token) -> meta_from_location(?location(Token)).
meta_from_location({Line, Column, EndColumn})
when is_integer(Line), is_integer(Column), is_integer(EndColumn) -> [{line, Line}].
%% Operators
build_op({_Kind, Location, 'in'}, {UOp, _, [Left]}, Right) when ?rearrange_uop(UOp) ->
{UOp, meta_from_location(Location), [{'in', meta_from_location(Location), [Left, Right]}]};
build_op({_Kind, Location, Op}, Left, Right) ->
{Op, meta_from_location(Location), [Left, Right]}.
build_unary_op({_Kind, Location, Op}, Expr) ->
{Op, meta_from_location(Location), [Expr]}.
build_list(Marker, Args) ->
{Args, ?location(Marker)}.
build_tuple(_Marker, [Left, Right]) ->
{Left, Right};
build_tuple(Marker, Args) ->
{'{}', meta_from_token(Marker), Args}.
build_bit(Marker, Args) ->
{'<<>>', meta_from_token(Marker), Args}.
build_map(Marker, Args) ->
{'%{}', meta_from_token(Marker), Args}.
build_map_update(Marker, {Pipe, Left, Right}, Extra) ->
{'%{}', meta_from_token(Marker), [build_op(Pipe, Left, Right ++ Extra)]}.
%% Blocks
build_block([{Op, _, [_]}]=Exprs) when ?rearrange_uop(Op) -> {'__block__', [], Exprs};
build_block([{unquote_splicing, _, Args}]=Exprs) when
length(Args) =< 2 -> {'__block__', [], Exprs};
build_block([Expr]) -> Expr;
build_block(Exprs) -> {'__block__', [], Exprs}.
%% Dots
build_dot_alias(Dot, {'__aliases__', _, Left}, {'aliases', _, Right}) ->
{'__aliases__', meta_from_token(Dot), Left ++ Right};
build_dot_alias(_Dot, Atom, {'aliases', _, _} = Token) when is_atom(Atom) ->
throw_bad_atom(Token);
build_dot_alias(Dot, Other, {'aliases', _, Right}) ->
{'__aliases__', meta_from_token(Dot), [Other|Right]}.
build_dot_container(Dot, Left, Right) ->
Meta = meta_from_token(Dot),
{{'.', Meta, [Left, '{}']}, Meta, Right}.
build_dot(Dot, Left, Right) ->
{'.', meta_from_token(Dot), [Left, extract_identifier(Right)]}.
extract_identifier({Kind, _, Identifier}) when
Kind == identifier; Kind == bracket_identifier; Kind == paren_identifier;
Kind == do_identifier; Kind == op_identifier ->
Identifier.
%% Identifiers
build_nested_parens(Dot, Args1, Args2) ->
Identifier = build_identifier(Dot, Args1),
Meta = ?meta(Identifier),
{Identifier, Meta, Args2}.
build_identifier({'.', Meta, _} = Dot, Args) ->
FArgs = case Args of
nil -> [];
_ -> Args
end,
{Dot, Meta, FArgs};
build_identifier({op_identifier, Location, Identifier}, [Arg]) ->
{Identifier, [{ambiguous_op, nil}|meta_from_location(Location)], [Arg]};
build_identifier({_, Location, Identifier}, Args) ->
{Identifier, meta_from_location(Location), Args}.
%% Fn
build_fn(Op, [{'->', _, [_, _]}|_] = Stab) ->
{fn, meta_from_token(Op), build_stab(Stab)};
build_fn(Op, _Stab) ->
throw(meta_from_token(Op), "expected clauses to be defined with -> inside: ", "'fn'").
%% Access
build_access(Expr, {List, Location}) ->
Meta = meta_from_location(Location),
{{'.', Meta, ['Elixir.Access', get]}, Meta, [Expr, List]}.
%% Interpolation aware
build_sigil({sigil, Location, Sigil, Parts, Modifiers}) ->
Meta = meta_from_location(Location),
{list_to_atom("sigil_" ++ [Sigil]), Meta, [{'<<>>', Meta, string_parts(Parts)}, Modifiers]}.
build_bin_string({bin_string, _Location, [H]}) when is_binary(H) ->
H;
build_bin_string({bin_string, Location, Args}) ->
{'<<>>', meta_from_location(Location), string_parts(Args)}.
build_list_string({list_string, _Location, [H]}) when is_binary(H) ->
elixir_utils:characters_to_list(H);
build_list_string({list_string, Location, Args}) ->
Meta = meta_from_location(Location),
{{'.', Meta, ['Elixir.String', to_char_list]}, Meta, [{'<<>>', Meta, string_parts(Args)}]}.
build_quoted_atom({_, _Location, [H]}, Safe) when is_binary(H) ->
Op = binary_to_atom_op(Safe), erlang:Op(H, utf8);
build_quoted_atom({_, Location, Args}, Safe) ->
Meta = meta_from_location(Location),
{{'.', Meta, [erlang, binary_to_atom_op(Safe)]}, Meta, [{'<<>>', Meta, string_parts(Args)}, utf8]}.
binary_to_atom_op(true) -> binary_to_existing_atom;
binary_to_atom_op(false) -> binary_to_atom.
string_parts(Parts) ->
[string_part(Part) || Part <- Parts].
string_part(Binary) when is_binary(Binary) ->
Binary;
string_part({Location, Tokens}) ->
Form = string_tokens_parse(Tokens),
Meta = meta_from_location(Location),
{'::', Meta, [{{'.', Meta, ['Elixir.Kernel', to_string]}, Meta, [Form]}, {binary, Meta, nil}]}.
string_tokens_parse(Tokens) ->
case parse(Tokens) of
{ok, Forms} -> Forms;
{error, _} = Error -> throw(Error)
end.
%% Keywords
build_stab([{'->', Meta, [Left, Right]}|T]) ->
build_stab(Meta, T, Left, [Right], []);
build_stab(Else) ->
build_block(Else).
build_stab(Old, [{'->', New, [Left, Right]}|T], Marker, Temp, Acc) ->
H = {'->', Old, [Marker, build_block(reverse(Temp))]},
build_stab(New, T, Left, [Right], [H|Acc]);
build_stab(Meta, [H|T], Marker, Temp, Acc) ->
build_stab(Meta, T, Marker, [H|Temp], Acc);
build_stab(Meta, [], Marker, Temp, Acc) ->
H = {'->', Meta, [Marker, build_block(reverse(Temp))]},
reverse([H|Acc]).
%% Every time the parser sees a (unquote_splicing())
%% it assumes that a block is being spliced, wrapping
%% the splicing in a __block__. But in the stab clause,
%% we can have (unquote_splicing(1, 2, 3)) -> :ok, in such
%% case, we don't actually want the block, since it is
%% an arg style call. unwrap_splice unwraps the splice
%% from such blocks.
unwrap_splice([{'__block__', [], [{unquote_splicing, _, _}] = Splice}]) ->
Splice;
unwrap_splice(Other) -> Other.
unwrap_when(Args) ->
case elixir_utils:split_last(Args) of
{Start, {'when', Meta, [_, _] = End}} ->
[{'when', Meta, Start ++ End}];
{_, _} ->
Args
end.
to_block([One]) -> One;
to_block(Other) -> {'__block__', [], reverse(Other)}.
%% Warnings and errors
throw(Meta, Error, Token) ->
Line =
case lists:keyfind(line, 1, Meta) of
{line, L} -> L;
false -> 0
end,
throw({error, {Line, ?MODULE, [Error, Token]}}).
throw_bad_atom(Token) ->
throw(meta_from_token(Token), "atom cannot be followed by an alias. If the '.' was meant to be "
"part of the atom's name, the atom name must be quoted. Syntax error before: ", "'.'").
throw_no_parens_strict(Token) ->
throw(meta_from_token(Token), "unexpected parentheses. If you are making a "
"function call, do not insert spaces between the function name and the "
"opening parentheses. Syntax error before: ", "'('").
throw_no_parens_many_strict(Node) ->
throw(?meta(Node),
"unexpected comma. Parentheses are required to solve ambiguity in nested calls.\n\n"
"This error happens when you have nested function calls without parentheses. "
"For example:\n\n"
" one a, two b, c, d\n\n"
"In the example above, we don't know if the parameters \"c\" and \"d\" apply "
"to the function \"one\" or \"two\". You can solve this by explicitly adding "
"parentheses:\n\n"
" one a, two(b, c, d)\n\n"
"Elixir cannot compile otherwise. Syntax error before: ", "','").
throw_no_parens_container_strict(Node) ->
throw(?meta(Node),
"unexpected comma. Parentheses are required to solve ambiguity inside containers.\n\n"
"This error may happen when you forget a comma in a list or other container:\n\n"
" [a, b c, d]\n\n"
"Or when you have ambiguous calls:\n\n"
" [one, two three, four, five]\n\n"
"In the example above, we don't know if the parameters \"four\" and \"five\" "
"belongs to the list or the function \"two\". You can solve this by explicitly "
"adding parentheses:\n\n"
" [one, two(three, four), five]\n\n"
"Elixir cannot compile otherwise. Syntax error before: ", "','").
throw_invalid_kw_identifier({_, _, do} = Token) ->
throw(meta_from_token(Token), elixir_tokenizer:invalid_do_error("unexpected keyword \"do:\""), "'do:'");
throw_invalid_kw_identifier({_, _, KW} = Token) ->
throw(meta_from_token(Token), "syntax error before: ", "'" ++ atom_to_list(KW) ++ "':").
%% TODO: Make those warnings errors.
warn_empty_stab_clause({stab_op, {Line, _Begin, _End}, '->'}) ->
elixir_errors:warn(Line, ?file(),
"an expression is always required on the right side of ->. "
"Please provide a value after ->").
warn_pipe({arrow_op, {Line, _Begin, _End}, Op}, {_, [_|_], [_|_]}) ->
elixir_errors:warn(Line, ?file(),
io_lib:format(
"you are piping into a function call without parentheses, which may be ambiguous. "
"Please wrap the function you are piping into in parentheses. For example:\n\n"
" foo 1 ~ts bar 2 ~ts baz 3\n\n"
"Should be written as:\n\n"
" foo(1) ~ts bar(2) ~ts baz(3)\n",
[Op, Op, Op, Op]
)
);
warn_pipe(_Token, _) ->
ok.

samples/Erlang/lfe_scan.xrl Normal file

@@ -0,0 +1,256 @@
%% Copyright (c) 2008-2013 Robert Virding
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% File : lfe_scan.xrl
%% Author : Robert Virding
%% Purpose : Token definitions for Lisp Flavoured Erlang.
Definitions.
B = [01]
O = [0-7]
D = [0-9]
H = [0-9a-fA-F]
B36 = [0-9a-zA-Z]
U = [A-Z]
L = [a-z]
A = ({U}|{L})
DEL = [][()}{";\000-\s]
SYM = [^][()}{";\000-\s\177-\237]
SSYM = [^][()}{"|;#`',\000-\s\177-\237]
WS = ([\000-\s]|;[^\n]*)
Rules.
%% Bracketed Comments using #| foo |#
#{D}*\|[^\|]*\|+([^#\|][^\|]*\|+)*# :
block_comment(string:substr(TokenChars, 3)).
%% Separators
' : {token,{'\'',TokenLine}}.
` : {token,{'`',TokenLine}}.
, : {token,{',',TokenLine}}.
,@ : {token,{',@',TokenLine}}.
\. : {token,{'.',TokenLine}}.
[][()}{] : {token,{list_to_atom(TokenChars),TokenLine}}.
#{D}*[bB]\( : {token,{'#B(',TokenLine}}.
#{D}*[mM]\( : {token,{'#M(',TokenLine}}.
#{D}*\( : {token,{'#(',TokenLine}}.
#{D}*\. : {token,{'#.',TokenLine}}.
#{D}*` : {token,{'#`',TokenLine}}.
#{D}*; : {token,{'#;',TokenLine}}.
#{D}*, : {token,{'#,',TokenLine}}.
#{D}*,@ : {token,{'#,@',TokenLine}}.
%% Characters
#{D}*\\(x{H}+|.) : char_token(skip_past(TokenChars, $\\, $\\), TokenLine).
%% Based numbers
#{D}*\*{SYM}+ : base_token(skip_past(TokenChars, $*, $*), 2, TokenLine).
#{D}*[bB]{SYM}+ : base_token(skip_past(TokenChars, $b, $B), 2, TokenLine).
#{D}*[oO]{SYM}+ : base_token(skip_past(TokenChars, $o, $O), 8, TokenLine).
#{D}*[dD]{SYM}+ : base_token(skip_past(TokenChars, $d, $D), 10, TokenLine).
#{D}*[xX]{SYM}+ : base_token(skip_past(TokenChars, $x, $X), 16, TokenLine).
#{D}*[rR]{SYM}+ :
%% Scan over digit chars to get base.
{Base,[_|Ds]} = base1(tl(TokenChars), 10, 0),
base_token(Ds, Base, TokenLine).
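%% Illustration (assumed LFE literal syntax, not part of the original file):
%% under the rules above, #b1010 and #2r1010 both scan to the integer 10,
%% while #xff and #16rff both scan to 255.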
%% String
"(\\x{H}+;|\\.|[^"\\])*" :
%% Strip quotes.
S = string:substr(TokenChars, 2, TokenLen - 2),
{token,{string,TokenLine,chars(S)}}.
%% Binary string
#"(\\x{H}+;|\\.|[^"\\])*" :
%% Strip quotes.
S = string:substr(TokenChars, 3, TokenLen - 3),
Bin = unicode:characters_to_binary(chars(S), utf8, utf8),
{token,{binary,TokenLine,Bin}}.
%% Symbols
\|(\\x{H}+;|\\.|[^|\\])*\| :
%% Strip quotes.
S = string:substr(TokenChars, 2, TokenLen - 2),
symbol_token(chars(S), TokenLine).
%% Funs
#'{SSYM}{SYM}*/{D}+ :
%% Strip sharpsign single-quote.
FunStr = string:substr(TokenChars,3),
{token,{'#\'',TokenLine,FunStr}}.
%% Atoms
[+-]?{D}+ :
case catch {ok,list_to_integer(TokenChars)} of
{ok,I} -> {token,{number,TokenLine,I}};
_ -> {error,"illegal integer"}
end.
[+-]?{D}+\.{D}+([eE][+-]?{D}+)? :
case catch {ok,list_to_float(TokenChars)} of
{ok,F} -> {token,{number,TokenLine,F}};
_ -> {error,"illegal float"}
end.
{SSYM}{SYM}* :
symbol_token(TokenChars, TokenLine).
{WS}+ : skip_token.
Erlang code.
%% Copyright (c) 2008-2013 Robert Virding
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% File : lfe_scan.erl
%% Author : Robert Virding
%% Purpose : Token definitions for Lisp Flavoured Erlang.
-export([start_symbol_char/1,symbol_char/1]).
-import(string, [substr/2,substr/3]).
%% start_symbol_char(Char) -> true | false.
%% symbol_char(Char) -> true | false.
%% Define start symbol chars and symbol chars.
start_symbol_char($#) -> false;
start_symbol_char($`) -> false;
start_symbol_char($') -> false; %'
start_symbol_char($,) -> false;
start_symbol_char($|) -> false; %Symbol quote character
start_symbol_char(C) -> symbol_char(C).
symbol_char($() -> false;
symbol_char($)) -> false;
symbol_char($[) -> false;
symbol_char($]) -> false;
symbol_char(${) -> false;
symbol_char($}) -> false;
symbol_char($") -> false;
symbol_char($;) -> false;
symbol_char(C) -> ((C > $\s) and (C =< $~)) orelse (C > $\240).
%% symbol_token(Chars, Line) -> {token,{symbol,Line,Symbol}} | {error,E}.
%% Build a symbol from a list of legal characters, else error.
symbol_token(Cs, L) ->
case catch {ok,list_to_atom(Cs)} of
{ok,S} -> {token,{symbol,L,S}};
_ -> {error,"illegal symbol"}
end.
%% base_token(Chars, Base, Line) -> {token,{number,Line,N}} | {error,E}.
%% Convert a string of Base characters into a number. We only allow
%% base between 2 and 36, and an optional sign character first.
base_token(_, B, _) when B < 2; B > 36 ->
{error,"illegal number base"};
base_token([$+|Cs], B, L) -> base_token(Cs, B, +1, L);
base_token([$-|Cs], B, L) -> base_token(Cs, B, -1, L);
base_token(Cs, B, L) -> base_token(Cs, B, +1, L).
base_token(Cs, B, S, L) ->
case base1(Cs, B, 0) of
{N,[]} -> {token,{number,L,S*N}};
{_,_} -> {error,"illegal based number"}
end.
base1([C|Cs], Base, SoFar) when C >= $0, C =< $9, C < Base + $0 ->
Next = SoFar * Base + (C - $0),
base1(Cs, Base, Next);
base1([C|Cs], Base, SoFar) when C >= $a, C =< $z, C < Base + $a - 10 ->
Next = SoFar * Base + (C - $a + 10),
base1(Cs, Base, Next);
base1([C|Cs], Base, SoFar) when C >= $A, C =< $Z, C < Base + $A - 10 ->
Next = SoFar * Base + (C - $A + 10),
base1(Cs, Base, Next);
base1([C|Cs], _Base, SoFar) -> {SoFar,[C|Cs]};
base1([], _Base, N) -> {N,[]}.
-define(IS_UNICODE(C), ((C >= 0) and (C =< 16#10FFFF))).
%% char_token(InputChars, Line) -> {token,{number,L,N}} | {error,E}.
%% Convert an input string into the corresponding character. For a
%% sequence of hex characters we check that the resultant code is in the
%% Unicode range.
char_token([$x,C|Cs], L) ->
case base1([C|Cs], 16, 0) of
{N,[]} when ?IS_UNICODE(N) -> {token,{number,L,N}};
_ -> {error,"illegal character"}
end;
char_token([C], L) -> {token,{number,L,C}}.
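%% Illustration (assumed LFE character syntax, not part of the original file):
%% #\a yields the character code 97, and #\x41 yields 65.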
%% chars(InputChars) -> Chars.
%% Convert an input string into the corresponding string characters.
%% We know that the input string is correct.
chars([$\\,$x,C|Cs0]) ->
case hex_char(C) of
true ->
case base1([C|Cs0], 16, 0) of
{N,[$;|Cs1]} -> [N|chars(Cs1)];
_Other -> [escape_char($x)|chars([C|Cs0])]
end;
false -> [escape_char($x)|chars([C|Cs0])]
end;
chars([$\\,C|Cs]) -> [escape_char(C)|chars(Cs)];
chars([C|Cs]) -> [C|chars(Cs)];
chars([]) -> [].
hex_char(C) when C >= $0, C =< $9 -> true;
hex_char(C) when C >= $a, C =< $f -> true;
hex_char(C) when C >= $A, C =< $F -> true;
hex_char(_) -> false.
escape_char($b) -> $\b; %\b = BS
escape_char($t) -> $\t; %\t = TAB
escape_char($n) -> $\n; %\n = LF
escape_char($v) -> $\v; %\v = VT
escape_char($f) -> $\f; %\f = FF
escape_char($r) -> $\r; %\r = CR
escape_char($e) -> $\e; %\e = ESC
escape_char($s) -> $\s; %\s = SPC
escape_char($d) -> $\d; %\d = DEL
escape_char(C) -> C.
%% Block Comment:
%% Provide a sensible error when people attempt to include nested
%% comments because currently the parser cannot process them without
%% a rebuild. But simply exploding on a '#|' is not going to be that
%% helpful.
block_comment(TokenChars) ->
%% Check we're not opening another comment block.
case string:str(TokenChars, "#|") of
0 -> skip_token; %% No nesting found
_ -> {error, "illegal nested block comment"}
end.
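%% Illustration (not part of the original file): "#| plain |#" is skipped as a
%% comment, while "#| outer #| inner |#" is rejected with the "illegal nested
%% block comment" error above.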
%% skip_until(String, Char1, Char2) -> String.
%% skip_past(String, Char1, Char2) -> String.
%% skip_until([C|_]=Cs, C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
%% skip_until([_|Cs], C1, C2) -> skip_until(Cs, C1, C2);
%% skip_until([], _, _) -> [].
skip_past([C|Cs], C1, C2) when C =:= C1 ; C =:= C2 -> Cs;
skip_past([_|Cs], C1, C2) -> skip_past(Cs, C1, C2);
skip_past([], _, _) -> [].

File diff suppressed because it is too large


@@ -0,0 +1,31 @@
<#import "layout.ftl" as layout>
<#assign results = [
{
"title": "Example Result",
"description": "Lorem ipsum dolor sit amet, pede id pellentesque, sollicitudin turpis sed in sed sed, libero dictum."
}
] />
<@layout.page title="FreeMarker Example">
<#if results?size == 0>
There were no results.
<#else>
<ul>
<#list results as result>
<li>
<strong>${result.title}</strong>
<p>${result.description}</p>
</li>
</#list>
</ul>
</#if>
<#-- This is a FreeMarker comment -->
<@currentTime />
</@layout.page>
<#macro currentTime>
${.now?string.full}
</#macro>


@@ -0,0 +1,32 @@
<#ftl strip_text=true />
<#macro page title>
<!doctype html>
<html lang="${.lang}">
<head>
<title>${title}</title>
<@metaTags />
</head>
<body>
<#nested />
<@footer />
</body>
</html>
</#macro>
<#---
Default meta tags
-->
<#macro metaTags>
<#compress>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1">
<meta name="format-detection" content="telephone=no">
</#compress>
</#macro>
<#macro footer>
<p>This page is using FreeMarker v${.version}</p>
</#macro>


@@ -0,0 +1,26 @@
<h1>Listing Books</h1>
<table>
<tr>
<th>Title</th>
<th>Summary</th>
<th></th>
<th></th>
<th></th>
</tr>
<%= for book <- @books do %>
<tr>
<%# comment %>
<td><%= book.title %></td>
<td><%= book.content %></td>
<td><%= link "Show", to: book_path(@conn, :show, book) %></td>
<td><%= link "Edit", to: book_path(@conn, :edit, book) %></td>
<td><%= link "Delete", to: book_path(@conn, :delete, book), method: :delete, data: [confirm: "Are you sure?"] %></td>
</tr>
<% end %>
</table>
<br />
<%= link "New book", to: book_path(@conn, :new) %>


@@ -1,6 +1,74 @@
Version 1 of Trivial Extension by Andrew Plotkin begins here.
Version 2 of Trivial Extension by Andrew Plotkin begins here.
"This is the rubric of the extension."
"provided for the Linguist package by Andrew Plotkin"
[Note the two special quoted lines above.]
A cow is a kind of animal. A cow can be purple.
Understand "cow" as a cow.
Understand "purple" as a purple cow.
Check pushing a cow:
instead say "Cow-tipping, at your age?[paragraph break]Inconceivable."
[Here are the possible levels of heading:]
Volume One
Text-line is always "A line of text."
Book 2
Part the third - indented headings still count
Chapter IV - not for release
[Heading labels are case-insensitive.]
section foobar
[A line beginning "Volume" that does not have blank lines before and after it is *not* a header line. So the following should all be part of section foobar. Sadly, the "Volume is..." line gets colored as a header, because Atom's regexp model can't recognize "thing with blank lines before and after"!]
Measure is a kind of value.
Volume is a measure. Length is a measure.
Area is a measure.
[And now some Inform 6 inclusions.]
To say em -- running on:
(- style underline; -).
To say /em -- running on:
(- style roman; -).
Include (-
! Inform 6 comments start with a ! mark and run to the end of the line.
Global cowcount;
[ inform6func arg;
print "Here is some text; ", (address) 'dictword', ".^";
cowcount++; ! increment this variable
];
Object i6cow
with name 'cow' 'animal',
with description "It looks like a cow.",
has animate scenery;
-) after "Global Variables" in "Output.i6t".
Trivial Extension ends here.
---- DOCUMENTATION ----
Everything after the "---- DOCUMENTATION ----" line is documentation, so it should have the comment style.
However, tab-indented lines are sample Inform code within the documentation:
Horns are a kind of thing. Every cow has horns.
say "Moo[if the noun is purple] indigo[end if]."
So we need to allow for that.


@@ -2,11 +2,61 @@
Include Trivial Extension by Andrew Plotkin.
Volume 1 - overview
Chapter - setting the scene
The Kitchen is a room.
[This kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]
[Comment: this kitchen is modelled after the one in Zork, although it lacks the detail to establish this to the player.]
Section - the kitchen table
The spicerack is a container in the Kitchen.
Table of Spices
Name Flavor
"cinnamon" 5
"nutmeg" 4
"szechuan pepper" 8
The description of the spicerack is "It's mostly empty."
Chapter - a character
A purple cow called Gelett is in the Kitchen.
[This comment spans multiple lines..
...and this line contains [nested square[] brackets]...
...which is legal in Inform 7.]
Instead of examining Gelett:
say "You'd rather see than be one."
Check smelling Gelett:
say "This text contains several lines.
A blank line is displayed as a paragraph break,
but a simple line break is not.";
stop the action.
Section - cow catching
Gelett has a number called the mooness.
Instead of taking Gelett:
increment the mooness of Gelett;
if the mooness of Gelett is one:
say "Gelett moos once.";
else:
say "Gelett moos [mooness of Gelett in words] times.";
Volume 2 - the turn cycle
Every turn:
say "A turn passes[one of][or] placidly[or] idly[or] tediously[at random]."

samples/JSX/sample.jsx Normal file

@@ -0,0 +1,23 @@
'use strict';
const React = require('react')
module.exports = React.createClass({
render: function() {
let {feeds, log} = this.props;
log.info(feeds);
return <div className="feed-list">
<h3>News Feed's</h3>
<ul>
{feeds.map(function(feed) {
return <li key={feed.name} className={feed.fetched ? 'loaded' : 'loading'}>
{feed.data && feed.data.length > 0 ?
<span>{feed.name} <span className='light'>({feed.data.length})</span></span>
: 'feed.name' }
</li>
})}
</ul>
</div>;
}
});


@@ -0,0 +1,625 @@
// This is a generated file. Not intended for manual editing.
package org.intellij.grammar.parser;
import com.intellij.lang.PsiBuilder;
import com.intellij.lang.PsiBuilder.Marker;
import static org.intellij.grammar.psi.BnfTypes.*;
import static org.intellij.grammar.parser.GeneratedParserUtilBase.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.lang.ASTNode;
import com.intellij.psi.tree.TokenSet;
import com.intellij.lang.PsiParser;
import com.intellij.lang.LightPsiParser;
@SuppressWarnings({"SimplifiableIfStatement", "UnusedAssignment"})
public class GrammarParser implements PsiParser, LightPsiParser {
public ASTNode parse(IElementType t, PsiBuilder b) {
parseLight(t, b);
return b.getTreeBuilt();
}
public void parseLight(IElementType t, PsiBuilder b) {
boolean r;
b = adapt_builder_(t, b, this, EXTENDS_SETS_);
Marker m = enter_section_(b, 0, _COLLAPSE_, null);
if (t == BNF_ATTR) {
r = attr(b, 0);
}
else if (t == BNF_ATTR_PATTERN) {
r = attr_pattern(b, 0);
}
else if (t == BNF_ATTR_VALUE) {
r = attr_value(b, 0);
}
else if (t == BNF_ATTRS) {
r = attrs(b, 0);
}
else if (t == BNF_CHOICE) {
r = choice(b, 0);
}
else if (t == BNF_EXPRESSION) {
r = expression(b, 0);
}
else if (t == BNF_LITERAL_EXPRESSION) {
r = literal_expression(b, 0);
}
else if (t == BNF_MODIFIER) {
r = modifier(b, 0);
}
else if (t == BNF_PAREN_EXPRESSION) {
r = paren_expression(b, 0);
}
else if (t == BNF_PREDICATE) {
r = predicate(b, 0);
}
else if (t == BNF_PREDICATE_SIGN) {
r = predicate_sign(b, 0);
}
else if (t == BNF_QUANTIFIED) {
r = quantified(b, 0);
}
else if (t == BNF_QUANTIFIER) {
r = quantifier(b, 0);
}
else if (t == BNF_REFERENCE_OR_TOKEN) {
r = reference_or_token(b, 0);
}
else if (t == BNF_RULE) {
r = rule(b, 0);
}
else if (t == BNF_SEQUENCE) {
r = sequence(b, 0);
}
else if (t == BNF_STRING_LITERAL_EXPRESSION) {
r = string_literal_expression(b, 0);
}
else {
r = parse_root_(t, b, 0);
}
exit_section_(b, 0, m, t, r, true, TRUE_CONDITION);
}
protected boolean parse_root_(IElementType t, PsiBuilder b, int l) {
return grammar(b, l + 1);
}
public static final TokenSet[] EXTENDS_SETS_ = new TokenSet[] {
create_token_set_(BNF_LITERAL_EXPRESSION, BNF_STRING_LITERAL_EXPRESSION),
create_token_set_(BNF_CHOICE, BNF_EXPRESSION, BNF_LITERAL_EXPRESSION, BNF_PAREN_EXPRESSION,
BNF_PREDICATE, BNF_QUANTIFIED, BNF_REFERENCE_OR_TOKEN, BNF_SEQUENCE,
BNF_STRING_LITERAL_EXPRESSION),
};
/* ********************************************************** */
// id attr_pattern? '=' attr_value ';'?
public static boolean attr(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attr")) return false;
boolean r, p;
Marker m = enter_section_(b, l, _NONE_, "<attr>");
r = consumeToken(b, BNF_ID);
p = r; // pin = 1
r = r && report_error_(b, attr_1(b, l + 1));
r = p && report_error_(b, consumeToken(b, BNF_OP_EQ)) && r;
r = p && report_error_(b, attr_value(b, l + 1)) && r;
r = p && attr_4(b, l + 1) && r;
exit_section_(b, l, m, BNF_ATTR, r, p, attr_recover_until_parser_);
return r || p;
}
// attr_pattern?
private static boolean attr_1(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attr_1")) return false;
attr_pattern(b, l + 1);
return true;
}
// ';'?
private static boolean attr_4(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attr_4")) return false;
consumeToken(b, BNF_SEMICOLON);
return true;
}
/* ********************************************************** */
// '(' string ')'
public static boolean attr_pattern(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attr_pattern")) return false;
if (!nextTokenIs(b, BNF_LEFT_PAREN)) return false;
boolean r;
Marker m = enter_section_(b);
r = consumeToken(b, BNF_LEFT_PAREN);
r = r && consumeToken(b, BNF_STRING);
r = r && consumeToken(b, BNF_RIGHT_PAREN);
exit_section_(b, m, BNF_ATTR_PATTERN, r);
return r;
}
/* ********************************************************** */
// !'}'
static boolean attr_recover_until(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attr_recover_until")) return false;
boolean r;
Marker m = enter_section_(b, l, _NOT_, null);
r = !consumeToken(b, BNF_RIGHT_BRACE);
exit_section_(b, l, m, null, r, false, null);
return r;
}
/* ********************************************************** */
// (reference_or_token | literal_expression) !'='
public static boolean attr_value(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attr_value")) return false;
boolean r;
Marker m = enter_section_(b, l, _NONE_, "<attr value>");
r = attr_value_0(b, l + 1);
r = r && attr_value_1(b, l + 1);
exit_section_(b, l, m, BNF_ATTR_VALUE, r, false, null);
return r;
}
// reference_or_token | literal_expression
private static boolean attr_value_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attr_value_0")) return false;
boolean r;
Marker m = enter_section_(b);
r = reference_or_token(b, l + 1);
if (!r) r = literal_expression(b, l + 1);
exit_section_(b, m, null, r);
return r;
}
// !'='
private static boolean attr_value_1(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attr_value_1")) return false;
boolean r;
Marker m = enter_section_(b, l, _NOT_, null);
r = !consumeToken(b, BNF_OP_EQ);
exit_section_(b, l, m, null, r, false, null);
return r;
}
/* ********************************************************** */
// '{' attr* '}'
public static boolean attrs(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attrs")) return false;
if (!nextTokenIs(b, BNF_LEFT_BRACE)) return false;
boolean r, p;
Marker m = enter_section_(b, l, _NONE_, null);
r = consumeToken(b, BNF_LEFT_BRACE);
p = r; // pin = 1
r = r && report_error_(b, attrs_1(b, l + 1));
r = p && consumeToken(b, BNF_RIGHT_BRACE) && r;
exit_section_(b, l, m, BNF_ATTRS, r, p, null);
return r || p;
}
// attr*
private static boolean attrs_1(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "attrs_1")) return false;
int c = current_position_(b);
while (true) {
if (!attr(b, l + 1)) break;
if (!empty_element_parsed_guard_(b, "attrs_1", c)) break;
c = current_position_(b);
}
return true;
}
/* ********************************************************** */
// '{' sequence ('|' sequence)* '}' | sequence choice_tail*
public static boolean choice(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "choice")) return false;
boolean r;
Marker m = enter_section_(b, l, _COLLAPSE_, "<choice>");
r = choice_0(b, l + 1);
if (!r) r = choice_1(b, l + 1);
exit_section_(b, l, m, BNF_CHOICE, r, false, null);
return r;
}
// '{' sequence ('|' sequence)* '}'
private static boolean choice_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "choice_0")) return false;
boolean r;
Marker m = enter_section_(b);
r = consumeToken(b, BNF_LEFT_BRACE);
r = r && sequence(b, l + 1);
r = r && choice_0_2(b, l + 1);
r = r && consumeToken(b, BNF_RIGHT_BRACE);
exit_section_(b, m, null, r);
return r;
}
// ('|' sequence)*
private static boolean choice_0_2(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "choice_0_2")) return false;
int c = current_position_(b);
while (true) {
if (!choice_0_2_0(b, l + 1)) break;
if (!empty_element_parsed_guard_(b, "choice_0_2", c)) break;
c = current_position_(b);
}
return true;
}
// '|' sequence
private static boolean choice_0_2_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "choice_0_2_0")) return false;
boolean r;
Marker m = enter_section_(b);
r = consumeToken(b, BNF_OP_OR);
r = r && sequence(b, l + 1);
exit_section_(b, m, null, r);
return r;
}
// sequence choice_tail*
private static boolean choice_1(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "choice_1")) return false;
boolean r;
Marker m = enter_section_(b);
r = sequence(b, l + 1);
r = r && choice_1_1(b, l + 1);
exit_section_(b, m, null, r);
return r;
}
// choice_tail*
private static boolean choice_1_1(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "choice_1_1")) return false;
int c = current_position_(b);
while (true) {
if (!choice_tail(b, l + 1)) break;
if (!empty_element_parsed_guard_(b, "choice_1_1", c)) break;
c = current_position_(b);
}
return true;
}
/* ********************************************************** */
// '|' sequence
static boolean choice_tail(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "choice_tail")) return false;
if (!nextTokenIs(b, BNF_OP_OR)) return false;
boolean r, p;
Marker m = enter_section_(b, l, _NONE_, null);
r = consumeToken(b, BNF_OP_OR);
p = r; // pin = 1
r = r && sequence(b, l + 1);
exit_section_(b, l, m, null, r, p, null);
return r || p;
}
/* ********************************************************** */
// choice?
public static boolean expression(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "expression")) return false;
Marker m = enter_section_(b, l, _COLLAPSE_, "<expression>");
choice(b, l + 1);
exit_section_(b, l, m, BNF_EXPRESSION, true, false, null);
return true;
}
/* ********************************************************** */
// (attrs | rule) *
static boolean grammar(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "grammar")) return false;
int c = current_position_(b);
while (true) {
if (!grammar_0(b, l + 1)) break;
if (!empty_element_parsed_guard_(b, "grammar", c)) break;
c = current_position_(b);
}
return true;
}
// attrs | rule
private static boolean grammar_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "grammar_0")) return false;
boolean r;
Marker m = enter_section_(b);
r = attrs(b, l + 1);
if (!r) r = rule(b, l + 1);
exit_section_(b, m, null, r);
return r;
}
/* ********************************************************** */
// string_literal_expression | number
public static boolean literal_expression(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "literal_expression")) return false;
if (!nextTokenIs(b, "<literal expression>", BNF_NUMBER, BNF_STRING)) return false;
boolean r;
Marker m = enter_section_(b, l, _COLLAPSE_, "<literal expression>");
r = string_literal_expression(b, l + 1);
if (!r) r = consumeToken(b, BNF_NUMBER);
exit_section_(b, l, m, BNF_LITERAL_EXPRESSION, r, false, null);
return r;
}
/* ********************************************************** */
// 'private' | 'external' | 'wrapped'
public static boolean modifier(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "modifier")) return false;
boolean r;
Marker m = enter_section_(b, l, _NONE_, "<modifier>");
r = consumeToken(b, "private");
if (!r) r = consumeToken(b, "external");
if (!r) r = consumeToken(b, "wrapped");
exit_section_(b, l, m, BNF_MODIFIER, r, false, null);
return r;
}
/* ********************************************************** */
// quantified | predicate
static boolean option(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "option")) return false;
boolean r;
Marker m = enter_section_(b);
r = quantified(b, l + 1);
if (!r) r = predicate(b, l + 1);
exit_section_(b, m, null, r);
return r;
}
/* ********************************************************** */
// '(' expression ')'
public static boolean paren_expression(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "paren_expression")) return false;
if (!nextTokenIs(b, BNF_LEFT_PAREN)) return false;
boolean r, p;
Marker m = enter_section_(b, l, _NONE_, null);
r = consumeToken(b, BNF_LEFT_PAREN);
p = r; // pin = 1
r = r && report_error_(b, expression(b, l + 1));
r = p && consumeToken(b, BNF_RIGHT_PAREN) && r;
exit_section_(b, l, m, BNF_PAREN_EXPRESSION, r, p, null);
return r || p;
}
/* ********************************************************** */
// predicate_sign simple
public static boolean predicate(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "predicate")) return false;
if (!nextTokenIs(b, "<predicate>", BNF_OP_NOT, BNF_OP_AND)) return false;
boolean r;
Marker m = enter_section_(b, l, _NONE_, "<predicate>");
r = predicate_sign(b, l + 1);
r = r && simple(b, l + 1);
exit_section_(b, l, m, BNF_PREDICATE, r, false, null);
return r;
}
/* ********************************************************** */
// '&' | '!'
public static boolean predicate_sign(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "predicate_sign")) return false;
if (!nextTokenIs(b, "<predicate sign>", BNF_OP_NOT, BNF_OP_AND)) return false;
boolean r;
Marker m = enter_section_(b, l, _NONE_, "<predicate sign>");
r = consumeToken(b, BNF_OP_AND);
if (!r) r = consumeToken(b, BNF_OP_NOT);
exit_section_(b, l, m, BNF_PREDICATE_SIGN, r, false, null);
return r;
}
/* ********************************************************** */
// '[' expression ']' | simple quantifier?
public static boolean quantified(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "quantified")) return false;
boolean r;
Marker m = enter_section_(b, l, _COLLAPSE_, "<quantified>");
r = quantified_0(b, l + 1);
if (!r) r = quantified_1(b, l + 1);
exit_section_(b, l, m, BNF_QUANTIFIED, r, false, null);
return r;
}
// '[' expression ']'
private static boolean quantified_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "quantified_0")) return false;
boolean r;
Marker m = enter_section_(b);
r = consumeToken(b, BNF_LEFT_BRACKET);
r = r && expression(b, l + 1);
r = r && consumeToken(b, BNF_RIGHT_BRACKET);
exit_section_(b, m, null, r);
return r;
}
// simple quantifier?
private static boolean quantified_1(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "quantified_1")) return false;
boolean r;
Marker m = enter_section_(b);
r = simple(b, l + 1);
r = r && quantified_1_1(b, l + 1);
exit_section_(b, m, null, r);
return r;
}
// quantifier?
private static boolean quantified_1_1(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "quantified_1_1")) return false;
quantifier(b, l + 1);
return true;
}
/* ********************************************************** */
// '?' | '+' | '*'
public static boolean quantifier(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "quantifier")) return false;
boolean r;
Marker m = enter_section_(b, l, _NONE_, "<quantifier>");
r = consumeToken(b, BNF_OP_OPT);
if (!r) r = consumeToken(b, BNF_OP_ONEMORE);
if (!r) r = consumeToken(b, BNF_OP_ZEROMORE);
exit_section_(b, l, m, BNF_QUANTIFIER, r, false, null);
return r;
}
/* ********************************************************** */
// id
public static boolean reference_or_token(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "reference_or_token")) return false;
if (!nextTokenIs(b, BNF_ID)) return false;
boolean r;
Marker m = enter_section_(b);
r = consumeToken(b, BNF_ID);
exit_section_(b, m, BNF_REFERENCE_OR_TOKEN, r);
return r;
}
/* ********************************************************** */
// modifier* id '::=' expression attrs? ';'?
public static boolean rule(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "rule")) return false;
boolean r, p;
Marker m = enter_section_(b, l, _NONE_, "<rule>");
r = rule_0(b, l + 1);
r = r && consumeToken(b, BNF_ID);
r = r && consumeToken(b, BNF_OP_IS);
p = r; // pin = 3
r = r && report_error_(b, expression(b, l + 1));
r = p && report_error_(b, rule_4(b, l + 1)) && r;
r = p && rule_5(b, l + 1) && r;
exit_section_(b, l, m, BNF_RULE, r, p, rule_recover_until_parser_);
return r || p;
}
// modifier*
private static boolean rule_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "rule_0")) return false;
int c = current_position_(b);
while (true) {
if (!modifier(b, l + 1)) break;
if (!empty_element_parsed_guard_(b, "rule_0", c)) break;
c = current_position_(b);
}
return true;
}
// attrs?
private static boolean rule_4(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "rule_4")) return false;
attrs(b, l + 1);
return true;
}
// ';'?
private static boolean rule_5(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "rule_5")) return false;
consumeToken(b, BNF_SEMICOLON);
return true;
}
/* ********************************************************** */
// !'{'
static boolean rule_recover_until(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "rule_recover_until")) return false;
boolean r;
Marker m = enter_section_(b, l, _NOT_, null);
r = !consumeToken(b, BNF_LEFT_BRACE);
exit_section_(b, l, m, null, r, false, null);
return r;
}
/* ********************************************************** */
// option +
public static boolean sequence(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "sequence")) return false;
boolean r;
Marker m = enter_section_(b, l, _COLLAPSE_, "<sequence>");
r = option(b, l + 1);
int c = current_position_(b);
while (r) {
if (!option(b, l + 1)) break;
if (!empty_element_parsed_guard_(b, "sequence", c)) break;
c = current_position_(b);
}
exit_section_(b, l, m, BNF_SEQUENCE, r, false, null);
return r;
}
/* ********************************************************** */
// !(modifier* id '::=' ) reference_or_token | literal_expression | paren_expression
static boolean simple(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "simple")) return false;
boolean r;
Marker m = enter_section_(b);
r = simple_0(b, l + 1);
if (!r) r = literal_expression(b, l + 1);
if (!r) r = paren_expression(b, l + 1);
exit_section_(b, m, null, r);
return r;
}
// !(modifier* id '::=' ) reference_or_token
private static boolean simple_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "simple_0")) return false;
boolean r;
Marker m = enter_section_(b);
r = simple_0_0(b, l + 1);
r = r && reference_or_token(b, l + 1);
exit_section_(b, m, null, r);
return r;
}
// !(modifier* id '::=' )
private static boolean simple_0_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "simple_0_0")) return false;
boolean r;
Marker m = enter_section_(b, l, _NOT_, null);
r = !simple_0_0_0(b, l + 1);
exit_section_(b, l, m, null, r, false, null);
return r;
}
// modifier* id '::='
private static boolean simple_0_0_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "simple_0_0_0")) return false;
boolean r;
Marker m = enter_section_(b);
r = simple_0_0_0_0(b, l + 1);
r = r && consumeToken(b, BNF_ID);
r = r && consumeToken(b, BNF_OP_IS);
exit_section_(b, m, null, r);
return r;
}
// modifier*
private static boolean simple_0_0_0_0(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "simple_0_0_0_0")) return false;
int c = current_position_(b);
while (true) {
if (!modifier(b, l + 1)) break;
if (!empty_element_parsed_guard_(b, "simple_0_0_0_0", c)) break;
c = current_position_(b);
}
return true;
}
/* ********************************************************** */
// string
public static boolean string_literal_expression(PsiBuilder b, int l) {
if (!recursion_guard_(b, l, "string_literal_expression")) return false;
if (!nextTokenIs(b, BNF_STRING)) return false;
boolean r;
Marker m = enter_section_(b);
r = consumeToken(b, BNF_STRING);
exit_section_(b, m, BNF_STRING_LITERAL_EXPRESSION, r);
return r;
}
final static Parser attr_recover_until_parser_ = new Parser() {
public boolean parse(PsiBuilder b, int l) {
return attr_recover_until(b, l + 1);
}
};
final static Parser rule_recover_until_parser_ = new Parser() {
public boolean parse(PsiBuilder b, int l) {
return rule_recover_until(b, l + 1);
}
};
}

View File

@@ -0,0 +1,482 @@
/* The following code was generated by JFlex 1.4.3 on 28/01/16 11:27 */
package test;
import com.intellij.lexer.*;
import com.intellij.psi.tree.IElementType;
import static org.intellij.grammar.psi.BnfTypes.*;
/**
* This class is a scanner generated by
* <a href="http://www.jflex.de/">JFlex</a> 1.4.3
* on 28/01/16 11:27 from the specification file
* <tt>/home/abigail/code/intellij-grammar-kit-test/src/test/_GrammarLexer.flex</tt>
*/
public class _GrammarLexer implements FlexLexer {
/** initial size of the lookahead buffer */
private static final int ZZ_BUFFERSIZE = 16384;
/** lexical states */
public static final int YYINITIAL = 0;
/**
* ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
* ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l
* at the beginning of a line
* l is of the form l = 2*k, k a non-negative integer
*/
private static final int ZZ_LEXSTATE[] = {
0, 0
};
/**
* Translates characters to character classes
*/
private static final String ZZ_CMAP_PACKED =
"\11\0\1\1\1\1\1\0\1\1\1\1\22\0\1\1\101\0\1\13"+
"\1\0\1\3\1\14\1\0\1\10\1\0\1\2\3\0\1\12\1\7"+
"\3\0\1\6\1\4\1\5\1\11\uff8a\0";
/**
* Translates characters to character classes
*/
private static final char [] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);
/**
* Translates DFA states to action switch labels.
*/
private static final int [] ZZ_ACTION = zzUnpackAction();
private static final String ZZ_ACTION_PACKED_0 =
"\1\0\1\1\1\2\3\1\1\3\10\0\1\4\1\5";
private static int [] zzUnpackAction() {
int [] result = new int[17];
int offset = 0;
offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
return result;
}
private static int zzUnpackAction(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
do result[j++] = value; while (--count > 0);
}
return j;
}
/**
* Translates a state to a row index in the transition table
*/
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();
private static final String ZZ_ROWMAP_PACKED_0 =
"\0\0\0\15\0\32\0\47\0\64\0\101\0\15\0\116"+
"\0\133\0\150\0\165\0\202\0\217\0\234\0\251\0\15"+
"\0\15";
private static int [] zzUnpackRowMap() {
int [] result = new int[17];
int offset = 0;
offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
return result;
}
private static int zzUnpackRowMap(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int high = packed.charAt(i++) << 16;
result[j++] = high | packed.charAt(i++);
}
return j;
}
/**
* The transition table of the DFA
*/
private static final int [] ZZ_TRANS = zzUnpackTrans();
private static final String ZZ_TRANS_PACKED_0 =
"\1\2\1\3\1\4\1\2\1\5\2\2\1\6\5\2"+
"\16\0\1\3\16\0\1\7\16\0\1\10\20\0\1\11"+
"\11\0\1\12\20\0\1\13\4\0\1\14\25\0\1\15"+
"\10\0\1\16\21\0\1\17\10\0\1\20\12\0\1\21"+
"\6\0";
private static int [] zzUnpackTrans() {
int [] result = new int[182];
int offset = 0;
offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
return result;
}
private static int zzUnpackTrans(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
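/* targets are packed with an offset of one (so that -1, "no transition"
   in advance(), fits into a char); shift back down here */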
value--;
do result[j++] = value; while (--count > 0);
}
return j;
}
/* error codes */
private static final int ZZ_UNKNOWN_ERROR = 0;
private static final int ZZ_NO_MATCH = 1;
private static final int ZZ_PUSHBACK_2BIG = 2;
private static final char[] EMPTY_BUFFER = new char[0];
private static final int YYEOF = -1;
private static java.io.Reader zzReader = null; // Fake
/* error messages for the codes above */
private static final String ZZ_ERROR_MSG[] = {
"Unkown internal scanner error",
"Error: could not match input",
"Error: pushback value was too large"
};
/**
* ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
*/
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();
private static final String ZZ_ATTRIBUTE_PACKED_0 =
"\1\0\1\11\4\1\1\11\10\0\2\11";
private static int [] zzUnpackAttribute() {
int [] result = new int[17];
int offset = 0;
offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
return result;
}
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
int i = 0; /* index in packed string */
int j = offset; /* index in unpacked array */
int l = packed.length();
while (i < l) {
int count = packed.charAt(i++);
int value = packed.charAt(i++);
do result[j++] = value; while (--count > 0);
}
return j;
}
/** the current state of the DFA */
private int zzState;
/** the current lexical state */
private int zzLexicalState = YYINITIAL;
/** this buffer contains the current text to be matched and is
the source of the yytext() string */
private CharSequence zzBuffer = "";
/** this buffer may contain the current text array to be matched when it is cheap to acquire it */
private char[] zzBufferArray;
/** the text position at the last accepting state */
private int zzMarkedPos;
/** the text position at the last state to be included in yytext */
private int zzPushbackPos;
/** the current text position in the buffer */
private int zzCurrentPos;
/** startRead marks the beginning of the yytext() string in the buffer */
private int zzStartRead;
/** endRead marks the last character in the buffer, that has been read
from input */
private int zzEndRead;
/**
* zzAtBOL == true <=> the scanner is currently at the beginning of a line
*/
private boolean zzAtBOL = true;
/** zzAtEOF == true <=> the scanner is at the EOF */
private boolean zzAtEOF;
/* user code: */
public _GrammarLexer() {
this((java.io.Reader)null);
}
/**
* Creates a new scanner
*
* @param in the java.io.Reader to read input from.
*/
public _GrammarLexer(java.io.Reader in) {
this.zzReader = in;
}
/**
* Unpacks the compressed character translation table.
*
* @param packed the packed character translation table
* @return the unpacked character translation table
*/
private static char [] zzUnpackCMap(String packed) {
char [] map = new char[0x10000];
int i = 0; /* index in packed string */
int j = 0; /* index in unpacked array */
while (i < 52) {
int count = packed.charAt(i++);
char value = packed.charAt(i++);
do map[j++] = value; while (--count > 0);
}
return map;
}
public final int getTokenStart(){
return zzStartRead;
}
public final int getTokenEnd(){
return getTokenStart() + yylength();
}
public void reset(CharSequence buffer, int start, int end,int initialState){
zzBuffer = buffer;
zzBufferArray = com.intellij.util.text.CharArrayUtil.fromSequenceWithoutCopying(buffer);
zzCurrentPos = zzMarkedPos = zzStartRead = start;
zzPushbackPos = 0;
zzAtEOF = false;
zzAtBOL = true;
zzEndRead = end;
yybegin(initialState);
}
/**
* Refills the input buffer.
*
* @return <code>false</code>, iff there was new input.
*
* @exception java.io.IOException if any I/O-Error occurs
*/
private boolean zzRefill() throws java.io.IOException {
return true;
}
/**
* Returns the current lexical state.
*/
public final int yystate() {
return zzLexicalState;
}
/**
* Enters a new lexical state
*
* @param newState the new lexical state
*/
public final void yybegin(int newState) {
zzLexicalState = newState;
}
/**
* Returns the text matched by the current regular expression.
*/
public final CharSequence yytext() {
return zzBuffer.subSequence(zzStartRead, zzMarkedPos);
}
/**
* Returns the character at position <tt>pos</tt> from the
* matched text.
*
* It is equivalent to yytext().charAt(pos), but faster
*
* @param pos the position of the character to fetch.
* A value from 0 to yylength()-1.
*
* @return the character at position pos
*/
public final char yycharat(int pos) {
return zzBufferArray != null ? zzBufferArray[zzStartRead+pos]:zzBuffer.charAt(zzStartRead+pos);
}
/**
* Returns the length of the matched text region.
*/
public final int yylength() {
return zzMarkedPos-zzStartRead;
}
/**
* Reports an error that occurred while scanning.
*
* In a well-formed scanner (no or only correct usage of
* yypushback(int) and a match-all fallback rule) this method
* will only be called with things that "Can't Possibly Happen".
* If this method is called, something is seriously wrong
* (e.g. a JFlex bug producing a faulty scanner etc.).
*
* Usual syntax/scanner level error handling should be done
* in error fallback rules.
*
* @param errorCode the code of the error message to display
*/
private void zzScanError(int errorCode) {
String message;
try {
message = ZZ_ERROR_MSG[errorCode];
}
catch (ArrayIndexOutOfBoundsException e) {
message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
}
throw new Error(message);
}
/**
* Pushes the specified amount of characters back into the input stream.
*
* They will be read again by the next call of the scanning method
*
* @param number the number of characters to be read again.
* This number must not be greater than yylength()!
*/
public void yypushback(int number) {
if ( number > yylength() )
zzScanError(ZZ_PUSHBACK_2BIG);
zzMarkedPos -= number;
}
/**
* Resumes scanning until the next regular expression is matched,
* the end of input is encountered or an I/O-Error occurs.
*
* @return the next token
* @exception java.io.IOException if any I/O-Error occurs
*/
public IElementType advance() throws java.io.IOException {
int zzInput;
int zzAction;
// cached fields:
int zzCurrentPosL;
int zzMarkedPosL;
int zzEndReadL = zzEndRead;
CharSequence zzBufferL = zzBuffer;
char[] zzBufferArrayL = zzBufferArray;
char [] zzCMapL = ZZ_CMAP;
int [] zzTransL = ZZ_TRANS;
int [] zzRowMapL = ZZ_ROWMAP;
int [] zzAttrL = ZZ_ATTRIBUTE;
while (true) {
zzMarkedPosL = zzMarkedPos;
zzAction = -1;
zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;
zzState = ZZ_LEXSTATE[zzLexicalState];
zzForAction: {
while (true) {
if (zzCurrentPosL < zzEndReadL)
zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
else if (zzAtEOF) {
zzInput = YYEOF;
break zzForAction;
}
else {
// store back cached positions
zzCurrentPos = zzCurrentPosL;
zzMarkedPos = zzMarkedPosL;
boolean eof = zzRefill();
// get translated positions and possibly new buffer
zzCurrentPosL = zzCurrentPos;
zzMarkedPosL = zzMarkedPos;
zzBufferL = zzBuffer;
zzEndReadL = zzEndRead;
if (eof) {
zzInput = YYEOF;
break zzForAction;
}
else {
zzInput = (zzBufferArrayL != null ? zzBufferArrayL[zzCurrentPosL++] : zzBufferL.charAt(zzCurrentPosL++));
}
}
int zzNext = zzTransL[ zzRowMapL[zzState] + zzCMapL[zzInput] ];
if (zzNext == -1) break zzForAction;
zzState = zzNext;
int zzAttributes = zzAttrL[zzState];
if ( (zzAttributes & 1) == 1 ) {
zzAction = zzState;
zzMarkedPosL = zzCurrentPosL;
if ( (zzAttributes & 8) == 8 ) break zzForAction;
}
}
}
// store back cached position
zzMarkedPos = zzMarkedPosL;
switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
case 1:
{ return com.intellij.psi.TokenType.BAD_CHARACTER;
}
case 6: break;
case 4:
{ return BNF_STRING;
}
case 7: break;
case 5:
{ return BNF_NUMBER;
}
case 8: break;
case 3:
{ return BNF_ID;
}
case 9: break;
case 2:
{ return com.intellij.psi.TokenType.WHITE_SPACE;
}
case 10: break;
default:
if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
zzAtEOF = true;
return null;
}
else {
zzScanError(ZZ_NO_MATCH);
}
}
}
}
}

View File

@@ -0,0 +1,19 @@
// title : OpenJSCAD.org Logo
// author : Rene K. Mueller
// license : MIT License
// revision : 0.003
// tags : Logo,Intersection,Sphere,Cube
// file : logo.jscad
function main() {
return union(
difference(
cube({size: 3, center: true}),
sphere({r:2, center: true})
),
intersection(
sphere({r: 1.3, center: true}),
cube({size: 2.1, center: true})
)
).translate([0,0,1.5]).scale(10);
}

File diff suppressed because one or more lines are too long

14069
samples/KiCad/tc14badge.brd Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,29 @@
-- Taken from an example from Autodesk's MAXScript reference:
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_84E24969_C175_4389_B9A6_3B2699B66785_htm
macroscript MoveToSurface
category: "HowTo"
(
fn g_filter o = superclassof o == Geometryclass
fn find_intersection z_node node_to_z = (
local testRay = ray node_to_z.pos [0,0,-1]
local nodeMaxZ = z_node.max.z
testRay.pos.z = nodeMaxZ + 0.0001 * abs nodeMaxZ
intersectRay z_node testRay
)
on isEnabled return selection.count > 0
on Execute do (
target_mesh = pickObject message:"Pick Target Surface:" filter:g_filter
if isValidNode target_mesh then (
undo "MoveToSurface" on (
for i in selection do (
int_point = find_intersection target_mesh i
if int_point != undefined then i.pos = int_point.pos
)--end i loop
)--end undo
)--end if
)--end execute
)--end script

View File

@@ -0,0 +1,53 @@
-- Taken from an example from Autodesk's MAXScript reference:
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_0876DF46_FAA3_4131_838D_5739A67FF2C1_htm
macroscript FreeSpline category:"HowTo" tooltip:"FreeSpline" (
local old_pos
local new_spline
local second_knot_set
fn get_mouse_pos pen_pos old_pen_pos = (
if old_pos == undefined then old_pos = old_pen_pos
if distance pen_pos old_pos > 10 then
(
if second_knot_set then
addKnot new_spline 1 #smooth #curve pen_pos
else
(
setKnotPoint new_spline 1 2 pen_pos
second_knot_set = true
)
old_pos = pen_pos
updateShape new_spline
)-- end if
)-- end fn
fn draw_new_line old_pen_pos = (
pickPoint mouseMoveCallback:#(get_mouse_pos,old_pen_pos)
)
undo"Free Spline"on(
new_spline = splineShape ()
old_pen_pos = pickPoint ()
if old_pen_pos == #RightClick then
delete new_spline
else
(
select new_spline
new_spline.pos = old_pen_pos
addNewSpline new_spline
addKnot new_spline 1 #smooth #curve old_pen_pos
addKnot new_spline 1 #smooth #curve old_pen_pos
second_knot_set = false
draw_new_line old_pen_pos
q = querybox "Close Spline?" title:"Free Spline"
if q then
(
close new_spline 1
updateshape new_spline
)
select new_spline
)--end else
)--end undo
)--end script

View File

@@ -0,0 +1,64 @@
-- Taken from a 3-part tutorial from Autodesk's MAXScript reference
-- Source: http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_6B5EDC11_A154_4AA7_A972_A11AC36949E9_htm
fn ColourToHex col = (
local theComponents = #(bit.intAsHex col.r, bit.intAsHex col.g, bit.intAsHex col.b)
local theValue = "#"
for i in theComponents do
theValue += (if i.count == 1 then "0" else "") + i
theValue
)
local st = timestamp()
local theFileName = (getDir #userscripts + "\\PolygonRendering3.svg")
local theSVGfile = createFile theFileName
format "<svg xmlns=\"http://www.w3.org/2000/svg\"\n" to:theSVGfile
format "\t\txmlns:xlink=\"http://www.w3.org/1999/xlink\">\n" to:theSVGfile
local theViewTM = viewport.getTM()
theViewTM.row4 = [0,0,0]
local theViewTM2 = viewport.getTM()
local theViewSize = getViewSize()
local theViewScale = getViewSize()
theViewScale.x /= 1024.0
theViewScale.y /= 1024.0
local theStrokeThickness = 3
gw.setTransform (matrix3 1)
for o in Geometry where not o.isHiddenInVpt and classof o != TargetObject do (
local theStrokeColour = white
local theFillColour = o.wirecolor
local theMesh = snapshotAsMesh o
for f = 1 to theMesh.numfaces do (
local theNormal = normalize (getFaceNormal theMesh f)
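-- only render faces whose normal points toward the viewer (back-face culling)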
if (theNormal*theViewTM).z > 0 do
(
local theFace = getFace theMesh f
local v1 = gw.transPoint (getVert theMesh theFace.x)
local v2 = gw.transPoint (getVert theMesh theFace.y)
local v3 = gw.transPoint (getVert theMesh theFace.z)
v1.x /= theViewScale.x
v1.y /= theViewScale.y
v2.x /= theViewScale.x
v2.y /= theViewScale.y
v3.x /= theViewScale.x
v3.y /= theViewScale.y
format "\t<polygon points='%,% %,% %,%' \n" v1.x v1.y v2.x v2.y v3.x v3.y to:theSVGfile
format "\tstyle='stroke:%; fill:%; stroke-width:%'/>\n" (ColourToHex theStrokeColour) (ColourToHex theFillColour) theStrokeThickness to:theSVGfile
)--end if normal positive
)--end f loop
)--end o loop
format "</svg>\n" to:theSVGfile
close theSVGfile
local theSVGMap = VectorMap vectorFile:theFileName alphasource:0
local theBitmap = bitmap theViewSize.x theViewSize.y
renderMap theSVGMap into:theBitmap filter:true
display theBitmap
format "Render Time: % sec.\n" ((timestamp()-st)/1000.0)

View File

@@ -0,0 +1,22 @@
fn CalculateVolumeAndCentreOfMass obj =
(
local Volume= 0.0
local Centre= [0.0, 0.0, 0.0]
local theMesh = snapshotasmesh obj
local numFaces = theMesh.numfaces
for i = 1 to numFaces do
(
local Face= getFace theMesh i
local vert2 = getVert theMesh Face.z
local vert1 = getVert theMesh Face.y
local vert0 = getVert theMesh Face.x
local dV = Dot (Cross (vert1 - vert0) (vert2 - vert0)) vert0
Volume+= dV
Centre+= (vert0 + vert1 + vert2) * dV
)
delete theMesh
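-- each dV is six times the signed volume of the tetrahedron (origin, vert0, vert1, vert2),
-- and its centroid contribution is (vert0+vert1+vert2)*dV / 24, hence the divisors below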
Volume /= 6
Centre /= 24
Centre /= Volume
#(Volume,Centre)
)

View File

@@ -0,0 +1 @@
Test[1 + 2, 3, TestID -> "One plus two"]

View File

@@ -0,0 +1 @@
Test["a" <> "b", "ab", TestID -> "Concat \"a\" and \"b\""]

View File

@@ -0,0 +1,5 @@
TestSuite[
{ "TestArithmetic.mt"
, "TestString.mt"
}
]

View File

@@ -0,0 +1,694 @@
= Name =
'''nginx_tcp_proxy_module''' - support TCP proxy with Nginx
= Installation =
Download the latest stable version of the release tarball of this module from [http://github.com/yaoweibin/nginx_tcp_proxy_module github]
Grab the nginx source code from [http://nginx.org/ nginx.org], for example, the version 1.2.1 (see nginx compatibility), and then build the source with this module:
<geshi lang="bash">
$ wget 'http://nginx.org/download/nginx-1.2.1.tar.gz'
$ tar -xzvf nginx-1.2.1.tar.gz
$ cd nginx-1.2.1/
$ patch -p1 < /path/to/nginx_tcp_proxy_module/tcp.patch
$ ./configure --add-module=/path/to/nginx_tcp_proxy_module
$ make
$ make install
</geshi>
= Synopsis =
<geshi lang="nginx">
http {
server {
listen 80;
location /status {
tcp_check_status;
}
}
}
</geshi>
<geshi lang="nginx">
#You can also include tcp_proxy.conf file individually
#include /path/to/tcp_proxy.conf;
tcp {
upstream cluster {
# simple round-robin
server 192.168.0.1:80;
server 192.168.0.2:80;
check interval=3000 rise=2 fall=5 timeout=1000;
#check interval=3000 rise=2 fall=5 timeout=1000 type=ssl_hello;
#check interval=3000 rise=2 fall=5 timeout=1000 type=http;
#check_http_send "GET / HTTP/1.0\r\n\r\n";
#check_http_expect_alive http_2xx http_3xx;
}
server {
listen 8888;
proxy_pass cluster;
}
}
</geshi>
= Description =
This module actually includes several modules: ngx_tcp_module, ngx_tcp_core_module, ngx_tcp_upstream_module, ngx_tcp_proxy_module, ngx_tcp_websocket_module, ngx_tcp_ssl_module, and ngx_tcp_upstream_ip_hash_module. All these modules work together to support TCP proxying with Nginx. I also added other features: ip_hash, upstream server health checks, and a status monitor.
The motivation for writing these modules is Nginx's high performance and robustness. At first, I developed this module just for general TCP proxying. Now, this module is frequently used for websocket reverse proxying.
Note: you can't use the same listening port as the HTTP modules.
= Directives =
== ngx_tcp_module ==
=== tcp ===
'''syntax:''' ''tcp {...}''
'''default:''' ''none''
'''context:''' ''main''
'''description:''' All the tcp related directives are contained in the tcp block.
'''ngx_tcp_core_module'''
=== server ===
'''syntax:''' ''server {...}''
'''default:''' ''none''
'''context:''' ''tcp''
'''description:''' All the specific server directives are contained in the server block.
=== listen ===
'''syntax:''' ''listen address:port [ bind | ssl | default]''
'''default:''' ''none''
'''context:''' ''server''
'''description:''' The same as [http://wiki.nginx.org/NginxMailCoreModule#listen listen]. The ''default'' parameter marks the default server when several server blocks listen on the same port.
=== access_log ===
'''syntax:''' ''access_log path [buffer=size] | off''
'''default:''' ''access_log logs/tcp_access.log''
'''context:''' ''tcp, server''
'''description:''' Sets the access log. Each record has the following format:
<pre>
log_time worker_process_pid client_ip host_ip accept_time upstream_ip bytes_read bytes_write
2011/08/02 06:19:07 [5972] 127.0.0.1 0.0.0.0:1982 2011/08/02 06:18:19 172.19.0.129:80 80 236305
</pre>
* ''log_time'': the time when this record is written; the log action runs when the proxy session is closed
* ''worker_process_pid'': the pid of the worker process
* ''client_ip'': the client IP
* ''host_ip'': the server IP and port
* ''accept_time'': the time when the server accepted the client's connection
* ''upstream_ip'': the upstream server's IP
* ''bytes_read'': the number of bytes read from the client
* ''bytes_write'': the number of bytes written to the client
=== allow ===
'''syntax:''' ''allow [ address | CIDR | all ]''
'''default:''' ''none''
'''context:''' ''server''
'''description:''' Grants access to the networks or addresses indicated.
=== deny ===
'''syntax:''' ''deny [ address | CIDR | all ]''
'''default:''' ''none''
'''context:''' ''server''
'''description:''' Denies access to the networks or addresses indicated.
=== so_keepalive ===
'''syntax:''' ''so_keepalive on|off''
'''default:''' ''off''
'''context:''' ''main, server''
'''description:''' The same as [http://wiki.nginx.org/NginxMailCoreModule#so_keepalive so_keepalive].
=== tcp_nodelay ===
'''syntax:''' ''tcp_nodelay on|off''
'''default:''' ''on''
'''context:''' ''main, server''
'''description:''' The same as [http://wiki.nginx.org/NginxHttpCoreModule#tcp_nodelay tcp_nodelay].
=== timeout ===
'''syntax:''' ''timeout milliseconds''
'''default:''' ''60000''
'''context:''' ''main, server''
'''description:''' Sets the timeout for client connections.
=== server_name ===
'''syntax:''' ''server_name name''
'''default:''' ''The name of the host, obtained through gethostname()''
'''context:''' ''tcp, server''
'''description:''' The same as [http://wiki.nginx.org/NginxMailCoreModule#server_name server_name]. You can specify several server names in different server blocks listening on the same port; they can be used in the websocket module.
=== resolver ===
'''syntax:''' ''resolver address''
'''default:''' ''none''
'''context:''' ''tcp, server''
'''description:''' The DNS server used to resolve upstream host names.
=== resolver_timeout ===
'''syntax:''' ''resolver_timeout time''
'''default:''' ''30s''
'''context:''' ''tcp, server''
'''description:''' Resolver timeout in seconds.
== ngx_tcp_upstream_module ==
=== upstream ===
'''syntax:''' ''upstream {...}''
'''default:''' ''none''
'''context:''' ''tcp''
'''description:''' All the upstream directives are contained in this block. Requests are dispatched to the upstream servers with round robin by default.
=== server ===
'''syntax:''' ''server name [parameters]''
'''default:''' ''none''
'''context:''' ''upstream''
'''description:''' Most of the parameters are the same as [http://wiki.nginx.org/NginxHttpUpstreamModule#server server]. Default port is 80.
=== check ===
'''syntax:''' ''check interval=milliseconds [fall=count] [rise=count] [timeout=milliseconds] [type=tcp|ssl_hello|smtp|mysql|pop3|imap]''
'''default:''' ''none; if parameters are omitted, the defaults are interval=30000 fall=5 rise=2 timeout=1000''
'''context:''' ''upstream''
'''description:''' Adds a health check for the upstream servers. By default the check method is a simple tcp connect.
The parameters' meanings are:
* ''interval'': the interval between check requests, in milliseconds.
* ''fall'' (fall_count): the server is marked down after fall_count consecutive failed checks.
* ''rise'' (rise_count): the server is marked up after rise_count consecutive successful checks.
* ''timeout'': the check request's timeout, in milliseconds.
* ''type'': the check protocol type (see the example after this list):
# ''tcp'' is a simple tcp socket connect that peeks one byte.
# ''ssl_hello'' sends a client ssl hello packet and expects the server's ssl hello packet in return.
# ''http'' sends an http request packet, then receives and parses the http response to determine whether the upstream server is alive.
# ''smtp'' sends an smtp request packet, then receives and parses the smtp response; a response beginning with '2' is treated as OK.
# ''mysql'' connects to the mysql server and expects the greeting response.
# ''pop3'' receives and parses the pop3 response; a response beginning with '+' is treated as OK.
# ''imap'' connects to the imap server and expects the greeting response.
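For example, a minimal sketch (the addresses and intervals are placeholders) combining these parameters:
<geshi lang="nginx">
upstream cluster {
    server 192.168.0.1:3306;
    # mark the backend down after 3 consecutive failed mysql greetings,
    # and up again after 2 consecutive successful ones
    check interval=3000 rise=2 fall=3 timeout=1000 type=mysql;
}
</geshi>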
=== check_http_send ===
'''syntax:''' ''check_http_send http_packet''
'''default:''' ''"GET / HTTP/1.0\r\n\r\n"''
'''context:''' ''upstream''
'''description:''' If the check type is http, the check function sends this http packet to check the upstream server.
=== check_http_expect_alive ===
'''syntax:''' ''check_http_expect_alive [ http_2xx | http_3xx | http_4xx | http_5xx ]''
'''default:''' ''http_2xx | http_3xx''
'''context:''' ''upstream''
'''description:''' These status codes indicate that the upstream server's http response is OK and the backend is alive.
=== check_smtp_send ===
'''syntax:''' ''check_smtp_send smtp_packet''
'''default:''' ''"HELO smtp.localdomain\r\n"''
'''context:''' ''upstream''
'''description:''' If the check type is smtp, the check function sends this smtp packet to check the upstream server.
=== check_smtp_expect_alive ===
'''syntax:''' ''check_smtp_expect_alive [smtp_2xx | smtp_3xx | smtp_4xx | smtp_5xx]''
'''default:''' ''smtp_2xx''
'''context:''' ''upstream''
'''description:''' These status codes indicate that the upstream server's smtp response is OK and the backend is alive.
=== check_shm_size ===
'''syntax:''' ''check_shm_size size''
'''default:''' ''(number_of_checked_upstream_blocks + 1) * pagesize''
'''context:''' ''tcp''
'''description:''' If you store hundreds of servers in one upstream block, the shared memory for health checking may not be enough; you can enlarge it with this directive.
=== tcp_check_status ===
'''syntax:''' ''tcp_check_status''
'''default:''' ''none''
'''context:''' ''location''
'''description:''' Displays the status of the health-checked servers over HTTP. This directive is set in the http block.
The table field meanings are:
* ''Index'': the server's index in the check table
* ''Name'': the upstream server name
* ''Status'': the marked status of the server
* ''Busyness'': the number of connections currently connected to the server
* ''Rise counts'': the number of successful checks
* ''Fall counts'': the number of failed checks
* ''Access counts'': the number of times this server has been accessed
* ''Check type'': the type of the check packet
'''ngx_tcp_upstream_busyness_module'''
=== busyness ===
'''syntax:''' ''busyness''
'''default:''' ''none''
'''context:''' ''upstream''
'''description:''' The upstream servers are dispatched according to the backend servers' busyness.
'''ngx_tcp_upstream_ip_hash_module'''
=== ip_hash ===
'''syntax:''' ''ip_hash''
'''default:''' ''none''
'''context:''' ''upstream''
'''description:''' The upstream servers are dispatched by ip_hash.
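For example, a minimal sketch (the addresses are placeholders):
<geshi lang="nginx">
upstream cluster {
    ip_hash;
    server 192.168.0.1:80;
    server 192.168.0.2:80;
}
</geshi>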
== ngx_tcp_proxy_module ==
=== proxy_pass ===
'''syntax:''' ''proxy_pass host:port''
'''default:''' ''none''
'''context:''' ''server''
'''description:''' Proxies the request to the backend server. The default port is 80.
=== proxy_buffer ===
'''syntax:''' ''proxy_buffer size''
'''default:''' ''4k''
'''context:''' ''tcp, server''
'''description:''' Sets the size of the proxy buffer.
=== proxy_connect_timeout ===
'''syntax:''' ''proxy_connect_timeout milliseconds''
'''default:''' ''60000''
'''context:''' ''tcp, server''
'''description:''' Sets the timeout for connecting to backends.
=== proxy_read_timeout ===
'''syntax:''' ''proxy_read_timeout milliseconds''
'''default:''' ''60000''
'''context:''' ''tcp, server''
'''description:''' Sets the timeout for reading from backends.
=== proxy_send_timeout ===
'''syntax:''' ''proxy_send_timeout milliseconds''
'''default:''' ''60000''
'''context:''' ''tcp, server''
'''description:''' Sets the timeout for sending to backends.
== ngx_tcp_websocket_module ==
=== websocket_pass ===
'''syntax:''' ''websocket_pass [path] host:port''
'''default:''' ''none''
'''context:''' ''server''
'''description:''' Proxies the websocket request to the backend server. The default port is 80. You can specify several different paths in the same server block.
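For example, a sketch routing two paths to different backends (the addresses and paths are placeholders):
<geshi lang="nginx">
tcp {
    server {
        listen 8080;
        websocket_pass /chat 192.168.0.3:80;
        websocket_pass /echo 192.168.0.4:80;
    }
}
</geshi>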
=== websocket_buffer ===
'''syntax:''' ''websocket_buffer size''
'''default:''' ''4k''
'''context:''' ''tcp, server''
'''description:''' Sets the size of the proxy buffer.
=== websocket_connect_timeout ===
'''syntax:''' ''websocket_connect_timeout milliseconds''
'''default:''' ''60000''
'''context:''' ''tcp, server''
'''description:''' Sets the timeout for connecting to backends.
=== websocket_read_timeout ===
'''syntax:''' ''websocket_read_timeout milliseconds''
'''default:''' ''60000''
'''context:''' ''tcp, server''
'''description:''' Sets the timeout for reading from backends. The effective timeout is the minimum of this value and the ''timeout'' directive, so if you want a long timeout for your websockets, make sure to set both parameters.
=== websocket_send_timeout ===
'''syntax:''' ''websocket_send_timeout milliseconds''
'''default:''' ''60000''
'''context:''' ''tcp, server''
'''description:''' Sets the timeout for sending to backends.
== ngx_tcp_ssl_module ==
The default config file includes this ngx_tcp_ssl_module. If you want to compile nginx without ngx_tcp_ssl_module, copy ngx_tcp_proxy_module/config_without_ssl to ngx_tcp_proxy_module/config, then reconfigure and compile nginx.
=== ssl ===
'''syntax:''' ''ssl [on|off] ''
'''default:''' ''ssl off''
'''context:''' ''tcp, server''
Enables SSL for a server.
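A minimal sketch of an SSL-enabled tcp server (the certificate paths and upstream name are placeholders):
<geshi lang="nginx">
tcp {
    server {
        listen 8443;
        ssl on;
        ssl_certificate     cert.pem;
        ssl_certificate_key cert.pem;
        proxy_pass cluster;
    }
}
</geshi>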
=== ssl_certificate ===
'''syntax:''' ''ssl_certificate file''
'''default:''' ''ssl_certificate cert.pem''
'''context:''' ''tcp, server''
This directive specifies the file containing the certificate, in PEM format. This file can contain also other certificates and the server private key.
=== ssl_certificate_key ===
'''syntax:''' ''ssl_certificate_key file''
'''default:''' ''ssl_certificate_key cert.pem''
'''context:''' ''tcp, server''
This directive specifies the file containing the private key, in PEM format.
=== ssl_client_certificate ===
'''syntax:''' ''ssl_client_certificate file''
'''default:''' ''none''
'''context:''' ''tcp, server''
This directive specifies the file containing the CA (root) certificate, in PEM format, that is used for validating client certificates.
=== ssl_dhparam ===
'''syntax:''' ''ssl_dhparam file''
'''default:''' ''none''
'''context:''' ''tcp, server''
This directive specifies a file containing Diffie-Hellman key agreement protocol cryptographic parameters, in PEM format, utilized for exchanging session keys between server and client.
=== ssl_ciphers ===
'''syntax:''' ''ssl_ciphers openssl_cipherlist_spec''
'''default:''' ''ssl_ciphers HIGH:!aNULL:!MD5''
'''context:''' ''tcp, server''
This directive describes the list of cipher suites the server supports for establishing a secure connection. Cipher suites are specified in the [http://openssl.org/docs/apps/ciphers.html OpenSSL] cipherlist format, for example:
<geshi lang="nginx">
ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv2:+EXP;
</geshi>
The complete cipherlist supported by the currently installed version of OpenSSL in your platform can be obtained by issuing the command:
<pre>
openssl ciphers
</pre>
=== ssl_crl ===
'''syntax:''' ''ssl_crl file''
'''default:''' ''none''
'''context:''' ''tcp, server''
This directive specifies the filename of a Certificate Revocation List, in PEM format, which is used to check the revocation status of certificates.
=== ssl_prefer_server_ciphers ===
'''syntax:''' ''ssl_prefer_server_ciphers [on|off] ''
'''default:''' ''ssl_prefer_server_ciphers off''
'''context:''' ''tcp, server''
Requires that the server's cipher suites be preferred over the client's supported cipher suites for the SSLv3 and TLSv1 protocols.
=== ssl_protocols ===
'''syntax:''' ''ssl_protocols [SSLv2] [SSLv3] [TLSv1] [TLSv1.1] [TLSv1.2]''
'''default:''' ''ssl_protocols SSLv3 TLSv1 TLSv1.1 TLSv1.2''
'''context:''' ''tcp, server''
This directive enables the protocol versions specified.
=== ssl_verify_client ===
'''syntax:''' ''ssl_verify_client on|off|optional''
'''default:''' ''ssl_verify_client off''
'''context:''' ''tcp, server''
This directive enables the verification of the client identity. The 'optional' parameter checks the client identity using its certificate if one was made available to the server.
=== ssl_verify_depth ===
'''syntax:''' ''ssl_verify_depth number''
'''default:''' ''ssl_verify_depth 1''
'''context:''' ''tcp, server''
This directive sets how deep the server should go in the client-provided certificate chain in order to verify the client identity.
=== ssl_session_cache ===
'''syntax:''' ''ssl_session_cache off|none|builtin:size and/or shared:name:size''
'''default:''' ''ssl_session_cache off''
'''context:''' ''tcp, server''
The directive sets the types and sizes of caches to store the SSL sessions.
The cache types are:
* off -- Hard off: nginx says explicitly to a client that sessions cannot be reused.
* none -- Soft off: nginx says to a client that sessions can be reused, but nginx actually never reuses them. This is a workaround for some mail clients, as ssl_session_cache may be used in a mail proxy as well as in an HTTP server.
* builtin -- the OpenSSL built-in cache, used inside one worker process only. The cache size is assigned in the number of sessions. Note: there appears to be a memory fragmentation issue when using this method; please take that into consideration. See "References" below.
* shared -- the cache is shared between all worker processes. The size of the cache is assigned in bytes: 1 MB cache can contain roughly 4000 sessions. Each shared cache must be given an arbitrary name. A shared cache with a given name can be used in several virtual hosts.
It's possible to use both types of cache &mdash; builtin and shared &mdash; simultaneously, for example:
<geshi lang="nginx">
ssl_session_cache builtin:1000 shared:SSL:10m;
</geshi>
Bear in mind, however, that using only the shared cache, i.e., without the builtin cache, should be more effective.
=== ssl_session_timeout ===
'''syntax:''' ''ssl_session_timeout time''
'''default:''' ''ssl_session_timeout 5m''
'''context:''' ''tcp, server''
This directive defines the maximum time during which the client can re-use the previously negotiated cryptographic parameters of the secure session that is stored in the SSL cache.
= Compatibility =
* My test bed is 0.7.65+
= Notes =
The http_response_parse.rl and smtp_response_parse.rl are [http://www.complang.org/ragel/ ragel] scripts; you can edit the scripts and compile them like this:
<geshi lang="bash">
$ ragel -G2 http_response_parse.rl
$ ragel -G2 smtp_response_parse.rl
</geshi>
= TODO =
* refactor this module to make it more extensible for third-party modules
* manipulate headers like the http module's proxy_set_header
* built-in variable support
* custom log format
* syslog support
* FTP/IRC proxying
= Known Issues =
* This module can't use the same listening port with the HTTP module.
= Changelogs =
== v0.2.0 ==
* add ssl proxy module
* add websocket proxy module
* add upstream busyness module
* add tcp access log module
== v0.19 ==
* add many check methods
== v0.1 ==
* first release
= Authors =
Weibin Yao(姚伟斌) ''yaoweibin at gmail dot com''
= Copyright & License =
This README template was copied from [http://github.com/agentzh agentzh].
I borrowed a lot of code from the upstream and mail modules of the nginx 0.7.* core. This part of the code is copyrighted by Igor Sysoev. The health check part borrows the design of Jack Lindamood's healthcheck module [http://github.com/cep21/healthcheck_nginx_upstreams healthcheck_nginx_upstreams].
This module is licensed under the BSD license.
Copyright (C) 2013 by Weibin Yao <yaoweibin@gmail.com>.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -0,0 +1,99 @@
// Copyright 2014 Isis Innovation Limited and the authors of InfiniTAM
#include <metal_stdlib>
#include "../../DeviceAgnostic/ITMSceneReconstructionEngine.h"
#include "../../DeviceAgnostic/ITMVisualisationEngine.h"
#include "ITMVisualisationEngine_Metal.h"
using namespace metal;
kernel void genericRaycastVH_device(DEVICEPTR(Vector4f) *pointsRay [[ buffer(0) ]],
const CONSTPTR(ITMVoxel) *voxelData [[ buffer(1) ]],
const CONSTPTR(typename ITMVoxelIndex::IndexData) *voxelIndex [[ buffer(2) ]],
const CONSTPTR(Vector2f) *minmaxdata [[ buffer(3) ]],
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(4) ]],
uint2 threadIdx [[ thread_position_in_threadgroup ]],
uint2 blockIdx [[ threadgroup_position_in_grid ]],
uint2 blockDim [[ threads_per_threadgroup ]])
{
int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
int locId = x + y * params->imgSize.x;
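// minmaxdata (per-pixel ray-bound data) is stored subsampled by
// minmaximg_subsample, so map the pixel to its coarse cell first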
int locId2 = (int)floor((float)x / minmaximg_subsample) + (int)floor((float)y / minmaximg_subsample) * params->imgSize.x;
castRay<ITMVoxel, ITMVoxelIndex>(pointsRay[locId], x, y, voxelData, voxelIndex, params->invM, params->invProjParams,
params->voxelSizes.y, params->lightSource.w, minmaxdata[locId2]);
}
kernel void genericRaycastVGMissingPoints_device(DEVICEPTR(Vector4f) *forwardProjection [[ buffer(0) ]],
const CONSTPTR(int) *fwdProjMissingPoints [[ buffer(1) ]],
const CONSTPTR(ITMVoxel) *voxelData [[ buffer(2) ]],
const CONSTPTR(typename ITMVoxelIndex::IndexData) *voxelIndex [[ buffer(3) ]],
const CONSTPTR(Vector2f) *minmaxdata [[ buffer(4) ]],
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(5) ]],
uint2 threadIdx [[ thread_position_in_threadgroup ]],
uint2 blockIdx [[ threadgroup_position_in_grid ]],
uint2 blockDim [[ threads_per_threadgroup ]])
{
int pointId = threadIdx.x + blockIdx.x * blockDim.x;
if (pointId >= params->imgSize.z) return;
int locId = fwdProjMissingPoints[pointId];
int y = locId / params->imgSize.x, x = locId - y * params->imgSize.x;
int locId2 = (int)floor((float)x / minmaximg_subsample) + (int)floor((float)y / minmaximg_subsample) * params->imgSize.x;
castRay<ITMVoxel, ITMVoxelIndex>(forwardProjection[locId], x, y, voxelData, voxelIndex, params->invM, params->invProjParams,
params->voxelSizes.y, params->lightSource.w, minmaxdata[locId2]);
}
kernel void renderICP_device(const CONSTPTR(Vector4f) *pointsRay [[ buffer(0) ]],
DEVICEPTR(Vector4f) *pointsMap [[ buffer(1) ]],
DEVICEPTR(Vector4f) *normalsMap [[ buffer(2) ]],
DEVICEPTR(Vector4u) *outRendering [[ buffer(3) ]],
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(4) ]],
uint2 threadIdx [[ thread_position_in_threadgroup ]],
uint2 blockIdx [[ threadgroup_position_in_grid ]],
uint2 blockDim [[ threads_per_threadgroup ]])
{
int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
processPixelICP<false>(outRendering, pointsMap, normalsMap, pointsRay, params->imgSize.xy, x, y, params->voxelSizes.x, TO_VECTOR3(params->lightSource));
}
kernel void renderForward_device(DEVICEPTR(Vector4u) *outRendering [[ buffer(0) ]],
const CONSTPTR(Vector4f) *pointsRay [[ buffer(1) ]],
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(2) ]],
uint2 threadIdx [[ thread_position_in_threadgroup ]],
uint2 blockIdx [[ threadgroup_position_in_grid ]],
uint2 blockDim [[ threads_per_threadgroup ]])
{
int x = threadIdx.x + blockIdx.x * blockDim.x, y = threadIdx.y + blockIdx.y * blockDim.y;
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
processPixelForwardRender<false>(outRendering, pointsRay, params->imgSize.xy, x, y, params->voxelSizes.x, TO_VECTOR3(params->lightSource));
}
kernel void forwardProject_device(DEVICEPTR(Vector4f) *forwardProjection [[ buffer(0) ]],
const CONSTPTR(Vector4f) *pointsRay [[ buffer(1) ]],
const CONSTPTR(CreateICPMaps_Params) *params [[ buffer(2) ]],
uint2 threadIdx [[ thread_position_in_threadgroup ]],
uint2 blockIdx [[ threadgroup_position_in_grid ]],
uint2 blockDim [[ threads_per_threadgroup ]])
{
int x = (threadIdx.x + blockIdx.x * blockDim.x), y = (threadIdx.y + blockIdx.y * blockDim.y);
if (x >= params->imgSize.x || y >= params->imgSize.y) return;
int locId = x + y * params->imgSize.x;
Vector4f pixel = pointsRay[locId];
int locId_new = forwardProjectPixel(pixel * params->voxelSizes.x, params->M, params->projParams, params->imgSize.xy);
if (locId_new >= 0) forwardProjection[locId_new] = pixel;
}

View File

@@ -0,0 +1,16 @@
//
// Siesta.h
// Siesta
//
// Created by Paul on 2015/6/14.
// Copyright © 2015 Bust Out Solutions. MIT license.
//
#import <UIKit/UIKit.h>
//! Project version number for Siesta.
FOUNDATION_EXPORT double SiestaVersionNumber;
//! Project version string for Siesta.
FOUNDATION_EXPORT const unsigned char SiestaVersionString[];

31
samples/PHP/mail.phps Normal file
View File

@@ -0,0 +1,31 @@
<?php
/**
* This example shows sending a message using PHP's mail() function.
*/
require '../PHPMailerAutoload.php';
//Create a new PHPMailer instance
$mail = new PHPMailer;
//Set who the message is to be sent from
$mail->setFrom('from@example.com', 'First Last');
//Set an alternative reply-to address
$mail->addReplyTo('replyto@example.com', 'First Last');
//Set who the message is to be sent to
$mail->addAddress('whoto@example.com', 'John Doe');
//Set the subject line
$mail->Subject = 'PHPMailer mail() test';
//Read an HTML message body from an external file, convert referenced images to embedded,
//convert HTML into a basic plain-text alternative body
$mail->msgHTML(file_get_contents('contents.html'), dirname(__FILE__));
//Replace the plain text body with one created manually
$mail->AltBody = 'This is a plain-text message body';
//Attach an image file
$mail->addAttachment('images/phpmailer_mini.png');
//send the message, check for errors
if (!$mail->send()) {
echo "Mailer Error: " . $mail->ErrorInfo;
} else {
echo "Message sent!";
}


@@ -0,0 +1,90 @@
create or replace package plsqlguide is
-- Author : Jared Petersen
-- Created : 9/22/2015 12:26:22 AM
-- Purpose : Basic PLSQL template/guide
/* Procedures */
procedure p_main;
end plsqlguide;
/
create or replace package body plsqlguide is
/* Main entry point (homepage) */
procedure p_main
is
begin
htp.prn('
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags -->
<title>PL/SQL Sample Application</title>
<!-- Bootstrap -->
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css">
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<!-- Static navbar -->
<nav class="navbar navbar-default navbar-static-top">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand" href="#">PL/SQL Sample Application</a>
</div>
</div>
</nav>
<div class="container">
<table class="table table-bordered">
<tr>
<th>#</th>
<th>Name</th>
<th>Description</th>
<th>Quantity</th>
<th>Price</th>
</tr>
');
-- Fill out the parts table
for row in (select * from parts) loop
htp.prn('
<tr>
<td>'||row.pid||'</td>
<td>'||row.name||'</td>
<td>'||row.description||'</td>
<td>'||row.quantity||'</td>
<td>'||row.price||'</td>
</tr>
');
end loop;
htp.prn('
</table>
</div> <!-- /container -->
<!-- jQuery (necessary for Bootstrap''s JavaScript plugins) -->
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"></script>
<!-- Include all compiled plugins (below), or include individual files as needed -->
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
</body>
</html>
');
end p_main;
begin
-- Initialization
null;
end plsqlguide;
/

10
samples/Perl/Sample.pod Normal file

@@ -0,0 +1,10 @@
use strict;
use warnings;
package DZT::Sample;
sub return_arrayref_of_values_passed {
my $invocant = shift;
return \@_;
}
1;


@@ -12,7 +12,6 @@ unless EVAL 'EVAL("1", :lang<perl5>)' {
die unless
EVAL(q/
package My::Hash;
use strict;
sub new {
my ($class, $ref) = @_;

24
samples/Pickle/data.pkl Normal file

@@ -0,0 +1,24 @@
(dp0
S'a'
p1
(lp2
I1
aF2.0
aI3
ac__builtin__
complex
p3
(F4.0
F6.0
tp4
Rp5
asS'c'
p6
NsS'b'
p7
(S'string'
p8
VUnicode string
p9
tp10
s.

File diff suppressed because one or more lines are too long

36
samples/Pickle/random.pkl Normal file

@@ -0,0 +1,36 @@
cnumpy.core.multiarray
_reconstruct
p0
(cnumpy
ndarray
p1
(I0
tp2
S'b'
p3
tp4
Rp5
(I1
(I100
tp6
cnumpy
dtype
p7
(S'f8'
p8
I0
I1
tp9
Rp10
(I3
S'<'
p11
NNNI-1
I-1
I0
tp12
bI00
S'\x1cc~\xc3\xa7r\xed?\xe5${\xec\xd6\xcd\xed?\x809-\x02%\xa9\xa2?F\x0f\x1d\xe8\xef\xa3\xdb?\xfe\xd1\x0c\xb7\x83\x13\xef?\xe0<o\xa1\xa9^\xdf?CE\x96\x88/o\xe2?<\xd8\xa1\x96\xa2T\xce?\x152\x8e\xe5\xa8\x7f\xe8?\xe4\xb7\x9a\xe0$\x0f\xdc?\x90\xe4\xe2\xd4=\xce\xc3?Ix\xe3P\xc4C\xe1?\x16\xd17\xc1Y\xfc\xed?5\xd7\xae@4\xfa\xe8?\x0f\x87\x8d>\xfcO\xe5?Y\x97\xcb"\xa7%\xe7?\x9b\x8d\x16\xda\x97\xe1\xeb?T\x14\xbd\xfe|\xf4\xd0?\x18\xdfH\xc56A\xba?\x90\xc5\xfb\xc63:\xe5?\xbf%\xad\xe5.\x86\xe9?\xc6\x0c\xa9\x8c\xd7\xd5\xe9?\xf8\xafc:\x84g\xd7?\xf8\x98\x879\x9a\x16\xee?\xba\xdf\x88\x8az\x06\xe2?~g-\xeb\xc8\xed\xee?\x08A\xcc\x8c\xe7>\xef?\xceD\xc4ar\n\xdc?\x92w\xbb\xa34\xb1\xd9?\x88\xb9\xc0{u\xa3\xdc?d\x1a\xad\xe8\xf3\x14\xdd?\x9c\x95\x13\x96o?\xe5?\x9cT[\xb8r\xa9\xe5?0\xf1\x01+(\x0f\xdf?W\xbdjqD&\xed?c\xcf1-W\xe6\xe1?\xce\xbc\xe1{zW\xd9?"d\xcf\xd7\x13\x93\xde?\xf2P\xf6\xc3\xd6\x87\xd5?\xc2\x0e\x92q\x89\xda\xd5?\xc0:B\x1bb\x00\x9e?Y\xafHmr\x80\xe3?\x1co\xa7\xba\xa5/\xe4?\xa2\xbc \x9c\xddB\xd0?\xd2L\x935\x17\'\xee?|\x8cM\xeb\x97=\xe8?\x0f0xN*V\xea?\x81p\xe3,!\xf2\xee?\xf5w\xed\x10\x9eu\xe0?\xc5\x16\\LR\xb5\xe1?\xbeh\x04\xa4g\xe5\xd6?\xea\xc0\xb9\xf0\xb2\xd8\xd9?\xac\x9c\xeep\x1a\xa9\xd8?@W9hp\x16\xb1?\xc4\xedS\xd6V\xa1\xed?\x93,!\xdc\xa1\x8b\xe9?\x80)\xb1\xa6[T\xc9?\xac\xbc\x8a\xd21\xdd\xc5?\x80\x9c.g\xf1\xf2\xc6?\tLu\xc3\xf7U\xe9?n\'\x9f?\xbe\xf9\xe9?\xa3\xe7K\x1c\xb3\xa9\xea?X\x98\x1a\xcb\xa0\xcd\xd3? \xb6O\x9c\x1bQ\xc2?"\x89[\xad1\x8e\xea?\xdd\x8f\xa0P\xc7\x0e\xe2?c\xa4j\xa3\r\xac\xef?\xba\xb6\x0f\x8emo\xef?\xe0\xed\xa0\xc5R9\xab?U\xf1\xcd\xcf\xbf\xcb\xea?\x89*#\x06\xb0|\xe8?d\xa3\xad\xcd\xe0]\xcc?\xb5\xe78\xa7w\x13\xe3?\xce\x99\x98\xefS%\xd7?\xb1\xf8\xd8\x8eI\x13\xef?\x91`]\x93\xd4 \xec?\xc0\rPz\xee\xbd\xe7?7\x92\xd4\x0fP\x8f\xe1?L\x0f\xaf\xa9\xc3\x19\xdd?\\}\x15X\x870\xc7? ~ t\xcat\xb1?@?\xec\x97u\x05\xe9?F\x8d:\xac4D\xdb?qY\xe1Qk|\xe2? \xaf\xeaj\xa5\x04\xab?J[\x1al;\x00\xd5?\x00^{n\xc2\xf1S?\xb0\x82dN\xda\xb5\xc7?\xe0 \x07\xe1?R\x92?\xc4\r\x08+\x99J\xe1?I|&U\x19\xc4\xe1?|*\xf9\xebq\x7f\xed?\xbc*\x93\x89k\xab\xe9?oiL\x90;\xe0\xef?\x96\xcd\x9b\xff\x18g\xdc?pt\xb4\xa5\x9c\xa2\xbc?Nu]w*\xb7\xd2?\x88k\xac\xd0\xfd\xbf\xd5?Q\x02$b\xfeH\xea?5\xf6\t\xb6K\x1a\xee?'
p13
tp14
b.

10
samples/Pickle/save.pkl Normal file

@@ -0,0 +1,10 @@
(dp0
S'lion'
p1
S'yellow'
p2
sS'kitty'
p3
S'red'
p4
s.
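The .pkl samples above are pickle protocol 0 streams, which are plain ASCII opcodes: "(dp0" opens a dict, S'...' pushes a string, "s" stores a key/value pair, and "." ends the stream. As a minimal sketch using only the standard library, save.pkl round-trips like this (the S'...' opcodes shown are Python 2 output; Python 3 writes V opcodes for strings, but the loaded value is identical):

import pickle

data = {'lion': 'yellow', 'kitty': 'red'}

# Protocol 0 is the ASCII opcode format shown above: (dp0, S'...', s, .
with open('save.pkl', 'wb') as f:
    pickle.dump(data, f, protocol=0)

with open('save.pkl', 'rb') as f:
    assert pickle.load(f) == data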

30
samples/Pony/circle.pony Normal file

@@ -0,0 +1,30 @@
use "collections"
class Circle
var _radius: F32
new create(radius': F32) =>
_radius = radius'
fun ref get_radius(): F32 =>
_radius
fun ref get_area(): F32 =>
F32.pi() * _radius.pow(2)
fun ref get_circumference(): F32 =>
2 * _radius * F32.pi()
actor Main
new create(env: Env) =>
for i in Range[F32](1.0, 101.0) do
let c = Circle(i)
var str =
"Radius: " + c.get_radius().string() + "\n" +
"Circumference: " + c.get_circumference().string() + "\n" +
"Area: " + c.get_area().string() + "\n"
env.out.print(str)
end

32
samples/Pony/counter.pony Normal file

@@ -0,0 +1,32 @@
use "collections"
actor Counter
var _count: U32
new create() =>
_count = 0
be increment() =>
_count = _count + 1
be get_and_reset(main: Main) =>
main.display(_count)
_count = 0
actor Main
var _env: Env
new create(env: Env) =>
_env = env
var count: U32 = try env.args(1).u32() else 10 end
var counter = Counter
for i in Range[U32](0, count) do
counter.increment()
end
counter.get_and_reset(this)
be display(result: U32) =>
_env.out.print(result.string())

261
samples/Pony/gups-opt.pony Normal file

@@ -0,0 +1,261 @@
use "options"
use "time"
use "collections"
class Config
var logtable: U64 = 20
var iterate: U64 = 10000
var logchunk: U64 = 10
var logactors: U64 = 2
fun ref apply(env: Env): Bool =>
var options = Options(env)
options
.add("logtable", "l", I64Argument)
.add("iterate", "i", I64Argument)
.add("chunk", "c", I64Argument)
.add("actors", "a", I64Argument)
for option in options do
match option
| ("table", var arg: I64) => logtable = arg.u64()
| ("iterate", var arg: I64) => iterate = arg.u64()
| ("chunk", var arg: I64) => logchunk = arg.u64()
| ("actors", var arg: I64) => logactors = arg.u64()
| let err: ParseError =>
err.report(env.out)
env.out.print(
"""
gups_opt [OPTIONS]
--table N log2 of the total table size. Defaults to 20.
--iterate N number of iterations. Defaults to 10000.
--chunk N log2 of the chunk size. Defaults to 10.
--actors N log2 of the actor count. Defaults to 2.
"""
)
return false
end
end
env.out.print(
"logtable: " + logtable.string() +
"\niterate: " + iterate.string() +
"\nlogchunk: " + logchunk.string() +
"\nlogactors: " + logactors.string()
)
true
actor Main
let _env: Env
let _config: Config = Config
var _updates: U64 = 0
var _confirm: U64 = 0
let _start: U64
var _actors: Array[Updater] val
new create(env: Env) =>
_env = env
if _config(env) then
let actor_count = 1 << _config.logactors
let loglocal = _config.logtable - _config.logactors
let chunk_size = 1 << _config.logchunk
let chunk_iterate = chunk_size * _config.iterate
_updates = chunk_iterate * actor_count
_confirm = actor_count
var updaters = recover Array[Updater](actor_count) end
for i in Range[U64](0, actor_count) do
updaters.push(Updater(this, actor_count, i, loglocal, chunk_size,
chunk_iterate * i))
end
_actors = consume updaters
_start = Time.nanos()
for a in _actors.values() do
a.start(_actors, _config.iterate)
end
else
_start = 0
_actors = recover Array[Updater] end
end
be done() =>
if (_confirm = _confirm - 1) == 1 then
for a in _actors.values() do
a.done()
end
end
be confirm() =>
_confirm = _confirm + 1
if _confirm == _actors.size() then
let elapsed = (Time.nanos() - _start).f64()
let gups = _updates.f64() / elapsed
_env.out.print(
"Time: " + (elapsed / 1e9).string() +
"\nGUPS: " + gups.string()
)
end
actor Updater
let _main: Main
let _index: U64
let _updaters: U64
let _chunk: U64
let _mask: U64
let _loglocal: U64
let _output: Array[Array[U64] iso]
let _reuse: List[Array[U64] iso] = List[Array[U64] iso]
var _others: (Array[Updater] val | None) = None
var _table: Array[U64]
var _rand: U64
new create(main:Main, updaters: U64, index: U64, loglocal: U64, chunk: U64,
seed: U64)
=>
_main = main
_index = index
_updaters = updaters
_chunk = chunk
_mask = updaters - 1
_loglocal = loglocal
_rand = PolyRand.seed(seed)
_output = _output.create(updaters)
let size = 1 << loglocal
_table = Array[U64].undefined(size)
var offset = index * size
try
for i in Range[U64](0, size) do
_table(i) = i + offset
end
end
be start(others: Array[Updater] val, iterate: U64) =>
_others = others
iteration(iterate)
be apply(iterate: U64) =>
iteration(iterate)
fun ref iteration(iterate: U64) =>
let chk = _chunk
for i in Range(0, _updaters) do
_output.push(
try
_reuse.pop()
else
recover Array[U64](chk) end
end
)
end
for i in Range(0, _chunk) do
var datum = _rand = PolyRand(_rand)
var updater = (datum >> _loglocal) and _mask
try
if updater == _index then
_table(i) = _table(i) xor datum
else
_output(updater).push(datum)
end
end
end
try
let to = _others as Array[Updater] val
repeat
let data = _output.pop()
if data.size() > 0 then
to(_output.size()).receive(consume data)
else
_reuse.push(consume data)
end
until _output.size() == 0 end
end
if iterate > 1 then
apply(iterate - 1)
else
_main.done()
end
be receive(data: Array[U64] iso) =>
try
for i in Range(0, data.size()) do
let datum = data(i)
var j = (datum >> _loglocal) and _mask
_table(j) = _table(j) xor datum
end
data.clear()
_reuse.push(consume data)
end
be done() =>
_main.confirm()
primitive PolyRand
fun apply(prev: U64): U64 =>
(prev << 1) xor if prev.i64() < 0 then _poly() else 0 end
fun seed(from: U64): U64 =>
var n = from % _period()
if n == 0 then
return 1
end
var m2 = Array[U64].undefined(64)
var temp = U64(1)
try
for i in Range(0, 64) do
m2(i) = temp
temp = this(temp)
temp = this(temp)
end
end
var i: U64 = 64 - n.clz()
var r = U64(2)
try
while i > 0 do
temp = 0
for j in Range(0, 64) do
if ((r >> j) and 1) != 0 then
temp = temp xor m2(j)
end
end
r = temp
i = i - 1
if ((n >> i) and 1) != 0 then
r = this(r)
end
end
end
r
fun _poly(): U64 => 7
fun _period(): U64 => 1317624576693539401
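PolyRand above is the HPCC RandomAccess-style 64-bit LFSR: shift left one bit and xor in the polynomial (the 7 returned by _poly) whenever the top bit was set; seed then uses the m2 table of squared steps to jump an arbitrary distance into the sequence. A literal Python translation of apply, with explicit 64-bit wraparound added because Python integers are unbounded (poly_rand is an illustration name, not from the sample):

MASK64 = (1 << 64) - 1
POLY = 7  # the constant _poly() returns above

def poly_rand(prev):
    # "prev.i64() < 0" in the Pony code just tests the top bit
    out = (prev << 1) & MASK64
    if prev & (1 << 63):
        out ^= POLY
    return out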


@@ -0,0 +1,3 @@
actor Main
new create(env: Env) =>
env.out.print("Hello, world.")


@@ -0,0 +1,188 @@
use "files"
use "options"
use "collections"
actor Worker
new mandelbrot(main: Main, x: U64, y: U64, width: U64, iterations: U64,
limit: F32, real: Array[F32] val, imaginary: Array[F32] val)
=>
var view: Array[U8] iso =
recover
Array[U8]((y - x) * (width >> 3))
end
let group_r = Array[F32].undefined(8)
let group_i = Array[F32].undefined(8)
var row = x
try
while row < y do
let prefetch_i = imaginary(row)
var col: U64 = 0
while col < width do
var j: U64 = 0
while j < 8 do
group_r.update(j, real(col + j))
group_i.update(j, prefetch_i)
j = j + 1
end
var bitmap: U8 = 0xFF
var n = iterations
repeat
var mask: U8 = 0x80
var k: U64 = 0
while k < 8 do
let r = group_r(k)
let i = group_i(k)
group_r.update(k, ((r * r) - (i * i)) + real(col + k))
group_i.update(k, (2.0 * r * i) + prefetch_i)
if ((r * r) + (i * i)) > limit then
bitmap = bitmap and not mask
end
mask = mask >> 1
k = k + 1
end
until (bitmap == 0) or ((n = n - 1) == 1) end
view.push(bitmap)
col = col + 8
end
row = row + 1
end
main.draw(x * (width >> 3), consume view)
end
actor Main
var iterations: U64 = 50
var limit: F32 = 4.0
var chunks: U64 = 16
var width: U64 = 16000
var actors: U64 = 0
var header: U64 = 0
var real: Array[F32] val = recover Array[F32] end
var imaginary: Array[F32] val = recover Array[F32] end
var outfile: (File | None) = None
new create(env: Env) =>
try
arguments(env)
let length = width
let recip_width = 2.0 / width.f32()
var r = recover Array[F32](length) end
var i = recover Array[F32](length) end
for j in Range(0, width) do
r.push((recip_width * j.f32()) - 1.5)
i.push((recip_width * j.f32()) - 1.0)
end
real = consume r
imaginary = consume i
spawn_actors()
create_outfile()
end
be draw(offset: U64, pixels: Array[U8] val) =>
match outfile
| var out: File =>
out.seek_start(header + offset)
out.write(pixels)
if (actors = actors - 1) == 1 then
out.dispose()
end
end
fun ref create_outfile() =>
match outfile
| var f: File =>
f.print("P4\n " + width.string() + " " + width.string() + "\n")
header = f.size()
f.set_length((width * (width >> 3)) + header)
end
fun ref spawn_actors() =>
actors = ((width + (chunks - 1)) / chunks)
var rest = width % chunks
if rest == 0 then rest = chunks end
var x: U64 = 0
var y: U64 = 0
for i in Range(0, actors - 1) do
x = i * chunks
y = x + chunks
Worker.mandelbrot(this, x, y, width, iterations, limit, real, imaginary)
end
Worker.mandelbrot(this, y, y + rest, width, iterations, limit, real,
imaginary)
fun ref arguments(env: Env) ? =>
let options = Options(env)
options
.add("iterations", "i", I64Argument)
.add("limit", "l", F64Argument)
.add("chunks", "c", I64Argument)
.add("width", "w", I64Argument)
.add("output", "o", StringArgument)
for option in options do
match option
| ("iterations", var arg: I64) => iterations = arg.u64()
| ("limit", var arg: F64) => limit = arg.f32()
| ("chunks", var arg: I64) => chunks = arg.u64()
| ("width", var arg: I64) => width = arg.u64()
| ("output", var arg: String) =>
outfile = try File(FilePath(env.root, arg)) end
| let err: ParseError => err.report(env.out) ; usage(env) ; error
end
end
fun tag usage(env: Env) =>
env.out.print(
"""
mandelbrot [OPTIONS]
The binary output can be converted to a BMP with the following command
(ImageMagick Tools required):
convert <output> JPEG:<output>.jpg
Available options:
--iterations, -i Maximum amount of iterations to be done for each pixel.
Defaults to 50.
--limit, -l Square of the limit that pixels need to exceed in order
to escape from the Mandelbrot set.
Defaults to 4.0.
--chunks, -c Maximum line count of chunks the image should be
divided into for divide & conquer processing.
Defaults to 16.
--width, -w Lateral length of the resulting mandelbrot image.
Defaults to 16000.
--output, -o File to write the output to.
"""
)
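The Worker in the mandelbrot sample evaluates eight points per byte and packs their escape flags for the PBM (P4) output: every pixel starts as a set bit in 0xFF and is cleared with "bitmap = bitmap and not mask" once its point escapes the limit. The packing step in isolation, as a plain Python sketch (pack_escapes is a hypothetical helper name):

def pack_escapes(escaped):
    # escaped: eight booleans, True once (r*r + i*i) exceeded the limit
    bitmap = 0xFF        # all eight pixels assumed inside the set
    mask = 0x80          # high bit corresponds to the leftmost pixel
    for e in escaped:
        if e:
            bitmap &= ~mask & 0xFF   # the "bitmap and not mask" step
        mask >>= 1
    return bitmap

# only the first and last of the eight points escape -> 0b01111110
assert pack_escapes([True] + [False] * 6 + [True]) == 0x7E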

130
samples/Pony/mixed.pony Normal file

@@ -0,0 +1,130 @@
use "collections"
actor Worker
var _env: Env
new create(env: Env) =>
_env = env
var a: U64 = 86028157
var b: U64 = 329545133
var result = factorize(a*b)
var correct =
try
(result.size() == 2) and
(result(0) == 86028157) and
(result(1) == 329545133)
else
false
end
fun ref factorize(bigint: U64) : Array[U64] =>
var factors = Array[U64](2)
if bigint <= 3 then
factors.push(bigint)
else
var d: U64 = 2
var i: U64 = 0
var n = bigint
while d < n do
if (n % d) == 0 then
i = i + 1
factors.push(d)
n = n / d
else
d = if d == 2 then 3 else (d + 2) end
end
end
factors.push(d)
end
factors
actor Ring
var _env: Env
var _size: U32
var _pass: U32
var _repetitions: U32
var _next: Ring
new create(env: Env, size: U32, pass: U32, repetitions: U32) =>
_env = env
_size = size
_pass = pass
_repetitions = repetitions
_next = spawn_ring(_env, _size, _pass)
run()
new neighbor(env: Env, next: Ring) =>
_env = env
_next = next
_size = 0
_pass = 0
_repetitions = 0
be apply(i: U32) =>
if i > 0 then
_next(i - 1)
else
run()
end
fun ref run() =>
if _repetitions > 0 then
_repetitions = _repetitions - 1
_next(_pass * _size)
Worker(_env)
end
fun tag spawn_ring(env: Env, size: U32, pass': U32) : Ring =>
var next: Ring = this
for i in Range[U32](0, size) do
next = Ring.neighbor(env, next)
end
next
actor Main
var _size: U32 = 50
var _count: U32 = 20
var _pass: U32 = 10000
var _repetitions: U32 = 5
var _env: Env
new create(env: Env) =>
_env = env
try
arguments()
start_benchmark()
else
usage()
end
fun ref arguments() ? =>
_count = _env.args(1).u32()
_size = _env.args(2).u32()
_pass = _env.args(3).u32()
_repetitions = _env.args(4).u32()
fun ref start_benchmark() =>
for i in Range[U32](0, _count) do
Ring(_env, _size, _pass, _repetitions)
end
fun ref usage() =>
_env.out.print(
"""
mixed OPTIONS
N number of actors in each ring"
N number of rings"
N number of messages to pass around each ring"
N number of times to repeat"
"""
)

1427
samples/Ren'Py/example.rpy Normal file

File diff suppressed because it is too large

267
samples/Ruby/racc.rb Normal file

@@ -0,0 +1,267 @@
#
# DO NOT MODIFY!!!!
# This file is automatically generated by Racc 1.4.7
# from Racc grammar file "".
#
require 'racc/parser.rb'
module RJSON
class Parser < Racc::Parser
require 'rjson/handler'
attr_reader :handler
def initialize tokenizer, handler = Handler.new
@tokenizer = tokenizer
@handler = handler
super()
end
def next_token
@tokenizer.next_token
end
def parse
do_parse
handler
end
##### State transition tables begin ###
racc_action_table = [
9, 33, 9, 11, 13, 16, 19, 22, 9, 7,
23, 1, 9, 11, 13, 16, 19, 29, 30, 7,
21, 1, 9, 11, 13, 16, 19, 31, nil, 7,
21, 1, 23, 7, nil, 1 ]
racc_action_check = [
6, 27, 33, 33, 33, 33, 33, 3, 31, 33,
6, 33, 29, 29, 29, 29, 29, 12, 22, 29,
12, 29, 2, 2, 2, 2, 2, 25, nil, 2,
2, 2, 25, 0, nil, 0 ]
racc_action_pointer = [
24, nil, 20, 7, nil, nil, -2, nil, nil, nil,
nil, nil, 10, nil, nil, nil, nil, nil, nil, nil,
nil, nil, 18, nil, nil, 20, nil, -7, nil, 10,
nil, 6, nil, 0, nil, nil, nil ]
racc_action_default = [
-27, -12, -21, -27, -1, -2, -27, -10, -15, -26,
-8, -22, -27, -23, -17, -16, -24, -20, -18, -25,
-19, -11, -27, -13, -3, -27, -6, -27, -9, -21,
37, -27, -4, -21, -14, -5, -7 ]
racc_goto_table = [
8, 26, 24, 27, 10, 3, 25, 5, 4, 12,
nil, nil, nil, nil, 28, nil, nil, nil, nil, nil,
nil, 32, nil, nil, nil, nil, 35, 34, 27, nil,
nil, 36 ]
racc_goto_check = [
9, 7, 5, 8, 11, 1, 6, 3, 2, 12,
nil, nil, nil, nil, 11, nil, nil, nil, nil, nil,
nil, 5, nil, nil, nil, nil, 7, 9, 8, nil,
nil, 9 ]
racc_goto_pointer = [
nil, 5, 8, 7, nil, -4, 0, -5, -3, -2,
nil, 2, 7, nil, nil ]
racc_goto_default = [
nil, nil, 14, 18, 6, nil, nil, nil, 20, nil,
2, nil, nil, 15, 17 ]
racc_reduce_table = [
0, 0, :racc_error,
1, 14, :_reduce_none,
1, 14, :_reduce_none,
2, 15, :_reduce_none,
3, 15, :_reduce_none,
3, 19, :_reduce_none,
1, 19, :_reduce_none,
3, 20, :_reduce_none,
2, 16, :_reduce_none,
3, 16, :_reduce_none,
1, 23, :_reduce_10,
1, 24, :_reduce_11,
1, 17, :_reduce_12,
1, 18, :_reduce_13,
3, 25, :_reduce_none,
1, 25, :_reduce_none,
1, 22, :_reduce_none,
1, 22, :_reduce_none,
1, 22, :_reduce_none,
1, 26, :_reduce_none,
1, 26, :_reduce_20,
0, 27, :_reduce_none,
1, 27, :_reduce_22,
1, 27, :_reduce_23,
1, 27, :_reduce_24,
1, 27, :_reduce_25,
1, 21, :_reduce_26 ]
racc_reduce_n = 27
racc_shift_n = 37
racc_token_table = {
false => 0,
:error => 1,
:STRING => 2,
:NUMBER => 3,
:TRUE => 4,
:FALSE => 5,
:NULL => 6,
"," => 7,
":" => 8,
"[" => 9,
"]" => 10,
"{" => 11,
"}" => 12 }
racc_nt_base = 13
racc_use_result_var = true
Racc_arg = [
racc_action_table,
racc_action_check,
racc_action_default,
racc_action_pointer,
racc_goto_table,
racc_goto_check,
racc_goto_default,
racc_goto_pointer,
racc_nt_base,
racc_reduce_table,
racc_token_table,
racc_shift_n,
racc_reduce_n,
racc_use_result_var ]
Racc_token_to_s_table = [
"$end",
"error",
"STRING",
"NUMBER",
"TRUE",
"FALSE",
"NULL",
"\",\"",
"\":\"",
"\"[\"",
"\"]\"",
"\"{\"",
"\"}\"",
"$start",
"document",
"object",
"array",
"start_object",
"end_object",
"pairs",
"pair",
"string",
"value",
"start_array",
"end_array",
"values",
"scalar",
"literal" ]
Racc_debug_parser = false
##### State transition tables end #####
# reduce 0 omitted
# reduce 1 omitted
# reduce 2 omitted
# reduce 3 omitted
# reduce 4 omitted
# reduce 5 omitted
# reduce 6 omitted
# reduce 7 omitted
# reduce 8 omitted
# reduce 9 omitted
def _reduce_10(val, _values, result)
@handler.start_array
result
end
def _reduce_11(val, _values, result)
@handler.end_array
result
end
def _reduce_12(val, _values, result)
@handler.start_object
result
end
def _reduce_13(val, _values, result)
@handler.end_object
result
end
# reduce 14 omitted
# reduce 15 omitted
# reduce 16 omitted
# reduce 17 omitted
# reduce 18 omitted
# reduce 19 omitted
def _reduce_20(val, _values, result)
@handler.scalar val[0]
result
end
# reduce 21 omitted
def _reduce_22(val, _values, result)
n = val[0]; result = n.count('.') > 0 ? n.to_f : n.to_i
result
end
def _reduce_23(val, _values, result)
result = true
result
end
def _reduce_24(val, _values, result)
result = false
result
end
def _reduce_25(val, _values, result)
result = nil
result
end
def _reduce_26(val, _values, result)
@handler.scalar val[0].gsub(/^"|"$/, '')
result
end
def _reduce_none(val, _values, result)
val[0]
end
end # class Parser
end # module RJSON


@@ -0,0 +1,14 @@
data {
int<lower=0> N;
vector[N] incumbency_88;
vector[N] vote_86;
vector[N] vote_88;
}
parameters {
vector[3] beta;
real<lower=0> sigma;
}
model {
vote_88 ~ normal(beta[1] + beta[2] * vote_86
+ beta[3] * incumbency_88,sigma);
}

31
samples/Stan/dogs.stan Normal file

@@ -0,0 +1,31 @@
data {
int<lower=0> n_dogs;
int<lower=0> n_trials;
int<lower=0,upper=1> y[n_dogs,n_trials];
}
parameters {
vector[3] beta;
}
transformed parameters {
matrix[n_dogs,n_trials] n_avoid;
matrix[n_dogs,n_trials] n_shock;
matrix[n_dogs,n_trials] p;
for (j in 1:n_dogs) {
n_avoid[j,1] <- 0;
n_shock[j,1] <- 0;
for (t in 2:n_trials) {
n_avoid[j,t] <- n_avoid[j,t-1] + 1 - y[j,t-1];
n_shock[j,t] <- n_shock[j,t-1] + y[j,t-1];
}
for (t in 1:n_trials)
p[j,t] <- beta[1] + beta[2] * n_avoid[j,t] + beta[3] * n_shock[j,t];
}
}
model {
beta ~ normal(0, 100);
for (i in 1:n_dogs) {
for (j in 1:n_trials)
y[i,j] ~ bernoulli_logit(p[i,j]);
}
}
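The transformed parameters block above only does bookkeeping: n_avoid accumulates each dog's previous avoidances (y == 0), n_shock its previous shocks (y == 1), and p is the linear predictor fed to bernoulli_logit. The same recurrences in Python with toy data, purely as an illustration:

import numpy as np

y = np.array([[1, 1, 0, 0],          # toy data: 2 dogs, 4 trials
              [1, 0, 0, 0]])
n_avoid = np.zeros(y.shape)
n_shock = np.zeros(y.shape)
for t in range(1, y.shape[1]):
    n_avoid[:, t] = n_avoid[:, t - 1] + 1 - y[:, t - 1]
    n_shock[:, t] = n_shock[:, t - 1] + y[:, t - 1]

beta = np.array([0.5, 1.0, -1.0])    # hypothetical coefficients
p = beta[0] + beta[1] * n_avoid + beta[2] * n_shock  # logit scale, as in the model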

26
samples/Stan/schools.stan Normal file

@@ -0,0 +1,26 @@
data {
int<lower=0> N;
vector[N] y;
vector[N] sigma_y;
}
parameters {
vector[N] eta;
real mu_theta;
real<lower=0,upper=100> sigma_eta;
real xi;
}
transformed parameters {
real<lower=0> sigma_theta;
vector[N] theta;
theta <- mu_theta + xi * eta;
sigma_theta <- fabs(xi) / sigma_eta;
}
model {
mu_theta ~ normal(0, 100);
sigma_eta ~ inv_gamma(1, 1); //prior distribution can be changed to uniform
eta ~ normal(0, sigma_eta);
xi ~ normal(0, 5);
y ~ normal(theta,sigma_y);
}


@@ -0,0 +1,27 @@
//@jsx: preserve
//@module: amd
//@filename: react.d.ts
declare module JSX {
interface Element { }
interface IntrinsicElements {
}
interface ElementAttributesProperty {
props;
}
}
interface Props {
foo: string;
}
//@filename: file.tsx
export class MyComponent {
render() {
}
props: { foo: string; }
}
<MyComponent foo="bar" />; // ok
<MyComponent foo={0} />; // should be an error

79
samples/UrWeb/iso8601.ur Normal file

@@ -0,0 +1,79 @@
open Parse.String
val digit = satisfy isdigit
val decimal_of_len n =
ds <- count n digit;
return (List.foldl (fn d acc => 10*acc + ((ord d)-(ord #"0"))) 0 ds)
val date =
y <- decimal_of_len 4;
char' #"-";
m <- decimal_of_len 2;
char' #"-";
d <- decimal_of_len 2;
if m > 0 && m <= 12 then
return {Year=y, Month=(Datetime.intToMonth (m-1)), Day=d}
else
fail
(* We parse fractions of a second, but ignore them since Datetime
doesn't permit representing them. *)
val time =
h <- decimal_of_len 2;
char' #":";
m <- decimal_of_len 2;
s <- maybe (char' #":";
s <- decimal_of_len 2;
maybe' (char' #"."; skipWhile isdigit);
return s);
return {Hour=h, Minute=m, Second=Option.get 0 s}
val timezone_offset =
let val zulu = char' #"Z"; return 0
val digits = decimal_of_len 2
val sign = or (char' #"+"; return 1)
(char' #"-"; return (-1))
in
zulu `or` (s <- sign;
h <- digits;
m <- (maybe' (char' #":"); or digits (return 0));
return (s*(h*60+m)))
end
val datetime_with_tz =
d <- date; char' #"T"; t <- time;
tz <- timezone_offset;
return (d ++ t ++ {TZOffsetMinutes=tz})
val datetime =
d <- datetime_with_tz;
return (d -- #TZOffsetMinutes)
fun process v =
case parse (d <- datetime_with_tz; eof; return d) v of
Some r =>
let
val {Year=year,Month=month,Day=day,
Hour=hour,Minute=minute,Second=second} =
Datetime.addMinutes (r.TZOffsetMinutes) (r -- #TZOffsetMinutes)
fun pad x =
if x < 10 then "0" `strcat` show x else show x
in
<xml>{[pad hour]}:{[pad minute]}:{[pad second]} {[month]} {[day]}, {[year]}</xml>
end
| None => <xml>none</xml>
fun main () : transaction page =
input <- source "2012-01-01T01:10:42Z";
return <xml>
<body>
<label>
Enter an
<a href="https://en.wikipedia.org/wiki/ISO_8601">ISO 8601</a>
datetime here:
<ctextbox source={input} />
</label>
<ul><dyn signal={v <- signal input; return (process v)} /></ul>
</body>
</xml>

85
samples/UrWeb/parse.urs Normal file

@@ -0,0 +1,85 @@
functor Make(Stream : sig type t end) : sig
con t :: Type -> Type
val mreturn : a ::: Type -> a -> t a
val mbind : a ::: Type -> b ::: Type ->
(t a) -> (a -> t b) -> (t b)
val monad_parse : monad t
val parse : a ::: Type -> t a -> Stream.t -> option a
(** Combinators *)
val fail : a ::: Type -> t a
val or : a ::: Type -> t a -> t a -> t a
val maybe : a ::: Type -> t a -> t (option a)
val maybe' : a ::: Type -> t a -> t unit
val many : a ::: Type -> t a -> t (list a)
val count : a ::: Type -> int -> t a -> t (list a)
val skipMany : a ::: Type -> t a -> t unit
val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
end
structure String : sig
con t :: Type -> Type
val monad_parse : monad t
val parse : a ::: Type -> t a -> string -> option a
(** Combinators *)
val fail : a ::: Type -> t a
val or : a ::: Type -> t a -> t a -> t a
val maybe : a ::: Type -> t a -> t (option a)
val maybe' : a ::: Type -> t a -> t unit
val many : a ::: Type -> t a -> t (list a)
val count : a ::: Type -> int -> t a -> t (list a)
val skipMany : a ::: Type -> t a -> t unit
val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
val eof : t unit
(* We provide alternative versions of some of these predicates
* that return t unit as a monadic syntactical convenience. *)
val string : string -> t string
val string' : string -> t unit
val stringCI : string -> t string
val stringCI' : string -> t unit
val char : char -> t char
val char' : char -> t unit
val take : int -> t (string*int)
val drop : int -> t unit
val satisfy : (char -> bool) -> t char
val skip : (char -> bool) -> t unit
val skipWhile : (char -> bool) -> t unit
val takeWhile : (char -> bool) -> t (string*int)
val takeWhile' : (char -> bool) -> t string (* conses *)
(* Well, "till" is the correct form; but "til" is in common enough
* usage that I'll prefer it for terseness. *)
val takeTil : (char -> bool) -> t (string*int)
val takeTil' : (char -> bool) -> t string (* conses *)
val takeRest : t string
(** Convenience functions *)
val skipSpace : t unit
val endOfLine : t unit
val unsigned_int_of_radix : int -> t int
(*
* val signed_int_of_radix : int -> t int
* val double : t float
*)
end
structure Blob : sig
con t :: Type -> Type
val monad_parse : monad t
val parse : a ::: Type -> t a -> blob -> option a
(** Combinators *)
val fail : a ::: Type -> t a
val or : a ::: Type -> t a -> t a -> t a
val maybe : a ::: Type -> t a -> t (option a)
val maybe' : a ::: Type -> t a -> t unit
val many : a ::: Type -> t a -> t (list a)
val count : a ::: Type -> int -> t a -> t (list a)
val skipMany : a ::: Type -> t a -> t unit
val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
end

1053
samples/XML/sample.csl Normal file

File diff suppressed because it is too large

213
samples/XML/water.tsx Normal file

@@ -0,0 +1,213 @@
<?xml version="1.0" ?>
<tileset name="Sunny Beach">
<tile id="0">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAABYklEQVR42tVVq47DMBDsp/hHDxQcOFBwwMAgwMDEIKAgoCAgUqBhqH8l50126lHaq9TrQ7pKq8TOemd2dtfd7f7rz3TTbNpibpyNT/N7gUOxSHbKs+mLlW+vAwaQJ1B5ulQN+88kYkIJ3GhwkVwyFRABvKbEkKvfI0TOGYkha9nDe4AiqfqIHZUoznbTfURwYA1agn+rvAL2VRpuX8ym1Rw9QUpIiB/WLQilRZXfgXuVziro51hr69HxtPZUAlk3ui9ErZYNvvLeEplAU7MAx00tj3mVEhkMuWYdVeZGQTrtCZTDqz8IsAotleqCCDcUZ7aQUduSsanKjnVDe44yx9Qogdvj1k21m08KDiltqllAdnwX+xgvRzNqvLuaUQ4EDSCgB8oQtb4GxIBDfvxeOE9ApO7mUjltWl5HvQfiMy+kXhXBjGP8DhsF2rTYa6/kQKPFV7Sf3vinFLQX3Np0f43zA259mZw6IuSNAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="1">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAB/0lEQVR42tVVLYvDQBBduTJy5drIyMrY/oOTFScrKioqKioiwhFxcBWFVhQaU0JEOCIKPXFQGVmbvzI3M/vRlIM7ev2AKwwNk7Bv3nvzEiH+7S/dQG/dg2gVQZRpeBiuro+g8yPI+R7E8A3EQEC0jkDPNdf9gLcIvGtBr/Efr4k9lUoVxGUMYaJAZaaofzvgvAG9ahhUl1gfLYisADEpQE6ksWFrFIgWEcixhHAVXq+IXlhQKsuae3gtZoVnT/47QGKvZwEPxCqMBPcvUoQ9rh0wAr42BpjkTw+gp+j985IVoIOZ5VhAv+p7sF5p+kES+Gd+HYSkZYnnFjQ5GGDLmu/htRhtEBBruGRgqi6QnEkPqKxFdN/1SLEzaxjYSe2qwl5ltp2BP1szGJbCfSALeJCRjSKmgOQncAKL85j3g/qcELLFPuOG8eqdlq0zQN7xnodpQZVmmKBq2ALHjBgFCECH0y7wXmwjz5R6POQ8hLiKvQo/x622rEsbvcrETybvIJ4w+9PibMlUYsAJWE0VD+Ei6Qa5OBU8QG6TQPJnDQTpnv0PUwti2bLvC8OMpBdDYVJCqbhJHHdmT8TASK9Ty8hGjpmujN+8Ayj1N4+vfiHRCyjB5Ru88Oazv1l48hR7DLwIOYb3+xiMlp4d+U5yx3Xs4/ewjxLtQfcl89dzvgD75hkn04cPugAAAABJRU5ErkJggg==
</data>
</image>
</tile>
<tile id="2">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACjElEQVR42r1VIY/iYBTkZ6ysrURWYpFIJBKLRJJTVZsUQcKKTajZEAQhCBIqmrSCpAiSIhCfQFQgKk5UnHk3876W3Iq7bLjdJXkp/dow8+bNPFqtBz+d2EgSjaQs5pKfJtL6ro93rqR7EuluS+lFpSTpWMx5ItlxjPpCIs5bLm5UiLcrZVmI+BcB+ATgvlQ/F5LEI1UiP40/VxEnzMR5QYGAd6jEOxvx3ox0VoVk6L4s50ogO0CJSyCb9UC/a0GV/W74GBlnZkG1Xu316TmR7nQv3bQSY5bo1FcgksgxBnPhfaPERJZhX58X1+DjJJwtwLYNMEg8o0hmZ6Q9y2WwrmR5E1WABmSHSTqSqlwoENUgGZ5xLMYEksMbfC/HuP4OHBlxVgCd1qA/EgvMogJryB5VMkD14IN+Kiq3zh6ABNZRANiOw1dTFte5ktlH9fnZqvYeuJG6KYA5a1zD3JJKC3Ffclu4J3j/INJbGZFfSy0CslOmoqiV4YhYJEm/6LUmV1AZPLMmQ6YpsYKz42ndNa+hBW6/Gn3G+I0ZQ4BT8rIIABjc5eYZKz+OLQGSIvDRArM226ESfRdb7bZRIPzDfFuj8WunpQzOkL1EBAsrP+Uk0F1eyk4/YBQkRflz3RETu7Rudmnx7J95VxPG1hNubMGfZkgAZq8RDCG9bDR2/PEstbNVJVKbAhvPOVTx9ayJ5sfTQAIYjwupvXUhvRjOBwEXY1A5jTUTu+G8lYiqMcT96L6gtOv/2ZAuuu2gYRdjGR5FFWBH9+xj7kZ3QKCxJClrzD3eWXzeRuTy8a9wP/xAiRtD6UzZPRfOJagN94X/Cb24VBC6mPJmBxKwbv/Wf0Wu3PK2UKdT/kd/5zeaE2gm63UKTAAAAABJRU5ErkJggg==
</data>
</image>
</tile>
<tile id="3">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAABvklEQVR42tVVr2/CQBTmb0XWVlZW1lZWoicWMkECgqQIklY0acVEBeIEomICe3vfd33H0Y1kY0AykpfCcbzv17tjNvuvr7ZLbbmL7fJlbodjYZ8GbA65rQS42ie2f88Izs91IpU+jki5jW3f57YVkPUqssYU4oC838Qkge/bJpVK+LwbsCqFOnMoaDvKgWEtF2LZ6Ehul69zvmeJKzcDAxRqoRSK2yZjEUwInD4Wjgyc6TKCYX9Irto5IiD5K2DU8i2y5SaixVQjzQCkmQMsdOY0LEhuEMI6oNPYWvn99akWxuWYJxV0yDfywLBX1VWBM8NQuGEUAn4gJ7GhB/pxvXNxBYq/5oYnWJOEgKAR1kFovYp9HFCI5krCRZCQXDiY6kSvJGSvgTNN4Mh3thGodg2MEIW9aKBOADiMiarVAVEK9XQJZCbuXp/60TZjcj6x2ZFJSQJk1AmAaEzqhq7r6UCVI6kL639y7n3jiW3lNmJsdGokyH114qPi3IxD2/7lgro4FTt32zkw54qS4PrmrLStEw7mXe9+r7o5Dyczrd1waUQPvZL1uOpwVXsQyPycPO1PCTkPxwUHFUf51j6f/7okyrolgPYAAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="4">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACUklEQVR42tWVr2/qUBTHJ5FIZC2yEomdRCKRtVUvVUsz8dLMMUHSiSXMEDJBGgRJESRFkBRB0oknEE8gJvgXzjufU+4eZsl7+5Vsyclt77m73+/5nu8pFxff9a991xZ/5EvzuinJYiBfBty6aUk060k4uZR4NZDhNpDw8VIg1LnvfB4R79aT1nXLgCAQzfoSKInurCvZLpJ40RfOuPi4in+2hACgcdUQb+SJf+NJ8NCV6TqU6aYOiBj4VVP8e19Qiva8GXj/K5FqG8vhkEq+CMSf1JdWu0TSdWCgibYA2WlB9NizM5BwZFjJ/5ciABPZvAbNl6GUm0jKdWQEqB5iUyWRK4lcc7G2BJB42TfAYhtJtgqMAHewWqhqrwJXu1gKBTs+p8Jz9RQreCCNHw3ZPyUmZ61AnYNENO9Ze9gnkuXAgMcajhSEjMht21b+hyLOKh5KNhtYlUhZKnueAeAC5C/1mYsgFDzUcpODGN4ghyJjzac6kuYZVSOcdF+IEJxJ533DQQ2K+6uAVlWoqYpVKPv90AjQb1b2Dkr0qH6AHOfoP7nOqG3nbX9XFzFVBVAjcW3AtHddI8UUQfDVVlDtcBOo+YZy+J1KpqydGpBAapTIF6HKWVdDDpKstJB9fMQ7gOcj/M9m5EKMZ6tWx4UHlFFVCq2MtplSJz84tx+fx0ac1tIiqseI7xrHUo3HVBRKyPmhPKkCCT5MvBeniWG/Ui8dj+OP+yBR6UvVSsiZ06ZCVwdsiun7p32SM519KnUfJ6SutsnJtPHX/ShhxPN+v/WeP+Pal6x1OIpVAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="5">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACTElEQVR42tVUr2vrUBSenKyMjI28MjI2MrIyMjYyaoSJEepSUWhEoc+UMlFKRaAVhUwUOvFgExMVT0Q80X/hvO876X2rGlvf9mCBQ27Opff7cb7bq6vv+qw3iRx/T6X9NUaV8t+Al7tUmk0q6zqRfBsr+H6Xy9Mj6mf+dUSennNZLmI5HEoFIoE9QNNZCCfG0jyk4g5dabap9j8N+PBSyB6Ay1UCkEwP9ye+vgmY1311Ib0PlRh7XuXBkUyOx+nlRFQJAPW9y4REWqhvtok6oT26AWem6K0xGhLjKOYPiWT3fYnrUMzAFTMz7ydCZeUukbYtNWDnituXUrrwjcUZOB34JlYXSJTquTcHmRC/MZVRN9h3bh1dvzljzpEHqTINVaFlVVvFtDutIx2JP/K0T0cIzL1s1ZfiRIzkvYGna+7x7dw5r0SyRaTAnB9LlbdjDRn3qJyq13WqB3ezLnBQLMkkgNWR3obmGePC3OMfgSSLUIH5e6veTEzXm0VKwoyMBvjVAYDzetFCrglkU22tVdvPg3YoJMahcQVQHBbOfAmrDsgC85wSmTBDT4JFoH3m4s3Us5h6hufc2jlGoXmA9UusUygMoCw8qck3nUNqOwhmGBOLoAQP4M67w5itIs2BHQfJdA4UmvJ+BeWrUMuH8ilGQmBabYvf3OP74uvIwLBI4PrmWtxRZ2ECNTy4d9v7m3AqdW96SL+nZLn3aX9I51fJqqKt/O4BlD0lcKov+0umMmt1+Ygc4BZwrcT+xeqPPgT0ht399kbexcB/AEhbiVW/ps4pAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="6">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACOUlEQVR42tWVrW/bUBTF+2cEhhoaGpoGBgYamhoaWgOTFeaCSS6IlJEoKoiiAksNiJSASimIlIACgwKDAoOB0bv7u3bc7qPSlq2VFunpfeTl3XPPOffm4uJ//RyOiWy2kSyvA0lXI3m3wNVjJjsNvLuLpXxIpX7Km/19rCN5OyCbdSRlmclBg9wWoVRVrgzoehU1IBRQ/7Iv3Dvs/yGQU6ZkVz3mRjuDIAwYKcvUALAHhK2Vkbqeng8kXQWWLZmS8WGf2rBgCuDrl2kDBmaOqczXoXgTz34z34YGJF4MxR33xZ25vw+EbBnLm1A2q9AoJqPdNrZA0A24cOJLfDPsmGFNUL6f30UdEOfKsfPeh56tX3e10rdp9TR6j+gbdoHLh8wyZk/WnQnrvJ2nnSFhgzOAZLqOZ0ML3hs3IHofe89AkmIky0XQZMm4j1sNWxBKP1lyD0Bkyx66mfmtmVPvDj65kq4Dkw3qgyvPZIgmA3EnrjHnjB0DkRaBTPW9n+r6pdtNd505q5SBuso7OQBzW0RK/chkiq4HBpLAxpiOZDGy80ylcC8d8Re+ycGd113falpVmc0Aa8AkBgKtYYWHYAEgyENAHiY77kAzhuUeg+D+Z1/+qO5/1LuCGWWFMx6FKXQ1/6xPzSg22qGb7L2ZZ0DPLsfvqmLbdDuCYTACnMrPzlUKPABgfPBXfeBXvb8z3P7ZnFYxOhOUeqc/vGlLPpUrAKwnFDSstPPJu/0pYb76aWpGxYTnvvMN/STd514e0SYAAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="7">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAABjklEQVR42tVVsWrDMBDNp+i7+gnZO3UqoUMoHQL1YKgHgbWYoKEUD4Z4CHhUN636FVV30knnkBYSnEADRwKS7r179+6yWv3Xj+itFzpEPXkhjb8vsArRsRicf1d7jNsBJyAhCyh+18Yfvq3f9wcksN6++vXzZjkiQhkvGhPBQHKZwAMwKAHg7ThlApvdRyYBal0PXJsisYwgAJAVOAFv9ddMhYfHp0i4t5cRoQcROJDYJYmHklxUBoMDcxUyAZVUg+jCm9H9TgQO8WIVQcXLFGU+6TFJjORUJEZndI7gTXyLisFvzcgoNjXY46ObmQpJyEgGKoMqITlXAXssbfYCEQeysztkWMoPcXTniLCxUqz3w5SrbEczr7JK4InsOaWQhGZTI2P8PW49YwkPPx0SIZkpCZxRxbxVdDd7gaq+yIzwQKVJgErfUmsakwnM5j9Uimr1xRMIPrgFxpHk206lTckrBE6uJ3A0dWcXXEhjUkQXf+TNqJl/tMG47UpWbLT4ipb2jn9KKnmhjvvi2jw/5gK11tLQbjMAAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="8">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACOElEQVR42tWVr2/bQBzF+ycMBoYaFgaGBhoGGoYaGgZN0cCkDERKQaWGBBhEVUGkBVRywCQHRDqDAIOAAwUHCgxG3t73e7nVqbQf6tpKi/TV2U7k97n3np2Li//1Y6oxim2K2zyBe5jj3YTtcYqSwuW3DPVhouJ6vss447cDKe5T1PUUhiJf1yNYO6cDPN6kCiHfm/2Yk+n6asJhp7I7e5yr7TJeTK5NCTY5OTLF7SrRYx268mLhXm4x2AHxIzDcNxSb6KgYAZrHGw8jzlQTFRN32nDl1oMI5F8Ld+8MZPoU7W8d4gOQNdCby42cu1EAOW870/C6wDlGEwr6PDZD0F8Lb2p0c4NoZtBbcve5Q39NgIoOfAdGRyCxQEpX1HoKyU2dm/syEuBnIZ/FJk4IlF6vfFznwktzPqsavY1TF3pbH8Xw4DDY0w1GktTQOGSHcvMA4SPIFK5dzOBEHSD4WyvO7FuOdBctgHB8bdD5WODyzjKOBjFdESBxIqEzqayMaESoEXxX9HEMIkfvkoy4IVDaj+o3T4kKSw/ufSTdWakwceU7ITCJA4+tuhTTjf7OR5XUDYaMagwfWXhCilMsZ9b/sYwCsPDinSuun0qEJ+NDXrKcjT/fcF0RhhAiPhCQB66O1wmnu/6XF5Q6sD71hHGEmDrXJaJlrQ5FVwaXa6sdESBxYyAQhH21F5KWVRzJT/34UqLzudCOhJjaxX2zV3IoabR4KqxEIjHJvNufUpSfIGT3s5cL/wB3sgL2s65DmgAAAABJRU5ErkJggg==
</data>
</image>
</tile>
<tile id="9">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACYUlEQVR42tVVoW7jUBDsJwQGmhoaGpoWBhoGhgYGGp2igpN6IFICItUkqgKiKiBSDCo5IJIDLNkg4IECgwKDAwYlc7P77F7InapeW+kirTZ5D8zszOzL1dX/+vGTAulhguppjof1EF8HXDYIn4HBocGoBIpyyoqQnSas6POIOHEBZ8tKDIYkMMwBY+aoqjman3dIH8co8og10f5hwL3ZHv3vKYSA+2jg5TX8Y43BETp1/TwnkalWdpzgYTPUrsX79088y9BfZOgtUvT53VkX7Clc9rEB/F2FQSW9Zg5uYWiFAKaHTokI93GoROT+zcDuhpOynFWhwP2bVBXwdgbukndLA39TIUgaS4CZcI8V1anQ1HdWjfNUiYgtxtyiYDb2u5Hm5c/AW2On/JFp9QgsKjgLS8TjxN6KJGiFTO2faoxqZoF1zUyM2cMz1JKM4DK5EJEtETL7pD0rrVW/pWaoZNrLcjfGqhC3Zy0xISBEglOD4NzYPLAPSGAiRI7cEG5G+AINp5ARFaQyrqz2jpgok18o8gq2ugBeWvCOlJdQ6q2oYNQKb0VVaIe3rXB9qGlLDYdBlTUVYjKtKKAqUAEhpPko/7IlFqxQzwX81RrJA7PQ+9aGUraCBBU8b7TL9O6hQiRqPNEe/sbLvX0n2s14+xZwEifOrBKU37nJtAuozQTDSFBnYc/EmgEBAyoxJnjITbn0/p/W0dnZnMhGdDb1l5Zcp4ooEPB19NZU5MTpSUI8/7iXMGkVaTOh2yLg3bbwzGcGAtrhU4XPe5LbkLrxZWDtoyX1ZX9K8iIqCZl+9n7gXyTF4JLCnrXHAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="10">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACQ0lEQVR42tVVoW7jQBDtp5gaGhqaGgYGGpoaBgaGBvbASSmpTgVRFWCpAZFiYMkGkRxgsKDAIGBBgMGRd29mbQdVV7W9ShdpNJvx23nvza6Tu7v/9ePtGvg7A/+hgfezwvcR5yR9MvAem1uw5u0ZFPXviIWIJAHJ/S1zaTX7PxqETx3CbXcT85VCpKmOmTnYk+jcI6itEsskgrxDVPRILeAXFl7RwWfNO3xSiCcE28Et3QmR/2AQ5hbhwSJi+CQLBhEhI9gxKCI4OIz7zn0U827ikO7iVyA+gSQ9oi0bPRp1HnM9z4G0BeZ8PiuJI2Zecy01iTOQFMzsMROMAcKSU5O7QzFvE7e9kkqzhHnBjSmbROKYEbGWGEeado5oxojqHlnvBCWCN66HiJH9ijs5Q7HgmWU6E7Fp13jeJgqoygWaekkQxbQOKOpnV2D5mwStcxoPbkVQwvqsc8SZdfWA05Je/XWD5rxEc1pifiGWz+NymBS/R+w9CRHgschwPGQwZq1CmtNKszQTkv76S9eCk6bW3uNln6IqFlo37YrrTNcV93WvfJ6nsJd7HPeZZtknXG8exbix69aa1UHtXHSclggU0pecQs+rYZ0qYVUvpmci3hgXx1EU492XUYjEmebBXSeTYePjIdVj00kNAhVXLyYRIk5CXdfLj7+OQizxvEvpxI3WkbmpjCK0vr85bSjE2s3X/SAJyeSaxOPY9UyHezIeUfUZx38VcnJOx8tVlSJgNd2Tb/tTknO2l41eVHmVP9rnD/NFCxuaQAv3AAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="11">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACIUlEQVR42tWVrW/jQBDFCwsPFi41NDQ0NQwMLCy9vyLgwIGCgIAAEwMD62RgEGCpAZEcUCkFAQYFhgYHDI5M35vZ9VWV7qT22koXabRrOTvvN1/ri4v/9efqk7gStr4Tt+3kc4VzWHGSqO7NdoO4XS9893HCEKRIVMDKXuLDaGuOZwCp4dk17wzi8k7cpjPnFcT2AwziG8vCbChH1CITbW+l+deMuHU3O3Zbi46iIdIYMGkvktSDLM4iiyP2u1GS4yRxC0C81/P16XUg4YBGRDGARACIGfU37FedgXgYdwvQAJsbrFvdGUDu/QQYZOXPwj51KriG4HcY6p3UiKqBHSaNNuW+GXSvZQnCKJOu7AH6IDzBS+sRLV/ty1g+y4jWqnhRT3Y2ROJq0DU6jupMo4aDGF1PxwTkBLD2armHYZ9QnA1a2JkwMZqhrS9t/mx8w2iFiFSoMoG49SBwkJwndUxxXZkNwnpAhSp8+TYWuVrjA/UAfx+32kdBaoozyqKfnc6j6FeFK3y6V92cLbf5fWdoiV/TjBHmPEE3pwBJykGyBj2AkqTsdFiKTLAfUvYI+iHZ2/+ZEb2kcO5L1clVdZLL2x9vH0cVwZgpTAGA/WRiAYSrLwXf8XnxIBIdrEnf7UKK0YgZZvx6FLn5KXJ9NlvuRbJ20rtg+Yj9AyDux4+7kimU4dJZEgAgX38hOxDNDqIZ+LSPEtO8uIdRePf2iJ8AkT7BKeWMTHEAAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="12">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAAhklEQVR42mNgGKpgztI5/5Ex3S2u7mxGwTR3CC6Lae4QYi1Gx1GZSWBMscVAJhjDDCTBYjAmOUTQfUysQ3BZTHTUEApqXA4hZDHBNEJqHKM7hFiLCTqEUBzj8impaYVg4kQ3gNggJuQQknMFqXFLyCF0LweoXiCRUQ7QBhBRDtAHYCkHyAIATRZdO8VgYzoAAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="13">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACNElEQVR42tWVrY/bQBTEAwMNDU0NAw8eDQwMDDQNDIwKqqisBZbOINKZRJGBZRlY2gORHHCSD0TaggJD/xuvM2+zl6hqq7b3ITXSUxwr8fx2ZnYzGv2vr7DoJCqsxFsr44+VvJtwd1xJVS4k3HYSl71MT4OEpZWg7GSct28Hss/n0h6WUtULud/OpHtcyb5I5KYeJM6tRBmAil6CXSejzLweCFdrHpxwd1zLfjfXMXUipklk0gxyexwkSNtnJ4IMTtCN9AXRtF4UK+WKTbPUoQv7Yi79t42DgTM3DwN6YCRCH2JcTzCEGH8yzo2/AaEwRe7SqdznMxWvigVE6UQi9rRWAMJ5Z+LMukLu8I44psWgUGHq3Bh/MYCpnCu/etFOCtqntXs4P0PcC7eHlbMe96+d6fvPMkEZ2QWKx4jjtmQvepmgE4RSkPQMghld7xq/Wgox78u4jO+ymQrynjqSXeLQCOCG/bpR17SIEIwfBxUn1PQJbhwAB8gwx075ULn5EYRiPlPfdooQhMJ0oKM7APFOMI7rmPg70ywkAgRd4RkR1xBmNE2vMYWA/O254dtOi7nnKeQb72PwTrQA9DF5N3hfhTm564ZGA1ei2v55GZ8fmDsBLacCLfUeY2uPS9eJs0s+Pl4H2JLRoddOBLv2Zdvx0hPXfsJpR+DMTyFYXHyPW/XVDiQt5HnVVe12iIfguA64iAj5ZkcyxTSC7LxldbWJQhHy3f6U9Mw4bVDWta78X5/zHZ1ZvLB/eOxTAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="14">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACSUlEQVR42tVVLW/jQBTsT1lquNAw1NDw4MHQwEKjKjqWgkgpqFSTgoCAAEs1iBSDSDaw5IACg4IFAYamrzNvN70rqO7UL+kijXbzIc+8efNeLi7+15fZdmLWwHIv5raWbyOOyl7MfSfR2p963zmJmgFn/3VClBDk0T2wASqnIuKNk0kxyKQEDqPERwBiPs/qvBZzU/tqaTmqNUUvNge2TuJdIIYAklsK2Pn7BHg/8bLWClmtuQ3kN506YAunhIkT+XEUmVQgoxCe7SjJIz7vAXxPEe5p8e9CTAESWG1Lpw9NylER48HxEp9fd2IhgrYnu1HSSvSkC2kQM21F3+93M+n7hXRNJg/FVLrj/G0h7K/ZgByVm2tUf1VLvPb9jYEIVdvQc62+8raTSFuB36QgnopIehCtugaxe1pJXV2CPJP6cCk9RPB8TRzSfE63ksEB2s/TIlQWbYgA7W8zauhiEvMOy9NB5Cdsn55EhtMdsFIHujZ7caI/i3ici6Mz7R+OmPy3iJc7e7/1ghRM/wHEaAExgRusPml81dqSxtvv3AJYyUM59WLKmZ4URDfebgWJuWQw0xo8JF5FBEERgmhJvAntKLwLtJ9tmZ98MAeQ9/1csa9mWvkr6/8aRgrIw/ghE+aXzwZFkJzLh6N4DiNd0Gy0fgJsOyi5Vt1kHxtHdYFCrvY+F2H+mRcVUPgpoAiK4fhlCCPFfN5CKoMja58PToi2IHd6TxlEWM8tyF3wdX9CIaRRWE5cSpyS80L6vn9DuBGdhSCY733OM8S85MLHyAKwAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="15">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACUElEQVR42tWVr4/icBDF90/BViIrschKZGVtJbKyFsmKS8CsQJANgoSKJosgAdGkFYgKxFcgKlZUnHn33rTcj+Tukg27myzJZL5pE+Yzb958+/DwVX9eXsNb1xg8HjH4dsSnFfZPDYYbh+HWwc8ay17uMMgIsyk/DkSFgiswqYDgDPhFi2FGiJXDaNvAXzsD8/aNwbxb4Rhd0dABowIYXxg8T3iWAqN9izFhgopAVMdnttHcq0h9TlGeEqTfgbhhMEeXXwoEVa9C37kvFTSSvIGnsQjiqTSIN4GosOJ5E+Eli7DLYhwPU4QESF8JcaICWWtgAc+SXmMY8zwh2Chv4T/WnU96rwxuMP8bTVkkLBijuc4tu8vc4rifoq5SO6ccx5wR0Qth3SmhEajokBBDFpIqgtKzUa+Ktyw7RRQrgix/25qySPGyjyEAhXMzBiHy2ELvd9sIDZ9JifI0tWft6wLhpRvLWN5Q508svGZRjkHFBSejevtuU7xVr0T+FxD9uTpVluyCUtcah0DcefYT4gbcNHPLET0SUZGw6sx5U8WUkFkPrSljAMvS4t+jqLqOZUABlFViZ1OHEILZyRvb2IAnGoU2hF2PtRFLZ/m2mjYCSq8NeZMZ3WVm81dBU4dKuHpm8r/kEfQ+pAeStjNjzDxip/KFyX5g8XO3mt4998IfW0GgI9UwqJsqBJm6zpC2CUV/UfX3wbtdSCr2vA5RmwqJjUkQzi2QoAdQcZoy4Fg+7Eo2BWhS2wyubKS7Ad3FFBT4vI+SjNhcF2bU8I6OfwC3fgnHe4r96AAAAABJRU5ErkJggg==
</data>
</image>
</tile>
<tile id="16">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACP0lEQVR42tWVoW/qUBjF96dgkZWVtUjkZGVt5WQlFgnuzSAQhCCagGgCgqQIklYgKhAVExUTFc+cd853C1u2ZAvLtuTd5MulN4Tf+c53brm7+19XUSbY7mIs5yHCCvg1cH0eIyc43z+gOo0QPQMRgGHJOv6gkG0Wo6rGKA4J1mmEup7QgQTRGUgk4AQMKGDI5+F3OnLpNCe4Pk/MdlVxTKwEe6g7AQfg/ug++2WL/qL6uhBB1a06VcfFcWRVVSMbQfv8ByFBSUsgwTH3YN8a3NvU6O9r+KcW3qFBf3ODEIFVy1WE7SbCeuPmne/oRDlC8zQxmED3ZWc7HQiyBoNdi8FjbXuQNvAXFLFpzAmJ6K0KfGj1ljABzF6mfE0BF3B1Gl+t1zi8FUFnZ3vUsDiKsHQjCCQga01AwGd918voypwiHgurVx2PsVyErkvVwe2y30TQfgF1LkHrNL6OI6C9yoBcGAg4I3DBzumGHJAQ6z4lWHURsGDNJSTHu3v9Ou0G4q6zmkIb5sGc6pxomsm1W2/OLgnzj13nOptV1n1vRtiqctC34Hej6NJe12PbJUwiwr/AiHdeYi5OKHw2Z8LtBpzgrJ86cJ8C5EBPJRG3hFEgzV9BiznjmAJ05/WsAOrFo249ChDMV+gYOC9zoTPLVy54H4bvsxXDBStU0jVrBa92ZwJqBAPZrquXujKbP0v9rUv2D59eHLDuBZ0WNmsl3Cr9ZvDb5TP5dq3Slztu3XbJ/rU/pX7WBWyaW331d/4B+NMC1rjos6YAAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="17">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAB/klEQVR42tVVLU/DUBTlp9RWPln57CRykv/BD0AiEBMTFTUVFRUVT1Q02QRJJ0geAlGBmEBM1l7Oue9t3QKEAIOEJSdru6Tn4577dnHxXz+J85JUwGIlSd7LnxGn3VZSEKfNIEnpAxyuW6DxvyeEhGkL8gLk9SBmsxPjtkign7AXc04hhsTlIFkLQgjIHkd1m+ZeTMM0hjAKkjOFLt7/NJH5g4i9H5U4c0AH1FsxAMnoPtuMYiCAiSQQlCCdpPaTIKLxXxOSIdr5KDJ7BNaj2HonlgLg3BZbsTlEFENAOUGToAAKuVkF8iKKUUG9pvIx8RMI4Mg2ELARmTvRa5JbprAOCdh21BRMHZxrN5jCnpwduAPZMvaCz3hdHYkpjrbGPOzk6kU0UkZOzF9CAiTht30OYmbdKJe4T10YhfYAwnQzulBQdc0e7AUcp1BFkfl7QjhTvExFIHoWTIFnhEUnMvxmMIqT+FlEjmDpA+Gyn0axOHLu4tpGAR+OYob4OW8S0Wm6DiIOZYTzzEVxFMAk3DA5u169Xc0ypvKVMnI0PHA0mQpkOVaxjNFzLdEXjiAtYtylP0UXV/Kn54I6Zho8fBb+UDyeCdp8kt/2U+v3rtsQ+flOQo6CxC4Qq3tGftufJlD1it87kqvhICCph+n4/axcZ/83LOKOL0LpvvueV8nivBN16X1tAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="18">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACQklEQVR42tVUr4vjQBTePyV25MjI2pGRKyPXVlZWHeXEQc1CKwoXU46IslQELiLQiEAiFhKxYkTFiBURJ2Lffe9NuqUcy+4t3YMrPObHy/u+731v6M3N//pruzkdyik9pDH1zyv6Z8TuuKQaxHU1I/u0EHI5NzPE/POEHIopWbukFiQ/sztybgUHsM+nIoLz7eMcMZP1asTSqV1Id+64Ets5PBnfLX1eHFnSwy6WvQRc+TBxVBHdOqJJM1DUEMgWEkIGATT8EBJxplu8CGJxYdmTeSQK85506Ujl9v1C+ONJNwjAbUd094QVAnTTy8yHX98pRm76TBRb5DmOOBORacaakZwjQt4AQ2cQUrnXhXBSui2JogLgCFNACM6mHASIhbAb8dETRQBmoVHnv2FCgxy7x2Fy1FcDTYCjcwjYW4kgqc9CAnQcpC0F25ZUaiksejgA68cuWH004Jz1XhyfIWbCOQCHEM3fh52/03uHPe53jvTWkt7ZC3KFu2DTes4LIYkXcRKiuBBgGvNUGfYF9hCnKggEuMlAkjovmDtMrIRKfK0QrWtPhHMADMHf+Hh1FPLRHlGMznChKIcwiFTrlnTqxYkLbDG7hRmHuDNJL/fsgNr4ZgST8fbt+x+jFMAiKb7H+rWWlUHFVjgjXbPN6XmMvAppMUb2l8R/CGHyk31fDhckMiKMQW/8/kUsr7mvudofkgCyIzxffiNrPw71rZb17EAt8Wl/yULCD/b0wLbjO3nrcV1dCLvBpPcHsf2jOL8BAQzkc/Yiwa4AAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="19">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACN0lEQVR42tVUr4vjQBjdPyU2MjKyNjKyMjI2srKyticWeuJga1ZUlFIRaEShEYVUFFJREVExomLEidjv3vsmuestt+zt0j24wGPIJOT9+iYPD//rVZ/GsiszWS0SsdeZ/DNic5lKBeJqP5LmPFFyvT+MgPHnCdltM2maqdQg2eSpGDNDAiPZFJmK4PP6OAZGut6NuHdKd+Yy09gJR8a9KYRNukSmslomP58xlQ8Tk5Ru6ZSO6+NEoWQQ0H5/cmKYzGmiZHz/Vlx0FIkakcGplXcRE6t1Krsi1YjprkLcJOo7J9ltMq19UnFZK5LsgatIDAFhaSXctxLkRoKjfV0IFe+6PtUBpnwDAT0x4+3dVTfJWDtzwwgByQWEcJucRTE8AFhTiBlAzABCvEUt3rz6JeRlbMZMf4ud5CrkMNJ1k2e6zwr4LgXzwynIh3R+AlHZSrgwEkFAWFjxSyNe3oi/aMR7hoCt+YOQ7lzfTnvfO/cMErAQxjpINkbUKd3ZziW6jvJWBrlVQSHWaIv7A+IvDERYR/7N4fWp7zq9Pd9OzFhiOIpLAC4jkMRwPARx0rq9AdwmhUhAp18r8ee1I1XX2FvXfz+M/fmmY/aZwbXGzMlmtMdWBUWIm+CQMWKNfN1IcLAu6vydxC8vuoyvzmGMiOOz67hH8Ixuv8DtEqRriJh3PRduvdsPiY5CdMl+6dBHjwHIgsdaSSmA8BaV4tN+yXTlE+x32bllzG8N192F4Agp6eMO+LjjH9KwCMHNaVe4AAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="20">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACS0lEQVR42tWVrY/bQBDF708oDDQNLDQMDTQMDDQNDAyqogOVUlApB066kFNlUBAQKQaRHBDJBpEcELDggMGBBQELSqbvza7dkqq9T6mRVjMZW57fvHlxrq7+10/0tRSe/qqWflbLuzXu3ZTC8yEDAJpH97VCtPmbNY73jQyOTuLcaiNO3/tUSO9zIdGX0tcygiDPzeuBDCuR4aNIckLMnQyPIh83jfRvPET/3kjvNgDwAErjxuj1Zzeuq5nYx6XEBo2tkzEghoAYPYjElZX4e6NAcdZIcna6EsLye3Tt16QgVGT3BEXKairlYSrFbiL1aSbNw0JSNJ2Jbz42TtUYACIBHFcywHrGFsqssaY96ifXrUnXAoBo/RePsBmbc+oS0xNgu0kBsBRznisUr4/ggxQw6QUwZ5FJgwigMSJ9kmQEcgoTXRedObsY8l+Nj74ZH25Oc6H0rLUQxX4ixsxVCR5jFmrGFBMPDk5ssxBx33RqrQUFqFD/tvZeWQU16JnMeH9QldVvHmkBOGkbGzQrA5CCBV9Ye6eqWLuUbZ5qznsmlVeCKxv9CMbdI9/hHJDvCAZ1APfnVRznOjVXwmmZl/upfrfNUtVwAOBqFKxVC/c1UENVRG3qAHAOigBoAFXio/13M1JuNSNhQiPNK++VzqyoucsdIP2qOAAPvZJcvAoJlHnRz7Ez5TrtDMlIheowMUF4j9G1ecVYf7UXkkp98A9ujanmPczUK9vcK8UV8PqbvZKpRvueoCGpQFvj9O/2p6QK5JMg9/zZjX8CTEsEAZI+lIoAAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="21">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAABfElEQVR42tVVoW7DQAw9WFhYGBoYGBg2RQVTpIJWChgoKCgYGBgYGNh33+7Zfo6jVZuqtJtW6ZSrk/g9Pz9fUvqvv+a4yt3zWlZb9r8H/JQyloC/bHK90z3jdwPuATYmXfuUh7cq91glPrxXEmtGXYjdvOL2vM7Vg17xHyBC6lH3iEENEMEziIPMoooJHsGkSiMEHwCUpHgPRBGnQlcpcglY9pYM++GjFhB6AUT6181U/W56nx6R575TREBLIoJLT01WVigKWGIkrLZJgU36ejt5wL1R7uEqhJHfMGbAsYdIgORkLjFUaNXENpCcE9wnrz7mxfN471CUi36CkrO5FnkL2/akgN5bEjCJmbCxieBe5I/KnVZfvMF8P7reWzJOQKiAxGKlVMKNauZ0HwRvXDd+RoDyM4bKOH4wJ/ZSrfUYvujCO7i/6ByIhoqOjj3mNOAqbTRv3O4kLJV2Z03uUzLqFMzGDa0oz971W+BngynDNjR/8VGSM2HB2f8JRvClN9IGbE8AAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="22">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACO0lEQVR42tVVLW/CUBRFIpGVWGRlZW1lJRJZ26CqlmZiaeaKICmChBlCEM2CIAGxpAiSIkiKmKhAIBD8hbt77uN1H8m+P5ItuenbbXPPueect9Vq//XHvG6SOTBV8fnPgJu9JqHaU5vMsUnNiwaZQ5N0/9eAraFFxrUh1bo0yGYC1tQi9HFGT79H78eAHR6GzVqDFvljl8y+2hbbA6h+UVdKcK9x2SDjypBvG6wMyHwZuLyPqNiEdDgktJh7VGwjqbKMKkLugBXotSictylIXQHEu3q3TgYrohX6lCIARt3OPMqWngzO1wHlq4CiZYdOx4TPvpCL+XdUtg1psgvkDGLIBYgAGKogIwjrm4oUPCRb+gKAc7ELacEEAFzuQCqmDMD8Ln+izGTLxFiBZOWR0+c8MFBn6kghGyBgpzbVujW5LahnYcXg27QjWyZzfm4COWNDkOiwzABEH4QWc7+yI7vzheDplJC2zLuxyR1blKxZvZlbWYOs2DO7sg+EnNR5JAIwbIihZRkrAls1FL3ojuXnrUFOKwHgOG0zMV/6IAF75MyqhWwJVIKqL9V91QqRd5+wxLE88bEiw33YwGS0EgDRNmk1vIEt/cM+lsCisjMp1IfDqAfK87zdAcqI9J7YhuunCcp3sO9sleSGS7behN+7jtWtWAWSBwWmVBG7diqYCduhNy02yMbo5/4gAQRbCyADx1O3IoENAaotyr+z8btEztdVhytfg0BU5eTP/inB59NxJEHFVf7qnAcPbLZlMU2ZQwAAAABJRU5ErkJggg==
</data>
</image>
</tile>
<tile id="23">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACPUlEQVR42tVVLY/iUBSdn4KtrKzEVlYikVgkshKLZMUmjBmBaAiChIomVJC0okkRiCdGPIF4YkTFmrP33DclzSa7yczOTDKT3DwohPN1z5uHh+/6115SnMo5dtsp4nOHLwO2zytUAlydFzDXJeIGGBsgbByC3HwekVMxhzErtHWK42EGa9fiQIqkBiYXIM47RLlDdJGz+SBHCNQrrQTYPq/Vdk7bpDp0JCoc5r+8E5xIIomuHUb79v1ECEq1VEoibbPUMWapEXQvG0+GzlyWGJcdEnEiIYHS6fsgM0riTUQIzNntZzjlMxxzn3dVLhTI3dZKgOSGzpBQIvsQZhbx1qkr0cEikAlzOTMh8q8doZ0nASOA2itbfhQCPbC5ru7WVwNnnFsrkc5tMC46hGdRf/Az+tEi3BpMhFh89c6MtkLkscJA8Qq7bOpVcmp/UqGS4B7UqT4noeNhfo+D9pOgkpBo4jO0kvFNwPai+lEI7L0LwV7iYCQ/Wz9/Eul7Pdx2BZKTz6wQdaKa5Pg9RkPgPqaJtCHtPDCryTjYjqnzSxkUFqOn9k7gr1H0mVq70pPEPJlUSZDMMBp9LZ/HrKPYnEj+Y9ZRiESZ9YtI4MK8bRl7e/V8raKlM2r9TGOjU3w+swIs4KFYzZNkWMNA9mF0MP9fx3sryoWq9dV7dUWcSgrpv9isVZQ9UNWFV/6hd3+vmsD9cmpE4gprp9dyLaqz9vOu5L6uWkHeCbXfDV7JdODL/ilx+dxto0Pw9/7Ob7W/DzcYBXyyAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="24">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACK0lEQVR42tVVr2/CQBTmT5hEYpGVSCwSOYmsPVnZTF3mOrGkCBIUIROEIEiKICmCpAiSTiAqJhAT/Rfevu/BwUZGlv2AZCSXu757fe/7cVcqlf/6C0dt8bqeDju9las1rj3UhMProfnAk3DS1nW9W5dGr3E5ICxeva/qqN9VJVr60p8ZiScdCWe3YnpN3XMA/5wxGQ4XRrxHT8xTS9nzebg0umYsgC2dbkNzCebm7ubnQIqNlXwVynYbSzL1wbIj/bkvASQnoLQIZTzzZTiFAjgDjHGOMey8o8/NUVOyZSDbl0i+1ZhjPPElRYMEMrNItgjA2BcLIHxOwJwKsGn2bCXZhMq8BOD8OZQUe+ncSFFESoQkcuSdbZyv8RKala8ogDWLJADAxmxSbCKVlXsZlUFh9R97jCfI57vZYgeYJJhHW1iD9TQOEJyP1wkFxqMdK/qYrQJdEzVlJHIWYpyAkqnRovnaKkMWL8tYnGU8D7SNYLivgNkYgAsHArkkwBpHBT6RjQmcGdtCAcrLAsxjYTZ2NvH5ABY55WtfZz0veP9U3bNWqLwvMVhEOo/hu1ODIMygeVCCcppBS1XgNc3U612czYrC6kj3tnyQ/qufk1fnE9lS3AbapkqBjVPJSc0Y7XRAuP+r63i4FQCUuWaYeS1dQ43PjkzzFc9G/+8+SGxyYP3Ob/V07zstcuAu9kl215UA9NotCcDuD214vT8lHkSedItDym/ET+u8AfUc39TwIyvtAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="25">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACV0lEQVR42tWVr4/iUBDHV1YiK7HISmQtElmJrCUnLlWbZsWlwbHiEhAkrCEXREMQJEVsAoKkiE1YsQKBqFjBvzA3n+k+cifuR/Z2NzmSyXt9fW++P2Zeubr6X39p3pVgHNQxaMqHATdvm0IEEwWeBdK8btjcrb8bcHvSFn/gW7RufAnnobTnbWGdOWvuPWtvrrg1bon3yZPga60W9QB5155k9z2ZrnvSuGlIb9yWRMvTUGcg80/AALjR2W0ODFpGKJl1ZJhH8m3XN9D+vGNxeEilWMVSVSM57FM5PmV/TyS8CyXMw4tK5iRNV5EsFChVpZ28I/Gk3sN6pjHcxeJ/8c0NiHGmWPfleBxKpCWCxOHxN0ToZBI6cJIxYisgHEatA13sE/E+ewJhR/D8PJKO3op0GRlgqeqrk7rwmFqUu0S4PYwXYJI4qwHDYutqtRtrqSnkim1fFmorNcfuQpOwP5nr/CmVZNk1uzdbBVnXZDb3dWkYceL4QgJylT4fHn5wBMXUFQeC25YpQsF0HctIlWNtqQfZM1KFhZI7nDJzCqtxgXfltp4DCiF6wcjoHkb6Azd+WQqS+HqlUFGq7XVzdQ3IqWLPcBapaiWiBEtVzTpjsVK1eq46qeJjZrFRUij/yfo//WhArI/vaKDhxbbRqqdgsSzyntmKGsoCgUjLVe5riwFzpHDi1deRZMRiGRsZymKJdcSlukFTey5feoC9pVp9Pk/f7oMEiFNNw1JbAN2dR3GlTjly7/ZJBpCGcs1V7pTEPjObefdhf0o03/l5Ktk2th55bZ7vlm2RtXeU/5AAAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="26">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACSElEQVR42tVVr4vjQBReWVkZGRsZGRlbWVkZOXY4FU4sYcUR1nVFIRWFrikloiwVhVQUuqLQioWuOBGxIuLE/gvfve+ls+yJ+8F1e3CFIdOZl/f9mDcvV1f/6y+89RGOQ9j7HqqnFP8M2L/zwRFOQoSzEP51V+du/WLA0SSCd+vpCG48xGWMbJUgLfsodkbX3D5jPwzYlj2kyz6CcYDOp44Cci1bD5CTwKKPh4PVNc69L57GmvtY185S7GwlqLObCpk43wn4rAczoRMDBZ+uDQqZH58yHfvHFPtdiuZliL8C5lyBOS8juL3eogWeP1pMNwZakBJHEq9NgeNzJkQSbDcWdT3E8ZChWhmJN/hlcTERrXVWcq1701VLOSco96g43yTIZ33UzVATM9773NZAthyg/ppjL8CMpxMkRTfq51yfb8CpBDtlBGMCrepTdSfjCFnZgpIA/89F9VzUzUXlgzyPYvFWriKVbgWsERdevxXqAI/COVELiam8Q3KN/B9u3jlC9SQRL2KEd4GScsqdxXrWUpBWzn66TtQtKq/WtlUnie2sLT4qbV4KtV7JSAyfWh9C5Lf33AHTQnuq/EpedjfCjOU4xD0jZNiMqJJWVyt7AhfFda6jWCZK8Afr/6ThENTdax6PI+WPfCSjqK0DGcYROKR6Y0ia75CIqj5kOKvzMRmPhiRYF60rbS/gvtaNtGV1TBzSYxyF6tTHdUJew1GgyQnqOl8sDed9S2YMYy/6LXjrDXRGWjIBO9edy34LfvpREjLn9P7vqg9kQ79shN4AAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="27">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAACMUlEQVR42tVVrW/bQBTvnzBYGDpoaBgaaGhoaHrw0HTQKkuZByK5JKoCLCsgkg0iOSBSAiI5YMCgwKDAYKD07f3e+bwUbNW6dNIsPeWcD/8+73Jz879ezdlQvVNUrCJS+Yz+GXD3NKcDAx/2mtpvCfXPqb0/ah7zcUTqrSK9DqhhkHITU9el7ACvKyUk8HlzMjxaXq8G7JRCXfeUiu2Yx50FhCNtmwyOzKnII1nLsCvvBvYXPk3uJ1SwonQdMVhCejkTF9RqRi/fM0uGlbfnRMDgTj0Qy6qYo7FEQPKPgPU6FHCsvaVn1yufHKmkiiheTIUMnEk2Ib30mTjRczSuoEKunY+xNUz0l8B4sHc3EQAMVMZQzANAlQcUfvUpePBlrRgUD+371JaRCbhCGhZwGVu2jYWUxHS2cb0Cdsrihynd3t2St2DVXz6RqUJKlgGVUMM/KtnWYh1LHBgohM2OhI1AS0Evi+mcaB0J/m7H98UlEad+mk/Ju/8spAyr1Kw2Yrs7FOxoM3U5A7hmUvNVOD543KKsFOrFJZAZdovslLP5fRRuQAqOmDzkCSjdKy5XJPGUG1u+ciibcwPvF0zC7Q5MPZB6Zf1bl+tBynmiZI87tv+oButj2W5SMFYj1m/dYWSjAjmMqP6bAwq2YhAF8j2MYEbsRExSMLxf/VTaHLUU86pn/6j6ZLeVIwGFst+5J47cx/0JnaxSV67DHgTsDrjqEfzWhZz750yajiP4vc/5ASZt46J2q67KAAAAAElFTkSuQmCC
</data>
</image>
</tile>
<tile id="30">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAAF0lEQVR42mNgGAWjYBSMglEwCkbBSAcACBAAAb475JcAAAAASUVORK5CYII=
</data>
</image>
</tile>
<tile id="31">
<image format="png">
<data encoding="base64">
iVBORw0KGgoAAAANSUhEUgAAACAAAAAQCAYAAAB3AH1ZAAAAF0lEQVR42mNgGAWjYBSMglEwCkbBSAcACBAAAb475JcAAAAASUVORK5CYII=
</data>
</image>
</tile>
</tileset>


@@ -0,0 +1,55 @@
module sfc-lisp-impl {
yang-version 1;
namespace "urn:opendaylight:params:xml:ns:yang:controller:config:sfc-lisp:impl";
prefix "sfc-lisp-impl";
import config { prefix config; revision-date 2013-04-05; }
import rpc-context { prefix rpcx; revision-date 2013-06-17; }
import opendaylight-md-sal-binding { prefix mdsal; revision-date 2013-10-28; }
description
"This module contains the base YANG definitions for
sfc-lisp implementation.";
revision "2015-04-27" {
description
"Initial revision.";
}
// This is the definition of the service implementation as a module identity
identity sfc-lisp-impl {
base config:module-type;
// Specifies the prefix for generated java classes.
config:java-name-prefix SfcLisp;
}
// Augments the 'configuration' choice node under modules/module.
augment "/config:modules/config:module/config:configuration" {
case sfc-lisp-impl {
when "/config:modules/config:module/config:type = 'sfc-lisp-impl'";
//wires in the data-broker service
container data-broker {
uses config:service-ref {
refine type {
mandatory false;
config:required-identity mdsal:binding-async-data-broker;
}
}
}
container rpc-registry {
uses config:service-ref {
refine type {
mandatory true;
config:required-identity mdsal:binding-rpc-registry;
}
}
}
}
}
}


@@ -5,18 +5,6 @@ set -ex
# Fetch all commits/refs needed to run our tests.
git fetch origin master:master v2.0.0:v2.0.0 test/attributes:test/attributes test/master:test/master
sudo apt-get update
script/vendor-deb libicu48 libicu-dev
if ruby -e 'exit RUBY_VERSION >= "2.0" && RUBY_VERSION < "2.1"'; then
# Workaround for https://bugs.ruby-lang.org/issues/8074. We can't use this
# solution on all versions of Ruby due to
# https://github.com/bundler/bundler/pull/3338.
bundle config build.charlock_holmes --with-icu-include=$(pwd)/vendor/debs/include --with-icu-lib=$(pwd)/vendor/debs/lib
else
bundle config build.charlock_holmes --with-icu-dir=$(pwd)/vendor/debs
fi
# Replace SSH links to submodules by HTTPS links.
sed -i 's/git@github.com:/https:\/\/github.com\//' .gitmodules


@@ -1,13 +0,0 @@
#!/bin/sh
set -ex
cd "$(dirname "$0")/.."
mkdir -p vendor/apt vendor/debs
(cd vendor/apt && apt-get --assume-yes download "$@")
for deb in vendor/apt/*.deb; do
ar p $deb data.tar.gz | tar -vzxC vendor/debs --strip-components=2
done

3
test/fixtures/Data/Modelines/ruby4 vendored Normal file

@@ -0,0 +1,3 @@
# vim: filetype=ruby
# I am Ruby

3
test/fixtures/Data/Modelines/ruby5 vendored Normal file

@@ -0,0 +1,3 @@
# vim: ft=ruby
# I am Ruby

3
test/fixtures/Data/Modelines/ruby6 vendored Normal file

@@ -0,0 +1,3 @@
# vim: syntax=Ruby
# I am Ruby

3
test/fixtures/Data/Modelines/ruby7 vendored Normal file

@@ -0,0 +1,3 @@
# vim: se syntax=ruby:
# I am Ruby

3
test/fixtures/Data/Modelines/ruby8 vendored Normal file

@@ -0,0 +1,3 @@
# vim: set syntax=ruby:
# I am Ruby

3
test/fixtures/Data/Modelines/ruby9 vendored Normal file

@@ -0,0 +1,3 @@
# ex: syntax=ruby
# I am Ruby
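The six fixtures above cover the interchangeable vim/ex modeline spellings: the bare "vim:" key=value form, the "se"/"set" forms with a trailing colon, and the "ex:" prefix, with filetype, ft and syntax all naming the language. A simplified regex that accepts all six, as an illustration rather than Linguist's actual matcher:

import re

MODELINE = re.compile(
    r'\b(?:vim?|ex):\s*'              # "vim:", "vi:" or "ex:"
    r'(?:se(?:t)?\s+)?'               # optional "se " / "set "
    r'(?:filetype|ft|syntax)\s*=\s*'  # the three interchangeable keys
    r'(\w+)',
    re.IGNORECASE)

fixtures = [
    '# vim: filetype=ruby',    '# vim: ft=ruby',
    '# vim: syntax=Ruby',      '# vim: se syntax=ruby:',
    '# vim: set syntax=ruby:', '# ex: syntax=ruby',
]
assert all(MODELINE.search(f).group(1).lower() == 'ruby' for f in fixtures)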


@@ -3,6 +3,7 @@ require "minitest/autorun"
require "mocha/setup"
require "linguist"
require 'color-proximity'
require "linguist/blob"
require 'licensee'
def fixtures_path
@@ -10,8 +11,14 @@ def fixtures_path
end
def fixture_blob(name)
name = File.join(fixtures_path, name) unless name =~ /^\//
Linguist::FileBlob.new(name, fixtures_path)
filepath = (name =~ /^\//)? name : File.join(fixtures_path, name)
Linguist::FileBlob.new(filepath, fixtures_path)
end
def fixture_blob_memory(name)
filepath = (name =~ /^\//)? name : File.join(fixtures_path, name)
content = File.read(filepath)
Linguist::Blob.new(name, content)
end
def samples_path
@@ -19,6 +26,12 @@ def samples_path
end
def sample_blob(name)
name = File.join(samples_path, name) unless name =~ /^\//
Linguist::FileBlob.new(name, samples_path)
filepath = (name =~ /^\//)? name : File.join(samples_path, name)
Linguist::FileBlob.new(filepath, samples_path)
end
def sample_blob_memory(name)
filepath = (name =~ /^\//)? name : File.join(samples_path, name)
content = File.read(filepath)
Linguist::Blob.new(name, content)
end
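The new fixture_blob_memory and sample_blob_memory helpers build a Linguist::Blob from content that has already been read into memory, whereas the existing helpers return a Linguist::FileBlob backed by a file on disk. A short sketch of the distinction, assuming a checkout with the samples tree present:

    require "linguist"

    path = "samples/Ruby/foo.rb"
    file_blob = Linguist::FileBlob.new(path)               # reads from disk on demand
    mem_blob  = Linguist::Blob.new(path, File.read(path))  # content supplied up front

    file_blob.language.name  # => "Ruby"
    mem_blob.language.name   # => "Ruby"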

View File

@@ -15,50 +15,47 @@ class TestBlob < Minitest::Test
end
def script_blob(name)
blob = sample_blob(name)
blob = sample_blob_memory(name)
blob.instance_variable_set(:@name, 'script')
blob
end
def test_name
assert_equal "foo.rb", sample_blob("foo.rb").name
assert_equal "foo.rb", sample_blob_memory("Ruby/foo.rb").name
end
def test_mime_type
assert_equal "application/postscript", fixture_blob("Binary/octocat.ai").mime_type
assert_equal "application/x-ruby", sample_blob("Ruby/grit.rb").mime_type
assert_equal "application/x-sh", sample_blob("Shell/script.sh").mime_type
assert_equal "application/xml", sample_blob("XML/bar.xml").mime_type
assert_equal "audio/ogg", fixture_blob("Binary/foo.ogg").mime_type
assert_equal "text/plain", fixture_blob("Data/README").mime_type
assert_equal "application/postscript", fixture_blob_memory("Binary/octocat.ai").mime_type
assert_equal "application/x-ruby", sample_blob_memory("Ruby/grit.rb").mime_type
assert_equal "application/x-sh", sample_blob_memory("Shell/script.sh").mime_type
assert_equal "text/plain", fixture_blob_memory("Data/README").mime_type
end
def test_content_type
assert_equal "application/pdf", fixture_blob("Binary/foo.pdf").content_type
assert_equal "audio/ogg", fixture_blob("Binary/foo.ogg").content_type
assert_equal "image/png", fixture_blob("Binary/foo.png").content_type
assert_equal "text/plain; charset=iso-8859-2", fixture_blob("Data/README").content_type
assert_equal "application/pdf", fixture_blob_memory("Binary/foo.pdf").content_type
assert_equal "image/png", fixture_blob_memory("Binary/foo.png").content_type
assert_equal "text/plain; charset=iso-8859-2", fixture_blob_memory("Data/README").content_type
end
def test_disposition
assert_equal "attachment; filename=foo+bar.jar", fixture_blob("Binary/foo bar.jar").disposition
assert_equal "attachment; filename=foo.bin", fixture_blob("Binary/foo.bin").disposition
assert_equal "attachment; filename=linguist.gem", fixture_blob("Binary/linguist.gem").disposition
assert_equal "attachment; filename=octocat.ai", fixture_blob("Binary/octocat.ai").disposition
assert_equal "inline", fixture_blob("Data/README").disposition
assert_equal "inline", sample_blob("Text/foo.txt").disposition
assert_equal "inline", sample_blob("Ruby/grit.rb").disposition
assert_equal "inline", fixture_blob("Binary/octocat.png").disposition
assert_equal "attachment; filename=foo+bar.jar", fixture_blob_memory("Binary/foo bar.jar").disposition
assert_equal "attachment; filename=foo.bin", fixture_blob_memory("Binary/foo.bin").disposition
assert_equal "attachment; filename=linguist.gem", fixture_blob_memory("Binary/linguist.gem").disposition
assert_equal "attachment; filename=octocat.ai", fixture_blob_memory("Binary/octocat.ai").disposition
assert_equal "inline", fixture_blob_memory("Data/README").disposition
assert_equal "inline", sample_blob_memory("Text/foo.txt").disposition
assert_equal "inline", sample_blob_memory("Ruby/grit.rb").disposition
assert_equal "inline", fixture_blob_memory("Binary/octocat.png").disposition
end
def test_data
assert_equal "module Foo\nend\n", sample_blob("Ruby/foo.rb").data
assert_equal "module Foo\nend\n", sample_blob_memory("Ruby/foo.rb").data
end
def test_lines
assert_equal ["module Foo", "end", ""], sample_blob("Ruby/foo.rb").lines
assert_equal ["line 1", "line 2", ""], sample_blob("Text/mac.txt").lines
assert_equal 475, sample_blob("Emacs Lisp/ess-julia.el").lines.length
assert_equal ["module Foo", "end", ""], sample_blob_memory("Ruby/foo.rb").lines
assert_equal ["line 1", "line 2", ""], sample_blob_memory("Text/mac.txt").lines
assert_equal 475, sample_blob_memory("Emacs Lisp/ess-julia.el").lines.length
end
def test_lines_maintains_original_encoding
@@ -66,539 +63,178 @@ class TestBlob < Minitest::Test
# earlier versions of the gem made implicit guarantees that the encoding of
# each `line` is in the same encoding as the file was originally read (in
# practice, UTF-8 or ASCII-8BIT)
assert_equal Encoding.default_external, fixture_blob("Data/utf16le").lines.first.encoding
assert_equal Encoding.default_external, fixture_blob_memory("Data/utf16le").lines.first.encoding
end
def test_size
assert_equal 15, sample_blob("Ruby/foo.rb").size
assert_equal 15, sample_blob_memory("Ruby/foo.rb").size
end
def test_loc
assert_equal 3, sample_blob("Ruby/foo.rb").loc
assert_equal 3, sample_blob_memory("Ruby/foo.rb").loc
end
def test_sloc
assert_equal 2, sample_blob("Ruby/foo.rb").sloc
assert_equal 3, fixture_blob("Data/utf16le-windows").sloc
assert_equal 1, fixture_blob("Data/iso8859-8-i").sloc
assert_equal 2, sample_blob_memory("Ruby/foo.rb").sloc
assert_equal 3, fixture_blob_memory("Data/utf16le-windows").sloc
assert_equal 1, fixture_blob_memory("Data/iso8859-8-i").sloc
end
def test_encoding
assert_equal "ISO-8859-2", fixture_blob("Data/README").encoding
assert_equal "ISO-8859-2", fixture_blob("Data/README").ruby_encoding
assert_equal "UTF-8", sample_blob("Text/foo.txt").encoding
assert_equal "UTF-8", sample_blob("Text/foo.txt").ruby_encoding
assert_equal "UTF-16LE", fixture_blob("Data/utf16le").encoding
assert_equal "UTF-16LE", fixture_blob("Data/utf16le").ruby_encoding
assert_equal "UTF-16LE", fixture_blob("Data/utf16le-windows").encoding
assert_equal "UTF-16LE", fixture_blob("Data/utf16le-windows").ruby_encoding
assert_equal "ISO-2022-KR", sample_blob("Text/ISO-2022-KR.txt").encoding
assert_equal "binary", sample_blob("Text/ISO-2022-KR.txt").ruby_encoding
assert_nil fixture_blob("Binary/dog.o").encoding
assert_equal "ISO-8859-2", fixture_blob_memory("Data/README").encoding
assert_equal "ISO-8859-2", fixture_blob_memory("Data/README").ruby_encoding
assert_equal "UTF-8", sample_blob_memory("Text/foo.txt").encoding
assert_equal "UTF-8", sample_blob_memory("Text/foo.txt").ruby_encoding
assert_equal "UTF-16LE", fixture_blob_memory("Data/utf16le").encoding
assert_equal "UTF-16LE", fixture_blob_memory("Data/utf16le").ruby_encoding
assert_equal "UTF-16LE", fixture_blob_memory("Data/utf16le-windows").encoding
assert_equal "UTF-16LE", fixture_blob_memory("Data/utf16le-windows").ruby_encoding
assert_equal "ISO-2022-KR", sample_blob_memory("Text/ISO-2022-KR.txt").encoding
assert_equal "binary", sample_blob_memory("Text/ISO-2022-KR.txt").ruby_encoding
assert_nil fixture_blob_memory("Binary/dog.o").encoding
end
def test_binary
# Large blobs aren't loaded
large_blob = sample_blob("git.exe")
large_blob.instance_eval do
def data; end
end
assert large_blob.binary?
assert fixture_blob("Binary/git.deb").binary?
assert fixture_blob("Binary/git.exe").binary?
assert fixture_blob("Binary/hello.pbc").binary?
assert fixture_blob("Binary/linguist.gem").binary?
assert fixture_blob("Binary/octocat.ai").binary?
assert fixture_blob("Binary/octocat.png").binary?
assert fixture_blob("Binary/zip").binary?
assert !fixture_blob("Data/README").binary?
assert !sample_blob("Ruby/foo.rb").binary?
assert !sample_blob("Perl/script.pl").binary?
assert fixture_blob_memory("Binary/git.deb").binary?
assert fixture_blob_memory("Binary/hello.pbc").binary?
assert fixture_blob_memory("Binary/linguist.gem").binary?
assert fixture_blob_memory("Binary/octocat.ai").binary?
assert fixture_blob_memory("Binary/octocat.png").binary?
assert fixture_blob_memory("Binary/zip").binary?
assert !fixture_blob_memory("Data/README").binary?
assert !sample_blob_memory("Ruby/foo.rb").binary?
assert !sample_blob_memory("Perl/script.pl").binary?
end
def test_all_binary
Samples.each do |sample|
blob = sample_blob(sample[:path])
blob = sample_blob_memory(sample[:path])
assert ! (blob.likely_binary? || blob.binary?), "#{sample[:path]} is a binary file"
end
end
def test_text
assert fixture_blob("Data/README").text?
assert fixture_blob("Data/md").text?
assert sample_blob("Shell/script.sh").text?
assert fixture_blob("Data/txt").text?
assert fixture_blob_memory("Data/README").text?
assert fixture_blob_memory("Data/md").text?
assert sample_blob_memory("Shell/script.sh").text?
assert fixture_blob_memory("Data/txt").text?
end
def test_image
assert fixture_blob("Binary/octocat.gif").image?
assert fixture_blob("Binary/octocat.jpeg").image?
assert fixture_blob("Binary/octocat.jpg").image?
assert fixture_blob("Binary/octocat.png").image?
assert !fixture_blob("Binary/octocat.ai").image?
assert !fixture_blob("Binary/octocat.psd").image?
assert fixture_blob_memory("Binary/octocat.png").image?
assert !fixture_blob_memory("Binary/octocat.ai").image?
assert !fixture_blob_memory("Binary/octocat.psd").image?
end
def test_solid
assert fixture_blob("Binary/cube.stl").solid?
assert fixture_blob("Data/cube.stl").solid?
assert fixture_blob_memory("Binary/cube.stl").solid?
assert fixture_blob_memory("Data/cube.stl").solid?
end
def test_csv
assert fixture_blob("Data/cars.csv").csv?
assert fixture_blob_memory("Data/cars.csv").csv?
end
def test_pdf
assert fixture_blob("Binary/foo.pdf").pdf?
assert fixture_blob_memory("Binary/foo.pdf").pdf?
end
def test_viewable
assert fixture_blob("Data/README").viewable?
assert sample_blob("Ruby/foo.rb").viewable?
assert sample_blob("Perl/script.pl").viewable?
assert !fixture_blob("Binary/linguist.gem").viewable?
assert !fixture_blob("Binary/octocat.ai").viewable?
assert !fixture_blob("Binary/octocat.png").viewable?
assert fixture_blob_memory("Data/README").viewable?
assert sample_blob_memory("Ruby/foo.rb").viewable?
assert sample_blob_memory("Perl/script.pl").viewable?
assert !fixture_blob_memory("Binary/linguist.gem").viewable?
assert !fixture_blob_memory("Binary/octocat.ai").viewable?
assert !fixture_blob_memory("Binary/octocat.png").viewable?
end
def test_generated
assert !fixture_blob("Data/README").generated?
# Xcode project files
assert !sample_blob("XML/MainMenu.xib").generated?
assert fixture_blob("Binary/MainMenu.nib").generated?
assert !sample_blob("XML/project.pbxproj").generated?
# Gemfile.lock is NOT generated
assert !sample_blob("Gemfile.lock").generated?
assert !fixture_blob_memory("Data/README").generated?
# Generated .NET Docfiles
assert sample_blob("XML/net_docfile.xml").generated?
assert sample_blob_memory("XML/net_docfile.xml").generated?
# Long line
assert !sample_blob("JavaScript/uglify.js").generated?
assert !sample_blob_memory("JavaScript/uglify.js").generated?
# Inlined JS, but mostly code
assert !sample_blob("JavaScript/json2_backbone.js").generated?
assert !sample_blob_memory("JavaScript/json2_backbone.js").generated?
# Minified JS
assert !sample_blob("JavaScript/jquery-1.6.1.js").generated?
assert sample_blob("JavaScript/jquery-1.6.1.min.js").generated?
assert sample_blob("JavaScript/jquery-1.4.2.min.js").generated?
# CoffeeScript-generated JS
# TODO
# TypeScript-generated JS
# TODO
assert !sample_blob_memory("JavaScript/jquery-1.6.1.js").generated?
assert sample_blob_memory("JavaScript/jquery-1.6.1.min.js").generated?
assert sample_blob_memory("JavaScript/jquery-1.4.2.min.js").generated?
# Composer generated composer.lock file
assert sample_blob("JSON/composer.lock").generated?
assert sample_blob_memory("JSON/composer.lock").generated?
# PEG.js-generated parsers
assert sample_blob("JavaScript/parser.js").generated?
assert sample_blob_memory("JavaScript/parser.js").generated?
# Generated PostScript
assert !sample_blob("PostScript/sierpinski.ps").generated?
assert !sample_blob_memory("PostScript/sierpinski.ps").generated?
# These examples are too basic to tell
assert !sample_blob("JavaScript/hello.js").generated?
assert !sample_blob_memory("JavaScript/hello.js").generated?
assert sample_blob("JavaScript/intro-old.js").generated?
assert sample_blob("JavaScript/classes-old.js").generated?
assert sample_blob_memory("JavaScript/intro-old.js").generated?
assert sample_blob_memory("JavaScript/classes-old.js").generated?
assert sample_blob("JavaScript/intro.js").generated?
assert sample_blob("JavaScript/classes.js").generated?
assert sample_blob_memory("JavaScript/intro.js").generated?
assert sample_blob_memory("JavaScript/classes.js").generated?
# Protocol Buffer generated code
assert sample_blob("C++/protocol-buffer.pb.h").generated?
assert sample_blob("C++/protocol-buffer.pb.cc").generated?
assert sample_blob("Java/ProtocolBuffer.java").generated?
assert sample_blob("Python/protocol_buffer_pb2.py").generated?
assert sample_blob("Go/api.pb.go").generated?
assert sample_blob("Go/embedded.go").generated?
assert sample_blob_memory("C++/protocol-buffer.pb.h").generated?
assert sample_blob_memory("C++/protocol-buffer.pb.cc").generated?
assert sample_blob_memory("Java/ProtocolBuffer.java").generated?
assert sample_blob_memory("Python/protocol_buffer_pb2.py").generated?
assert sample_blob_memory("Go/api.pb.go").generated?
assert sample_blob_memory("Go/embedded.go").generated?
# Apache Thrift generated code
assert sample_blob("Python/gen-py-linguist-thrift.py").generated?
assert sample_blob("Go/gen-go-linguist-thrift.go").generated?
assert sample_blob("Java/gen-java-linguist-thrift.java").generated?
assert sample_blob("JavaScript/gen-js-linguist-thrift.js").generated?
assert sample_blob("Ruby/gen-rb-linguist-thrift.rb").generated?
assert sample_blob("Objective-C/gen-cocoa-linguist-thrift.m").generated?
assert sample_blob_memory("Python/gen-py-linguist-thrift.py").generated?
assert sample_blob_memory("Go/gen-go-linguist-thrift.go").generated?
assert sample_blob_memory("Java/gen-java-linguist-thrift.java").generated?
assert sample_blob_memory("JavaScript/gen-js-linguist-thrift.js").generated?
assert sample_blob_memory("Ruby/gen-rb-linguist-thrift.rb").generated?
assert sample_blob_memory("Objective-C/gen-cocoa-linguist-thrift.m").generated?
# Generated JNI
assert sample_blob("C/jni_layer.h").generated?
assert sample_blob_memory("C/jni_layer.h").generated?
# Minified CSS
assert !sample_blob("CSS/bootstrap.css").generated?
assert sample_blob("CSS/bootstrap.min.css").generated?
assert !sample_blob_memory("CSS/bootstrap.css").generated?
assert sample_blob_memory("CSS/bootstrap.min.css").generated?
# Generated VCR
assert sample_blob("YAML/vcr_cassette.yml").generated?
assert sample_blob_memory("YAML/vcr_cassette.yml").generated?
# Generated by Zephir
assert sample_blob("Zephir/filenames/exception.zep.c").generated?
assert sample_blob("Zephir/filenames/exception.zep.h").generated?
assert sample_blob("Zephir/filenames/exception.zep.php").generated?
assert !sample_blob("Zephir/Router.zep").generated?
assert sample_blob("node_modules/grunt/lib/grunt.js").generated?
assert !sample_blob_memory("Zephir/Router.zep").generated?
# Go vendored dependencies
refute sample_blob("vendor/vendor.json").generated?
assert sample_blob("vendor/github.com/kr/s3/sign.go").generated?
refute fixture_blob("go/food_vendor/candy.go").generated?
# Godep saved dependencies
assert sample_blob("Godeps/Godeps.json").generated?
assert sample_blob("Godeps/_workspace/src/github.com/kr/s3/sign.go").generated?
# Cython-generated C/C++
assert sample_blob("C/sgd_fast.c").generated?
assert sample_blob("C++/wrapper_inner.cpp").generated?
assert sample_blob_memory("C/sgd_fast.c").generated?
assert sample_blob_memory("C++/wrapper_inner.cpp").generated?
# Unity3D-generated metadata
assert sample_blob("Unity3D Asset/Tiles.meta").generated?
assert sample_blob_memory("Unity3D Asset/Tiles.meta").generated?
# Racc-generated Ruby
assert sample_blob_memory("Ruby/racc.rb").generated?
end
def test_vendored
assert !fixture_blob("Data/README").vendored?
assert !sample_blob("ext/extconf.rb").vendored?
# Dependencies
assert sample_blob("dependencies/windows/headers/GL/glext.h").vendored?
# Node dependencies
assert sample_blob("node_modules/coffee-script/lib/coffee-script.js").vendored?
# Bower Components
assert sample_blob("bower_components/custom/custom.js").vendored?
assert sample_blob("app/bower_components/custom/custom.js").vendored?
assert sample_blob("vendor/assets/bower_components/custom/custom.js").vendored?
# Go dependencies
assert !sample_blob("Godeps/Godeps.json").vendored?
assert sample_blob("Godeps/_workspace/src/github.com/kr/s3/sign.go").vendored?
# Rails vendor/
assert sample_blob("vendor/plugins/will_paginate/lib/will_paginate.rb").vendored?
# Vendor/
assert sample_blob("Vendor/my_great_file.h").vendored?
# 'thirdparty' directory
assert sample_blob("thirdparty/lib/main.c").vendored?
# 'extern(al)' directory
assert sample_blob("extern/util/__init__.py").vendored?
assert sample_blob("external/jquery.min.js").vendored?
# C deps
assert sample_blob("deps/http_parser/http_parser.c").vendored?
assert sample_blob("deps/v8/src/v8.h").vendored?
assert sample_blob("tools/something/else.c").vendored?
# Chart.js
assert sample_blob("some/vendored/path/Chart.js").vendored?
assert !sample_blob("some/vendored/path/chart.js").vendored?
# Codemirror deps
assert sample_blob("codemirror/mode/blah.js").vendored?
assert sample_blob("codemirror/5.0/mode/blah.js").vendored?
# Debian packaging
assert sample_blob("debian/cron.d").vendored?
# Erlang
assert sample_blob("rebar").vendored?
# git config files
assert_predicate fixture_blob("some/path/.gitattributes"), :vendored?
assert_predicate fixture_blob(".gitignore"), :vendored?
assert_predicate fixture_blob("special/path/.gitmodules"), :vendored?
# Minified JavaScript and CSS
assert sample_blob("foo.min.js").vendored?
assert sample_blob("foo.min.css").vendored?
assert sample_blob("foo-min.js").vendored?
assert sample_blob("foo-min.css").vendored?
assert !sample_blob("foomin.css").vendored?
assert !sample_blob("foo.min.txt").vendored?
#.osx
assert sample_blob(".osx").vendored?
# Prototype
assert !sample_blob("public/javascripts/application.js").vendored?
assert sample_blob("public/javascripts/prototype.js").vendored?
assert sample_blob("public/javascripts/effects.js").vendored?
assert sample_blob("public/javascripts/controls.js").vendored?
assert sample_blob("public/javascripts/dragdrop.js").vendored?
# jQuery
assert sample_blob("jquery.js").vendored?
assert sample_blob("public/javascripts/jquery.js").vendored?
assert sample_blob("public/javascripts/jquery.min.js").vendored?
assert sample_blob("public/javascripts/jquery-1.7.js").vendored?
assert sample_blob("public/javascripts/jquery-1.7.min.js").vendored?
assert sample_blob("public/javascripts/jquery-1.5.2.js").vendored?
assert sample_blob("public/javascripts/jquery-1.6.1.js").vendored?
assert sample_blob("public/javascripts/jquery-1.6.1.min.js").vendored?
assert sample_blob("public/javascripts/jquery-1.10.1.js").vendored?
assert sample_blob("public/javascripts/jquery-1.10.1.min.js").vendored?
assert !sample_blob("public/javascripts/jquery.github.menu.js").vendored?
# jQuery UI
assert sample_blob("themes/ui-lightness/jquery-ui.css").vendored?
assert sample_blob("themes/ui-lightness/jquery-ui-1.8.22.custom.css").vendored?
assert sample_blob("themes/ui-lightness/jquery.ui.accordion.css").vendored?
assert sample_blob("ui/i18n/jquery.ui.datepicker-ar.js").vendored?
assert sample_blob("ui/i18n/jquery-ui-i18n.js").vendored?
assert sample_blob("ui/jquery.effects.blind.js").vendored?
assert sample_blob("ui/jquery-ui-1.8.22.custom.js").vendored?
assert sample_blob("ui/jquery-ui-1.8.22.custom.min.js").vendored?
assert sample_blob("ui/jquery-ui-1.8.22.js").vendored?
assert sample_blob("ui/jquery-ui-1.8.js").vendored?
assert sample_blob("ui/jquery-ui.min.js").vendored?
assert sample_blob("ui/jquery.ui.accordion.js").vendored?
assert sample_blob("ui/minified/jquery.effects.blind.min.js").vendored?
assert sample_blob("ui/minified/jquery.ui.accordion.min.js").vendored?
# jQuery Gantt
assert sample_blob("web-app/jquery-gantt/js/jquery.fn.gantt.js").vendored?
# jQuery fancyBox
assert sample_blob("web-app/fancybox/jquery.fancybox.js").vendored?
# Fuel UX
assert sample_blob("web-app/fuelux/js/fuelux.js").vendored?
# jQuery File Upload
assert sample_blob("fileupload-9.0.0/jquery.fileupload-process.js").vendored?
# Slick
assert sample_blob("web-app/slickgrid/controls/slick.columnpicker.js").vendored?
# Leaflet plugins
assert sample_blob("leaflet-plugins/Leaflet.Coordinates-0.5.0.src.js").vendored?
assert sample_blob("leaflet-plugins/leaflet.draw-src.js").vendored?
assert sample_blob("leaflet-plugins/leaflet.spin.js").vendored?
# MooTools
assert sample_blob("public/javascripts/mootools-core-1.3.2-full-compat.js").vendored?
assert sample_blob("public/javascripts/mootools-core-1.3.2-full-compat-yc.js").vendored?
# Dojo
assert sample_blob("public/javascripts/dojo.js").vendored?
# MochiKit
assert sample_blob("public/javascripts/MochiKit.js").vendored?
# YUI
assert sample_blob("public/javascripts/yahoo-dom-event.js").vendored?
assert sample_blob("public/javascripts/yahoo-min.js").vendored?
assert sample_blob("public/javascripts/yuiloader-dom-event.js").vendored?
# WYS editors
assert sample_blob("public/javascripts/ckeditor.js").vendored?
assert sample_blob("public/javascripts/tiny_mce.js").vendored?
assert sample_blob("public/javascripts/tiny_mce_popup.js").vendored?
assert sample_blob("public/javascripts/tiny_mce_src.js").vendored?
# AngularJS
assert sample_blob("public/javascripts/angular.js").vendored?
assert sample_blob("public/javascripts/angular.min.js").vendored?
# D3.js
assert sample_blob("public/javascripts/d3.v3.js").vendored?
assert sample_blob("public/javascripts/d3.v3.min.js").vendored?
# Modernizr
assert sample_blob("public/javascripts/modernizr-2.7.1.js").vendored?
assert sample_blob("public/javascripts/modernizr.custom.01009.js").vendored?
# Fabric
assert sample_blob("fabfile.py").vendored?
# WAF
assert sample_blob("waf").vendored?
# Visual Studio IntelliSense
assert sample_blob("Scripts/jquery-1.7-vsdoc.js").vendored?
# Microsoft Ajax
assert sample_blob("Scripts/MicrosoftAjax.debug.js").vendored?
assert sample_blob("Scripts/MicrosoftAjax.js").vendored?
assert sample_blob("Scripts/MicrosoftMvcAjax.debug.js").vendored?
assert sample_blob("Scripts/MicrosoftMvcAjax.js").vendored?
assert sample_blob("Scripts/MicrosoftMvcValidation.debug.js").vendored?
assert sample_blob("Scripts/MicrosoftMvcValidation.js").vendored?
# jQuery validation plugin (MS bundles this with asp.net mvc)
assert sample_blob("Scripts/jquery.validate.js").vendored?
assert sample_blob("Scripts/jquery.validate.min.js").vendored?
assert sample_blob("Scripts/jquery.validate.unobtrusive.js").vendored?
assert sample_blob("Scripts/jquery.validate.unobtrusive.min.js").vendored?
assert sample_blob("Scripts/jquery.unobtrusive-ajax.js").vendored?
assert sample_blob("Scripts/jquery.unobtrusive-ajax.min.js").vendored?
# NuGet Packages
assert sample_blob("packages/Modernizr.2.0.6/Content/Scripts/modernizr-2.0.6-development-only.js").vendored?
# Font Awesome
assert sample_blob("some/asset/path/font-awesome.min.css").vendored?
assert sample_blob("some/asset/path/font-awesome.css").vendored?
# Normalize
assert sample_blob("some/asset/path/normalize.css").vendored?
# Carthage
assert sample_blob('Carthage/blah').vendored?
# Cocoapods
assert sample_blob('Pods/blah').vendored?
# Html5shiv
assert sample_blob("Scripts/html5shiv.js").vendored?
assert sample_blob("Scripts/html5shiv.min.js").vendored?
# Test fixtures
assert sample_blob("test/fixtures/random.rkt").vendored?
assert sample_blob("Test/fixtures/random.rkt").vendored?
assert sample_blob("tests/fixtures/random.rkt").vendored?
# Cordova/PhoneGap
assert sample_blob("cordova.js").vendored?
assert sample_blob("cordova.min.js").vendored?
assert sample_blob("cordova-2.1.0.js").vendored?
assert sample_blob("cordova-2.1.0.min.js").vendored?
# Foundation js
assert sample_blob("foundation.js").vendored?
assert sample_blob("foundation.min.js").vendored?
assert sample_blob("foundation.abide.js").vendored?
# Vagrant
assert sample_blob("Vagrantfile").vendored?
# Gradle
assert sample_blob("gradlew").vendored?
assert sample_blob("gradlew.bat").vendored?
assert sample_blob("gradle/wrapper/gradle-wrapper.properties").vendored?
assert sample_blob("subproject/gradlew").vendored?
assert sample_blob("subproject/gradlew.bat").vendored?
assert sample_blob("subproject/gradle/wrapper/gradle-wrapper.properties").vendored?
# Octicons
assert sample_blob("octicons.css").vendored?
assert sample_blob("public/octicons.min.css").vendored?
assert sample_blob("public/octicons/sprockets-octicons.scss").vendored?
# Typesafe Activator
assert sample_blob("activator").vendored?
assert sample_blob("activator.bat").vendored?
assert sample_blob("subproject/activator").vendored?
assert sample_blob("subproject/activator.bat").vendored?
assert_predicate fixture_blob(".google_apis/bar.jar"), :vendored?
assert_predicate fixture_blob("foo/.google_apis/bar.jar"), :vendored?
# Sphinx docs
assert sample_blob("docs/_build/asset.doc").vendored?
assert sample_blob("docs/theme/file.css").vendored?
# Vagrant
assert sample_blob("puphpet/file.pp").vendored?
# Fabric.io
assert sample_blob("Fabric.framework/Fabric.h").vendored?
# Crashlytics
assert sample_blob("Crashlytics.framework/Crashlytics.h").vendored?
# Xcode
assert sample_blob("myapp/My Template.xctemplate/___FILEBASENAME___.h").vendored?
assert sample_blob("myapp/My Images.xcassets/some/stuff.imageset/Contents.json").vendored?
assert !sample_blob("myapp/MyData.json").vendored?
end
def test_documentation
assert_predicate fixture_blob("doc/foo.html"), :documentation?
assert_predicate fixture_blob("docs/foo.html"), :documentation?
refute_predicate fixture_blob("project/doc/foo.html"), :documentation?
refute_predicate fixture_blob("project/docs/foo.html"), :documentation?
assert_predicate fixture_blob("Documentation/foo.md"), :documentation?
assert_predicate fixture_blob("documentation/foo.md"), :documentation?
assert_predicate fixture_blob("project/Documentation/foo.md"), :documentation?
assert_predicate fixture_blob("project/documentation/foo.md"), :documentation?
assert_predicate fixture_blob("javadoc/foo.html"), :documentation?
assert_predicate fixture_blob("project/javadoc/foo.html"), :documentation?
assert_predicate fixture_blob("man/foo.html"), :documentation?
refute_predicate fixture_blob("project/man/foo.html"), :documentation?
assert_predicate fixture_blob("README"), :documentation?
assert_predicate fixture_blob("README.md"), :documentation?
assert_predicate fixture_blob("README.txt"), :documentation?
assert_predicate fixture_blob("Readme"), :documentation?
assert_predicate fixture_blob("readme"), :documentation?
assert_predicate fixture_blob("foo/README"), :documentation?
assert_predicate fixture_blob("CHANGE"), :documentation?
assert_predicate fixture_blob("CHANGE.md"), :documentation?
assert_predicate fixture_blob("CHANGE.txt"), :documentation?
assert_predicate fixture_blob("foo/CHANGE"), :documentation?
assert_predicate fixture_blob("CHANGELOG"), :documentation?
assert_predicate fixture_blob("CHANGELOG.md"), :documentation?
assert_predicate fixture_blob("CHANGELOG.txt"), :documentation?
assert_predicate fixture_blob("foo/CHANGELOG"), :documentation?
assert_predicate fixture_blob("CHANGES"), :documentation?
assert_predicate fixture_blob("CHANGES.md"), :documentation?
assert_predicate fixture_blob("CHANGES.txt"), :documentation?
assert_predicate fixture_blob("foo/CHANGES"), :documentation?
assert_predicate fixture_blob("CONTRIBUTING"), :documentation?
assert_predicate fixture_blob("CONTRIBUTING.md"), :documentation?
assert_predicate fixture_blob("CONTRIBUTING.txt"), :documentation?
assert_predicate fixture_blob("foo/CONTRIBUTING"), :documentation?
assert_predicate fixture_blob("examples/some-file.pl"), :documentation?
assert_predicate fixture_blob("Examples/some-example-file.rb"), :documentation?
assert_predicate fixture_blob("LICENSE"), :documentation?
assert_predicate fixture_blob("LICENCE.md"), :documentation?
assert_predicate fixture_blob("License.txt"), :documentation?
assert_predicate fixture_blob("LICENSE.txt"), :documentation?
assert_predicate fixture_blob("foo/LICENSE"), :documentation?
assert_predicate fixture_blob("COPYING"), :documentation?
assert_predicate fixture_blob("COPYING.md"), :documentation?
assert_predicate fixture_blob("COPYING.txt"), :documentation?
assert_predicate fixture_blob("foo/COPYING"), :documentation?
assert_predicate fixture_blob("INSTALL"), :documentation?
assert_predicate fixture_blob("INSTALL.md"), :documentation?
assert_predicate fixture_blob("INSTALL.txt"), :documentation?
assert_predicate fixture_blob("foo/INSTALL"), :documentation?
refute_predicate fixture_blob("foo.md"), :documentation?
# Samples
assert sample_blob("Samples/Ruby/foo.rb").documentation?
assert_predicate fixture_blob("INSTALL.txt"), :documentation?
assert !fixture_blob_memory("Data/README").vendored?
end
def test_language
Samples.each do |sample|
blob = sample_blob(sample[:path])
blob = sample_blob_memory(sample[:path])
assert blob.language, "No language for #{sample[:path]}"
assert_equal sample[:language], blob.language.name, blob.name
end
@@ -617,7 +253,7 @@ class TestBlob < Minitest::Test
filepath = File.join(dirname, filename)
next unless File.file?(filepath)
blob = fixture_blob(filepath)
blob = fixture_blob_memory(filepath)
if language == 'Data'
assert blob.language.nil?, "A language was found for #{filepath}"
elsif language == 'Generated'
@@ -631,7 +267,7 @@ class TestBlob < Minitest::Test
end
def test_minified_files_not_safe_to_highlight
assert !sample_blob("JavaScript/jquery-1.6.1.min.js").safe_to_colorize?
assert !sample_blob_memory("JavaScript/jquery-1.6.1.min.js").safe_to_colorize?
end
def test_empty
@@ -644,27 +280,19 @@ class TestBlob < Minitest::Test
end
def test_include_in_language_stats
vendored = sample_blob("bower_components/custom/custom.js")
assert_predicate vendored, :vendored?
refute_predicate vendored, :include_in_language_stats?
documentation = fixture_blob("README")
assert_predicate documentation, :documentation?
refute_predicate documentation, :include_in_language_stats?
generated = sample_blob("CSS/bootstrap.min.css")
generated = sample_blob_memory("CSS/bootstrap.min.css")
assert_predicate generated, :generated?
refute_predicate generated, :include_in_language_stats?
data = sample_blob("Ant Build System/filenames/ant.xml")
data = sample_blob_memory("Ant Build System/filenames/ant.xml")
assert_equal :data, data.language.type
refute_predicate data, :include_in_language_stats?
prose = sample_blob("Markdown/tender.md")
prose = sample_blob_memory("Markdown/tender.md")
assert_equal :prose, prose.language.type
refute_predicate prose, :include_in_language_stats?
included = sample_blob("HTML/pages.html")
included = sample_blob_memory("HTML/pages.html")
assert_predicate included, :include_in_language_stats?
end
end

View File

@@ -1,9 +1,669 @@
require_relative "./helper"
class TestFileBlob < Minitest::Test
class TestBlob < Minitest::Test
include Linguist
def setup
# git blobs are normally loaded as ASCII-8BIT since they may contain data
# with arbitrary encoding not known ahead of time
@original_external = Encoding.default_external
Encoding.default_external = Encoding.find("ASCII-8BIT")
end
def teardown
Encoding.default_external = @original_external
end
def script_blob(name)
blob = sample_blob(name)
blob.instance_variable_set(:@name, 'script')
blob
end
def test_extensions
assert_equal [".gitignore"], Linguist::FileBlob.new(".gitignore").extensions
assert_equal [".xml"], Linguist::FileBlob.new("build.xml").extensions
assert_equal [".html.erb", ".erb"], Linguist::FileBlob.new("dotted.dir/index.html.erb").extensions
end
def test_name
assert_equal "foo.rb", sample_blob("foo.rb").name
end
def test_mime_type
assert_equal "application/postscript", fixture_blob("Binary/octocat.ai").mime_type
assert_equal "application/x-ruby", sample_blob("Ruby/grit.rb").mime_type
assert_equal "application/x-sh", sample_blob("Shell/script.sh").mime_type
assert_equal "application/xml", sample_blob("XML/bar.xml").mime_type
assert_equal "audio/ogg", fixture_blob("Binary/foo.ogg").mime_type
assert_equal "text/plain", fixture_blob("Data/README").mime_type
end
def test_content_type
assert_equal "application/pdf", fixture_blob("Binary/foo.pdf").content_type
assert_equal "audio/ogg", fixture_blob("Binary/foo.ogg").content_type
assert_equal "image/png", fixture_blob("Binary/foo.png").content_type
assert_equal "text/plain; charset=iso-8859-2", fixture_blob("Data/README").content_type
end
def test_disposition
assert_equal "attachment; filename=foo+bar.jar", fixture_blob("Binary/foo bar.jar").disposition
assert_equal "attachment; filename=foo.bin", fixture_blob("Binary/foo.bin").disposition
assert_equal "attachment; filename=linguist.gem", fixture_blob("Binary/linguist.gem").disposition
assert_equal "attachment; filename=octocat.ai", fixture_blob("Binary/octocat.ai").disposition
assert_equal "inline", fixture_blob("Data/README").disposition
assert_equal "inline", sample_blob("Text/foo.txt").disposition
assert_equal "inline", sample_blob("Ruby/grit.rb").disposition
assert_equal "inline", fixture_blob("Binary/octocat.png").disposition
end
def test_data
assert_equal "module Foo\nend\n", sample_blob("Ruby/foo.rb").data
end
def test_lines
assert_equal ["module Foo", "end", ""], sample_blob("Ruby/foo.rb").lines
assert_equal ["line 1", "line 2", ""], sample_blob("Text/mac.txt").lines
assert_equal 475, sample_blob("Emacs Lisp/ess-julia.el").lines.length
end
def test_lines_maintains_original_encoding
# Even if the file's encoding is detected as something like UTF-16LE,
# earlier versions of the gem made implicit guarantees that the encoding of
# each `line` is in the same encoding as the file was originally read (in
# practice, UTF-8 or ASCII-8BIT)
assert_equal Encoding.default_external, fixture_blob("Data/utf16le").lines.first.encoding
end
def test_size
assert_equal 15, sample_blob("Ruby/foo.rb").size
end
def test_loc
assert_equal 3, sample_blob("Ruby/foo.rb").loc
end
def test_sloc
assert_equal 2, sample_blob("Ruby/foo.rb").sloc
assert_equal 3, fixture_blob("Data/utf16le-windows").sloc
assert_equal 1, fixture_blob("Data/iso8859-8-i").sloc
end
def test_encoding
assert_equal "ISO-8859-2", fixture_blob("Data/README").encoding
assert_equal "ISO-8859-2", fixture_blob("Data/README").ruby_encoding
assert_equal "UTF-8", sample_blob("Text/foo.txt").encoding
assert_equal "UTF-8", sample_blob("Text/foo.txt").ruby_encoding
assert_equal "UTF-16LE", fixture_blob("Data/utf16le").encoding
assert_equal "UTF-16LE", fixture_blob("Data/utf16le").ruby_encoding
assert_equal "UTF-16LE", fixture_blob("Data/utf16le-windows").encoding
assert_equal "UTF-16LE", fixture_blob("Data/utf16le-windows").ruby_encoding
assert_equal "ISO-2022-KR", sample_blob("Text/ISO-2022-KR.txt").encoding
assert_equal "binary", sample_blob("Text/ISO-2022-KR.txt").ruby_encoding
assert_nil fixture_blob("Binary/dog.o").encoding
end
def test_binary
# Large blobs aren't loaded
large_blob = sample_blob("git.exe")
large_blob.instance_eval do
def data; end
end
assert large_blob.binary?
assert fixture_blob("Binary/git.deb").binary?
assert fixture_blob("Binary/git.exe").binary?
assert fixture_blob("Binary/hello.pbc").binary?
assert fixture_blob("Binary/linguist.gem").binary?
assert fixture_blob("Binary/octocat.ai").binary?
assert fixture_blob("Binary/octocat.png").binary?
assert fixture_blob("Binary/zip").binary?
assert !fixture_blob("Data/README").binary?
assert !sample_blob("Ruby/foo.rb").binary?
assert !sample_blob("Perl/script.pl").binary?
end
def test_all_binary
Samples.each do |sample|
blob = sample_blob(sample[:path])
assert ! (blob.likely_binary? || blob.binary?), "#{sample[:path]} is a binary file"
end
end
def test_text
assert fixture_blob("Data/README").text?
assert fixture_blob("Data/md").text?
assert sample_blob("Shell/script.sh").text?
assert fixture_blob("Data/txt").text?
end
def test_image
assert fixture_blob("Binary/octocat.gif").image?
assert fixture_blob("Binary/octocat.jpeg").image?
assert fixture_blob("Binary/octocat.jpg").image?
assert fixture_blob("Binary/octocat.png").image?
assert !fixture_blob("Binary/octocat.ai").image?
assert !fixture_blob("Binary/octocat.psd").image?
end
def test_solid
assert fixture_blob("Binary/cube.stl").solid?
assert fixture_blob("Data/cube.stl").solid?
end
def test_csv
assert fixture_blob("Data/cars.csv").csv?
end
def test_pdf
assert fixture_blob("Binary/foo.pdf").pdf?
end
def test_viewable
assert fixture_blob("Data/README").viewable?
assert sample_blob("Ruby/foo.rb").viewable?
assert sample_blob("Perl/script.pl").viewable?
assert !fixture_blob("Binary/linguist.gem").viewable?
assert !fixture_blob("Binary/octocat.ai").viewable?
assert !fixture_blob("Binary/octocat.png").viewable?
end
def test_generated
assert !fixture_blob("Data/README").generated?
# Xcode project files
assert !sample_blob("XML/MainMenu.xib").generated?
assert fixture_blob("Binary/MainMenu.nib").generated?
assert !sample_blob("XML/project.pbxproj").generated?
# Gemfile.lock is NOT generated
assert !sample_blob("Gemfile.lock").generated?
# Generated .NET Docfiles
assert sample_blob("XML/net_docfile.xml").generated?
# Long line
assert !sample_blob("JavaScript/uglify.js").generated?
# Inlined JS, but mostly code
assert !sample_blob("JavaScript/json2_backbone.js").generated?
# Minified JS
assert !sample_blob("JavaScript/jquery-1.6.1.js").generated?
assert sample_blob("JavaScript/jquery-1.6.1.min.js").generated?
assert sample_blob("JavaScript/jquery-1.4.2.min.js").generated?
# CoffeeScript-generated JS
# TODO
# TypeScript-generated JS
# TODO
# Composer generated composer.lock file
assert sample_blob("JSON/composer.lock").generated?
# PEG.js-generated parsers
assert sample_blob("JavaScript/parser.js").generated?
# Generated PostScript
assert !sample_blob("PostScript/sierpinski.ps").generated?
# These examples are too basic to tell
assert !sample_blob("JavaScript/hello.js").generated?
assert sample_blob("JavaScript/intro-old.js").generated?
assert sample_blob("JavaScript/classes-old.js").generated?
assert sample_blob("JavaScript/intro.js").generated?
assert sample_blob("JavaScript/classes.js").generated?
# Protocol Buffer generated code
assert sample_blob("C++/protocol-buffer.pb.h").generated?
assert sample_blob("C++/protocol-buffer.pb.cc").generated?
assert sample_blob("Java/ProtocolBuffer.java").generated?
assert sample_blob("Python/protocol_buffer_pb2.py").generated?
assert sample_blob("Go/api.pb.go").generated?
assert sample_blob("Go/embedded.go").generated?
# Apache Thrift generated code
assert sample_blob("Python/gen-py-linguist-thrift.py").generated?
assert sample_blob("Go/gen-go-linguist-thrift.go").generated?
assert sample_blob("Java/gen-java-linguist-thrift.java").generated?
assert sample_blob("JavaScript/gen-js-linguist-thrift.js").generated?
assert sample_blob("Ruby/gen-rb-linguist-thrift.rb").generated?
assert sample_blob("Objective-C/gen-cocoa-linguist-thrift.m").generated?
# Generated JNI
assert sample_blob("C/jni_layer.h").generated?
# Minified CSS
assert !sample_blob("CSS/bootstrap.css").generated?
assert sample_blob("CSS/bootstrap.min.css").generated?
# Generated VCR
assert sample_blob("YAML/vcr_cassette.yml").generated?
# Generated by Zephir
assert sample_blob("Zephir/filenames/exception.zep.c").generated?
assert sample_blob("Zephir/filenames/exception.zep.h").generated?
assert sample_blob("Zephir/filenames/exception.zep.php").generated?
assert !sample_blob("Zephir/Router.zep").generated?
assert sample_blob("node_modules/grunt/lib/grunt.js").generated?
# Godep saved dependencies
assert sample_blob("Godeps/Godeps.json").generated?
assert sample_blob("Godeps/_workspace/src/github.com/kr/s3/sign.go").generated?
# Cython-generated C/C++
assert sample_blob("C/sgd_fast.c").generated?
assert sample_blob("C++/wrapper_inner.cpp").generated?
# Unity3D-generated metadata
assert sample_blob("Unity3D Asset/Tiles.meta").generated?
end
def test_vendored
assert !fixture_blob("Data/README").vendored?
assert !sample_blob("ext/extconf.rb").vendored?
# Dependencies
assert sample_blob("dependencies/windows/headers/GL/glext.h").vendored?
# Node dependencies
assert sample_blob("node_modules/coffee-script/lib/coffee-script.js").vendored?
# Bower Components
assert sample_blob("bower_components/custom/custom.js").vendored?
assert sample_blob("app/bower_components/custom/custom.js").vendored?
assert sample_blob("vendor/assets/bower_components/custom/custom.js").vendored?
# Go dependencies
assert !sample_blob("Godeps/Godeps.json").vendored?
assert sample_blob("Godeps/_workspace/src/github.com/kr/s3/sign.go").vendored?
# Rails vendor/
assert sample_blob("vendor/plugins/will_paginate/lib/will_paginate.rb").vendored?
# Vendor/
assert sample_blob("Vendor/my_great_file.h").vendored?
# 'thirdparty' directory
assert sample_blob("thirdparty/lib/main.c").vendored?
# 'extern(al)' directory
assert sample_blob("extern/util/__init__.py").vendored?
assert sample_blob("external/jquery.min.js").vendored?
# C deps
assert sample_blob("deps/http_parser/http_parser.c").vendored?
assert sample_blob("deps/v8/src/v8.h").vendored?
assert sample_blob("tools/something/else.c").vendored?
# Chart.js
assert sample_blob("some/vendored/path/Chart.js").vendored?
assert !sample_blob("some/vendored/path/chart.js").vendored?
# Codemirror deps
assert sample_blob("codemirror/mode/blah.js").vendored?
assert sample_blob("codemirror/5.0/mode/blah.js").vendored?
# Debian packaging
assert sample_blob("debian/cron.d").vendored?
# Erlang
assert sample_blob("rebar").vendored?
# git config files
assert_predicate fixture_blob("some/path/.gitattributes"), :vendored?
assert_predicate fixture_blob(".gitignore"), :vendored?
assert_predicate fixture_blob("special/path/.gitmodules"), :vendored?
# Minified JavaScript and CSS
assert sample_blob("foo.min.js").vendored?
assert sample_blob("foo.min.css").vendored?
assert sample_blob("foo-min.js").vendored?
assert sample_blob("foo-min.css").vendored?
assert !sample_blob("foomin.css").vendored?
assert !sample_blob("foo.min.txt").vendored?
#.osx
assert sample_blob(".osx").vendored?
# Prototype
assert !sample_blob("public/javascripts/application.js").vendored?
assert sample_blob("public/javascripts/prototype.js").vendored?
assert sample_blob("public/javascripts/effects.js").vendored?
assert sample_blob("public/javascripts/controls.js").vendored?
assert sample_blob("public/javascripts/dragdrop.js").vendored?
# jQuery
assert sample_blob("jquery.js").vendored?
assert sample_blob("public/javascripts/jquery.js").vendored?
assert sample_blob("public/javascripts/jquery.min.js").vendored?
assert sample_blob("public/javascripts/jquery-1.7.js").vendored?
assert sample_blob("public/javascripts/jquery-1.7.min.js").vendored?
assert sample_blob("public/javascripts/jquery-1.5.2.js").vendored?
assert sample_blob("public/javascripts/jquery-1.6.1.js").vendored?
assert sample_blob("public/javascripts/jquery-1.6.1.min.js").vendored?
assert sample_blob("public/javascripts/jquery-1.10.1.js").vendored?
assert sample_blob("public/javascripts/jquery-1.10.1.min.js").vendored?
assert !sample_blob("public/javascripts/jquery.github.menu.js").vendored?
# jQuery UI
assert sample_blob("themes/ui-lightness/jquery-ui.css").vendored?
assert sample_blob("themes/ui-lightness/jquery-ui-1.8.22.custom.css").vendored?
assert sample_blob("themes/ui-lightness/jquery.ui.accordion.css").vendored?
assert sample_blob("ui/i18n/jquery.ui.datepicker-ar.js").vendored?
assert sample_blob("ui/i18n/jquery-ui-i18n.js").vendored?
assert sample_blob("ui/jquery.effects.blind.js").vendored?
assert sample_blob("ui/jquery-ui-1.8.22.custom.js").vendored?
assert sample_blob("ui/jquery-ui-1.8.22.custom.min.js").vendored?
assert sample_blob("ui/jquery-ui-1.8.22.js").vendored?
assert sample_blob("ui/jquery-ui-1.8.js").vendored?
assert sample_blob("ui/jquery-ui.min.js").vendored?
assert sample_blob("ui/jquery.ui.accordion.js").vendored?
assert sample_blob("ui/minified/jquery.effects.blind.min.js").vendored?
assert sample_blob("ui/minified/jquery.ui.accordion.min.js").vendored?
# jQuery Gantt
assert sample_blob("web-app/jquery-gantt/js/jquery.fn.gantt.js").vendored?
# jQuery fancyBox
assert sample_blob("web-app/fancybox/jquery.fancybox.js").vendored?
# Fuel UX
assert sample_blob("web-app/fuelux/js/fuelux.js").vendored?
# jQuery File Upload
assert sample_blob("fileupload-9.0.0/jquery.fileupload-process.js").vendored?
# Slick
assert sample_blob("web-app/slickgrid/controls/slick.columnpicker.js").vendored?
# Leaflet plugins
assert sample_blob("leaflet-plugins/Leaflet.Coordinates-0.5.0.src.js").vendored?
assert sample_blob("leaflet-plugins/leaflet.draw-src.js").vendored?
assert sample_blob("leaflet-plugins/leaflet.spin.js").vendored?
# MooTools
assert sample_blob("public/javascripts/mootools-core-1.3.2-full-compat.js").vendored?
assert sample_blob("public/javascripts/mootools-core-1.3.2-full-compat-yc.js").vendored?
# Dojo
assert sample_blob("public/javascripts/dojo.js").vendored?
# MochiKit
assert sample_blob("public/javascripts/MochiKit.js").vendored?
# YUI
assert sample_blob("public/javascripts/yahoo-dom-event.js").vendored?
assert sample_blob("public/javascripts/yahoo-min.js").vendored?
assert sample_blob("public/javascripts/yuiloader-dom-event.js").vendored?
# WYS editors
assert sample_blob("public/javascripts/ckeditor.js").vendored?
assert sample_blob("public/javascripts/tiny_mce.js").vendored?
assert sample_blob("public/javascripts/tiny_mce_popup.js").vendored?
assert sample_blob("public/javascripts/tiny_mce_src.js").vendored?
# AngularJS
assert sample_blob("public/javascripts/angular.js").vendored?
assert sample_blob("public/javascripts/angular.min.js").vendored?
# D3.js
assert sample_blob("public/javascripts/d3.v3.js").vendored?
assert sample_blob("public/javascripts/d3.v3.min.js").vendored?
# Modernizr
assert sample_blob("public/javascripts/modernizr-2.7.1.js").vendored?
assert sample_blob("public/javascripts/modernizr.custom.01009.js").vendored?
# Fabric
assert sample_blob("fabfile.py").vendored?
# WAF
assert sample_blob("waf").vendored?
# Visual Studio IntelliSense
assert sample_blob("Scripts/jquery-1.7-vsdoc.js").vendored?
# Microsoft Ajax
assert sample_blob("Scripts/MicrosoftAjax.debug.js").vendored?
assert sample_blob("Scripts/MicrosoftAjax.js").vendored?
assert sample_blob("Scripts/MicrosoftMvcAjax.debug.js").vendored?
assert sample_blob("Scripts/MicrosoftMvcAjax.js").vendored?
assert sample_blob("Scripts/MicrosoftMvcValidation.debug.js").vendored?
assert sample_blob("Scripts/MicrosoftMvcValidation.js").vendored?
# jQuery validation plugin (MS bundles this with asp.net mvc)
assert sample_blob("Scripts/jquery.validate.js").vendored?
assert sample_blob("Scripts/jquery.validate.min.js").vendored?
assert sample_blob("Scripts/jquery.validate.unobtrusive.js").vendored?
assert sample_blob("Scripts/jquery.validate.unobtrusive.min.js").vendored?
assert sample_blob("Scripts/jquery.unobtrusive-ajax.js").vendored?
assert sample_blob("Scripts/jquery.unobtrusive-ajax.min.js").vendored?
# NuGet Packages
assert sample_blob("packages/Modernizr.2.0.6/Content/Scripts/modernizr-2.0.6-development-only.js").vendored?
# Font Awesome
assert sample_blob("some/asset/path/font-awesome.min.css").vendored?
assert sample_blob("some/asset/path/font-awesome.css").vendored?
# Normalize
assert sample_blob("some/asset/path/normalize.css").vendored?
# Carthage
assert sample_blob('Carthage/blah').vendored?
# Cocoapods
assert sample_blob('Pods/blah').vendored?
# Html5shiv
assert sample_blob("Scripts/html5shiv.js").vendored?
assert sample_blob("Scripts/html5shiv.min.js").vendored?
# Test fixtures
assert sample_blob("test/fixtures/random.rkt").vendored?
assert sample_blob("Test/fixtures/random.rkt").vendored?
assert sample_blob("tests/fixtures/random.rkt").vendored?
# Cordova/PhoneGap
assert sample_blob("cordova.js").vendored?
assert sample_blob("cordova.min.js").vendored?
assert sample_blob("cordova-2.1.0.js").vendored?
assert sample_blob("cordova-2.1.0.min.js").vendored?
# Foundation js
assert sample_blob("foundation.js").vendored?
assert sample_blob("foundation.min.js").vendored?
assert sample_blob("foundation.abide.js").vendored?
# Vagrant
assert sample_blob("Vagrantfile").vendored?
# Gradle
assert sample_blob("gradlew").vendored?
assert sample_blob("gradlew.bat").vendored?
assert sample_blob("gradle/wrapper/gradle-wrapper.properties").vendored?
assert sample_blob("subproject/gradlew").vendored?
assert sample_blob("subproject/gradlew.bat").vendored?
assert sample_blob("subproject/gradle/wrapper/gradle-wrapper.properties").vendored?
# Octicons
assert sample_blob("octicons.css").vendored?
assert sample_blob("public/octicons.min.css").vendored?
assert sample_blob("public/octicons/sprockets-octicons.scss").vendored?
# Typesafe Activator
assert sample_blob("activator").vendored?
assert sample_blob("activator.bat").vendored?
assert sample_blob("subproject/activator").vendored?
assert sample_blob("subproject/activator.bat").vendored?
assert_predicate fixture_blob(".google_apis/bar.jar"), :vendored?
assert_predicate fixture_blob("foo/.google_apis/bar.jar"), :vendored?
# Sphinx docs
assert sample_blob("docs/_build/asset.doc").vendored?
assert sample_blob("docs/theme/file.css").vendored?
# Vagrant
assert sample_blob("puphpet/file.pp").vendored?
# Fabric.io
assert sample_blob("Fabric.framework/Fabric.h").vendored?
# Crashlytics
assert sample_blob("Crashlytics.framework/Crashlytics.h").vendored?
assert sample_blob("myapp/My Template.xctemplate/___FILEBASENAME___.h").vendored?
assert sample_blob("myapp/My Images.xcassets/some/stuff.imageset/Contents.json").vendored?
assert !sample_blob("myapp/MyData.json").vendored?
end
def test_documentation
assert_predicate fixture_blob("doc/foo.html"), :documentation?
assert_predicate fixture_blob("docs/foo.html"), :documentation?
refute_predicate fixture_blob("project/doc/foo.html"), :documentation?
refute_predicate fixture_blob("project/docs/foo.html"), :documentation?
assert_predicate fixture_blob("Documentation/foo.md"), :documentation?
assert_predicate fixture_blob("documentation/foo.md"), :documentation?
assert_predicate fixture_blob("project/Documentation/foo.md"), :documentation?
assert_predicate fixture_blob("project/documentation/foo.md"), :documentation?
assert_predicate fixture_blob("javadoc/foo.html"), :documentation?
assert_predicate fixture_blob("project/javadoc/foo.html"), :documentation?
assert_predicate fixture_blob("man/foo.html"), :documentation?
refute_predicate fixture_blob("project/man/foo.html"), :documentation?
assert_predicate fixture_blob("README"), :documentation?
assert_predicate fixture_blob("README.md"), :documentation?
assert_predicate fixture_blob("README.txt"), :documentation?
assert_predicate fixture_blob("Readme"), :documentation?
assert_predicate fixture_blob("readme"), :documentation?
assert_predicate fixture_blob("foo/README"), :documentation?
assert_predicate fixture_blob("CHANGE"), :documentation?
assert_predicate fixture_blob("CHANGE.md"), :documentation?
assert_predicate fixture_blob("CHANGE.txt"), :documentation?
assert_predicate fixture_blob("foo/CHANGE"), :documentation?
assert_predicate fixture_blob("CHANGELOG"), :documentation?
assert_predicate fixture_blob("CHANGELOG.md"), :documentation?
assert_predicate fixture_blob("CHANGELOG.txt"), :documentation?
assert_predicate fixture_blob("foo/CHANGELOG"), :documentation?
assert_predicate fixture_blob("CHANGES"), :documentation?
assert_predicate fixture_blob("CHANGES.md"), :documentation?
assert_predicate fixture_blob("CHANGES.txt"), :documentation?
assert_predicate fixture_blob("foo/CHANGES"), :documentation?
assert_predicate fixture_blob("CONTRIBUTING"), :documentation?
assert_predicate fixture_blob("CONTRIBUTING.md"), :documentation?
assert_predicate fixture_blob("CONTRIBUTING.txt"), :documentation?
assert_predicate fixture_blob("foo/CONTRIBUTING"), :documentation?
assert_predicate fixture_blob("examples/some-file.pl"), :documentation?
assert_predicate fixture_blob("Examples/some-example-file.rb"), :documentation?
assert_predicate fixture_blob("LICENSE"), :documentation?
assert_predicate fixture_blob("LICENCE.md"), :documentation?
assert_predicate fixture_blob("License.txt"), :documentation?
assert_predicate fixture_blob("LICENSE.txt"), :documentation?
assert_predicate fixture_blob("foo/LICENSE"), :documentation?
assert_predicate fixture_blob("COPYING"), :documentation?
assert_predicate fixture_blob("COPYING.md"), :documentation?
assert_predicate fixture_blob("COPYING.txt"), :documentation?
assert_predicate fixture_blob("foo/COPYING"), :documentation?
assert_predicate fixture_blob("INSTALL"), :documentation?
assert_predicate fixture_blob("INSTALL.md"), :documentation?
assert_predicate fixture_blob("INSTALL.txt"), :documentation?
assert_predicate fixture_blob("foo/INSTALL"), :documentation?
refute_predicate fixture_blob("foo.md"), :documentation?
# Samples
assert sample_blob("Samples/Ruby/foo.rb").documentation?
assert_predicate fixture_blob("INSTALL.txt"), :documentation?
end
def test_language
Samples.each do |sample|
blob = sample_blob(sample[:path])
assert blob.language, "No language for #{sample[:path]}"
assert_equal sample[:language], blob.language.name, blob.name
end
# Test language detection for files which shouldn't be used as samples
root = File.expand_path('../fixtures', __FILE__)
Dir.entries(root).each do |language|
next if language == '.' || language == '..' || language == 'Binary' ||
File.basename(language) == 'ace_modes.json'
# Each directory contains test files of a language
dirname = File.join(root, language)
Dir.entries(dirname).each do |filename|
# By default blob search the file in the samples;
# thus, we need to give it the absolute path
filepath = File.join(dirname, filename)
next unless File.file?(filepath)
blob = fixture_blob(filepath)
if language == 'Data'
assert blob.language.nil?, "A language was found for #{filepath}"
elsif language == 'Generated'
assert blob.generated?, "#{filepath} is not a generated file"
else
assert blob.language, "No language for #{filepath}"
assert_equal language, blob.language.name, blob.name
end
end
end
end
def test_minified_files_not_safe_to_highlight
assert !sample_blob("JavaScript/jquery-1.6.1.min.js").safe_to_colorize?
end
def test_empty
blob = Struct.new(:data) { include Linguist::BlobHelper }
assert blob.new("").empty?
assert blob.new(nil).empty?
refute blob.new(" ").empty?
refute blob.new("nope").empty?
end
def test_include_in_language_stats
vendored = sample_blob("bower_components/custom/custom.js")
assert_predicate vendored, :vendored?
refute_predicate vendored, :include_in_language_stats?
documentation = fixture_blob("README")
assert_predicate documentation, :documentation?
refute_predicate documentation, :include_in_language_stats?
generated = sample_blob("CSS/bootstrap.min.css")
assert_predicate generated, :generated?
refute_predicate generated, :include_in_language_stats?
data = sample_blob("Ant Build System/filenames/ant.xml")
assert_equal :data, data.language.type
refute_predicate data, :include_in_language_stats?
prose = sample_blob("Markdown/tender.md")
assert_equal :prose, prose.language.type
refute_predicate prose, :include_in_language_stats?
included = sample_blob("HTML/pages.html")
assert_predicate included, :include_in_language_stats?
end
end
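Note how test_empty above builds a throwaway blob class: Linguist::BlobHelper is a mixin, so any object exposing #data (and, for most predicates, #name) behaves like a blob. A minimal sketch of that pattern:

    require "linguist"

    MemoryBlob = Struct.new(:name, :data) do
      include Linguist::BlobHelper
    end

    MemoryBlob.new("empty.rb", "").empty?           # => true
    MemoryBlob.new("hello.rb", "puts 'hi'").empty?  # => false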

View File

@@ -69,5 +69,11 @@ class TestGenerated < Minitest::Test
# Specflow
generated_fixture_without_loading_data("Features/BindingCulture.feature.cs")
# JFlex
generated_sample_loading_data("Java/JFlexLexer.java")
# GrammarKit
generated_sample_loading_data("Java/GrammarKit.java")
end
end
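The additions pin JFlex- and GrammarKit-generated Java files as detectable. A hedged sketch of first-line marker matching; the banner strings below are assumptions for illustration, not necessarily the exact text Linguist checks:

    # Hypothetical generated-file banners; adjust to the generators' real output.
    def generated_by_grammar_tool?(extname, data)
      return false unless extname == ".java"
      first_line = data.lines.first.to_s
      first_line.include?("generated by JFlex") ||
        first_line.include?("This is a generated file. Not intended for manual editing.")
    end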

View File

@@ -9,7 +9,6 @@ class TestGrammars < Minitest::Test
# This grammar has a nonstandard but acceptable license.
"vendor/grammars/gap-tmbundle",
"vendor/grammars/factor",
# These grammars have no license but have been grandfathered in. New grammars
# must have a license that allows redistribution.
@@ -81,7 +80,7 @@ class TestGrammars < Minitest::Test
end
def test_submodules_have_recognized_licenses
unrecognized = submodule_licenses.select { |k,v| v.nil? && Licensee::Project.new(k).license_file }
unrecognized = submodule_licenses.select { |k,v| v.nil? && Licensee::FSProject.new(k).license_file }
unrecognized.reject! { |k,v| PROJECT_WHITELIST.include?(k) }
message = "The following submodules have unrecognized licenses:\n* #{unrecognized.keys.join("\n* ")}\n"
message << "Please ensure that the project's LICENSE file contains the full text of the license."
@@ -114,6 +113,20 @@ class TestGrammars < Minitest::Test
assert_equal [], licensed, msg
end
def test_submodules_use_https_links
File.open(".gitmodules", "r") do |fh|
ssh_submodules = []
fh.each_line do |line|
if matches = line.match(/url = (git@.*)/)
submodule_link = matches.captures[0]
ssh_submodules.push(submodule_link)
end
end
msg = "The following submodules don't have an HTTPS link:\n* #{ssh_submodules.join("\n* ")}"
assert_equal [], ssh_submodules, msg
end
end
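# Converting an offending SSH remote to HTTPS is a one-line rewrite; a
# hypothetical example, not part of this diff:
#
#   ssh = "git@github.com:atom/language-shellscript.git"
#   ssh.sub(/\Agit@([^:]+):/, 'https://\1/')
#   # => "https://github.com/atom/language-shellscript.git"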
private
def submodule_paths
@@ -132,7 +145,7 @@ class TestGrammars < Minitest::Test
# Given the path to a submodule, return its SPDX-compliant license key
def submodule_license(submodule)
# Prefer Licensee to detect a submodule's license
project = Licensee::Project.new(submodule)
project = Licensee::FSProject.new(submodule)
return project.license.key if project.license
# We know a license file exists, but Licensee wasn't able to detect the license,

View File

@@ -155,6 +155,14 @@ class TestHeuristics < Minitest::Test
})
end
# Candidate languages = ["Pod", "Perl"]
def test_pod_by_heuristics
assert_heuristics({
"Perl" => all_fixtures("Perl", "*.pod"),
"Pod" => all_fixtures("Pod", "*.pod")
})
end
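# Pod and Perl share the .pod extension: pure Pod documents are built
# from =directives at line starts, while the Perl fixtures are not. One
# plausible disambiguation rule, shown as a sketch (the real DSL call
# lives in lib/linguist/heuristics.rb):
#
#   disambiguate ".pod" do |data|
#     /^=\w+\b/.match(data) ? Language["Pod"] : Language["Perl"]
#   end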
# Candidate languages = ["IDL", "Prolog", "QMake", "INI"]
def test_pro_by_heuristics
assert_heuristics({

View File

@@ -57,6 +57,7 @@ class TestLanguage < Minitest::Test
assert_equal Language['Shell'], Language.find_by_alias('sh')
assert_equal Language['Shell'], Language.find_by_alias('shell')
assert_equal Language['Shell'], Language.find_by_alias('zsh')
assert_equal Language['SuperCollider'], Language.find_by_alias('supercollider')
assert_equal Language['TeX'], Language.find_by_alias('tex')
assert_equal Language['TypeScript'], Language.find_by_alias('ts')
assert_equal Language['VimL'], Language.find_by_alias('vim')
@@ -119,6 +120,7 @@ class TestLanguage < Minitest::Test
assert_equal 'vim', Language['VimL'].search_term
assert_equal 'jsp', Language['Java Server Pages'].search_term
assert_equal 'rst', Language['reStructuredText'].search_term
assert_equal 'supercollider', Language['SuperCollider'].search_term
end
def test_popular
@@ -138,6 +140,7 @@ class TestLanguage < Minitest::Test
assert_equal :programming, Language['Ruby'].type
assert_equal :programming, Language['TypeScript'].type
assert_equal :programming, Language['Makefile'].type
assert_equal :programming, Language['SuperCollider'].type
end
def test_markup
@@ -227,7 +230,8 @@ class TestLanguage < Minitest::Test
"python" => "Python",
"python2" => "Python",
"python3" => "Python",
"sbcl" => "Common Lisp"
"sbcl" => "Common Lisp",
"sclang" => "SuperCollider"
}.each do |interpreter, language|
assert_equal [Language[language]], Language.find_by_interpreter(interpreter)
end
@@ -339,6 +343,7 @@ class TestLanguage < Minitest::Test
assert Language['Perl'].extensions.include?('.pl')
assert Language['Python'].extensions.include?('.py')
assert Language['Ruby'].extensions.include?('.rb')
assert Language['SuperCollider'].extensions.include?('.scd')
end
def test_primary_extension
@@ -349,6 +354,7 @@ class TestLanguage < Minitest::Test
assert_equal '.coffee', Language['CoffeeScript'].primary_extension
assert_equal '.t', Language['Turing'].primary_extension
assert_equal '.ts', Language['TypeScript'].primary_extension
assert_equal '.sc', Language['SuperCollider'].primary_extension
end
def test_eql
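# Every SuperCollider assertion in this file traces back to one new
# languages.yml entry wiring up the alias, the sclang interpreter, and
# the .sc/.scd extensions. A usage sketch of the resulting API, per the
# assertions above:
#
#   sc = Language["SuperCollider"]
#   Language.find_by_alias("supercollider")  # => sc
#   Language.find_by_interpreter("sclang")   # => [sc]
#   sc.primary_extension                     # => ".sc"
#   sc.extensions.include?(".scd")           # => true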

View File

@@ -11,6 +11,12 @@ class TestModelines < Minitest::Test
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby")
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby2")
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby3")
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby4")
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby5")
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby6")
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby7")
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby8")
assert_modeline Language["Ruby"], fixture_blob("Data/Modelines/ruby9")
assert_modeline Language["C++"], fixture_blob("Data/Modelines/seeplusplus")
assert_modeline Language["C++"], fixture_blob("Data/Modelines/seeplusplusEmacs1")
assert_modeline Language["C++"], fixture_blob("Data/Modelines/seeplusplusEmacs2")
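# The new ruby4..ruby9 fixtures broaden vim-modeline coverage. Their
# exact contents are not shown in this diff, but these are hypothetical
# modelines of the general shape the detector must accept, plus a
# deliberately simplified regex (the real one is more permissive):
#
#   # vim: set filetype=ruby:
#   # vim: se ft=ruby :
#   /* vim: set syntax=ruby: */
#
#   MODELINE = /vim:\s*(?:se(?:t)?\s+)?.*?(?:ft|filetype|syntax)=(\w+)/
#   "# vim: set ft=ruby:"[MODELINE, 1]  # => "ruby"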

View File

@@ -9,7 +9,7 @@ class TestPedantic < Minitest::Test
assert_sorted LANGUAGES.keys
end
def test_extensions_are_sorted
def test_nonprimary_extensions_are_sorted
LANGUAGES.each do |name, language|
extensions = language['extensions']
assert_sorted extensions[1..-1].map(&:downcase) if extensions && extensions.size > 1
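# The rename says what the assertion actually checks: the primary
# extension stays first in languages.yml, so only extensions[1..-1] must
# be alphabetized. For example, with CoffeeScript-style extensions:
#
#   extensions = [".coffee", "._coffee", ".cjsx", ".iced"]
#   extensions[1..-1] == extensions[1..-1].sort  # => true
#   extensions == extensions.sort                # => false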

Some files were not shown because too many files have changed in this diff