Mirror of https://github.com/KevinMidboe/linguist.git, synced 2025-10-29 17:50:22 +00:00
Merge branch 'master' into 2703-local
23
.gitmodules
vendored
@@ -85,6 +85,9 @@
[submodule "vendor/grammars/language-shellscript"]
path = vendor/grammars/language-shellscript
url = https://github.com/atom/language-shellscript
[submodule "vendor/grammars/language-supercollider"]
path = vendor/grammars/language-supercollider
url = https://github.com/supercollider/language-supercollider
[submodule "vendor/grammars/language-yaml"]
path = vendor/grammars/language-yaml
url = https://github.com/atom/language-yaml
@@ -655,7 +658,7 @@
url = https://github.com/rpavlick/language-ncl.git
[submodule "vendor/grammars/atom-language-purescript"]
path = vendor/grammars/atom-language-purescript
url = https://github.com/freebroccolo/atom-language-purescript
url = https://github.com/purescript-contrib/atom-language-purescript
[submodule "vendor/grammars/vue-syntax-highlight"]
path = vendor/grammars/vue-syntax-highlight
url = https://github.com/vuejs/vue-syntax-highlight
@@ -671,12 +674,18 @@
[submodule "vendor/grammars/sublime-typescript"]
path = vendor/grammars/sublime-typescript
url = https://github.com/Microsoft/TypeScript-Sublime-Plugin
[submodule "vendor/grammars/sublime-pony"]
path = vendor/grammars/sublime-pony
url = https://github.com/CausalityLtd/sublime-pony
[submodule "vendor/grammars/X10"]
path = vendor/grammars/X10
url = git@github.com:x10-lang/x10-highlighting.git
[submodule "vendor/grammars/language-babel"]
path = vendor/grammars/language-babel
url = https://github.com/gandm/language-babel
[submodule "vendor/grammars/UrWeb-Language-Definition"]
path = vendor/grammars/UrWeb-Language-Definition
url = https://github.com/gwalborn/UrWeb-Language-Definition.git
[submodule "vendor/grammars/Stata.tmbundle"]
path = vendor/grammars/Stata.tmbundle
url = https://github.com/pschumm/Stata.tmbundle
@@ -686,3 +695,15 @@
[submodule "vendor/grammars/MagicPython"]
path = vendor/grammars/MagicPython
url = git@github.com:MagicStack/MagicPython.git
[submodule "vendor/grammars/language-click"]
path = vendor/grammars/language-click
url = https://github.com/stenverbois/language-click.git
[submodule "vendor/grammars/language-maxscript"]
path = vendor/grammars/language-maxscript
url = https://github.com/Alhadis/language-maxscript
[submodule "vendor/grammars/language-renpy"]
path = vendor/grammars/language-renpy
url = https://github.com/williamd1k0/language-renpy.git
[submodule "vendor/grammars/atom-language-stan"]
path = vendor/grammars/atom-language-stan
url = git@github.com:jrnold/atom-language-stan.git
@@ -1,3 +1,10 @@
language: ruby
sudo: false
addons:
apt:
packages:
- libicu-dev
- libicu48
before_install: script/travis/before_install
rvm:
- 2.0.0
2
LICENSE
@@ -1,4 +1,4 @@
Copyright (c) 2011-2015 GitHub, Inc.
Copyright (c) 2011-2016 GitHub, Inc.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
@@ -33,9 +33,9 @@ $ cat .gitattributes
*.rb linguist-language=Java
```

Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository. Vendored files are also hidden by default in diffs on github.com.
Checking code you didn't write, such as JavaScript libraries, into your git repo is a common practice, but this often inflates your project's language stats and may even cause your project to be labeled as another language. By default, Linguist treats all of the paths defined in [lib/linguist/vendor.yml](https://github.com/github/linguist/blob/master/lib/linguist/vendor.yml) as vendored and therefore doesn't include them in the language statistics for a repository.

Use the `linguist-vendored` attribute to vendor or un-vendor paths. Please note, overriding the vendored (or un-vendored) status of a file only affects the language statistics for the repository and not the behavior in diffs on github.com.
Use the `linguist-vendored` attribute to vendor or un-vendor paths.

```
$ cat .gitattributes
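The hunk above cuts off at the start of the README's `.gitattributes` example. For reference, a minimal illustration of the `linguist-vendored` override the paragraph describes (the paths below are illustrative, not taken from this commit):

```
# mark a checked-in third-party directory as vendored (illustrative path)
third-party/* linguist-vendored
# count a file that vendor.yml would otherwise treat as vendored (illustrative path)
jquery-plugin.js linguist-vendored=false
```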
@@ -50,6 +50,7 @@ class GitLinguist

def clear_language_stats
File.unlink(cache_file)
rescue Errno::ENOENT
end

def disable_language_stats
@@ -24,6 +24,6 @@ Gem::Specification.new do |s|
s.add_development_dependency 'rake'
s.add_development_dependency 'yajl-ruby'
s.add_development_dependency 'color-proximity', '~> 0.2.1'
s.add_development_dependency 'licensee', '~> 4.7.4'
s.add_development_dependency 'licensee', '6.0.0b1'

end
17
grammars.yml
@@ -142,6 +142,8 @@ vendor/grammars/TXL/:
- source.txl
vendor/grammars/Textmate-Gosu-Bundle:
- source.gosu.2
vendor/grammars/UrWeb-Language-Definition:
- source.ur
vendor/grammars/VBDotNetSyntax:
- source.vbnet
vendor/grammars/Vala-TMBundle:
@@ -181,9 +183,12 @@ vendor/grammars/assembly.tmbundle:
vendor/grammars/atom-fsharp/:
- source.fsharp
- source.fsharp.fsi
- source.fsharp.fsl
- source.fsharp.fsx
vendor/grammars/atom-language-purescript/:
- source.purescript
vendor/grammars/atom-language-stan/:
- source.stan
vendor/grammars/atom-salt:
- source.python.salt
- source.yaml.salt
@@ -301,8 +306,8 @@ vendor/grammars/io.tmbundle:
vendor/grammars/ioke-outdated:
- source.ioke
vendor/grammars/jade-tmbundle:
- source.jade
- source.pyjade
- text.jade
vendor/grammars/jasmin-sublime:
- source.jasmin
vendor/grammars/java.tmbundle:
@@ -321,6 +326,8 @@ vendor/grammars/kotlin-sublime-package:
vendor/grammars/language-babel/:
- source.js.jsx
- source.regexp.babel
vendor/grammars/language-click/:
- source.click
vendor/grammars/language-clojure:
- source.clojure
vendor/grammars/language-coffee-script:
@@ -344,6 +351,8 @@ vendor/grammars/language-javascript:
vendor/grammars/language-jsoniq/:
- source.jq
- source.xq
vendor/grammars/language-maxscript:
- source.maxscript
vendor/grammars/language-ncl:
- source.ncl
vendor/grammars/MagicPython:
@@ -352,9 +361,13 @@ vendor/grammars/language-python:
- source.regexp.python
- text.python.console
- text.python.traceback
vendor/grammars/language-renpy:
- source.renpy
vendor/grammars/language-shellscript:
- source.shell
- text.shell-session
vendor/grammars/language-supercollider:
- source.supercollider
vendor/grammars/language-xbase:
- source.harbour
vendor/grammars/language-yaml:
@@ -515,6 +528,8 @@ vendor/grammars/sublime-nix:
vendor/grammars/sublime-opal/:
- source.opal
- source.opalsysdefs
vendor/grammars/sublime-pony:
- source.pony
vendor/grammars/sublime-robot-plugin:
- text.robot
vendor/grammars/sublime-rust:
@@ -13,8 +13,8 @@ class << Linguist
def instrument(*args, &bk)
if instrumenter
instrumenter.instrument(*args, &bk)
else
yield if block_given?
elsif block_given?
yield
end
end
end
@@ -12,7 +12,7 @@ module Linguist
# path - A path String (does not necessarily exists on the file system).
# content - Content of the file.
#
# Returns a FileBlob.
# Returns a Blob.
def initialize(path, content)
@path = path
@content = content
@@ -58,7 +58,7 @@ module Linguist

# Public: Return an array of the file extensions
#
# >> Linguist::FileBlob.new("app/views/things/index.html.erb").extensions
# >> Linguist::Blob.new("app/views/things/index.html.erb").extensions
# => [".html.erb", ".erb"]
#
# Returns an Array
@@ -131,7 +131,7 @@ module Linguist
disambiguate ".for", ".f" do |data|
if /^: /.match(data)
Language["Forth"]
elsif /^([c*][^a-z]| (subroutine|program)\s|\s*!)/i.match(data)
elsif /^([c*][^abd-z]| (subroutine|program|end)\s|\s*!)/i.match(data)
Language["FORTRAN"]
end
end
@@ -238,8 +238,10 @@ module Linguist
disambiguate ".ms" do |data|
if /^[.'][a-z][a-z](\s|$)/i.match(data)
Language["Groff"]
elsif /((^|\s)move?[. ])|\.(include|globa?l)\s/.match(data)
elsif /(?<!\S)\.(include|globa?l)\s/.match(data) || /(?<!\/\*)(\A|\n)\s*\.[A-Za-z]/.match(data.gsub(/"([^\\"]|\\.)*"|'([^\\']|\\.)*'|\\\s*(?:--.*)?\n/, ""))
Language["GAS"]
else
Language["MAXScript"]
end
end
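As a side note, a standalone Ruby sketch of how the updated `.ms` heuristic behaves on toy inputs (only the first of the two GAS patterns is shown, for brevity; this is not Linguist's own test code):

```
# Same regexes as the heuristic above, applied to made-up snippets.
GROFF = /^[.'][a-z][a-z](\s|$)/i
GAS   = /(?<!\S)\.(include|globa?l)\s/

puts GROFF.match?(".TH GREP 1")          # true  -> classified as Groff (checked first)
puts GAS.match?(".include \"boot.s\"\n")  # true  -> classified as GAS
puts GAS.match?("fn st = 3dsmax time")   # false -> falls through to MAXScript
```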
@@ -142,7 +142,7 @@ Agda:
|
||||
|
||||
Alloy:
|
||||
type: programming # 'modeling' would be more appropriate
|
||||
color: "#cc5c24"
|
||||
color: "#64C800"
|
||||
extensions:
|
||||
- .als
|
||||
ace_mode: text
|
||||
@@ -393,6 +393,7 @@ C#:
|
||||
- csharp
|
||||
extensions:
|
||||
- .cs
|
||||
- .cake
|
||||
- .cshtml
|
||||
- .csx
|
||||
|
||||
@@ -540,6 +541,14 @@ Clean:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
Click:
|
||||
type: programming
|
||||
color: "#E4E6F3"
|
||||
extensions:
|
||||
- .click
|
||||
tm_scope: source.click
|
||||
ace_mode: text
|
||||
|
||||
Clojure:
|
||||
type: programming
|
||||
ace_mode: clojure
|
||||
@@ -568,6 +577,7 @@ CoffeeScript:
|
||||
extensions:
|
||||
- .coffee
|
||||
- ._coffee
|
||||
- .cake
|
||||
- .cjsx
|
||||
- .cson
|
||||
- .iced
|
||||
@@ -885,6 +895,8 @@ Elixir:
|
||||
ace_mode: elixir
|
||||
filenames:
|
||||
- mix.lock
|
||||
interpreters:
|
||||
- elixir
|
||||
|
||||
Elm:
|
||||
type: programming
|
||||
@@ -1012,6 +1024,7 @@ Formatted:
|
||||
type: data
|
||||
extensions:
|
||||
- .for
|
||||
- .eam.fs
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
@@ -1611,7 +1624,7 @@ Jade:
|
||||
type: markup
|
||||
extensions:
|
||||
- .jade
|
||||
tm_scope: source.jade
|
||||
tm_scope: text.jade
|
||||
ace_mode: jade
|
||||
|
||||
Jasmin:
|
||||
@@ -1656,6 +1669,7 @@ JavaScript:
|
||||
- .gs
|
||||
- .jake
|
||||
- .jsb
|
||||
- .jscad
|
||||
- .jsfl
|
||||
- .jsm
|
||||
- .jss
|
||||
@@ -1690,6 +1704,18 @@ Julia:
|
||||
color: "#a270ba"
|
||||
ace_mode: julia
|
||||
|
||||
Jupyter Notebook:
|
||||
type: markup
|
||||
ace_mode: json
|
||||
tm_scope: source.json
|
||||
color: "#DA5B0B"
|
||||
extensions:
|
||||
- .ipynb
|
||||
filenames:
|
||||
- Notebook
|
||||
aliases:
|
||||
- IPython Notebook
|
||||
|
||||
KRL:
|
||||
type: programming
|
||||
color: "#28431f"
|
||||
@@ -1952,6 +1978,15 @@ M:
|
||||
tm_scope: source.lisp
|
||||
ace_mode: lisp
|
||||
|
||||
MAXScript:
|
||||
type: programming
|
||||
color: "#00a6a6"
|
||||
extensions:
|
||||
- .ms
|
||||
- .mcr
|
||||
tm_scope: source.maxscript
|
||||
ace_mode: text
|
||||
|
||||
MTML:
|
||||
type: markup
|
||||
color: "#b7e1f4"
|
||||
@@ -2026,6 +2061,7 @@ Mathematica:
|
||||
- .cdf
|
||||
- .m
|
||||
- .ma
|
||||
- .mt
|
||||
- .nb
|
||||
- .nbp
|
||||
- .wl
|
||||
@@ -2037,6 +2073,8 @@ Mathematica:
|
||||
Matlab:
|
||||
type: programming
|
||||
color: "#bb92ac"
|
||||
aliases:
|
||||
- octave
|
||||
extensions:
|
||||
- .matlab
|
||||
- .m
|
||||
@@ -2605,6 +2643,13 @@ Perl6:
|
||||
tm_scope: source.perl.6
|
||||
ace_mode: perl
|
||||
|
||||
Pickle:
|
||||
type: data
|
||||
extensions:
|
||||
- .pkl
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
PicoLisp:
|
||||
type: programming
|
||||
extensions:
|
||||
@@ -2649,6 +2694,13 @@ PogoScript:
|
||||
tm_scope: source.pogoscript
|
||||
ace_mode: text
|
||||
|
||||
Pony:
|
||||
type: programming
|
||||
extensions:
|
||||
- .pony
|
||||
tm_scope: source.pony
|
||||
ace_mode: text
|
||||
|
||||
PostScript:
|
||||
type: markup
|
||||
extensions:
|
||||
@@ -2932,6 +2984,17 @@ Redcode:
|
||||
tm_scope: none
|
||||
ace_mode: text
|
||||
|
||||
Ren'Py:
|
||||
type: programming
|
||||
group: Python
|
||||
aliases:
|
||||
- renpy
|
||||
color: "#ff7f7f"
|
||||
extensions:
|
||||
- .rpy
|
||||
tm_scope: source.renpy
|
||||
ace_mode: python
|
||||
|
||||
RenderScript:
|
||||
type: programming
|
||||
extensions:
|
||||
@@ -3140,7 +3203,7 @@ Sass:
|
||||
Scala:
|
||||
type: programming
|
||||
ace_mode: scala
|
||||
color: "#7dd3b0"
|
||||
color: "#DC322F"
|
||||
extensions:
|
||||
- .scala
|
||||
- .sbt
|
||||
@@ -3292,6 +3355,14 @@ Squirrel:
|
||||
tm_scope: source.c++
|
||||
ace_mode: c_cpp
|
||||
|
||||
Stan:
|
||||
type: programming
|
||||
color: "#b2011d"
|
||||
extensions:
|
||||
- .stan
|
||||
ace_mode: text
|
||||
tm_scope: source.stan
|
||||
|
||||
Standard ML:
|
||||
type: programming
|
||||
color: "#dc566d"
|
||||
@@ -3329,9 +3400,12 @@ SuperCollider:
|
||||
type: programming
|
||||
color: "#46390b"
|
||||
extensions:
|
||||
- .scd
|
||||
- .sc
|
||||
tm_scope: none
|
||||
- .scd
|
||||
interpreters:
|
||||
- sclang
|
||||
- scsynth
|
||||
tm_scope: source.supercollider
|
||||
ace_mode: text
|
||||
|
||||
Swift:
|
||||
@@ -3508,6 +3582,17 @@ UnrealScript:
|
||||
tm_scope: source.java
|
||||
ace_mode: java
|
||||
|
||||
UrWeb:
|
||||
type: programming
|
||||
aliases:
|
||||
- Ur/Web
|
||||
- Ur
|
||||
extensions:
|
||||
- .ur
|
||||
- .urs
|
||||
tm_scope: source.ur
|
||||
ace_mode: text
|
||||
|
||||
VCL:
|
||||
group: Perl
|
||||
type: programming
|
||||
@@ -3645,6 +3730,7 @@ XML:
|
||||
- .ccxml
|
||||
- .clixml
|
||||
- .cproject
|
||||
- .csl
|
||||
- .csproj
|
||||
- .ct
|
||||
- .dita
|
||||
|
||||
@@ -86,8 +86,8 @@ module Linguist
protected

# Returns true if the attribute is present and not the string "false".
def boolean_attribute(attr)
attr != "false"
def boolean_attribute(attribute)
attribute != "false"
end

def load_blob!
@@ -86,13 +86,13 @@ module Linguist
if s.peek(1) == "\""
s.getch
else
s.skip_until(/[^\\]"/)
s.skip_until(/(?<!\\)"/)
end
elsif s.scan(/'/)
if s.peek(1) == "'"
s.getch
else
s.skip_until(/[^\\]'/)
s.skip_until(/(?<!\\)'/)
end

# Skip number literals
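For context, a minimal standalone Ruby sketch (not Linguist's tokenizer itself) of what the new lookbehind buys: `skip_until(/(?<!\\)"/)` stops at the first unescaped quote without also requiring, and consuming, a preceding non-backslash character as `[^\\]"` did.

```
require "strscan"

s = StringScanner.new('"say \"hi\"" rest')
s.scan(/"/)                 # consume the opening quote
s.skip_until(/(?<!\\)"/)    # skip the string body up to the first unescaped quote
puts s.rest.inspect         # => " rest"
```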
@@ -95,7 +95,7 @@
- jquery.fn.gantt.js

# jQuery fancyBox
- jquery.fancybox.js
- jquery.fancybox.(js|css)

# Fuel UX
- fuelux.js
@@ -1,3 +1,3 @@
module Linguist
VERSION = "4.7.0"
VERSION = "4.7.3"
end
86
samples/C#/build.cake
Normal file
@@ -0,0 +1,86 @@
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// ARGUMENTS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var target = Argument<string>("target", "Default");
|
||||
var configuration = Argument<string>("configuration", "Release");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// GLOBAL VARIABLES
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
var solutions = GetFiles("./**/*.sln");
|
||||
var solutionPaths = solutions.Select(solution => solution.GetDirectory());
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// SETUP / TEARDOWN
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Setup(() =>
|
||||
{
|
||||
// Executed BEFORE the first task.
|
||||
Information("Running tasks...");
|
||||
});
|
||||
|
||||
Teardown(() =>
|
||||
{
|
||||
// Executed AFTER the last task.
|
||||
Information("Finished running tasks.");
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TASK DEFINITIONS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Clean")
|
||||
.Does(() =>
|
||||
{
|
||||
// Clean solution directories.
|
||||
foreach(var path in solutionPaths)
|
||||
{
|
||||
Information("Cleaning {0}", path);
|
||||
CleanDirectories(path + "/**/bin/" + configuration);
|
||||
CleanDirectories(path + "/**/obj/" + configuration);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Restore all NuGet packages.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Restoring {0}...", solution);
|
||||
NuGetRestore(solution);
|
||||
}
|
||||
});
|
||||
|
||||
Task("Build")
|
||||
.IsDependentOn("Clean")
|
||||
.IsDependentOn("Restore")
|
||||
.Does(() =>
|
||||
{
|
||||
// Build all solutions.
|
||||
foreach(var solution in solutions)
|
||||
{
|
||||
Information("Building {0}", solution);
|
||||
MSBuild(solution, settings =>
|
||||
settings.SetPlatformTarget(PlatformTarget.MSIL)
|
||||
.WithProperty("TreatWarningsAsErrors","true")
|
||||
.WithTarget("Build")
|
||||
.SetConfiguration(configuration));
|
||||
}
|
||||
});
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// TARGETS
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
Task("Default")
|
||||
.IsDependentOn("Build");
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
// EXECUTION
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
RunTarget(target);
|
||||
133
samples/Click/sr2.click
Normal file
@@ -0,0 +1,133 @@
|
||||
rates :: AvailableRates
|
||||
elementclass sr2 {
|
||||
$sr2_ip, $sr2_nm, $wireless_mac, $gateway, $probes|
|
||||
|
||||
|
||||
arp :: ARPTable();
|
||||
lt :: LinkTable(IP $sr2_ip);
|
||||
|
||||
|
||||
gw :: SR2GatewaySelector(ETHTYPE 0x062c,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
PERIOD 15,
|
||||
GW $gateway);
|
||||
|
||||
|
||||
gw -> SR2SetChecksum -> [0] output;
|
||||
|
||||
set_gw :: SR2SetGateway(SEL gw);
|
||||
|
||||
|
||||
es :: SR2ETTStat(ETHTYPE 0x0641,
|
||||
ETH $wireless_mac,
|
||||
IP $sr2_ip,
|
||||
PERIOD 30000,
|
||||
TAU 300000,
|
||||
ARP arp,
|
||||
PROBES $probes,
|
||||
ETT metric,
|
||||
RT rates);
|
||||
|
||||
|
||||
metric :: SR2ETTMetric(LT lt);
|
||||
|
||||
|
||||
forwarder :: SR2Forwarder(ETHTYPE 0x0643,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
ARP arp,
|
||||
LT lt);
|
||||
|
||||
|
||||
querier :: SR2Querier(ETH $wireless_mac,
|
||||
SR forwarder,
|
||||
LT lt,
|
||||
ROUTE_DAMPENING true,
|
||||
TIME_BEFORE_SWITCH 5,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_forwarder :: SR2MetricFlood(ETHTYPE 0x0644,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG false);
|
||||
|
||||
query_responder :: SR2QueryResponder(ETHTYPE 0x0645,
|
||||
IP $sr2_ip,
|
||||
ETH $wireless_mac,
|
||||
LT lt,
|
||||
ARP arp,
|
||||
DEBUG true);
|
||||
|
||||
|
||||
query_responder -> SR2SetChecksum -> [0] output;
|
||||
query_forwarder -> SR2SetChecksum -> SR2Print(forwarding) -> [0] output;
|
||||
query_forwarder [1] -> query_responder;
|
||||
|
||||
data_ck :: SR2SetChecksum()
|
||||
|
||||
input [1]
|
||||
-> host_cl :: IPClassifier(dst net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> querier
|
||||
-> data_ck;
|
||||
|
||||
|
||||
host_cl [1] -> [0] set_gw [0] -> querier;
|
||||
|
||||
forwarder[0]
|
||||
-> dt ::DecIPTTL
|
||||
-> data_ck
|
||||
-> [2] output;
|
||||
|
||||
|
||||
dt[1]
|
||||
-> Print(ttl-error)
|
||||
-> ICMPError($sr2_ip, timeexceeded, 0)
|
||||
-> querier;
|
||||
|
||||
|
||||
// queries
|
||||
querier [1] -> [1] query_forwarder;
|
||||
es -> SetTimestamp() -> [1] output;
|
||||
|
||||
|
||||
forwarder[1] //ip packets to me
|
||||
-> SR2StripHeader()
|
||||
-> CheckIPHeader()
|
||||
-> from_gw_cl :: IPClassifier(src net $sr2_ip mask $sr2_nm,
|
||||
-)
|
||||
-> [3] output;
|
||||
|
||||
from_gw_cl [1] -> [1] set_gw [1] -> [3] output;
|
||||
|
||||
input [0]
|
||||
-> ncl :: Classifier(
|
||||
12/0643 , //sr2_forwarder
|
||||
12/0644 , //sr2
|
||||
12/0645 , //replies
|
||||
12/0641 , //sr2_es
|
||||
12/062c , //sr2_gw
|
||||
);
|
||||
|
||||
|
||||
ncl[0] -> SR2CheckHeader() -> [0] forwarder;
|
||||
ncl[1] -> SR2CheckHeader() -> PrintSR(query) -> query_forwarder
|
||||
ncl[2] -> SR2CheckHeader() -> query_responder;
|
||||
ncl[3] -> es;
|
||||
ncl[4] -> SR2CheckHeader() -> gw;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
Idle -> s :: sr2(2.0.0.1, 255.0.0.0, 00:00:00:00:00:01, false, "12 60 12 1500") -> Discard;
|
||||
Idle -> [1] s;
|
||||
s[1] -> Discard;
|
||||
s[2] -> Discard;
|
||||
s[3] -> Discard;
|
||||
142
samples/Click/thomer-nat.click
Normal file
@@ -0,0 +1,142 @@
|
||||
// This Click configuration implements a firewall and NAT, roughly based on the
|
||||
// mazu-nat.click example.
|
||||
//
|
||||
// This example assumes there is one interface that is IP-aliased. In this
|
||||
// example, eth0 and eth0:0 have IP addresses 66.68.65.90 and 192.168.1.1,
|
||||
// respectively. There is a local network, 192.168.1.0/24, and an upstream
|
||||
// gateway, 66.58.65.89. Traffic from the local network is NATed.
|
||||
//
|
||||
// Connections can be initiated from the NAT box itself, also.
|
||||
//
|
||||
// For bugs, suggestions, and, corrections, please email me.
|
||||
//
|
||||
// Author: Thomer M. Gil (click@thomer.com)
|
||||
|
||||
AddressInfo(
|
||||
eth0-in 192.168.1.1 192.168.1.0/24 00:0d:87:9d:1c:e9,
|
||||
eth0-ex 66.58.65.90 00:0d:87:9d:1c:e9,
|
||||
gw-addr 66.58.65.89 00:20:6f:14:54:c2
|
||||
);
|
||||
|
||||
|
||||
elementclass SniffGatewayDevice {
|
||||
$device |
|
||||
from :: FromDevice($device)
|
||||
-> t1 :: Tee
|
||||
-> output;
|
||||
input -> q :: Queue(1024)
|
||||
-> t2 :: PullTee
|
||||
-> to :: ToDevice($device);
|
||||
t1[1] -> ToHostSniffers;
|
||||
t2[1] -> ToHostSniffers($device);
|
||||
ScheduleInfo(from .1, to 1);
|
||||
}
|
||||
|
||||
|
||||
device :: SniffGatewayDevice(eth0);
|
||||
arpq_in :: ARPQuerier(eth0-in) -> device;
|
||||
ip_to_extern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> EtherEncap(0x800, eth0-ex, gw-addr)
|
||||
-> device;
|
||||
ip_to_host :: EtherEncap(0x800, gw-addr, eth0-ex)
|
||||
-> ToHost;
|
||||
ip_to_intern :: GetIPAddress(16)
|
||||
-> CheckIPHeader
|
||||
-> arpq_in;
|
||||
|
||||
|
||||
arp_class :: Classifier(
|
||||
12/0806 20/0001, // [0] ARP requests
|
||||
12/0806 20/0002, // [1] ARP replies to host
|
||||
12/0800); // [2] IP packets
|
||||
|
||||
device -> arp_class;
|
||||
|
||||
// ARP crap
|
||||
arp_class[0] -> ARPResponder(eth0-in, eth0-ex) -> device;
|
||||
arp_class[1] -> arp_t :: Tee;
|
||||
arp_t[0] -> ToHost;
|
||||
arp_t[1] -> [1]arpq_in;
|
||||
|
||||
|
||||
// IP packets
|
||||
arp_class[2] -> Strip(14)
|
||||
-> CheckIPHeader
|
||||
-> ipclass :: IPClassifier(dst host eth0-ex,
|
||||
dst host eth0-in,
|
||||
src net eth0-in);
|
||||
|
||||
// Define pattern NAT
|
||||
iprw :: IPRewriterPatterns(NAT eth0-ex 50000-65535 - -);
|
||||
|
||||
// Rewriting rules for UDP/TCP packets
|
||||
// output[0] rewritten to go into the wild
|
||||
// output[1] rewritten to come back from the wild or no match
|
||||
rw :: IPRewriter(pattern NAT 0 1,
|
||||
pass 1);
|
||||
|
||||
// Rewriting rules for ICMP packets
|
||||
irw :: ICMPPingRewriter(eth0-ex, -);
|
||||
irw[0] -> ip_to_extern;
|
||||
irw[1] -> icmp_me_or_intern :: IPClassifier(dst host eth0-ex, -);
|
||||
icmp_me_or_intern[0] -> ip_to_host;
|
||||
icmp_me_or_intern[1] -> ip_to_intern;
|
||||
|
||||
// Rewriting rules for ICMP error packets
|
||||
ierw :: ICMPRewriter(rw irw);
|
||||
ierw[0] -> icmp_me_or_intern;
|
||||
ierw[1] -> icmp_me_or_intern;
|
||||
|
||||
|
||||
// Packets directed at eth0-ex.
|
||||
// Send it through IPRewriter(pass). If there was a mapping, it will be
|
||||
// rewritten such that dst is eth0-in:net, otherwise dst will still be for
|
||||
// eth0-ex.
|
||||
ipclass[0] -> [1]rw;
|
||||
|
||||
// packets that were rewritten, heading into the wild world.
|
||||
rw[0] -> ip_to_extern;
|
||||
|
||||
// packets that come back from the wild or are not part of an established
|
||||
// connection.
|
||||
rw[1] -> established_class :: IPClassifier(dst host eth0-ex,
|
||||
dst net eth0-in);
|
||||
|
||||
// not established yet or returning packets for a connection that was
|
||||
// established from this host itself.
|
||||
established_class[0] ->
|
||||
firewall :: IPClassifier(dst tcp port ssh,
|
||||
dst tcp port smtp,
|
||||
dst tcp port domain,
|
||||
dst udp port domain,
|
||||
icmp type echo-reply,
|
||||
proto icmp,
|
||||
port > 4095,
|
||||
-);
|
||||
|
||||
firewall[0] -> ip_to_host; // ssh
|
||||
firewall[1] -> ip_to_host; // smtp
|
||||
firewall[2] -> ip_to_host; // domain (t)
|
||||
firewall[3] -> ip_to_host; // domain (u)
|
||||
firewall[4] -> [0]irw; // icmp reply
|
||||
firewall[5] -> [0]ierw; // other icmp
|
||||
firewall[6] -> ip_to_host; // port > 4095, probably for connection
|
||||
// originating from host itself
|
||||
firewall[7] -> Discard; // don't allow incoming for port <= 4095
|
||||
|
||||
// established connection
|
||||
established_class[1] -> ip_to_intern;
|
||||
|
||||
// To eth0-in. Only accept from inside network.
|
||||
ipclass[1] -> IPClassifier(src net eth0-in) -> ip_to_host;
|
||||
|
||||
// Packets from eth0-in:net either stay on local network or go to the wild.
|
||||
// Those that go into the wild need to go through the appropriate rewriting
|
||||
// element. (Either UDP/TCP rewriter or ICMP rewriter.)
|
||||
ipclass[2] -> inter_class :: IPClassifier(dst net eth0-in, -);
|
||||
inter_class[0] -> ip_to_intern;
|
||||
inter_class[1] -> ip_udp_class :: IPClassifier(tcp or udp,
|
||||
icmp type echo);
|
||||
ip_udp_class[0] -> [0]rw;
|
||||
ip_udp_class[1] -> [0]irw;
|
||||
17
samples/CoffeeScript/build.cake
Normal file
@@ -0,0 +1,17 @@
|
||||
fs = require 'fs'
|
||||
|
||||
{print} = require 'sys'
|
||||
{spawn} = require 'child_process'
|
||||
|
||||
build = (callback) ->
|
||||
coffee = spawn 'coffee', ['-c', '-o', '.', '.']
|
||||
coffee.stderr.on 'data', (data) ->
|
||||
process.stderr.write data.toString()
|
||||
coffee.stdout.on 'data', (data) ->
|
||||
print data.toString()
|
||||
coffee.on 'exit', (code) ->
|
||||
callback?() if code is 0
|
||||
|
||||
task 'build', 'Build from source', ->
|
||||
build()
|
||||
|
||||
3608
samples/Formatted/NiAlH_jea.eam.fs
Normal file
File diff suppressed because it is too large
19
samples/JavaScript/logo.jscad
Normal file
@@ -0,0 +1,19 @@
|
||||
// title : OpenJSCAD.org Logo
|
||||
// author : Rene K. Mueller
|
||||
// license : MIT License
|
||||
// revision : 0.003
|
||||
// tags : Logo,Intersection,Sphere,Cube
|
||||
// file : logo.jscad
|
||||
|
||||
function main() {
|
||||
return union(
|
||||
difference(
|
||||
cube({size: 3, center: true}),
|
||||
sphere({r:2, center: true})
|
||||
),
|
||||
intersection(
|
||||
sphere({r: 1.3, center: true}),
|
||||
cube({size: 2.1, center: true})
|
||||
)
|
||||
).translate([0,0,1.5]).scale(10);
|
||||
}
|
||||
210
samples/Jupyter Notebook/JupyterNotebook.ipynb
Normal file
File diff suppressed because one or more lines are too long
29
samples/MAXScript/macro-1.mcr
Normal file
@@ -0,0 +1,29 @@
|
||||
-- Taken from an example from Autodesk's MAXScript reference:
|
||||
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_84E24969_C175_4389_B9A6_3B2699B66785_htm
|
||||
|
||||
macroscript MoveToSurface
|
||||
category: "HowTo"
|
||||
(
|
||||
fn g_filter o = superclassof o == Geometryclass
|
||||
fn find_intersection z_node node_to_z = (
|
||||
local testRay = ray node_to_z.pos [0,0,-1]
|
||||
local nodeMaxZ = z_node.max.z
|
||||
testRay.pos.z = nodeMaxZ + 0.0001 * abs nodeMaxZ
|
||||
intersectRay z_node testRay
|
||||
)
|
||||
|
||||
on isEnabled return selection.count > 0
|
||||
|
||||
on Execute do (
|
||||
target_mesh = pickObject message:"Pick Target Surface:" filter:g_filter
|
||||
|
||||
if isValidNode target_mesh then (
|
||||
undo "MoveToSurface" on (
|
||||
for i in selection do (
|
||||
int_point = find_intersection target_mesh i
|
||||
if int_point != undefined then i.pos = int_point.pos
|
||||
)--end i loop
|
||||
)--end undo
|
||||
)--end if
|
||||
)--end execute
|
||||
)--end script
|
||||
53
samples/MAXScript/macro-2.mcr
Normal file
@@ -0,0 +1,53 @@
|
||||
-- Taken from an example from Autodesk's MAXScript reference:
|
||||
-- http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_0876DF46_FAA3_4131_838D_5739A67FF2C1_htm
|
||||
|
||||
macroscript FreeSpline category:"HowTo" tooltip:"FreeSpline" (
|
||||
local old_pos
|
||||
local new_spline
|
||||
local second_knot_set
|
||||
|
||||
fn get_mouse_pos pen_pos old_pen_pos = (
|
||||
if old_pos == undefined then old_pos = old_pen_pos
|
||||
if distance pen_pos old_pos > 10 then
|
||||
(
|
||||
if second_knot_set then
|
||||
addKnot new_spline 1 #smooth #curve pen_pos
|
||||
else
|
||||
(
|
||||
setKnotPoint new_spline 1 2 pen_pos
|
||||
second_knot_set = true
|
||||
)
|
||||
old_pos = pen_pos
|
||||
updateShape new_spline
|
||||
)-- end if
|
||||
)-- end fn
|
||||
|
||||
fn draw_new_line old_pen_pos = (
|
||||
pickPoint mouseMoveCallback:#(get_mouse_pos,old_pen_pos)
|
||||
)
|
||||
|
||||
undo"Free Spline"on(
|
||||
new_spline = splineShape ()
|
||||
old_pen_pos = pickPoint ()
|
||||
|
||||
if old_pen_pos == #RightClick then
|
||||
delete new_spline
|
||||
else
|
||||
(
|
||||
select new_spline
|
||||
new_spline.pos = old_pen_pos
|
||||
addNewSpline new_spline
|
||||
addKnot new_spline 1 #smooth #curve old_pen_pos
|
||||
addKnot new_spline 1 #smooth #curve old_pen_pos
|
||||
second_knot_set = false
|
||||
draw_new_line old_pen_pos
|
||||
q = querybox "Close Spline?" title:"Free Spline"
|
||||
if q then
|
||||
(
|
||||
close new_spline 1
|
||||
updateshape new_spline
|
||||
)
|
||||
select new_spline
|
||||
)--end else
|
||||
)--end undo
|
||||
)--end script
|
||||
64
samples/MAXScript/svg-renderer.ms
Normal file
@@ -0,0 +1,64 @@
|
||||
-- Taken from a 3-part tutorial from Autodesk's MAXScript reference
|
||||
-- Source: http://help.autodesk.com/view/3DSMAX/2016/ENU/?guid=__files_GUID_6B5EDC11_A154_4AA7_A972_A11AC36949E9_htm
|
||||
|
||||
fn ColourToHex col = (
|
||||
local theComponents = #(bit.intAsHex col.r, bit.intAsHex col.g, bit.intAsHex col.b)
|
||||
local theValue = "#"
|
||||
for i in theComponents do
|
||||
theValue += (if i.count == 1 then "0" else "") + i
|
||||
theValue
|
||||
)
|
||||
|
||||
local st = timestamp()
|
||||
local theFileName = (getDir #userscripts + "\\PolygonRendering3.svg")
|
||||
local theSVGfile = createFile theFileName
|
||||
format "<svg xmlns=\"http://www.w3.org/2000/svg\"\n" to:theSVGfile
|
||||
format "\t\txmlns:xlink=\"http://www.w3.org/1999/xlink\">\n" to:theSVGfile
|
||||
|
||||
local theViewTM = viewport.getTM()
|
||||
theViewTM.row4 = [0,0,0]
|
||||
|
||||
local theViewTM2 = viewport.getTM()
|
||||
local theViewSize = getViewSize()
|
||||
local theViewScale = getViewSize()
|
||||
theViewScale.x /= 1024.0
|
||||
theViewScale.y /= 1024.0
|
||||
|
||||
local theStrokeThickness = 3
|
||||
|
||||
gw.setTransform (matrix3 1)
|
||||
for o in Geometry where not o.isHiddenInVpt and classof o != TargetObject do (
|
||||
local theStrokeColour = white
|
||||
local theFillColour = o.wirecolor
|
||||
|
||||
local theMesh = snapshotAsMesh o
|
||||
for f = 1 to theMesh.numfaces do (
|
||||
local theNormal = normalize (getFaceNormal theMesh f)
|
||||
|
||||
if (theNormal*theViewTM).z > 0 do
|
||||
(
|
||||
local theFace = getFace theMesh f
|
||||
local v1 = gw.transPoint (getVert theMesh theFace.x)
|
||||
local v2 = gw.transPoint (getVert theMesh theFace.y)
|
||||
local v3 = gw.transPoint (getVert theMesh theFace.z)
|
||||
|
||||
v1.x /= theViewScale.x
|
||||
v1.y /= theViewScale.y
|
||||
v2.x /= theViewScale.x
|
||||
v2.y /= theViewScale.y
|
||||
v3.x /= theViewScale.x
|
||||
v3.y /= theViewScale.y
|
||||
|
||||
format "\t<polygon points='%,% %,% %,%' \n" v1.x v1.y v2.x v2.y v3.x v3.y to:theSVGfile
|
||||
format "\tstyle='stroke:%; fill:%; stroke-width:%'/>\n" (ColourToHex theStrokeColour) (ColourToHex theFillColour) theStrokeThickness to:theSVGfile
|
||||
)--end if normal positive
|
||||
)--end f loop
|
||||
)--end o loop
|
||||
|
||||
format "</svg>\n" to:theSVGfile
|
||||
close theSVGfile
|
||||
local theSVGMap = VectorMap vectorFile:theFileName alphasource:0
|
||||
local theBitmap = bitmap theViewSize.x theViewSize.y
|
||||
renderMap theSVGMap into:theBitmap filter:true
|
||||
display theBitmap
|
||||
format "Render Time: % sec.\n" ((timestamp()-st)/1000.0)
|
||||
22
samples/MAXScript/volume-calc.ms
Normal file
@@ -0,0 +1,22 @@
|
||||
fn CalculateVolumeAndCentreOfMass obj =
|
||||
(
|
||||
local Volume= 0.0
|
||||
local Centre= [0.0, 0.0, 0.0]
|
||||
local theMesh = snapshotasmesh obj
|
||||
local numFaces = theMesh.numfaces
|
||||
for i = 1 to numFaces do
|
||||
(
|
||||
local Face= getFace theMesh i
|
||||
local vert2 = getVert theMesh Face.z
|
||||
local vert1 = getVert theMesh Face.y
|
||||
local vert0 = getVert theMesh Face.x
|
||||
local dV = Dot (Cross (vert1 - vert0) (vert2 - vert0)) vert0
|
||||
Volume+= dV
|
||||
Centre+= (vert0 + vert1 + vert2) * dV
|
||||
)
|
||||
delete theMesh
|
||||
Volume /= 6
|
||||
Centre /= 24
|
||||
Centre /= Volume
|
||||
#(Volume,Centre)
|
||||
)
|
||||
1
samples/Mathematica/TestArithmetic.mt
Normal file
@@ -0,0 +1 @@
|
||||
Test[1 + 2, 3, TestID -> "One plus two"]
|
||||
1
samples/Mathematica/TestString.mt
Normal file
@@ -0,0 +1 @@
|
||||
Test[1"a" <> "b", "ab", TestID -> "Concat \"a\" and \"b\""]
|
||||
5
samples/Mathematica/TestSuite.mt
Normal file
@@ -0,0 +1,5 @@
|
||||
TestSuite[
|
||||
{ "TestArithmetic.mt"
|
||||
, "TestString.mt"
|
||||
}
|
||||
]
|
||||
24
samples/Pickle/data.pkl
Normal file
@@ -0,0 +1,24 @@
|
||||
(dp0
|
||||
S'a'
|
||||
p1
|
||||
(lp2
|
||||
I1
|
||||
aF2.0
|
||||
aI3
|
||||
ac__builtin__
|
||||
complex
|
||||
p3
|
||||
(F4.0
|
||||
F6.0
|
||||
tp4
|
||||
Rp5
|
||||
asS'c'
|
||||
p6
|
||||
NsS'b'
|
||||
p7
|
||||
(S'string'
|
||||
p8
|
||||
VUnicode string
|
||||
p9
|
||||
tp10
|
||||
s.
|
||||
60
samples/Pickle/neural-network-ce-l2reg-784-10-30.pkl
Normal file
File diff suppressed because one or more lines are too long
36
samples/Pickle/random.pkl
Normal file
@@ -0,0 +1,36 @@
|
||||
cnumpy.core.multiarray
|
||||
_reconstruct
|
||||
p0
|
||||
(cnumpy
|
||||
ndarray
|
||||
p1
|
||||
(I0
|
||||
tp2
|
||||
S'b'
|
||||
p3
|
||||
tp4
|
||||
Rp5
|
||||
(I1
|
||||
(I100
|
||||
tp6
|
||||
cnumpy
|
||||
dtype
|
||||
p7
|
||||
(S'f8'
|
||||
p8
|
||||
I0
|
||||
I1
|
||||
tp9
|
||||
Rp10
|
||||
(I3
|
||||
S'<'
|
||||
p11
|
||||
NNNI-1
|
||||
I-1
|
||||
I0
|
||||
tp12
|
||||
bI00
|
||||
S'\x1cc~\xc3\xa7r\xed?\xe5${\xec\xd6\xcd\xed?\x809-\x02%\xa9\xa2?F\x0f\x1d\xe8\xef\xa3\xdb?\xfe\xd1\x0c\xb7\x83\x13\xef?\xe0<o\xa1\xa9^\xdf?CE\x96\x88/o\xe2?<\xd8\xa1\x96\xa2T\xce?\x152\x8e\xe5\xa8\x7f\xe8?\xe4\xb7\x9a\xe0$\x0f\xdc?\x90\xe4\xe2\xd4=\xce\xc3?Ix\xe3P\xc4C\xe1?\x16\xd17\xc1Y\xfc\xed?5\xd7\xae@4\xfa\xe8?\x0f\x87\x8d>\xfcO\xe5?Y\x97\xcb"\xa7%\xe7?\x9b\x8d\x16\xda\x97\xe1\xeb?T\x14\xbd\xfe|\xf4\xd0?\x18\xdfH\xc56A\xba?\x90\xc5\xfb\xc63:\xe5?\xbf%\xad\xe5.\x86\xe9?\xc6\x0c\xa9\x8c\xd7\xd5\xe9?\xf8\xafc:\x84g\xd7?\xf8\x98\x879\x9a\x16\xee?\xba\xdf\x88\x8az\x06\xe2?~g-\xeb\xc8\xed\xee?\x08A\xcc\x8c\xe7>\xef?\xceD\xc4ar\n\xdc?\x92w\xbb\xa34\xb1\xd9?\x88\xb9\xc0{u\xa3\xdc?d\x1a\xad\xe8\xf3\x14\xdd?\x9c\x95\x13\x96o?\xe5?\x9cT[\xb8r\xa9\xe5?0\xf1\x01+(\x0f\xdf?W\xbdjqD&\xed?c\xcf1-W\xe6\xe1?\xce\xbc\xe1{zW\xd9?"d\xcf\xd7\x13\x93\xde?\xf2P\xf6\xc3\xd6\x87\xd5?\xc2\x0e\x92q\x89\xda\xd5?\xc0:B\x1bb\x00\x9e?Y\xafHmr\x80\xe3?\x1co\xa7\xba\xa5/\xe4?\xa2\xbc \x9c\xddB\xd0?\xd2L\x935\x17\'\xee?|\x8cM\xeb\x97=\xe8?\x0f0xN*V\xea?\x81p\xe3,!\xf2\xee?\xf5w\xed\x10\x9eu\xe0?\xc5\x16\\LR\xb5\xe1?\xbeh\x04\xa4g\xe5\xd6?\xea\xc0\xb9\xf0\xb2\xd8\xd9?\xac\x9c\xeep\x1a\xa9\xd8?@W9hp\x16\xb1?\xc4\xedS\xd6V\xa1\xed?\x93,!\xdc\xa1\x8b\xe9?\x80)\xb1\xa6[T\xc9?\xac\xbc\x8a\xd21\xdd\xc5?\x80\x9c.g\xf1\xf2\xc6?\tLu\xc3\xf7U\xe9?n\'\x9f?\xbe\xf9\xe9?\xa3\xe7K\x1c\xb3\xa9\xea?X\x98\x1a\xcb\xa0\xcd\xd3? \xb6O\x9c\x1bQ\xc2?"\x89[\xad1\x8e\xea?\xdd\x8f\xa0P\xc7\x0e\xe2?c\xa4j\xa3\r\xac\xef?\xba\xb6\x0f\x8emo\xef?\xe0\xed\xa0\xc5R9\xab?U\xf1\xcd\xcf\xbf\xcb\xea?\x89*#\x06\xb0|\xe8?d\xa3\xad\xcd\xe0]\xcc?\xb5\xe78\xa7w\x13\xe3?\xce\x99\x98\xefS%\xd7?\xb1\xf8\xd8\x8eI\x13\xef?\x91`]\x93\xd4 \xec?\xc0\rPz\xee\xbd\xe7?7\x92\xd4\x0fP\x8f\xe1?L\x0f\xaf\xa9\xc3\x19\xdd?\\}\x15X\x870\xc7? ~ t\xcat\xb1?@?\xec\x97u\x05\xe9?F\x8d:\xac4D\xdb?qY\xe1Qk|\xe2? \xaf\xeaj\xa5\x04\xab?J[\x1al;\x00\xd5?\x00^{n\xc2\xf1S?\xb0\x82dN\xda\xb5\xc7?\xe0 \x07\xe1?R\x92?\xc4\r\x08+\x99J\xe1?I|&U\x19\xc4\xe1?|*\xf9\xebq\x7f\xed?\xbc*\x93\x89k\xab\xe9?oiL\x90;\xe0\xef?\x96\xcd\x9b\xff\x18g\xdc?pt\xb4\xa5\x9c\xa2\xbc?Nu]w*\xb7\xd2?\x88k\xac\xd0\xfd\xbf\xd5?Q\x02$b\xfeH\xea?5\xf6\t\xb6K\x1a\xee?'
|
||||
p13
|
||||
tp14
|
||||
b.
|
||||
10
samples/Pickle/save.pkl
Normal file
@@ -0,0 +1,10 @@
|
||||
(dp0
|
||||
S'lion'
|
||||
p1
|
||||
S'yellow'
|
||||
p2
|
||||
sS'kitty'
|
||||
p3
|
||||
S'red'
|
||||
p4
|
||||
s.
|
||||
30
samples/Pony/circle.pony
Normal file
@@ -0,0 +1,30 @@
|
||||
use "collections"
|
||||
|
||||
class Circle
|
||||
var _radius: F32
|
||||
|
||||
new create(radius': F32) =>
|
||||
_radius = radius'
|
||||
|
||||
fun ref get_radius(): F32 =>
|
||||
_radius
|
||||
|
||||
fun ref get_area(): F32 =>
|
||||
F32.pi() * _radius.pow(2)
|
||||
|
||||
fun ref get_circumference(): F32 =>
|
||||
2 * _radius * F32.pi()
|
||||
|
||||
actor Main
|
||||
new create(env: Env) =>
|
||||
|
||||
for i in Range[F32](1.0, 101.0) do
|
||||
let c = Circle(i)
|
||||
|
||||
var str =
|
||||
"Radius: " + c.get_radius().string() + "\n" +
|
||||
"Circumference: " + c.get_circumference().string() + "\n" +
|
||||
"Area: " + c.get_area().string() + "\n"
|
||||
|
||||
env.out.print(str)
|
||||
end
|
||||
32
samples/Pony/counter.pony
Normal file
@@ -0,0 +1,32 @@
|
||||
use "collections"
|
||||
|
||||
actor Counter
|
||||
var _count: U32
|
||||
|
||||
new create() =>
|
||||
_count = 0
|
||||
|
||||
be increment() =>
|
||||
_count = _count + 1
|
||||
|
||||
be get_and_reset(main: Main) =>
|
||||
main.display(_count)
|
||||
_count = 0
|
||||
|
||||
actor Main
|
||||
var _env: Env
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
var count: U32 = try env.args(1).u32() else 10 end
|
||||
var counter = Counter
|
||||
|
||||
for i in Range[U32](0, count) do
|
||||
counter.increment()
|
||||
end
|
||||
|
||||
counter.get_and_reset(this)
|
||||
|
||||
be display(result: U32) =>
|
||||
_env.out.print(result.string())
|
||||
261
samples/Pony/gups-opt.pony
Normal file
@@ -0,0 +1,261 @@
|
||||
use "options"
|
||||
use "time"
|
||||
use "collections"
|
||||
|
||||
class Config
|
||||
var logtable: U64 = 20
|
||||
var iterate: U64 = 10000
|
||||
var logchunk: U64 = 10
|
||||
var logactors: U64 = 2
|
||||
|
||||
fun ref apply(env: Env): Bool =>
|
||||
var options = Options(env)
|
||||
|
||||
options
|
||||
.add("logtable", "l", I64Argument)
|
||||
.add("iterate", "i", I64Argument)
|
||||
.add("chunk", "c", I64Argument)
|
||||
.add("actors", "a", I64Argument)
|
||||
|
||||
for option in options do
|
||||
match option
|
||||
| ("table", var arg: I64) => logtable = arg.u64()
|
||||
| ("iterate", var arg: I64) => iterate = arg.u64()
|
||||
| ("chunk", var arg: I64) => logchunk = arg.u64()
|
||||
| ("actors", var arg: I64) => logactors = arg.u64()
|
||||
| let err: ParseError =>
|
||||
err.report(env.out)
|
||||
env.out.print(
|
||||
"""
|
||||
gups_opt [OPTIONS]
|
||||
--table N log2 of the total table size. Defaults to 20.
|
||||
--iterate N number of iterations. Defaults to 10000.
|
||||
--chunk N log2 of the chunk size. Defaults to 10.
|
||||
--actors N log2 of the actor count. Defaults to 2.
|
||||
"""
|
||||
)
|
||||
return false
|
||||
end
|
||||
end
|
||||
|
||||
env.out.print(
|
||||
"logtable: " + logtable.string() +
|
||||
"\niterate: " + iterate.string() +
|
||||
"\nlogchunk: " + logchunk.string() +
|
||||
"\nlogactors: " + logactors.string()
|
||||
)
|
||||
true
|
||||
|
||||
actor Main
|
||||
let _env: Env
|
||||
let _config: Config = Config
|
||||
|
||||
var _updates: U64 = 0
|
||||
var _confirm: U64 = 0
|
||||
let _start: U64
|
||||
var _actors: Array[Updater] val
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
if _config(env) then
|
||||
let actor_count = 1 << _config.logactors
|
||||
let loglocal = _config.logtable - _config.logactors
|
||||
let chunk_size = 1 << _config.logchunk
|
||||
let chunk_iterate = chunk_size * _config.iterate
|
||||
|
||||
_updates = chunk_iterate * actor_count
|
||||
_confirm = actor_count
|
||||
|
||||
var updaters = recover Array[Updater](actor_count) end
|
||||
|
||||
for i in Range[U64](0, actor_count) do
|
||||
updaters.push(Updater(this, actor_count, i, loglocal, chunk_size,
|
||||
chunk_iterate * i))
|
||||
end
|
||||
|
||||
_actors = consume updaters
|
||||
_start = Time.nanos()
|
||||
|
||||
for a in _actors.values() do
|
||||
a.start(_actors, _config.iterate)
|
||||
end
|
||||
else
|
||||
_start = 0
|
||||
_actors = recover Array[Updater] end
|
||||
end
|
||||
|
||||
be done() =>
|
||||
if (_confirm = _confirm - 1) == 1 then
|
||||
for a in _actors.values() do
|
||||
a.done()
|
||||
end
|
||||
end
|
||||
|
||||
be confirm() =>
|
||||
_confirm = _confirm + 1
|
||||
|
||||
if _confirm == _actors.size() then
|
||||
let elapsed = (Time.nanos() - _start).f64()
|
||||
let gups = _updates.f64() / elapsed
|
||||
|
||||
_env.out.print(
|
||||
"Time: " + (elapsed / 1e9).string() +
|
||||
"\nGUPS: " + gups.string()
|
||||
)
|
||||
end
|
||||
|
||||
actor Updater
|
||||
let _main: Main
|
||||
let _index: U64
|
||||
let _updaters: U64
|
||||
let _chunk: U64
|
||||
let _mask: U64
|
||||
let _loglocal: U64
|
||||
|
||||
let _output: Array[Array[U64] iso]
|
||||
let _reuse: List[Array[U64] iso] = List[Array[U64] iso]
|
||||
var _others: (Array[Updater] val | None) = None
|
||||
var _table: Array[U64]
|
||||
var _rand: U64
|
||||
|
||||
new create(main:Main, updaters: U64, index: U64, loglocal: U64, chunk: U64,
|
||||
seed: U64)
|
||||
=>
|
||||
_main = main
|
||||
_index = index
|
||||
_updaters = updaters
|
||||
_chunk = chunk
|
||||
_mask = updaters - 1
|
||||
_loglocal = loglocal
|
||||
|
||||
_rand = PolyRand.seed(seed)
|
||||
_output = _output.create(updaters)
|
||||
|
||||
let size = 1 << loglocal
|
||||
_table = Array[U64].undefined(size)
|
||||
|
||||
var offset = index * size
|
||||
|
||||
try
|
||||
for i in Range[U64](0, size) do
|
||||
_table(i) = i + offset
|
||||
end
|
||||
end
|
||||
|
||||
be start(others: Array[Updater] val, iterate: U64) =>
|
||||
_others = others
|
||||
iteration(iterate)
|
||||
|
||||
be apply(iterate: U64) =>
|
||||
iteration(iterate)
|
||||
|
||||
fun ref iteration(iterate: U64) =>
|
||||
let chk = _chunk
|
||||
|
||||
for i in Range(0, _updaters) do
|
||||
_output.push(
|
||||
try
|
||||
_reuse.pop()
|
||||
else
|
||||
recover Array[U64](chk) end
|
||||
end
|
||||
)
|
||||
end
|
||||
|
||||
for i in Range(0, _chunk) do
|
||||
var datum = _rand = PolyRand(_rand)
|
||||
var updater = (datum >> _loglocal) and _mask
|
||||
|
||||
try
|
||||
if updater == _index then
|
||||
_table(i) = _table(i) xor datum
|
||||
else
|
||||
_output(updater).push(datum)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
try
|
||||
let to = _others as Array[Updater] val
|
||||
|
||||
repeat
|
||||
let data = _output.pop()
|
||||
|
||||
if data.size() > 0 then
|
||||
to(_output.size()).receive(consume data)
|
||||
else
|
||||
_reuse.push(consume data)
|
||||
end
|
||||
until _output.size() == 0 end
|
||||
end
|
||||
|
||||
if iterate > 1 then
|
||||
apply(iterate - 1)
|
||||
else
|
||||
_main.done()
|
||||
end
|
||||
|
||||
be receive(data: Array[U64] iso) =>
|
||||
try
|
||||
for i in Range(0, data.size()) do
|
||||
let datum = data(i)
|
||||
var j = (datum >> _loglocal) and _mask
|
||||
_table(j) = _table(j) xor datum
|
||||
end
|
||||
|
||||
data.clear()
|
||||
_reuse.push(consume data)
|
||||
end
|
||||
|
||||
be done() =>
|
||||
_main.confirm()
|
||||
|
||||
primitive PolyRand
|
||||
fun apply(prev: U64): U64 =>
|
||||
(prev << 1) xor if prev.i64() < 0 then _poly() else 0 end
|
||||
|
||||
fun seed(from: U64): U64 =>
|
||||
var n = from % _period()
|
||||
|
||||
if n == 0 then
|
||||
return 1
|
||||
end
|
||||
|
||||
var m2 = Array[U64].undefined(64)
|
||||
var temp = U64(1)
|
||||
|
||||
try
|
||||
for i in Range(0, 64) do
|
||||
m2(i) = temp
|
||||
temp = this(temp)
|
||||
temp = this(temp)
|
||||
end
|
||||
end
|
||||
|
||||
var i: U64 = 64 - n.clz()
|
||||
var r = U64(2)
|
||||
|
||||
try
|
||||
while i > 0 do
|
||||
temp = 0
|
||||
|
||||
for j in Range(0, 64) do
|
||||
if ((r >> j) and 1) != 0 then
|
||||
temp = temp xor m2(j)
|
||||
end
|
||||
end
|
||||
|
||||
r = temp
|
||||
i = i - 1
|
||||
|
||||
if ((n >> i) and 1) != 0 then
|
||||
r = this(r)
|
||||
end
|
||||
end
|
||||
end
|
||||
r
|
||||
|
||||
fun _poly(): U64 => 7
|
||||
|
||||
fun _period(): U64 => 1317624576693539401
|
||||
3
samples/Pony/hello-world.pony
Normal file
@@ -0,0 +1,3 @@
|
||||
actor Main
|
||||
new create(env: Env) =>
|
||||
env.out.print("Hello, world.")
|
||||
188
samples/Pony/mandelbrot.pony
Normal file
@@ -0,0 +1,188 @@
|
||||
use "files"
|
||||
use "options"
|
||||
use "collections"
|
||||
|
||||
actor Worker
|
||||
new mandelbrot(main: Main, x: U64, y: U64, width: U64, iterations: U64,
|
||||
limit: F32, real: Array[F32] val, imaginary: Array[F32] val)
|
||||
=>
|
||||
var view: Array[U8] iso =
|
||||
recover
|
||||
Array[U8]((y - x) * (width >> 3))
|
||||
end
|
||||
|
||||
let group_r = Array[F32].undefined(8)
|
||||
let group_i = Array[F32].undefined(8)
|
||||
|
||||
var row = x
|
||||
|
||||
try
|
||||
while row < y do
|
||||
let prefetch_i = imaginary(row)
|
||||
|
||||
var col: U64 = 0
|
||||
|
||||
while col < width do
|
||||
var j: U64 = 0
|
||||
|
||||
while j < 8 do
|
||||
group_r.update(j, real(col + j))
|
||||
group_i.update(j, prefetch_i)
|
||||
j = j + 1
|
||||
end
|
||||
|
||||
var bitmap: U8 = 0xFF
|
||||
var n = iterations
|
||||
|
||||
repeat
|
||||
var mask: U8 = 0x80
|
||||
var k: U64 = 0
|
||||
|
||||
while k < 8 do
|
||||
let r = group_r(k)
|
||||
let i = group_i(k)
|
||||
|
||||
group_r.update(k, ((r * r) - (i * i)) + real(col + k))
|
||||
group_i.update(k, (2.0 * r * i) + prefetch_i)
|
||||
|
||||
if ((r * r) + (i * i)) > limit then
|
||||
bitmap = bitmap and not mask
|
||||
end
|
||||
|
||||
mask = mask >> 1
|
||||
k = k + 1
|
||||
end
|
||||
until (bitmap == 0) or ((n = n - 1) == 1) end
|
||||
|
||||
view.push(bitmap)
|
||||
|
||||
col = col + 8
|
||||
end
|
||||
row = row + 1
|
||||
end
|
||||
|
||||
main.draw(x * (width >> 3), consume view)
|
||||
end
|
||||
|
||||
actor Main
|
||||
var iterations: U64 = 50
|
||||
var limit: F32 = 4.0
|
||||
var chunks: U64 = 16
|
||||
var width: U64 = 16000
|
||||
var actors: U64 = 0
|
||||
var header: U64 = 0
|
||||
var real: Array[F32] val = recover Array[F32] end
|
||||
var imaginary: Array[F32] val = recover Array[F32] end
|
||||
var outfile: (File | None) = None
|
||||
|
||||
new create(env: Env) =>
|
||||
try
|
||||
arguments(env)
|
||||
|
||||
let length = width
|
||||
let recip_width = 2.0 / width.f32()
|
||||
|
||||
var r = recover Array[F32](length) end
|
||||
var i = recover Array[F32](length) end
|
||||
|
||||
for j in Range(0, width) do
|
||||
r.push((recip_width * j.f32()) - 1.5)
|
||||
i.push((recip_width * j.f32()) - 1.0)
|
||||
end
|
||||
|
||||
real = consume r
|
||||
imaginary = consume i
|
||||
|
||||
spawn_actors()
|
||||
create_outfile()
|
||||
end
|
||||
|
||||
be draw(offset: U64, pixels: Array[U8] val) =>
|
||||
match outfile
|
||||
| var out: File =>
|
||||
out.seek_start(header + offset)
|
||||
out.write(pixels)
|
||||
if (actors = actors - 1) == 1 then
|
||||
out.dispose()
|
||||
end
|
||||
end
|
||||
|
||||
fun ref create_outfile() =>
|
||||
match outfile
|
||||
| var f: File =>
|
||||
f.print("P4\n " + width.string() + " " + width.string() + "\n")
|
||||
header = f.size()
|
||||
f.set_length((width * (width >> 3)) + header)
|
||||
end
|
||||
|
||||
fun ref spawn_actors() =>
|
||||
actors = ((width + (chunks - 1)) / chunks)
|
||||
|
||||
var rest = width % chunks
|
||||
|
||||
if rest == 0 then rest = chunks end
|
||||
|
||||
var x: U64 = 0
|
||||
var y: U64 = 0
|
||||
|
||||
for i in Range(0, actors - 1) do
|
||||
x = i * chunks
|
||||
y = x + chunks
|
||||
Worker.mandelbrot(this, x, y, width, iterations, limit, real, imaginary)
|
||||
end
|
||||
|
||||
Worker.mandelbrot(this, y, y + rest, width, iterations, limit, real,
|
||||
imaginary)
|
||||
|
||||
fun ref arguments(env: Env) ? =>
|
||||
let options = Options(env)
|
||||
|
||||
options
|
||||
.add("iterations", "i", I64Argument)
|
||||
.add("limit", "l", F64Argument)
|
||||
.add("chunks", "c", I64Argument)
|
||||
.add("width", "w", I64Argument)
|
||||
.add("output", "o", StringArgument)
|
||||
|
||||
for option in options do
|
||||
match option
|
||||
| ("iterations", var arg: I64) => iterations = arg.u64()
|
||||
| ("limit", var arg: F64) => limit = arg.f32()
|
||||
| ("chunks", var arg: I64) => chunks = arg.u64()
|
||||
| ("width", var arg: I64) => width = arg.u64()
|
||||
| ("output", var arg: String) =>
|
||||
outfile = try File(FilePath(env.root, arg)) end
|
||||
| let err: ParseError => err.report(env.out) ; usage(env) ; error
|
||||
end
|
||||
end
|
||||
|
||||
fun tag usage(env: Env) =>
|
||||
env.out.print(
|
||||
"""
|
||||
mandelbrot [OPTIONS]
|
||||
|
||||
The binary output can be converted to a BMP with the following command
|
||||
(ImageMagick Tools required):
|
||||
|
||||
convert <output> JPEG:<output>.jpg
|
||||
|
||||
Available options:
|
||||
|
||||
--iterations, -i Maximum amount of iterations to be done for each pixel.
|
||||
Defaults to 50.
|
||||
|
||||
--limit, -l Square of the limit that pixels need to exceed in order
|
||||
to escape from the Mandelbrot set.
|
||||
Defaults to 4.0.
|
||||
|
||||
--chunks, -c Maximum line count of chunks the image should be
|
||||
divided into for divide & conquer processing.
|
||||
Defaults to 16.
|
||||
|
||||
--width, -w Lateral length of the resulting mandelbrot image.
|
||||
Defaults to 16000.
|
||||
|
||||
--output, -o File to write the output to.
|
||||
|
||||
"""
|
||||
)
|
||||
130
samples/Pony/mixed.pony
Normal file
@@ -0,0 +1,130 @@
|
||||
use "collections"
|
||||
|
||||
actor Worker
|
||||
var _env: Env
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
var a: U64 = 86028157
|
||||
var b: U64 = 329545133
|
||||
|
||||
var result = factorize(a*b)
|
||||
|
||||
var correct =
|
||||
try
|
||||
(result.size() == 2) and
|
||||
(result(0) == 86028157) and
|
||||
(result(1) == 329545133)
|
||||
else
|
||||
false
|
||||
end
|
||||
|
||||
fun ref factorize(bigint: U64) : Array[U64] =>
|
||||
var factors = Array[U64](2)
|
||||
|
||||
if bigint <= 3 then
|
||||
factors.push(bigint)
|
||||
else
|
||||
var d: U64 = 2
|
||||
var i: U64 = 0
|
||||
var n = bigint
|
||||
|
||||
while d < n do
|
||||
if (n % d) == 0 then
|
||||
i = i + 1
|
||||
factors.push(d)
|
||||
n = n / d
|
||||
else
|
||||
d = if d == 2 then 3 else (d + 2) end
|
||||
end
|
||||
end
|
||||
|
||||
factors.push(d)
|
||||
end
|
||||
|
||||
factors
|
||||
|
||||
actor Ring
|
||||
var _env: Env
|
||||
var _size: U32
|
||||
var _pass: U32
|
||||
var _repetitions: U32
|
||||
var _next: Ring
|
||||
|
||||
new create(env: Env, size: U32, pass: U32, repetitions: U32) =>
|
||||
_env = env
|
||||
_size = size
|
||||
_pass = pass
|
||||
_repetitions = repetitions
|
||||
_next = spawn_ring(_env, _size, _pass)
|
||||
run()
|
||||
|
||||
new neighbor(env: Env, next: Ring) =>
|
||||
_env = env
|
||||
_next = next
|
||||
_size = 0
|
||||
_pass = 0
|
||||
_repetitions = 0
|
||||
|
||||
be apply(i: U32) =>
|
||||
if i > 0 then
|
||||
_next(i - 1)
|
||||
else
|
||||
run()
|
||||
end
|
||||
|
||||
fun ref run() =>
|
||||
if _repetitions > 0 then
|
||||
_repetitions = _repetitions - 1
|
||||
_next(_pass * _size)
|
||||
Worker(_env)
|
||||
end
|
||||
|
||||
fun tag spawn_ring(env: Env, size: U32, pass': U32) : Ring =>
|
||||
var next: Ring = this
|
||||
|
||||
for i in Range[U32](0, size) do
|
||||
next = Ring.neighbor(env, next)
|
||||
end
|
||||
|
||||
next
|
||||
|
||||
actor Main
|
||||
var _size: U32 = 50
|
||||
var _count: U32 = 20
|
||||
var _pass: U32 = 10000
|
||||
var _repetitions: U32 = 5
|
||||
var _env: Env
|
||||
|
||||
new create(env: Env) =>
|
||||
_env = env
|
||||
|
||||
try
|
||||
arguments()
|
||||
start_benchmark()
|
||||
else
|
||||
usage()
|
||||
end
|
||||
|
||||
fun ref arguments() ? =>
|
||||
_count = _env.args(1).u32()
|
||||
_size = _env.args(2).u32()
|
||||
_pass = _env.args(3).u32()
|
||||
_repetitions = _env.args(4).u32()
|
||||
|
||||
fun ref start_benchmark() =>
|
||||
for i in Range[U32](0, _count) do
|
||||
Ring(_env, _size, _pass, _repetitions)
|
||||
end
|
||||
|
||||
fun ref usage() =>
|
||||
_env.out.print(
|
||||
"""
|
||||
mixed OPTIONS
|
||||
N number of actors in each ring"
|
||||
N number of rings"
|
||||
N number of messages to pass around each ring"
|
||||
N number of times to repeat"
|
||||
"""
|
||||
)
|
||||
1427
samples/Ren'Py/example.rpy
Normal file
File diff suppressed because it is too large
14
samples/Stan/congress.stan
Normal file
@@ -0,0 +1,14 @@
|
||||
data {
|
||||
int<lower=0> N;
|
||||
vector[N] incumbency_88;
|
||||
vector[N] vote_86;
|
||||
vector[N] vote_88;
|
||||
}
|
||||
parameters {
|
||||
vector[3] beta;
|
||||
real<lower=0> sigma;
|
||||
}
|
||||
model {
|
||||
vote_88 ~ normal(beta[1] + beta[2] * vote_86
|
||||
+ beta[3] * incumbency_88,sigma);
|
||||
}
|
||||
31
samples/Stan/dogs.stan
Normal file
@@ -0,0 +1,31 @@
data {
  int<lower=0> n_dogs;
  int<lower=0> n_trials;
  int<lower=0,upper=1> y[n_dogs,n_trials];
}
parameters {
  vector[3] beta;
}
transformed parameters {
  matrix[n_dogs,n_trials] n_avoid;
  matrix[n_dogs,n_trials] n_shock;
  matrix[n_dogs,n_trials] p;

  for (j in 1:n_dogs) {
    n_avoid[j,1] <- 0;
    n_shock[j,1] <- 0;
    for (t in 2:n_trials) {
      n_avoid[j,t] <- n_avoid[j,t-1] + 1 - y[j,t-1];
      n_shock[j,t] <- n_shock[j,t-1] + y[j,t-1];
    }
    for (t in 1:n_trials)
      p[j,t] <- beta[1] + beta[2] * n_avoid[j,t] + beta[3] * n_shock[j,t];
  }
}
model {
  beta ~ normal(0, 100);
  for (i in 1:n_dogs) {
    for (j in 1:n_trials)
      y[i,j] ~ bernoulli_logit(p[i,j]);
  }
}
26  samples/Stan/schools.stan  Normal file
@@ -0,0 +1,26 @@
data {
  int<lower=0> N;
  vector[N] y;
  vector[N] sigma_y;
}
parameters {
  vector[N] eta;
  real mu_theta;
  real<lower=0,upper=100> sigma_eta;
  real xi;
}
transformed parameters {
  real<lower=0> sigma_theta;
  vector[N] theta;

  theta <- mu_theta + xi * eta;
  sigma_theta <- fabs(xi) / sigma_eta;
}
model {
  mu_theta ~ normal(0, 100);
  sigma_eta ~ inv_gamma(1, 1); //prior distribution can be changed to uniform

  eta ~ normal(0, sigma_eta);
  xi ~ normal(0, 5);
  y ~ normal(theta,sigma_y);
}
79  samples/UrWeb/iso8601.ur  Normal file
@@ -0,0 +1,79 @@
open Parse.String

val digit = satisfy isdigit

val decimal_of_len n =
  ds <- count n digit;
  return (List.foldl (fn d acc => 10*acc + ((ord d)-(ord #"0"))) 0 ds)

val date =
  y <- decimal_of_len 4;
  char' #"-";
  m <- decimal_of_len 2;
  char' #"-";
  d <- decimal_of_len 2;
  if m > 0 && m <= 12 then
    return {Year=y, Month=(Datetime.intToMonth (m-1)), Day=d}
  else
    fail

(* We parse fractions of a second, but ignore them since Datetime
   doesn't permit representing them. *)
val time =
  h <- decimal_of_len 2;
  char' #":";
  m <- decimal_of_len 2;
  s <- maybe (char' #":";
              s <- decimal_of_len 2;
              maybe' (char' #"."; skipWhile isdigit);
              return s);
  return {Hour=h, Minute=m, Second=Option.get 0 s}

val timezone_offset =
  let val zulu = char' #"Z"; return 0
      val digits = decimal_of_len 2
      val sign = or (char' #"+"; return 1)
                    (char' #"-"; return (-1))
  in
    zulu `or` (s <- sign;
               h <- digits;
               m <- (maybe' (char' #":"); or digits (return 0));
               return (s*(h*60+m)))
  end

val datetime_with_tz =
  d <- date; char' #"T"; t <- time;
  tz <- timezone_offset;
  return (d ++ t ++ {TZOffsetMinutes=tz})

val datetime =
  d <- datetime_with_tz;
  return (d -- #TZOffsetMinutes)

fun process v =
  case parse (d <- datetime_with_tz; eof; return d) v of
      Some r =>
      let
        val {Year=year,Month=month,Day=day,
             Hour=hour,Minute=minute,Second=second} =
            Datetime.addMinutes (r.TZOffsetMinutes) (r -- #TZOffsetMinutes)
        fun pad x =
            if x < 10 then "0" `strcat` show x else show x
      in
        <xml>{[pad hour]}:{[pad minute]}:{[pad second]} {[month]} {[day]}, {[year]}</xml>
      end
    | None => <xml>none</xml>

fun main () : transaction page =
  input <- source "2012-01-01T01:10:42Z";
  return <xml>
    <body>
      <label>
        Enter an
        <a href="https://en.wikipedia.org/wiki/ISO_8601">ISO 8601</a>
        datetime here:
        <ctextbox source={input} />
      </label>
      <ul><dyn signal={v <- signal input; return (process v)} /></ul>
    </body>
  </xml>
85  samples/UrWeb/parse.urs  Normal file
@@ -0,0 +1,85 @@
functor Make(Stream : sig type t end) : sig
  con t :: Type -> Type

  val mreturn : a ::: Type -> a -> t a
  val mbind : a ::: Type -> b ::: Type ->
              (t a) -> (a -> t b) -> (t b)
  val monad_parse : monad t

  val parse : a ::: Type -> t a -> Stream.t -> option a

  (** Combinators *)
  val fail : a ::: Type -> t a
  val or : a ::: Type -> t a -> t a -> t a
  val maybe : a ::: Type -> t a -> t (option a)
  val maybe' : a ::: Type -> t a -> t unit
  val many : a ::: Type -> t a -> t (list a)
  val count : a ::: Type -> int -> t a -> t (list a)
  val skipMany : a ::: Type -> t a -> t unit
  val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
end

structure String : sig
  con t :: Type -> Type
  val monad_parse : monad t

  val parse : a ::: Type -> t a -> string -> option a

  (** Combinators *)
  val fail : a ::: Type -> t a
  val or : a ::: Type -> t a -> t a -> t a
  val maybe : a ::: Type -> t a -> t (option a)
  val maybe' : a ::: Type -> t a -> t unit
  val many : a ::: Type -> t a -> t (list a)
  val count : a ::: Type -> int -> t a -> t (list a)
  val skipMany : a ::: Type -> t a -> t unit
  val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)

  val eof : t unit
  (* We provide alternative versions of some of these predicates
   * that return t unit as a monadic syntactical convenience. *)
  val string : string -> t string
  val string' : string -> t unit
  val stringCI : string -> t string
  val stringCI' : string -> t unit
  val char : char -> t char
  val char' : char -> t unit
  val take : int -> t (string*int)
  val drop : int -> t unit
  val satisfy : (char -> bool) -> t char
  val skip : (char -> bool) -> t unit
  val skipWhile : (char -> bool) -> t unit
  val takeWhile : (char -> bool) -> t (string*int)
  val takeWhile' : (char -> bool) -> t string (* conses *)
  (* Well, "till" is the correct form; but "til" is in common enough
   * usage that I'll prefer it for terseness. *)
  val takeTil : (char -> bool) -> t (string*int)
  val takeTil' : (char -> bool) -> t string (* conses *)
  val takeRest : t string

  (** Convenience functions *)
  val skipSpace : t unit
  val endOfLine : t unit
  val unsigned_int_of_radix : int -> t int
  (*
   * val signed_int_of_radix : int -> t int
   * val double : t float
   *)
end

structure Blob : sig
  con t :: Type -> Type
  val monad_parse : monad t

  val parse : a ::: Type -> t a -> blob -> option a

  (** Combinators *)
  val fail : a ::: Type -> t a
  val or : a ::: Type -> t a -> t a -> t a
  val maybe : a ::: Type -> t a -> t (option a)
  val maybe' : a ::: Type -> t a -> t unit
  val many : a ::: Type -> t a -> t (list a)
  val count : a ::: Type -> int -> t a -> t (list a)
  val skipMany : a ::: Type -> t a -> t unit
  val sepBy : a ::: Type -> s ::: Type -> t a -> t s -> t (list a)
end
1053  samples/XML/sample.csl  Normal file
File diff suppressed because it is too large
@@ -5,18 +5,6 @@ set -ex
# Fetch all commits/refs needed to run our tests.
git fetch origin master:master v2.0.0:v2.0.0 test/attributes:test/attributes test/master:test/master

sudo apt-get update

script/vendor-deb libicu48 libicu-dev
if ruby -e 'exit RUBY_VERSION >= "2.0" && RUBY_VERSION < "2.1"'; then
  # Workaround for https://bugs.ruby-lang.org/issues/8074. We can't use this
  # solution on all versions of Ruby due to
  # https://github.com/bundler/bundler/pull/3338.
  bundle config build.charlock_holmes --with-icu-include=$(pwd)/vendor/debs/include --with-icu-lib=$(pwd)/vendor/debs/lib
else
  bundle config build.charlock_holmes --with-icu-dir=$(pwd)/vendor/debs
fi

# Replace SSH links to submodules by HTTPS links.
sed -i 's/git@github.com:/https:\/\/github.com\//' .gitmodules
@@ -1,13 +0,0 @@
#!/bin/sh

set -ex

cd "$(dirname "$0")/.."

mkdir -p vendor/apt vendor/debs

(cd vendor/apt && apt-get --assume-yes download "$@")

for deb in vendor/apt/*.deb; do
  ar p $deb data.tar.gz | tar -vzxC vendor/debs --strip-components=2
done
@@ -9,7 +9,6 @@ class TestGrammars < Minitest::Test

    # This grammar has a nonstandard but acceptable license.
    "vendor/grammars/gap-tmbundle",
    "vendor/grammars/factor",

    # These grammars have no license but have been grandfathered in. New grammars
    # must have a license that allows redistribution.
@@ -81,7 +80,7 @@ class TestGrammars < Minitest::Test
  end

  def test_submodules_have_recognized_licenses
    unrecognized = submodule_licenses.select { |k,v| v.nil? && Licensee::Project.new(k).license_file }
    unrecognized = submodule_licenses.select { |k,v| v.nil? && Licensee::FSProject.new(k).license_file }
    unrecognized.reject! { |k,v| PROJECT_WHITELIST.include?(k) }
    message = "The following submodules have unrecognized licenses:\n* #{unrecognized.keys.join("\n* ")}\n"
    message << "Please ensure that the project's LICENSE file contains the full text of the license."
@@ -132,7 +131,7 @@ class TestGrammars < Minitest::Test
  # Given the path to a submodule, return its SPDX-compliant license key
  def submodule_license(submodule)
    # Prefer Licensee to detect a submodule's license
    project = Licensee::Project.new(submodule)
    project = Licensee::FSProject.new(submodule)
    return project.license.key if project.license

    # We know a license file exists, but Licensee wasn't able to detect the license,
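The hunks above swap the test helpers from Licensee::Project to Licensee::FSProject. A minimal sketch of how the updated helper resolves a submodule's license, assuming a Licensee version that exposes FSProject; only the calls already shown in the diff are used, and the helper name is hypothetical:

require "licensee"

# Sketch: resolve a vendored grammar's SPDX license key the same way
# submodule_license does after this change. Returns nil when Licensee
# cannot classify the LICENSE file.
def sketch_submodule_license(path)
  project = Licensee::FSProject.new(path)   # filesystem-backed project
  project.license.key if project.license    # e.g. "mit", "apache-2.0"
end

# puts sketch_submodule_license("vendor/grammars/language-supercollider")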
@@ -57,6 +57,7 @@ class TestLanguage < Minitest::Test
    assert_equal Language['Shell'], Language.find_by_alias('sh')
    assert_equal Language['Shell'], Language.find_by_alias('shell')
    assert_equal Language['Shell'], Language.find_by_alias('zsh')
    assert_equal Language['SuperCollider'], Language.find_by_alias('supercollider')
    assert_equal Language['TeX'], Language.find_by_alias('tex')
    assert_equal Language['TypeScript'], Language.find_by_alias('ts')
    assert_equal Language['VimL'], Language.find_by_alias('vim')
@@ -119,6 +120,7 @@ class TestLanguage < Minitest::Test
    assert_equal 'vim', Language['VimL'].search_term
    assert_equal 'jsp', Language['Java Server Pages'].search_term
    assert_equal 'rst', Language['reStructuredText'].search_term
    assert_equal 'supercollider', Language['SuperCollider'].search_term
  end

  def test_popular
@@ -138,6 +140,7 @@ class TestLanguage < Minitest::Test
    assert_equal :programming, Language['Ruby'].type
    assert_equal :programming, Language['TypeScript'].type
    assert_equal :programming, Language['Makefile'].type
    assert_equal :programming, Language['SuperCollider'].type
  end

  def test_markup
@@ -227,7 +230,8 @@ class TestLanguage < Minitest::Test
      "python" => "Python",
      "python2" => "Python",
      "python3" => "Python",
      "sbcl" => "Common Lisp"
      "sbcl" => "Common Lisp",
      "sclang" => "SuperCollider"
    }.each do |interpreter, language|
      assert_equal [Language[language]], Language.find_by_interpreter(interpreter)
    end
@@ -339,6 +343,7 @@ class TestLanguage < Minitest::Test
    assert Language['Perl'].extensions.include?('.pl')
    assert Language['Python'].extensions.include?('.py')
    assert Language['Ruby'].extensions.include?('.rb')
    assert Language['SuperCollider'].extensions.include?('.scd')
  end

  def test_primary_extension
@@ -349,6 +354,7 @@ class TestLanguage < Minitest::Test
    assert_equal '.coffee', Language['CoffeeScript'].primary_extension
    assert_equal '.t', Language['Turing'].primary_extension
    assert_equal '.ts', Language['TypeScript'].primary_extension
    assert_equal '.sc', Language['SuperCollider'].primary_extension
  end

  def test_eql
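Taken together, the test additions above pin down how the new SuperCollider registration is expected to resolve. A small sketch of the same lookups against the Linguist::Language API used in these tests; the expected values simply mirror the assertions, nothing beyond them is assumed:

require "linguist"

lang = Linguist::Language['SuperCollider']

Linguist::Language.find_by_alias('supercollider')   # => lang
Linguist::Language.find_by_interpreter('sclang')    # => [lang]
lang.type                                           # => :programming
lang.search_term                                    # => "supercollider"
lang.primary_extension                              # => ".sc"
lang.extensions.include?('.scd')                    # => true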
Submodule vendor/grammars/AutoHotkey updated: 8a9bb55597...77b8f2d785
Submodule vendor/grammars/Docker.tmbundle updated: 6bb36d8262...08585643c0
Submodule vendor/grammars/Elm.tmLanguage updated: f17b1914dd...ab79692fed
Submodule vendor/grammars/FreeMarker.tmbundle updated: 6b7b880c53...7259485a01
Submodule vendor/grammars/InnoSetup updated: a7f79fd1a5...9da37ae690
Submodule vendor/grammars/NSIS updated: ea6104445d...68a4534dde
Submodule vendor/grammars/NimLime updated: 5111833868...4e60e5657f
Submodule vendor/grammars/Stata.tmbundle updated: bc1e36344d...50685c2735
Submodule vendor/grammars/Sublime-Text-2-OpenEdge-ABL updated: dd14b342cb...c830631431
Submodule vendor/grammars/SublimeClarion updated: f070aadd26...5823e7f447
Submodule vendor/grammars/UrWeb-Language-Definition added at cd47cc7891
Submodule vendor/grammars/abap.tmbundle updated: 8ab33a8978...5b7e30fd8f
Submodule vendor/grammars/atom-fsharp updated: 7051d65d63...500cdb5f04
Submodule vendor/grammars/atom-language-purescript updated: 5fd5b6460e...bd2b59f14e
Submodule vendor/grammars/atom-language-stan added at 72c626ae96
Submodule vendor/grammars/carto-atom updated: 9895909323...08a80dc54a
Submodule vendor/grammars/cmake.tmbundle updated: 926867a414...1421c6ba47
Submodule vendor/grammars/elixir-tmbundle updated: 46514e8f9f...4b502e436d
Submodule vendor/grammars/factor updated: 9b5cb445ee...9d95602591
Submodule vendor/grammars/fancy-tmbundle updated: d48b6100cc...c9cdcd61f5
Submodule vendor/grammars/haskell.tmbundle updated: c3137ccc28...df3d54278a
Submodule vendor/grammars/html.tmbundle updated: 181a15de24...a0bc0c479b
Submodule vendor/grammars/jade-tmbundle updated: fea35b58dc...d27b61d178
Submodule vendor/grammars/json.tmbundle updated: 4100e5b016...06b38d5532
Submodule vendor/grammars/language-babel updated: 44ff68da9e...4b0e9658e0
Submodule vendor/grammars/language-click added at 1ee1fe012b
Submodule vendor/grammars/language-clojure updated: 3173abe995...12b73d41a0
Submodule vendor/grammars/language-coffee-script updated: 0eeace014b...afa4f6f157
Submodule vendor/grammars/language-gfm updated: 597d382a8c...e5b24d57e7
Submodule vendor/grammars/language-javascript updated: 9d69b86e30...b227486fc8
Submodule vendor/grammars/language-maxscript added at a465c9ca4a
Submodule vendor/grammars/language-python updated: 6d7b52b882...cdb699e7a8
Submodule vendor/grammars/language-renpy added at 00e92d7450
Submodule vendor/grammars/language-shellscript updated: 0bbc7eee5a...bbaca532de
Submodule vendor/grammars/language-supercollider added at 2b1da230e3
Submodule vendor/grammars/language-yaml updated: 1f5a5b2db0...e1d62e5aff
Submodule vendor/grammars/latex.tmbundle updated: d40245e130...bb4edc2b6a
Submodule vendor/grammars/make.tmbundle updated: 371a9e8a56...e0d96dc1d6
Submodule vendor/grammars/matlab.tmbundle updated: e1460dd9e7...2cdc1f8414
Submodule vendor/grammars/perl.tmbundle updated: fd81fe586b...dedebdcfd4
Submodule vendor/grammars/php-smarty.tmbundle updated: 3e673e1980...36c058a467
Submodule vendor/grammars/processing.tmbundle updated: 4070e43b09...214b3420f1
Submodule vendor/grammars/sas.tmbundle updated: 43a05b10fc...30fa23fc34
Submodule vendor/grammars/smali-sublime updated: b9eba91397...28a4336421
Submodule vendor/grammars/sublime-pony added at 384ba3ed98
Submodule vendor/grammars/sublime-typescript updated: a4c4b9fc79...6540de452e
Submodule vendor/grammars/sublimeprolog updated: 9fd46df4b6...c71d8a1e0f
Submodule vendor/grammars/vue-syntax-highlight updated: 7d35a53e7e...a2336ddc7e