Mirror of https://github.com/KevinMidboe/linguist.git (synced 2025-10-29 17:50:22 +00:00)

Merge branch 'master' into 1413-local

Conflicts: lib/linguist/vendor.yml
@@ -532,6 +532,7 @@ module Linguist
      if extnames = extensions[name]
        extnames.each do |extname|
          if !options['extensions'].include?(extname)
            warn "#{name} has a sample with extension (#{extname}) that isn't explicitly defined in languages.yml" unless extname == '.script!'
            options['extensions'] << extname
          end
        end
@@ -260,6 +260,7 @@ C:
  extensions:
  - .c
  - .cats
  - .h
  - .w

C#:
@@ -288,6 +289,7 @@ C++:
  - .cc
  - .cxx
  - .H
  - .h
  - .h++
  - .hh
  - .hpp
@@ -321,7 +323,7 @@ CLIPS:
CMake:
  extensions:
  - .cmake
  - .cmake.in
  - .in
  filenames:
  - CMakeLists.txt

@@ -346,6 +348,14 @@ Ceylon:
  extensions:
  - .ceylon

Chapel:
  type: programming
  color: "#8dc63f"
  aliases:
  - chpl
  extensions:
  - .chpl

ChucK:
  lexer: Java
  extensions:
@@ -380,7 +390,7 @@ Clojure:
  - .cljscm
  - .cljx
  - .hic
  - .cljs.hl
  - .hl
  filenames:
  - riemann.config

@@ -444,6 +454,7 @@ Coq:
  type: programming
  extensions:
  - .coq
  - .v

Cpp-ObjDump:
  type: data
@@ -479,6 +490,12 @@ Cuda:
  - .cu
  - .cuh

Cycript:
  type: programming
  lexer: JavaScript
  extensions:
  - .cy

Cython:
  type: programming
  group: Python
@@ -533,6 +550,7 @@ Dart:
Diff:
  extensions:
  - .diff
  - .patch

Dogescript:
  type: programming
@@ -617,6 +635,7 @@ Erlang:
  color: "#0faf8d"
  extensions:
  - .erl
  - .escript
  - .hrl

F#:
@@ -692,6 +711,7 @@ Forth:
  extensions:
  - .fth
  - .4th
  - .forth

Frege:
  type: programming
@@ -800,6 +820,9 @@ Gosu:
  color: "#82937f"
  extensions:
  - .gs
  - .gst
  - .gsx
  - .vark

Grace:
  type: programming
@@ -835,6 +858,7 @@ Groovy:
  color: "#e69f56"
  extensions:
  - .groovy
  - .gradle
  - .grt
  - .gtpl
  - .gvy
@@ -857,7 +881,6 @@ HTML:
  extensions:
  - .html
  - .htm
  - .html.hl
  - .st
  - .xhtml

@@ -877,9 +900,7 @@ HTML+ERB:
  - erb
  extensions:
  - .erb
  - .erb.deface
  - .html.erb
  - .html.erb.deface
  - .deface

HTML+PHP:
  type: markup
@@ -897,17 +918,14 @@ Haml:
  type: markup
  extensions:
  - .haml
  - .haml.deface
  - .html.haml.deface
  - .deface

Handlebars:
  type: markup
  lexer: Text only
  lexer: Handlebars
  extensions:
  - .handlebars
  - .hbs
  - .html.handlebars
  - .html.hbs

Harbour:
  type: programming
@@ -945,6 +963,7 @@ IDL:
  color: "#e3592c"
  extensions:
  - .pro
  - .dlm

INI:
  type: data
@@ -1019,6 +1038,7 @@ JSON:
  searchable: false
  extensions:
  - .json
  - .lock
  - .sublime-keymap
  - .sublime-mousemap
  - .sublime-project
@@ -1147,6 +1167,9 @@ Lasso:
  color: "#2584c3"
  extensions:
  - .lasso
  - .las
  - .lasso9
  - .ldml

Latte:
  type: markup
@@ -1225,6 +1248,14 @@ Logtalk:
  - .lgt
  - .logtalk

LookML:
  type: programming
  lexer: YAML
  ace_mode: yaml
  color: "#652B81"
  extensions:
  - .lookml

Lua:
  type: programming
  ace_mode: lua
@@ -1232,6 +1263,7 @@ Lua:
  extensions:
  - .lua
  - .nse
  - .pd_lua
  - .rbxs
  interpreters:
  - lua
@@ -1377,6 +1409,7 @@ Myghty:
NSIS:
  extensions:
  - .nsi
  - .nsh

Nemerle:
  type: programming
@@ -1441,6 +1474,7 @@ OCaml:
  color: "#3be133"
  extensions:
  - .ml
  - .eliom
  - .eliomi
  - .ml4
  - .mli
@@ -1461,6 +1495,7 @@ Objective-C:
  - objc
  extensions:
  - .m
  - .h

Objective-C++:
  type: programming
@@ -1508,6 +1543,13 @@ OpenEdge ABL:
  - abl
  extensions:
  - .p
  - .cls

OpenSCAD:
  type: programming
  lexer: Text only
  extensions:
  - .scad

Org:
  type: prose
@@ -1546,6 +1588,7 @@ PHP:
  - .php
  - .aw
  - .ctp
  - .module
  - .php3
  - .php4
  - .php5
@@ -1594,6 +1637,7 @@ Pascal:
  extensions:
  - .pas
  - .dfm
  - .dpr
  - .lpr

Perl:
@@ -1603,12 +1647,15 @@ Perl:
  extensions:
  - .pl
  - .PL
  - .cgi
  - .fcgi
  - .perl
  - .ph
  - .plx
  - .pm
  - .pod
  - .psgi
  - .t
  interpreters:
  - perl

@@ -1817,6 +1864,7 @@ Racket:
  - .rkt
  - .rktd
  - .rktl
  - .scrbl

Ragel in Ruby Host:
  type: programming
@@ -1886,7 +1934,10 @@ Ruby:
  - .god
  - .irbrc
  - .mspec
  - .pluginspec
  - .podspec
  - .rabl
  - .rake
  - .rbuild
  - .rbw
  - .rbx
@@ -1931,6 +1982,14 @@ SCSS:
  extensions:
  - .scss

SQF:
  type: programming
  color: "#FFCB1F"
  lexer: C++
  extensions:
  - .sqf
  - .hqf

SQL:
  type: data
  ace_mode: sql
@@ -1960,6 +2019,7 @@ Sass:
  group: CSS
  extensions:
  - .sass
  - .scss

Scala:
  type: programming
@@ -1967,6 +2027,7 @@ Scala:
  color: "#7dd3b0"
  extensions:
  - .scala
  - .sbt
  - .sc

Scaml:
@@ -1982,6 +2043,7 @@ Scheme:
  - .scm
  - .sld
  - .sls
  - .sps
  - .ss
  interpreters:
  - guile
@@ -1993,6 +2055,8 @@ Scilab:
  type: programming
  extensions:
  - .sci
  - .sce
  - .tst

Self:
  type: programming
@@ -2012,8 +2076,10 @@ Shell:
  - zsh
  extensions:
  - .sh
  - .bash
  - .bats
  - .tmux
  - .zsh
  interpreters:
  - bash
  - sh
@@ -2080,6 +2146,7 @@ Standard ML:
  extensions:
  - .ML
  - .fun
  - .sig
  - .sml

Stata:
@@ -2160,10 +2227,13 @@ TeX:
  extensions:
  - .tex
  - .aux
  - .bbx
  - .bib
  - .cbx
  - .cls
  - .dtx
  - .ins
  - .lbx
  - .ltx
  - .mkii
  - .mkiv
@@ -2280,6 +2350,7 @@ Visual Basic:
  extensions:
  - .vb
  - .bas
  - .cls
  - .frm
  - .frx
  - .vba
@@ -2308,6 +2379,7 @@ XML:
  - wsdl
  extensions:
  - .xml
  - .ant
  - .axml
  - .ccxml
  - .clixml
@@ -2321,6 +2393,7 @@ XML:
  - .fsproj
  - .glade
  - .grxml
  - .ivy
  - .jelly
  - .kml
  - .launch
File diff suppressed because it is too large
@@ -36,6 +36,10 @@
# Bootstrap minified css and js
- (^|/)bootstrap([^.]*)(\.min)?\.(js|css)$

# Font Awesome
- font-awesome.min.css
- font-awesome.css

# Foundation css
- foundation.min.css
- foundation.css
@@ -47,6 +51,10 @@
- (^|/)[Bb]ourbon/.*\.css$
- (^|/)[Bb]ourbon/.*\.scss$

# Animate.css
- animate.css
- animate.min.css

# Vendored dependencies
- thirdparty/
- vendors?/
@@ -116,6 +124,10 @@
- (^|/)modernizr\-\d\.\d+(\.\d+)?(\.min)?\.js$
- (^|/)modernizr\.custom\.\d+\.js$

# Knockout
- (^|/)knockout-(\d+\.){3}(debug\.)?js$
- knockout-min.js

## Python ##

# django
@@ -1,3 +1,3 @@
module Linguist
  VERSION = "3.1.0"
  VERSION = "3.1.1"
end
304 samples/Chapel/distributions.chpl (Normal file)
@@ -0,0 +1,304 @@
|
||||
//
|
||||
// Distributions Primer
|
||||
//
|
||||
// This primer demonstrates uses of some of Chapel's standard
|
||||
// distributions. To use these distributions in a Chapel program,
|
||||
// the respective module must be used:
|
||||
//
|
||||
use BlockDist, CyclicDist, BlockCycDist, ReplicatedDist;
|
||||
use DimensionalDist2D, ReplicatedDim, BlockCycDim;
|
||||
|
||||
//
|
||||
// For each distribution, we'll create a distributed domain and array
|
||||
// and then initialize it just to give a brief flavor of how the
|
||||
// distribution maps across locales. Running this example on 6
|
||||
// locales does a nice job of illustrating the distribution
|
||||
// characteristics.
|
||||
//
|
||||
// All of these distributions support options to map to a different
|
||||
// virtual locale grid than the one used by default (a
|
||||
// multidimensional factoring of the built-in Locales array), as well
|
||||
// as to control the amount of parallelism used in data parallel
|
||||
// loops. See the Standard Distributions chapter of the language spec
|
||||
// for more details.
|
||||
//
|
||||
|
||||
//
|
||||
// Make the program size configurable from the command line.
|
||||
//
|
||||
config const n = 8;
|
||||
|
||||
//
|
||||
// Declare a 2-dimensional domain Space that we will later use to
|
||||
// initialize the distributed domains.
|
||||
//
|
||||
const Space = {1..n, 1..n};
|
||||
|
||||
//
|
||||
// The Block distribution distributes a bounding box from
|
||||
// n-dimensional space across the target locale array viewed as an
|
||||
// n-dimensional virtual locale grid. The bounding box is blocked
|
||||
// into roughly equal portions across the locales. Note that domains
|
||||
// declared over a Block distribution can also store indices outside
|
||||
// of the bounding box; the bounding box is merely used to compute
|
||||
// the blocking of space.
|
||||
//
|
||||
// In this example, we declare a 2-dimensional Block-distributed
|
||||
// domain BlockSpace and a Block-distributed array BA declared over
|
||||
// the domain.
|
||||
//
|
||||
const BlockSpace = Space dmapped Block(boundingBox=Space);
|
||||
var BA: [BlockSpace] int;
|
||||
|
||||
//
|
||||
// To illustrate how the index set is distributed across locales,
|
||||
// we'll use a forall loop to initialize each array element to the
|
||||
// locale ID that stores that index/element/iteration.
|
||||
//
|
||||
forall ba in BA do
|
||||
ba = here.id;
|
||||
|
||||
//
|
||||
// Output the Block-distributed array to visually see how the elements
|
||||
// are partitioned across the locales.
|
||||
//
|
||||
writeln("Block Array Index Map");
|
||||
writeln(BA);
|
||||
writeln();
|
||||
|
||||
//
|
||||
// Most of Chapel's standard distributions support an optional
|
||||
// targetLocales argument that permits you to pass in your own
|
||||
// array of locales to be targeted. In general, the targetLocales
|
||||
// argument should match the rank of the distribution. So for
|
||||
// example, to map a Block to a [numLocales x 1] view of the
|
||||
// locale set, one could do something like this:
|
||||
|
||||
//
|
||||
// We start by creating our own array of the locale values. Here
|
||||
// we use the standard array reshape function for convenience,
|
||||
// but more generally, this array could be accessed/assigned like any
|
||||
// other.
|
||||
//
|
||||
|
||||
var MyLocaleView = {0..#numLocales, 1..1};
|
||||
var MyLocales: [MyLocaleView] locale = reshape(Locales, MyLocaleView);
|
||||
|
||||
//
|
||||
// Then we'll declare a distributed domain/array that targets
|
||||
// this view of the locales:
|
||||
//
|
||||
|
||||
const BlockSpace2 = Space dmapped Block(boundingBox=Space,
|
||||
targetLocales=MyLocales);
|
||||
var BA2: [BlockSpace2] int;
|
||||
|
||||
//
|
||||
// Then we'll do a similar computation as before to verify where
|
||||
// everything ended up:
|
||||
//
|
||||
forall ba in BA2 do
|
||||
ba = here.id;
|
||||
|
||||
writeln("Block Array Index Map");
|
||||
writeln(BA2);
|
||||
writeln();
|
||||
|
||||
|
||||
|
||||
//
|
||||
// Next, we'll perform a similar computation for the Cyclic distribution.
|
||||
// Cyclic distributions start at a designated n-dimensional index and
|
||||
// distribute the n-dimensional space across an n-dimensional array
|
||||
// of locales in a round-robin fashion (in each dimension). As with
|
||||
// the Block distribution, domains may be declared using the
|
||||
// distribution who have lower indices that the starting index; that
|
||||
// value should just be considered a parameterization of how the
|
||||
// distribution is defined.
|
||||
//
|
||||
const CyclicSpace = Space dmapped Cyclic(startIdx=Space.low);
|
||||
var CA: [CyclicSpace] int;
|
||||
|
||||
forall ca in CA do
|
||||
ca = here.id;
|
||||
|
||||
writeln("Cyclic Array Index Map");
|
||||
writeln(CA);
|
||||
writeln();
|
||||
|
||||
|
||||
//
|
||||
// Next, we'll declare a Block-Cyclic distribution. These
|
||||
// distributions also deal out indices in a round-robin fashion,
|
||||
// but rather than dealing out singleton indices, they deal out blocks
|
||||
// of indices. Thus, the BlockCyclic distribution is parameterized
|
||||
// by a starting index (as with Cyclic) and a block size (per
|
||||
// dimension) specifying how large the chunks to be dealt out are.
|
||||
//
|
||||
const BlkCycSpace = Space dmapped BlockCyclic(startIdx=Space.low,
|
||||
blocksize=(2, 3));
|
||||
var BCA: [BlkCycSpace] int;
|
||||
|
||||
forall bca in BCA do
|
||||
bca = here.id;
|
||||
|
||||
writeln("Block-Cyclic Array Index Map");
|
||||
writeln(BCA);
|
||||
writeln();
|
||||
|
||||
|
||||
//
|
||||
// The ReplicatedDist distribution is different: each of the
|
||||
// original domain's indices - and the corresponding array elements -
|
||||
// is replicated onto each locale. (Note: consistency among these
|
||||
// array replicands is NOT maintained automatically.)
|
||||
//
|
||||
// This replication is observable in some cases but not others,
|
||||
// as shown below. Note: this behavior may change in the future.
|
||||
//
|
||||
const ReplicatedSpace = Space dmapped ReplicatedDist();
|
||||
var RA: [ReplicatedSpace] int;
|
||||
|
||||
// The replication is observable - this visits each replicand.
|
||||
forall ra in RA do
|
||||
ra = here.id;
|
||||
|
||||
writeln("Replicated Array Index Map, ", RA.numElements, " elements total");
|
||||
writeln(RA);
|
||||
writeln();
|
||||
|
||||
//
|
||||
// The replication is observable when the replicated array is
|
||||
// on the left-hand side. If the right-hand side is not replicated,
|
||||
// it is copied into each replicand.
|
||||
// We illustrate this using a non-distributed array.
|
||||
//
|
||||
var A: [Space] int = [(i,j) in Space] i*100 + j;
|
||||
RA = A;
|
||||
writeln("Replicated Array after being array-assigned into");
|
||||
writeln(RA);
|
||||
writeln();
|
||||
|
||||
//
|
||||
// Analogously, each replicand will be visited and
|
||||
// other participated expressions will be computed on each locale
|
||||
// (a) when the replicated array is assigned a scalar:
|
||||
// RA = 5;
|
||||
// (b) when it appears first in a zippered forall loop:
|
||||
// forall (ra, a) in zip(RA, A) do ...;
|
||||
// (c) when it appears in a for loop:
|
||||
// for ra in RA do ...;
|
||||
//
|
||||
// Zippering (RA,A) or (A,RA) in a 'for' loop will generate
|
||||
// an error due to their different number of elements.
|
||||
|
||||
// Let RA store the Index Map again, for the examples below.
|
||||
forall ra in RA do
|
||||
ra = here.id;
|
||||
|
||||
//
|
||||
// Only the local replicand is accessed - replication is NOT observable
|
||||
// and consistency is NOT maintained - when:
|
||||
// (a) the replicated array is indexed - an individual element is read...
|
||||
//
|
||||
on Locales(0) do
|
||||
writeln("on ", here, ": ", RA(Space.low));
|
||||
on Locales(LocaleSpace.high) do
|
||||
writeln("on ", here, ": ", RA(Space.low));
|
||||
writeln();
|
||||
|
||||
// ...or an individual element is written;
|
||||
on Locales(LocaleSpace.high) do
|
||||
RA(Space.low) = 7777;
|
||||
|
||||
writeln("Replicated Array after being indexed into");
|
||||
writeln(RA);
|
||||
writeln();
|
||||
|
||||
//
|
||||
// (b) the replicated array is on the right-hand side of an assignment...
|
||||
//
|
||||
on Locales(LocaleSpace.high) do
|
||||
A = RA + 4;
|
||||
writeln("Non-Replicated Array after assignment from Replicated Array + 4");
|
||||
writeln(A);
|
||||
writeln();
|
||||
|
||||
//
|
||||
// (c) ...or, generally, the replicated array or domain participates
|
||||
// in a zippered forall loop, but not in the first position.
|
||||
// The loop could look like:
|
||||
//
|
||||
// forall (a, (i,j), ra) in (A, ReplicatedSpace, RA) do ...;
|
||||
//
|
||||
|
||||
|
||||
//
|
||||
// The DimensionalDist2D distribution lets us build a 2D distribution
|
||||
// as a composition of specifiers for individual dimensions.
|
||||
// Under such a "dimensional" distribution each dimension is handled
|
||||
// independently of the other.
|
||||
//
|
||||
// The dimension specifiers are similar to the corresponding multi-dimensional
|
||||
// distributions in constructor arguments and index-to-locale mapping rules.
|
||||
// However, instead of an array of locales, a specifier constructor
|
||||
// accepts just the number of locales that the indices in the corresponding
|
||||
// dimension will be distributed across.
|
||||
//
|
||||
// The DimensionalDist2D constructor requires:
|
||||
// * an [0..nl1-1, 0..nl2-1] array of locales, where
|
||||
// nl1 and nl2 are the number of locales in each dimension, and
|
||||
// * two dimension specifiers, created for nl1 and nl2 locale counts, resp.
|
||||
//
|
||||
// Presently, the following dimension specifiers are available
|
||||
// (shown here with their constructor arguments):
|
||||
//
|
||||
// * ReplicatedDim(numLocales)
|
||||
// * BlockDim(numLocales, boundingBoxLow, boundingBoxHigh)
|
||||
// * BlockCyclicDim(lowIdx, blockSize, numLocales)
|
||||
//
|
||||
|
||||
//
|
||||
// The following example creates a dimensional distribution that
|
||||
// replicates over 2 locales (when available) in the first dimemsion
|
||||
// and distributes using block-cyclic distribution in the second dimension.
|
||||
// The example computes nl1 and nl2 and reshapes MyLocales correspondingly.
|
||||
//
|
||||
|
||||
var (nl1, nl2) = if numLocales == 1 then (1, 1) else (2, numLocales/2);
|
||||
MyLocaleView = {0..#nl1, 0..#nl2};
|
||||
MyLocales = reshape(Locales[0..#nl1*nl2], MyLocaleView);
|
||||
|
||||
const DimReplicatedBlockcyclicSpace = Space
|
||||
dmapped DimensionalDist2D(MyLocales,
|
||||
new ReplicatedDim(numLocales = nl1),
|
||||
new BlockCyclicDim(numLocales = nl2,
|
||||
lowIdx = 1, blockSize = 2));
|
||||
|
||||
var DRBA: [DimReplicatedBlockcyclicSpace] int;
|
||||
|
||||
// The ReplicatedDim specifier always accesses the local replicand.
|
||||
// (This differs from how the ReplicatedDist distribution works.)
|
||||
//
|
||||
// This example visits each replicand. The behavior is the same
|
||||
// regardless of the second index into MyLocales below.
|
||||
|
||||
for locId1 in 0..#nl1 do on MyLocales[locId1, 0] {
|
||||
|
||||
forall drba in DRBA do
|
||||
drba = here.id;
|
||||
|
||||
writeln("Dimensional2D(Replicated,BlockCyclic) Array Index Map",
|
||||
" from ", here);
|
||||
|
||||
// Technicality: 'writeln(DRBA)' would read DRBA always on Locale 0.
|
||||
// Since we want to see what DRBA contains on the current locale,
|
||||
// we use 'Helper' that is mapped using the default distribution.
|
||||
// 'Helper = DRBA' captures the view of DRBA on the current locale,
|
||||
// which we then print out.
|
||||
|
||||
const Helper: [Space] int = DRBA;
|
||||
writeln(Helper);
|
||||
writeln();
|
||||
|
||||
}
|
||||
1 samples/Chapel/hello.chpl (Normal file)
@@ -0,0 +1 @@
writeln("Hello, world!"); // print 'Hello, world!' to the console
1692 samples/Chapel/lulesh.chpl (Normal file)
File diff suppressed because it is too large
147 samples/Chapel/nbody.chpl (Normal file)
@@ -0,0 +1,147 @@
|
||||
/* The Computer Language Benchmarks Game
|
||||
http://benchmarksgame.alioth.debian.org/
|
||||
|
||||
contributed by Albert Sidelnik
|
||||
modified by Brad Chamberlain
|
||||
*/
|
||||
|
||||
|
||||
//
|
||||
// The number of timesteps to simulate; may be set via the command-line
|
||||
//
|
||||
config const n = 10000;
|
||||
|
||||
//
|
||||
// Constants representing pi, the solar mass, and the number of days per year
|
||||
//
|
||||
const pi = 3.141592653589793,
|
||||
solarMass = 4 * pi**2,
|
||||
daysPerYear = 365.24;
|
||||
|
||||
//
|
||||
// a record representing one of the bodies in the solar system
|
||||
//
|
||||
record body {
|
||||
var pos: 3*real;
|
||||
var v: 3*real;
|
||||
var mass: real; // does not change after it is set up
|
||||
}
|
||||
|
||||
//
|
||||
// the array of bodies that we'll be simulating
|
||||
//
|
||||
var bodies = [/* sun */
|
||||
new body(mass = solarMass),
|
||||
|
||||
/* jupiter */
|
||||
new body(pos = ( 4.84143144246472090e+00,
|
||||
-1.16032004402742839e+00,
|
||||
-1.03622044471123109e-01),
|
||||
v = ( 1.66007664274403694e-03 * daysPerYear,
|
||||
7.69901118419740425e-03 * daysPerYear,
|
||||
-6.90460016972063023e-05 * daysPerYear),
|
||||
mass = 9.54791938424326609e-04 * solarMass),
|
||||
|
||||
/* saturn */
|
||||
new body(pos = ( 8.34336671824457987e+00,
|
||||
4.12479856412430479e+00,
|
||||
-4.03523417114321381e-01),
|
||||
v = (-2.76742510726862411e-03 * daysPerYear,
|
||||
4.99852801234917238e-03 * daysPerYear,
|
||||
2.30417297573763929e-05 * daysPerYear),
|
||||
mass = 2.85885980666130812e-04 * solarMass),
|
||||
|
||||
/* uranus */
|
||||
new body(pos = ( 1.28943695621391310e+01,
|
||||
-1.51111514016986312e+01,
|
||||
-2.23307578892655734e-01),
|
||||
v = ( 2.96460137564761618e-03 * daysPerYear,
|
||||
2.37847173959480950e-03 * daysPerYear,
|
||||
-2.96589568540237556e-05 * daysPerYear),
|
||||
mass = 4.36624404335156298e-05 * solarMass),
|
||||
|
||||
/* neptune */
|
||||
new body(pos = ( 1.53796971148509165e+01,
|
||||
-2.59193146099879641e+01,
|
||||
1.79258772950371181e-01),
|
||||
v = ( 2.68067772490389322e-03 * daysPerYear,
|
||||
1.62824170038242295e-03 * daysPerYear,
|
||||
-9.51592254519715870e-05 * daysPerYear),
|
||||
mass = 5.15138902046611451e-05 * solarMass)
|
||||
];
|
||||
|
||||
//
|
||||
// the number of bodies to be simulated
|
||||
//
|
||||
const numbodies = bodies.numElements;
|
||||
|
||||
//
|
||||
// The computation involves initializing the sun's velocity,
|
||||
// writing the initial energy, advancing the system through 'n'
|
||||
// timesteps, and writing the final energy.
|
||||
//
|
||||
proc main() {
|
||||
initSun();
|
||||
|
||||
writef("%.9r\n", energy());
|
||||
for 1..n do
|
||||
advance(0.01);
|
||||
writef("%.9r\n", energy());
|
||||
}
|
||||
|
||||
//
|
||||
// compute the sun's initial velocity
|
||||
//
|
||||
proc initSun() {
|
||||
const p = + reduce (for b in bodies do (b.v * b.mass));
|
||||
bodies[1].v = -p / solarMass;
|
||||
}
|
||||
|
||||
//
|
||||
// advance the positions and velocities of all the bodies
|
||||
//
|
||||
proc advance(dt) {
|
||||
for i in 1..numbodies {
|
||||
for j in i+1..numbodies {
|
||||
updateVelocities(bodies[i], bodies[j]);
|
||||
|
||||
inline proc updateVelocities(ref b1, ref b2) {
|
||||
const dpos = b1.pos - b2.pos,
|
||||
mag = dt / sqrt(sumOfSquares(dpos))**3;
|
||||
|
||||
b1.v -= dpos * b2.mass * mag;
|
||||
b2.v += dpos * b1.mass * mag;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for b in bodies do
|
||||
b.pos += dt * b.v;
|
||||
}
|
||||
|
||||
//
|
||||
// compute the energy of the bodies
|
||||
//
|
||||
proc energy() {
|
||||
var e = 0.0;
|
||||
|
||||
for i in 1..numbodies {
|
||||
const b1 = bodies[i];
|
||||
|
||||
e += 0.5 * b1.mass * sumOfSquares(b1.v);
|
||||
|
||||
for j in i+1..numbodies {
|
||||
const b2 = bodies[j];
|
||||
|
||||
e -= (b1.mass * b2.mass) / sqrt(sumOfSquares(b1.pos - b2.pos));
|
||||
}
|
||||
}
|
||||
|
||||
return e;
|
||||
}
|
||||
|
||||
//
|
||||
// a helper routine to compute the sum of squares of a 3-tuple's components
|
||||
//
|
||||
inline proc sumOfSquares(x)
|
||||
return x(1)**2 + x(2)**2 + x(3)**2;
|
||||
145 samples/Chapel/quicksort.chpl (Normal file)
@@ -0,0 +1,145 @@
|
||||
//
|
||||
// An example of a parallel quick sort implementation that uses
|
||||
// "cobegin" to make each recursive call in parallel and "serial" to
|
||||
// limit the number of threads.
|
||||
//
|
||||
|
||||
use Random, Time; // for random number generation and the Timer class
|
||||
|
||||
var timer: Timer; // to time the sort
|
||||
|
||||
config var n: int = 2**15; // the size of the array to be sorted
|
||||
config var thresh: int = 1; // the recursive depth to serialize
|
||||
config var verbose: int = 0; // print out this many elements in array
|
||||
config var timing: bool = true; // set timing to false to disable timer
|
||||
|
||||
var A: [1..n] real; // array of real numbers
|
||||
|
||||
//
|
||||
// initialize array with random numbers
|
||||
//
|
||||
fillRandom(A);
|
||||
|
||||
//
|
||||
// print out front of array if verbose flag is set
|
||||
//
|
||||
if verbose > 0 then
|
||||
writeln("A[1..", verbose, "] = ", A[1..verbose]);
|
||||
|
||||
//
|
||||
// start timer, call parallel quick sort routine, stop timer
|
||||
//
|
||||
if timing then timer.start();
|
||||
pqsort(A, thresh);
|
||||
if timing then timer.stop();
|
||||
|
||||
//
|
||||
// report sort time
|
||||
//
|
||||
if timing then writeln("sorted in ", timer.elapsed(), " seconds");
|
||||
|
||||
//
|
||||
// print out front of array if verbose flag is set
|
||||
// values should now be in sorted order
|
||||
//
|
||||
if verbose > 0 then
|
||||
writeln("A[1..", verbose, "] = ", A[1..verbose]);
|
||||
|
||||
//
|
||||
// verify that array is sorted or halt
|
||||
//
|
||||
for i in 2..n do
|
||||
if A(i) < A(i-1) then
|
||||
halt("A(", i-1, ") == ", A(i-1), " > A(", i, ") == ", A(i));
|
||||
|
||||
writeln("verification success");
|
||||
|
||||
//
|
||||
// pqsort -- parallel quick sort
|
||||
//
|
||||
// arr: generic 1D array of values (real, int, ...)
|
||||
// thresh: number of recursive calls to make before serializing
|
||||
// low: lower bound of array to start sort at, defaults to whole array
|
||||
// high: upper bound of array to stop sort at, defaults to whole array
|
||||
//
|
||||
proc pqsort(arr: [],
|
||||
thresh: int,
|
||||
low: int = arr.domain.low,
|
||||
high: int = arr.domain.high) where arr.rank == 1 {
|
||||
|
||||
//
|
||||
// base case: arr[low..high] is small enough to bubble sort
|
||||
//
|
||||
if high - low < 8 {
|
||||
bubbleSort(arr, low, high);
|
||||
return;
|
||||
}
|
||||
|
||||
//
|
||||
// determine pivot and partition arr[low..high]
|
||||
//
|
||||
const pivotVal = findPivot();
|
||||
const pivotLoc = partition(pivotVal);
|
||||
|
||||
//
|
||||
// make recursive calls to parallel quick sort each unsorted half of
|
||||
// the array; if thresh is 0 or less, start executing conquer tasks
|
||||
// serially
|
||||
//
|
||||
serial thresh <= 0 do cobegin {
|
||||
pqsort(arr, thresh-1, low, pivotLoc-1);
|
||||
pqsort(arr, thresh-1, pivotLoc+1, high);
|
||||
}
|
||||
|
||||
//
|
||||
// findPivot -- helper routine to find pivot value using simple
|
||||
// median-of-3 method, returns pivot value
|
||||
//
|
||||
proc findPivot() {
|
||||
const mid = low + (high-low+1) / 2;
|
||||
|
||||
if arr(mid) < arr(low) then arr(mid) <=> arr(low);
|
||||
if arr(high) < arr(low) then arr(high) <=> arr(low);
|
||||
if arr(high) < arr(mid) then arr(high) <=> arr(mid);
|
||||
|
||||
const pivotVal = arr(mid);
|
||||
arr(mid) = arr(high-1);
|
||||
arr(high-1) = pivotVal;
|
||||
|
||||
return pivotVal;
|
||||
}
|
||||
|
||||
//
|
||||
// partition -- helper routine to partition array such that all
|
||||
// values less than pivot are to its left and all
|
||||
// values greater than pivot are to its right, returns
|
||||
// pivot location
|
||||
//
|
||||
proc partition(pivotVal) {
|
||||
var ilo = low, ihi = high-1;
|
||||
while (ilo < ihi) {
|
||||
do { ilo += 1; } while arr(ilo) < pivotVal;
|
||||
do { ihi -= 1; } while pivotVal < arr(ihi);
|
||||
if (ilo < ihi) {
|
||||
arr(ilo) <=> arr(ihi);
|
||||
}
|
||||
}
|
||||
arr(high-1) = arr(ilo);
|
||||
arr(ilo) = pivotVal;
|
||||
return ilo;
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// bubbleSort -- bubble sort for base case of quick sort
|
||||
//
|
||||
// arr: generic 1D array of values (real, int, ...)
|
||||
// low: lower bound of array to start sort at
|
||||
// high: upper bound of array to stop sort at
|
||||
//
|
||||
proc bubbleSort(arr: [], low: int, high: int) where arr.rank == 1 {
|
||||
for i in low..high do
|
||||
for j in low..high-1 do
|
||||
if arr(j) > arr(j+1) then
|
||||
arr(j) <=> arr(j+1);
|
||||
}
|
||||
146 samples/Clojure/index.cljs.hl (Normal file)
@@ -0,0 +1,146 @@
|
||||
;; Copyright (c) Alan Dipert and Micha Niskin. All rights reserved.
|
||||
;; The use and distribution terms for this software are covered by the
|
||||
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
|
||||
;; which can be found in the file epl-v10.html at the root of this distribution.
|
||||
;; By using this software in any fashion, you are agreeing to be bound by
|
||||
;; the terms of this license.
|
||||
;; You must not remove this notice, or any other, from this software.
|
||||
|
||||
(page "index.html"
|
||||
(:refer-clojure :exclude [nth])
|
||||
(:require
|
||||
[tailrecursion.hoplon.reload :refer [reload-all]]
|
||||
[tailrecursion.hoplon.util :refer [nth name pluralize]]
|
||||
[tailrecursion.hoplon.storage-atom :refer [local-storage]]))
|
||||
|
||||
;; utility functions ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(declare route state editing)
|
||||
|
||||
(reload-all)
|
||||
|
||||
(def mapvi (comp vec map-indexed))
|
||||
|
||||
(defn dissocv [v i]
|
||||
(let [z (- (dec (count v)) i)]
|
||||
(cond (neg? z) v
|
||||
(zero? z) (pop v)
|
||||
(pos? z) (into (subvec v 0 i) (subvec v (inc i))))))
|
||||
|
||||
(defn decorate [todo route editing i]
|
||||
(let [{done? :completed text :text} todo]
|
||||
(-> todo (assoc :editing (= editing i)
|
||||
:visible (and (not (empty? text))
|
||||
(or (= "#/" route)
|
||||
(and (= "#/active" route) (not done?))
|
||||
(and (= "#/completed" route) done?)))))))
|
||||
|
||||
;; persisted state cell (AKA: stem cell) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(def state (-> (cell []) (local-storage ::store)))
|
||||
|
||||
;; local state cells ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defc loaded? false)
|
||||
(defc editing nil)
|
||||
(def route (route-cell "#/"))
|
||||
|
||||
;; formula cells (computed state) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defc= completed (filter :completed state))
|
||||
(defc= active (remove :completed state))
|
||||
(defc= plural-item (pluralize "item" (count active)))
|
||||
(defc= todos (mapvi #(list %1 (decorate %2 route editing %1)) state))
|
||||
|
||||
;; state transition functions ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(defn todo [t] {:completed false :text t})
|
||||
(defn destroy! [i] (swap! state dissocv i))
|
||||
(defn done! [i v] (swap! state assoc-in [i :completed] v))
|
||||
(defn clear-done! [& _] (swap! state #(vec (remove :completed %))))
|
||||
(defn new! [t] (when (not (empty? t)) (swap! state conj (todo t))))
|
||||
(defn all-done! [v] (swap! state #(mapv (fn [x] (assoc x :completed v)) %)))
|
||||
(defn editing! [i v] (reset! editing (if v i nil)))
|
||||
(defn text! [i v] (if (empty? v) (destroy! i) (swap! state assoc-in [i :text] v)))
|
||||
|
||||
;; page ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
(html :lang "en"
|
||||
(head
|
||||
(meta :charset "utf-8")
|
||||
(meta :http-equiv "X-UA-Compatible" :content "IE=edge,chrome=1")
|
||||
(link :rel "stylesheet" :href "base.css")
|
||||
(title "Hoplon • TodoMVC"))
|
||||
(body
|
||||
(noscript
|
||||
(div :id "noscript"
|
||||
(p "JavaScript is required to view this page.")))
|
||||
(div
|
||||
(section :id "todoapp"
|
||||
(header :id "header"
|
||||
(h1 "todos")
|
||||
(form :on-submit #(do (new! (val-id :new-todo))
|
||||
(do! (by-id :new-todo) :value ""))
|
||||
(input
|
||||
:id "new-todo"
|
||||
:type "text"
|
||||
:autofocus true
|
||||
:placeholder "What needs to be done?"
|
||||
:on-blur #(do! (by-id :new-todo) :value ""))))
|
||||
(section
|
||||
:id "main"
|
||||
:do-toggle (cell= (not (and (empty? active) (empty? completed))))
|
||||
(input
|
||||
:id "toggle-all"
|
||||
:type "checkbox"
|
||||
:do-attr (cell= {:checked (empty? active)})
|
||||
:on-click #(all-done! (val-id :toggle-all)))
|
||||
(label :for "toggle-all"
|
||||
"Mark all as complete")
|
||||
(ul :id "todo-list"
|
||||
(loop-tpl
|
||||
:reverse true
|
||||
:bind-ids [done# edit#]
|
||||
:bindings [[i {edit? :editing done? :completed todo-text :text show? :visible}] todos]
|
||||
(li
|
||||
:do-class (cell= {:completed done? :editing edit?})
|
||||
:do-toggle show?
|
||||
(div :class "view" :on-dblclick #(editing! @i true)
|
||||
(input
|
||||
:id done#
|
||||
:type "checkbox"
|
||||
:class "toggle"
|
||||
:do-attr (cell= {:checked done?})
|
||||
:on-click #(done! @i (val-id done#)))
|
||||
(label (text "~{todo-text}"))
|
||||
(button
|
||||
:type "submit"
|
||||
:class "destroy"
|
||||
:on-click #(destroy! @i)))
|
||||
(form :on-submit #(editing! @i false)
|
||||
(input
|
||||
:id edit#
|
||||
:type "text"
|
||||
:class "edit"
|
||||
:do-value todo-text
|
||||
:do-focus edit?
|
||||
:on-blur #(when @edit? (editing! @i false))
|
||||
:on-change #(when @edit? (text! @i (val-id edit#)))))))))
|
||||
(footer
|
||||
:id "footer"
|
||||
:do-toggle (cell= (not (and (empty? active) (empty? completed))))
|
||||
(span :id "todo-count"
|
||||
(strong (text "~(count active) "))
|
||||
(span (text "~{plural-item} left")))
|
||||
(ul :id "filters"
|
||||
(li (a :href "#/" :do-class (cell= {:selected (= "#/" route)}) "All"))
|
||||
(li (a :href "#/active" :do-class (cell= {:selected (= "#/active" route)}) "Active"))
|
||||
(li (a :href "#/completed" :do-class (cell= {:selected (= "#/completed" route)}) "Completed")))
|
||||
(button
|
||||
:type "submit"
|
||||
:id "clear-completed"
|
||||
:on-click #(clear-done!)
|
||||
(text "Clear completed (~(count completed))"))))
|
||||
(footer :id "info"
|
||||
(p "Double-click to edit a todo")
|
||||
(p "Part of " (a :href "http://github.com/tailrecursion/hoplon-demos/" "hoplon-demos"))))))
|
||||
580 samples/Cycript/utils.cy (Normal file)
@@ -0,0 +1,580 @@
|
||||
(function(utils) {
|
||||
// Load C functions declared in utils.loadFuncs
|
||||
var shouldLoadCFuncs = true;
|
||||
// Expose the C functions to cycript's global scope
|
||||
var shouldExposeCFuncs = true;
|
||||
// Expose C constants to cycript's global scope
|
||||
var shouldExposeConsts = true;
|
||||
// Expose functions defined here to cycript's global scope
|
||||
var shouldExposeFuncs = true;
|
||||
// Which functions to expose
|
||||
var funcsToExpose = ["exec", "include", "sizeof", "logify", "apply", "str2voidPtr", "voidPtr2str", "double2voidPtr", "voidPtr2double", "isMemoryReadable", "isObject", "makeStruct"];
|
||||
|
||||
// C functions that utils.loadFuncs loads
|
||||
var CFuncsDeclarations = [
|
||||
// <stdlib.h>
|
||||
"void *calloc(size_t num, size_t size)",
|
||||
// <string.h>
|
||||
"char *strcpy(char *restrict dst, const char *restrict src)",
|
||||
"char *strdup(const char *s1)",
|
||||
"void* memset(void* dest, int ch, size_t count)",
|
||||
// <stdio.h>
|
||||
"FILE *fopen(const char *, const char *)",
|
||||
"int fclose(FILE *)",
|
||||
"size_t fread(void *restrict, size_t, size_t, FILE *restrict)",
|
||||
"size_t fwrite(const void *restrict, size_t, size_t, FILE *restrict)",
|
||||
// <mach.h>
|
||||
"mach_port_t mach_task_self()",
|
||||
"kern_return_t task_for_pid(mach_port_name_t target_tport, int pid, mach_port_name_t *tn)",
|
||||
"kern_return_t mach_vm_protect(vm_map_t target_task, mach_vm_address_t address, mach_vm_size_t size, boolean_t set_maximum, vm_prot_t new_protection)",
|
||||
"kern_return_t mach_vm_write(vm_map_t target_task, mach_vm_address_t address, vm_offset_t data, mach_msg_type_number_t dataCnt)",
|
||||
"kern_return_t mach_vm_read(vm_map_t target_task, mach_vm_address_t address, mach_vm_size_t size, vm_offset_t *data, mach_msg_type_number_t *dataCnt)",
|
||||
];
|
||||
|
||||
/*
|
||||
Replacement for eval that can handle @encode etc.
|
||||
|
||||
Usage:
|
||||
cy# utils.exec("@encode(void *(int, char))")
|
||||
@encode(void*(int,char))
|
||||
*/
|
||||
utils.exec = function(str) {
|
||||
var mkdir = @encode(int (const char *, int))(dlsym(RTLD_DEFAULT, "mkdir"));
|
||||
var tempnam = @encode(char *(const char *, const char *))(dlsym(RTLD_DEFAULT, "tempnam"));
|
||||
var fopen = @encode(void *(const char *, const char *))(dlsym(RTLD_DEFAULT, "fopen"));
|
||||
var fclose = @encode(int (void *))(dlsym(RTLD_DEFAULT, "fclose"));
|
||||
var fwrite = @encode(int (const char *, int, int, void *))(dlsym(RTLD_DEFAULT, "fwrite"));
|
||||
var symlink = @encode(int (const char *, const char *))(dlsym(RTLD_DEFAULT, "symlink"));
|
||||
var unlink = @encode(int (const char *))(dlsym(RTLD_DEFAULT, "unlink"));
|
||||
var getenv = @encode(const char *(const char *))(dlsym(RTLD_DEFAULT, "getenv"));
|
||||
var setenv = @encode(int (const char *, const char *, int))(dlsym(RTLD_DEFAULT, "setenv"));
|
||||
|
||||
var libdir = "/usr/lib/cycript0.9";
|
||||
var dir = libdir + "/tmp";
|
||||
|
||||
mkdir(dir, 0777);
|
||||
|
||||
// This is needed because tempnam seems to ignore the first argument on i386
|
||||
var old_tmpdir = getenv("TMPDIR");
|
||||
setenv("TMPDIR", dir, 1);
|
||||
|
||||
// No freeing :(
|
||||
var f = tempnam(dir, "exec-");
|
||||
setenv("TMPDIR", old_tmpdir, 1);
|
||||
if(!f) {
|
||||
return false;
|
||||
}
|
||||
|
||||
symlink(f, f + ".cy");
|
||||
|
||||
str = "exports.result = " + str;
|
||||
|
||||
var handle = fopen(f, "w");
|
||||
fwrite(str, str.length, 1, handle);
|
||||
fclose(handle);
|
||||
|
||||
var r;
|
||||
var except = null;
|
||||
try {
|
||||
r = require(f.replace(libdir + "/", ""));
|
||||
} catch(e) {
|
||||
except = e;
|
||||
}
|
||||
|
||||
unlink(f + ".cy");
|
||||
unlink(f);
|
||||
|
||||
if(except !== null) {
|
||||
throw except;
|
||||
}
|
||||
|
||||
return r.result;
|
||||
};
|
||||
|
||||
/*
|
||||
Applies known typedefs
|
||||
Used in utils.include and utils.makeStruct
|
||||
|
||||
Usage:
|
||||
cy# utils.applyTypedefs("mach_vm_address_t")
|
||||
"uint64_t"
|
||||
*/
|
||||
utils.applyTypedefs = function(str) {
|
||||
var typedefs = {
|
||||
"struct": "",
|
||||
"restrict": "",
|
||||
"FILE": "void",
|
||||
"size_t": "uint64_t",
|
||||
"uintptr_t": "unsigned long",
|
||||
"kern_return_t": "int",
|
||||
"mach_port_t": "unsigned int",
|
||||
"mach_port_name_t": "unsigned int",
|
||||
"vm_offset_t": "unsigned long",
|
||||
"vm_size_t": "unsigned long",
|
||||
"mach_vm_address_t": "uint64_t",
|
||||
"mach_vm_offset_t": "uint64_t",
|
||||
"mach_vm_size_t": "uint64_t",
|
||||
"vm_map_offset_t": "uint64_t",
|
||||
"vm_map_address_t": "uint64_t",
|
||||
"vm_map_size_t": "uint64_t",
|
||||
"mach_port_context_t": "uint64_t",
|
||||
"vm_map_t": "unsigned int",
|
||||
"boolean_t": "unsigned int",
|
||||
"vm_prot_t": "int",
|
||||
"mach_msg_type_number_t": "unsigned int",
|
||||
"cpu_type_t": "int",
|
||||
"cpu_subtype_t": "int",
|
||||
"cpu_threadtype_t": "int",
|
||||
};
|
||||
|
||||
for(var k in typedefs) {
|
||||
str = str.replace(new RegExp("(\\s|\\*|,|\\(|^)" + k + "(\\s|\\*|,|\\)|$)", "g"), "$1" + typedefs[k] + "$2");
|
||||
}
|
||||
|
||||
return str;
|
||||
};
|
||||
|
||||
/*
|
||||
Parses a C function declaration and returns the function name and cycript type
|
||||
If load is true, tries to load it into cycript using utils.exec
|
||||
|
||||
Usage:
|
||||
cy# var str = "void *calloc(size_t num, size_t size)";
|
||||
"void *calloc(size_t num, size_t size)"
|
||||
cy# utils.include(str)
|
||||
["calloc","@encode(void *(uint64_t num, uint64_t size))(140735674376857)"]
|
||||
cy# var ret = utils.include(str, true)
|
||||
["calloc",0x7fff93e0e299]
|
||||
cy# ret[1].type
|
||||
@encode(void*(unsigned long long int,unsigned long long int))
|
||||
cy# ret[1](100, 1)
|
||||
0x100444100
|
||||
*/
|
||||
utils.include = function(str, load) {
|
||||
var re = /^\s*([^(]*(?:\s+|\*))(\w*)\s*\(([^)]*)\)\s*;?\s*$/;
|
||||
var match = re.exec(str);
|
||||
if(!match) {
|
||||
return -1;
|
||||
}
|
||||
var rType = utils.applyTypedefs(match[1]);
|
||||
var name = match[2];
|
||||
var args = match[3];
|
||||
|
||||
var argsRe = /([^,]+)(?:,|$)/g;
|
||||
var argsTypes = [];
|
||||
while((match = argsRe.exec(args)) !== null) {
|
||||
var type = utils.applyTypedefs(match[1]);
|
||||
argsTypes.push(type);
|
||||
}
|
||||
|
||||
var encodeString = "@encode(";
|
||||
encodeString += rType + "(";
|
||||
encodeString += argsTypes.join(", ") + "))";
|
||||
|
||||
var fun = dlsym(RTLD_DEFAULT, name);
|
||||
if(fun !== null) {
|
||||
encodeString += "(" + fun + ")";
|
||||
if(load) {
|
||||
return [name, utils.exec(encodeString)];
|
||||
}
|
||||
} else if(load) {
|
||||
throw "Function couldn't be found with dlsym!";
|
||||
}
|
||||
|
||||
return [name, encodeString];
|
||||
};
|
||||
|
||||
/*
|
||||
Loads the function declaration in the defs array using utils.exec and exposes to cycript's global scope
|
||||
Is automatically called if shouldLoadCFuncs is true
|
||||
*/
|
||||
utils.funcs = {};
|
||||
utils.loadfuncs = function(expose) {
|
||||
for(var i = 0; i < CFuncsDeclarations.length; i++) {
|
||||
try {
|
||||
var o = utils.include(CFuncsDeclarations[i], true);
|
||||
utils.funcs[o[0]] = o[1];
|
||||
if(expose) {
|
||||
Cycript.all[o[0]] = o[1];
|
||||
}
|
||||
} catch(e) {
|
||||
system.print("Failed to load function: " + i);
|
||||
try {
|
||||
system.print(utils.include(CFuncsDeclarations[i]));
|
||||
} catch(e2) {
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/*
|
||||
Calculates the size of a type like the C operator sizeof
|
||||
|
||||
Usage:
|
||||
cy# utils.sizeof(int)
|
||||
4
|
||||
cy# utils.sizeof(@encode(void *))
|
||||
8
|
||||
cy# utils.sizeof("mach_vm_address_t")
|
||||
8
|
||||
*/
|
||||
utils.sizeof = function(type) {
|
||||
if(typeof type === "string") {
|
||||
type = utils.applyTypedefs(type);
|
||||
type = utils.exec("@encode(" + type + ")");
|
||||
}
|
||||
|
||||
// (const) char * has "infinite" preceision
|
||||
if(type.toString().slice(-1) === "*") {
|
||||
return utils.sizeof(@encode(void *));
|
||||
}
|
||||
|
||||
// float and double
|
||||
if(type.toString() === @encode(float).toString()) {
|
||||
return 4;
|
||||
} else if (type.toString() === @encode(double).toString()) {
|
||||
return 8;
|
||||
}
|
||||
|
||||
var typeInstance = type(0);
|
||||
|
||||
if(typeInstance instanceof Object) {
|
||||
// Arrays
|
||||
if("length" in typeInstance) {
|
||||
return typeInstance.length * utils.sizeof(typeInstance.type);
|
||||
}
|
||||
|
||||
// Structs
|
||||
if(typeInstance.toString() === "[object Struct]") {
|
||||
var typeStr = type.toString();
|
||||
var arrayTypeStr = "[2" + typeStr + "]";
|
||||
var arrayType = new Type(arrayTypeStr);
|
||||
|
||||
var arrayInstance = new arrayType;
|
||||
|
||||
return @encode(void *)(&(arrayInstance[1])) - @encode(void *)(&(arrayInstance[0]));
|
||||
}
|
||||
}
|
||||
|
||||
for(var i = 0; i < 5; i++) {
|
||||
var maxSigned = Math.pow(2, 8 * Math.pow(2, i) - 1) - 1;
|
||||
if(i === 3) {
|
||||
// Floating point fix ;^)
|
||||
maxSigned /= 1000;
|
||||
}
|
||||
|
||||
// can't use !== or sizeof(void *) === 0.5
|
||||
if(type(maxSigned) != maxSigned) {
|
||||
return Math.pow(2, i - 1);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/*
|
||||
Logs a specific message sent to an instance of a class like logify.pl in theos
|
||||
Requires Cydia Substrate (com.saurik.substrate.MS) and NSLog (org.cycript.NSLog) modules
|
||||
Returns the old message returned by MS.hookMessage (Note: this is not just the old message!)
|
||||
|
||||
Usage:
|
||||
cy# var oldm = utils.logify(objc_getMetaClass(NSNumber), @selector(numberWithDouble:))
|
||||
...
|
||||
cy# var n = [NSNumber numberWithDouble:1.5]
|
||||
2014-07-28 02:26:39.805 cycript[71213:507] +[<NSNumber: 0x10032d0c4> numberWithDouble:1.5]
|
||||
2014-07-28 02:26:39.806 cycript[71213:507] = 1.5
|
||||
@1.5
|
||||
*/
|
||||
utils.logify = function(cls, sel) {
|
||||
@import com.saurik.substrate.MS;
|
||||
@import org.cycript.NSLog;
|
||||
|
||||
var oldm = {};
|
||||
|
||||
MS.hookMessage(cls, sel, function() {
|
||||
var args = [].slice.call(arguments);
|
||||
|
||||
var selFormat = sel.toString().replace(/:/g, ":%@ ").trim();
|
||||
var logFormat = "%@[<%@: 0x%@> " + selFormat + "]";
|
||||
|
||||
var standardArgs = [logFormat, class_isMetaClass(cls)? "+": "-", cls.toString(), (&this).valueOf().toString(16)];
|
||||
var logArgs = standardArgs.concat(args);
|
||||
|
||||
NSLog.apply(null, logArgs);
|
||||
|
||||
var r = oldm->apply(this, arguments);
|
||||
|
||||
if(r !== undefined) {
|
||||
NSLog(" = %@", r);
|
||||
}
|
||||
|
||||
return r;
|
||||
}, oldm);
|
||||
|
||||
return oldm;
|
||||
};
|
||||
|
||||
/*
|
||||
Calls a C function by providing its name and arguments
|
||||
Doesn't support structs
|
||||
Return value is always a void pointer
|
||||
|
||||
Usage:
|
||||
cy# utils.apply("printf", ["%s %.3s, %d -> %c, float: %f\n", "foo", "barrrr", 97, 97, 1.5])
|
||||
foo bar, 97 -> a, float: 1.500000
|
||||
0x22
|
||||
*/
|
||||
utils.apply = function(fun, args) {
|
||||
if(!(args instanceof Array)) {
|
||||
throw "Args needs to be an array!";
|
||||
}
|
||||
|
||||
var argc = args.length;
|
||||
var voidPtr = @encode(void *);
|
||||
var argTypes = [];
|
||||
for(var i = 0; i < argc; i++) {
|
||||
var argType = voidPtr;
|
||||
|
||||
var arg = args[i];
|
||||
if(typeof arg === "string") {
|
||||
argType = @encode(char *);
|
||||
}
|
||||
if(typeof arg === "number" && arg % 1 !== 0) {
|
||||
argType = @encode(double);
|
||||
}
|
||||
|
||||
argTypes.push(argType);
|
||||
}
|
||||
|
||||
var type = voidPtr.functionWith.apply(voidPtr, argTypes);
|
||||
|
||||
if(typeof fun === "string") {
|
||||
fun = dlsym(RTLD_DEFAULT, fun);
|
||||
}
|
||||
|
||||
if(!fun) {
|
||||
throw "Function not found!";
|
||||
}
|
||||
|
||||
return type(fun).apply(null, args);
|
||||
};
|
||||
|
||||
/*
|
||||
Converts a string (char *) to a void pointer (void *)
|
||||
You can't cast to strings to void pointers and vice versa in cycript. Blame saurik.
|
||||
|
||||
Usage:
|
||||
cy# var voidPtr = utils.str2voidPtr("foobar")
|
||||
0x100331590
|
||||
cy# utils.voidPtr2str(voidPtr)
|
||||
"foobar"
|
||||
*/
|
||||
utils.str2voidPtr = function(str) {
|
||||
var strdup = @encode(void *(char *))(dlsym(RTLD_DEFAULT, "strdup"));
|
||||
return strdup(str);
|
||||
};
|
||||
|
||||
/*
|
||||
The inverse function of str2voidPtr
|
||||
*/
|
||||
utils.voidPtr2str = function(voidPtr) {
|
||||
var strdup = @encode(char *(void *))(dlsym(RTLD_DEFAULT, "strdup"));
|
||||
return strdup(voidPtr);
|
||||
};
|
||||
|
||||
/*
|
||||
Converts a double into a void pointer
|
||||
This can be used to view the binary representation of a floating point number
|
||||
|
||||
Usage:
|
||||
cy# var n = utils.double2voidPtr(-1.5)
|
||||
0xbff8000000000000
|
||||
cy# utils.voidPtr2double(n)
|
||||
-1.5
|
||||
*/
|
||||
utils.double2voidPtr = function(n) {
|
||||
var doublePtr = new double;
|
||||
*doublePtr = n;
|
||||
|
||||
var voidPtrPtr = @encode(void **)(doublePtr);
|
||||
|
||||
return *voidPtrPtr;
|
||||
};
|
||||
|
||||
/*
|
||||
The inverse function of double2voidPtr
|
||||
*/
|
||||
utils.voidPtr2double = function(voidPtr) {
|
||||
var voidPtrPtr = new @encode(void **);
|
||||
*voidPtrPtr = voidPtr;
|
||||
|
||||
var doublePtr = @encode(double *)(voidPtrPtr);
|
||||
|
||||
return *doublePtr;
|
||||
};
|
||||
|
||||
/*
|
||||
Determines in a safe way if a memory location is readable
|
||||
|
||||
Usage:
|
||||
cy# utils.isMemoryReadable(0)
|
||||
false
|
||||
cy# utils.isMemoryReadable(0x1337)
|
||||
false
|
||||
cy# utils.isMemoryReadable(NSObject)
|
||||
true
|
||||
cy# var a = malloc(100); utils.isMemoryReadable(a)
|
||||
true
|
||||
*/
|
||||
utils.isMemoryReadable = function(ptr) {
|
||||
if(typeof ptr === "string") {
|
||||
return true;
|
||||
}
|
||||
|
||||
var fds = new @encode(int [2]);
|
||||
utils.apply("pipe", [fds]);
|
||||
var result = utils.apply("write", [fds[1], ptr, 1]) == 1;
|
||||
|
||||
utils.apply("close", [fds[0]]);
|
||||
utils.apply("close", [fds[1]]);
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/*
|
||||
Determines in a safe way if the memory location contains an Objective-C object
|
||||
|
||||
Usage:
|
||||
cy# utils.isObject(0)
|
||||
false
|
||||
cy# utils.isObject(0x1337)
|
||||
false
|
||||
cy# utils.isObject(NSObject)
|
||||
true
|
||||
cy# utils.isObject(objc_getMetaClass(NSObject))
|
||||
true
|
||||
cy# utils.isObject([new NSObject init])
|
||||
true
|
||||
cy# var a = malloc(100); utils.isObject(a)
|
||||
false
|
||||
cy# *@encode(void **)(a) = NSObject; utils.isObject(a)
|
||||
true
|
||||
*/
|
||||
utils.isObject = function(obj) {
|
||||
obj = @encode(void *)(obj);
|
||||
var lastObj = -1;
|
||||
|
||||
function objc_isa_ptr(obj) {
|
||||
// See http://www.sealiesoftware.com/blog/archive/2013/09/24/objc_explain_Non-pointer_isa.html
|
||||
var objc_debug_isa_class_mask = 0x00000001fffffffa;
|
||||
obj = (obj & 1)? (obj & objc_debug_isa_class_mask): obj;
|
||||
|
||||
if((obj & (utils.sizeof(@encode(void *)) - 1)) != 0) {
|
||||
return null;
|
||||
} else {
|
||||
return obj;
|
||||
}
|
||||
}
|
||||
|
||||
function ptrValue(obj) {
|
||||
return obj? obj.valueOf(): null;
|
||||
}
|
||||
|
||||
var foundMetaClass = false;
|
||||
|
||||
for(obj = objc_isa_ptr(obj); utils.isMemoryReadable(obj); ) {
|
||||
obj = *@encode(void **)(obj);
|
||||
|
||||
if(ptrValue(obj) == ptrValue(lastObj)) {
|
||||
foundMetaClass = true;
|
||||
break;
|
||||
}
|
||||
|
||||
lastObj = obj;
|
||||
}
|
||||
|
||||
if(!foundMetaClass) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if(lastObj === -1 || lastObj === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
var obj_class = objc_isa_ptr(@encode(void **)(obj)[1]);
|
||||
|
||||
if(!utils.isMemoryReadable(obj_class)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
var metaclass = objc_isa_ptr(@encode(void **)(obj_class)[0]);
|
||||
var superclass = objc_isa_ptr(@encode(void **)(obj_class)[1]);
|
||||
|
||||
return ptrValue(obj) == ptrValue(metaclass) && superclass == null;
|
||||
};
|
||||
|
||||
/*
|
||||
Creates a cycript struct type from a C struct definition
|
||||
|
||||
Usage:
|
||||
cy# var foo = makeStruct("int a; short b; char c; uint64_t d; double e;", "foo");
|
||||
@encode(foo)
|
||||
cy# var f = new foo
|
||||
&{a:0,b:0,c:0,d:0,e:0}
|
||||
cy# f->a = 100; f
|
||||
&{a:100,b:0,c:0,d:0,e:0}
|
||||
cy# *@encode(int *)(f)
|
||||
100
|
||||
*/
|
||||
utils.makeStruct = function(str, name) {
|
||||
var fieldRe = /(?:\s|\n)*([^;]+\s*(?:\s|\*))([^;]+)\s*;/g;
|
||||
|
||||
if(!name) {
|
||||
name = "struct" + Math.floor(Math.random() * 100000);
|
||||
}
|
||||
var typeStr = "{" + name + "=";
|
||||
|
||||
while((match = fieldRe.exec(str)) !== null) {
|
||||
var fieldType = utils.applyTypedefs(match[1]);
|
||||
var fieldName = match[2];
|
||||
var encodedType = utils.exec("@encode(" + fieldType + ")").toString();
|
||||
|
||||
typeStr += '"' + fieldName + '"' + encodedType;
|
||||
}
|
||||
|
||||
typeStr += "}";
|
||||
|
||||
return new Type(typeStr);
|
||||
};
|
||||
|
||||
// Various constants
|
||||
utils.constants = {
|
||||
VM_PROT_NONE: 0x0,
|
||||
VM_PROT_READ: 0x1,
|
||||
VM_PROT_WRITE: 0x2,
|
||||
VM_PROT_EXECUTE: 0x4,
|
||||
VM_PROT_NO_CHANGE: 0x8,
|
||||
VM_PROT_COPY: 0x10,
|
||||
VM_PROT_WANTS_COPY: 0x10,
|
||||
VM_PROT_IS_MASK: 0x40,
|
||||
};
|
||||
var c = utils.constants;
|
||||
c.VM_PROT_DEFAULT = c.VM_PROT_READ | c.VM_PROT_WRITE;
|
||||
c.VM_PROT_ALL = c.VM_PROT_READ | c.VM_PROT_WRITE | c.VM_PROT_EXECUTE;
|
||||
|
||||
if(shouldExposeConsts) {
|
||||
for(var k in c) {
|
||||
Cycript.all[k] = c[k];
|
||||
}
|
||||
}
|
||||
|
||||
if(shouldExposeFuncs) {
|
||||
for(var i = 0; i < funcsToExpose.length; i++) {
|
||||
var name = funcsToExpose[i];
|
||||
Cycript.all[name] = utils[name];
|
||||
}
|
||||
}
|
||||
|
||||
if(shouldLoadCFuncs) {
|
||||
utils.loadfuncs(shouldExposeCFuncs);
|
||||
}
|
||||
})(exports);
|
||||
31 samples/HTML+ERB/fishbowl.html.erb.deface (Normal file)
@@ -0,0 +1,31 @@
<!-- insert_before '[data-hook="buttons"]' -->
<% if Spree::Config[:enable_fishbowl] %>
<div class="row">
<div class="twelve columns" id="fishbowl_preferences">
<fieldset class="no-border-bottom">
<legend align="center"><%= t(:fishbowl_settings)%></legend>
<% @fishbowl_options.each do |key| %>
<div class="field">
<%= label_tag(key, t(key.to_s.gsub('fishbowl_', '').to_sym) + ': ') + tag(:br) %>
<%= text_field_tag('preferences[' + key.to_s + ']', Spree::Config[key], { :size => 10, :class => 'fullwidth' }) %>
</div>
<% end %>
<div class="field">
<%= hidden_field_tag 'preferences[fishbowl_always_fetch_current_inventory]', '0' %>
<%= check_box_tag('preferences[fishbowl_always_fetch_current_inventory]', "1", Spree::Config[:fishbowl_always_fetch_current_inventory]) %>
<%= t(:always_fetch_current_inventory) %>
</div>
<% if !@location_groups.empty? %>
<div class="field">
<%= label_tag(:fishbowl_location_group, t(:location_group) + ': ') + tag(:br) %>
<%= select('preferences', 'fishbowl_location_group', @location_groups, { :selected => Spree::Config[:fishbowl_location_group]}, { :class => ['select2', 'fullwidth'] }) %>
</div>
<% end %>
</fieldset>
</div>
</div>

<script type="text/javascript">
$('.select2').select2();
</script>
<% end %>
39 samples/HTML+ERB/index.html.erb (Normal file)
@@ -0,0 +1,39 @@
<% provide(:title, @header) %>
<% present @users do |user_presenter| %>
<div class="row key-header">
<h1><%= @header %></h1>
</div>

<div class='row'>
<div class='small-12 columns'>
<%= will_paginate %>
</div>
</div>
<div class="row key-table">
<div class="small-12 columns">
<div class="row key-table-row">
<div class="small-2 columns">Name</div>
<div class="small-3 columns">Email</div>
<div class="small-1 columns">Chords</div>
<div class="small-1 columns">Keys</div>
<div class="small-1 columns">Tunings</div>
<div class="small-1 columns">Credits</div>
<div class="small-1 columns">Prem?</div>
<div class="small-2 columns">Since?</div>
</div>

<% if @users == [] %>
<div class="row key-table-row">
<div class="small-4 small-centered columns">No Users</div>
</div>
<% else %>
<%= render @users %>
<% end %>
</div>
</div>
<div class='row'>
<div class='small-12 columns'>
<%= will_paginate %>
</div>
</div>
<% end %>
29 samples/Haml/buttons.html.haml.deface (Normal file)
@@ -0,0 +1,29 @@
/
  replace '.actions'

.pull-right
  .btn-group
    = link_to page.url, target: "_blank", title: t('.view_live_html'), class: "tip btn btn-xs btn-default" do
      %i.icon-picture.row-black

    = link_to refinery.edit_admin_page_path(page.nested_url,
        switch_locale: (page.translations.first.locale unless page.translated_to_default_locale?)),
        title: t('edit', :scope => 'refinery.admin.pages'),
        class: "tip btn btn-xs btn-default" do
      %i.icon-edit.row-blue


    - if page.deletable?
      = link_to refinery.admin_page_path(page.nested_url),
          methode: :delete,
          title: t('delete', :scope => 'refinery.admin.pages'),
          class: "tip cancel confirm-delete btn btn-xs btn-default",
          data: { confirm: t('message', scope: 'refinery.admin.delete', title: page_title_with_translations(page)) } do
        %i.icon-trash.row-red
    - else
      %button.btn.btn-xs.btn-default.disabled
        %i.icon-trash

  .btn-group
    = link_to refinery.new_admin_page_path(:parent_id => page.id), title: t('new', :scope => 'refinery.admin.pages'), class: "tip btn btn-xs btn-default" do
      %i.icon-plus.row-green
43
samples/LookML/comments.view.lookml
Normal file
@@ -0,0 +1,43 @@
- view: comments
  fields:

  - dimension: id
    primary_key: true
    type: int
    sql: ${TABLE}.id

  - dimension: body
    sql: ${TABLE}.body

  - dimension_group: created
    type: time
    timeframes: [time, date, week, month]
    sql: ${TABLE}.created_at

  - dimension: headline_id
    type: int
    hidden: true
    sql: ${TABLE}.headline_id

  - dimension_group: updated
    type: time
    timeframes: [time, date, week, month]
    sql: ${TABLE}.updated_at

  - dimension: user_id
    type: int
    hidden: true
    sql: ${TABLE}.user_id

  - measure: count
    type: count
    detail: detail*


  # ----- Detail ------
  sets:
    detail:
      - id
      - headlines.id
      - headlines.name
      - users.id
13
samples/OpenSCAD/not_simple.scad
Normal file
@@ -0,0 +1,13 @@
// A more complicated 3D shape in OpenSCAD
$fn=32;

difference() {
    // main shape
    union() {
        translate( [ 0, 0, 2 ] ) cube( [ 15, 15, 4 ], center=true );
        translate( [ 0, 0, 13 ] ) cylinder( h=25, r1=5, r2=3, center=true );
        translate( [ 0, 0, 28 ] ) sphere( r=6 );
    }
    // hole through center
    translate( [ 0, 0, 17 ] ) cylinder( h=35, r=2, center=true );
}
3
samples/OpenSCAD/simple.scad
Normal file
@@ -0,0 +1,3 @@
// Simple sphere in OpenSCAD

sphere( r=10 );
447
samples/Perl/example.cgi
Executable file
@@ -0,0 +1,447 @@
|
||||
#!/usr/bin/perl
|
||||
|
||||
# v1.0
|
||||
# nagiostat, program to insert performance-data from Nagios into RRD-archives
|
||||
# Copyright (C) 2004 Carl Bingel / Svensk IT konsult AB
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
|
||||
|
||||
use strict;
|
||||
|
||||
## Basic configuration options
|
||||
my $BASE_DIR = "/usr/share/nagiostat";
|
||||
my $CONFIG_FILE = "/etc/nagios/nagiostat.conf"; ## Config-file location
|
||||
my $DEBUG_LOG_FILE = "/var/spool/nagiostat/debug.log"; ## Specify where to create log-file and what filename (must be writable by nagios-user!)
|
||||
my $DEBUGLEVEL = 1; ## 0=Nothing, 1=Errors, 2=Warnings, 3=Debug
|
||||
my $DEBUGOUTPUT = 0; ## 0=file, 1=STDERR, 2=STDOUT (for cgi)
|
||||
|
||||
require 'shellwords.pl';
|
||||
|
||||
## Global vars
|
||||
my $DEBUG_TIMESTAMP=0;
|
||||
|
||||
## Find out how program is run
|
||||
if( $ARGV[0] eq "-t") { ## -t = test configuration-file
|
||||
print STDERR "nagiostat: Testing configuration-file..\n";
|
||||
$DEBUGLEVEL=3;
|
||||
$DEBUGOUTPUT=1; ## output errors to console and not file
|
||||
my $c = &read_config();
|
||||
abort();
|
||||
} elsif( $ARGV[0] eq "-p") { ## -p = parse performance-data (when started by nagios)
|
||||
&parse_perfdata();
|
||||
} else {
|
||||
if( exists $ENV{'GATEWAY_INTERFACE'}) { ## we are run as a CGI-script!
|
||||
$DEBUGOUTPUT=2; ## output errors to web-browser
|
||||
&run_as_cgi();
|
||||
} else { ## print some help-info
|
||||
print STDERR "nagiostat: usage:
|
||||
-t Test configuration-file
|
||||
-p Parse/import performance-data (used when called from nagios)
|
||||
";
|
||||
}
|
||||
}
|
||||
|
||||
abort();
|
||||
|
||||
sub abort {
|
||||
## logfile: write blank if we wrote anything...
|
||||
if( $DEBUG_TIMESTAMP!=0) {
|
||||
debug( 1, "");
|
||||
}
|
||||
exit;
|
||||
}
|
||||
|
||||
##
|
||||
## Program is called as CGI
|
||||
##
|
||||
sub run_as_cgi {
|
||||
use CGI;
|
||||
my $cgi = new CGI;
|
||||
|
||||
my $graph_name = $cgi->param( "graph_name");
|
||||
my $graph_iteration = $cgi->param( "graph_iteration");
|
||||
|
||||
if( $graph_iteration eq "") {
|
||||
print "Content-type: text/html\nExpires: 0\n\n";
|
||||
} else {
|
||||
print "Content-type: image/gif\nExpires: 0\n\n";
|
||||
}
|
||||
|
||||
my $config = read_config();
|
||||
|
||||
if( $graph_name eq "") {
|
||||
##
|
||||
## display index of graphs
|
||||
##
|
||||
display_htmltemplate( $config->{'htmltemplatepath'}."/".$config->{'graphindextemplate'}, $graph_name, $config);
|
||||
} else { ## display graph
|
||||
if( ! exists $config->{'graphs'}->{$graph_name}) {
|
||||
debug( 1, "ERROR: Graph '$graph_name' does not exist!");
|
||||
exit;
|
||||
} elsif( $graph_iteration eq "") {
|
||||
##
|
||||
## Display HTML-page with all the graphs
|
||||
##
|
||||
if( ! -r $config->{'htmltemplatepath'}."/".$config->{'graphs'}->{$graph_name}->{'htmltemplate'}) {
|
||||
debug( 1, "ERROR: HTML-template '".($config->{'htmltemplatepath'}."/".$config->{'graphs'}->{$graph_name}->{'htmltemplate'})."' is not readable by effective userid!");
|
||||
exit;
|
||||
}
|
||||
display_htmltemplate( $config->{'htmltemplatepath'}."/".$config->{'graphs'}->{$graph_name}->{'htmltemplate'}, $graph_name, $config);
|
||||
} else {
|
||||
##
|
||||
## generate graph (call 'rrdtool graph')
|
||||
##
|
||||
my $rrdtool_cmdline = $config->{'rrdtoolpath'}." graph - ".join( " ", @{$config->{'plottemplates'}->{ $config->{'graphs'}->{$graph_name}->{'plottemplate'} } });
|
||||
|
||||
## expand variables
|
||||
my $rrdarchive = $config->{'rrdarchivepath'}."/".$config->{'graphs'}->{$graph_name}->{'rrdfilename'};
|
||||
$rrdtool_cmdline =~ s/\$f/$rrdarchive/g;
|
||||
my $t_start = $config->{'graphtimetemplates'}->{ $config->{'graphs'}->{$graph_name}->{'graphtimetemplate'} }->[$graph_iteration]->{'starttime'};
|
||||
$rrdtool_cmdline =~ s/\$s/$t_start/g;
|
||||
my $t_end = $config->{'graphtimetemplates'}->{ $config->{'graphs'}->{$graph_name}->{'graphtimetemplate'} }->[$graph_iteration]->{'endtime'};
|
||||
$rrdtool_cmdline =~ s/\$e/$t_end/g;
|
||||
my $t_descr = $config->{'graphtimetemplates'}->{ $config->{'graphs'}->{$graph_name}->{'graphtimetemplate'} }->[$graph_iteration]->{'description'};
|
||||
$rrdtool_cmdline =~ s/\$d/$t_descr/g;
|
||||
|
||||
## Call rrdtool (should probably be fixed to call it in a better way, like exec)
|
||||
print `$rrdtool_cmdline`;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
## Display HTML template (and do variable-substitution and other stuff)
|
||||
##
|
||||
sub display_htmltemplate {
|
||||
my( $filename, $graph_name, $config) = @_;
|
||||
|
||||
if( -r $filename) {
|
||||
open( HTML, $filename);
|
||||
while( <HTML>) {
|
||||
## All is a big regex.. :-)
|
||||
s/\$(\w+)/my $t=sub {
|
||||
my $varname = $_[0];
|
||||
if( $varname eq "GRAPHNAME") { ## return the name of the graph
|
||||
if( $config->{'graphs'}->{$graph_name}->{'title'} ne "") {
|
||||
return( $config->{'graphs'}->{$graph_name}->{'title'});
|
||||
} else {
|
||||
return( "Graph ".$graph_name);
|
||||
}
|
||||
} elsif( $varname eq "CURRENTTIME") { ## return current date-time
|
||||
return( localtime());
|
||||
} elsif( $varname eq "GRAPHINDEX" || $varname eq "GRAPHINDEX_ONEROW") { ## return HTML-code for index of the different graphs
|
||||
my $return_html;
|
||||
foreach my $gn ( sort keys %{$config->{'graphs'}}) {
|
||||
$return_html.=(($varname eq "GRAPHINDEX")?"<LI>":"").
|
||||
"<A HREF=\"?graph_name=$gn\">".($config->{'graphs'}->{$gn}->{'title'})."<\/A>". # must escape slash since were inside an regex!
|
||||
(($varname eq "GRAPHINDEX_ONEROW")?" ":"");
|
||||
}
|
||||
return( $return_html);
|
||||
} elsif( $varname eq "GRAPH_AUTOGENERATE") { ## return HTML-code for displaying the actual graph-images
|
||||
my $iteration_id=0;
|
||||
my $return_html;
|
||||
foreach my $time ( @{ $config->{'graphtimetemplates'}->{ $config->{'graphs'}->{$graph_name}->{'graphtimetemplate'} } }) {
|
||||
$return_html.="<P>".($time->{'description'})."<BR><IMG SRC=\"?graph_name=$graph_name&graph_iteration=$iteration_id\">";
|
||||
$iteration_id++;
|
||||
}
|
||||
return( $return_html);
|
||||
} else { ## unknown variable
|
||||
return( "##UNKNOWN-VARIABLE##");
|
||||
}
|
||||
}; &$t($1)/eig; ## i thought that regex would never end!
|
||||
print;
|
||||
}
|
||||
close( HTML);
|
||||
} else {
|
||||
print "ERROR: HTML-template '$filename' does not exist or is not readable by effective userid.";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
##
|
||||
## Process incoming performance-data (parse output from check-plugin and insert values into rrd-archives)
|
||||
##
|
||||
sub parse_perfdata {
|
||||
|
||||
$DEBUG_TIMESTAMP=0;
|
||||
|
||||
my $config = read_config();
|
||||
|
||||
my $rrd_updates;
|
||||
|
||||
## Provide more symbolic names (same names as the macros in nagios configuration-file)
|
||||
|
||||
my( $LASTCHECK, $HOSTNAME, $SERVICEDESCR, $SERVICESTATE, $OUTPUT, $PERFDATA) = split( /\|!!\|/, $ARGV[1]);
|
||||
debug( 3, "**INCOMING PERFDATA:\n LASTCHECK=$LASTCHECK\n HOSTNAME=$HOSTNAME\n SERVICEDESCR=\"$SERVICEDESCR\"\n SERVICESTATE=\"$SERVICESTATE\"\n OUTPUT=\"$OUTPUT\"\n PERFDATA=\"$PERFDATA\"");
|
||||
|
||||
my $host_and_descr_found;
|
||||
## Loop through all host_regexes
|
||||
foreach my $host_regex ( keys %{$config->{'regexes'}}) {
|
||||
## Loop through all service_description_regexes
|
||||
foreach my $service_regex ( keys %{$config->{'regexes'}->{$host_regex}}) {
|
||||
if( ($HOSTNAME =~ m/$host_regex/i) && ($SERVICEDESCR =~ m/$service_regex/i) ) { ## match!
|
||||
$host_and_descr_found=1;
|
||||
## Loop through all InsertValue-lines with same host and service_description match
|
||||
foreach my $insert_value ( @{$config->{'regexes'}->{$host_regex}->{$service_regex}} ) {
|
||||
## Loop through all regexes that should match values in the output/perfdata
|
||||
foreach my $regex ( @{ $config->{'valueregextemplates'}->{$insert_value->{'regextemplate'}} }) {
|
||||
my $regex_string = $regex->{'regex'};
|
||||
if( $regex->{'regex_what'} eq "output") { ## do regex on "output"
|
||||
if( $OUTPUT =~ m/$regex_string/) {
|
||||
debug( 3, " +VALUE: ".$1);
|
||||
push( @{$rrd_updates->{$insert_value->{'rrdarchive'}}->{'value'}}, $1);
|
||||
push( @{$rrd_updates->{$insert_value->{'rrdarchive'}}->{'dsaname'}}, $regex->{'dsaname'});
|
||||
$rrd_updates->{$insert_value->{'rrdarchive'}}->{'rrdcreatetemplate'} = $insert_value->{'rrdcreatetemplate'}; #$config->{'regexes'}->{$host_regex}->{$service_regex}->[0]->{'rrdcreatetemplate'};
|
||||
} else {
|
||||
debug( 2, "**WARNING: No match for value with regex on output '$regex_string'.");
|
||||
}
|
||||
} else { ## do regex on "perfdata"
|
||||
if( $PERFDATA =~ m/$regex_string/) {
|
||||
debug( 3, " +VALUE: ".$1);
|
||||
push( @{$rrd_updates->{$insert_value->{'rrdarchive'}}->{'value'}}, $1);
|
||||
push( @{$rrd_updates->{$insert_value->{'rrdarchive'}}->{'dsaname'}}, $regex->{'dsaname'});
|
||||
$rrd_updates->{$insert_value->{'rrdarchive'}}->{'rrdcreatetemplate'} = $insert_value->{'rrdcreatetemplate'}; #$config->{'regexes'}->{$host_regex}->{$service_regex}->[0]->{'rrdcreatetemplate'};
|
||||
} else {
|
||||
debug( 2, "**WARNING: No match for value with regex on perfdata '$regex_string'.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if( !$host_and_descr_found) {
|
||||
debug( 2, "**WARNING: Hostname and description didn't match any of the regexes in the config-file.");
|
||||
} else {
|
||||
##
|
||||
## Insert the value into the RRD by calling the rrdtool (may be several rrd-archives)
|
||||
##
|
||||
foreach my $archive ( keys %{$rrd_updates}) {
|
||||
debug( 3, " =INSERT into '$archive': ".join( ",", @{$rrd_updates->{$archive}->{'value'}} )." DSA-names=".join( ",", @{$rrd_updates->{$archive}->{'dsaname'}}) );
|
||||
|
||||
my $rrdarchive_filename = $config->{'rrdarchivepath'}."/".$archive;
|
||||
|
||||
## Create RRD-Archive (according to template) if it does not exist
|
||||
if( ! -e $rrdarchive_filename) {
|
||||
my $rrdtool_cmdline = $config->{'rrdtoolpath'}." create ".$rrdarchive_filename." ".(join( " ", @{$config->{'rrdcreatetemplates'}->{$rrd_updates->{$archive}->{'rrdcreatetemplate'}}}));
|
||||
debug( 2, "**CREATING: $rrdarchive_filename, cmdline='".$rrdtool_cmdline."'.");
|
||||
`$rrdtool_cmdline`;
|
||||
}
|
||||
|
||||
## Check if rrd-archive is writable
|
||||
if( ! -w $rrdarchive_filename) { ## check whether RRD-archive is writable
|
||||
debug( 1, "!!ERROR: RRD-archive '".$rrdarchive_filename."' does not exist or is not writable by effective UID."); abort();
|
||||
}
|
||||
|
||||
## Assemble command-line for rrdtool
|
||||
my $rrdtool_cmdline = $config->{'rrdtoolpath'}." update ".$rrdarchive_filename.
|
||||
" --template ".join( ":", @{$rrd_updates->{$archive}->{'dsaname'}}).
|
||||
" $LASTCHECK:".join( ":", @{$rrd_updates->{$archive}->{'value'}});
|
||||
debug( 3, " !RRDCMDLINE: ".$rrdtool_cmdline);
|
||||
my $result = `$rrdtool_cmdline`;
|
||||
if( $result ne "") {
|
||||
debug( 1, "!!RESULT: $result");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
##
|
||||
## Read config-file and check for errors
|
||||
##
|
||||
sub read_config {
|
||||
my $config;
|
||||
open( CONFIG, $CONFIG_FILE);
|
||||
my( $line_counter);
|
||||
while( <CONFIG>) {
|
||||
$line_counter++;
|
||||
chomp;
|
||||
my( @args) = &shellwords( $_);
|
||||
my $orig_confline = $_;
|
||||
$args[0] = uc( $args[0]);
|
||||
|
||||
if( $args[0] eq "INSERTVALUE") { ## INSERTVALUE-command
|
||||
shift @args;
|
||||
my( $rrd_filename, $rrdcreatetemplate, $hostname_regex, $servicedescr_regex, $regex_template) = @args;
|
||||
|
||||
if( ! exists $config->{'rrdcreatetemplates'}->{$rrdcreatetemplate}) {
|
||||
debug( 1, "!!ERROR: RRDCreateTemplate '$rrdcreatetemplate' is not defined but refered to in line $line_counter."); abort();
|
||||
}
|
||||
if( $hostname_regex !~ m/^\/(.*)\/$/) { ## verify hostname regex
|
||||
debug( 1, "!!ERROR: Hostname regex should be enclosed in slashes, i.e. /HOSTNAME/ and optionally enclosed in quotes if needed. Conf-line $line_counter."); abort();
|
||||
} else {
|
||||
$hostname_regex = $1;
|
||||
}
|
||||
if( $servicedescr_regex !~ m/^\/(.*)\/$/) { ## verify service description regex
|
||||
debug( 1, "!!ERROR: Service-description regex should be enclosed in slashes, i.e. /SERVICEDESCR/ and optionally enclosed in quotes if needed. Config-line $line_counter.");
|
||||
abort();
|
||||
} else {
|
||||
$servicedescr_regex = $1;
|
||||
}
|
||||
if( ! exists $config->{'valueregextemplates'}->{$regex_template}) { ## verify value-regex-template exists
|
||||
debug( 1, "!!ERROR: VALUEREGEXTEMPLATE '$regex_template' is not defined on line $line_counter."); abort();
|
||||
}
|
||||
push( @{$config->{'regexes'}->{$hostname_regex}->{$servicedescr_regex}}, {
|
||||
'rrdarchive' => $rrd_filename,
|
||||
'rrdcreatetemplate' => $rrdcreatetemplate,
|
||||
'regextemplate' => $regex_template
|
||||
} );
|
||||
} elsif( $args[0] =~ m/^(\s*)#/ || $args[0] eq "") { ## comment or blank row
|
||||
## do nuthin
|
||||
} elsif( $args[0] eq "RRDTOOLPATH") { ## RRDToolPath args: path
|
||||
$config->{'rrdtoolpath'} = $args[1];
|
||||
} elsif( $args[0] eq "PLOTTEMPLATE") { ## PlotTemplate args: name,htmltemplate,parameters..
|
||||
shift @args;
|
||||
my( $name, @params) = @args;
|
||||
if( $name eq "") {
|
||||
debug( 1, "!!ERROR: PLOTTEMPLATE-name must be specified on line $line_counter."); abort();
|
||||
}
|
||||
if( exists $config->{'plottemplates'}->{$name}) {
|
||||
debug( 1, "!!ERROR: PLOTTTEMPLATE-names must be uniqe. Duplicate name found on line: $line_counter."); abort();
|
||||
}
|
||||
$config->{'plottemplates'}->{$name} = [ @params];
|
||||
} elsif( $args[0] eq "GRAPHTIMETEMPLATE") { ## GraphTimeTemplate args: name,parameters..
|
||||
shift @args;
|
||||
my( $name, @params) = @args;
|
||||
if( $name eq "") {
|
||||
debug( 1, "!!ERROR: GRAPHTIMETEMPLATE-name must be specified on line $line_counter."); abort();
|
||||
}
|
||||
if( exists $config->{'graphtimetemplates'}->{$name}) {
|
||||
debug( 1, "!!ERROR: GRAPHTIMETEMPLATE-names must be uniqe. Duplicate name found on line: $line_counter."); abort();
|
||||
}
|
||||
foreach my $time_template (@params) {
|
||||
my( $t_start, $t_end, @t_descr) = split( /:/, $time_template);
|
||||
my $t_descr = join( ":", @t_descr); # workaround if ':' is used in description-string
|
||||
if( $t_start eq "" || $t_end eq "") {
|
||||
debug( 1, "!!ERROR: GRAPHTIMETEMPLATE, each time-definition should be defined as '<starttime>:<endtime>:<description>' on line $line_counter.");
|
||||
}
|
||||
push( @{$config->{'graphtimetemplates'}->{$name}}, {
|
||||
'starttime' => $t_start,
|
||||
'endtime' => $t_end,
|
||||
'description' => $t_descr
|
||||
});
|
||||
}
|
||||
} elsif( $args[0] eq "RRDCREATETEMPLATE") { ## RRDCreateTemplate
|
||||
shift @args;
|
||||
my( $name, @params) = @args;
|
||||
if( $name eq "") {
|
||||
debug( 1, "!!ERROR: RRDCREATETEMPLATE-name must be specified on line $line_counter."); abort();
|
||||
}
|
||||
if( exists $config->{'rrdcreatetemplates'}->{$name}) {
|
||||
debug( 1, "!!ERROR: RRDCREATETEMPLATE-names must be uniq. Duplicate name found on line: $line_counter."); abort();
|
||||
}
|
||||
$config->{'rrdcreatetemplates'}->{$name} = [ @params];
|
||||
} elsif( $args[0] eq "VALUEREGEXTEMPLATE") { ## ValueRegexTemplate
|
||||
shift @args;
|
||||
my( $template_name, @regexes) = @args;
|
||||
if( $template_name eq "") {
|
||||
debug( 1, "!!ERROR: VALUEREGEXTEMPLATE-name must be specified on line $line_counter."); abort();
|
||||
}
|
||||
foreach my $r (@regexes) {
|
||||
if( $r !~ m/^(output|perfdata):(\w+):\/(.*)\/$/) {
|
||||
debug( 1, "!!ERROR: Value-regex should be formatted as 'output:dsaname:/regex/' or 'perfdata:dsaname:/regex/' depending on in which field to extract the data. The value should be within parantheses in the regex. Config-line $line_counter.");
|
||||
abort();
|
||||
} else {
|
||||
my( $regex_what, $dsa_name, $regex) = ( $1, $2, $3);
|
||||
push( @{$config->{'valueregextemplates'}->{$template_name}}, {
|
||||
'regex_what' => $regex_what,
|
||||
'regex' => $regex,
|
||||
'dsaname' => $dsa_name
|
||||
} );
|
||||
}
|
||||
}
|
||||
} elsif( $args[0] eq "RRDARCHIVEPATH") { ## RRDARCHIVEPATH
|
||||
$config->{'rrdarchivepath'} = $args[1];
|
||||
} elsif( $args[0] eq "HTMLTEMPLATEPATH") { ## HTMLTemplatePath
|
||||
$config->{'htmltemplatepath'} = $args[1];
|
||||
} elsif( $args[0] eq "GRAPHINDEXTEMPLATE") { ## GraphIndexTemplate
|
||||
$config->{'graphindextemplate'} = $args[1];
|
||||
} elsif( $args[0] eq "GRAPH") { ## GRAPH args: name,rrdfilename,rrdcreatetemplate,graphtimetemplate,plottemplate,htmltemplate
|
||||
if( $args[1] eq "") {
|
||||
debug( 1, "!!ERROR: GRAPH-name must be specified on line $line_counter."); abort();
|
||||
}
|
||||
if( ! exists $config->{'graphtimetemplates'}->{$args[3]}) {
|
||||
debug( 1, "!!ERROR: Unknown GRAPHTIMETEMPLATE on line $line_counter."); abort();
|
||||
}
|
||||
if( ! exists $config->{'plottemplates'}->{$args[4]}) {
|
||||
debug( 1, "!!ERROR: Unknown PLOTTEMPLATE on line $line_counter."); abort();
|
||||
}
|
||||
if( exists $config->{'graphs'}->{$args[1]}) {
|
||||
debug( 1, "!!ERROR: Graphnames must be uniqe. Duplicate name found on line: $line_counter.");
|
||||
abort();
|
||||
} else {
|
||||
$config->{'graphs'}->{$args[1]} = {
|
||||
'graphname' => $args[1],
|
||||
'rrdfilename' => $args[2],
|
||||
'graphtimetemplate' => $args[3],
|
||||
'plottemplate' => $args[4],
|
||||
'htmltemplate' => $args[5],
|
||||
'title' => $args[6]
|
||||
};
|
||||
}
|
||||
} else {
|
||||
debug( 1, "!!ERROR: Unknown config-file directive on line $line_counter: '".$args[0]."'");
|
||||
}
|
||||
}
|
||||
close( CONFIG);
|
||||
|
||||
if( ! -x $config->{'rrdtoolpath'}) {
|
||||
debug( 1, "!!ERROR: RRDTOOLPATH does not point to executable rrdtool-binary.");
|
||||
abort();
|
||||
}
|
||||
if( ! -x $config->{'rrdarchivepath'}) {
|
||||
debug( 1, "!!ERROR: RRDARCHIVEPATH, '".($config->{'rrdarchivepath'})."' is not readable by effective userid."); abort();
|
||||
}
|
||||
if( ! -x $config->{'htmltemplatepath'}) {
|
||||
debug( 1, "!!ERROR: HTMLTEMPLATEPATH, '".($config->{'htmltemplatepath'})."' is not readable by effective userid."); abort();
|
||||
}
|
||||
|
||||
return( $config);
|
||||
}
|
||||
|
||||
|
||||
##
|
||||
## Write debug-output/logging
|
||||
##
|
||||
sub debug {
|
||||
my( $level, $msg) = @_;
|
||||
if( $DEBUGLEVEL >= $level) {
|
||||
|
||||
## write timestamp
|
||||
if( !$DEBUG_TIMESTAMP) {
|
||||
$DEBUG_TIMESTAMP=1;
|
||||
debug( 1, scalar localtime());
|
||||
}
|
||||
|
||||
## write to file or STDERR
|
||||
if( $DEBUGOUTPUT == 0) {
|
||||
open( DEBUGOUTPUT, ">>".$DEBUG_LOG_FILE);
|
||||
print DEBUGOUTPUT $msg."\n";
|
||||
close( DEBUGOUTPUT);
|
||||
} elsif( $DEBUGOUTPUT == 2) {
|
||||
print "<BR><PRE>$msg</PRE>";
|
||||
} else {
|
||||
print STDERR $msg."\n";
|
||||
}
|
||||
|
||||
}
|
||||
}
11
samples/Perl/strict.t
Normal file
@@ -0,0 +1,11 @@
use Test::Base;

__DATA__
=== Strict Test

--- perl strict
my $x = 5;
--- strict
use strict;
use warnings;
my $x = 5;
68
samples/SQF/fn_remoteExecFnc.sqf
Normal file
@@ -0,0 +1,68 @@
/*
 * Author: commy2
 *
 * Execute a function on a remote machine in mp.
 *
 * Argument:
 * 0: Function arguments (Array)
 * 1: Function to execute, has to be defined on the remote machine first (String)
 * 2: The function will be executed where this unit is local OR the mode were this function should be executed. (Object OR Number, optional default: 2)
 *    Mode 0: execute on this machine only
 *    Mode 1: execute on server
 *    Mode 2: execute on all clients + server
 *    Mode 3: execute on dedicated only
 *
 * Return value:
 * Nothing
 */

private ["_arguments", "_function", "_unit", "_id"];

AGM_Core_remoteFnc = _this;

_arguments = _this select 0;
_function = call compile (_this select 1);
_unit = _this select 2;

if (isNil "_unit") then {
    _unit = 2;
};

if (typeName _unit == "SCALAR") exitWith {
    switch (_unit) do {
        case 0 : {
            _arguments call _function;
        };
        case 1 : {
            if (isServer) then {
                _arguments call _function;
            } else {
                publicVariableServer "AGM_Core_remoteFnc";
            };
        };
        case 2 : {
            _arguments call _function;

            AGM_Core_remoteFnc set [2, 0];
            publicVariable "AGM_Core_remoteFnc";
        };
        case 3 : {
            if (isDedicated) then {
                _arguments call _function;
            } else {
                if (!isServer) then {publicVariableServer "AGM_Core_remoteFnc"};
            };
        };
    };
};

if (local _unit) then {
    _arguments call _function;
} else {
    if (isServer) then {
        _id = owner _unit;
        _id publicVariableClient "AGM_Core_remoteFnc";
    } else {
        publicVariableServer "AGM_Core_remoteFnc";
    };
};
19
samples/SQF/macros.hqf
Normal file
@@ -0,0 +1,19 @@
#include <version.hqf>

#define SET(VAR,VALUE) private #VAR; VAR = VALUE;
#define CONV(VAR,ARRAY,POOL) VAR = ARRAY select (POOL find VAR);

#define ALL_HITPOINTS_MAN [ \
    "HitHead", "HitBody", \
    "HitLeftArm", "HitRightArm", \
    "HitLeftLeg","HitRightLeg" \
]

#define ALL_HITPOINTS_VEH [ \
    "HitBody", "HitHull", "HitEngine", "HitFuel", \
    "HitTurret", "HitGun", \
    "HitLTrack", "HitRTrack", \
    "HitLFWheel", "HitRFWheel", "HitLF2Wheel", "HitRF2Wheel", "HitLMWheel", "HitRMWheel", "HitLBWheel", "HitRBWheel", \
    "HitAvionics", "HitHRotor", "HitVRotor", \
    "HitRGlass", "HitLGlass", "HitGlass1", "HitGlass2", "HitGlass3", "HitGlass4", "HitGlass5", "HitGlass6" \
]
119
samples/TeX/authortitle.cbx
Normal file
@@ -0,0 +1,119 @@
|
||||
\ProvidesFile{authortitle.cbx}
|
||||
[\abx@cbxid]
|
||||
|
||||
\ExecuteBibliographyOptions{uniquename,uniquelist,autocite=footnote}
|
||||
|
||||
\renewcommand*{\iffinalcitedelim}{\iflastcitekey}
|
||||
|
||||
\newbool{cbx:parens}
|
||||
|
||||
\newbibmacro*{cite}{%
|
||||
\iffieldundef{shorthand}
|
||||
{\ifnameundef{labelname}
|
||||
{}
|
||||
{\printnames{labelname}%
|
||||
\setunit{\nametitledelim}}%
|
||||
\usebibmacro{cite:title}}%
|
||||
{\usebibmacro{cite:shorthand}}}
|
||||
|
||||
\newbibmacro*{citetitle}{%
|
||||
\iffieldundef{shorthand}
|
||||
{\usebibmacro{cite:title}}%
|
||||
{\usebibmacro{cite:shorthand}}}
|
||||
|
||||
\newbibmacro*{textcite}{%
|
||||
\ifnameundef{labelname}
|
||||
{}
|
||||
{\printnames{labelname}%
|
||||
\setunit{%
|
||||
\global\booltrue{cbx:parens}%
|
||||
\addspace\bibopenparen}}%
|
||||
\ifnumequal{\value{citecount}}{1}
|
||||
{\usebibmacro{prenote}}
|
||||
{}%
|
||||
\iffieldundef{shorthand}
|
||||
{\usebibmacro{cite:title}}%
|
||||
{\usebibmacro{cite:shorthand}}}
|
||||
|
||||
\newbibmacro*{cite:title}{%
|
||||
\printtext[bibhyperref]{%
|
||||
\printfield[citetitle]{labeltitle}}}
|
||||
|
||||
\newbibmacro*{cite:shorthand}{%
|
||||
\printtext[bibhyperref]{\printfield{shorthand}}}
|
||||
|
||||
\newbibmacro*{textcite:postnote}{%
|
||||
\iffieldundef{postnote}
|
||||
{\ifbool{cbx:parens}
|
||||
{\bibcloseparen}
|
||||
{}}
|
||||
{\ifbool{cbx:parens}
|
||||
{\postnotedelim}
|
||||
{\addspace\bibopenparen}%
|
||||
\printfield{postnote}\bibcloseparen}}
|
||||
|
||||
\DeclareCiteCommand{\cite}
|
||||
{\usebibmacro{prenote}}
|
||||
{\usebibmacro{citeindex}%
|
||||
\usebibmacro{cite}}
|
||||
{\multicitedelim}
|
||||
{\usebibmacro{postnote}}
|
||||
|
||||
\DeclareCiteCommand*{\cite}
|
||||
{\usebibmacro{prenote}}
|
||||
{\usebibmacro{citeindex}%
|
||||
\usebibmacro{citetitle}}
|
||||
{\multicitedelim}
|
||||
{\usebibmacro{postnote}}
|
||||
|
||||
\DeclareCiteCommand{\parencite}[\mkbibparens]
|
||||
{\usebibmacro{prenote}}
|
||||
{\usebibmacro{citeindex}%
|
||||
\usebibmacro{cite}}
|
||||
{\multicitedelim}
|
||||
{\usebibmacro{postnote}}
|
||||
|
||||
\DeclareCiteCommand*{\parencite}[\mkbibparens]
|
||||
{\usebibmacro{prenote}}
|
||||
{\usebibmacro{citeindex}%
|
||||
\usebibmacro{citetitle}}
|
||||
{\multicitedelim}
|
||||
{\usebibmacro{postnote}}
|
||||
|
||||
\DeclareCiteCommand{\footcite}[\mkbibfootnote]
|
||||
{\usebibmacro{prenote}}
|
||||
{\usebibmacro{citeindex}%
|
||||
\usebibmacro{cite}}
|
||||
{\multicitedelim}
|
||||
{\usebibmacro{postnote}}
|
||||
|
||||
\DeclareCiteCommand{\footcitetext}[\mkbibfootnotetext]
|
||||
{\usebibmacro{prenote}}
|
||||
{\usebibmacro{citeindex}%
|
||||
\usebibmacro{cite}}
|
||||
{\multicitedelim}
|
||||
{\usebibmacro{postnote}}
|
||||
|
||||
\DeclareCiteCommand{\smartcite}[\iffootnote\mkbibparens\mkbibfootnote]
|
||||
{\usebibmacro{prenote}}
|
||||
{\usebibmacro{citeindex}%
|
||||
\usebibmacro{cite}}
|
||||
{\multicitedelim}
|
||||
{\usebibmacro{postnote}}
|
||||
|
||||
\DeclareCiteCommand{\textcite}
|
||||
{\boolfalse{cbx:parens}}
|
||||
{\usebibmacro{citeindex}%
|
||||
\iffirstcitekey
|
||||
{\setcounter{textcitetotal}{1}}
|
||||
{\stepcounter{textcitetotal}%
|
||||
\textcitedelim}%
|
||||
\usebibmacro{textcite}}
|
||||
{\ifbool{cbx:parens}
|
||||
{\bibcloseparen\global\boolfalse{cbx:parens}}
|
||||
{}}
|
||||
{\usebibmacro{textcite:postnote}}
|
||||
|
||||
\DeclareMultiCiteCommand{\textcites}{\textcite}{}
|
||||
|
||||
\endinput
554
samples/TeX/english.lbx
Normal file
@@ -0,0 +1,554 @@
|
||||
\ProvidesFile{english.lbx}
|
||||
[\abx@lbxid]
|
||||
|
||||
\DeclareRedundantLanguages{english,american}{english,american,british,
|
||||
canadian,australian,newzealand,USenglish,UKenglish}
|
||||
|
||||
\DeclareBibliographyExtras{%
|
||||
\protected\def\bibrangedash{%
|
||||
\textendash\penalty\hyphenpenalty}% breakable dash
|
||||
\protected\def\bibdatedash{\bibrangedash}%
|
||||
\def\finalandcomma{\addcomma}%
|
||||
\def\finalandsemicolon{\addsemicolon}%
|
||||
\protected\def\mkbibordinal#1{%
|
||||
\begingroup
|
||||
\@tempcnta0#1\relax\number\@tempcnta
|
||||
\@whilenum\@tempcnta>100\do{\advance\@tempcnta-100\relax}%
|
||||
\ifnum\@tempcnta>20
|
||||
\@whilenum\@tempcnta>9\do{\advance\@tempcnta-10\relax}%
|
||||
\fi
|
||||
\ifcase\@tempcnta th\or st\or nd\or rd\else th\fi
|
||||
\endgroup}%
|
||||
\protected\def\mkbibmascord{\mkbibordinal}%
|
||||
\protected\def\mkbibfemord{\mkbibordinal}%
|
||||
\protected\def\mkbibneutord{\mkbibordinal}%
|
||||
\protected\def\mkbibdatelong#1#2#3{%
|
||||
\iffieldundef{#2}
|
||||
{}
|
||||
{\mkbibmonth{\thefield{#2}}%
|
||||
\iffieldundef{#3}
|
||||
{\iffieldundef{#1}{}{\space}}
|
||||
{\nobreakspace}}%
|
||||
\iffieldundef{#3}
|
||||
{}
|
||||
{\stripzeros{\thefield{#3}}%
|
||||
\iffieldundef{#1}{}{,\space}}%
|
||||
\iffieldbibstring{#1}
|
||||
{\bibstring{\thefield{#1}}}
|
||||
{\stripzeros{\thefield{#1}}}}%
|
||||
\protected\def\mkbibdateshort#1#2#3{%
|
||||
\iffieldundef{#2}
|
||||
{}
|
||||
{\mkdatezeros{\thefield{#2}}%
|
||||
\iffieldundef{#3}
|
||||
{\iffieldundef{#1}{}{/}}
|
||||
{/}}%
|
||||
\iffieldundef{#3}
|
||||
{}
|
||||
{\mkdatezeros{\thefield{#3}}%
|
||||
\iffieldundef{#1}{}{/}}%
|
||||
\iffieldbibstring{#1}
|
||||
{\bibstring{\thefield{#1}}}
|
||||
{\mkdatezeros{\thefield{#1}}}}%
|
||||
\savecommand\mkbibrangecomp
|
||||
\savecommand\mkbibrangecompextra
|
||||
\savecommand\mkbibrangeterse
|
||||
\savecommand\mkbibrangeterseextra
|
||||
\protected\def\mkbibrangecomp{%
|
||||
\lbx@us@mkbibrangetrunc@long{long}}%
|
||||
\protected\def\mkbibrangeterse{%
|
||||
\lbx@us@mkbibrangetrunc@short{short}}%
|
||||
\protected\def\mkbibrangecompextra{%
|
||||
\lbx@us@mkbibrangetruncextra@long{long}}%
|
||||
\protected\def\mkbibrangeterseextra{%
|
||||
\lbx@us@mkbibrangetruncextra@short{short}}%
|
||||
}
|
||||
|
||||
\UndeclareBibliographyExtras{%
|
||||
\restorecommand\mkbibrangecomp
|
||||
\restorecommand\mkbibrangecompextra
|
||||
\restorecommand\mkbibrangeterse
|
||||
\restorecommand\mkbibrangeterseextra
|
||||
}
|
||||
|
||||
\DeclareBibliographyStrings{%
|
||||
bibliography = {{Bibliography}{Bibliography}},
|
||||
references = {{References}{References}},
|
||||
shorthands = {{List of Abbreviations}{Abbreviations}},
|
||||
editor = {{editor}{ed\adddot}},
|
||||
editors = {{editors}{eds\adddot}},
|
||||
compiler = {{compiler}{comp\adddot}},
|
||||
compilers = {{compilers}{comp\adddot}},
|
||||
redactor = {{redactor}{red\adddot}},
|
||||
redactors = {{redactors}{red\adddot}},
|
||||
reviser = {{reviser}{rev\adddot}},
|
||||
revisers = {{revisers}{rev\adddot}},
|
||||
founder = {{founder}{found\adddot}},
|
||||
founders = {{founders}{found\adddot}},
|
||||
continuator = {{continued}{cont\adddot}},% FIXME: unsure
|
||||
continuators = {{continued}{cont\adddot}},% FIXME: unsure
|
||||
collaborator = {{collaborator}{collab\adddot}},% FIXME: unsure
|
||||
collaborators = {{collaborators}{collab\adddot}},% FIXME: unsure
|
||||
translator = {{translator}{trans\adddot}},
|
||||
translators = {{translators}{trans\adddot}},
|
||||
commentator = {{commentator}{comm\adddot}},
|
||||
commentators = {{commentators}{comm\adddot}},
|
||||
annotator = {{annotator}{annot\adddot}},
|
||||
annotators = {{annotators}{annot\adddot}},
|
||||
commentary = {{commentary}{comm\adddot}},
|
||||
annotations = {{annotations}{annot\adddot}},
|
||||
introduction = {{introduction}{intro\adddot}},
|
||||
foreword = {{foreword}{forew\adddot}},
|
||||
afterword = {{afterword}{afterw\adddot}},
|
||||
editortr = {{editor and translator}%
|
||||
{ed\adddotspace and trans\adddot}},
|
||||
editorstr = {{editors and translators}%
|
||||
{eds\adddotspace and trans\adddot}},
|
||||
editorco = {{editor and commentator}%
|
||||
{ed\adddotspace and comm\adddot}},
|
||||
editorsco = {{editors and commentators}%
|
||||
{eds\adddotspace and comm\adddot}},
|
||||
editoran = {{editor and annotator}%
|
||||
{ed\adddotspace and annot\adddot}},
|
||||
editorsan = {{editors and annotators}%
|
||||
{eds\adddotspace and annot\adddot}},
|
||||
editorin = {{editor and introduction}%
|
||||
{ed\adddotspace and introd\adddot}},
|
||||
editorsin = {{editors and introduction}%
|
||||
{eds\adddotspace and introd\adddot}},
|
||||
editorfo = {{editor and foreword}%
|
||||
{ed\adddotspace and forew\adddot}},
|
||||
editorsfo = {{editors and foreword}%
|
||||
{eds\adddotspace and forew\adddot}},
|
||||
editoraf = {{editor and afterword}%
|
||||
{ed\adddotspace and afterw\adddot}},
|
||||
editorsaf = {{editors and afterword}%
|
||||
{eds\adddotspace and afterw\adddot}},
|
||||
editortrco = {{editor, translator\finalandcomma\ and commentator}%
|
||||
{ed.,\addabbrvspace trans\adddot\finalandcomma\ and comm\adddot}},
|
||||
editorstrco = {{editors, translators\finalandcomma\ and commentators}%
|
||||
{eds.,\addabbrvspace trans\adddot\finalandcomma\ and comm\adddot}},
|
||||
editortran = {{editor, translator\finalandcomma\ and annotator}%
|
||||
{ed.,\addabbrvspace trans\adddot\finalandcomma\ and annot\adddot}},
|
||||
editorstran = {{editors, translators\finalandcomma\ and annotators}%
|
||||
{eds.,\addabbrvspace trans\adddot\finalandcomma\ and annot\adddot}},
|
||||
editortrin = {{editor, translator\finalandcomma\ and introduction}%
|
||||
{ed.,\addabbrvspace trans\adddot\finalandcomma\ and introd\adddot}},
|
||||
editorstrin = {{editors, translators\finalandcomma\ and introduction}%
|
||||
{eds.,\addabbrvspace trans\adddot\finalandcomma\ and introd\adddot}},
|
||||
editortrfo = {{editor, translator\finalandcomma\ and foreword}%
|
||||
{ed.,\addabbrvspace trans\adddot\finalandcomma\ and forew\adddot}},
|
||||
editorstrfo = {{editors, translators\finalandcomma\ and foreword}%
|
||||
{eds.,\addabbrvspace trans\adddot\finalandcomma\ and forew\adddot}},
|
||||
editortraf = {{editor, translator\finalandcomma\ and afterword}%
|
||||
{ed.,\addabbrvspace trans\adddot\finalandcomma\ and afterw\adddot}},
|
||||
editorstraf = {{editors, translators\finalandcomma\ and afterword}%
|
||||
{eds.,\addabbrvspace trans\adddot\finalandcomma\ and afterw\adddot}},
|
||||
editorcoin = {{editor, commentator\finalandcomma\ and introduction}%
|
||||
{ed.,\addabbrvspace comm\adddot\finalandcomma\ and introd\adddot}},
|
||||
editorscoin = {{editors, commentators\finalandcomma\ and introduction}%
|
||||
{eds.,\addabbrvspace comm\adddot\finalandcomma\ and introd\adddot}},
|
||||
editorcofo = {{editor, commentator\finalandcomma\ and foreword}%
|
||||
{ed.,\addabbrvspace comm\adddot\finalandcomma\ and forew\adddot}},
|
||||
editorscofo = {{editors, commentators\finalandcomma\ and foreword}%
|
||||
{eds.,\addabbrvspace comm\adddot\finalandcomma\ and forew\adddot}},
|
||||
editorcoaf = {{editor, commentator\finalandcomma\ and afterword}%
|
||||
{ed.,\addabbrvspace comm\adddot\finalandcomma\ and afterw\adddot}},
|
||||
editorscoaf = {{editors, commentators\finalandcomma\ and afterword}%
|
||||
{eds.,\addabbrvspace comm\adddot\finalandcomma\ and afterw\adddot}},
|
||||
editoranin = {{editor, annotator\finalandcomma\ and introduction}%
|
||||
{ed.,\addabbrvspace annot\adddot\finalandcomma\ and introd\adddot}},
|
||||
editorsanin = {{editors, annotators\finalandcomma\ and introduction}%
|
||||
{eds.,\addabbrvspace annot\adddot\finalandcomma\ and introd\adddot}},
|
||||
editoranfo = {{editor, annotator\finalandcomma\ and foreword}%
|
||||
{ed.,\addabbrvspace annot\adddot\finalandcomma\ and forew\adddot}},
|
||||
editorsanfo = {{editors, annotators\finalandcomma\ and foreword}%
|
||||
{eds.,\addabbrvspace annot\adddot\finalandcomma\ and forew\adddot}},
|
||||
editoranaf = {{editor, annotator\finalandcomma\ and afterword}%
|
||||
{ed.,\addabbrvspace annot\adddot\finalandcomma\ and afterw\adddot}},
|
||||
editorsanaf = {{editors, annotators\finalandcomma\ and afterword}%
|
||||
{eds.,\addabbrvspace annot\adddot\finalandcomma\ and afterw\adddot}},
|
||||
editortrcoin = {{editor, translator, commentator\finalandcomma\ and introduction}%
|
||||
{ed.,\addabbrvspace trans., comm\adddot\finalandcomma\ and introd\adddot}},
|
||||
editorstrcoin = {{editors, translators, commentators\finalandcomma\ and introduction}%
|
||||
{eds.,\addabbrvspace trans., comm\adddot\finalandcomma\ and introd\adddot}},
|
||||
editortrcofo = {{editor, translator, commentator\finalandcomma\ and foreword}%
|
||||
{ed.,\addabbrvspace trans., comm\adddot\finalandcomma\ and forew\adddot}},
|
||||
editorstrcofo = {{editors, translators, commentators\finalandcomma\ and foreword}%
|
||||
{eds.,\addabbrvspace trans., comm\adddot\finalandcomma\ and forew\adddot}},
|
||||
editortrcoaf = {{editor, translator, commentator\finalandcomma\ and afterword}%
|
||||
{ed.,\addabbrvspace trans., comm\adddot\finalandcomma\ and afterw\adddot}},
|
||||
editorstrcoaf = {{editors, translators, commentators\finalandcomma\ and afterword}%
|
||||
{eds.,\addabbrvspace trans., comm\adddot\finalandcomma\ and afterw\adddot}},
|
||||
editortranin = {{editor, translator, annotator\finalandcomma\ and introduction}%
|
||||
{ed.,\addabbrvspace trans., annot\adddot\finalandcomma\ and introd\adddot}},
|
||||
editorstranin = {{editors, translators, annotators\finalandcomma\ and introduction}%
|
||||
{eds.,\addabbrvspace trans., annot\adddot\finalandcomma\ and introd\adddot}},
|
||||
editortranfo = {{editor, translator, annotator\finalandcomma\ and foreword}%
|
||||
{ed.,\addabbrvspace trans., annot\adddot\finalandcomma\ and forew\adddot}},
|
||||
editorstranfo = {{editors, translators, annotators\finalandcomma\ and foreword}%
|
||||
{eds.,\addabbrvspace trans., annot\adddot\finalandcomma\ and forew\adddot}},
|
||||
editortranaf = {{editor, translator, annotator\finalandcomma\ and afterword}%
|
||||
{ed.,\addabbrvspace trans., annot\adddot\finalandcomma\ and afterw\adddot}},
|
||||
editorstranaf = {{editors, translators, annotators\finalandcomma\ and afterword}%
|
||||
{eds.,\addabbrvspace trans., annot\adddot\finalandcomma\ and afterw\adddot}},
|
||||
translatorco = {{translator and commentator}%
|
||||
{trans\adddot\ and comm\adddot}},
|
||||
translatorsco = {{translators and commentators}%
|
||||
{trans\adddot\ and comm\adddot}},
|
||||
translatoran = {{translator and annotator}%
|
||||
{trans\adddot\ and annot\adddot}},
|
||||
translatorsan = {{translators and annotators}%
|
||||
{trans\adddot\ and annot\adddot}},
|
||||
translatorin = {{translation and introduction}%
|
||||
{trans\adddot\ and introd\adddot}},
|
||||
translatorsin = {{translation and introduction}%
|
||||
{trans\adddot\ and introd\adddot}},
|
||||
translatorfo = {{translation and foreword}%
|
||||
{trans\adddot\ and forew\adddot}},
|
||||
translatorsfo = {{translation and foreword}%
|
||||
{trans\adddot\ and forew\adddot}},
|
||||
translatoraf = {{translation and afterword}%
|
||||
{trans\adddot\ and afterw\adddot}},
|
||||
translatorsaf = {{translation and afterword}%
|
||||
{trans\adddot\ and afterw\adddot}},
|
||||
translatorcoin = {{translation, commentary\finalandcomma\ and introduction}%
|
||||
{trans., comm\adddot\finalandcomma\ and introd\adddot}},
|
||||
translatorscoin = {{translation, commentary\finalandcomma\ and introduction}%
|
||||
{trans., comm\adddot\finalandcomma\ and introd\adddot}},
|
||||
translatorcofo = {{translation, commentary\finalandcomma\ and foreword}%
|
||||
{trans., comm\adddot\finalandcomma\ and forew\adddot}},
|
||||
translatorscofo = {{translation, commentary\finalandcomma\ and foreword}%
|
||||
{trans., comm\adddot\finalandcomma\ and forew\adddot}},
|
||||
translatorcoaf = {{translation, commentary\finalandcomma\ and afterword}%
|
||||
{trans., comm\adddot\finalandcomma\ and afterw\adddot}},
|
||||
translatorscoaf = {{translation, commentary\finalandcomma\ and afterword}%
|
||||
{trans., comm\adddot\finalandcomma\ and afterw\adddot}},
|
||||
translatoranin = {{translation, annotations\finalandcomma\ and introduction}%
|
||||
{trans., annot\adddot\finalandcomma\ and introd\adddot}},
|
||||
translatorsanin = {{translation, annotations\finalandcomma\ and introduction}%
|
||||
{trans., annot\adddot\finalandcomma\ and introd\adddot}},
|
||||
translatoranfo = {{translation, annotations\finalandcomma\ and foreword}%
|
||||
{trans., annot\adddot\finalandcomma\ and forew\adddot}},
|
||||
translatorsanfo = {{translation, annotations\finalandcomma\ and foreword}%
|
||||
{trans., annot\adddot\finalandcomma\ and forew\adddot}},
|
||||
translatoranaf = {{translation, annotations\finalandcomma\ and afterword}%
|
||||
{trans., annot\adddot\finalandcomma\ and afterw\adddot}},
|
||||
translatorsanaf = {{translation, annotations\finalandcomma\ and afterword}%
|
||||
{trans., annot\adddot\finalandcomma\ and afterw\adddot}},
|
||||
byauthor = {{by}{by}},
|
||||
byeditor = {{edited by}{ed\adddotspace by}},
|
||||
bycompiler = {{compiled by}{comp\adddotspace by}},
|
||||
byredactor = {{redacted by}{red\adddotspace by}},
|
||||
byreviser = {{revised by}{rev\adddotspace by}},
|
||||
byreviewer = {{reviewed by}{rev\adddotspace by}},
|
||||
byfounder = {{founded by}{found\adddotspace by}},
|
||||
bycontinuator = {{continued by}{cont\adddotspace by}},
|
||||
bycollaborator = {{in collaboration with}{in collab\adddotspace with}},% FIXME: unsure
|
||||
bytranslator = {{translated \lbx@lfromlang\ by}{trans\adddot\ \lbx@sfromlang\ by}},
|
||||
bycommentator = {{commented by}{comm\adddot\ by}},
|
||||
byannotator = {{annotated by}{annot\adddot\ by}},
|
||||
withcommentator = {{with a commentary by}{with a comment\adddot\ by}},
|
||||
withannotator = {{with annotations by}{with annots\adddot\ by}},
|
||||
withintroduction = {{with an introduction by}{with an intro\adddot\ by}},
|
||||
withforeword = {{with a foreword by}{with a forew\adddot\ by}},
|
||||
withafterword = {{with an afterword by}{with an afterw\adddot\ by}},
|
||||
byeditortr = {{edited and translated \lbx@lfromlang\ by}%
|
||||
{ed\adddotspace and trans\adddot\ \lbx@sfromlang\ by}},
|
||||
byeditorco = {{edited and commented by}%
|
||||
{ed\adddotspace and comm\adddot\ by}},
|
||||
byeditoran = {{edited and annotated by}%
|
||||
{ed\adddotspace and annot\adddot\ by}},
|
||||
byeditorin = {{edited, with an introduction, by}%
|
||||
{ed.,\addabbrvspace with an introd., by}},
|
||||
byeditorfo = {{edited, with a foreword, by}%
|
||||
{ed.,\addabbrvspace with a forew., by}},
|
||||
byeditoraf = {{edited, with an afterword, by}%
|
||||
{ed.,\addabbrvspace with an afterw., by}},
|
||||
byeditortrco = {{edited, translated \lbx@lfromlang\finalandcomma\ and commented by}%
|
||||
{ed.,\addabbrvspace trans\adddot\ \lbx@sfromlang\finalandcomma\ and comm\adddot\ by}},
|
||||
byeditortran = {{edited, translated \lbx@lfromlang\finalandcomma\ and annotated by}%
|
||||
{ed.,\addabbrvspace trans\adddot\ \lbx@sfromlang\finalandcomma\ and annot\adddot\ by}},
|
||||
byeditortrin = {{edited and translated \lbx@lfromlang, with an introduction, by}%
|
||||
{ed\adddotspace and trans\adddot\ \lbx@sfromlang, with an introd., by}},
|
||||
byeditortrfo = {{edited and translated \lbx@lfromlang, with a foreword, by}%
|
||||
{ed\adddotspace and trans\adddot\ \lbx@sfromlang, with a forew., by}},
|
||||
byeditortraf = {{edited and translated \lbx@lfromlang, with an afterword, by}%
|
||||
{ed\adddotspace and trans\adddot\ \lbx@sfromlang, with an afterw., by}},
|
||||
byeditorcoin = {{edited and commented, with an introduction, by}%
|
||||
{ed\adddotspace and comm., with an introd., by}},
|
||||
byeditorcofo = {{edited and commented, with a foreword, by}%
|
||||
{ed\adddotspace and comm., with a forew., by}},
|
||||
byeditorcoaf = {{edited and commented, with an afterword, by}%
|
||||
{ed\adddotspace and comm., with an afterw., by}},
|
||||
byeditoranin = {{edited and annotated, with an introduction, by}%
|
||||
{ed\adddotspace and annot., with an introd., by}},
|
||||
byeditoranfo = {{edited and annotated, with a foreword, by}%
|
||||
{ed\adddotspace and annot., with a forew., by}},
|
||||
byeditoranaf = {{edited and annotated, with an afterword, by}%
|
||||
{ed\adddotspace and annot., with an afterw., by}},
|
||||
byeditortrcoin = {{edited, translated \lbx@lfromlang\finalandcomma\ and commented, with an introduction, by}%
|
||||
{ed.,\addabbrvspace trans\adddot\ \lbx@sfromlang\finalandcomma\ and comm., with an introd., by}},
|
||||
byeditortrcofo = {{edited, translated \lbx@lfromlang\finalandcomma\ and commented, with a foreword, by}%
|
||||
{ed.,\addabbrvspace trans\adddot\ \lbx@sfromlang\finalandcomma\ and comm., with a forew., by}},
|
||||
byeditortrcoaf = {{edited, translated \lbx@lfromlang\finalandcomma\ and commented, with an afterword, by}%
|
||||
{ed.,\addabbrvspace trans\adddot\ \lbx@sfromlang\finalandcomma\ and comm., with an afterw., by}},
|
||||
byeditortranin = {{edited, translated \lbx@lfromlang\finalandcomma\ and annotated, with an introduction, by}%
|
||||
{ed.,\addabbrvspace trans\adddot\ \lbx@sfromlang\finalandcomma\ and annot, with an introd., by}},
|
||||
byeditortranfo = {{edited, translated \lbx@lfromlang\finalandcomma\ and annotated, with a foreword, by}%
|
||||
{ed.,\addabbrvspace trans\adddot\ \lbx@sfromlang\finalandcomma\ and annot, with a forew., by}},
|
||||
byeditortranaf = {{edited, translated \lbx@lfromlang\finalandcomma\ and annotated, with an afterword, by}%
|
||||
{ed.,\addabbrvspace trans\adddot\ \lbx@sfromlang\finalandcomma\ and annot, with an afterw., by}},
|
||||
bytranslatorco = {{translated \lbx@lfromlang\ and commented by}%
|
||||
{trans\adddot\ \lbx@sfromlang\ and comm\adddot\ by}},
|
||||
bytranslatoran = {{translated \lbx@lfromlang\ and annotated by}%
|
||||
{trans\adddot\ \lbx@sfromlang\ and annot\adddot\ by}},
|
||||
bytranslatorin = {{translated \lbx@lfromlang, with an introduction, by}%
|
||||
{trans\adddot\ \lbx@sfromlang, with an introd., by}},
|
||||
bytranslatorfo = {{translated \lbx@lfromlang, with a foreword, by}%
|
||||
{trans\adddot\ \lbx@sfromlang, with a forew., by}},
|
||||
bytranslatoraf = {{translated \lbx@lfromlang, with an afterword, by}%
|
||||
{trans\adddot\ \lbx@sfromlang, with an afterw., by}},
|
||||
bytranslatorcoin = {{translated \lbx@lfromlang\ and commented, with an introduction, by}%
|
||||
{trans\adddot\ \lbx@sfromlang\ and comm., with an introd., by}},
|
||||
bytranslatorcofo = {{translated \lbx@lfromlang\ and commented, with a foreword, by}%
|
||||
{trans\adddot\ \lbx@sfromlang\ and comm., with a forew., by}},
|
||||
bytranslatorcoaf = {{translated \lbx@lfromlang\ and commented, with an afterword, by}%
|
||||
{trans\adddot\ \lbx@sfromlang\ and comm., with an afterw., by}},
|
||||
bytranslatoranin = {{translated \lbx@lfromlang\ and annotated, with an introduction, by}%
|
||||
{trans\adddot\ \lbx@sfromlang\ and annot., with an introd., by}},
|
||||
bytranslatoranfo = {{translated \lbx@lfromlang\ and annotated, with a foreword, by}%
|
||||
{trans\adddot\ \lbx@sfromlang\ and annot., with a forew., by}},
|
||||
bytranslatoranaf = {{translated \lbx@lfromlang\ and annotated, with an afterword, by}%
|
||||
{trans\adddot\ \lbx@sfromlang\ and annot., with an afterw., by}},
|
||||
and = {{and}{and}},
|
||||
andothers = {{et\addabbrvspace al\adddot}{et\addabbrvspace al\adddot}},
|
||||
andmore = {{et\addabbrvspace al\adddot}{et\addabbrvspace al\adddot}},
|
||||
volume = {{volume}{vol\adddot}},
|
||||
volumes = {{volumes}{vols\adddot}},
|
||||
involumes = {{in}{in}},
|
||||
jourvol = {{volume}{vol\adddot}},
|
||||
jourser = {{series}{ser\adddot}},
|
||||
book = {{book}{book}},
|
||||
part = {{part}{part}},
|
||||
issue = {{issue}{issue}},
|
||||
newseries = {{new series}{new ser\adddot}},
|
||||
oldseries = {{old series}{old ser\adddot}},
|
||||
edition = {{edition}{ed\adddot}},
|
||||
reprint = {{reprint}{repr\adddot}},
|
||||
reprintof = {{reprint of}{repr\adddotspace of}},
|
||||
reprintas = {{reprinted as}{rpt\adddotspace as}},
|
||||
reprintfrom = {{reprinted from}{repr\adddotspace from}},
|
||||
reviewof = {{review of}{rev\adddotspace of}},
|
||||
translationof = {{translation of}{trans\adddotspace of}},
|
||||
translationas = {{translated as}{trans\adddotspace as}},
|
||||
translationfrom = {{translated from}{trans\adddotspace from}},
|
||||
origpubas = {{originally published as}{orig\adddotspace pub\adddotspace as}},
|
||||
origpubin = {{originally published in}{orig\adddotspace pub\adddotspace in}},
|
||||
astitle = {{as}{as}},
|
||||
bypublisher = {{by}{by}},
|
||||
page = {{page}{p\adddot}},
|
||||
pages = {{pages}{pp\adddot}},
|
||||
column = {{column}{col\adddot}},
|
||||
columns = {{columns}{cols\adddot}},
|
||||
line = {{line}{l\adddot}},
|
||||
lines = {{lines}{ll\adddot}},
|
||||
nodate = {{no date}{n\adddot d\adddot}},
|
||||
verse = {{verse}{v\adddot}},
|
||||
verses = {{verses}{vv\adddot}},
|
||||
section = {{section}{\S}},
|
||||
sections = {{sections}{\S\S}},
|
||||
paragraph = {{paragraph}{par\adddot}},
|
||||
paragraphs = {{paragraphs}{par\adddot}},
|
||||
in = {{in}{in}},
|
||||
inseries = {{in}{in}},
|
||||
ofseries = {{of}{of}},
|
||||
number = {{number}{no\adddot}},
|
||||
chapter = {{chapter}{chap\adddot}},
|
||||
mathesis = {{Master's thesis}{MA\addabbrvspace thesis}},
|
||||
phdthesis = {{PhD\addabbrvspace thesis}{PhD\addabbrvspace thesis}},
|
||||
candthesis = {{Candidate thesis}{Cand\adddotspace thesis}},
|
||||
resreport = {{research report}{research rep\adddot}},
|
||||
techreport = {{technical report}{tech\adddotspace rep\adddot}},
|
||||
software = {{computer software}{comp\adddotspace software}},
|
||||
datacd = {{CD-ROM}{CD-ROM}},
|
||||
audiocd = {{audio CD}{audio CD}},
|
||||
version = {{version}{version}},
|
||||
url = {{address}{address}},
|
||||
urlfrom = {{available from}{available from}},
|
||||
urlseen = {{visited on}{visited on}},
|
||||
inpreparation = {{in preparation}{in preparation}},
|
||||
submitted = {{submitted}{submitted}},
|
||||
forthcoming = {{forthcoming}{forthcoming}},
|
||||
inpress = {{in press}{in press}},
|
||||
prepublished = {{pre-published}{pre-published}},
|
||||
citedas = {{henceforth cited as}{henceforth cited as}},
|
||||
thiscite = {{especially}{esp\adddot}},
|
||||
seenote = {{see note}{see n\adddot}},
|
||||
quotedin = {{quoted in}{qtd\adddotspace in}},
|
||||
idem = {{idem}{idem}},
|
||||
idemsm = {{idem}{idem}},
|
||||
idemsf = {{eadem}{eadem}},
|
||||
idemsn = {{idem}{idem}},
|
||||
idempm = {{eidem}{eidem}},
|
||||
idempf = {{eaedem}{eaedem}},
|
||||
idempn = {{eadem}{eadem}},
|
||||
idempp = {{eidem}{eidem}},
|
||||
ibidem = {{ibidem}{ibid\adddot}},
|
||||
opcit = {{op\adddotspace cit\adddot}{op\adddotspace cit\adddot}},
|
||||
loccit = {{loc\adddotspace cit\adddot}{loc\adddotspace cit\adddot}},
|
||||
confer = {{cf\adddot}{cf\adddot}},
|
||||
sequens = {{sq\adddot}{sq\adddot}},
|
||||
sequentes = {{sqq\adddot}{sqq\adddot}},
|
||||
passim = {{passim}{pass\adddot}},
|
||||
see = {{see}{see}},
|
||||
seealso = {{see also}{see also}},
|
||||
backrefpage = {{cited on page}{cit\adddotspace on p\adddot}},
|
||||
backrefpages = {{cited on pages}{cit\adddotspace on pp\adddot}},
|
||||
january = {{January}{Jan\adddot}},
|
||||
february = {{February}{Feb\adddot}},
|
||||
march = {{March}{Mar\adddot}},
|
||||
april = {{April}{Apr\adddot}},
|
||||
may = {{May}{May}},
|
||||
june = {{June}{June}},
july = {{July}{July}},
august = {{August}{Aug\adddot}},
september = {{September}{Sept\adddot}},
october = {{October}{Oct\adddot}},
november = {{November}{Nov\adddot}},
december = {{December}{Dec\adddot}},
langamerican = {{American}{American}},
langbrazilian = {{Brazilian}{Brazilian}},
langcatalan = {{Catalan}{Catalan}},
langcroatian = {{Croatian}{Croatian}},
langczech = {{Czech}{Czech}},
langdanish = {{Danish}{Danish}},
langdutch = {{Dutch}{Dutch}},
langenglish = {{English}{English}},
langfinnish = {{Finnish}{Finnish}},
langfrench = {{French}{French}},
langgerman = {{German}{German}},
langgreek = {{Greek}{Greek}},
langitalian = {{Italian}{Italian}},
langlatin = {{Latin}{Latin}},
langnorwegian = {{Norwegian}{Norwegian}},
langpolish = {{Polish}{Polish}},
langportuguese = {{Portuguese}{Portuguese}},
langrussian = {{Russian}{Russian}},
langslovene = {{Slovene}{Slovene}},
langspanish = {{Spanish}{Spanish}},
langswedish = {{Swedish}{Swedish}},
fromamerican = {{from the American}{from the American}},
frombrazilian = {{from the Brazilian}{from the Brazilian}},
fromcatalan = {{from the Catalan}{from the Catalan}},
fromcroatian = {{from the Croatian}{from the Croatian}},
fromczech = {{from the Czech}{from the Czech}},
fromdanish = {{from the Danish}{from the Danish}},
fromdutch = {{from the Dutch}{from the Dutch}},
fromenglish = {{from the English}{from the English}},
fromfinnish = {{from the Finnish}{from the Finnish}},
fromfrench = {{from the French}{from the French}},
fromgerman = {{from the German}{from the German}},
fromgreek = {{from the Greek}{from the Greek}},
fromitalian = {{from the Italian}{from the Italian}},
fromlatin = {{from the Latin}{from the Latin}},
fromnorwegian = {{from the Norwegian}{from the Norwegian}},
frompolish = {{from the Polish}{from the Polish}},
fromportuguese = {{from the Portuguese}{from the Portuguese}},
fromrussian = {{from the Russian}{from the Russian}},
fromslovene = {{from the Slovene}{from the Slovene}},
fromspanish = {{from the Spanish}{from the Spanish}},
fromswedish = {{from the Swedish}{from the Swedish}},
countryde = {{Germany}{DE}},
countryeu = {{European Union}{EU}},
countryep = {{European Union}{EP}},
countryfr = {{France}{FR}},
countryuk = {{United Kingdom}{GB}},
countryus = {{United States of America}{US}},
patent = {{patent}{pat\adddot}},
patentde = {{German patent}{German pat\adddot}},
patenteu = {{European patent}{European pat\adddot}},
patentfr = {{French patent}{French pat\adddot}},
patentuk = {{British patent}{British pat\adddot}},
patentus = {{U.S\adddotspace patent}{U.S\adddotspace pat\adddot}},
patreq = {{patent request}{pat\adddot\ req\adddot}},
patreqde = {{German patent request}{German pat\adddot\ req\adddot}},
patreqeu = {{European patent request}{European pat\adddot\ req\adddot}},
patreqfr = {{French patent request}{French pat\adddot\ req\adddot}},
patrequk = {{British patent request}{British pat\adddot\ req\adddot}},
patrequs = {{U.S\adddotspace patent request}{U.S\adddotspace pat\adddot\ req\adddot}},
file = {{file}{file}},
library = {{library}{library}},
abstract = {{abstract}{abstract}},
annotation = {{annotations}{annotations}},
}

\protected\gdef\lbx@us@mkbibrangetrunc@long#1#2{%
  \iffieldundef{#2year}
    {}
    {\printtext[#2date]{%
       \iffieldsequal{#2year}{#2endyear}
         {\csuse{mkbibdate#1}{}{#2month}{#2day}}
         {\csuse{mkbibdate#1}{#2year}{#2month}{#2day}}%
       \iffieldundef{#2endyear}
         {}
         {\iffieldequalstr{#2endyear}{}
            {\mbox{\bibdatedash}}
            {\bibdatedash
             \iffieldsequal{#2year}{#2endyear}
               {\iffieldsequal{#2month}{#2endmonth}
                  {\csuse{mkbibdate#1}{#2endyear}{}{#2endday}}
                  {\csuse{mkbibdate#1}{#2endyear}{#2endmonth}{#2endday}}}
               {\csuse{mkbibdate#1}{#2endyear}{#2endmonth}{#2endday}}}}}}}

\protected\gdef\lbx@us@mkbibrangetrunc@short#1#2{%
  \iffieldundef{#2year}
    {}
    {\printtext[#2date]{%
       \iffieldsequal{#2year}{#2endyear}
         {\csuse{mkbibdate#1}{}{#2month}{#2day}}
         {\csuse{mkbibdate#1}{#2year}{#2month}{#2day}}%
       \iffieldundef{#2endyear}
         {}
         {\iffieldequalstr{#2endyear}{}
            {\mbox{\bibdatedash}}
            {\bibdatedash
             \csuse{mkbibdate#1}{#2endyear}{#2endmonth}{#2endday}}}}}}

\protected\gdef\lbx@us@mkbibrangetruncextra@long#1#2{%
  \iffieldundef{#2year}
    {}
    {\printtext[#2date]{%
       \iffieldsequal{#2year}{#2endyear}
         {\csuse{mkbibdate#1}{}{#2month}{#2day}}
         {\csuse{mkbibdate#1}{#2year}{#2month}{#2day}}%
       \iffieldundef{#2endyear}
         {\printfield{extrayear}}
         {\iffieldequalstr{#2endyear}{}
            {\printfield{extrayear}%
             \mbox{\bibdatedash}}
            {\bibdatedash
             \iffieldsequal{#2year}{#2endyear}
               {\iffieldsequal{#2month}{#2endmonth}
                  {\csuse{mkbibdate#1}{#2endyear}{}{#2endday}}
                  {\csuse{mkbibdate#1}{#2endyear}{#2endmonth}{#2endday}}}
               {\csuse{mkbibdate#1}{#2endyear}{#2endmonth}{#2endday}}%
             \printfield{extrayear}}}}}}

\protected\gdef\lbx@us@mkbibrangetruncextra@short#1#2{%
  \iffieldundef{#2year}
    {}
    {\printtext[#2date]{%
       \iffieldsequal{#2year}{#2endyear}
         {\csuse{mkbibdate#1}{}{#2month}{#2day}}
         {\csuse{mkbibdate#1}{#2year}{#2month}{#2day}}%
       \iffieldundef{#2endyear}
         {\printfield{extrayear}}
         {\iffieldequalstr{#2endyear}{}
            {\printfield{extrayear}%
             \mbox{\bibdatedash}}
            {\bibdatedash
             \csuse{mkbibdate#1}{#2endyear}{#2endmonth}{#2endday}%
             \printfield{extrayear}}}}}}

\endinput

6
samples/TeX/verbose.bbx
Normal file
@@ -0,0 +1,6 @@
\ProvidesFile{verbose.bbx}
[\abx@bbxid]

\RequireBibliographyStyle{authortitle}

\endinput
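As a quick illustration of why these sample files matter (a hedged sketch, not part of the diff): classifying one of the new TeX samples through the public Linguist API should resolve to TeX, assuming the .bbx extension stays mapped to TeX in languages.yml.

require 'linguist'

# Usage sketch only: classify one of the new TeX samples.
# Assumes .bbx resolves to TeX via languages.yml.
blob = Linguist::FileBlob.new('samples/TeX/verbose.bbx')
puts blob.language && blob.language.name  # expected: "TeX"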
@@ -393,6 +393,10 @@ class TestBlob < Test::Unit::TestCase
    # NuGet Packages
    assert blob("packages/Modernizr.2.0.6/Content/Scripts/modernizr-2.0.6-development-only.js").vendored?

    # Font Awesome
    assert blob("some/asset/path/font-awesome.min.css").vendored?
    assert blob("some/asset/path/font-awesome.css").vendored?

    # Normalize
    assert blob("some/asset/path/normalize.css").vendored?

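For context on the vendored? assertions above, here is a minimal sketch (not the actual linguist implementation) of what a vendoring check amounts to: matching the blob path against the list of regular expressions kept in lib/linguist/vendor.yml.

require 'yaml'

# Minimal sketch, assuming vendor.yml is a flat YAML list of regexp strings.
VENDOR_PATTERNS = YAML.load_file('lib/linguist/vendor.yml').map { |re| Regexp.new(re) }

def vendored?(path)
  VENDOR_PATTERNS.any? { |pattern| path =~ pattern }
end

vendored?("some/asset/path/font-awesome.min.css")  # expected: true once the pattern is listed
vendored?("app/models/user.rb")                    # expected: false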
@@ -17,6 +17,7 @@ class TestLanguage < Test::Unit::TestCase
    assert_equal Lexer['C'], Language['OpenCL'].lexer
    assert_equal Lexer['C'], Language['XS'].lexer
    assert_equal Lexer['C++'], Language['C++'].lexer
    assert_equal Lexer['Chapel'], Language['Chapel'].lexer
    assert_equal Lexer['Coldfusion HTML'], Language['ColdFusion'].lexer
    assert_equal Lexer['Coq'], Language['Coq'].lexer
    assert_equal Lexer['FSharp'], Language['F#'].lexer
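The new Chapel assertion relies on the usual lexer fallback; a hedged sketch of how the lookup behaves when languages.yml does not name an explicit lexer:

require 'linguist'

# Sketch, assuming the Lexer/Language API exercised by the tests above:
# with no explicit lexer entry in languages.yml, the lexer defaults to the
# language name, so Chapel resolves to the Pygments lexer named "Chapel".
language = Linguist::Language['Chapel']
puts language.lexer.name  # expected: "Chapel"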
@@ -71,6 +72,7 @@ class TestLanguage < Test::Unit::TestCase
    assert_equal Language['C'], Language.find_by_alias('c')
    assert_equal Language['C++'], Language.find_by_alias('c++')
    assert_equal Language['C++'], Language.find_by_alias('cpp')
    assert_equal Language['Chapel'], Language.find_by_alias('chpl')
    assert_equal Language['CoffeeScript'], Language.find_by_alias('coffee')
    assert_equal Language['CoffeeScript'], Language.find_by_alias('coffee-script')
    assert_equal Language['ColdFusion'], Language.find_by_alias('cfm')
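A short usage example for the alias added above (a sketch; the output comment is illustrative):

require 'linguist'

# find_by_alias maps aliases declared in languages.yml back to a Language,
# so the chpl alias added for Chapel resolves like this:
puts Linguist::Language.find_by_alias('chpl').name  # expected: "Chapel"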
@@ -255,6 +257,7 @@ class TestLanguage < Test::Unit::TestCase
    assert_equal [Language['Shell']], Language.find_by_filename('.zshrc')
    assert_equal [Language['Clojure']], Language.find_by_filename('riemann.config')
    assert_equal [Language['HTML+Django']], Language.find_by_filename('index.jinja')
    assert_equal [Language['Chapel']], Language.find_by_filename('examples/hello.chpl')
  end

  def test_find_by_shebang
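Because extensions such as .h are now shared by several languages, find_by_filename can return more than one candidate. A hedged sketch of what that looks like in practice:

require 'linguist'

# With .h listed under both C and C++ (and Objective-C), a filename lookup is
# ambiguous and returns every matching language; the exact set and order
# depend on languages.yml.
Linguist::Language.find_by_filename('foo.h').map(&:name)
# => e.g. ["C", "C++", "Objective-C"]

# An unambiguous extension still returns a single-element array.
Linguist::Language.find_by_filename('examples/hello.chpl').map(&:name)
# => ["Chapel"]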
@@ -35,15 +35,33 @@ class TestSamples < Test::Unit::TestCase
    assert_equal data['tokens_total'], data['language_tokens'].inject(0) { |n, (_, c)| n += c }
    assert_equal data['tokens_total'], data['tokens'].inject(0) { |n, (_, ts)| n += ts.inject(0) { |m, (_, c)| m += c } }
  end

  # Check that there aren't samples with extensions that aren't explicitly defined in languages.yml
  def test_parity
    extensions = Samples::DATA['extnames']
    languages_yml = File.expand_path("../../lib/linguist/languages.yml", __FILE__)
    languages = YAML.load_file(languages_yml)

    languages.each do |name, options|
      options['extensions'] ||= []

      if extnames = extensions[name]
        extnames.each do |extname|
          next if extname == '.script!'
          assert options['extensions'].include?(extname), "#{name} has a sample with extension (#{extname}) that isn't explicitly defined in languages.yml"
        end
      end
    end
  end

  # If a language extension isn't globally unique then make sure there are samples
  def test_presence
    Linguist::Language.all.each do |language|
      language.all_extensions.each do |extension|
        language_matches = Language.find_by_filename("foo#{extension}")

        # If there is more than one language match for a given extension
        # then check that there are examples for that language with the extension
        if language_matches.length > 1
          language_matches.each do |language|
            assert File.directory?("samples/#{language.name}"), "#{language.name} is missing a samples directory"
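For readers unfamiliar with Samples::DATA, the extnames table that test_parity consumes is roughly a mapping from language name to the sample extensions found under samples/. The following is a simplified, hypothetical reconstruction, not the real generator (which lives in lib/linguist/samples.rb):

require 'set'

# Hypothetical sketch only: collect the extension of every sample file,
# grouped by the language directory it lives in, mirroring what
# Samples::DATA['extnames'] is expected to hold.
extnames = Hash.new { |hash, language| hash[language] = Set.new }

Dir.glob('samples/*/*') do |path|
  language = File.basename(File.dirname(path))
  ext = File.extname(path)
  extnames[language] << ext unless ext.empty?
end

extnames['TeX']  # expected to include extensions such as ".bbx" once the samples above land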