Mirror of https://github.com/KevinMidboe/zoff.git (synced 2025-10-29 18:00:23 +00:00)

Commit: Added gulp, and started using the file being built by gulp. Fixed some issues with remote control changing. Implemented clients being able to change password.
@@ -66,13 +66,4 @@
 <script type="text/javascript" src="https://cdn.socket.io/socket.io-1.3.5.js"></script>
 <!--<script type="text/javascript" src="/static/js/lib/iscroll-min.js"></script>-->
 <script type="text/javascript" src="https://crypto-js.googlecode.com/svn/tags/3.1.2/build/rollups/sha256.js"></script>
-<script type="text/javascript" src="/static/js/zoff-mini.js"></script>
+<script type="text/javascript" src="/static/build-js/main.js"></script>
-<!--<script type="text/javascript" src="/static/js/list.js"></script>
-<script type="text/javascript" src="/static/js/searchlist.js"></script>
-<script type="text/javascript" src="/static/js/playercontrols.js"></script>
-<script type="text/javascript" src="/static/js/youtube.js"></script>
-<script type="text/javascript" src="/static/js/search.js"></script>
-<script type="text/javascript" src="/static/js/admin.js"></script>
-<script type="text/javascript" src="/static/js/chat.js"></script>
-<script type="text/javascript" src="/static/js/hostcontroller.js"></script>-->
-<!--<script type="text/javascript" src="/static/js/remotecontroller.js"></script>-->
server/gulpfile.js (new file, 19 lines)
@@ -0,0 +1,19 @@
var gulp = require('gulp'),
    gutil = require('gulp-util'),
    uglify = require('gulp-uglifyjs'),
    concat = require('gulp-concat');

gulp.task('js', function () {
    gulp.src(['../static/js/*.js', '!../static/js/nochan*', '!../static/js/remotecontroller.js'])
        .pipe(uglify({
            mangle: true,
            compress: true,
            enclose: true
        }))
        .pipe(concat('main.js'))
        .pipe(gulp.dest('../static/build-js'));
});

gulp.task('default', function(){
    gulp.watch('../static/js/*.js', ['js']);
});
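For context, a minimal sketch (not itself part of this commit) of how the tasks above would typically be started, assuming the gulp 3.x CLI and the plugins listed in the gulpfile are installed under server/node_modules, as the vendored files below suggest. The 'js' task produces the /static/build-js/main.js bundle that the script-tag change at the top of this diff now loads; the 'default' task rebuilds it whenever a file under ../static/js changes.

```js
// From the server/ directory, with the locally installed CLI (hypothetical invocations):
//   ./node_modules/.bin/gulp js    -> one-off build of ../static/build-js/main.js
//   ./node_modules/.bin/gulp       -> 'default' task: watch ../static/js/*.js and rebuild on change
// The build can also be kicked off programmatically; gulp 3.x inherits start() from Orchestrator.
var gulp = require('gulp');
require('./gulpfile');  // registers the 'js' and 'default' tasks defined above
gulp.start('js');       // writes the uglified, concatenated bundle to ../static/build-js/main.js
```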
server/node_modules/.bin/gulp (symbolic link, generated, vendored, 1 line)
@@ -0,0 +1 @@
../gulp/bin/gulp.js
server/node_modules/gulp-concat/README.md (new file, generated, vendored, 108 lines)
@@ -0,0 +1,108 @@
[badge images not recovered from the page extraction]

## Information

<table>
<tr>
<td>Package</td><td>gulp-concat</td>
</tr>
<tr>
<td>Description</td>
<td>Concatenates files</td>
</tr>
<tr>
<td>Node Version</td>
<td>>= 0.10</td>
</tr>
</table>

## Usage

```js
var concat = require('gulp-concat');

gulp.task('scripts', function() {
  return gulp.src('./lib/*.js')
    .pipe(concat('all.js'))
    .pipe(gulp.dest('./dist/'));
});
```

This will concat files by your operating systems newLine. It will take the base directory from the first file that passes through it.

Files will be concatenated in the order that they are specified in the `gulp.src` function. For example, to concat `./lib/file3.js`, `./lib/file1.js` and `./lib/file2.js` in that order, the following code will create a task to do that:

```js
var concat = require('gulp-concat');

gulp.task('scripts', function() {
  return gulp.src(['./lib/file3.js', './lib/file1.js', './lib/file2.js'])
    .pipe(concat('all.js'))
    .pipe(gulp.dest('./dist/'));
});
```

To change the newLine simply pass an object as the second argument to concat with newLine being whatever (\r\n if you want to support any OS to look at it)

For instance:

```js
  .pipe(concat('main.js', {newLine: ';'}))
```

To specify `cwd`, `path` and other [vinyl](https://github.com/wearefractal/vinyl) properties, gulp-concat accepts `Object` as first argument:

```js
var concat = require('gulp-concat');

gulp.task('scripts', function() {
  return gulp.src(['./lib/file3.js', './lib/file1.js', './lib/file2.js'])
    .pipe(concat({ path: 'new.js', stat: { mode: 0666 }}))
    .pipe(gulp.dest('./dist'));
});
```

This will concat files into `./dist/new.js`.

### Source maps

Source maps can be generated by using [gulp-sourcemaps](https://www.npmjs.org/package/gulp-sourcemaps):

```js
var gulp = require('gulp');
var concat = require('gulp-concat');
var sourcemaps = require('gulp-sourcemaps');

gulp.task('javascript', function() {
  return gulp.src('src/**/*.js')
    .pipe(sourcemaps.init())
    .pipe(concat('all.js'))
    .pipe(sourcemaps.write())
    .pipe(gulp.dest('dist'));
});
```

## LICENSE

(MIT License)

Copyright (c) 2014 Fractal <contact@wearefractal.com>

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
server/node_modules/gulp-concat/index.js (new file, generated, vendored, 103 lines)
@@ -0,0 +1,103 @@
'use strict';

var through = require('through2');
var path = require('path');
var gutil = require('gulp-util');
var PluginError = gutil.PluginError;
var File = gutil.File;
var Concat = require('concat-with-sourcemaps');

// file can be a vinyl file object or a string
// when a string it will construct a new one
module.exports = function(file, opt) {
  if (!file) {
    throw new PluginError('gulp-concat', 'Missing file option for gulp-concat');
  }
  opt = opt || {};

  // to preserve existing |undefined| behaviour and to introduce |newLine: ""| for binaries
  if (typeof opt.newLine !== 'string') {
    opt.newLine = gutil.linefeed;
  }

  var isUsingSourceMaps = false;
  var latestFile;
  var latestMod;
  var fileName;
  var concat;

  if (typeof file === 'string') {
    fileName = file;
  } else if (typeof file.path === 'string') {
    fileName = path.basename(file.path);
  } else {
    throw new PluginError('gulp-concat', 'Missing path in file options for gulp-concat');
  }

  function bufferContents(file, enc, cb) {
    // ignore empty files
    if (file.isNull()) {
      cb();
      return;
    }

    // we don't do streams (yet)
    if (file.isStream()) {
      this.emit('error', new PluginError('gulp-concat', 'Streaming not supported'));
      cb();
      return;
    }

    // enable sourcemap support for concat
    // if a sourcemap initialized file comes in
    if (file.sourceMap && isUsingSourceMaps === false) {
      isUsingSourceMaps = true;
    }

    // set latest file if not already set,
    // or if the current file was modified more recently.
    if (!latestMod || file.stat && file.stat.mtime > latestMod) {
      latestFile = file;
      latestMod = file.stat && file.stat.mtime;
    }

    // construct concat instance
    if (!concat) {
      concat = new Concat(isUsingSourceMaps, fileName, opt.newLine);
    }

    // add file to concat instance
    concat.add(file.relative, file.contents, file.sourceMap);
    cb();
  }

  function endStream(cb) {
    // no files passed in, no file goes out
    if (!latestFile || !concat) {
      cb();
      return;
    }

    var joinedFile;

    // if file opt was a file path
    // clone everything from the latest file
    if (typeof file === 'string') {
      joinedFile = latestFile.clone({contents: false});
      joinedFile.path = path.join(latestFile.base, file);
    } else {
      joinedFile = new File(file);
    }

    joinedFile.contents = concat.content;

    if (concat.sourceMapping) {
      joinedFile.sourceMap = JSON.parse(concat.sourceMap);
    }

    this.push(joinedFile);
    cb();
  }

  return through.obj(bufferContents, endStream);
};
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/LICENSE.md (new file, generated, vendored, 13 lines)
@@ -0,0 +1,13 @@
Copyright (c) 2014, Florian Reiterer <me@florianreiterer.com>

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/README.md (new file, generated, vendored, 44 lines)
@@ -0,0 +1,44 @@
## Concat with source maps [![NPM version][npm-image]][npm-url] [![build status][travis-image]][travis-url] [![Test coverage][coveralls-image]][coveralls-url]

NPM module for concatenating files and generating source maps.

### Usage example
```js
var concat = new Concat(true, 'all.js', '\n');
concat.add('file1.js', file1Content);
concat.add('file2.js', file2Content, file2SourceMap);

var concatenatedContent = concat.content;
var sourceMapForContent = concat.sourceMap;
```

### API

#### new Concat(generateSourceMap, outFileName, separator)
Initialize a new concat object.

Parameters:
- generateSourceMap: whether or not to generate a source map (default: false)
- outFileName: the file name/path of the output file (for the source map)
- separator: the string that should separate files (default: no separator)

#### concat.add(fileName, content, sourceMap)
Add a file to the output file.

Parameters:
- fileName: file name of the input file
- content: content (Buffer or string) of the input file
- sourceMap: optional source map of the input file (string). Will be merged into the output source map.

#### concat.content
The resulting concatenated file content (Buffer).

#### concat.sourceMap
The resulting source map of the concatenated files (string).

[npm-image]: https://img.shields.io/npm/v/concat-with-sourcemaps.svg
[npm-url]: https://www.npmjs.com/package/concat-with-sourcemaps
[travis-image]: https://img.shields.io/travis/floridoo/concat-with-sourcemaps.svg
[travis-url]: https://travis-ci.org/floridoo/concat-with-sourcemaps
[coveralls-image]: https://img.shields.io/coveralls/floridoo/concat-with-sourcemaps.svg
[coveralls-url]: https://coveralls.io/r/floridoo/concat-with-sourcemaps?branch=master
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/index.js (new file, generated, vendored, 119 lines)
@@ -0,0 +1,119 @@
'use strict';
var SourceMapGenerator = require('source-map').SourceMapGenerator;
var SourceMapConsumer = require('source-map').SourceMapConsumer;

function unixStylePath(filePath) {
  return filePath.replace(/\\/g, '/');
}

function Concat(generateSourceMap, fileName, separator) {
  this.lineOffset = 0;
  this.columnOffset = 0;
  this.sourceMapping = generateSourceMap;
  this.contentParts = [];

  if (separator === undefined) {
    this.separator = new Buffer(0);
  } else {
    this.separator = new Buffer(separator);
  }

  if (this.sourceMapping) {
    this._sourceMap = new SourceMapGenerator({file: unixStylePath(fileName)});
    this.separatorLineOffset = 0;
    this.separatorColumnOffset = 0;
    var separatorString = this.separator.toString();
    for (var i = 0; i < separatorString.length; i++) {
      this.separatorColumnOffset++;
      if (separatorString[i] === '\n') {
        this.separatorLineOffset++;
        this.separatorColumnOffset = 0;
      }
    }
  }
}

Concat.prototype.add = function(filePath, content, sourceMap) {
  filePath = unixStylePath(filePath);

  if (!Buffer.isBuffer(content)) {
    content = new Buffer(content);
  }

  if (this.contentParts.length !== 0) {
    this.contentParts.push(this.separator);
  }
  this.contentParts.push(content);

  if (this.sourceMapping) {
    var contentString = content.toString();
    var lines = contentString.split('\n').length;

    if (Object.prototype.toString.call(sourceMap) === '[object String]')
      sourceMap = JSON.parse(sourceMap);

    if (sourceMap && sourceMap.mappings && sourceMap.mappings.length > 0) {
      var upstreamSM = new SourceMapConsumer(sourceMap);
      var _this = this;
      upstreamSM.eachMapping(function(mapping) {
        if (mapping.source) {
          _this._sourceMap.addMapping({
            generated: {
              line: _this.lineOffset + mapping.generatedLine,
              column: (mapping.generatedLine === 1 ? _this.columnOffset : 0) + mapping.generatedColumn
            },
            original: {
              line: mapping.originalLine,
              column: mapping.originalColumn
            },
            source: mapping.source,
            name: mapping.name
          });
        }
      });
      if (upstreamSM.sourcesContent) {
        upstreamSM.sourcesContent.forEach(function(sourceContent, i) {
          _this._sourceMap.setSourceContent(upstreamSM.sources[i], sourceContent);
        });
      }
    } else {
      if (sourceMap && sourceMap.sources && sourceMap.sources.length > 0)
        filePath = sourceMap.sources[0];
      for (var i = 1; i <= lines; i++) {
        this._sourceMap.addMapping({
          generated: {
            line: this.lineOffset + i,
            column: (i === 1 ? this.columnOffset : 0)
          },
          original: {
            line: i,
            column: 0
          },
          source: filePath
        });
      }
      if (sourceMap && sourceMap.sourcesContent)
        this._sourceMap.setSourceContent(filePath, sourceMap.sourcesContent[0]);
    }
    if (lines > 1)
      this.columnOffset = 0;
    if (this.separatorLineOffset === 0)
      this.columnOffset += contentString.length - Math.max(0, contentString.lastIndexOf('\n')+1);
    this.columnOffset += this.separatorColumnOffset;
    this.lineOffset += lines - 1 + this.separatorLineOffset;
  }
};

Object.defineProperty(Concat.prototype, 'content', {
  get: function content() {
    return Buffer.concat(this.contentParts);
  }
});

Object.defineProperty(Concat.prototype, 'sourceMap', {
  get: function sourceMap() {
    return this._sourceMap ? this._sourceMap.toString() : undefined;
  }
});

module.exports = Concat;
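To make the offset bookkeeping above concrete, here is a small sketch (not part of the commit; the file names and contents are invented for illustration, and it assumes the module is installed exactly as vendored in this diff):

```js
var Concat = require('concat-with-sourcemaps');

// Two plain inputs with no upstream source maps, joined by '\n'.
var c = new Concat(true, 'out.js', '\n');
c.add('a.js', 'var a = 1;\nvar b = 2;\n');
c.add('b.js', 'var c = 3;\n');

// c.content is a Buffer holding both inputs with the separator between them.
// c.sourceMap is a JSON string; because neither input carried a source map,
// the per-line fallback branch above maps each generated line back to its file.
console.log(c.content.toString());
console.log(JSON.parse(c.sourceMap).sources); // expected: [ 'a.js', 'b.js' ]
```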
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/.gitattributes (new file, generated, vendored, 1 line)
@@ -0,0 +1 @@
bench/scalajs-runtime-sourcemap.js binary
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/.npmignore (new file, generated, vendored, 2 lines)
@@ -0,0 +1,2 @@
bench/
test/
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/.travis.yml (new file, generated, vendored, 4 lines)
@@ -0,0 +1,4 @@
language: node_js
node_js:
  - 0.8
  - "0.10"
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/CHANGELOG.md (new file, generated, vendored, 233 lines)
@@ -0,0 +1,233 @@
# Change Log
|
||||||
|
|
||||||
|
## 0.4.2
|
||||||
|
|
||||||
|
* Add an `.npmignore` file so that the benchmarks aren't pulled down by
|
||||||
|
dependent projects. Issue #169.
|
||||||
|
|
||||||
|
* Add an optional `column` argument to
|
||||||
|
`SourceMapConsumer.prototype.allGeneratedPositionsFor` and better handle lines
|
||||||
|
with no mappings. Issues #172 and #173.
|
||||||
|
|
||||||
|
## 0.4.1
|
||||||
|
|
||||||
|
* Fix accidentally defining a global variable. #170.
|
||||||
|
|
||||||
|
## 0.4.0
|
||||||
|
|
||||||
|
* The default direction for fuzzy searching was changed back to its original
|
||||||
|
direction. See #164.
|
||||||
|
|
||||||
|
* There is now a `bias` option you can supply to `SourceMapConsumer` to control
|
||||||
|
the fuzzy searching direction. See #167.
|
||||||
|
|
||||||
|
* About an 8% speed up in parsing source maps. See #159.
|
||||||
|
|
||||||
|
* Added a benchmark for parsing and generating source maps.
|
||||||
|
|
||||||
|
## 0.3.0
|
||||||
|
|
||||||
|
* Change the default direction that searching for positions fuzzes when there is
|
||||||
|
not an exact match. See #154.
|
||||||
|
|
||||||
|
* Support for environments using json2.js for JSON serialization. See #156.
|
||||||
|
|
||||||
|
## 0.2.0
|
||||||
|
|
||||||
|
* Support for consuming "indexed" source maps which do not have any remote
|
||||||
|
sections. See pull request #127. This introduces a minor backwards
|
||||||
|
incompatibility if you are monkey patching `SourceMapConsumer.prototype`
|
||||||
|
methods.
|
||||||
|
|
||||||
|
## 0.1.43
|
||||||
|
|
||||||
|
* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue
|
||||||
|
#148 for some discussion and issues #150, #151, and #152 for implementations.
|
||||||
|
|
||||||
|
## 0.1.42
|
||||||
|
|
||||||
|
* Fix an issue where `SourceNode`s from different versions of the source-map
|
||||||
|
library couldn't be used in conjunction with each other. See issue #142.
|
||||||
|
|
||||||
|
## 0.1.41
|
||||||
|
|
||||||
|
* Fix a bug with getting the source content of relative sources with a "./"
|
||||||
|
prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768).
|
||||||
|
|
||||||
|
* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the
|
||||||
|
column span of each mapping.
|
||||||
|
|
||||||
|
* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find
|
||||||
|
all generated positions associated with a given original source and line.
|
||||||
|
|
||||||
|
## 0.1.40
|
||||||
|
|
||||||
|
* Performance improvements for parsing source maps in SourceMapConsumer.
|
||||||
|
|
||||||
|
## 0.1.39
|
||||||
|
|
||||||
|
* Fix a bug where setting a source's contents to null before any source content
|
||||||
|
had been set before threw a TypeError. See issue #131.
|
||||||
|
|
||||||
|
## 0.1.38
|
||||||
|
|
||||||
|
* Fix a bug where finding relative paths from an empty path were creating
|
||||||
|
absolute paths. See issue #129.
|
||||||
|
|
||||||
|
## 0.1.37
|
||||||
|
|
||||||
|
* Fix a bug where if the source root was an empty string, relative source paths
|
||||||
|
would turn into absolute source paths. Issue #124.
|
||||||
|
|
||||||
|
## 0.1.36
|
||||||
|
|
||||||
|
* Allow the `names` mapping property to be an empty string. Issue #121.
|
||||||
|
|
||||||
|
## 0.1.35
|
||||||
|
|
||||||
|
* A third optional parameter was added to `SourceNode.fromStringWithSourceMap`
|
||||||
|
to specify a path that relative sources in the second parameter should be
|
||||||
|
relative to. Issue #105.
|
||||||
|
|
||||||
|
* If no file property is given to a `SourceMapGenerator`, then the resulting
|
||||||
|
source map will no longer have a `null` file property. The property will
|
||||||
|
simply not exist. Issue #104.
|
||||||
|
|
||||||
|
* Fixed a bug where consecutive newlines were ignored in `SourceNode`s.
|
||||||
|
Issue #116.
|
||||||
|
|
||||||
|
## 0.1.34
|
||||||
|
|
||||||
|
* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103.
|
||||||
|
|
||||||
|
* Fix bug involving source contents and the
|
||||||
|
`SourceMapGenerator.prototype.applySourceMap`. Issue #100.
|
||||||
|
|
||||||
|
## 0.1.33
|
||||||
|
|
||||||
|
* Fix some edge cases surrounding path joining and URL resolution.
|
||||||
|
|
||||||
|
* Add a third parameter for relative path to
|
||||||
|
`SourceMapGenerator.prototype.applySourceMap`.
|
||||||
|
|
||||||
|
* Fix issues with mappings and EOLs.
|
||||||
|
|
||||||
|
## 0.1.32
|
||||||
|
|
||||||
|
* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns
|
||||||
|
(issue 92).
|
||||||
|
|
||||||
|
* Fixed test runner to actually report number of failed tests as its process
|
||||||
|
exit code.
|
||||||
|
|
||||||
|
* Fixed a typo when reporting bad mappings (issue 87).
|
||||||
|
|
||||||
|
## 0.1.31
|
||||||
|
|
||||||
|
* Delay parsing the mappings in SourceMapConsumer until queried for a source
|
||||||
|
location.
|
||||||
|
|
||||||
|
* Support Sass source maps (which at the time of writing deviate from the spec
|
||||||
|
in small ways) in SourceMapConsumer.
|
||||||
|
|
||||||
|
## 0.1.30
|
||||||
|
|
||||||
|
* Do not join source root with a source, when the source is a data URI.
|
||||||
|
|
||||||
|
* Extend the test runner to allow running single specific test files at a time.
|
||||||
|
|
||||||
|
* Performance improvements in `SourceNode.prototype.walk` and
|
||||||
|
`SourceMapConsumer.prototype.eachMapping`.
|
||||||
|
|
||||||
|
* Source map browser builds will now work inside Workers.
|
||||||
|
|
||||||
|
* Better error messages when attempting to add an invalid mapping to a
|
||||||
|
`SourceMapGenerator`.
|
||||||
|
|
||||||
|
## 0.1.29
|
||||||
|
|
||||||
|
* Allow duplicate entries in the `names` and `sources` arrays of source maps
|
||||||
|
(usually from TypeScript) we are parsing. Fixes github issue 72.
|
||||||
|
|
||||||
|
## 0.1.28
|
||||||
|
|
||||||
|
* Skip duplicate mappings when creating source maps from SourceNode; github
|
||||||
|
issue 75.
|
||||||
|
|
||||||
|
## 0.1.27
|
||||||
|
|
||||||
|
* Don't throw an error when the `file` property is missing in SourceMapConsumer,
|
||||||
|
we don't use it anyway.
|
||||||
|
|
||||||
|
## 0.1.26
|
||||||
|
|
||||||
|
* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70.
|
||||||
|
|
||||||
|
## 0.1.25
|
||||||
|
|
||||||
|
* Make compatible with browserify
|
||||||
|
|
||||||
|
## 0.1.24
|
||||||
|
|
||||||
|
* Fix issue with absolute paths and `file://` URIs. See
|
||||||
|
https://bugzilla.mozilla.org/show_bug.cgi?id=885597
|
||||||
|
|
||||||
|
## 0.1.23
|
||||||
|
|
||||||
|
* Fix issue with absolute paths and sourcesContent, github issue 64.
|
||||||
|
|
||||||
|
## 0.1.22
|
||||||
|
|
||||||
|
* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21.
|
||||||
|
|
||||||
|
## 0.1.21
|
||||||
|
|
||||||
|
* Fixed handling of sources that start with a slash so that they are relative to
|
||||||
|
the source root's host.
|
||||||
|
|
||||||
|
## 0.1.20
|
||||||
|
|
||||||
|
* Fixed github issue #43: absolute URLs aren't joined with the source root
|
||||||
|
anymore.
|
||||||
|
|
||||||
|
## 0.1.19
|
||||||
|
|
||||||
|
* Using Travis CI to run tests.
|
||||||
|
|
||||||
|
## 0.1.18
|
||||||
|
|
||||||
|
* Fixed a bug in the handling of sourceRoot.
|
||||||
|
|
||||||
|
## 0.1.17
|
||||||
|
|
||||||
|
* Added SourceNode.fromStringWithSourceMap.
|
||||||
|
|
||||||
|
## 0.1.16
|
||||||
|
|
||||||
|
* Added missing documentation.
|
||||||
|
|
||||||
|
* Fixed the generating of empty mappings in SourceNode.
|
||||||
|
|
||||||
|
## 0.1.15
|
||||||
|
|
||||||
|
* Added SourceMapGenerator.applySourceMap.
|
||||||
|
|
||||||
|
## 0.1.14
|
||||||
|
|
||||||
|
* The sourceRoot is now handled consistently.
|
||||||
|
|
||||||
|
## 0.1.13
|
||||||
|
|
||||||
|
* Added SourceMapGenerator.fromSourceMap.
|
||||||
|
|
||||||
|
## 0.1.12
|
||||||
|
|
||||||
|
* SourceNode now generates empty mappings too.
|
||||||
|
|
||||||
|
## 0.1.11
|
||||||
|
|
||||||
|
* Added name support to SourceNode.
|
||||||
|
|
||||||
|
## 0.1.10
|
||||||
|
|
||||||
|
* Added sourcesContent support to the customer and generator.
|
||||||
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/LICENSE (new file, generated, vendored, 28 lines)
@@ -0,0 +1,28 @@
|
||||||
|
Copyright (c) 2009-2011, Mozilla Foundation and contributors
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer.
|
||||||
|
|
||||||
|
* Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer in the documentation
|
||||||
|
and/or other materials provided with the distribution.
|
||||||
|
|
||||||
|
* Neither the names of the Mozilla Foundation nor the names of project
|
||||||
|
contributors may be used to endorse or promote products derived from this
|
||||||
|
software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||||
|
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||||
|
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||||
|
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||||
|
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||||
|
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/Makefile.dryice.js (new file, generated, vendored, 166 lines)
@@ -0,0 +1,166 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
var path = require('path');
|
||||||
|
var fs = require('fs');
|
||||||
|
var copy = require('dryice').copy;
|
||||||
|
|
||||||
|
function removeAmdefine(src) {
|
||||||
|
src = String(src).replace(
|
||||||
|
/if\s*\(typeof\s*define\s*!==\s*'function'\)\s*{\s*var\s*define\s*=\s*require\('amdefine'\)\(module,\s*require\);\s*}\s*/g,
|
||||||
|
'');
|
||||||
|
src = src.replace(
|
||||||
|
/\b(define\(.*)('amdefine',?)/gm,
|
||||||
|
'$1');
|
||||||
|
return src;
|
||||||
|
}
|
||||||
|
removeAmdefine.onRead = true;
|
||||||
|
|
||||||
|
function makeNonRelative(src) {
|
||||||
|
return src
|
||||||
|
.replace(/require\('.\//g, 'require(\'source-map/')
|
||||||
|
.replace(/\.\.\/\.\.\/lib\//g, '');
|
||||||
|
}
|
||||||
|
makeNonRelative.onRead = true;
|
||||||
|
|
||||||
|
function buildBrowser() {
|
||||||
|
console.log('\nCreating dist/source-map.js');
|
||||||
|
|
||||||
|
var project = copy.createCommonJsProject({
|
||||||
|
roots: [ path.join(__dirname, 'lib') ]
|
||||||
|
});
|
||||||
|
|
||||||
|
copy({
|
||||||
|
source: [
|
||||||
|
'build/mini-require.js',
|
||||||
|
{
|
||||||
|
project: project,
|
||||||
|
require: [ 'source-map/source-map-generator',
|
||||||
|
'source-map/source-map-consumer',
|
||||||
|
'source-map/source-node']
|
||||||
|
},
|
||||||
|
'build/suffix-browser.js'
|
||||||
|
],
|
||||||
|
filter: [
|
||||||
|
copy.filter.moduleDefines,
|
||||||
|
removeAmdefine
|
||||||
|
],
|
||||||
|
dest: 'dist/source-map.js'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildBrowserMin() {
|
||||||
|
console.log('\nCreating dist/source-map.min.js');
|
||||||
|
|
||||||
|
copy({
|
||||||
|
source: 'dist/source-map.js',
|
||||||
|
filter: copy.filter.uglifyjs,
|
||||||
|
dest: 'dist/source-map.min.js'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildFirefox() {
|
||||||
|
console.log('\nCreating dist/SourceMap.jsm');
|
||||||
|
|
||||||
|
var project = copy.createCommonJsProject({
|
||||||
|
roots: [ path.join(__dirname, 'lib') ]
|
||||||
|
});
|
||||||
|
|
||||||
|
copy({
|
||||||
|
source: [
|
||||||
|
'build/prefix-source-map.jsm',
|
||||||
|
{
|
||||||
|
project: project,
|
||||||
|
require: [ 'source-map/source-map-consumer',
|
||||||
|
'source-map/source-map-generator',
|
||||||
|
'source-map/source-node' ]
|
||||||
|
},
|
||||||
|
'build/suffix-source-map.jsm'
|
||||||
|
],
|
||||||
|
filter: [
|
||||||
|
copy.filter.moduleDefines,
|
||||||
|
removeAmdefine,
|
||||||
|
makeNonRelative
|
||||||
|
],
|
||||||
|
dest: 'dist/SourceMap.jsm'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create dist/test/Utils.jsm
|
||||||
|
console.log('\nCreating dist/test/Utils.jsm');
|
||||||
|
|
||||||
|
project = copy.createCommonJsProject({
|
||||||
|
roots: [ __dirname, path.join(__dirname, 'lib') ]
|
||||||
|
});
|
||||||
|
|
||||||
|
copy({
|
||||||
|
source: [
|
||||||
|
'build/prefix-utils.jsm',
|
||||||
|
'build/assert-shim.js',
|
||||||
|
{
|
||||||
|
project: project,
|
||||||
|
require: [ 'test/source-map/util' ]
|
||||||
|
},
|
||||||
|
'build/suffix-utils.jsm'
|
||||||
|
],
|
||||||
|
filter: [
|
||||||
|
copy.filter.moduleDefines,
|
||||||
|
removeAmdefine,
|
||||||
|
makeNonRelative
|
||||||
|
],
|
||||||
|
dest: 'dist/test/Utils.jsm'
|
||||||
|
});
|
||||||
|
|
||||||
|
function isTestFile(f) {
|
||||||
|
return /^test\-.*?\.js/.test(f);
|
||||||
|
}
|
||||||
|
|
||||||
|
var testFiles = fs.readdirSync(path.join(__dirname, 'test', 'source-map')).filter(isTestFile);
|
||||||
|
|
||||||
|
testFiles.forEach(function (testFile) {
|
||||||
|
console.log('\nCreating', path.join('dist', 'test', testFile.replace(/\-/g, '_')));
|
||||||
|
|
||||||
|
copy({
|
||||||
|
source: [
|
||||||
|
'build/test-prefix.js',
|
||||||
|
path.join('test', 'source-map', testFile),
|
||||||
|
'build/test-suffix.js'
|
||||||
|
],
|
||||||
|
filter: [
|
||||||
|
removeAmdefine,
|
||||||
|
makeNonRelative,
|
||||||
|
function (input, source) {
|
||||||
|
return input.replace('define(',
|
||||||
|
'define("'
|
||||||
|
+ path.join('test', 'source-map', testFile.replace(/\.js$/, ''))
|
||||||
|
+ '", ["require", "exports", "module"], ');
|
||||||
|
},
|
||||||
|
function (input, source) {
|
||||||
|
return input.replace('{THIS_MODULE}', function () {
|
||||||
|
return "test/source-map/" + testFile.replace(/\.js$/, '');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
],
|
||||||
|
dest: path.join('dist', 'test', testFile.replace(/\-/g, '_'))
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function ensureDir(name) {
|
||||||
|
var dirExists = false;
|
||||||
|
try {
|
||||||
|
dirExists = fs.statSync(name).isDirectory();
|
||||||
|
} catch (err) {}
|
||||||
|
|
||||||
|
if (!dirExists) {
|
||||||
|
fs.mkdirSync(name, 0777);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ensureDir("dist");
|
||||||
|
ensureDir("dist/test");
|
||||||
|
buildFirefox();
|
||||||
|
buildBrowser();
|
||||||
|
buildBrowserMin();
|
||||||
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/README.md (new file, generated, vendored, 500 lines)
@@ -0,0 +1,500 @@
# Source Map
|
||||||
|
|
||||||
|
This is a library to generate and consume the source map format
|
||||||
|
[described here][format].
|
||||||
|
|
||||||
|
This library is written in the Asynchronous Module Definition format, and works
|
||||||
|
in the following environments:
|
||||||
|
|
||||||
|
* Modern Browsers supporting ECMAScript 5 (either after the build, or with an
|
||||||
|
AMD loader such as RequireJS)
|
||||||
|
|
||||||
|
* Inside Firefox (as a JSM file, after the build)
|
||||||
|
|
||||||
|
* With NodeJS versions 0.8.X and higher
|
||||||
|
|
||||||
|
## Node
|
||||||
|
|
||||||
|
$ npm install source-map
|
||||||
|
|
||||||
|
## Building from Source (for everywhere else)
|
||||||
|
|
||||||
|
Install Node and then run
|
||||||
|
|
||||||
|
$ git clone https://fitzgen@github.com/mozilla/source-map.git
|
||||||
|
$ cd source-map
|
||||||
|
$ npm link .
|
||||||
|
|
||||||
|
Next, run
|
||||||
|
|
||||||
|
$ node Makefile.dryice.js
|
||||||
|
|
||||||
|
This should spew a bunch of stuff to stdout, and create the following files:
|
||||||
|
|
||||||
|
* `dist/source-map.js` - The unminified browser version.
|
||||||
|
|
||||||
|
* `dist/source-map.min.js` - The minified browser version.
|
||||||
|
|
||||||
|
* `dist/SourceMap.jsm` - The JavaScript Module for inclusion in Firefox source.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Consuming a source map
|
||||||
|
|
||||||
|
```js
|
||||||
|
var rawSourceMap = {
|
||||||
|
version: 3,
|
||||||
|
file: 'min.js',
|
||||||
|
names: ['bar', 'baz', 'n'],
|
||||||
|
sources: ['one.js', 'two.js'],
|
||||||
|
sourceRoot: 'http://example.com/www/js/',
|
||||||
|
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||||
|
};
|
||||||
|
|
||||||
|
var smc = new SourceMapConsumer(rawSourceMap);
|
||||||
|
|
||||||
|
console.log(smc.sources);
|
||||||
|
// [ 'http://example.com/www/js/one.js',
|
||||||
|
// 'http://example.com/www/js/two.js' ]
|
||||||
|
|
||||||
|
console.log(smc.originalPositionFor({
|
||||||
|
line: 2,
|
||||||
|
column: 28
|
||||||
|
}));
|
||||||
|
// { source: 'http://example.com/www/js/two.js',
|
||||||
|
// line: 2,
|
||||||
|
// column: 10,
|
||||||
|
// name: 'n' }
|
||||||
|
|
||||||
|
console.log(smc.generatedPositionFor({
|
||||||
|
source: 'http://example.com/www/js/two.js',
|
||||||
|
line: 2,
|
||||||
|
column: 10
|
||||||
|
}));
|
||||||
|
// { line: 2, column: 28 }
|
||||||
|
|
||||||
|
smc.eachMapping(function (m) {
|
||||||
|
// ...
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Generating a source map
|
||||||
|
|
||||||
|
In depth guide:
|
||||||
|
[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/)
|
||||||
|
|
||||||
|
#### With SourceNode (high level API)
|
||||||
|
|
||||||
|
```js
|
||||||
|
function compile(ast) {
|
||||||
|
switch (ast.type) {
|
||||||
|
case 'BinaryExpression':
|
||||||
|
return new SourceNode(
|
||||||
|
ast.location.line,
|
||||||
|
ast.location.column,
|
||||||
|
ast.location.source,
|
||||||
|
[compile(ast.left), " + ", compile(ast.right)]
|
||||||
|
);
|
||||||
|
case 'Literal':
|
||||||
|
return new SourceNode(
|
||||||
|
ast.location.line,
|
||||||
|
ast.location.column,
|
||||||
|
ast.location.source,
|
||||||
|
String(ast.value)
|
||||||
|
);
|
||||||
|
// ...
|
||||||
|
default:
|
||||||
|
throw new Error("Bad AST");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var ast = parse("40 + 2", "add.js");
|
||||||
|
console.log(compile(ast).toStringWithSourceMap({
|
||||||
|
file: 'add.js'
|
||||||
|
}));
|
||||||
|
// { code: '40 + 2',
|
||||||
|
// map: [object SourceMapGenerator] }
|
||||||
|
```
|
||||||
|
|
||||||
|
#### With SourceMapGenerator (low level API)
|
||||||
|
|
||||||
|
```js
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: "source-mapped.js"
|
||||||
|
});
|
||||||
|
|
||||||
|
map.addMapping({
|
||||||
|
generated: {
|
||||||
|
line: 10,
|
||||||
|
column: 35
|
||||||
|
},
|
||||||
|
source: "foo.js",
|
||||||
|
original: {
|
||||||
|
line: 33,
|
||||||
|
column: 2
|
||||||
|
},
|
||||||
|
name: "christopher"
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(map.toString());
|
||||||
|
// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}'
|
||||||
|
```
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
Get a reference to the module:
|
||||||
|
|
||||||
|
```js
|
||||||
|
// NodeJS
|
||||||
|
var sourceMap = require('source-map');
|
||||||
|
|
||||||
|
// Browser builds
|
||||||
|
var sourceMap = window.sourceMap;
|
||||||
|
|
||||||
|
// Inside Firefox
|
||||||
|
let sourceMap = {};
|
||||||
|
Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
|
||||||
|
```
|
||||||
|
|
||||||
|
### SourceMapConsumer
|
||||||
|
|
||||||
|
A SourceMapConsumer instance represents a parsed source map which we can query
|
||||||
|
for information about the original file positions by giving it a file position
|
||||||
|
in the generated source.
|
||||||
|
|
||||||
|
#### new SourceMapConsumer(rawSourceMap)
|
||||||
|
|
||||||
|
The only parameter is the raw source map (either as a string which can be
|
||||||
|
`JSON.parse`'d, or an object). According to the spec, source maps have the
|
||||||
|
following attributes:
|
||||||
|
|
||||||
|
* `version`: Which version of the source map spec this map is following.
|
||||||
|
|
||||||
|
* `sources`: An array of URLs to the original source files.
|
||||||
|
|
||||||
|
* `names`: An array of identifiers which can be referrenced by individual
|
||||||
|
mappings.
|
||||||
|
|
||||||
|
* `sourceRoot`: Optional. The URL root from which all sources are relative.
|
||||||
|
|
||||||
|
* `sourcesContent`: Optional. An array of contents of the original source files.
|
||||||
|
|
||||||
|
* `mappings`: A string of base64 VLQs which contain the actual mappings.
|
||||||
|
|
||||||
|
* `file`: Optional. The generated filename this source map is associated with.
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.computeColumnSpans()
|
||||||
|
|
||||||
|
Compute the last column for each generated mapping. The last column is
|
||||||
|
inclusive.
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
|
||||||
|
|
||||||
|
Returns the original source, line, and column information for the generated
|
||||||
|
source's line and column positions provided. The only argument is an object with
|
||||||
|
the following properties:
|
||||||
|
|
||||||
|
* `line`: The line number in the generated source.
|
||||||
|
|
||||||
|
* `column`: The column number in the generated source.
|
||||||
|
|
||||||
|
* `bias`: Either `SourceMapConsumer.GREATEST_LOWER_BOUND` or
|
||||||
|
`SourceMapConsumer.LEAST_UPPER_BOUND`. Specifies whether to return the closest
|
||||||
|
element that is smaller than or greater than the one we are searching for,
|
||||||
|
respectively, if the exact element cannot be found. Defaults to
|
||||||
|
`SourceMapConsumer.GREATEST_LOWER_BOUND`.
|
||||||
|
|
||||||
|
and an object is returned with the following properties:
|
||||||
|
|
||||||
|
* `source`: The original source file, or null if this information is not
|
||||||
|
available.
|
||||||
|
|
||||||
|
* `line`: The line number in the original source, or null if this information is
|
||||||
|
not available.
|
||||||
|
|
||||||
|
* `column`: The column number in the original source, or null or null if this
|
||||||
|
information is not available.
|
||||||
|
|
||||||
|
* `name`: The original identifier, or null if this information is not available.
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
|
||||||
|
|
||||||
|
Returns the generated line and column information for the original source,
|
||||||
|
line, and column positions provided. The only argument is an object with
|
||||||
|
the following properties:
|
||||||
|
|
||||||
|
* `source`: The filename of the original source.
|
||||||
|
|
||||||
|
* `line`: The line number in the original source.
|
||||||
|
|
||||||
|
* `column`: The column number in the original source.
|
||||||
|
|
||||||
|
and an object is returned with the following properties:
|
||||||
|
|
||||||
|
* `line`: The line number in the generated source, or null.
|
||||||
|
|
||||||
|
* `column`: The column number in the generated source, or null.
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
|
||||||
|
|
||||||
|
Returns all generated line and column information for the original source,
|
||||||
|
line, and column provided. If no column is provided, returns all mappings
|
||||||
|
corresponding to a single line. Otherwise, returns all mappings corresponding to
|
||||||
|
a single line and column.
|
||||||
|
|
||||||
|
The only argument is an object with the following properties:
|
||||||
|
|
||||||
|
* `source`: The filename of the original source.
|
||||||
|
|
||||||
|
* `line`: The line number in the original source.
|
||||||
|
|
||||||
|
* `column`: Optional. The column number in the original source.
|
||||||
|
|
||||||
|
and an array of objects is returned, each with the following properties:
|
||||||
|
|
||||||
|
* `line`: The line number in the generated source, or null.
|
||||||
|
|
||||||
|
* `column`: The column number in the generated source, or null.
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
|
||||||
|
|
||||||
|
Returns the original source content for the source provided. The only
|
||||||
|
argument is the URL of the original source file.
|
||||||
|
|
||||||
|
If the source content for the given source is not found, then an error is
|
||||||
|
thrown. Optionally, pass `true` as the second param to have `null` returned
|
||||||
|
instead.
|
||||||
|
|
||||||
|
#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
|
||||||
|
|
||||||
|
Iterate over each mapping between an original source/line/column and a
|
||||||
|
generated line/column in this source map.
|
||||||
|
|
||||||
|
* `callback`: The function that is called with each mapping. Mappings have the
|
||||||
|
form `{ source, generatedLine, generatedColumn, originalLine, originalColumn,
|
||||||
|
name }`
|
||||||
|
|
||||||
|
* `context`: Optional. If specified, this object will be the value of `this`
|
||||||
|
every time that `callback` is called.
|
||||||
|
|
||||||
|
* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||||
|
`SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
|
||||||
|
the mappings sorted by the generated file's line/column order or the
|
||||||
|
original's source/line/column order, respectively. Defaults to
|
||||||
|
`SourceMapConsumer.GENERATED_ORDER`.
|
||||||
|
|
||||||
|
### SourceMapGenerator
|
||||||
|
|
||||||
|
An instance of the SourceMapGenerator represents a source map which is being
|
||||||
|
built incrementally.
|
||||||
|
|
||||||
|
#### new SourceMapGenerator([startOfSourceMap])
|
||||||
|
|
||||||
|
You may pass an object with the following properties:
|
||||||
|
|
||||||
|
* `file`: The filename of the generated source that this source map is
|
||||||
|
associated with.
|
||||||
|
|
||||||
|
* `sourceRoot`: A root for all relative URLs in this source map.
|
||||||
|
|
||||||
|
* `skipValidation`: Optional. When `true`, disables validation of mappings as
|
||||||
|
they are added. This can improve performance but should be used with
|
||||||
|
discretion, as a last resort. Even then, one should avoid using this flag when
|
||||||
|
running tests, if possible.
|
||||||
|
|
||||||
|
#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)
|
||||||
|
|
||||||
|
Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||||
|
|
||||||
|
* `sourceMapConsumer` The SourceMap.
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.addMapping(mapping)
|
||||||
|
|
||||||
|
Add a single mapping from original source line and column to the generated
|
||||||
|
source's line and column for this source map being created. The mapping object
|
||||||
|
should have the following properties:
|
||||||
|
|
||||||
|
* `generated`: An object with the generated line and column positions.
|
||||||
|
|
||||||
|
* `original`: An object with the original line and column positions.
|
||||||
|
|
||||||
|
* `source`: The original source file (relative to the sourceRoot).
|
||||||
|
|
||||||
|
* `name`: An optional original token name for this mapping.
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
|
||||||
|
|
||||||
|
Set the source content for an original source file.
|
||||||
|
|
||||||
|
* `sourceFile` the URL of the original source file.
|
||||||
|
|
||||||
|
* `sourceContent` the content of the source file.
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
|
||||||
|
|
||||||
|
Applies a SourceMap for a source file to the SourceMap.
|
||||||
|
Each mapping to the supplied source file is rewritten using the
|
||||||
|
supplied SourceMap. Note: The resolution for the resulting mappings
|
||||||
|
is the minimium of this map and the supplied map.
|
||||||
|
|
||||||
|
* `sourceMapConsumer`: The SourceMap to be applied.
|
||||||
|
|
||||||
|
* `sourceFile`: Optional. The filename of the source file.
|
||||||
|
If omitted, sourceMapConsumer.file will be used, if it exists.
|
||||||
|
Otherwise an error will be thrown.
|
||||||
|
|
||||||
|
* `sourceMapPath`: Optional. The dirname of the path to the SourceMap
|
||||||
|
to be applied. If relative, it is relative to the SourceMap.
|
||||||
|
|
||||||
|
This parameter is needed when the two SourceMaps aren't in the same
|
||||||
|
directory, and the SourceMap to be applied contains relative source
|
||||||
|
paths. If so, those relative source paths need to be rewritten
|
||||||
|
relative to the SourceMap.
|
||||||
|
|
||||||
|
If omitted, it is assumed that both SourceMaps are in the same directory,
|
||||||
|
thus not needing any rewriting. (Supplying `'.'` has the same effect.)
|
||||||
|
|
||||||
|
#### SourceMapGenerator.prototype.toString()
|
||||||
|
|
||||||
|
Renders the source map being generated to a string.
|
||||||
|
|
||||||
|
### SourceNode
|
||||||
|
|
||||||
|
SourceNodes provide a way to abstract over interpolating and/or concatenating
|
||||||
|
snippets of generated JavaScript source code, while maintaining the line and
|
||||||
|
column information associated between those snippets and the original source
|
||||||
|
code. This is useful as the final intermediate representation a compiler might
|
||||||
|
use before outputting the generated JS and source map.
|
||||||
|
|
||||||
|
#### new SourceNode([line, column, source[, chunk[, name]]])
|
||||||
|
|
||||||
|
* `line`: The original line number associated with this source node, or null if
|
||||||
|
it isn't associated with an original line.
|
||||||
|
|
||||||
|
* `column`: The original column number associated with this source node, or null
|
||||||
|
if it isn't associated with an original column.
|
||||||
|
|
||||||
|
* `source`: The original source's filename; null if no filename is provided.
|
||||||
|
|
||||||
|
* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
|
||||||
|
below.
|
||||||
|
|
||||||
|
* `name`: Optional. The original identifier.
|
||||||
|
|
||||||
|
#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])
|
||||||
|
|
||||||
|
Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||||
|
|
||||||
|
* `code`: The generated code
|
||||||
|
|
||||||
|
* `sourceMapConsumer` The SourceMap for the generated code
|
||||||
|
|
||||||
|
* `relativePath` The optional path that relative sources in `sourceMapConsumer`
|
||||||
|
should be relative to.
|
||||||
|
|
||||||
|
#### SourceNode.prototype.add(chunk)
|
||||||
|
|
||||||
|
Add a chunk of generated JS to this source node.
|
||||||
|
|
||||||
|
* `chunk`: A string snippet of generated JS code, another instance of
|
||||||
|
`SourceNode`, or an array where each member is one of those things.
|
||||||
|
|
||||||
|
#### SourceNode.prototype.prepend(chunk)
|
||||||
|
|
||||||
|
Prepend a chunk of generated JS to this source node.
|
||||||
|
|
||||||
|
* `chunk`: A string snippet of generated JS code, another instance of
|
||||||
|
`SourceNode`, or an array where each member is one of those things.
|
||||||
|
|
||||||
|
#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)
|
||||||
|
|
||||||
|
Set the source content for a source file. This will be added to the
|
||||||
|
`SourceMap` in the `sourcesContent` field.
|
||||||
|
|
||||||
|
* `sourceFile`: The filename of the source file
|
||||||
|
|
||||||
|
* `sourceContent`: The content of the source file
|
||||||
|
|
||||||
|
#### SourceNode.prototype.walk(fn)
|
||||||
|
|
||||||
|
Walk over the tree of JS snippets in this node and its children. The walking
|
||||||
|
function is called once for each snippet of JS and is passed that snippet and
|
||||||
|
the its original associated source's line/column location.
|
||||||
|
|
||||||
|
* `fn`: The traversal function.
|
||||||
|
|
||||||
|
#### SourceNode.prototype.walkSourceContents(fn)
|
||||||
|
|
||||||
|
Walk over the tree of SourceNodes. The walking function is called for each
|
||||||
|
source file content and is passed the filename and source content.
|
||||||
|
|
||||||
|
* `fn`: The traversal function.
|
||||||
|
|
||||||
|
#### SourceNode.prototype.join(sep)
|
||||||
|
|
||||||
|
Like `Array.prototype.join` except for SourceNodes. Inserts the separator
|
||||||
|
between each of this source node's children.
|
||||||
|
|
||||||
|
* `sep`: The separator.
|
||||||
|
|
||||||
|
#### SourceNode.prototype.replaceRight(pattern, replacement)
|
||||||
|
|
||||||
|
Call `String.prototype.replace` on the very right-most source snippet. Useful
|
||||||
|
for trimming whitespace from the end of a source node, etc.
|
||||||
|
|
||||||
|
* `pattern`: The pattern to replace.
|
||||||
|
|
||||||
|
* `replacement`: The thing to replace the pattern with.
|
||||||
|
|
||||||
|
#### SourceNode.prototype.toString()
|
||||||
|
|
||||||
|
Return the string representation of this source node. Walks over the tree and
|
||||||
|
concatenates all the various snippets together to one string.
|
||||||
|
|
||||||
|
#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])
|
||||||
|
|
||||||
|
Returns the string representation of this tree of source nodes, plus a
|
||||||
|
SourceMapGenerator which contains all the mappings between the generated and
|
||||||
|
original sources.
|
||||||
|
|
||||||
|
The arguments are the same as those to `new SourceMapGenerator`.
|
||||||
|
|
||||||
|
## Tests

[![Build Status](https://travis-ci.org/mozilla/source-map.png?branch=master)](https://travis-ci.org/mozilla/source-map)

Install NodeJS version 0.8.0 or greater, then run `node test/run-tests.js`.

To add new tests, create a new file named `test/test-<your new test name>.js`
and export your test functions with names that start with "test", for example:

```js
exports["test doing the foo bar"] = function (assert, util) {
  ...
};
```

The new test will be located automatically when you run the suite.

The `util` argument is the test utility module located at `test/source-map/util`.

The `assert` argument is a cut-down version of Node's assert module. You have
access to the following assertion functions:

* `doesNotThrow`

* `equal`

* `ok`

* `strictEqual`

* `throws`

(The set of assertion functions is restricted because the tests also need to
run inside Firefox's test suite, where the assert module is shimmed. See
`build/assert-shim.js`.)
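For instance, a tiny hypothetical `test/test-example.js` that exercises the shared `util` fixtures with the restricted assert API could look like this:

```js
exports["test the shared example map"] = function (assert, util) {
  // util.testMap is the sample mapping defined in test/source-map/util.
  assert.equal(util.testMap.sources.length, 2);
  assert.strictEqual(util.testMap.file, 'min.js');
  assert.ok(util.testMap.mappings.length > 0, 'mappings should not be empty');
};
```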
[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
[feature]: https://wiki.mozilla.org/DevTools/Features/SourceMap
[Dryice]: https://github.com/mozilla/dryice
56
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/build/assert-shim.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
define('test/source-map/assert', ['exports'], function (exports) {

  let do_throw = function (msg) {
    throw new Error(msg);
  };

  exports.init = function (throw_fn) {
    do_throw = throw_fn;
  };

  exports.doesNotThrow = function (fn) {
    try {
      fn();
    }
    catch (e) {
      do_throw(e.message);
    }
  };

  exports.equal = function (actual, expected, msg) {
    msg = msg || String(actual) + ' != ' + String(expected);
    if (actual != expected) {
      do_throw(msg);
    }
  };

  exports.ok = function (val, msg) {
    msg = msg || String(val) + ' is falsey';
    if (!Boolean(val)) {
      do_throw(msg);
    }
  };

  exports.strictEqual = function (actual, expected, msg) {
    msg = msg || String(actual) + ' !== ' + String(expected);
    if (actual !== expected) {
      do_throw(msg);
    }
  };

  exports.throws = function (fn) {
    try {
      fn();
      do_throw('Expected an error to be thrown, but it wasn\'t.');
    }
    catch (e) {
    }
  };

});
152
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/build/mini-require.js
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Define a module along with a payload.
|
||||||
|
* @param {string} moduleName Name for the payload
|
||||||
|
* @param {ignored} deps Ignored. For compatibility with CommonJS AMD Spec
|
||||||
|
* @param {function} payload Function with (require, exports, module) params
|
||||||
|
*/
|
||||||
|
function define(moduleName, deps, payload) {
|
||||||
|
if (typeof moduleName != "string") {
|
||||||
|
throw new TypeError('Expected string, got: ' + moduleName);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (arguments.length == 2) {
|
||||||
|
payload = deps;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (moduleName in define.modules) {
|
||||||
|
throw new Error("Module already defined: " + moduleName);
|
||||||
|
}
|
||||||
|
define.modules[moduleName] = payload;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The global store of un-instantiated modules
|
||||||
|
*/
|
||||||
|
define.modules = {};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* We invoke require() in the context of a Domain so we can have multiple
|
||||||
|
* sets of modules running separate from each other.
|
||||||
|
* This contrasts with JSMs which are singletons, Domains allows us to
|
||||||
|
* optionally load a CommonJS module twice with separate data each time.
|
||||||
|
* Perhaps you want 2 command lines with a different set of commands in each,
|
||||||
|
* for example.
|
||||||
|
*/
|
||||||
|
function Domain() {
|
||||||
|
this.modules = {};
|
||||||
|
this._currentModule = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
(function () {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Lookup module names and resolve them by calling the definition function if
|
||||||
|
* needed.
|
||||||
|
* There are 2 ways to call this, either with an array of dependencies and a
|
||||||
|
* callback to call when the dependencies are found (which can happen
|
||||||
|
* asynchronously in an in-page context) or with a single string an no callback
|
||||||
|
* where the dependency is resolved synchronously and returned.
|
||||||
|
* The API is designed to be compatible with the CommonJS AMD spec and
|
||||||
|
* RequireJS.
|
||||||
|
* @param {string[]|string} deps A name, or names for the payload
|
||||||
|
* @param {function|undefined} callback Function to call when the dependencies
|
||||||
|
* are resolved
|
||||||
|
* @return {undefined|object} The module required or undefined for
|
||||||
|
* array/callback method
|
||||||
|
*/
|
||||||
|
Domain.prototype.require = function(deps, callback) {
|
||||||
|
if (Array.isArray(deps)) {
|
||||||
|
var params = deps.map(function(dep) {
|
||||||
|
return this.lookup(dep);
|
||||||
|
}, this);
|
||||||
|
if (callback) {
|
||||||
|
callback.apply(null, params);
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return this.lookup(deps);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
function normalize(path) {
|
||||||
|
var bits = path.split('/');
|
||||||
|
var i = 1;
|
||||||
|
while (i < bits.length) {
|
||||||
|
if (bits[i] === '..') {
|
||||||
|
bits.splice(i-1, 1);
|
||||||
|
} else if (bits[i] === '.') {
|
||||||
|
bits.splice(i, 1);
|
||||||
|
} else {
|
||||||
|
i++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return bits.join('/');
|
||||||
|
}
|
||||||
|
|
||||||
|
function join(a, b) {
|
||||||
|
a = a.trim();
|
||||||
|
b = b.trim();
|
||||||
|
if (/^\//.test(b)) {
|
||||||
|
return b;
|
||||||
|
} else {
|
||||||
|
return a.replace(/\/*$/, '/') + b;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function dirname(path) {
|
||||||
|
var bits = path.split('/');
|
||||||
|
bits.pop();
|
||||||
|
return bits.join('/');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Lookup module names and resolve them by calling the definition function if
|
||||||
|
* needed.
|
||||||
|
* @param {string} moduleName A name for the payload to lookup
|
||||||
|
* @return {object} The module specified by aModuleName or null if not found.
|
||||||
|
*/
|
||||||
|
Domain.prototype.lookup = function(moduleName) {
|
||||||
|
if (/^\./.test(moduleName)) {
|
||||||
|
moduleName = normalize(join(dirname(this._currentModule), moduleName));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (moduleName in this.modules) {
|
||||||
|
var module = this.modules[moduleName];
|
||||||
|
return module;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(moduleName in define.modules)) {
|
||||||
|
throw new Error("Module not defined: " + moduleName);
|
||||||
|
}
|
||||||
|
|
||||||
|
var module = define.modules[moduleName];
|
||||||
|
|
||||||
|
if (typeof module == "function") {
|
||||||
|
var exports = {};
|
||||||
|
var previousModule = this._currentModule;
|
||||||
|
this._currentModule = moduleName;
|
||||||
|
module(this.require.bind(this), exports, { id: moduleName, uri: "" });
|
||||||
|
this._currentModule = previousModule;
|
||||||
|
module = exports;
|
||||||
|
}
|
||||||
|
|
||||||
|
// cache the resulting module object for next time
|
||||||
|
this.modules[moduleName] = module;
|
||||||
|
|
||||||
|
return module;
|
||||||
|
};
|
||||||
|
|
||||||
|
}());
|
||||||
|
|
||||||
|
define.Domain = Domain;
|
||||||
|
define.globalDomain = new Domain();
|
||||||
|
var require = define.globalDomain.require.bind(define.globalDomain);
|
||||||
20
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/build/prefix-source-map.jsm
generated
vendored
Normal file
@@ -0,0 +1,20 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */

/*
 * WARNING!
 *
 * Do not edit this file directly, it is built from the sources at
 * https://github.com/mozilla/source-map/
 */

///////////////////////////////////////////////////////////////////////////////


this.EXPORTED_SYMBOLS = [ "SourceMapConsumer", "SourceMapGenerator", "SourceNode" ];

Components.utils.import('resource://gre/modules/devtools/Require.jsm');
18
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/build/prefix-utils.jsm
generated
vendored
Normal file
@@ -0,0 +1,18 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */

/*
 * WARNING!
 *
 * Do not edit this file directly, it is built from the sources at
 * https://github.com/mozilla/source-map/
 */

Components.utils.import('resource://gre/modules/devtools/Require.jsm');
Components.utils.import('resource://gre/modules/devtools/SourceMap.jsm');

this.EXPORTED_SYMBOLS = [ "define", "runSourceMapTests" ];
8
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/build/suffix-browser.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
///////////////////////////////////////////////////////////////////////////////

this.sourceMap = {
  SourceMapConsumer: require('source-map/source-map-consumer').SourceMapConsumer,
  SourceMapGenerator: require('source-map/source-map-generator').SourceMapGenerator,
  SourceNode: require('source-map/source-node').SourceNode
};
6
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/build/suffix-source-map.jsm
generated
vendored
Normal file
@@ -0,0 +1,6 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
///////////////////////////////////////////////////////////////////////////////

this.SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
this.SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;
this.SourceNode = require('source-map/source-node').SourceNode;
21
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/build/suffix-utils.jsm
generated
vendored
Normal file
@@ -0,0 +1,21 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
function runSourceMapTests(modName, do_throw) {
  let mod = require(modName);
  let assert = require('test/source-map/assert');
  let util = require('test/source-map/util');

  assert.init(do_throw);

  for (let k in mod) {
    if (/^test/.test(k)) {
      mod[k](assert, util);
    }
  }

}
this.runSourceMapTests = runSourceMapTests;
8
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/build/test-prefix.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
/*
 * WARNING!
 *
 * Do not edit this file directly, it is built from the sources at
 * https://github.com/mozilla/source-map/
 */

Components.utils.import('resource://test/Utils.jsm');
3
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/build/test-suffix.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
function run_test() {
  runSourceMapTests('{THIS_MODULE}', do_throw);
}
2844
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/SourceMap.jsm
generated
vendored
Normal file
File diff suppressed because it is too large
2978
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/source-map.js
generated
vendored
Normal file
File diff suppressed because it is too large
1
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/source-map.min.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
707
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/Utils.jsm
generated
vendored
Normal file
@@ -0,0 +1,707 @@
|
|||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
* WARNING!
|
||||||
|
*
|
||||||
|
* Do not edit this file directly, it is built from the sources at
|
||||||
|
* https://github.com/mozilla/source-map/
|
||||||
|
*/
|
||||||
|
|
||||||
|
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
|
||||||
|
Components.utils.import('resource://gre/modules/devtools/SourceMap.jsm');
|
||||||
|
|
||||||
|
this.EXPORTED_SYMBOLS = [ "define", "runSourceMapTests" ];
|
||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
define('test/source-map/assert', ['exports'], function (exports) {
|
||||||
|
|
||||||
|
let do_throw = function (msg) {
|
||||||
|
throw new Error(msg);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.init = function (throw_fn) {
|
||||||
|
do_throw = throw_fn;
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.doesNotThrow = function (fn) {
|
||||||
|
try {
|
||||||
|
fn();
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
do_throw(e.message);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.equal = function (actual, expected, msg) {
|
||||||
|
msg = msg || String(actual) + ' != ' + String(expected);
|
||||||
|
if (actual != expected) {
|
||||||
|
do_throw(msg);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.ok = function (val, msg) {
|
||||||
|
msg = msg || String(val) + ' is falsey';
|
||||||
|
if (!Boolean(val)) {
|
||||||
|
do_throw(msg);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.strictEqual = function (actual, expected, msg) {
|
||||||
|
msg = msg || String(actual) + ' !== ' + String(expected);
|
||||||
|
if (actual !== expected) {
|
||||||
|
do_throw(msg);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.throws = function (fn) {
|
||||||
|
try {
|
||||||
|
fn();
|
||||||
|
do_throw('Expected an error to be thrown, but it wasn\'t.');
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
});
|
||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
define('test/source-map/util', ['require', 'exports', 'module' , 'lib/source-map/util'], function(require, exports, module) {
|
||||||
|
|
||||||
|
var util = require('source-map/util');
|
||||||
|
|
||||||
|
// This is a test mapping which maps functions from two different files
|
||||||
|
// (one.js and two.js) to a minified generated source.
|
||||||
|
//
|
||||||
|
// Here is one.js:
|
||||||
|
//
|
||||||
|
// ONE.foo = function (bar) {
|
||||||
|
// return baz(bar);
|
||||||
|
// };
|
||||||
|
//
|
||||||
|
// Here is two.js:
|
||||||
|
//
|
||||||
|
// TWO.inc = function (n) {
|
||||||
|
// return n + 1;
|
||||||
|
// };
|
||||||
|
//
|
||||||
|
// And here is the generated code (min.js):
|
||||||
|
//
|
||||||
|
// ONE.foo=function(a){return baz(a);};
|
||||||
|
// TWO.inc=function(a){return a+1;};
|
||||||
|
exports.testGeneratedCode = " ONE.foo=function(a){return baz(a);};\n"+
|
||||||
|
" TWO.inc=function(a){return a+1;};";
|
||||||
|
exports.testMap = {
|
||||||
|
version: 3,
|
||||||
|
file: 'min.js',
|
||||||
|
names: ['bar', 'baz', 'n'],
|
||||||
|
sources: ['one.js', 'two.js'],
|
||||||
|
sourceRoot: '/the/root',
|
||||||
|
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||||
|
};
|
||||||
|
exports.testMapNoSourceRoot = {
|
||||||
|
version: 3,
|
||||||
|
file: 'min.js',
|
||||||
|
names: ['bar', 'baz', 'n'],
|
||||||
|
sources: ['one.js', 'two.js'],
|
||||||
|
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||||
|
};
|
||||||
|
exports.testMapEmptySourceRoot = {
|
||||||
|
version: 3,
|
||||||
|
file: 'min.js',
|
||||||
|
names: ['bar', 'baz', 'n'],
|
||||||
|
sources: ['one.js', 'two.js'],
|
||||||
|
sourceRoot: '',
|
||||||
|
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||||
|
};
|
||||||
|
// This mapping is identical to above, but uses the indexed format instead.
|
||||||
|
exports.indexedTestMap = {
|
||||||
|
version: 3,
|
||||||
|
file: 'min.js',
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
offset: {
|
||||||
|
line: 0,
|
||||||
|
column: 0
|
||||||
|
},
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: [
|
||||||
|
"one.js"
|
||||||
|
],
|
||||||
|
sourcesContent: [
|
||||||
|
' ONE.foo = function (bar) {\n' +
|
||||||
|
' return baz(bar);\n' +
|
||||||
|
' };',
|
||||||
|
],
|
||||||
|
names: [
|
||||||
|
"bar",
|
||||||
|
"baz"
|
||||||
|
],
|
||||||
|
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID",
|
||||||
|
file: "min.js",
|
||||||
|
sourceRoot: "/the/root"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
offset: {
|
||||||
|
line: 1,
|
||||||
|
column: 0
|
||||||
|
},
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: [
|
||||||
|
"two.js"
|
||||||
|
],
|
||||||
|
sourcesContent: [
|
||||||
|
' TWO.inc = function (n) {\n' +
|
||||||
|
' return n + 1;\n' +
|
||||||
|
' };'
|
||||||
|
],
|
||||||
|
names: [
|
||||||
|
"n"
|
||||||
|
],
|
||||||
|
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOA",
|
||||||
|
file: "min.js",
|
||||||
|
sourceRoot: "/the/root"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
exports.indexedTestMapDifferentSourceRoots = {
|
||||||
|
version: 3,
|
||||||
|
file: 'min.js',
|
||||||
|
sections: [
|
||||||
|
{
|
||||||
|
offset: {
|
||||||
|
line: 0,
|
||||||
|
column: 0
|
||||||
|
},
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: [
|
||||||
|
"one.js"
|
||||||
|
],
|
||||||
|
sourcesContent: [
|
||||||
|
' ONE.foo = function (bar) {\n' +
|
||||||
|
' return baz(bar);\n' +
|
||||||
|
' };',
|
||||||
|
],
|
||||||
|
names: [
|
||||||
|
"bar",
|
||||||
|
"baz"
|
||||||
|
],
|
||||||
|
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID",
|
||||||
|
file: "min.js",
|
||||||
|
sourceRoot: "/the/root"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
offset: {
|
||||||
|
line: 1,
|
||||||
|
column: 0
|
||||||
|
},
|
||||||
|
map: {
|
||||||
|
version: 3,
|
||||||
|
sources: [
|
||||||
|
"two.js"
|
||||||
|
],
|
||||||
|
sourcesContent: [
|
||||||
|
' TWO.inc = function (n) {\n' +
|
||||||
|
' return n + 1;\n' +
|
||||||
|
' };'
|
||||||
|
],
|
||||||
|
names: [
|
||||||
|
"n"
|
||||||
|
],
|
||||||
|
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOA",
|
||||||
|
file: "min.js",
|
||||||
|
sourceRoot: "/different/root"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
exports.testMapWithSourcesContent = {
|
||||||
|
version: 3,
|
||||||
|
file: 'min.js',
|
||||||
|
names: ['bar', 'baz', 'n'],
|
||||||
|
sources: ['one.js', 'two.js'],
|
||||||
|
sourcesContent: [
|
||||||
|
' ONE.foo = function (bar) {\n' +
|
||||||
|
' return baz(bar);\n' +
|
||||||
|
' };',
|
||||||
|
' TWO.inc = function (n) {\n' +
|
||||||
|
' return n + 1;\n' +
|
||||||
|
' };'
|
||||||
|
],
|
||||||
|
sourceRoot: '/the/root',
|
||||||
|
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||||
|
};
|
||||||
|
exports.testMapRelativeSources = {
|
||||||
|
version: 3,
|
||||||
|
file: 'min.js',
|
||||||
|
names: ['bar', 'baz', 'n'],
|
||||||
|
sources: ['./one.js', './two.js'],
|
||||||
|
sourcesContent: [
|
||||||
|
' ONE.foo = function (bar) {\n' +
|
||||||
|
' return baz(bar);\n' +
|
||||||
|
' };',
|
||||||
|
' TWO.inc = function (n) {\n' +
|
||||||
|
' return n + 1;\n' +
|
||||||
|
' };'
|
||||||
|
],
|
||||||
|
sourceRoot: '/the/root',
|
||||||
|
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||||
|
};
|
||||||
|
exports.emptyMap = {
|
||||||
|
version: 3,
|
||||||
|
file: 'min.js',
|
||||||
|
names: [],
|
||||||
|
sources: [],
|
||||||
|
mappings: ''
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
function assertMapping(generatedLine, generatedColumn, originalSource,
|
||||||
|
originalLine, originalColumn, name, map, assert,
|
||||||
|
dontTestGenerated, dontTestOriginal) {
|
||||||
|
if (!dontTestOriginal) {
|
||||||
|
var origMapping = map.originalPositionFor({
|
||||||
|
line: generatedLine,
|
||||||
|
column: generatedColumn
|
||||||
|
});
|
||||||
|
assert.equal(origMapping.name, name,
|
||||||
|
'Incorrect name, expected ' + JSON.stringify(name)
|
||||||
|
+ ', got ' + JSON.stringify(origMapping.name));
|
||||||
|
assert.equal(origMapping.line, originalLine,
|
||||||
|
'Incorrect line, expected ' + JSON.stringify(originalLine)
|
||||||
|
+ ', got ' + JSON.stringify(origMapping.line));
|
||||||
|
assert.equal(origMapping.column, originalColumn,
|
||||||
|
'Incorrect column, expected ' + JSON.stringify(originalColumn)
|
||||||
|
+ ', got ' + JSON.stringify(origMapping.column));
|
||||||
|
|
||||||
|
var expectedSource;
|
||||||
|
|
||||||
|
if (originalSource && map.sourceRoot && originalSource.indexOf(map.sourceRoot) === 0) {
|
||||||
|
expectedSource = originalSource;
|
||||||
|
} else if (originalSource) {
|
||||||
|
expectedSource = map.sourceRoot
|
||||||
|
? util.join(map.sourceRoot, originalSource)
|
||||||
|
: originalSource;
|
||||||
|
} else {
|
||||||
|
expectedSource = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.equal(origMapping.source, expectedSource,
|
||||||
|
'Incorrect source, expected ' + JSON.stringify(expectedSource)
|
||||||
|
+ ', got ' + JSON.stringify(origMapping.source));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!dontTestGenerated) {
|
||||||
|
var genMapping = map.generatedPositionFor({
|
||||||
|
source: originalSource,
|
||||||
|
line: originalLine,
|
||||||
|
column: originalColumn
|
||||||
|
});
|
||||||
|
assert.equal(genMapping.line, generatedLine,
|
||||||
|
'Incorrect line, expected ' + JSON.stringify(generatedLine)
|
||||||
|
+ ', got ' + JSON.stringify(genMapping.line));
|
||||||
|
assert.equal(genMapping.column, generatedColumn,
|
||||||
|
'Incorrect column, expected ' + JSON.stringify(generatedColumn)
|
||||||
|
+ ', got ' + JSON.stringify(genMapping.column));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.assertMapping = assertMapping;
|
||||||
|
|
||||||
|
function assertEqualMaps(assert, actualMap, expectedMap) {
|
||||||
|
assert.equal(actualMap.version, expectedMap.version, "version mismatch");
|
||||||
|
assert.equal(actualMap.file, expectedMap.file, "file mismatch");
|
||||||
|
assert.equal(actualMap.names.length,
|
||||||
|
expectedMap.names.length,
|
||||||
|
"names length mismatch: " +
|
||||||
|
actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
|
||||||
|
for (var i = 0; i < actualMap.names.length; i++) {
|
||||||
|
assert.equal(actualMap.names[i],
|
||||||
|
expectedMap.names[i],
|
||||||
|
"names[" + i + "] mismatch: " +
|
||||||
|
actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
|
||||||
|
}
|
||||||
|
assert.equal(actualMap.sources.length,
|
||||||
|
expectedMap.sources.length,
|
||||||
|
"sources length mismatch: " +
|
||||||
|
actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
|
||||||
|
for (var i = 0; i < actualMap.sources.length; i++) {
|
||||||
|
assert.equal(actualMap.sources[i],
|
||||||
|
expectedMap.sources[i],
|
||||||
|
"sources[" + i + "] length mismatch: " +
|
||||||
|
actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
|
||||||
|
}
|
||||||
|
assert.equal(actualMap.sourceRoot,
|
||||||
|
expectedMap.sourceRoot,
|
||||||
|
"sourceRoot mismatch: " +
|
||||||
|
actualMap.sourceRoot + " != " + expectedMap.sourceRoot);
|
||||||
|
assert.equal(actualMap.mappings, expectedMap.mappings,
|
||||||
|
"mappings mismatch:\nActual: " + actualMap.mappings + "\nExpected: " + expectedMap.mappings);
|
||||||
|
if (actualMap.sourcesContent) {
|
||||||
|
assert.equal(actualMap.sourcesContent.length,
|
||||||
|
expectedMap.sourcesContent.length,
|
||||||
|
"sourcesContent length mismatch");
|
||||||
|
for (var i = 0; i < actualMap.sourcesContent.length; i++) {
|
||||||
|
assert.equal(actualMap.sourcesContent[i],
|
||||||
|
expectedMap.sourcesContent[i],
|
||||||
|
"sourcesContent[" + i + "] mismatch");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.assertEqualMaps = assertEqualMaps;
|
||||||
|
|
||||||
|
});
|
||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
define('lib/source-map/util', ['require', 'exports', 'module' , ], function(require, exports, module) {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This is a helper function for getting values from parameter/options
|
||||||
|
* objects.
|
||||||
|
*
|
||||||
|
* @param args The object we are extracting values from
|
||||||
|
* @param name The name of the property we are getting.
|
||||||
|
* @param defaultValue An optional value to return if the property is missing
|
||||||
|
* from the object. If this is not specified and the property is missing, an
|
||||||
|
* error will be thrown.
|
||||||
|
*/
|
||||||
|
function getArg(aArgs, aName, aDefaultValue) {
|
||||||
|
if (aName in aArgs) {
|
||||||
|
return aArgs[aName];
|
||||||
|
} else if (arguments.length === 3) {
|
||||||
|
return aDefaultValue;
|
||||||
|
} else {
|
||||||
|
throw new Error('"' + aName + '" is a required argument.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.getArg = getArg;
|
||||||
|
|
||||||
|
var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/;
|
||||||
|
var dataUrlRegexp = /^data:.+\,.+$/;
|
||||||
|
|
||||||
|
function urlParse(aUrl) {
|
||||||
|
var match = aUrl.match(urlRegexp);
|
||||||
|
if (!match) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
scheme: match[1],
|
||||||
|
auth: match[2],
|
||||||
|
host: match[3],
|
||||||
|
port: match[4],
|
||||||
|
path: match[5]
|
||||||
|
};
|
||||||
|
}
|
||||||
|
exports.urlParse = urlParse;
|
||||||
|
|
||||||
|
function urlGenerate(aParsedUrl) {
|
||||||
|
var url = '';
|
||||||
|
if (aParsedUrl.scheme) {
|
||||||
|
url += aParsedUrl.scheme + ':';
|
||||||
|
}
|
||||||
|
url += '//';
|
||||||
|
if (aParsedUrl.auth) {
|
||||||
|
url += aParsedUrl.auth + '@';
|
||||||
|
}
|
||||||
|
if (aParsedUrl.host) {
|
||||||
|
url += aParsedUrl.host;
|
||||||
|
}
|
||||||
|
if (aParsedUrl.port) {
|
||||||
|
url += ":" + aParsedUrl.port
|
||||||
|
}
|
||||||
|
if (aParsedUrl.path) {
|
||||||
|
url += aParsedUrl.path;
|
||||||
|
}
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
exports.urlGenerate = urlGenerate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalizes a path, or the path portion of a URL:
|
||||||
|
*
|
||||||
|
* - Replaces consequtive slashes with one slash.
|
||||||
|
* - Removes unnecessary '.' parts.
|
||||||
|
* - Removes unnecessary '<dir>/..' parts.
|
||||||
|
*
|
||||||
|
* Based on code in the Node.js 'path' core module.
|
||||||
|
*
|
||||||
|
* @param aPath The path or url to normalize.
|
||||||
|
*/
|
||||||
|
function normalize(aPath) {
|
||||||
|
var path = aPath;
|
||||||
|
var url = urlParse(aPath);
|
||||||
|
if (url) {
|
||||||
|
if (!url.path) {
|
||||||
|
return aPath;
|
||||||
|
}
|
||||||
|
path = url.path;
|
||||||
|
}
|
||||||
|
var isAbsolute = (path.charAt(0) === '/');
|
||||||
|
|
||||||
|
var parts = path.split(/\/+/);
|
||||||
|
for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {
|
||||||
|
part = parts[i];
|
||||||
|
if (part === '.') {
|
||||||
|
parts.splice(i, 1);
|
||||||
|
} else if (part === '..') {
|
||||||
|
up++;
|
||||||
|
} else if (up > 0) {
|
||||||
|
if (part === '') {
|
||||||
|
// The first part is blank if the path is absolute. Trying to go
|
||||||
|
// above the root is a no-op. Therefore we can remove all '..' parts
|
||||||
|
// directly after the root.
|
||||||
|
parts.splice(i + 1, up);
|
||||||
|
up = 0;
|
||||||
|
} else {
|
||||||
|
parts.splice(i, 2);
|
||||||
|
up--;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
path = parts.join('/');
|
||||||
|
|
||||||
|
if (path === '') {
|
||||||
|
path = isAbsolute ? '/' : '.';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url) {
|
||||||
|
url.path = path;
|
||||||
|
return urlGenerate(url);
|
||||||
|
}
|
||||||
|
return path;
|
||||||
|
}
|
||||||
|
exports.normalize = normalize;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Joins two paths/URLs.
|
||||||
|
*
|
||||||
|
* @param aRoot The root path or URL.
|
||||||
|
* @param aPath The path or URL to be joined with the root.
|
||||||
|
*
|
||||||
|
* - If aPath is a URL or a data URI, aPath is returned, unless aPath is a
|
||||||
|
* scheme-relative URL: Then the scheme of aRoot, if any, is prepended
|
||||||
|
* first.
|
||||||
|
* - Otherwise aPath is a path. If aRoot is a URL, then its path portion
|
||||||
|
* is updated with the result and aRoot is returned. Otherwise the result
|
||||||
|
* is returned.
|
||||||
|
* - If aPath is absolute, the result is aPath.
|
||||||
|
* - Otherwise the two paths are joined with a slash.
|
||||||
|
* - Joining for example 'http://' and 'www.example.com' is also supported.
|
||||||
|
*/
|
||||||
|
function join(aRoot, aPath) {
|
||||||
|
if (aRoot === "") {
|
||||||
|
aRoot = ".";
|
||||||
|
}
|
||||||
|
if (aPath === "") {
|
||||||
|
aPath = ".";
|
||||||
|
}
|
||||||
|
var aPathUrl = urlParse(aPath);
|
||||||
|
var aRootUrl = urlParse(aRoot);
|
||||||
|
if (aRootUrl) {
|
||||||
|
aRoot = aRootUrl.path || '/';
|
||||||
|
}
|
||||||
|
|
||||||
|
// `join(foo, '//www.example.org')`
|
||||||
|
if (aPathUrl && !aPathUrl.scheme) {
|
||||||
|
if (aRootUrl) {
|
||||||
|
aPathUrl.scheme = aRootUrl.scheme;
|
||||||
|
}
|
||||||
|
return urlGenerate(aPathUrl);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (aPathUrl || aPath.match(dataUrlRegexp)) {
|
||||||
|
return aPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
// `join('http://', 'www.example.com')`
|
||||||
|
if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {
|
||||||
|
aRootUrl.host = aPath;
|
||||||
|
return urlGenerate(aRootUrl);
|
||||||
|
}
|
||||||
|
|
||||||
|
var joined = aPath.charAt(0) === '/'
|
||||||
|
? aPath
|
||||||
|
: normalize(aRoot.replace(/\/+$/, '') + '/' + aPath);
|
||||||
|
|
||||||
|
if (aRootUrl) {
|
||||||
|
aRootUrl.path = joined;
|
||||||
|
return urlGenerate(aRootUrl);
|
||||||
|
}
|
||||||
|
return joined;
|
||||||
|
}
|
||||||
|
exports.join = join;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make a path relative to a URL or another path.
|
||||||
|
*
|
||||||
|
* @param aRoot The root path or URL.
|
||||||
|
* @param aPath The path or URL to be made relative to aRoot.
|
||||||
|
*/
|
||||||
|
function relative(aRoot, aPath) {
|
||||||
|
if (aRoot === "") {
|
||||||
|
aRoot = ".";
|
||||||
|
}
|
||||||
|
|
||||||
|
aRoot = aRoot.replace(/\/$/, '');
|
||||||
|
|
||||||
|
// XXX: It is possible to remove this block, and the tests still pass!
|
||||||
|
var url = urlParse(aRoot);
|
||||||
|
if (aPath.charAt(0) == "/" && url && url.path == "/") {
|
||||||
|
return aPath.slice(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return aPath.indexOf(aRoot + '/') === 0
|
||||||
|
? aPath.substr(aRoot.length + 1)
|
||||||
|
: aPath;
|
||||||
|
}
|
||||||
|
exports.relative = relative;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Because behavior goes wacky when you set `__proto__` on objects, we
|
||||||
|
* have to prefix all the strings in our set with an arbitrary character.
|
||||||
|
*
|
||||||
|
* See https://github.com/mozilla/source-map/pull/31 and
|
||||||
|
* https://github.com/mozilla/source-map/issues/30
|
||||||
|
*
|
||||||
|
* @param String aStr
|
||||||
|
*/
|
||||||
|
function toSetString(aStr) {
|
||||||
|
return '$' + aStr;
|
||||||
|
}
|
||||||
|
exports.toSetString = toSetString;
|
||||||
|
|
||||||
|
function fromSetString(aStr) {
|
||||||
|
return aStr.substr(1);
|
||||||
|
}
|
||||||
|
exports.fromSetString = fromSetString;
|
||||||
|
|
||||||
|
function strcmp(aStr1, aStr2) {
|
||||||
|
var s1 = aStr1 || "";
|
||||||
|
var s2 = aStr2 || "";
|
||||||
|
return (s1 > s2) - (s1 < s2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Comparator between two mappings where the original positions are compared.
|
||||||
|
*
|
||||||
|
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
||||||
|
* mappings with the same original source/line/column, but different generated
|
||||||
|
* line and column the same. Useful when searching for a mapping with a
|
||||||
|
* stubbed out mapping.
|
||||||
|
*/
|
||||||
|
function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {
|
||||||
|
var cmp;
|
||||||
|
|
||||||
|
cmp = strcmp(mappingA.source, mappingB.source);
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||||
|
if (cmp || onlyCompareOriginal) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = strcmp(mappingA.name, mappingB.name);
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
return mappingA.generatedColumn - mappingB.generatedColumn;
|
||||||
|
};
|
||||||
|
exports.compareByOriginalPositions = compareByOriginalPositions;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Comparator between two mappings where the generated positions are
|
||||||
|
* compared.
|
||||||
|
*
|
||||||
|
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
||||||
|
* mappings with the same generated line and column, but different
|
||||||
|
* source/name/original line and column the same. Useful when searching for a
|
||||||
|
* mapping with a stubbed out mapping.
|
||||||
|
*/
|
||||||
|
function compareByGeneratedPositions(mappingA, mappingB, onlyCompareGenerated) {
|
||||||
|
var cmp;
|
||||||
|
|
||||||
|
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||||
|
if (cmp || onlyCompareGenerated) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = strcmp(mappingA.source, mappingB.source);
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
return strcmp(mappingA.name, mappingB.name);
|
||||||
|
};
|
||||||
|
exports.compareByGeneratedPositions = compareByGeneratedPositions;
|
||||||
|
|
||||||
|
});
|
||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
function runSourceMapTests(modName, do_throw) {
|
||||||
|
let mod = require(modName);
|
||||||
|
let assert = require('test/source-map/assert');
|
||||||
|
let util = require('test/source-map/util');
|
||||||
|
|
||||||
|
assert.init(do_throw);
|
||||||
|
|
||||||
|
for (let k in mod) {
|
||||||
|
if (/^test/.test(k)) {
|
||||||
|
mod[k](assert, util);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
this.runSourceMapTests = runSourceMapTests;
|
||||||
34
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_api.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
/*
 * WARNING!
 *
 * Do not edit this file directly, it is built from the sources at
 * https://github.com/mozilla/source-map/
 */

Components.utils.import('resource://test/Utils.jsm');
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2012 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
define("test/source-map/test-api", ["require", "exports", "module"], function (require, exports, module) {

  var sourceMap;
  try {
    sourceMap = require('source-map');
  } catch (e) {
    sourceMap = {};
    Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
  }

  exports['test that the api is properly exposed in the top level'] = function (assert, util) {
    assert.equal(typeof sourceMap.SourceMapGenerator, "function");
    assert.equal(typeof sourceMap.SourceMapConsumer, "function");
    assert.equal(typeof sourceMap.SourceNode, "function");
  };

});
function run_test() {
  runSourceMapTests('test/source-map/test-api', do_throw);
}
112
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_array_set.js
generated
vendored
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
/*
|
||||||
|
* WARNING!
|
||||||
|
*
|
||||||
|
* Do not edit this file directly, it is built from the sources at
|
||||||
|
* https://github.com/mozilla/source-map/
|
||||||
|
*/
|
||||||
|
|
||||||
|
Components.utils.import('resource://test/Utils.jsm');
|
||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
define("test/source-map/test-array-set", ["require", "exports", "module"], function (require, exports, module) {
|
||||||
|
|
||||||
|
var ArraySet = require('source-map/array-set').ArraySet;
|
||||||
|
|
||||||
|
function makeTestSet() {
|
||||||
|
var set = new ArraySet();
|
||||||
|
for (var i = 0; i < 100; i++) {
|
||||||
|
set.add(String(i));
|
||||||
|
}
|
||||||
|
return set;
|
||||||
|
}
|
||||||
|
|
||||||
|
exports['test .has() membership'] = function (assert, util) {
|
||||||
|
var set = makeTestSet();
|
||||||
|
for (var i = 0; i < 100; i++) {
|
||||||
|
assert.ok(set.has(String(i)));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test .indexOf() elements'] = function (assert, util) {
|
||||||
|
var set = makeTestSet();
|
||||||
|
for (var i = 0; i < 100; i++) {
|
||||||
|
assert.strictEqual(set.indexOf(String(i)), i);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test .at() indexing'] = function (assert, util) {
|
||||||
|
var set = makeTestSet();
|
||||||
|
for (var i = 0; i < 100; i++) {
|
||||||
|
assert.strictEqual(set.at(i), String(i));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test creating from an array'] = function (assert, util) {
|
||||||
|
var set = ArraySet.fromArray(['foo', 'bar', 'baz', 'quux', 'hasOwnProperty']);
|
||||||
|
|
||||||
|
assert.ok(set.has('foo'));
|
||||||
|
assert.ok(set.has('bar'));
|
||||||
|
assert.ok(set.has('baz'));
|
||||||
|
assert.ok(set.has('quux'));
|
||||||
|
assert.ok(set.has('hasOwnProperty'));
|
||||||
|
|
||||||
|
assert.strictEqual(set.indexOf('foo'), 0);
|
||||||
|
assert.strictEqual(set.indexOf('bar'), 1);
|
||||||
|
assert.strictEqual(set.indexOf('baz'), 2);
|
||||||
|
assert.strictEqual(set.indexOf('quux'), 3);
|
||||||
|
|
||||||
|
assert.strictEqual(set.at(0), 'foo');
|
||||||
|
assert.strictEqual(set.at(1), 'bar');
|
||||||
|
assert.strictEqual(set.at(2), 'baz');
|
||||||
|
assert.strictEqual(set.at(3), 'quux');
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that you can add __proto__; see github issue #30'] = function (assert, util) {
|
||||||
|
var set = new ArraySet();
|
||||||
|
set.add('__proto__');
|
||||||
|
assert.ok(set.has('__proto__'));
|
||||||
|
assert.strictEqual(set.at(0), '__proto__');
|
||||||
|
assert.strictEqual(set.indexOf('__proto__'), 0);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test .fromArray() with duplicates'] = function (assert, util) {
|
||||||
|
var set = ArraySet.fromArray(['foo', 'foo']);
|
||||||
|
assert.ok(set.has('foo'));
|
||||||
|
assert.strictEqual(set.at(0), 'foo');
|
||||||
|
assert.strictEqual(set.indexOf('foo'), 0);
|
||||||
|
assert.strictEqual(set.toArray().length, 1);
|
||||||
|
|
||||||
|
set = ArraySet.fromArray(['foo', 'foo'], true);
|
||||||
|
assert.ok(set.has('foo'));
|
||||||
|
assert.strictEqual(set.at(0), 'foo');
|
||||||
|
assert.strictEqual(set.at(1), 'foo');
|
||||||
|
assert.strictEqual(set.indexOf('foo'), 0);
|
||||||
|
assert.strictEqual(set.toArray().length, 2);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test .add() with duplicates'] = function (assert, util) {
|
||||||
|
var set = new ArraySet();
|
||||||
|
set.add('foo');
|
||||||
|
|
||||||
|
set.add('foo');
|
||||||
|
assert.ok(set.has('foo'));
|
||||||
|
assert.strictEqual(set.at(0), 'foo');
|
||||||
|
assert.strictEqual(set.indexOf('foo'), 0);
|
||||||
|
assert.strictEqual(set.toArray().length, 1);
|
||||||
|
|
||||||
|
set.add('foo', true);
|
||||||
|
assert.ok(set.has('foo'));
|
||||||
|
assert.strictEqual(set.at(0), 'foo');
|
||||||
|
assert.strictEqual(set.at(1), 'foo');
|
||||||
|
assert.strictEqual(set.indexOf('foo'), 0);
|
||||||
|
assert.strictEqual(set.toArray().length, 2);
|
||||||
|
};
|
||||||
|
|
||||||
|
});
|
||||||
|
function run_test() {
|
||||||
|
runSourceMapTests('test/source-map/test-array-set', do_throw);
|
||||||
|
}
|
||||||
43
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_base64.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
/*
 * WARNING!
 *
 * Do not edit this file directly, it is built from the sources at
 * https://github.com/mozilla/source-map/
 */

Components.utils.import('resource://test/Utils.jsm');
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
define("test/source-map/test-base64", ["require", "exports", "module"], function (require, exports, module) {

  var base64 = require('source-map/base64');

  exports['test out of range encoding'] = function (assert, util) {
    assert.throws(function () {
      base64.encode(-1);
    });
    assert.throws(function () {
      base64.encode(64);
    });
  };

  exports['test out of range decoding'] = function (assert, util) {
    assert.throws(function () {
      base64.decode('=');
    });
  };

  exports['test normal encoding and decoding'] = function (assert, util) {
    for (var i = 0; i < 64; i++) {
      assert.equal(base64.decode(base64.encode(i)), i);
    }
  };

});
function run_test() {
  runSourceMapTests('test/source-map/test-base64', do_throw);
}
32
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_base64_vlq.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
/*
 * WARNING!
 *
 * Do not edit this file directly, it is built from the sources at
 * https://github.com/mozilla/source-map/
 */

Components.utils.import('resource://test/Utils.jsm');
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
define("test/source-map/test-base64-vlq", ["require", "exports", "module"], function (require, exports, module) {

  var base64VLQ = require('source-map/base64-vlq');

  exports['test normal encoding and decoding'] = function (assert, util) {
    var result = {};
    for (var i = -255; i < 256; i++) {
      var str = base64VLQ.encode(i);
      base64VLQ.decode(str, 0, result);
      assert.equal(result.value, i);
      assert.equal(result.rest, str.length);
    }
  };

});
function run_test() {
  runSourceMapTests('test/source-map/test-base64-vlq', do_throw);
}
102
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_binary_search.js
generated
vendored
Normal file
@@ -0,0 +1,102 @@
|
|||||||
|
/*
|
||||||
|
* WARNING!
|
||||||
|
*
|
||||||
|
* Do not edit this file directly, it is built from the sources at
|
||||||
|
* https://github.com/mozilla/source-map/
|
||||||
|
*/
|
||||||
|
|
||||||
|
Components.utils.import('resource://test/Utils.jsm');
|
||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
define("test/source-map/test-binary-search", ["require", "exports", "module"], function (require, exports, module) {
|
||||||
|
|
||||||
|
var binarySearch = require('source-map/binary-search');
|
||||||
|
|
||||||
|
function numberCompare(a, b) {
|
||||||
|
return a - b;
|
||||||
|
}
|
||||||
|
|
||||||
|
exports['test too high with lub bias'] = function (assert, util) {
|
||||||
|
var needle = 30;
|
||||||
|
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||||
|
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
binarySearch.search(needle, haystack, numberCompare);
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(binarySearch.search(needle, haystack, numberCompare), -1);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test too low with lub bias'] = function (assert, util) {
|
||||||
|
var needle = 1;
|
||||||
|
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||||
|
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
binarySearch.search(needle, haystack, numberCompare, true);
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 2);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test exact search with lub bias'] = function (assert, util) {
|
||||||
|
var needle = 4;
|
||||||
|
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||||
|
|
||||||
|
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 4);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test fuzzy search with lub bias'] = function (assert, util) {
|
||||||
|
var needle = 19;
|
||||||
|
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||||
|
|
||||||
|
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 20);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test too high with glb bias'] = function (assert, util) {
|
||||||
|
var needle = 30;
|
||||||
|
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||||
|
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
binarySearch.search(needle, haystack, numberCompare);
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare,
|
||||||
|
binarySearch.GREATEST_LOWER_BOUND)], 20);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test too low with glb bias'] = function (assert, util) {
|
||||||
|
var needle = 1;
|
||||||
|
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||||
|
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
binarySearch.search(needle, haystack, numberCompare,
|
||||||
|
binarySearch.GREATEST_LOWER_BOUND);
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(binarySearch.search(needle, haystack, numberCompare,
|
||||||
|
binarySearch.GREATEST_LOWER_BOUND), -1);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test exact search with glb bias'] = function (assert, util) {
|
||||||
|
var needle = 4;
|
||||||
|
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||||
|
|
||||||
|
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare,
|
||||||
|
binarySearch.GREATEST_LOWER_BOUND)], 4);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test fuzzy search with glb bias'] = function (assert, util) {
|
||||||
|
var needle = 19;
|
||||||
|
var haystack = [2,4,6,8,10,12,14,16,18,20];
|
||||||
|
|
||||||
|
assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare,
|
||||||
|
binarySearch.GREATEST_LOWER_BOUND)], 18);
|
||||||
|
};
|
||||||
|
});
|
||||||
|
function run_test() {
|
||||||
|
runSourceMapTests('test/source-map/test-binary-search', do_throw);
|
||||||
|
}
|
||||||
92
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_dog_fooding.js
generated
vendored
Normal file
@@ -0,0 +1,92 @@
/*
* WARNING!
*
* Do not edit this file directly, it is built from the sources at
* https://github.com/mozilla/source-map/
*/

Components.utils.import('resource://test/Utils.jsm');
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
define("test/source-map/test-dog-fooding", ["require", "exports", "module"], function (require, exports, module) {

var SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
var SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;

exports['test eating our own dog food'] = function (assert, util) {
var smg = new SourceMapGenerator({
file: 'testing.js',
sourceRoot: '/wu/tang'
});

smg.addMapping({
source: 'gza.coffee',
original: { line: 1, column: 0 },
generated: { line: 2, column: 2 }
});

smg.addMapping({
source: 'gza.coffee',
original: { line: 2, column: 0 },
generated: { line: 3, column: 2 }
});

smg.addMapping({
source: 'gza.coffee',
original: { line: 3, column: 0 },
generated: { line: 4, column: 2 }
});

smg.addMapping({
source: 'gza.coffee',
original: { line: 4, column: 0 },
generated: { line: 5, column: 2 }
});

smg.addMapping({
source: 'gza.coffee',
original: { line: 5, column: 10 },
generated: { line: 6, column: 12 }
});

var smc = new SourceMapConsumer(smg.toString());

// Exact
util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 0, null, smc, assert);
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 0, null, smc, assert);
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 0, null, smc, assert);
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 0, null, smc, assert);
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 10, null, smc, assert);

// Fuzzy

// Generated to original
util.assertMapping(2, 0, '/wu/tang/gza.coffee', 1, 0, null, smc, assert, true);
util.assertMapping(2, 9, null, null, null, null, smc, assert, true);
util.assertMapping(3, 0, '/wu/tang/gza.coffee', 2, 0, null, smc, assert, true);
util.assertMapping(3, 9, null, null, null, null, smc, assert, true);
util.assertMapping(4, 0, '/wu/tang/gza.coffee', 3, 0, null, smc, assert, true);
util.assertMapping(4, 9, null, null, null, null, smc, assert, true);
util.assertMapping(5, 0, '/wu/tang/gza.coffee', 4, 0, null, smc, assert, true);
util.assertMapping(5, 9, null, null, null, null, smc, assert, true);
util.assertMapping(6, 0, '/wu/tang/gza.coffee', 5, 10, null, smc, assert, true);
util.assertMapping(6, 9, '/wu/tang/gza.coffee', 5, 10, null, smc, assert, true);
util.assertMapping(6, 13, null, null, null, null, smc, assert, true);

// Original to generated
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 1, 1, null, smc, assert, null, true);
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 2, 3, null, smc, assert, null, true);
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 3, 6, null, smc, assert, null, true);
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 4, 9, null, smc, assert, null, true);
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 9, null, smc, assert, null, true);
util.assertMapping(null, null, '/wu/tang/gza.coffee', 6, 19, null, smc, assert, null, true);
};

});
function run_test() {
runSourceMapTests('test/source-map/test-dog-fooding', do_throw);
}
882
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_source_map_consumer.js
generated
vendored
Normal file
@@ -0,0 +1,882 @@
/*
* WARNING!
*
* Do not edit this file directly, it is built from the sources at
* https://github.com/mozilla/source-map/
*/

Components.utils.import('resource://test/Utils.jsm');
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
define("test/source-map/test-source-map-consumer", ["require", "exports", "module"], function (require, exports, module) {

var SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
var IndexedSourceMapConsumer = require('source-map/source-map-consumer').IndexedSourceMapConsumer;
var BasicSourceMapConsumer = require('source-map/source-map-consumer').BasicSourceMapConsumer;
var SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;

exports['test that we can instantiate with a string or an object'] = function (assert, util) {
assert.doesNotThrow(function () {
var map = new SourceMapConsumer(util.testMap);
});
assert.doesNotThrow(function () {
var map = new SourceMapConsumer(JSON.stringify(util.testMap));
});
};

exports['test that the object returned from new SourceMapConsumer inherits from SourceMapConsumer'] = function (assert, util) {
assert.ok(new SourceMapConsumer(util.testMap) instanceof SourceMapConsumer);
}

exports['test that a BasicSourceMapConsumer is returned for sourcemaps without sections'] = function(assert, util) {
assert.ok(new SourceMapConsumer(util.testMap) instanceof BasicSourceMapConsumer);
};

exports['test that an IndexedSourceMapConsumer is returned for sourcemaps with sections'] = function(assert, util) {
assert.ok(new SourceMapConsumer(util.indexedTestMap) instanceof IndexedSourceMapConsumer);
};

exports['test that the `sources` field has the original sources'] = function (assert, util) {
var map;
var sources;

map = new SourceMapConsumer(util.testMap);
sources = map.sources;
assert.equal(sources[0], '/the/root/one.js');
assert.equal(sources[1], '/the/root/two.js');
assert.equal(sources.length, 2);

map = new SourceMapConsumer(util.indexedTestMap);
sources = map.sources;
assert.equal(sources[0], '/the/root/one.js');
assert.equal(sources[1], '/the/root/two.js');
assert.equal(sources.length, 2);

map = new SourceMapConsumer(util.indexedTestMapDifferentSourceRoots);
sources = map.sources;
assert.equal(sources[0], '/the/root/one.js');
assert.equal(sources[1], '/different/root/two.js');
assert.equal(sources.length, 2);

map = new SourceMapConsumer(util.testMapNoSourceRoot);
sources = map.sources;
assert.equal(sources[0], 'one.js');
assert.equal(sources[1], 'two.js');
assert.equal(sources.length, 2);

map = new SourceMapConsumer(util.testMapEmptySourceRoot);
sources = map.sources;
assert.equal(sources[0], 'one.js');
assert.equal(sources[1], 'two.js');
assert.equal(sources.length, 2);
};
|
||||||
|
|
||||||
|
exports['test that the source root is reflected in a mapping\'s source field'] = function (assert, util) {
|
||||||
|
var map;
|
||||||
|
var mapping;
|
||||||
|
|
||||||
|
map = new SourceMapConsumer(util.testMap);
|
||||||
|
|
||||||
|
mapping = map.originalPositionFor({
|
||||||
|
line: 2,
|
||||||
|
column: 1
|
||||||
|
});
|
||||||
|
assert.equal(mapping.source, '/the/root/two.js');
|
||||||
|
|
||||||
|
mapping = map.originalPositionFor({
|
||||||
|
line: 1,
|
||||||
|
column: 1
|
||||||
|
});
|
||||||
|
assert.equal(mapping.source, '/the/root/one.js');
|
||||||
|
|
||||||
|
|
||||||
|
map = new SourceMapConsumer(util.testMapNoSourceRoot);
|
||||||
|
|
||||||
|
mapping = map.originalPositionFor({
|
||||||
|
line: 2,
|
||||||
|
column: 1
|
||||||
|
});
|
||||||
|
assert.equal(mapping.source, 'two.js');
|
||||||
|
|
||||||
|
mapping = map.originalPositionFor({
|
||||||
|
line: 1,
|
||||||
|
column: 1
|
||||||
|
});
|
||||||
|
assert.equal(mapping.source, 'one.js');
|
||||||
|
|
||||||
|
|
||||||
|
map = new SourceMapConsumer(util.testMapEmptySourceRoot);
|
||||||
|
|
||||||
|
mapping = map.originalPositionFor({
|
||||||
|
line: 2,
|
||||||
|
column: 1
|
||||||
|
});
|
||||||
|
assert.equal(mapping.source, 'two.js');
|
||||||
|
|
||||||
|
mapping = map.originalPositionFor({
|
||||||
|
line: 1,
|
||||||
|
column: 1
|
||||||
|
});
|
||||||
|
assert.equal(mapping.source, 'one.js');
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test mapping tokens back exactly'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.testMap);
|
||||||
|
|
||||||
|
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
|
||||||
|
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
|
||||||
|
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
|
||||||
|
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
|
||||||
|
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
|
||||||
|
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
|
||||||
|
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
|
||||||
|
|
||||||
|
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
|
||||||
|
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
|
||||||
|
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
|
||||||
|
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
|
||||||
|
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
|
||||||
|
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test mapping tokens back exactly in indexed source map'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||||
|
|
||||||
|
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
|
||||||
|
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
|
||||||
|
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
|
||||||
|
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
|
||||||
|
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
|
||||||
|
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
|
||||||
|
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
|
||||||
|
|
||||||
|
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
|
||||||
|
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
|
||||||
|
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
|
||||||
|
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
|
||||||
|
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
|
||||||
|
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
exports['test mapping tokens back exactly'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.testMap);
|
||||||
|
|
||||||
|
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
|
||||||
|
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
|
||||||
|
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
|
||||||
|
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
|
||||||
|
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
|
||||||
|
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
|
||||||
|
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
|
||||||
|
|
||||||
|
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
|
||||||
|
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
|
||||||
|
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
|
||||||
|
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
|
||||||
|
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
|
||||||
|
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test mapping tokens fuzzy'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.testMap);
|
||||||
|
|
||||||
|
// Finding original positions
|
||||||
|
util.assertMapping(1, 16, '/the/root/one.js', 1, 21, 'bar', map, assert, true);
|
||||||
|
util.assertMapping(1, 26, '/the/root/one.js', 2, 10, 'baz', map, assert, true);
|
||||||
|
util.assertMapping(2, 6, '/the/root/two.js', 1, 11, null, map, assert, true);
|
||||||
|
|
||||||
|
// Finding generated positions
|
||||||
|
util.assertMapping(1, 18, '/the/root/one.js', 1, 20, 'bar', map, assert, null, true);
|
||||||
|
util.assertMapping(1, 28, '/the/root/one.js', 2, 7, 'baz', map, assert, null, true);
|
||||||
|
util.assertMapping(2, 9, '/the/root/two.js', 1, 6, null, map, assert, null, true);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test mapping tokens fuzzy in indexed source map'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||||
|
|
||||||
|
// Finding original positions
|
||||||
|
util.assertMapping(1, 16, '/the/root/one.js', 1, 21, 'bar', map, assert, true);
|
||||||
|
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert, true);
|
||||||
|
util.assertMapping(2, 6, '/the/root/two.js', 1, 11, null, map, assert, true);
|
||||||
|
|
||||||
|
// Finding generated positions
|
||||||
|
util.assertMapping(1, 18, '/the/root/one.js', 1, 20, 'bar', map, assert, null, true);
|
||||||
|
util.assertMapping(1, 28, '/the/root/one.js', 2, 7, 'baz', map, assert, null, true);
|
||||||
|
util.assertMapping(2, 9, '/the/root/two.js', 1, 6, null, map, assert, null, true);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test mappings and end of lines'] = function (assert, util) {
|
||||||
|
var smg = new SourceMapGenerator({
|
||||||
|
file: 'foo.js'
|
||||||
|
});
|
||||||
|
smg.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
source: 'bar.js'
|
||||||
|
});
|
||||||
|
smg.addMapping({
|
||||||
|
original: { line: 2, column: 2 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'bar.js'
|
||||||
|
});
|
||||||
|
|
||||||
|
var map = SourceMapConsumer.fromSourceMap(smg);
|
||||||
|
|
||||||
|
// When finding original positions, mappings end at the end of the line.
|
||||||
|
util.assertMapping(2, 3, null, null, null, null, map, assert, true)
|
||||||
|
|
||||||
|
// When finding generated positions, mappings do not end at the end of the line.
|
||||||
|
util.assertMapping(2, 2, 'bar.js', 1, 2, null, map, assert, null, true);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test creating source map consumers with )]}\' prefix'] = function (assert, util) {
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
var map = new SourceMapConsumer(")]}'" + JSON.stringify(util.testMap));
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test eachMapping'] = function (assert, util) {
|
||||||
|
var map;
|
||||||
|
|
||||||
|
map = new SourceMapConsumer(util.testMap);
|
||||||
|
var previousLine = -Infinity;
|
||||||
|
var previousColumn = -Infinity;
|
||||||
|
map.eachMapping(function (mapping) {
|
||||||
|
assert.ok(mapping.generatedLine >= previousLine);
|
||||||
|
|
||||||
|
assert.ok(mapping.source === '/the/root/one.js' || mapping.source === '/the/root/two.js');
|
||||||
|
|
||||||
|
if (mapping.generatedLine === previousLine) {
|
||||||
|
assert.ok(mapping.generatedColumn >= previousColumn);
|
||||||
|
previousColumn = mapping.generatedColumn;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
previousLine = mapping.generatedLine;
|
||||||
|
previousColumn = -Infinity;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
map = new SourceMapConsumer(util.testMapNoSourceRoot);
|
||||||
|
map.eachMapping(function (mapping) {
|
||||||
|
assert.ok(mapping.source === 'one.js' || mapping.source === 'two.js');
|
||||||
|
});
|
||||||
|
|
||||||
|
map = new SourceMapConsumer(util.testMapEmptySourceRoot);
|
||||||
|
map.eachMapping(function (mapping) {
|
||||||
|
assert.ok(mapping.source === 'one.js' || mapping.source === 'two.js');
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test eachMapping for indexed source maps'] = function(assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||||
|
var previousLine = -Infinity;
|
||||||
|
var previousColumn = -Infinity;
|
||||||
|
map.eachMapping(function (mapping) {
|
||||||
|
assert.ok(mapping.generatedLine >= previousLine);
|
||||||
|
|
||||||
|
if (mapping.source) {
|
||||||
|
assert.equal(mapping.source.indexOf(util.testMap.sourceRoot), 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (mapping.generatedLine === previousLine) {
|
||||||
|
assert.ok(mapping.generatedColumn >= previousColumn);
|
||||||
|
previousColumn = mapping.generatedColumn;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
previousLine = mapping.generatedLine;
|
||||||
|
previousColumn = -Infinity;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
exports['test iterating over mappings in a different order'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.testMap);
|
||||||
|
var previousLine = -Infinity;
|
||||||
|
var previousColumn = -Infinity;
|
||||||
|
var previousSource = "";
|
||||||
|
map.eachMapping(function (mapping) {
|
||||||
|
assert.ok(mapping.source >= previousSource);
|
||||||
|
|
||||||
|
if (mapping.source === previousSource) {
|
||||||
|
assert.ok(mapping.originalLine >= previousLine);
|
||||||
|
|
||||||
|
if (mapping.originalLine === previousLine) {
|
||||||
|
assert.ok(mapping.originalColumn >= previousColumn);
|
||||||
|
previousColumn = mapping.originalColumn;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
previousLine = mapping.originalLine;
|
||||||
|
previousColumn = -Infinity;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
previousSource = mapping.source;
|
||||||
|
previousLine = -Infinity;
|
||||||
|
previousColumn = -Infinity;
|
||||||
|
}
|
||||||
|
}, null, SourceMapConsumer.ORIGINAL_ORDER);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test iterating over mappings in a different order in indexed source maps'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||||
|
var previousLine = -Infinity;
|
||||||
|
var previousColumn = -Infinity;
|
||||||
|
var previousSource = "";
|
||||||
|
map.eachMapping(function (mapping) {
|
||||||
|
assert.ok(mapping.source >= previousSource);
|
||||||
|
|
||||||
|
if (mapping.source === previousSource) {
|
||||||
|
assert.ok(mapping.originalLine >= previousLine);
|
||||||
|
|
||||||
|
if (mapping.originalLine === previousLine) {
|
||||||
|
assert.ok(mapping.originalColumn >= previousColumn);
|
||||||
|
previousColumn = mapping.originalColumn;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
previousLine = mapping.originalLine;
|
||||||
|
previousColumn = -Infinity;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
previousSource = mapping.source;
|
||||||
|
previousLine = -Infinity;
|
||||||
|
previousColumn = -Infinity;
|
||||||
|
}
|
||||||
|
}, null, SourceMapConsumer.ORIGINAL_ORDER);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that we can set the context for `this` in eachMapping'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.testMap);
|
||||||
|
var context = {};
|
||||||
|
map.eachMapping(function () {
|
||||||
|
assert.equal(this, context);
|
||||||
|
}, context);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that we can set the context for `this` in eachMapping in indexed source maps'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||||
|
var context = {};
|
||||||
|
map.eachMapping(function () {
|
||||||
|
assert.equal(this, context);
|
||||||
|
}, context);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that the `sourcesContent` field has the original sources'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
|
||||||
|
var sourcesContent = map.sourcesContent;
|
||||||
|
|
||||||
|
assert.equal(sourcesContent[0], ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||||
|
assert.equal(sourcesContent[1], ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||||
|
assert.equal(sourcesContent.length, 2);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that we can get the original sources for the sources'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
|
||||||
|
var sources = map.sources;
|
||||||
|
|
||||||
|
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||||
|
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||||
|
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||||
|
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||||
|
assert.throws(function () {
|
||||||
|
map.sourceContentFor("");
|
||||||
|
}, Error);
|
||||||
|
assert.throws(function () {
|
||||||
|
map.sourceContentFor("/the/root/three.js");
|
||||||
|
}, Error);
|
||||||
|
assert.throws(function () {
|
||||||
|
map.sourceContentFor("three.js");
|
||||||
|
}, Error);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that we can get the original source content with relative source paths'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.testMapRelativeSources);
|
||||||
|
var sources = map.sources;
|
||||||
|
|
||||||
|
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||||
|
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||||
|
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||||
|
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||||
|
assert.throws(function () {
|
||||||
|
map.sourceContentFor("");
|
||||||
|
}, Error);
|
||||||
|
assert.throws(function () {
|
||||||
|
map.sourceContentFor("/the/root/three.js");
|
||||||
|
}, Error);
|
||||||
|
assert.throws(function () {
|
||||||
|
map.sourceContentFor("three.js");
|
||||||
|
}, Error);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that we can get the original source content for the sources on an indexed source map'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||||
|
var sources = map.sources;
|
||||||
|
|
||||||
|
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||||
|
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||||
|
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||||
|
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||||
|
assert.throws(function () {
|
||||||
|
map.sourceContentFor("");
|
||||||
|
}, Error);
|
||||||
|
assert.throws(function () {
|
||||||
|
map.sourceContentFor("/the/root/three.js");
|
||||||
|
}, Error);
|
||||||
|
assert.throws(function () {
|
||||||
|
map.sourceContentFor("three.js");
|
||||||
|
}, Error);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
exports['test sourceRoot + generatedPositionFor'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
sourceRoot: 'foo/bar',
|
||||||
|
file: 'baz.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'bang.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 5, column: 5 },
|
||||||
|
generated: { line: 6, column: 6 },
|
||||||
|
source: 'bang.coffee'
|
||||||
|
});
|
||||||
|
map = new SourceMapConsumer(map.toString());
|
||||||
|
|
||||||
|
// Should handle without sourceRoot.
|
||||||
|
var pos = map.generatedPositionFor({
|
||||||
|
line: 1,
|
||||||
|
column: 1,
|
||||||
|
source: 'bang.coffee'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(pos.line, 2);
|
||||||
|
assert.equal(pos.column, 2);
|
||||||
|
|
||||||
|
// Should handle with sourceRoot.
|
||||||
|
var pos = map.generatedPositionFor({
|
||||||
|
line: 1,
|
||||||
|
column: 1,
|
||||||
|
source: 'foo/bar/bang.coffee'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(pos.line, 2);
|
||||||
|
assert.equal(pos.column, 2);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test allGeneratedPositionsFor'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'bar.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 2, column: 1 },
|
||||||
|
generated: { line: 3, column: 2 },
|
||||||
|
source: 'bar.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 2, column: 2 },
|
||||||
|
generated: { line: 3, column: 3 },
|
||||||
|
source: 'bar.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 3, column: 1 },
|
||||||
|
generated: { line: 4, column: 2 },
|
||||||
|
source: 'bar.coffee'
|
||||||
|
});
|
||||||
|
map = new SourceMapConsumer(map.toString());
|
||||||
|
|
||||||
|
var mappings = map.allGeneratedPositionsFor({
|
||||||
|
line: 2,
|
||||||
|
source: 'bar.coffee'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(mappings.length, 2);
|
||||||
|
assert.equal(mappings[0].line, 3);
|
||||||
|
assert.equal(mappings[0].column, 2);
|
||||||
|
assert.equal(mappings[1].line, 3);
|
||||||
|
assert.equal(mappings[1].column, 3);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test allGeneratedPositionsFor for line with no mappings'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'bar.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 3, column: 1 },
|
||||||
|
generated: { line: 4, column: 2 },
|
||||||
|
source: 'bar.coffee'
|
||||||
|
});
|
||||||
|
map = new SourceMapConsumer(map.toString());
|
||||||
|
|
||||||
|
var mappings = map.allGeneratedPositionsFor({
|
||||||
|
line: 2,
|
||||||
|
source: 'bar.coffee'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(mappings.length, 0);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test allGeneratedPositionsFor source map with no mappings'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated.js'
|
||||||
|
});
|
||||||
|
map = new SourceMapConsumer(map.toString());
|
||||||
|
|
||||||
|
var mappings = map.allGeneratedPositionsFor({
|
||||||
|
line: 2,
|
||||||
|
source: 'bar.coffee'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(mappings.length, 0);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test computeColumnSpans'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 2, column: 1 },
|
||||||
|
generated: { line: 2, column: 1 },
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 2, column: 2 },
|
||||||
|
generated: { line: 2, column: 10 },
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 2, column: 3 },
|
||||||
|
generated: { line: 2, column: 20 },
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 3, column: 1 },
|
||||||
|
generated: { line: 3, column: 1 },
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 3, column: 2 },
|
||||||
|
generated: { line: 3, column: 2 },
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
map = new SourceMapConsumer(map.toString());
|
||||||
|
|
||||||
|
map.computeColumnSpans();
|
||||||
|
|
||||||
|
var mappings = map.allGeneratedPositionsFor({
|
||||||
|
line: 1,
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(mappings.length, 1);
|
||||||
|
// assert.equal(mappings[0].lastColumn, Infinity);
|
||||||
|
|
||||||
|
var mappings = map.allGeneratedPositionsFor({
|
||||||
|
line: 2,
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(mappings.length, 3);
|
||||||
|
assert.equal(mappings[0].lastColumn, 9);
|
||||||
|
assert.equal(mappings[1].lastColumn, 19);
|
||||||
|
assert.equal(mappings[2].lastColumn, Infinity);
|
||||||
|
|
||||||
|
var mappings = map.allGeneratedPositionsFor({
|
||||||
|
line: 3,
|
||||||
|
source: 'foo.coffee'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(mappings.length, 2);
|
||||||
|
assert.equal(mappings[0].lastColumn, 1);
|
||||||
|
assert.equal(mappings[1].lastColumn, Infinity);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test sourceRoot + originalPositionFor'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
sourceRoot: 'foo/bar',
|
||||||
|
file: 'baz.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'bang.coffee'
|
||||||
|
});
|
||||||
|
map = new SourceMapConsumer(map.toString());
|
||||||
|
|
||||||
|
var pos = map.originalPositionFor({
|
||||||
|
line: 2,
|
||||||
|
column: 2,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should always have the prepended source root
|
||||||
|
assert.equal(pos.source, 'foo/bar/bang.coffee');
|
||||||
|
assert.equal(pos.line, 1);
|
||||||
|
assert.equal(pos.column, 1);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test github issue #56'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
sourceRoot: 'http://',
|
||||||
|
file: 'www.example.com/foo.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'www.example.com/original.js'
|
||||||
|
});
|
||||||
|
map = new SourceMapConsumer(map.toString());
|
||||||
|
|
||||||
|
var sources = map.sources;
|
||||||
|
assert.equal(sources.length, 1);
|
||||||
|
assert.equal(sources[0], 'http://www.example.com/original.js');
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test github issue #43'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
sourceRoot: 'http://example.com',
|
||||||
|
file: 'foo.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'http://cdn.example.com/original.js'
|
||||||
|
});
|
||||||
|
map = new SourceMapConsumer(map.toString());
|
||||||
|
|
||||||
|
var sources = map.sources;
|
||||||
|
assert.equal(sources.length, 1,
|
||||||
|
'Should only be one source.');
|
||||||
|
assert.equal(sources[0], 'http://cdn.example.com/original.js',
|
||||||
|
'Should not be joined with the sourceRoot.');
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test absolute path, but same host sources'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
sourceRoot: 'http://example.com/foo/bar',
|
||||||
|
file: 'foo.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: '/original.js'
|
||||||
|
});
|
||||||
|
map = new SourceMapConsumer(map.toString());
|
||||||
|
|
||||||
|
var sources = map.sources;
|
||||||
|
assert.equal(sources.length, 1,
|
||||||
|
'Should only be one source.');
|
||||||
|
assert.equal(sources[0], 'http://example.com/original.js',
|
||||||
|
'Source should be relative the host of the source root.');
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test indexed source map errors when sections are out of order by line'] = function(assert, util) {
|
||||||
|
// Make a deep copy of the indexedTestMap
|
||||||
|
var misorderedIndexedTestMap = JSON.parse(JSON.stringify(util.indexedTestMap));
|
||||||
|
|
||||||
|
misorderedIndexedTestMap.sections[0].offset = {
|
||||||
|
line: 2,
|
||||||
|
column: 0
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.throws(function() {
|
||||||
|
new SourceMapConsumer(misorderedIndexedTestMap);
|
||||||
|
}, Error);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test github issue #64'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer({
|
||||||
|
"version": 3,
|
||||||
|
"file": "foo.js",
|
||||||
|
"sourceRoot": "http://example.com/",
|
||||||
|
"sources": ["/a"],
|
||||||
|
"names": [],
|
||||||
|
"mappings": "AACA",
|
||||||
|
"sourcesContent": ["foo"]
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(map.sourceContentFor("a"), "foo");
|
||||||
|
assert.equal(map.sourceContentFor("/a"), "foo");
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test bug 885597'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer({
|
||||||
|
"version": 3,
|
||||||
|
"file": "foo.js",
|
||||||
|
"sourceRoot": "file:///Users/AlGore/Invented/The/Internet/",
|
||||||
|
"sources": ["/a"],
|
||||||
|
"names": [],
|
||||||
|
"mappings": "AACA",
|
||||||
|
"sourcesContent": ["foo"]
|
||||||
|
});
|
||||||
|
|
||||||
|
var s = map.sources[0];
|
||||||
|
assert.equal(map.sourceContentFor(s), "foo");
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test github issue #72, duplicate sources'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer({
|
||||||
|
"version": 3,
|
||||||
|
"file": "foo.js",
|
||||||
|
"sources": ["source1.js", "source1.js", "source3.js"],
|
||||||
|
"names": [],
|
||||||
|
"mappings": ";EAAC;;IAEE;;MEEE",
|
||||||
|
"sourceRoot": "http://example.com"
|
||||||
|
});
|
||||||
|
|
||||||
|
var pos = map.originalPositionFor({
|
||||||
|
line: 2,
|
||||||
|
column: 2
|
||||||
|
});
|
||||||
|
assert.equal(pos.source, 'http://example.com/source1.js');
|
||||||
|
assert.equal(pos.line, 1);
|
||||||
|
assert.equal(pos.column, 1);
|
||||||
|
|
||||||
|
var pos = map.originalPositionFor({
|
||||||
|
line: 4,
|
||||||
|
column: 4
|
||||||
|
});
|
||||||
|
assert.equal(pos.source, 'http://example.com/source1.js');
|
||||||
|
assert.equal(pos.line, 3);
|
||||||
|
assert.equal(pos.column, 3);
|
||||||
|
|
||||||
|
var pos = map.originalPositionFor({
|
||||||
|
line: 6,
|
||||||
|
column: 6
|
||||||
|
});
|
||||||
|
assert.equal(pos.source, 'http://example.com/source3.js');
|
||||||
|
assert.equal(pos.line, 5);
|
||||||
|
assert.equal(pos.column, 5);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test github issue #72, duplicate names'] = function (assert, util) {
|
||||||
|
var map = new SourceMapConsumer({
|
||||||
|
"version": 3,
|
||||||
|
"file": "foo.js",
|
||||||
|
"sources": ["source.js"],
|
||||||
|
"names": ["name1", "name1", "name3"],
|
||||||
|
"mappings": ";EAACA;;IAEEA;;MAEEE",
|
||||||
|
"sourceRoot": "http://example.com"
|
||||||
|
});
|
||||||
|
|
||||||
|
var pos = map.originalPositionFor({
|
||||||
|
line: 2,
|
||||||
|
column: 2
|
||||||
|
});
|
||||||
|
assert.equal(pos.name, 'name1');
|
||||||
|
assert.equal(pos.line, 1);
|
||||||
|
assert.equal(pos.column, 1);
|
||||||
|
|
||||||
|
var pos = map.originalPositionFor({
|
||||||
|
line: 4,
|
||||||
|
column: 4
|
||||||
|
});
|
||||||
|
assert.equal(pos.name, 'name1');
|
||||||
|
assert.equal(pos.line, 3);
|
||||||
|
assert.equal(pos.column, 3);
|
||||||
|
|
||||||
|
var pos = map.originalPositionFor({
|
||||||
|
line: 6,
|
||||||
|
column: 6
|
||||||
|
});
|
||||||
|
assert.equal(pos.name, 'name3');
|
||||||
|
assert.equal(pos.line, 5);
|
||||||
|
assert.equal(pos.column, 5);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test SourceMapConsumer.fromSourceMap'] = function (assert, util) {
|
||||||
|
var smg = new SourceMapGenerator({
|
||||||
|
sourceRoot: 'http://example.com/',
|
||||||
|
file: 'foo.js'
|
||||||
|
});
|
||||||
|
smg.addMapping({
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
generated: { line: 2, column: 2 },
|
||||||
|
source: 'bar.js'
|
||||||
|
});
|
||||||
|
smg.addMapping({
|
||||||
|
original: { line: 2, column: 2 },
|
||||||
|
generated: { line: 4, column: 4 },
|
||||||
|
source: 'baz.js',
|
||||||
|
name: 'dirtMcGirt'
|
||||||
|
});
|
||||||
|
smg.setSourceContent('baz.js', 'baz.js content');
|
||||||
|
|
||||||
|
var smc = SourceMapConsumer.fromSourceMap(smg);
|
||||||
|
assert.equal(smc.file, 'foo.js');
|
||||||
|
assert.equal(smc.sourceRoot, 'http://example.com/');
|
||||||
|
assert.equal(smc.sources.length, 2);
|
||||||
|
assert.equal(smc.sources[0], 'http://example.com/bar.js');
|
||||||
|
assert.equal(smc.sources[1], 'http://example.com/baz.js');
|
||||||
|
assert.equal(smc.sourceContentFor('baz.js'), 'baz.js content');
|
||||||
|
|
||||||
|
var pos = smc.originalPositionFor({
|
||||||
|
line: 2,
|
||||||
|
column: 2
|
||||||
|
});
|
||||||
|
assert.equal(pos.line, 1);
|
||||||
|
assert.equal(pos.column, 1);
|
||||||
|
assert.equal(pos.source, 'http://example.com/bar.js');
|
||||||
|
assert.equal(pos.name, null);
|
||||||
|
|
||||||
|
pos = smc.generatedPositionFor({
|
||||||
|
line: 1,
|
||||||
|
column: 1,
|
||||||
|
source: 'http://example.com/bar.js'
|
||||||
|
});
|
||||||
|
assert.equal(pos.line, 2);
|
||||||
|
assert.equal(pos.column, 2);
|
||||||
|
|
||||||
|
pos = smc.originalPositionFor({
|
||||||
|
line: 4,
|
||||||
|
column: 4
|
||||||
|
});
|
||||||
|
assert.equal(pos.line, 2);
|
||||||
|
assert.equal(pos.column, 2);
|
||||||
|
assert.equal(pos.source, 'http://example.com/baz.js');
|
||||||
|
assert.equal(pos.name, 'dirtMcGirt');
|
||||||
|
|
||||||
|
pos = smc.generatedPositionFor({
|
||||||
|
line: 2,
|
||||||
|
column: 2,
|
||||||
|
source: 'http://example.com/baz.js'
|
||||||
|
});
|
||||||
|
assert.equal(pos.line, 4);
|
||||||
|
assert.equal(pos.column, 4);
|
||||||
|
};
|
||||||
|
});
|
||||||
|
function run_test() {
|
||||||
|
runSourceMapTests('test/source-map/test-source-map-consumer', do_throw);
|
||||||
|
}
|
||||||
687
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_source_map_generator.js
generated
vendored
Normal file
@@ -0,0 +1,687 @@
/*
* WARNING!
*
* Do not edit this file directly, it is built from the sources at
* https://github.com/mozilla/source-map/
*/

Components.utils.import('resource://test/Utils.jsm');
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
define("test/source-map/test-source-map-generator", ["require", "exports", "module"], function (require, exports, module) {

var SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;
var SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
var SourceNode = require('source-map/source-node').SourceNode;
var util = require('source-map/util');

exports['test some simple stuff'] = function (assert, util) {
var map = new SourceMapGenerator({
file: 'foo.js',
sourceRoot: '.'
});
assert.ok(true);

var map = new SourceMapGenerator().toJSON();
assert.ok(!('file' in map));
assert.ok(!('sourceRoot' in map));
};
|
||||||
|
|
||||||
|
exports['test JSON serialization'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'foo.js',
|
||||||
|
sourceRoot: '.'
|
||||||
|
});
|
||||||
|
assert.equal(map.toString(), JSON.stringify(map));
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test adding mappings (case 1)'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated-foo.js',
|
||||||
|
sourceRoot: '.'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 1 }
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test adding mappings (case 2)'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated-foo.js',
|
||||||
|
sourceRoot: '.'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
source: 'bar.js',
|
||||||
|
original: { line: 1, column: 1 }
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test adding mappings (case 3)'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated-foo.js',
|
||||||
|
sourceRoot: '.'
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
source: 'bar.js',
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
name: 'someToken'
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test adding mappings (invalid)'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated-foo.js',
|
||||||
|
sourceRoot: '.'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Not enough info.
|
||||||
|
assert.throws(function () {
|
||||||
|
map.addMapping({});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Original file position, but no source.
|
||||||
|
assert.throws(function () {
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
original: { line: 1, column: 1 }
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test adding mappings with skipValidation'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated-foo.js',
|
||||||
|
sourceRoot: '.',
|
||||||
|
skipValidation: true
|
||||||
|
});
|
||||||
|
|
||||||
|
// Not enough info, caught by `util.getArgs`
|
||||||
|
assert.throws(function () {
|
||||||
|
map.addMapping({});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Original file position, but no source. Not checked.
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
original: { line: 1, column: 1 }
|
||||||
|
});
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that the correct mappings are being generated'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'min.js',
|
||||||
|
sourceRoot: '/the/root'
|
||||||
|
});
|
||||||
|
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
source: 'one.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 5 },
|
||||||
|
original: { line: 1, column: 5 },
|
||||||
|
source: 'one.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 9 },
|
||||||
|
original: { line: 1, column: 11 },
|
||||||
|
source: 'one.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 18 },
|
||||||
|
original: { line: 1, column: 21 },
|
||||||
|
source: 'one.js',
|
||||||
|
name: 'bar'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 21 },
|
||||||
|
original: { line: 2, column: 3 },
|
||||||
|
source: 'one.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 28 },
|
||||||
|
original: { line: 2, column: 10 },
|
||||||
|
source: 'one.js',
|
||||||
|
name: 'baz'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 32 },
|
||||||
|
original: { line: 2, column: 14 },
|
||||||
|
source: 'one.js',
|
||||||
|
name: 'bar'
|
||||||
|
});
|
||||||
|
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 2, column: 1 },
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
source: 'two.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 2, column: 5 },
|
||||||
|
original: { line: 1, column: 5 },
|
||||||
|
source: 'two.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 2, column: 9 },
|
||||||
|
original: { line: 1, column: 11 },
|
||||||
|
source: 'two.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 2, column: 18 },
|
||||||
|
original: { line: 1, column: 21 },
|
||||||
|
source: 'two.js',
|
||||||
|
name: 'n'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 2, column: 21 },
|
||||||
|
original: { line: 2, column: 3 },
|
||||||
|
source: 'two.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 2, column: 28 },
|
||||||
|
original: { line: 2, column: 10 },
|
||||||
|
source: 'two.js',
|
||||||
|
name: 'n'
|
||||||
|
});
|
||||||
|
|
||||||
|
map = JSON.parse(map.toString());
|
||||||
|
|
||||||
|
util.assertEqualMaps(assert, map, util.testMap);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that adding a mapping with an empty string name does not break generation'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'generated-foo.js',
|
||||||
|
sourceRoot: '.'
|
||||||
|
});
|
||||||
|
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
source: 'bar.js',
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
name: ''
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.doesNotThrow(function () {
|
||||||
|
JSON.parse(map.toString());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test that source content can be set'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'min.js',
|
||||||
|
sourceRoot: '/the/root'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
source: 'one.js'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 2, column: 1 },
|
||||||
|
original: { line: 1, column: 1 },
|
||||||
|
source: 'two.js'
|
||||||
|
});
|
||||||
|
map.setSourceContent('one.js', 'one file content');
|
||||||
|
|
||||||
|
map = JSON.parse(map.toString());
|
||||||
|
assert.equal(map.sources[0], 'one.js');
|
||||||
|
assert.equal(map.sources[1], 'two.js');
|
||||||
|
assert.equal(map.sourcesContent[0], 'one file content');
|
||||||
|
assert.equal(map.sourcesContent[1], null);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test .fromSourceMap'] = function (assert, util) {
|
||||||
|
var map = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(util.testMap));
|
||||||
|
util.assertEqualMaps(assert, map.toJSON(), util.testMap);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test .fromSourceMap with sourcesContent'] = function (assert, util) {
|
||||||
|
var map = SourceMapGenerator.fromSourceMap(
|
||||||
|
new SourceMapConsumer(util.testMapWithSourcesContent));
|
||||||
|
util.assertEqualMaps(assert, map.toJSON(), util.testMapWithSourcesContent);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test applySourceMap'] = function (assert, util) {
|
||||||
|
var node = new SourceNode(null, null, null, [
|
||||||
|
new SourceNode(2, 0, 'fileX', 'lineX2\n'),
|
||||||
|
'genA1\n',
|
||||||
|
new SourceNode(2, 0, 'fileY', 'lineY2\n'),
|
||||||
|
'genA2\n',
|
||||||
|
new SourceNode(1, 0, 'fileX', 'lineX1\n'),
|
||||||
|
'genA3\n',
|
||||||
|
new SourceNode(1, 0, 'fileY', 'lineY1\n')
|
||||||
|
]);
|
||||||
|
var mapStep1 = node.toStringWithSourceMap({
|
||||||
|
file: 'fileA'
|
||||||
|
}).map;
|
||||||
|
mapStep1.setSourceContent('fileX', 'lineX1\nlineX2\n');
|
||||||
|
mapStep1 = mapStep1.toJSON();
|
||||||
|
|
||||||
|
node = new SourceNode(null, null, null, [
|
||||||
|
'gen1\n',
|
||||||
|
new SourceNode(1, 0, 'fileA', 'lineA1\n'),
|
||||||
|
new SourceNode(2, 0, 'fileA', 'lineA2\n'),
|
||||||
|
new SourceNode(3, 0, 'fileA', 'lineA3\n'),
|
||||||
|
new SourceNode(4, 0, 'fileA', 'lineA4\n'),
|
||||||
|
new SourceNode(1, 0, 'fileB', 'lineB1\n'),
|
||||||
|
new SourceNode(2, 0, 'fileB', 'lineB2\n'),
|
||||||
|
'gen2\n'
|
||||||
|
]);
|
||||||
|
var mapStep2 = node.toStringWithSourceMap({
|
||||||
|
file: 'fileGen'
|
||||||
|
}).map;
|
||||||
|
mapStep2.setSourceContent('fileB', 'lineB1\nlineB2\n');
|
||||||
|
mapStep2 = mapStep2.toJSON();
|
||||||
|
|
||||||
|
node = new SourceNode(null, null, null, [
|
||||||
|
'gen1\n',
|
||||||
|
new SourceNode(2, 0, 'fileX', 'lineA1\n'),
|
||||||
|
new SourceNode(2, 0, 'fileA', 'lineA2\n'),
|
||||||
|
new SourceNode(2, 0, 'fileY', 'lineA3\n'),
|
||||||
|
new SourceNode(4, 0, 'fileA', 'lineA4\n'),
|
||||||
|
new SourceNode(1, 0, 'fileB', 'lineB1\n'),
|
||||||
|
new SourceNode(2, 0, 'fileB', 'lineB2\n'),
|
||||||
|
'gen2\n'
|
||||||
|
]);
|
||||||
|
var expectedMap = node.toStringWithSourceMap({
|
||||||
|
file: 'fileGen'
|
||||||
|
}).map;
|
||||||
|
expectedMap.setSourceContent('fileX', 'lineX1\nlineX2\n');
|
||||||
|
expectedMap.setSourceContent('fileB', 'lineB1\nlineB2\n');
|
||||||
|
expectedMap = expectedMap.toJSON();
|
||||||
|
|
||||||
|
// apply source map "mapStep1" to "mapStep2"
|
||||||
|
var generator = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(mapStep2));
|
||||||
|
generator.applySourceMap(new SourceMapConsumer(mapStep1));
|
||||||
|
var actualMap = generator.toJSON();
|
||||||
|
|
||||||
|
util.assertEqualMaps(assert, actualMap, expectedMap);
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test applySourceMap throws when file is missing'] = function (assert, util) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'test.js'
|
||||||
|
});
|
||||||
|
var map2 = new SourceMapGenerator();
|
||||||
|
assert.throws(function() {
|
||||||
|
map.applySourceMap(new SourceMapConsumer(map2.toJSON()));
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
exports['test the two additional parameters of applySourceMap'] = function (assert, util) {
|
||||||
|
// Assume the following directory structure:
|
||||||
|
//
|
||||||
|
// http://foo.org/
|
||||||
|
// bar.coffee
|
||||||
|
// app/
|
||||||
|
// coffee/
|
||||||
|
// foo.coffee
|
||||||
|
// temp/
|
||||||
|
// bundle.js
|
||||||
|
// temp_maps/
|
||||||
|
// bundle.js.map
|
||||||
|
// public/
|
||||||
|
// bundle.min.js
|
||||||
|
// bundle.min.js.map
|
||||||
|
//
|
||||||
|
// http://www.example.com/
|
||||||
|
// baz.coffee
|
||||||
|
|
||||||
|
var bundleMap = new SourceMapGenerator({
|
||||||
|
file: 'bundle.js'
|
||||||
|
});
|
||||||
|
bundleMap.addMapping({
|
||||||
|
generated: { line: 3, column: 3 },
|
||||||
|
original: { line: 2, column: 2 },
|
||||||
|
source: '../../coffee/foo.coffee'
|
||||||
|
});
|
||||||
|
bundleMap.setSourceContent('../../coffee/foo.coffee', 'foo coffee');
|
||||||
|
bundleMap.addMapping({
|
||||||
|
generated: { line: 13, column: 13 },
|
||||||
|
original: { line: 12, column: 12 },
|
||||||
|
source: '/bar.coffee'
|
||||||
|
});
|
||||||
|
bundleMap.setSourceContent('/bar.coffee', 'bar coffee');
|
||||||
|
bundleMap.addMapping({
|
||||||
|
generated: { line: 23, column: 23 },
|
||||||
|
original: { line: 22, column: 22 },
|
||||||
|
source: 'http://www.example.com/baz.coffee'
|
||||||
|
});
|
||||||
|
bundleMap.setSourceContent(
|
||||||
|
'http://www.example.com/baz.coffee',
|
||||||
|
'baz coffee'
|
||||||
|
);
|
||||||
|
bundleMap = new SourceMapConsumer(bundleMap.toJSON());
|
||||||
|
|
||||||
|
var minifiedMap = new SourceMapGenerator({
|
||||||
|
file: 'bundle.min.js',
|
||||||
|
sourceRoot: '..'
|
||||||
|
});
|
||||||
|
minifiedMap.addMapping({
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
original: { line: 3, column: 3 },
|
||||||
|
source: 'temp/bundle.js'
|
||||||
|
});
|
||||||
|
minifiedMap.addMapping({
|
||||||
|
generated: { line: 11, column: 11 },
|
||||||
|
original: { line: 13, column: 13 },
|
||||||
|
source: 'temp/bundle.js'
|
||||||
|
});
|
||||||
|
minifiedMap.addMapping({
|
||||||
|
generated: { line: 21, column: 21 },
|
||||||
|
original: { line: 23, column: 23 },
|
||||||
|
source: 'temp/bundle.js'
|
||||||
|
});
|
||||||
|
minifiedMap = new SourceMapConsumer(minifiedMap.toJSON());
|
||||||
|
|
||||||
|
var expectedMap = function (sources) {
|
||||||
|
var map = new SourceMapGenerator({
|
||||||
|
file: 'bundle.min.js',
|
||||||
|
sourceRoot: '..'
|
||||||
|
});
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 1, column: 1 },
|
||||||
|
original: { line: 2, column: 2 },
|
||||||
|
source: sources[0]
|
||||||
|
});
|
||||||
|
map.setSourceContent(sources[0], 'foo coffee');
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 11, column: 11 },
|
||||||
|
original: { line: 12, column: 12 },
|
||||||
|
source: sources[1]
|
||||||
|
});
|
||||||
|
map.setSourceContent(sources[1], 'bar coffee');
|
||||||
|
map.addMapping({
|
||||||
|
generated: { line: 21, column: 21 },
|
||||||
|
original: { line: 22, column: 22 },
|
||||||
|
source: sources[2]
|
||||||
|
});
|
||||||
|
map.setSourceContent(sources[2], 'baz coffee');
|
||||||
|
return map.toJSON();
|
||||||
|
}
|
||||||
|
|
||||||
|
var actualMap = function (aSourceMapPath) {
|
||||||
|
var map = SourceMapGenerator.fromSourceMap(minifiedMap);
|
||||||
|
// Note that relying on `bundleMap.file` (which is simply 'bundle.js')
|
||||||
|
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_source_node.js (new file, 620 lines, generated, vendored)
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/dist/test/test_util.js (new file, 224 lines, generated, vendored)
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map.js (new file, 8 lines, generated, vendored)
/*
 * Copyright 2009-2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE.txt or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
exports.SourceMapGenerator = require('./source-map/source-map-generator').SourceMapGenerator;
exports.SourceMapConsumer = require('./source-map/source-map-consumer').SourceMapConsumer;
exports.SourceNode = require('./source-map/source-node').SourceNode;
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map/array-set.js (new file, 97 lines, generated, vendored)
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map/base64-vlq.js (new file, 141 lines, generated, vendored)
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map/base64.js (new file, 42 lines, generated, vendored)
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var charToIntMap = {};
  var intToCharMap = {};

  'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
    .split('')
    .forEach(function (ch, index) {
      charToIntMap[ch] = index;
      intToCharMap[index] = ch;
    });

  /**
   * Encode an integer in the range of 0 to 63 to a single base 64 digit.
   */
  exports.encode = function base64_encode(aNumber) {
    if (aNumber in intToCharMap) {
      return intToCharMap[aNumber];
    }
    throw new TypeError("Must be between 0 and 63: " + aNumber);
  };

  /**
   * Decode a single base 64 digit to an integer.
   */
  exports.decode = function base64_decode(aChar) {
    if (aChar in charToIntMap) {
      return charToIntMap[aChar];
    }
    throw new TypeError("Not a valid base 64 digit: " + aChar);
  };

});
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map/binary-search.js (new file, 117 lines, generated, vendored)
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
    var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  exports.GREATEST_LOWER_BOUND = 1;
  exports.LEAST_UPPER_BOUND = 2;

  /**
   * Recursive implementation of binary search.
   *
   * @param aLow Indices here and lower do not contain the needle.
   * @param aHigh Indices here and higher do not contain the needle.
   * @param aNeedle The element being searched for.
   * @param aHaystack The non-empty array being searched.
   * @param aCompare Function which takes two elements and returns -1, 0, or 1.
   * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
   *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
   *     closest element that is smaller than or greater than the one we are
   *     searching for, respectively, if the exact element cannot be found.
   */
  function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {
    // This function terminates when one of the following is true:
    //
    //   1. We find the exact element we are looking for.
    //
    //   2. We did not find the exact element, but we can return the index of
    //      the next-closest element.
    //
    //   3. We did not find the exact element, and there is no next-closest
    //      element than the one we are searching for, so we return -1.
    var mid = Math.floor((aHigh - aLow) / 2) + aLow;
    var cmp = aCompare(aNeedle, aHaystack[mid], true);
    if (cmp === 0) {
      // Found the element we are looking for.
      return mid;
    }
    else if (cmp > 0) {
      // Our needle is greater than aHaystack[mid].
      if (aHigh - mid > 1) {
        // The element is in the upper half.
        return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);
      }

      // The exact needle element was not found in this haystack. Determine if
      // we are in termination case (3) or (2) and return the appropriate thing.
      if (aBias == exports.LEAST_UPPER_BOUND) {
        return aHigh < aHaystack.length ? aHigh : -1;
      } else {
        return mid;
      }
    }
    else {
      // Our needle is less than aHaystack[mid].
      if (mid - aLow > 1) {
        // The element is in the lower half.
        return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);
      }

      // we are in termination case (3) or (2) and return the appropriate thing.
      if (aBias == exports.LEAST_UPPER_BOUND) {
        return mid;
      } else {
        return aLow < 0 ? -1 : aLow;
      }
    }
  }

  /**
   * This is an implementation of binary search which will always try and return
   * the index of the closest element if there is no exact hit. This is because
   * mappings between original and generated line/col pairs are single points,
   * and there is an implicit region between each of them, so a miss just means
   * that you aren't on the very start of a region.
   *
   * @param aNeedle The element you are looking for.
   * @param aHaystack The array that is being searched.
   * @param aCompare A function which takes the needle and an element in the
   *     array and returns -1, 0, or 1 depending on whether the needle is less
   *     than, equal to, or greater than the element, respectively.
   * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
   *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
   *     closest element that is smaller than or greater than the one we are
   *     searching for, respectively, if the exact element cannot be found.
   *     Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.
   */
  exports.search = function search(aNeedle, aHaystack, aCompare, aBias) {
    if (aHaystack.length === 0) {
      return -1;
    }

    var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,
                                aCompare, aBias || exports.GREATEST_LOWER_BOUND);
    if (index < 0) {
      return -1;
    }

    // We have found either the exact element, or the next-closest element than
    // the one we are searching for. However, there may be more than one such
    // element. Make sure we always return the smallest of these.
    while (index - 1 >= 0) {
      if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {
        break;
      }
      --index;
    }

    return index;
  };

});
86
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map/mapping-list.js
generated
vendored
Normal file
@@ -0,0 +1,86 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2014 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
    var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var util = require('./util');

  /**
   * Determine whether mappingB is after mappingA with respect to generated
   * position.
   */
  function generatedPositionAfter(mappingA, mappingB) {
    // Optimized for most common case
    var lineA = mappingA.generatedLine;
    var lineB = mappingB.generatedLine;
    var columnA = mappingA.generatedColumn;
    var columnB = mappingB.generatedColumn;
    return lineB > lineA || lineB == lineA && columnB >= columnA ||
           util.compareByGeneratedPositions(mappingA, mappingB) <= 0;
  }

  /**
   * A data structure to provide a sorted view of accumulated mappings in a
   * performance conscious manner. It trades a neglibable overhead in general
   * case for a large speedup in case of mappings being added in order.
   */
  function MappingList() {
    this._array = [];
    this._sorted = true;
    // Serves as infimum
    this._last = {generatedLine: -1, generatedColumn: 0};
  }

  /**
   * Iterate through internal items. This method takes the same arguments that
   * `Array.prototype.forEach` takes.
   *
   * NOTE: The order of the mappings is NOT guaranteed.
   */
  MappingList.prototype.unsortedForEach =
    function MappingList_forEach(aCallback, aThisArg) {
      this._array.forEach(aCallback, aThisArg);
    };

  /**
   * Add the given source mapping.
   *
   * @param Object aMapping
   */
  MappingList.prototype.add = function MappingList_add(aMapping) {
    var mapping;
    if (generatedPositionAfter(this._last, aMapping)) {
      this._last = aMapping;
      this._array.push(aMapping);
    } else {
      this._sorted = false;
      this._array.push(aMapping);
    }
  };

  /**
   * Returns the flat, sorted array of mappings. The mappings are sorted by
   * generated position.
   *
   * WARNING: This method returns internal data without copying, for
   * performance. The return value must NOT be mutated, and should be treated as
   * an immutable borrow. If you want to take ownership, you must make your own
   * copy.
   */
  MappingList.prototype.toArray = function MappingList_toArray() {
    if (!this._sorted) {
      this._array.sort(util.compareByGeneratedPositions);
      this._sorted = true;
    }
    return this._array;
  };

  exports.MappingList = MappingList;

});
958
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map/source-map-consumer.js
generated
vendored
Normal file
@@ -0,0 +1,958 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
    var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var util = require('./util');
  var binarySearch = require('./binary-search');
  var ArraySet = require('./array-set').ArraySet;
  var base64VLQ = require('./base64-vlq');

  function SourceMapConsumer(aSourceMap) {
    var sourceMap = aSourceMap;
    if (typeof aSourceMap === 'string') {
      sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
    }

    return sourceMap.sections != null
      ? new IndexedSourceMapConsumer(sourceMap)
      : new BasicSourceMapConsumer(sourceMap);
  }

  SourceMapConsumer.fromSourceMap = function(aSourceMap) {
    return BasicSourceMapConsumer.fromSourceMap(aSourceMap);
  }

  /**
   * The version of the source mapping spec that we are consuming.
   */
  SourceMapConsumer.prototype._version = 3;

  // `__generatedMappings` and `__originalMappings` are arrays that hold the
  // parsed mapping coordinates from the source map's "mappings" attribute. They
  // are lazily instantiated, accessed via the `_generatedMappings` and
  // `_originalMappings` getters respectively, and we only parse the mappings
  // and create these arrays once queried for a source location. We jump through
  // these hoops because there can be many thousands of mappings, and parsing
  // them is expensive, so we only want to do it if we must.
  //
  // Each object in the arrays is of the form:
  //
  //   {
  //     generatedLine: The line number in the generated code,
  //     generatedColumn: The column number in the generated code,
  //     source: The path to the original source file that generated this
  //             chunk of code,
  //     originalLine: The line number in the original source that
  //                   corresponds to this chunk of generated code,
  //     originalColumn: The column number in the original source that
  //                     corresponds to this chunk of generated code,
  //     name: The name of the original symbol which generated this chunk of
  //           code.
  //   }
  //
  // All properties except for `generatedLine` and `generatedColumn` can be
  // `null`.
  //
  // `_generatedMappings` is ordered by the generated positions.
  //
  // `_originalMappings` is ordered by the original positions.

  SourceMapConsumer.prototype.__generatedMappings = null;
  Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {
    get: function () {
      if (!this.__generatedMappings) {
        this.__generatedMappings = [];
        this.__originalMappings = [];
        this._parseMappings(this._mappings, this.sourceRoot);
      }

      return this.__generatedMappings;
    }
  });

  SourceMapConsumer.prototype.__originalMappings = null;
  Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {
    get: function () {
      if (!this.__originalMappings) {
        this.__generatedMappings = [];
        this.__originalMappings = [];
        this._parseMappings(this._mappings, this.sourceRoot);
      }

      return this.__originalMappings;
    }
  });

  SourceMapConsumer.prototype._nextCharIsMappingSeparator =
    function SourceMapConsumer_nextCharIsMappingSeparator(aStr, index) {
      var c = aStr.charAt(index);
      return c === ";" || c === ",";
    };

  /**
   * Parse the mappings in a string in to a data structure which we can easily
   * query (the ordered arrays in the `this.__generatedMappings` and
   * `this.__originalMappings` properties).
   */
  SourceMapConsumer.prototype._parseMappings =
    function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
      throw new Error("Subclasses must implement _parseMappings");
    };

  SourceMapConsumer.GENERATED_ORDER = 1;
  SourceMapConsumer.ORIGINAL_ORDER = 2;

  SourceMapConsumer.GREATEST_LOWER_BOUND = 1;
  SourceMapConsumer.LEAST_UPPER_BOUND = 2;

  /**
   * Iterate over each mapping between an original source/line/column and a
   * generated line/column in this source map.
   *
   * @param Function aCallback
   *        The function that is called with each mapping.
   * @param Object aContext
   *        Optional. If specified, this object will be the value of `this` every
   *        time that `aCallback` is called.
   * @param aOrder
   *        Either `SourceMapConsumer.GENERATED_ORDER` or
   *        `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
   *        iterate over the mappings sorted by the generated file's line/column
   *        order or the original's source/line/column order, respectively. Defaults to
   *        `SourceMapConsumer.GENERATED_ORDER`.
   */
  SourceMapConsumer.prototype.eachMapping =
    function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {
      var context = aContext || null;
      var order = aOrder || SourceMapConsumer.GENERATED_ORDER;

      var mappings;
      switch (order) {
      case SourceMapConsumer.GENERATED_ORDER:
        mappings = this._generatedMappings;
        break;
      case SourceMapConsumer.ORIGINAL_ORDER:
        mappings = this._originalMappings;
        break;
      default:
        throw new Error("Unknown order of iteration.");
      }

      var sourceRoot = this.sourceRoot;
      mappings.map(function (mapping) {
        var source = mapping.source;
        if (source != null && sourceRoot != null) {
          source = util.join(sourceRoot, source);
        }
        return {
          source: source,
          generatedLine: mapping.generatedLine,
          generatedColumn: mapping.generatedColumn,
          originalLine: mapping.originalLine,
          originalColumn: mapping.originalColumn,
          name: mapping.name
        };
      }).forEach(aCallback, context);
    };

  /**
   * Returns all generated line and column information for the original source,
   * line, and column provided. If no column is provided, returns all mappings
   * corresponding to a single line. Otherwise, returns all mappings
   * corresponding to a single line and column.
   *
   * The only argument is an object with the following properties:
   *
   *   - source: The filename of the original source.
   *   - line: The line number in the original source.
   *   - column: Optional. the column number in the original source.
   *
   * and an array of objects is returned, each with the following properties:
   *
   *   - line: The line number in the generated source, or null.
   *   - column: The column number in the generated source, or null.
   */
  SourceMapConsumer.prototype.allGeneratedPositionsFor =
    function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {
      // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping
      // returns the index of the closest mapping less than the needle. By
      // setting needle.originalColumn to 0, we thus find the last mapping for
      // the given line, provided such a mapping exists.
      var needle = {
        source: util.getArg(aArgs, 'source'),
        originalLine: util.getArg(aArgs, 'line'),
        originalColumn: util.getArg(aArgs, 'column', 0)
      };

      if (this.sourceRoot != null) {
        needle.source = util.relative(this.sourceRoot, needle.source);
      }

      var mappings = [];

      var index = this._findMapping(needle,
                                    this._originalMappings,
                                    "originalLine",
                                    "originalColumn",
                                    util.compareByOriginalPositions,
                                    binarySearch.LEAST_UPPER_BOUND);
      if (index >= 0) {
        var mapping = this._originalMappings[index];
        var originalLine = mapping.originalLine;
        var originalColumn = mapping.originalColumn;

        // Iterate until either we run out of mappings, or we run into
        // a mapping for a different line. Since mappings are sorted, this is
        // guaranteed to find all mappings for the line we are searching for.
        while (mapping && mapping.originalLine === originalLine &&
               (aArgs.column === undefined ||
                mapping.originalColumn === originalColumn)) {
          mappings.push({
            line: util.getArg(mapping, 'generatedLine', null),
            column: util.getArg(mapping, 'generatedColumn', null),
            lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)
          });

          mapping = this._originalMappings[++index];
        }
      }

      return mappings;
    };

  exports.SourceMapConsumer = SourceMapConsumer;

  /**
   * A BasicSourceMapConsumer instance represents a parsed source map which we can
   * query for information about the original file positions by giving it a file
   * position in the generated source.
   *
   * The only parameter is the raw source map (either as a JSON string, or
   * already parsed to an object). According to the spec, source maps have the
   * following attributes:
   *
   *   - version: Which version of the source map spec this map is following.
   *   - sources: An array of URLs to the original source files.
   *   - names: An array of identifiers which can be referrenced by individual mappings.
   *   - sourceRoot: Optional. The URL root from which all sources are relative.
   *   - sourcesContent: Optional. An array of contents of the original source files.
   *   - mappings: A string of base64 VLQs which contain the actual mappings.
   *   - file: Optional. The generated file this source map is associated with.
   *
   * Here is an example source map, taken from the source map spec[0]:
   *
   *     {
   *       version : 3,
   *       file: "out.js",
   *       sourceRoot : "",
   *       sources: ["foo.js", "bar.js"],
   *       names: ["src", "maps", "are", "fun"],
   *       mappings: "AA,AB;;ABCDE;"
   *     }
   *
   * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#
   */
  function BasicSourceMapConsumer(aSourceMap) {
    var sourceMap = aSourceMap;
    if (typeof aSourceMap === 'string') {
      sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
    }

    var version = util.getArg(sourceMap, 'version');
    var sources = util.getArg(sourceMap, 'sources');
    // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which
    // requires the array) to play nice here.
    var names = util.getArg(sourceMap, 'names', []);
    var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);
    var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);
    var mappings = util.getArg(sourceMap, 'mappings');
    var file = util.getArg(sourceMap, 'file', null);

    // Once again, Sass deviates from the spec and supplies the version as a
    // string rather than a number, so we use loose equality checking here.
    if (version != this._version) {
      throw new Error('Unsupported version: ' + version);
    }

    // Some source maps produce relative source paths like "./foo.js" instead of
    // "foo.js". Normalize these first so that future comparisons will succeed.
    // See bugzil.la/1090768.
    sources = sources.map(util.normalize);

    // Pass `true` below to allow duplicate names and sources. While source maps
    // are intended to be compressed and deduplicated, the TypeScript compiler
    // sometimes generates source maps with duplicates in them. See Github issue
    // #72 and bugzil.la/889492.
    this._names = ArraySet.fromArray(names, true);
    this._sources = ArraySet.fromArray(sources, true);

    this.sourceRoot = sourceRoot;
    this.sourcesContent = sourcesContent;
    this._mappings = mappings;
    this.file = file;
  }

  BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);
  BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;

  /**
   * Create a BasicSourceMapConsumer from a SourceMapGenerator.
   *
   * @param SourceMapGenerator aSourceMap
   *        The source map that will be consumed.
   * @returns BasicSourceMapConsumer
   */
  BasicSourceMapConsumer.fromSourceMap =
    function SourceMapConsumer_fromSourceMap(aSourceMap) {
      var smc = Object.create(BasicSourceMapConsumer.prototype);

      smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);
      smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);
      smc.sourceRoot = aSourceMap._sourceRoot;
      smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),
                                                              smc.sourceRoot);
      smc.file = aSourceMap._file;

      smc.__generatedMappings = aSourceMap._mappings.toArray().slice();
      smc.__originalMappings = aSourceMap._mappings.toArray().slice()
        .sort(util.compareByOriginalPositions);

      return smc;
    };

  /**
   * The version of the source mapping spec that we are consuming.
   */
  BasicSourceMapConsumer.prototype._version = 3;

  /**
   * The list of original sources.
   */
  Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {
    get: function () {
      return this._sources.toArray().map(function (s) {
        return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s;
      }, this);
    }
  });

  /**
   * Parse the mappings in a string in to a data structure which we can easily
   * query (the ordered arrays in the `this.__generatedMappings` and
   * `this.__originalMappings` properties).
   */
  BasicSourceMapConsumer.prototype._parseMappings =
    function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
      var generatedLine = 1;
      var previousGeneratedColumn = 0;
      var previousOriginalLine = 0;
      var previousOriginalColumn = 0;
      var previousSource = 0;
      var previousName = 0;
      var length = aStr.length;
      var index = 0;
      var cachedValues = {};
      var temp = {};
      var mapping, str, values, end, value;

      while (index < length) {
        if (aStr.charAt(index) === ';') {
          generatedLine++;
          ++index;
          previousGeneratedColumn = 0;
        }
        else if (aStr.charAt(index) === ',') {
          ++index;
        }
        else {
          mapping = {};
          mapping.generatedLine = generatedLine;

          // Because each offset is encoded relative to the previous one,
          // many segments often have the same encoding. We can exploit this
          // fact by caching the parsed variable length fields of each segment,
          // allowing us to avoid a second parse if we encounter the same
          // segment again.
          for (end = index; end < length; ++end) {
            if (this._nextCharIsMappingSeparator(aStr, end)) {
              break;
            }
          }
          str = aStr.slice(index, end);

          values = cachedValues[str];
          if (values) {
            index += str.length;
          } else {
            values = [];
            while (index < end) {
              base64VLQ.decode(aStr, index, temp);
              value = temp.value;
              index = temp.rest;
              values.push(value);
            }
            cachedValues[str] = values;
          }

          // Generated column.
          mapping.generatedColumn = previousGeneratedColumn + values[0];
          previousGeneratedColumn = mapping.generatedColumn;

          if (values.length > 1) {
            // Original source.
            mapping.source = this._sources.at(previousSource + values[1]);
            previousSource += values[1];
            if (values.length === 2) {
              throw new Error('Found a source, but no line and column');
            }

            // Original line.
            mapping.originalLine = previousOriginalLine + values[2];
            previousOriginalLine = mapping.originalLine;
            // Lines are stored 0-based
            mapping.originalLine += 1;
            if (values.length === 3) {
              throw new Error('Found a source and line, but no column');
            }

            // Original column.
            mapping.originalColumn = previousOriginalColumn + values[3];
            previousOriginalColumn = mapping.originalColumn;

            if (values.length > 4) {
              // Original name.
              mapping.name = this._names.at(previousName + values[4]);
              previousName += values[4];
            }
          }

          this.__generatedMappings.push(mapping);
          if (typeof mapping.originalLine === 'number') {
            this.__originalMappings.push(mapping);
          }
        }
      }

      this.__generatedMappings.sort(util.compareByGeneratedPositions);
      this.__originalMappings.sort(util.compareByOriginalPositions);
    };

  /**
   * Find the mapping that best matches the hypothetical "needle" mapping that
   * we are searching for in the given "haystack" of mappings.
   */
  BasicSourceMapConsumer.prototype._findMapping =
    function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,
                                           aColumnName, aComparator, aBias) {
      // To return the position we are searching for, we must first find the
      // mapping for the given position and then return the opposite position it
      // points to. Because the mappings are sorted, we can use binary search to
      // find the best mapping.

      if (aNeedle[aLineName] <= 0) {
        throw new TypeError('Line must be greater than or equal to 1, got '
                            + aNeedle[aLineName]);
      }
      if (aNeedle[aColumnName] < 0) {
        throw new TypeError('Column must be greater than or equal to 0, got '
                            + aNeedle[aColumnName]);
      }

      return binarySearch.search(aNeedle, aMappings, aComparator, aBias);
    };

  /**
   * Compute the last column for each generated mapping. The last column is
   * inclusive.
   */
  BasicSourceMapConsumer.prototype.computeColumnSpans =
    function SourceMapConsumer_computeColumnSpans() {
      for (var index = 0; index < this._generatedMappings.length; ++index) {
        var mapping = this._generatedMappings[index];

        // Mappings do not contain a field for the last generated columnt. We
        // can come up with an optimistic estimate, however, by assuming that
        // mappings are contiguous (i.e. given two consecutive mappings, the
        // first mapping ends where the second one starts).
        if (index + 1 < this._generatedMappings.length) {
          var nextMapping = this._generatedMappings[index + 1];

          if (mapping.generatedLine === nextMapping.generatedLine) {
            mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;
            continue;
          }
        }

        // The last mapping for each line spans the entire line.
        mapping.lastGeneratedColumn = Infinity;
      }
    };

  /**
   * Returns the original source, line, and column information for the generated
   * source's line and column positions provided. The only argument is an object
   * with the following properties:
   *
   *   - line: The line number in the generated source.
   *   - column: The column number in the generated source.
   *   - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
   *     'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
   *     closest element that is smaller than or greater than the one we are
   *     searching for, respectively, if the exact element cannot be found.
   *     Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
   *
   * and an object is returned with the following properties:
   *
   *   - source: The original source file, or null.
   *   - line: The line number in the original source, or null.
   *   - column: The column number in the original source, or null.
   *   - name: The original identifier, or null.
   */
  BasicSourceMapConsumer.prototype.originalPositionFor =
    function SourceMapConsumer_originalPositionFor(aArgs) {
      var needle = {
        generatedLine: util.getArg(aArgs, 'line'),
        generatedColumn: util.getArg(aArgs, 'column')
      };

      var index = this._findMapping(
        needle,
        this._generatedMappings,
        "generatedLine",
        "generatedColumn",
        util.compareByGeneratedPositions,
        util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)
      );

      if (index >= 0) {
        var mapping = this._generatedMappings[index];

        if (mapping.generatedLine === needle.generatedLine) {
          var source = util.getArg(mapping, 'source', null);
          if (source != null && this.sourceRoot != null) {
            source = util.join(this.sourceRoot, source);
          }
          return {
            source: source,
            line: util.getArg(mapping, 'originalLine', null),
            column: util.getArg(mapping, 'originalColumn', null),
            name: util.getArg(mapping, 'name', null)
          };
        }
      }

      return {
        source: null,
        line: null,
        column: null,
        name: null
      };
    };

  /**
   * Returns the original source content. The only argument is the url of the
   * original source file. Returns null if no original source content is
   * availible.
   */
  BasicSourceMapConsumer.prototype.sourceContentFor =
    function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {
      if (!this.sourcesContent) {
        return null;
      }

      if (this.sourceRoot != null) {
        aSource = util.relative(this.sourceRoot, aSource);
      }

      if (this._sources.has(aSource)) {
        return this.sourcesContent[this._sources.indexOf(aSource)];
      }

      var url;
      if (this.sourceRoot != null
          && (url = util.urlParse(this.sourceRoot))) {
        // XXX: file:// URIs and absolute paths lead to unexpected behavior for
        // many users. We can help them out when they expect file:// URIs to
        // behave like it would if they were running a local HTTP server. See
        // https://bugzilla.mozilla.org/show_bug.cgi?id=885597.
        var fileUriAbsPath = aSource.replace(/^file:\/\//, "");
        if (url.scheme == "file"
            && this._sources.has(fileUriAbsPath)) {
          return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]
        }

        if ((!url.path || url.path == "/")
            && this._sources.has("/" + aSource)) {
          return this.sourcesContent[this._sources.indexOf("/" + aSource)];
        }
      }

      // This function is used recursively from
      // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we
      // don't want to throw if we can't find the source - we just want to
      // return null, so we provide a flag to exit gracefully.
      if (nullOnMissing) {
        return null;
      }
      else {
        throw new Error('"' + aSource + '" is not in the SourceMap.');
      }
    };

  /**
   * Returns the generated line and column information for the original source,
   * line, and column positions provided. The only argument is an object with
   * the following properties:
   *
   *   - source: The filename of the original source.
   *   - line: The line number in the original source.
   *   - column: The column number in the original source.
   *   - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
   *     'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
   *     closest element that is smaller than or greater than the one we are
   *     searching for, respectively, if the exact element cannot be found.
   *     Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
   *
   * and an object is returned with the following properties:
   *
   *   - line: The line number in the generated source, or null.
   *   - column: The column number in the generated source, or null.
   */
  BasicSourceMapConsumer.prototype.generatedPositionFor =
    function SourceMapConsumer_generatedPositionFor(aArgs) {
      var needle = {
        source: util.getArg(aArgs, 'source'),
        originalLine: util.getArg(aArgs, 'line'),
        originalColumn: util.getArg(aArgs, 'column')
      };

      if (this.sourceRoot != null) {
        needle.source = util.relative(this.sourceRoot, needle.source);
      }

      var index = this._findMapping(
        needle,
        this._originalMappings,
        "originalLine",
        "originalColumn",
        util.compareByOriginalPositions,
        util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND)
      );

      if (index >= 0) {
        var mapping = this._originalMappings[index];

        if (mapping.source === needle.source) {
          return {
            line: util.getArg(mapping, 'generatedLine', null),
            column: util.getArg(mapping, 'generatedColumn', null),
            lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)
          };
        }
      }

      return {
        line: null,
        column: null,
        lastColumn: null
      };
    };

  exports.BasicSourceMapConsumer = BasicSourceMapConsumer;

  /**
   * An IndexedSourceMapConsumer instance represents a parsed source map which
   * we can query for information. It differs from BasicSourceMapConsumer in
   * that it takes "indexed" source maps (i.e. ones with a "sections" field) as
   * input.
   *
   * The only parameter is a raw source map (either as a JSON string, or already
   * parsed to an object). According to the spec for indexed source maps, they
   * have the following attributes:
   *
   *   - version: Which version of the source map spec this map is following.
   *   - file: Optional. The generated file this source map is associated with.
   *   - sections: A list of section definitions.
   *
   * Each value under the "sections" field has two fields:
   *   - offset: The offset into the original specified at which this section
   *       begins to apply, defined as an object with a "line" and "column"
   *       field.
   *   - map: A source map definition. This source map could also be indexed,
   *       but doesn't have to be.
   *
   * Instead of the "map" field, it's also possible to have a "url" field
   * specifying a URL to retrieve a source map from, but that's currently
   * unsupported.
   *
   * Here's an example source map, taken from the source map spec[0], but
   * modified to omit a section which uses the "url" field.
   *
   *  {
   *    version : 3,
   *    file: "app.js",
   *    sections: [{
   *      offset: {line:100, column:10},
   *      map: {
   *        version : 3,
   *        file: "section.js",
   *        sources: ["foo.js", "bar.js"],
   *        names: ["src", "maps", "are", "fun"],
   *        mappings: "AAAA,E;;ABCDE;"
   *      }
   *    }],
   *  }
   *
   * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt
   */
  function IndexedSourceMapConsumer(aSourceMap) {
    var sourceMap = aSourceMap;
    if (typeof aSourceMap === 'string') {
      sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
    }

    var version = util.getArg(sourceMap, 'version');
    var sections = util.getArg(sourceMap, 'sections');

    if (version != this._version) {
      throw new Error('Unsupported version: ' + version);
    }

    var lastOffset = {
      line: -1,
      column: 0
    };
    this._sections = sections.map(function (s) {
      if (s.url) {
        // The url field will require support for asynchronicity.
        // See https://github.com/mozilla/source-map/issues/16
        throw new Error('Support for url field in sections not implemented.');
      }
      var offset = util.getArg(s, 'offset');
      var offsetLine = util.getArg(offset, 'line');
      var offsetColumn = util.getArg(offset, 'column');

      if (offsetLine < lastOffset.line ||
          (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {
        throw new Error('Section offsets must be ordered and non-overlapping.');
      }
      lastOffset = offset;

      return {
        generatedOffset: {
          // The offset fields are 0-based, but we use 1-based indices when
          // encoding/decoding from VLQ.
          generatedLine: offsetLine + 1,
          generatedColumn: offsetColumn + 1
        },
        consumer: new SourceMapConsumer(util.getArg(s, 'map'))
      }
    });
  }

  IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);
  IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;

  /**
   * The version of the source mapping spec that we are consuming.
   */
  IndexedSourceMapConsumer.prototype._version = 3;

  /**
   * The list of original sources.
   */
  Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {
    get: function () {
      var sources = [];
      for (var i = 0; i < this._sections.length; i++) {
        for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {
          sources.push(this._sections[i].consumer.sources[j]);
        }
      };
      return sources;
    }
  });

  /**
   * Returns the original source, line, and column information for the generated
   * source's line and column positions provided. The only argument is an object
   * with the following properties:
   *
   *   - line: The line number in the generated source.
   *   - column: The column number in the generated source.
   *
   * and an object is returned with the following properties:
   *
   *   - source: The original source file, or null.
   *   - line: The line number in the original source, or null.
   *   - column: The column number in the original source, or null.
   *   - name: The original identifier, or null.
   */
  IndexedSourceMapConsumer.prototype.originalPositionFor =
    function IndexedSourceMapConsumer_originalPositionFor(aArgs) {
      var needle = {
        generatedLine: util.getArg(aArgs, 'line'),
        generatedColumn: util.getArg(aArgs, 'column')
      };

      // Find the section containing the generated position we're trying to map
      // to an original position.
      var sectionIndex = binarySearch.search(needle, this._sections,
        function(needle, section) {
          var cmp = needle.generatedLine - section.generatedOffset.generatedLine;
          if (cmp) {
            return cmp;
          }

          return (needle.generatedColumn -
                  section.generatedOffset.generatedColumn);
        });
      var section = this._sections[sectionIndex];

      if (!section) {
        return {
          source: null,
          line: null,
          column: null,
          name: null
        };
      }

      return section.consumer.originalPositionFor({
        line: needle.generatedLine -
          (section.generatedOffset.generatedLine - 1),
        column: needle.generatedColumn -
          (section.generatedOffset.generatedLine === needle.generatedLine
           ? section.generatedOffset.generatedColumn - 1
           : 0),
        bias: aArgs.bias
      });
    };

  /**
   * Returns the original source content. The only argument is the url of the
   * original source file. Returns null if no original source content is
   * available.
   */
  IndexedSourceMapConsumer.prototype.sourceContentFor =
    function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {
      for (var i = 0; i < this._sections.length; i++) {
        var section = this._sections[i];

        var content = section.consumer.sourceContentFor(aSource, true);
        if (content) {
          return content;
        }
      }
      if (nullOnMissing) {
        return null;
      }
      else {
        throw new Error('"' + aSource + '" is not in the SourceMap.');
      }
    };

  /**
   * Returns the generated line and column information for the original source,
   * line, and column positions provided. The only argument is an object with
   * the following properties:
   *
   *   - source: The filename of the original source.
   *   - line: The line number in the original source.
   *   - column: The column number in the original source.
   *
   * and an object is returned with the following properties:
   *
   *   - line: The line number in the generated source, or null.
   *   - column: The column number in the generated source, or null.
   */
  IndexedSourceMapConsumer.prototype.generatedPositionFor =
    function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {
      for (var i = 0; i < this._sections.length; i++) {
        var section = this._sections[i];

        // Only consider this section if the requested source is in the list of
        // sources of the consumer.
        if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) {
          continue;
        }
        var generatedPosition = section.consumer.generatedPositionFor(aArgs);
        if (generatedPosition) {
          var ret = {
            line: generatedPosition.line +
              (section.generatedOffset.generatedLine - 1),
            column: generatedPosition.column +
              (section.generatedOffset.generatedLine === generatedPosition.line
               ? section.generatedOffset.generatedColumn - 1
               : 0)
          };
          return ret;
        }
      }

      return {
        line: null,
        column: null
      };
    };

  /**
   * Parse the mappings in a string in to a data structure which we can easily
   * query (the ordered arrays in the `this.__generatedMappings` and
   * `this.__originalMappings` properties).
   */
  IndexedSourceMapConsumer.prototype._parseMappings =
    function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {
      this.__generatedMappings = [];
      this.__originalMappings = [];
      for (var i = 0; i < this._sections.length; i++) {
        var section = this._sections[i];
        var sectionMappings = section.consumer._generatedMappings;
        for (var j = 0; j < sectionMappings.length; j++) {
          var mapping = sectionMappings[i];

          var source = mapping.source;
          var sourceRoot = section.consumer.sourceRoot;

          if (source != null && sourceRoot != null) {
            source = util.join(sourceRoot, source);
          }

          // The mappings coming from the consumer for the section have
          // generated positions relative to the start of the section, so we
          // need to offset them to be relative to the start of the concatenated
          // generated file.
          var adjustedMapping = {
            source: source,
            generatedLine: mapping.generatedLine +
              (section.generatedOffset.generatedLine - 1),
            generatedColumn: mapping.column +
              (section.generatedOffset.generatedLine === mapping.generatedLine)
              ? section.generatedOffset.generatedColumn - 1
              : 0,
            originalLine: mapping.originalLine,
            originalColumn: mapping.originalColumn,
            name: mapping.name
          };

          this.__generatedMappings.push(adjustedMapping);
          if (typeof adjustedMapping.originalLine === 'number') {
            this.__originalMappings.push(adjustedMapping);
          }
        };
      };

      this.__generatedMappings.sort(util.compareByGeneratedPositions);
      this.__originalMappings.sort(util.compareByOriginalPositions);
    };

  exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;

});
400
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map/source-map-generator.js
generated
vendored
Normal file
@@ -0,0 +1,400 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
    var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var base64VLQ = require('./base64-vlq');
  var util = require('./util');
  var ArraySet = require('./array-set').ArraySet;
  var MappingList = require('./mapping-list').MappingList;

  /**
   * An instance of the SourceMapGenerator represents a source map which is
   * being built incrementally. You may pass an object with the following
   * properties:
   *
   *   - file: The filename of the generated source.
   *   - sourceRoot: A root for all relative URLs in this source map.
   */
  function SourceMapGenerator(aArgs) {
    if (!aArgs) {
      aArgs = {};
    }
    this._file = util.getArg(aArgs, 'file', null);
    this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);
    this._skipValidation = util.getArg(aArgs, 'skipValidation', false);
    this._sources = new ArraySet();
    this._names = new ArraySet();
    this._mappings = new MappingList();
    this._sourcesContents = null;
  }

  SourceMapGenerator.prototype._version = 3;

  /**
   * Creates a new SourceMapGenerator based on a SourceMapConsumer
   *
   * @param aSourceMapConsumer The SourceMap.
   */
  SourceMapGenerator.fromSourceMap =
    function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {
      var sourceRoot = aSourceMapConsumer.sourceRoot;
      var generator = new SourceMapGenerator({
        file: aSourceMapConsumer.file,
        sourceRoot: sourceRoot
      });
      aSourceMapConsumer.eachMapping(function (mapping) {
        var newMapping = {
          generated: {
            line: mapping.generatedLine,
            column: mapping.generatedColumn
          }
        };

        if (mapping.source != null) {
          newMapping.source = mapping.source;
          if (sourceRoot != null) {
            newMapping.source = util.relative(sourceRoot, newMapping.source);
          }

          newMapping.original = {
            line: mapping.originalLine,
            column: mapping.originalColumn
          };

          if (mapping.name != null) {
            newMapping.name = mapping.name;
          }
        }

        generator.addMapping(newMapping);
      });
      aSourceMapConsumer.sources.forEach(function (sourceFile) {
        var content = aSourceMapConsumer.sourceContentFor(sourceFile);
        if (content != null) {
          generator.setSourceContent(sourceFile, content);
        }
      });
      return generator;
    };

  /**
   * Add a single mapping from original source line and column to the generated
   * source's line and column for this source map being created. The mapping
   * object should have the following properties:
   *
   *   - generated: An object with the generated line and column positions.
   *   - original: An object with the original line and column positions.
   *   - source: The original source file (relative to the sourceRoot).
   *   - name: An optional original token name for this mapping.
   */
  SourceMapGenerator.prototype.addMapping =
    function SourceMapGenerator_addMapping(aArgs) {
      var generated = util.getArg(aArgs, 'generated');
      var original = util.getArg(aArgs, 'original', null);
      var source = util.getArg(aArgs, 'source', null);
      var name = util.getArg(aArgs, 'name', null);

      if (!this._skipValidation) {
        this._validateMapping(generated, original, source, name);
      }

      if (source != null && !this._sources.has(source)) {
        this._sources.add(source);
      }

      if (name != null && !this._names.has(name)) {
        this._names.add(name);
      }

      this._mappings.add({
        generatedLine: generated.line,
        generatedColumn: generated.column,
        originalLine: original != null && original.line,
        originalColumn: original != null && original.column,
        source: source,
        name: name
      });
    };

  /**
   * Set the source content for a source file.
   */
  SourceMapGenerator.prototype.setSourceContent =
    function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {
      var source = aSourceFile;
      if (this._sourceRoot != null) {
        source = util.relative(this._sourceRoot, source);
      }

      if (aSourceContent != null) {
        // Add the source content to the _sourcesContents map.
        // Create a new _sourcesContents map if the property is null.
        if (!this._sourcesContents) {
          this._sourcesContents = {};
        }
        this._sourcesContents[util.toSetString(source)] = aSourceContent;
      } else if (this._sourcesContents) {
        // Remove the source file from the _sourcesContents map.
        // If the _sourcesContents map is empty, set the property to null.
        delete this._sourcesContents[util.toSetString(source)];
        if (Object.keys(this._sourcesContents).length === 0) {
          this._sourcesContents = null;
        }
      }
    };

  /**
   * Applies the mappings of a sub-source-map for a specific source file to the
   * source map being generated. Each mapping to the supplied source file is
   * rewritten using the supplied source map. Note: The resolution for the
   * resulting mappings is the minimium of this map and the supplied map.
   *
   * @param aSourceMapConsumer The source map to be applied.
   * @param aSourceFile Optional. The filename of the source file.
   *        If omitted, SourceMapConsumer's file property will be used.
   * @param aSourceMapPath Optional. The dirname of the path to the source map
   *        to be applied. If relative, it is relative to the SourceMapConsumer.
   *        This parameter is needed when the two source maps aren't in the same
   *        directory, and the source map to be applied contains relative source
   *        paths. If so, those relative source paths need to be rewritten
   *        relative to the SourceMapGenerator.
   */
  SourceMapGenerator.prototype.applySourceMap =
    function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {
      var sourceFile = aSourceFile;
      // If aSourceFile is omitted, we will use the file property of the SourceMap
      if (aSourceFile == null) {
        if (aSourceMapConsumer.file == null) {
          throw new Error(
            'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +
            'or the source map\'s "file" property. Both were omitted.'
          );
        }
        sourceFile = aSourceMapConsumer.file;
      }
      var sourceRoot = this._sourceRoot;
      // Make "sourceFile" relative if an absolute Url is passed.
      if (sourceRoot != null) {
        sourceFile = util.relative(sourceRoot, sourceFile);
      }
      // Applying the SourceMap can add and remove items from the sources and
      // the names array.
      var newSources = new ArraySet();
      var newNames = new ArraySet();

      // Find mappings for the "sourceFile"
      this._mappings.unsortedForEach(function (mapping) {
        if (mapping.source === sourceFile && mapping.originalLine != null) {
          // Check if it can be mapped by the source map, then update the mapping.
          var original = aSourceMapConsumer.originalPositionFor({
            line: mapping.originalLine,
            column: mapping.originalColumn
          });
          if (original.source != null) {
            // Copy mapping
            mapping.source = original.source;
            if (aSourceMapPath != null) {
              mapping.source = util.join(aSourceMapPath, mapping.source)
            }
            if (sourceRoot != null) {
              mapping.source = util.relative(sourceRoot, mapping.source);
            }
            mapping.originalLine = original.line;
            mapping.originalColumn = original.column;
            if (original.name != null) {
              mapping.name = original.name;
            }
          }
        }

        var source = mapping.source;
        if (source != null && !newSources.has(source)) {
          newSources.add(source);
        }

        var name = mapping.name;
        if (name != null && !newNames.has(name)) {
          newNames.add(name);
        }

      }, this);
      this._sources = newSources;
      this._names = newNames;

      // Copy sourcesContents of applied map.
      aSourceMapConsumer.sources.forEach(function (sourceFile) {
        var content = aSourceMapConsumer.sourceContentFor(sourceFile);
        if (content != null) {
          if (aSourceMapPath != null) {
            sourceFile = util.join(aSourceMapPath, sourceFile);
          }
          if (sourceRoot != null) {
            sourceFile = util.relative(sourceRoot, sourceFile);
          }
          this.setSourceContent(sourceFile, content);
        }
      }, this);
    };

  /**
   * A mapping can have one of the three levels of data:
   *
   *   1. Just the generated position.
   *   2. The Generated position, original position, and original source.
   *   3. Generated and original position, original source, as well as a name
   *      token.
   *
   * To maintain consistency, we validate that any new mapping being added falls
   * in to one of these categories.
   */
  SourceMapGenerator.prototype._validateMapping =
    function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,
                                                aName) {
      if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
          && aGenerated.line > 0 && aGenerated.column >= 0
          && !aOriginal && !aSource && !aName) {
        // Case 1.
        return;
      }
      else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
               && aOriginal && 'line' in aOriginal && 'column' in aOriginal
               && aGenerated.line > 0 && aGenerated.column >= 0
               && aOriginal.line > 0 && aOriginal.column >= 0
               && aSource) {
        // Cases 2 and 3.
        return;
      }
      else {
        throw new Error('Invalid mapping: ' + JSON.stringify({
|
||||||
|
generated: aGenerated,
|
||||||
|
source: aSource,
|
||||||
|
original: aOriginal,
|
||||||
|
name: aName
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Serialize the accumulated mappings in to the stream of base 64 VLQs
|
||||||
|
* specified by the source map format.
|
||||||
|
*/
|
||||||
|
SourceMapGenerator.prototype._serializeMappings =
|
||||||
|
function SourceMapGenerator_serializeMappings() {
|
||||||
|
var previousGeneratedColumn = 0;
|
||||||
|
var previousGeneratedLine = 1;
|
||||||
|
var previousOriginalColumn = 0;
|
||||||
|
var previousOriginalLine = 0;
|
||||||
|
var previousName = 0;
|
||||||
|
var previousSource = 0;
|
||||||
|
var result = '';
|
||||||
|
var mapping;
|
||||||
|
|
||||||
|
var mappings = this._mappings.toArray();
|
||||||
|
|
||||||
|
for (var i = 0, len = mappings.length; i < len; i++) {
|
||||||
|
mapping = mappings[i];
|
||||||
|
|
||||||
|
if (mapping.generatedLine !== previousGeneratedLine) {
|
||||||
|
previousGeneratedColumn = 0;
|
||||||
|
while (mapping.generatedLine !== previousGeneratedLine) {
|
||||||
|
result += ';';
|
||||||
|
previousGeneratedLine++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if (i > 0) {
|
||||||
|
if (!util.compareByGeneratedPositions(mapping, mappings[i - 1])) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
result += ',';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result += base64VLQ.encode(mapping.generatedColumn
|
||||||
|
- previousGeneratedColumn);
|
||||||
|
previousGeneratedColumn = mapping.generatedColumn;
|
||||||
|
|
||||||
|
if (mapping.source != null) {
|
||||||
|
result += base64VLQ.encode(this._sources.indexOf(mapping.source)
|
||||||
|
- previousSource);
|
||||||
|
previousSource = this._sources.indexOf(mapping.source);
|
||||||
|
|
||||||
|
// lines are stored 0-based in SourceMap spec version 3
|
||||||
|
result += base64VLQ.encode(mapping.originalLine - 1
|
||||||
|
- previousOriginalLine);
|
||||||
|
previousOriginalLine = mapping.originalLine - 1;
|
||||||
|
|
||||||
|
result += base64VLQ.encode(mapping.originalColumn
|
||||||
|
- previousOriginalColumn);
|
||||||
|
previousOriginalColumn = mapping.originalColumn;
|
||||||
|
|
||||||
|
if (mapping.name != null) {
|
||||||
|
result += base64VLQ.encode(this._names.indexOf(mapping.name)
|
||||||
|
- previousName);
|
||||||
|
previousName = this._names.indexOf(mapping.name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
|
||||||
|
SourceMapGenerator.prototype._generateSourcesContent =
|
||||||
|
function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {
|
||||||
|
return aSources.map(function (source) {
|
||||||
|
if (!this._sourcesContents) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (aSourceRoot != null) {
|
||||||
|
source = util.relative(aSourceRoot, source);
|
||||||
|
}
|
||||||
|
var key = util.toSetString(source);
|
||||||
|
return Object.prototype.hasOwnProperty.call(this._sourcesContents,
|
||||||
|
key)
|
||||||
|
? this._sourcesContents[key]
|
||||||
|
: null;
|
||||||
|
}, this);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Externalize the source map.
|
||||||
|
*/
|
||||||
|
SourceMapGenerator.prototype.toJSON =
|
||||||
|
function SourceMapGenerator_toJSON() {
|
||||||
|
var map = {
|
||||||
|
version: this._version,
|
||||||
|
sources: this._sources.toArray(),
|
||||||
|
names: this._names.toArray(),
|
||||||
|
mappings: this._serializeMappings()
|
||||||
|
};
|
||||||
|
if (this._file != null) {
|
||||||
|
map.file = this._file;
|
||||||
|
}
|
||||||
|
if (this._sourceRoot != null) {
|
||||||
|
map.sourceRoot = this._sourceRoot;
|
||||||
|
}
|
||||||
|
if (this._sourcesContents) {
|
||||||
|
map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);
|
||||||
|
}
|
||||||
|
|
||||||
|
return map;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Render the source map being generated to a string.
|
||||||
|
*/
|
||||||
|
SourceMapGenerator.prototype.toString =
|
||||||
|
function SourceMapGenerator_toString() {
|
||||||
|
return JSON.stringify(this.toJSON());
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.SourceMapGenerator = SourceMapGenerator;
|
||||||
|
|
||||||
|
});
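
For orientation, a minimal usage sketch of the `SourceMapGenerator` API defined in the vendored file above (`addMapping`, `setSourceContent`, `toString`); the `bundle.js`/`a.js` names and the positions are illustrative only, not part of this commit:

```javascript
// Illustrative sketch, not part of the diff.
var SourceMapGenerator = require('source-map').SourceMapGenerator;

var generator = new SourceMapGenerator({ file: 'bundle.js' });

// One mapping: line 1, column 0 of bundle.js came from line 1, column 0 of a.js.
generator.addMapping({
  generated: { line: 1, column: 0 },
  original:  { line: 1, column: 0 },
  source: 'a.js',
  name: 'foo'
});

// Embed the original source so consumers don't need a.js on disk.
generator.setSourceContent('a.js', 'function foo() {}\n');

// Serialize to the version-3 JSON source-map format.
console.log(generator.toString());
```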
|
||||||
414
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map/source-node.js
generated
vendored
Normal file
@@ -0,0 +1,414 @@
|
|||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
if (typeof define !== 'function') {
|
||||||
|
var define = require('amdefine')(module, require);
|
||||||
|
}
|
||||||
|
define(function (require, exports, module) {
|
||||||
|
|
||||||
|
var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;
|
||||||
|
var util = require('./util');
|
||||||
|
|
||||||
|
// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other
|
||||||
|
// operating systems these days (capturing the result).
|
||||||
|
var REGEX_NEWLINE = /(\r?\n)/;
|
||||||
|
|
||||||
|
// Newline character code for charCodeAt() comparisons
|
||||||
|
var NEWLINE_CODE = 10;
|
||||||
|
|
||||||
|
// Private symbol for identifying `SourceNode`s when multiple versions of
|
||||||
|
// the source-map library are loaded. This MUST NOT CHANGE across
|
||||||
|
// versions!
|
||||||
|
var isSourceNode = "$$$isSourceNode$$$";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* SourceNodes provide a way to abstract over interpolating/concatenating
|
||||||
|
* snippets of generated JavaScript source code while maintaining the line and
|
||||||
|
* column information associated with the original source code.
|
||||||
|
*
|
||||||
|
* @param aLine The original line number.
|
||||||
|
* @param aColumn The original column number.
|
||||||
|
* @param aSource The original source's filename.
|
||||||
|
* @param aChunks Optional. An array of strings which are snippets of
|
||||||
|
* generated JS, or other SourceNodes.
|
||||||
|
* @param aName The original identifier.
|
||||||
|
*/
|
||||||
|
function SourceNode(aLine, aColumn, aSource, aChunks, aName) {
|
||||||
|
this.children = [];
|
||||||
|
this.sourceContents = {};
|
||||||
|
this.line = aLine == null ? null : aLine;
|
||||||
|
this.column = aColumn == null ? null : aColumn;
|
||||||
|
this.source = aSource == null ? null : aSource;
|
||||||
|
this.name = aName == null ? null : aName;
|
||||||
|
this[isSourceNode] = true;
|
||||||
|
if (aChunks != null) this.add(aChunks);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||||
|
*
|
||||||
|
* @param aGeneratedCode The generated code
|
||||||
|
* @param aSourceMapConsumer The SourceMap for the generated code
|
||||||
|
* @param aRelativePath Optional. The path that relative sources in the
|
||||||
|
* SourceMapConsumer should be relative to.
|
||||||
|
*/
|
||||||
|
SourceNode.fromStringWithSourceMap =
|
||||||
|
function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {
|
||||||
|
// The SourceNode we want to fill with the generated code
|
||||||
|
// and the SourceMap
|
||||||
|
var node = new SourceNode();
|
||||||
|
|
||||||
|
// All even indices of this array are one line of the generated code,
|
||||||
|
// while all odd indices are the newlines between two adjacent lines
|
||||||
|
// (since `REGEX_NEWLINE` captures its match).
|
||||||
|
// Processed fragments are removed from this array, by calling `shiftNextLine`.
|
||||||
|
var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);
|
||||||
|
var shiftNextLine = function() {
|
||||||
|
var lineContents = remainingLines.shift();
|
||||||
|
// The last line of a file might not have a newline.
|
||||||
|
var newLine = remainingLines.shift() || "";
|
||||||
|
return lineContents + newLine;
|
||||||
|
};
|
||||||
|
|
||||||
|
// We need to remember the position of "remainingLines"
|
||||||
|
var lastGeneratedLine = 1, lastGeneratedColumn = 0;
|
||||||
|
|
||||||
|
// The generate SourceNodes we need a code range.
|
||||||
|
// To extract it current and last mapping is used.
|
||||||
|
// Here we store the last mapping.
|
||||||
|
var lastMapping = null;
|
||||||
|
|
||||||
|
aSourceMapConsumer.eachMapping(function (mapping) {
|
||||||
|
if (lastMapping !== null) {
|
||||||
|
// We add the code from "lastMapping" to "mapping":
|
||||||
|
// First check if there is a new line in between.
|
||||||
|
if (lastGeneratedLine < mapping.generatedLine) {
|
||||||
|
var code = "";
|
||||||
|
// Associate first line with "lastMapping"
|
||||||
|
addMappingWithCode(lastMapping, shiftNextLine());
|
||||||
|
lastGeneratedLine++;
|
||||||
|
lastGeneratedColumn = 0;
|
||||||
|
// The remaining code is added without mapping
|
||||||
|
} else {
|
||||||
|
// There is no new line in between.
|
||||||
|
// Associate the code between "lastGeneratedColumn" and
|
||||||
|
// "mapping.generatedColumn" with "lastMapping"
|
||||||
|
var nextLine = remainingLines[0];
|
||||||
|
var code = nextLine.substr(0, mapping.generatedColumn -
|
||||||
|
lastGeneratedColumn);
|
||||||
|
remainingLines[0] = nextLine.substr(mapping.generatedColumn -
|
||||||
|
lastGeneratedColumn);
|
||||||
|
lastGeneratedColumn = mapping.generatedColumn;
|
||||||
|
addMappingWithCode(lastMapping, code);
|
||||||
|
// No more remaining code, continue
|
||||||
|
lastMapping = mapping;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// We add the generated code until the first mapping
|
||||||
|
// to the SourceNode without any mapping.
|
||||||
|
// Each line is added as separate string.
|
||||||
|
while (lastGeneratedLine < mapping.generatedLine) {
|
||||||
|
node.add(shiftNextLine());
|
||||||
|
lastGeneratedLine++;
|
||||||
|
}
|
||||||
|
if (lastGeneratedColumn < mapping.generatedColumn) {
|
||||||
|
var nextLine = remainingLines[0];
|
||||||
|
node.add(nextLine.substr(0, mapping.generatedColumn));
|
||||||
|
remainingLines[0] = nextLine.substr(mapping.generatedColumn);
|
||||||
|
lastGeneratedColumn = mapping.generatedColumn;
|
||||||
|
}
|
||||||
|
lastMapping = mapping;
|
||||||
|
}, this);
|
||||||
|
// We have processed all mappings.
|
||||||
|
if (remainingLines.length > 0) {
|
||||||
|
if (lastMapping) {
|
||||||
|
// Associate the remaining code in the current line with "lastMapping"
|
||||||
|
addMappingWithCode(lastMapping, shiftNextLine());
|
||||||
|
}
|
||||||
|
// and add the remaining lines without any mapping
|
||||||
|
node.add(remainingLines.join(""));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy sourcesContent into SourceNode
|
||||||
|
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||||
|
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||||
|
if (content != null) {
|
||||||
|
if (aRelativePath != null) {
|
||||||
|
sourceFile = util.join(aRelativePath, sourceFile);
|
||||||
|
}
|
||||||
|
node.setSourceContent(sourceFile, content);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return node;
|
||||||
|
|
||||||
|
function addMappingWithCode(mapping, code) {
|
||||||
|
if (mapping === null || mapping.source === undefined) {
|
||||||
|
node.add(code);
|
||||||
|
} else {
|
||||||
|
var source = aRelativePath
|
||||||
|
? util.join(aRelativePath, mapping.source)
|
||||||
|
: mapping.source;
|
||||||
|
node.add(new SourceNode(mapping.originalLine,
|
||||||
|
mapping.originalColumn,
|
||||||
|
source,
|
||||||
|
code,
|
||||||
|
mapping.name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a chunk of generated JS to this source node.
|
||||||
|
*
|
||||||
|
* @param aChunk A string snippet of generated JS code, another instance of
|
||||||
|
* SourceNode, or an array where each member is one of those things.
|
||||||
|
*/
|
||||||
|
SourceNode.prototype.add = function SourceNode_add(aChunk) {
|
||||||
|
if (Array.isArray(aChunk)) {
|
||||||
|
aChunk.forEach(function (chunk) {
|
||||||
|
this.add(chunk);
|
||||||
|
}, this);
|
||||||
|
}
|
||||||
|
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
||||||
|
if (aChunk) {
|
||||||
|
this.children.push(aChunk);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw new TypeError(
|
||||||
|
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a chunk of generated JS to the beginning of this source node.
|
||||||
|
*
|
||||||
|
* @param aChunk A string snippet of generated JS code, another instance of
|
||||||
|
* SourceNode, or an array where each member is one of those things.
|
||||||
|
*/
|
||||||
|
SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {
|
||||||
|
if (Array.isArray(aChunk)) {
|
||||||
|
for (var i = aChunk.length-1; i >= 0; i--) {
|
||||||
|
this.prepend(aChunk[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
||||||
|
this.children.unshift(aChunk);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
throw new TypeError(
|
||||||
|
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Walk over the tree of JS snippets in this node and its children. The
|
||||||
|
* walking function is called once for each snippet of JS and is passed that
|
||||||
|
* snippet and the its original associated source's line/column location.
|
||||||
|
*
|
||||||
|
* @param aFn The traversal function.
|
||||||
|
*/
|
||||||
|
SourceNode.prototype.walk = function SourceNode_walk(aFn) {
|
||||||
|
var chunk;
|
||||||
|
for (var i = 0, len = this.children.length; i < len; i++) {
|
||||||
|
chunk = this.children[i];
|
||||||
|
if (chunk[isSourceNode]) {
|
||||||
|
chunk.walk(aFn);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if (chunk !== '') {
|
||||||
|
aFn(chunk, { source: this.source,
|
||||||
|
line: this.line,
|
||||||
|
column: this.column,
|
||||||
|
name: this.name });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between
|
||||||
|
* each of `this.children`.
|
||||||
|
*
|
||||||
|
* @param aSep The separator.
|
||||||
|
*/
|
||||||
|
SourceNode.prototype.join = function SourceNode_join(aSep) {
|
||||||
|
var newChildren;
|
||||||
|
var i;
|
||||||
|
var len = this.children.length;
|
||||||
|
if (len > 0) {
|
||||||
|
newChildren = [];
|
||||||
|
for (i = 0; i < len-1; i++) {
|
||||||
|
newChildren.push(this.children[i]);
|
||||||
|
newChildren.push(aSep);
|
||||||
|
}
|
||||||
|
newChildren.push(this.children[i]);
|
||||||
|
this.children = newChildren;
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Call String.prototype.replace on the very right-most source snippet. Useful
|
||||||
|
* for trimming whitespace from the end of a source node, etc.
|
||||||
|
*
|
||||||
|
* @param aPattern The pattern to replace.
|
||||||
|
* @param aReplacement The thing to replace the pattern with.
|
||||||
|
*/
|
||||||
|
SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {
|
||||||
|
var lastChild = this.children[this.children.length - 1];
|
||||||
|
if (lastChild[isSourceNode]) {
|
||||||
|
lastChild.replaceRight(aPattern, aReplacement);
|
||||||
|
}
|
||||||
|
else if (typeof lastChild === 'string') {
|
||||||
|
this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
this.children.push(''.replace(aPattern, aReplacement));
|
||||||
|
}
|
||||||
|
return this;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the source content for a source file. This will be added to the SourceMapGenerator
|
||||||
|
* in the sourcesContent field.
|
||||||
|
*
|
||||||
|
* @param aSourceFile The filename of the source file
|
||||||
|
* @param aSourceContent The content of the source file
|
||||||
|
*/
|
||||||
|
SourceNode.prototype.setSourceContent =
|
||||||
|
function SourceNode_setSourceContent(aSourceFile, aSourceContent) {
|
||||||
|
this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Walk over the tree of SourceNodes. The walking function is called for each
|
||||||
|
* source file content and is passed the filename and source content.
|
||||||
|
*
|
||||||
|
* @param aFn The traversal function.
|
||||||
|
*/
|
||||||
|
SourceNode.prototype.walkSourceContents =
|
||||||
|
function SourceNode_walkSourceContents(aFn) {
|
||||||
|
for (var i = 0, len = this.children.length; i < len; i++) {
|
||||||
|
if (this.children[i][isSourceNode]) {
|
||||||
|
this.children[i].walkSourceContents(aFn);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var sources = Object.keys(this.sourceContents);
|
||||||
|
for (var i = 0, len = sources.length; i < len; i++) {
|
||||||
|
aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return the string representation of this source node. Walks over the tree
|
||||||
|
* and concatenates all the various snippets together to one string.
|
||||||
|
*/
|
||||||
|
SourceNode.prototype.toString = function SourceNode_toString() {
|
||||||
|
var str = "";
|
||||||
|
this.walk(function (chunk) {
|
||||||
|
str += chunk;
|
||||||
|
});
|
||||||
|
return str;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the string representation of this source node along with a source
|
||||||
|
* map.
|
||||||
|
*/
|
||||||
|
SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {
|
||||||
|
var generated = {
|
||||||
|
code: "",
|
||||||
|
line: 1,
|
||||||
|
column: 0
|
||||||
|
};
|
||||||
|
var map = new SourceMapGenerator(aArgs);
|
||||||
|
var sourceMappingActive = false;
|
||||||
|
var lastOriginalSource = null;
|
||||||
|
var lastOriginalLine = null;
|
||||||
|
var lastOriginalColumn = null;
|
||||||
|
var lastOriginalName = null;
|
||||||
|
this.walk(function (chunk, original) {
|
||||||
|
generated.code += chunk;
|
||||||
|
if (original.source !== null
|
||||||
|
&& original.line !== null
|
||||||
|
&& original.column !== null) {
|
||||||
|
if(lastOriginalSource !== original.source
|
||||||
|
|| lastOriginalLine !== original.line
|
||||||
|
|| lastOriginalColumn !== original.column
|
||||||
|
|| lastOriginalName !== original.name) {
|
||||||
|
map.addMapping({
|
||||||
|
source: original.source,
|
||||||
|
original: {
|
||||||
|
line: original.line,
|
||||||
|
column: original.column
|
||||||
|
},
|
||||||
|
generated: {
|
||||||
|
line: generated.line,
|
||||||
|
column: generated.column
|
||||||
|
},
|
||||||
|
name: original.name
|
||||||
|
});
|
||||||
|
}
|
||||||
|
lastOriginalSource = original.source;
|
||||||
|
lastOriginalLine = original.line;
|
||||||
|
lastOriginalColumn = original.column;
|
||||||
|
lastOriginalName = original.name;
|
||||||
|
sourceMappingActive = true;
|
||||||
|
} else if (sourceMappingActive) {
|
||||||
|
map.addMapping({
|
||||||
|
generated: {
|
||||||
|
line: generated.line,
|
||||||
|
column: generated.column
|
||||||
|
}
|
||||||
|
});
|
||||||
|
lastOriginalSource = null;
|
||||||
|
sourceMappingActive = false;
|
||||||
|
}
|
||||||
|
for (var idx = 0, length = chunk.length; idx < length; idx++) {
|
||||||
|
if (chunk.charCodeAt(idx) === NEWLINE_CODE) {
|
||||||
|
generated.line++;
|
||||||
|
generated.column = 0;
|
||||||
|
// Mappings end at eol
|
||||||
|
if (idx + 1 === length) {
|
||||||
|
lastOriginalSource = null;
|
||||||
|
sourceMappingActive = false;
|
||||||
|
} else if (sourceMappingActive) {
|
||||||
|
map.addMapping({
|
||||||
|
source: original.source,
|
||||||
|
original: {
|
||||||
|
line: original.line,
|
||||||
|
column: original.column
|
||||||
|
},
|
||||||
|
generated: {
|
||||||
|
line: generated.line,
|
||||||
|
column: generated.column
|
||||||
|
},
|
||||||
|
name: original.name
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
generated.column++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
this.walkSourceContents(function (sourceFile, sourceContent) {
|
||||||
|
map.setSourceContent(sourceFile, sourceContent);
|
||||||
|
});
|
||||||
|
|
||||||
|
return { code: generated.code, map: map };
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.SourceNode = SourceNode;
|
||||||
|
|
||||||
|
});
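
A small hedged sketch of the `SourceNode` API defined above (`add`, `toStringWithSourceMap`); the snippet text and file names are made up for illustration:

```javascript
// Illustrative sketch, not part of the diff.
var SourceNode = require('source-map').SourceNode;

// Each child node remembers where its snippet came from (line, column, file).
var root = new SourceNode(null, null, null, [
  new SourceNode(1, 0, 'a.js', 'function foo() {}\n', 'foo'),
  new SourceNode(1, 0, 'b.js', 'function bar() {}\n', 'bar')
]);

var result = root.toStringWithSourceMap({ file: 'bundle.js' });
console.log(result.code);            // the concatenated JavaScript
console.log(result.map.toString());  // the accompanying source map
```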
|
||||||
319
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/lib/source-map/util.js
generated
vendored
Normal file
@@ -0,0 +1,319 @@
|
|||||||
|
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||||
|
/*
|
||||||
|
* Copyright 2011 Mozilla Foundation and contributors
|
||||||
|
* Licensed under the New BSD license. See LICENSE or:
|
||||||
|
* http://opensource.org/licenses/BSD-3-Clause
|
||||||
|
*/
|
||||||
|
if (typeof define !== 'function') {
|
||||||
|
var define = require('amdefine')(module, require);
|
||||||
|
}
|
||||||
|
define(function (require, exports, module) {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This is a helper function for getting values from parameter/options
|
||||||
|
* objects.
|
||||||
|
*
|
||||||
|
* @param args The object we are extracting values from
|
||||||
|
* @param name The name of the property we are getting.
|
||||||
|
* @param defaultValue An optional value to return if the property is missing
|
||||||
|
* from the object. If this is not specified and the property is missing, an
|
||||||
|
* error will be thrown.
|
||||||
|
*/
|
||||||
|
function getArg(aArgs, aName, aDefaultValue) {
|
||||||
|
if (aName in aArgs) {
|
||||||
|
return aArgs[aName];
|
||||||
|
} else if (arguments.length === 3) {
|
||||||
|
return aDefaultValue;
|
||||||
|
} else {
|
||||||
|
throw new Error('"' + aName + '" is a required argument.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.getArg = getArg;
|
||||||
|
|
||||||
|
var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/;
|
||||||
|
var dataUrlRegexp = /^data:.+\,.+$/;
|
||||||
|
|
||||||
|
function urlParse(aUrl) {
|
||||||
|
var match = aUrl.match(urlRegexp);
|
||||||
|
if (!match) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
scheme: match[1],
|
||||||
|
auth: match[2],
|
||||||
|
host: match[3],
|
||||||
|
port: match[4],
|
||||||
|
path: match[5]
|
||||||
|
};
|
||||||
|
}
|
||||||
|
exports.urlParse = urlParse;
|
||||||
|
|
||||||
|
function urlGenerate(aParsedUrl) {
|
||||||
|
var url = '';
|
||||||
|
if (aParsedUrl.scheme) {
|
||||||
|
url += aParsedUrl.scheme + ':';
|
||||||
|
}
|
||||||
|
url += '//';
|
||||||
|
if (aParsedUrl.auth) {
|
||||||
|
url += aParsedUrl.auth + '@';
|
||||||
|
}
|
||||||
|
if (aParsedUrl.host) {
|
||||||
|
url += aParsedUrl.host;
|
||||||
|
}
|
||||||
|
if (aParsedUrl.port) {
|
||||||
|
url += ":" + aParsedUrl.port
|
||||||
|
}
|
||||||
|
if (aParsedUrl.path) {
|
||||||
|
url += aParsedUrl.path;
|
||||||
|
}
|
||||||
|
return url;
|
||||||
|
}
|
||||||
|
exports.urlGenerate = urlGenerate;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalizes a path, or the path portion of a URL:
|
||||||
|
*
|
||||||
|
* - Replaces consequtive slashes with one slash.
|
||||||
|
* - Removes unnecessary '.' parts.
|
||||||
|
* - Removes unnecessary '<dir>/..' parts.
|
||||||
|
*
|
||||||
|
* Based on code in the Node.js 'path' core module.
|
||||||
|
*
|
||||||
|
* @param aPath The path or url to normalize.
|
||||||
|
*/
|
||||||
|
function normalize(aPath) {
|
||||||
|
var path = aPath;
|
||||||
|
var url = urlParse(aPath);
|
||||||
|
if (url) {
|
||||||
|
if (!url.path) {
|
||||||
|
return aPath;
|
||||||
|
}
|
||||||
|
path = url.path;
|
||||||
|
}
|
||||||
|
var isAbsolute = (path.charAt(0) === '/');
|
||||||
|
|
||||||
|
var parts = path.split(/\/+/);
|
||||||
|
for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {
|
||||||
|
part = parts[i];
|
||||||
|
if (part === '.') {
|
||||||
|
parts.splice(i, 1);
|
||||||
|
} else if (part === '..') {
|
||||||
|
up++;
|
||||||
|
} else if (up > 0) {
|
||||||
|
if (part === '') {
|
||||||
|
// The first part is blank if the path is absolute. Trying to go
|
||||||
|
// above the root is a no-op. Therefore we can remove all '..' parts
|
||||||
|
// directly after the root.
|
||||||
|
parts.splice(i + 1, up);
|
||||||
|
up = 0;
|
||||||
|
} else {
|
||||||
|
parts.splice(i, 2);
|
||||||
|
up--;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
path = parts.join('/');
|
||||||
|
|
||||||
|
if (path === '') {
|
||||||
|
path = isAbsolute ? '/' : '.';
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url) {
|
||||||
|
url.path = path;
|
||||||
|
return urlGenerate(url);
|
||||||
|
}
|
||||||
|
return path;
|
||||||
|
}
|
||||||
|
exports.normalize = normalize;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Joins two paths/URLs.
|
||||||
|
*
|
||||||
|
* @param aRoot The root path or URL.
|
||||||
|
* @param aPath The path or URL to be joined with the root.
|
||||||
|
*
|
||||||
|
* - If aPath is a URL or a data URI, aPath is returned, unless aPath is a
|
||||||
|
* scheme-relative URL: Then the scheme of aRoot, if any, is prepended
|
||||||
|
* first.
|
||||||
|
* - Otherwise aPath is a path. If aRoot is a URL, then its path portion
|
||||||
|
* is updated with the result and aRoot is returned. Otherwise the result
|
||||||
|
* is returned.
|
||||||
|
* - If aPath is absolute, the result is aPath.
|
||||||
|
* - Otherwise the two paths are joined with a slash.
|
||||||
|
* - Joining for example 'http://' and 'www.example.com' is also supported.
|
||||||
|
*/
|
||||||
|
function join(aRoot, aPath) {
|
||||||
|
if (aRoot === "") {
|
||||||
|
aRoot = ".";
|
||||||
|
}
|
||||||
|
if (aPath === "") {
|
||||||
|
aPath = ".";
|
||||||
|
}
|
||||||
|
var aPathUrl = urlParse(aPath);
|
||||||
|
var aRootUrl = urlParse(aRoot);
|
||||||
|
if (aRootUrl) {
|
||||||
|
aRoot = aRootUrl.path || '/';
|
||||||
|
}
|
||||||
|
|
||||||
|
// `join(foo, '//www.example.org')`
|
||||||
|
if (aPathUrl && !aPathUrl.scheme) {
|
||||||
|
if (aRootUrl) {
|
||||||
|
aPathUrl.scheme = aRootUrl.scheme;
|
||||||
|
}
|
||||||
|
return urlGenerate(aPathUrl);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (aPathUrl || aPath.match(dataUrlRegexp)) {
|
||||||
|
return aPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
// `join('http://', 'www.example.com')`
|
||||||
|
if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {
|
||||||
|
aRootUrl.host = aPath;
|
||||||
|
return urlGenerate(aRootUrl);
|
||||||
|
}
|
||||||
|
|
||||||
|
var joined = aPath.charAt(0) === '/'
|
||||||
|
? aPath
|
||||||
|
: normalize(aRoot.replace(/\/+$/, '') + '/' + aPath);
|
||||||
|
|
||||||
|
if (aRootUrl) {
|
||||||
|
aRootUrl.path = joined;
|
||||||
|
return urlGenerate(aRootUrl);
|
||||||
|
}
|
||||||
|
return joined;
|
||||||
|
}
|
||||||
|
exports.join = join;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Make a path relative to a URL or another path.
|
||||||
|
*
|
||||||
|
* @param aRoot The root path or URL.
|
||||||
|
* @param aPath The path or URL to be made relative to aRoot.
|
||||||
|
*/
|
||||||
|
function relative(aRoot, aPath) {
|
||||||
|
if (aRoot === "") {
|
||||||
|
aRoot = ".";
|
||||||
|
}
|
||||||
|
|
||||||
|
aRoot = aRoot.replace(/\/$/, '');
|
||||||
|
|
||||||
|
// XXX: It is possible to remove this block, and the tests still pass!
|
||||||
|
var url = urlParse(aRoot);
|
||||||
|
if (aPath.charAt(0) == "/" && url && url.path == "/") {
|
||||||
|
return aPath.slice(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return aPath.indexOf(aRoot + '/') === 0
|
||||||
|
? aPath.substr(aRoot.length + 1)
|
||||||
|
: aPath;
|
||||||
|
}
|
||||||
|
exports.relative = relative;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Because behavior goes wacky when you set `__proto__` on objects, we
|
||||||
|
* have to prefix all the strings in our set with an arbitrary character.
|
||||||
|
*
|
||||||
|
* See https://github.com/mozilla/source-map/pull/31 and
|
||||||
|
* https://github.com/mozilla/source-map/issues/30
|
||||||
|
*
|
||||||
|
* @param String aStr
|
||||||
|
*/
|
||||||
|
function toSetString(aStr) {
|
||||||
|
return '$' + aStr;
|
||||||
|
}
|
||||||
|
exports.toSetString = toSetString;
|
||||||
|
|
||||||
|
function fromSetString(aStr) {
|
||||||
|
return aStr.substr(1);
|
||||||
|
}
|
||||||
|
exports.fromSetString = fromSetString;
|
||||||
|
|
||||||
|
function strcmp(aStr1, aStr2) {
|
||||||
|
var s1 = aStr1 || "";
|
||||||
|
var s2 = aStr2 || "";
|
||||||
|
return (s1 > s2) - (s1 < s2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Comparator between two mappings where the original positions are compared.
|
||||||
|
*
|
||||||
|
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
||||||
|
* mappings with the same original source/line/column, but different generated
|
||||||
|
* line and column the same. Useful when searching for a mapping with a
|
||||||
|
* stubbed out mapping.
|
||||||
|
*/
|
||||||
|
function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {
|
||||||
|
var cmp;
|
||||||
|
|
||||||
|
cmp = strcmp(mappingA.source, mappingB.source);
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||||
|
if (cmp || onlyCompareOriginal) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
return strcmp(mappingA.name, mappingB.name);
|
||||||
|
};
|
||||||
|
exports.compareByOriginalPositions = compareByOriginalPositions;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Comparator between two mappings where the generated positions are
|
||||||
|
* compared.
|
||||||
|
*
|
||||||
|
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
||||||
|
* mappings with the same generated line and column, but different
|
||||||
|
* source/name/original line and column the same. Useful when searching for a
|
||||||
|
* mapping with a stubbed out mapping.
|
||||||
|
*/
|
||||||
|
function compareByGeneratedPositions(mappingA, mappingB, onlyCompareGenerated) {
|
||||||
|
var cmp;
|
||||||
|
|
||||||
|
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||||
|
if (cmp || onlyCompareGenerated) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = strcmp(mappingA.source, mappingB.source);
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||||
|
if (cmp) {
|
||||||
|
return cmp;
|
||||||
|
}
|
||||||
|
|
||||||
|
return strcmp(mappingA.name, mappingB.name);
|
||||||
|
};
|
||||||
|
exports.compareByGeneratedPositions = compareByGeneratedPositions;
|
||||||
|
|
||||||
|
});
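
The helpers above are internal to the vendored library; the following illustration (not part of the diff) just spells out what `normalize`, `join`, and `relative` return for a few inputs, assuming the module is loaded the same way its sibling files load it:

```javascript
// Values follow directly from the definitions above; the file is an
// amdefine module, so a plain require() works in Node.
var util = require('./util');

util.normalize('/a/b/../c');              // => '/a/c'  ('.' and '<dir>/..' removed)
util.join('a/b', 'c');                    // => 'a/b/c'
util.join('http://example.com/x', '/y');  // => 'http://example.com/y' (absolute path replaces the URL path)
util.relative('/a/b', '/a/b/c.js');       // => 'c.js'
```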
|
||||||
58
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/node_modules/amdefine/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
amdefine is released under two licenses: new BSD, and MIT. You may pick the
|
||||||
|
license that best suits your development needs. The text of both licenses are
|
||||||
|
provided below.
|
||||||
|
|
||||||
|
|
||||||
|
The "New" BSD License:
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
Copyright (c) 2011, The Dojo Foundation
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright notice, this
|
||||||
|
list of conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
this list of conditions and the following disclaimer in the documentation
|
||||||
|
and/or other materials provided with the distribution.
|
||||||
|
* Neither the name of the Dojo Foundation nor the names of its contributors
|
||||||
|
may be used to endorse or promote products derived from this software
|
||||||
|
without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||||
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||||
|
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||||
|
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||||
|
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||||
|
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||||
|
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||||
|
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||||
|
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
MIT License
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Copyright (c) 2011, The Dojo Foundation
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
171
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/node_modules/amdefine/README.md
generated
vendored
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
# amdefine
|
||||||
|
|
||||||
|
A module that can be used to implement AMD's define() in Node. This allows you
|
||||||
|
to code to the AMD API and have the module work in node programs without
|
||||||
|
requiring those other programs to use AMD.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
**1)** Update your package.json to indicate amdefine as a dependency:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
"dependencies": {
|
||||||
|
"amdefine": ">=0.1.0"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Then run `npm install` to get amdefine into your project.
|
||||||
|
|
||||||
|
**2)** At the top of each module that uses define(), place this code:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
if (typeof define !== 'function') { var define = require('amdefine')(module) }
|
||||||
|
```
|
||||||
|
|
||||||
|
**Only use these snippets** when loading amdefine. If you preserve the basic structure,
|
||||||
|
with the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).
|
||||||
|
|
||||||
|
You can add spaces, line breaks and even require amdefine with a local path, but
|
||||||
|
keep the rest of the structure to get the stripping behavior.
|
||||||
|
|
||||||
|
As you may know, because `if` statements in JavaScript don't have their own scope, the var
|
||||||
|
declaration in the above snippet is made whether the `if` expression is truthy or not. If
|
||||||
|
RequireJS is loaded then the declaration is superfluous because `define` is already already
|
||||||
|
declared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`
|
||||||
|
declarations of the same variable in the same scope gracefully.
|
||||||
|
|
||||||
|
If you want to deliver amdefine.js with your code rather than specifying it as a dependency
|
||||||
|
with npm, then just download the latest release and refer to it using a relative path:
|
||||||
|
|
||||||
|
[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)
|
||||||
|
|
||||||
|
### amdefine/intercept
|
||||||
|
|
||||||
|
Consider this very experimental.
|
||||||
|
|
||||||
|
Instead of pasting the piece of text for the amdefine setup of a `define`
|
||||||
|
variable in each module you create or consume, you can use `amdefine/intercept`
|
||||||
|
instead. It will automatically insert the above snippet in each .js file loaded
|
||||||
|
by Node.
|
||||||
|
|
||||||
|
**Warning**: you should only use this if you are creating an application that
|
||||||
|
is consuming AMD style defined()'d modules that are distributed via npm and want
|
||||||
|
to run that code in Node.
|
||||||
|
|
||||||
|
For library code where you are not sure if it will be used by others in Node or
|
||||||
|
in the browser, then explicitly depending on amdefine and placing the code
|
||||||
|
snippet above is suggested path, instead of using `amdefine/intercept`. The
|
||||||
|
intercept module affects all .js files loaded in the Node app, and it is
|
||||||
|
inconsiderate to modify global state like that unless you are also controlling
|
||||||
|
the top level app.
|
||||||
|
|
||||||
|
#### Why distribute AMD-style modules via npm?
|
||||||
|
|
||||||
|
npm has a lot of weaknesses for front-end use (installed layout is not great,
|
||||||
|
should have better support for the `baseUrl + moduleID + '.js' style of loading,
|
||||||
|
single file JS installs), but some people want a JS package manager and are
|
||||||
|
willing to live with those constraints. If that is you, but still want to author
|
||||||
|
in AMD style modules to get dynamic require([]), better direct source usage and
|
||||||
|
powerful loader plugin support in the browser, then this tool can help.
|
||||||
|
|
||||||
|
#### amdefine/intercept usage
|
||||||
|
|
||||||
|
Just require it in your top level app module (for example index.js, server.js):
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
require('amdefine/intercept');
|
||||||
|
```
|
||||||
|
|
||||||
|
The module does not return a value, so no need to assign the result to a local
|
||||||
|
variable.
|
||||||
|
|
||||||
|
Then just require() code as you normally would with Node's require(). Any .js
|
||||||
|
loaded after the intercept require will have the amdefine check injected in
|
||||||
|
the .js source as it is loaded. It does not modify the source on disk, just
|
||||||
|
prepends some content to the text of the module as it is loaded by Node.
|
||||||
|
|
||||||
|
#### How amdefine/intercept works
|
||||||
|
|
||||||
|
It overrides the `Module._extensions['.js']` in Node to automatically prepend
|
||||||
|
the amdefine snippet above. So, it will affect any .js file loaded by your
|
||||||
|
app.
|
||||||
|
|
||||||
|
## define() usage
|
||||||
|
|
||||||
|
It is best if you use the anonymous forms of define() in your module:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
define(function (require) {
|
||||||
|
var dependency = require('dependency');
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
define(['dependency'], function (dependency) {
|
||||||
|
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
## RequireJS optimizer integration. <a name="optimizer"></name>
|
||||||
|
|
||||||
|
Version 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)
|
||||||
|
will have support for stripping the `if (typeof define !== 'function')` check
|
||||||
|
mentioned above, so you can include this snippet for code that runs in the
|
||||||
|
browser, but avoid taking the cost of the if() statement once the code is
|
||||||
|
optimized for deployment.
|
||||||
|
|
||||||
|
## Node 0.4 Support
|
||||||
|
|
||||||
|
If you want to support Node 0.4, then add `require` as the second parameter to amdefine:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.
|
||||||
|
if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
|
||||||
|
```
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
### Synchronous vs Asynchronous
|
||||||
|
|
||||||
|
amdefine creates a define() function that is callable by your code. It will
|
||||||
|
execute and trace dependencies and call the factory function *synchronously*,
|
||||||
|
to keep the behavior in line with Node's synchronous dependency tracing.
|
||||||
|
|
||||||
|
The exception: calling AMD's callback-style require() from inside a factory
|
||||||
|
function. The require callback is called on process.nextTick():
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
define(function (require) {
|
||||||
|
require(['a'], function(a) {
|
||||||
|
//'a' is loaded synchronously, but
|
||||||
|
//this callback is called on process.nextTick().
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### Loader Plugins
|
||||||
|
|
||||||
|
Loader plugins are supported as long as they call their load() callbacks
|
||||||
|
synchronously. So ones that do network requests will not work. However plugins
|
||||||
|
like [text](http://requirejs.org/docs/api.html#text) can load text files locally.
|
||||||
|
|
||||||
|
The plugin API's `load.fromText()` is **not supported** in amdefine, so this means
|
||||||
|
transpiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)
|
||||||
|
will not work. This may be fixable, but it is a bit complex, and I do not have
|
||||||
|
enough node-fu to figure it out yet. See the source for amdefine.js if you want
|
||||||
|
to get an idea of the issues involved.
|
||||||
|
|
||||||
|
## Tests
|
||||||
|
|
||||||
|
To run the tests, cd to **tests** and run:
|
||||||
|
|
||||||
|
```
|
||||||
|
node all.js
|
||||||
|
node all-intercept.js
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
New BSD and MIT. Check the LICENSE file for all the details.
|
||||||
301
server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/node_modules/amdefine/amdefine.js
generated
vendored
Normal file
@@ -0,0 +1,301 @@
|
|||||||
|
/** vim: et:ts=4:sw=4:sts=4
|
||||||
|
* @license amdefine 0.1.0 Copyright (c) 2011, The Dojo Foundation All Rights Reserved.
|
||||||
|
* Available via the MIT or new BSD license.
|
||||||
|
* see: http://github.com/jrburke/amdefine for details
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*jslint node: true */
|
||||||
|
/*global module, process */
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a define for node.
|
||||||
|
* @param {Object} module the "module" object that is defined by Node for the
|
||||||
|
* current module.
|
||||||
|
* @param {Function} [requireFn]. Node's require function for the current module.
|
||||||
|
* It only needs to be passed in Node versions before 0.5, when module.require
|
||||||
|
* did not exist.
|
||||||
|
* @returns {Function} a define function that is usable for the current node
|
||||||
|
* module.
|
||||||
|
*/
|
||||||
|
function amdefine(module, requireFn) {
|
||||||
|
'use strict';
|
||||||
|
var defineCache = {},
|
||||||
|
loaderCache = {},
|
||||||
|
alreadyCalled = false,
|
||||||
|
path = require('path'),
|
||||||
|
makeRequire, stringRequire;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Trims the . and .. from an array of path segments.
|
||||||
|
* It will keep a leading path segment if a .. will become
|
||||||
|
* the first path segment, to help with module name lookups,
|
||||||
|
* which act like paths, but can be remapped. But the end result,
|
||||||
|
* all paths that use this function should look normalized.
|
||||||
|
* NOTE: this method MODIFIES the input array.
|
||||||
|
* @param {Array} ary the array of path segments.
|
||||||
|
*/
|
||||||
|
function trimDots(ary) {
|
||||||
|
var i, part;
|
||||||
|
for (i = 0; ary[i]; i+= 1) {
|
||||||
|
part = ary[i];
|
||||||
|
if (part === '.') {
|
||||||
|
ary.splice(i, 1);
|
||||||
|
i -= 1;
|
||||||
|
} else if (part === '..') {
|
||||||
|
if (i === 1 && (ary[2] === '..' || ary[0] === '..')) {
|
||||||
|
//End of the line. Keep at least one non-dot
|
||||||
|
//path segment at the front so it can be mapped
|
||||||
|
//correctly to disk. Otherwise, there is likely
|
||||||
|
//no path mapping for a path starting with '..'.
|
||||||
|
//This can still fail, but catches the most reasonable
|
||||||
|
//uses of ..
|
||||||
|
break;
|
||||||
|
} else if (i > 0) {
|
||||||
|
ary.splice(i - 1, 2);
|
||||||
|
i -= 2;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalize(name, baseName) {
|
||||||
|
var baseParts;
|
||||||
|
|
||||||
|
//Adjust any relative paths.
|
||||||
|
if (name && name.charAt(0) === '.') {
|
||||||
|
//If have a base name, try to normalize against it,
|
||||||
|
//otherwise, assume it is a top-level require that will
|
||||||
|
//be relative to baseUrl in the end.
|
||||||
|
if (baseName) {
|
||||||
|
baseParts = baseName.split('/');
|
||||||
|
baseParts = baseParts.slice(0, baseParts.length - 1);
|
||||||
|
baseParts = baseParts.concat(name.split('/'));
|
||||||
|
trimDots(baseParts);
|
||||||
|
name = baseParts.join('/');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create the normalize() function passed to a loader plugin's
|
||||||
|
* normalize method.
|
||||||
|
*/
|
||||||
|
function makeNormalize(relName) {
|
||||||
|
return function (name) {
|
||||||
|
return normalize(name, relName);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeLoad(id) {
|
||||||
|
function load(value) {
|
||||||
|
loaderCache[id] = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
load.fromText = function (id, text) {
|
||||||
|
//This one is difficult because the text can/probably uses
|
||||||
|
//define, and any relative paths and requires should be relative
|
||||||
|
//to that id was it would be found on disk. But this would require
|
||||||
|
//bootstrapping a module/require fairly deeply from node core.
|
||||||
|
//Not sure how best to go about that yet.
|
||||||
|
throw new Error('amdefine does not implement load.fromText');
|
||||||
|
};
|
||||||
|
|
||||||
|
return load;
|
||||||
|
}
|
||||||
|
|
||||||
|
makeRequire = function (systemRequire, exports, module, relId) {
|
||||||
|
function amdRequire(deps, callback) {
|
||||||
|
if (typeof deps === 'string') {
|
||||||
|
//Synchronous, single module require('')
|
||||||
|
return stringRequire(systemRequire, exports, module, deps, relId);
|
||||||
|
} else {
|
||||||
|
//Array of dependencies with a callback.
|
||||||
|
|
||||||
|
//Convert the dependencies to modules.
|
||||||
|
deps = deps.map(function (depName) {
|
||||||
|
return stringRequire(systemRequire, exports, module, depName, relId);
|
||||||
|
});
|
||||||
|
|
||||||
|
//Wait for next tick to call back the require call.
|
||||||
|
if (callback) {
|
||||||
|
process.nextTick(function () {
|
||||||
|
callback.apply(null, deps);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
amdRequire.toUrl = function (filePath) {
|
||||||
|
if (filePath.indexOf('.') === 0) {
|
||||||
|
return normalize(filePath, path.dirname(module.filename));
|
||||||
|
} else {
|
||||||
|
return filePath;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
return amdRequire;
|
||||||
|
};
|
||||||
|
|
||||||
|
//Favor explicit value, passed in if the module wants to support Node 0.4.
|
||||||
|
requireFn = requireFn || function req() {
|
||||||
|
        return module.require.apply(module, arguments);
    };

    function runFactory(id, deps, factory) {
        var r, e, m, result;

        if (id) {
            e = loaderCache[id] = {};
            m = {
                id: id,
                uri: __filename,
                exports: e
            };
            r = makeRequire(requireFn, e, m, id);
        } else {
            //Only support one define call per file
            if (alreadyCalled) {
                throw new Error('amdefine with no module ID cannot be called more than once per file.');
            }
            alreadyCalled = true;

            //Use the real variables from node
            //Use module.exports for exports, since
            //the exports in here is amdefine exports.
            e = module.exports;
            m = module;
            r = makeRequire(requireFn, e, m, module.id);
        }

        //If there are dependencies, they are strings, so need
        //to convert them to dependency values.
        if (deps) {
            deps = deps.map(function (depName) {
                return r(depName);
            });
        }

        //Call the factory with the right dependencies.
        if (typeof factory === 'function') {
            result = factory.apply(m.exports, deps);
        } else {
            result = factory;
        }

        if (result !== undefined) {
            m.exports = result;
            if (id) {
                loaderCache[id] = m.exports;
            }
        }
    }

    stringRequire = function (systemRequire, exports, module, id, relId) {
        //Split the ID by a ! so that
        var index = id.indexOf('!'),
            originalId = id,
            prefix, plugin;

        if (index === -1) {
            id = normalize(id, relId);

            //Straight module lookup. If it is one of the special dependencies,
            //deal with it, otherwise, delegate to node.
            if (id === 'require') {
                return makeRequire(systemRequire, exports, module, relId);
            } else if (id === 'exports') {
                return exports;
            } else if (id === 'module') {
                return module;
            } else if (loaderCache.hasOwnProperty(id)) {
                return loaderCache[id];
            } else if (defineCache[id]) {
                runFactory.apply(null, defineCache[id]);
                return loaderCache[id];
            } else {
                if(systemRequire) {
                    return systemRequire(originalId);
                } else {
                    throw new Error('No module with ID: ' + id);
                }
            }
        } else {
            //There is a plugin in play.
            prefix = id.substring(0, index);
            id = id.substring(index + 1, id.length);

            plugin = stringRequire(systemRequire, exports, module, prefix, relId);

            if (plugin.normalize) {
                id = plugin.normalize(id, makeNormalize(relId));
            } else {
                //Normalize the ID normally.
                id = normalize(id, relId);
            }

            if (loaderCache[id]) {
                return loaderCache[id];
            } else {
                plugin.load(id, makeRequire(systemRequire, exports, module, relId), makeLoad(id), {});

                return loaderCache[id];
            }
        }
    };

    //Create a define function specific to the module asking for amdefine.
    function define(id, deps, factory) {
        if (Array.isArray(id)) {
            factory = deps;
            deps = id;
            id = undefined;
        } else if (typeof id !== 'string') {
            factory = id;
            id = deps = undefined;
        }

        if (deps && !Array.isArray(deps)) {
            factory = deps;
            deps = undefined;
        }

        if (!deps) {
            deps = ['require', 'exports', 'module'];
        }

        //Set up properties for this module. If an ID, then use
        //internal cache. If no ID, then use the external variables
        //for this node module.
        if (id) {
            //Put the module in deep freeze until there is a
            //require call for it.
            defineCache[id] = [id, deps, factory];
        } else {
            runFactory(id, deps, factory);
        }
    }

    //define.require, which has access to all the values in the
    //cache. Useful for AMD modules that all have IDs in the file,
    //but need to finally export a value to node based on one of those
    //IDs.
    define.require = function (id) {
        if (loaderCache[id]) {
            return loaderCache[id];
        }

        if (defineCache[id]) {
            runFactory.apply(null, defineCache[id]);
            return loaderCache[id];
        }
    };

    define.amd = {};

    return define;
}

module.exports = amdefine;
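The define/define.require machinery above is what lets an AMD-style module run under Node. As a hedged illustration only (not part of this commit; the dependency path and names below are made up), a consuming module typically wraps itself like this:

```js
// Standard amdefine boilerplate: only shim `define` when no AMD loader is present.
if (typeof define !== 'function') { var define = require('amdefine')(module); }

define(function (require) {
    var dep = require('./some-dep'); // hypothetical dependency

    // whatever the factory returns becomes module.exports under Node
    return {
        greet: function () { return 'hello from an AMD-style module'; }
    };
});
```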
36 server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/node_modules/amdefine/intercept.js generated vendored Normal file
@@ -0,0 +1,36 @@
/*jshint node: true */
var inserted,
    Module = require('module'),
    fs = require('fs'),
    existingExtFn = Module._extensions['.js'],
    amdefineRegExp = /amdefine\.js/;

inserted = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";

//From the node/lib/module.js source:
function stripBOM(content) {
    // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
    // because the buffer-to-string conversion in `fs.readFileSync()`
    // translates it to FEFF, the UTF-16 BOM.
    if (content.charCodeAt(0) === 0xFEFF) {
        content = content.slice(1);
    }
    return content;
}

//Also adapted from the node/lib/module.js source:
function intercept(module, filename) {
    var content = stripBOM(fs.readFileSync(filename, 'utf8'));

    if (!amdefineRegExp.test(module.id)) {
        content = inserted + content;
    }

    module._compile(content, filename);
}

intercept._id = 'amdefine/intercept';

if (!existingExtFn._id || existingExtFn._id !== intercept._id) {
    Module._extensions['.js'] = intercept;
}
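For context, a sketch of how the interception hook above would be activated; `./lib/some-amd-module` is a made-up path, and this usage is an assumption drawn from the code itself, not from this commit:

```js
// Requiring the intercept module swaps in the patched '.js' loader, so AMD-style
// files can omit the define boilerplate entirely.
require('amdefine/intercept');

var someAmdModule = require('./lib/some-amd-module'); // hypothetical AMD-style file
```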
31 server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/node_modules/amdefine/package.json generated vendored Normal file
File diff suppressed because one or more lines are too long
178 server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/node_modules/source-map/package.json generated vendored Normal file
File diff suppressed because one or more lines are too long
53 server/node_modules/gulp-concat/node_modules/concat-with-sourcemaps/package.json generated vendored Normal file
@@ -0,0 +1,53 @@
{
  "name": "concat-with-sourcemaps",
  "version": "1.0.2",
  "description": "Concatenate file contents with a custom separator and generate a source map",
  "homepage": "http://github.com/floridoo/concat-with-sourcemaps",
  "repository": {
    "type": "git",
    "url": "git://github.com/floridoo/concat-with-sourcemaps.git"
  },
  "main": "index.js",
  "scripts": {
    "test": "jshint *.js test/*.js && faucet test/*.js",
    "tap": "tape test/*.js",
    "cover": "istanbul cover --dir reports/coverage tape test/*.js",
    "coveralls": "istanbul cover tape test/*.js --report lcovonly && cat ./coverage/lcov.info | coveralls && rm -rf ./coverage"
  },
  "keywords": [
    "concat",
    "source map"
  ],
  "author": {
    "name": "Florian Reiterer",
    "email": "me@florianreiterer.com"
  },
  "license": "ISC",
  "dependencies": {
    "source-map": "^0.4.0"
  },
  "devDependencies": {
    "jshint": "^2.6.0",
    "tape": "^3.5.0",
    "istanbul": "^0.3.6",
    "faucet": "0.0.1",
    "coveralls": "^2.10.0"
  },
  "files": [
    "index.js",
    "package.json",
    "README.md",
    "LICENSE.md"
  ],
  "readme": "## Concat with source maps [![NPM version][npm-image]][npm-url] [![build status][travis-image]][travis-url] [![Test coverage][coveralls-image]][coveralls-url]\n\nNPM module for concatenating files and generating source maps.\n\n### Usage example\n```js\nvar concat = new Concat(true, 'all.js', '\\n');\nconcat.add('file1.js', file1Content);\nconcat.add('file2.js', file2Content, file2SourceMap);\n\nvar concatenatedContent = concat.content;\nvar sourceMapForContent = concat.sourceMap;\n```\n\n### API\n\n#### new Concat(generateSourceMap, outFileName, separator)\nInitialize a new concat object.\n\nParameters:\n- generateSourceMap: whether or not to generate a source map (default: false)\n- outFileName: the file name/path of the output file (for the source map)\n- separator: the string that should separate files (default: no separator)\n\n#### concat.add(fileName, content, sourceMap)\nAdd a file to the output file.\n\nParameters:\n- fileName: file name of the input file\n- content: content (Buffer or string) of the input file\n- sourceMap: optional source map of the input file (string). Will be merged into the output source map.\n\n#### concat.content\nThe resulting concatenated file content (Buffer).\n\n#### concat.sourceMap\nThe resulting source map of the concatenated files (string).\n\n[npm-image]: https://img.shields.io/npm/v/concat-with-sourcemaps.svg\n[npm-url]: https://www.npmjs.com/package/concat-with-sourcemaps\n[travis-image]: https://img.shields.io/travis/floridoo/concat-with-sourcemaps.svg\n[travis-url]: https://travis-ci.org/floridoo/concat-with-sourcemaps\n[coveralls-image]: https://img.shields.io/coveralls/floridoo/concat-with-sourcemaps.svg\n[coveralls-url]: https://coveralls.io/r/floridoo/concat-with-sourcemaps?branch=master\n",
  "readmeFilename": "README.md",
  "bugs": {
    "url": "https://github.com/floridoo/concat-with-sourcemaps/issues"
  },
  "_id": "concat-with-sourcemaps@1.0.2",
  "dist": {
    "shasum": "8127cc704228d500e61aac98033e854e8e61a2e4"
  },
  "_from": "concat-with-sourcemaps@^1.0.0",
  "_resolved": "https://registry.npmjs.org/concat-with-sourcemaps/-/concat-with-sourcemaps-1.0.2.tgz"
}
3 server/node_modules/gulp-concat/node_modules/through2/.npmignore generated vendored Normal file
@@ -0,0 +1,3 @@
test
.jshintrc
.travis.yml
39 server/node_modules/gulp-concat/node_modules/through2/LICENSE generated vendored Normal file
@@ -0,0 +1,39 @@
Copyright 2013, Rod Vagg (the "Original Author")
All rights reserved.

MIT +no-false-attribs License

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

Distributions of all or part of the Software intended to be used
by the recipients as they would use the unmodified Software,
containing modifications that substantially alter, remove, or
disable functionality of the Software, outside of the documented
configuration mechanisms provided by the Software, shall be
modified such that the Original Author's bug reporting email
addresses and urls are either replaced with the contact information
of the parties responsible for the changes, or removed entirely.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.


Except where noted, this license applies to any and all software
programs and associated documentation files created by the
Original Author, when distributed with the Software.
132 server/node_modules/gulp-concat/node_modules/through2/README.md generated vendored Normal file
@@ -0,0 +1,132 @@
# through2

[](https://nodei.co/npm/through2/)

**A tiny wrapper around Node streams.Transform (Streams2) to avoid explicit subclassing noise**

Inspired by [Dominic Tarr](https://github.com/dominictarr)'s [through](https://github.com/dominictarr/through) in that it's so much easier to make a stream out of a function than it is to set up the prototype chain properly: `through(function (chunk) { ... })`.

Note: A **Streams3** version of through2 is available in npm with the tag `"1.0"` rather than `"latest"` so an `npm install through2` will get you the current Streams2 version (version number is 0.x.x). To use a Streams3 version use `npm install through2@1` to fetch the latest version 1.x.x. For more information about Streams2 vs Streams3 and recommendations, see the article **[Why I don't use Node's core 'stream' module](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html)**.

```js
fs.createReadStream('ex.txt')
  .pipe(through2(function (chunk, enc, callback) {
    for (var i = 0; i < chunk.length; i++)
      if (chunk[i] == 97)
        chunk[i] = 122 // swap 'a' for 'z'

    this.push(chunk)

    callback()
  }))
  .pipe(fs.createWriteStream('out.txt'))
```

Or object streams:

```js
var all = []

fs.createReadStream('data.csv')
  .pipe(csv2())
  .pipe(through2.obj(function (chunk, enc, callback) {
    var data = {
        name    : chunk[0]
      , address : chunk[3]
      , phone   : chunk[10]
    }
    this.push(data)

    callback()
  }))
  .on('data', function (data) {
    all.push(data)
  })
  .on('end', function () {
    doSomethingSpecial(all)
  })
```

Note that `through2.obj(fn)` is a convenience wrapper around `through2({ objectMode: true }, fn)`.

## API

<b><code>through2([ options, ] [ transformFunction ] [, flushFunction ])</code></b>

Consult the **[stream.Transform](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_transform)** documentation for the exact rules of the `transformFunction` (i.e. `this._transform`) and the optional `flushFunction` (i.e. `this._flush`).

### options

The options argument is optional and is passed straight through to `stream.Transform`. So you can use `objectMode:true` if you are processing non-binary streams (or just use `through2.obj()`).

The `options` argument is first, unlike standard convention, because if I'm passing in an anonymous function then I'd prefer for the options argument to not get lost at the end of the call:

```js
fs.createReadStream('/tmp/important.dat')
  .pipe(through2({ objectMode: true, allowHalfOpen: false },
    function (chunk, enc, cb) {
      cb(null, 'wut?') // note we can use the second argument on the callback
                       // to provide data as an alternative to this.push('wut?')
    }
  ))
  .pipe(fs.createWriteStream('/tmp/wut.txt'))
```

### transformFunction

The `transformFunction` must have the following signature: `function (chunk, encoding, callback) {}`. A minimal implementation should call the `callback` function to indicate that the transformation is done, even if that transformation means discarding the chunk.

To queue a new chunk, call `this.push(chunk)`—this can be called as many times as required before the `callback()` if you have multiple pieces to send on.

Alternatively, you may use `callback(err, chunk)` as shorthand for emitting a single chunk or an error.

If you **do not provide a `transformFunction`** then you will get a simple pass-through stream.

### flushFunction

The optional `flushFunction` is provided as the last argument (2nd or 3rd, depending on whether you've supplied options) and is called just prior to the stream ending. It can be used to finish up any processing that may be in progress.

```js
fs.createReadStream('/tmp/important.dat')
  .pipe(through2(
    function (chunk, enc, cb) { cb(null, chunk) }, // transform is a noop
    function (cb) { // flush function
      this.push('tacking on an extra buffer to the end');
      cb();
    }
  ))
  .pipe(fs.createWriteStream('/tmp/wut.txt'));
```

<b><code>through2.ctor([ options, ] transformFunction[, flushFunction ])</code></b>

Instead of returning a `stream.Transform` instance, `through2.ctor()` returns a **constructor** for a custom Transform. This is useful when you want to use the same transform logic in multiple instances.

```js
var FToC = through2.ctor({objectMode: true}, function (record, encoding, callback) {
  if (record.temp != null && record.unit == "F") {
    record.temp = ( ( record.temp - 32 ) * 5 ) / 9
    record.unit = "C"
  }
  this.push(record)
  callback()
})

// Create instances of FToC like so:
var converter = new FToC()
// Or:
var converter = FToC()
// Or specify/override options when you instantiate, if you prefer:
var converter = FToC({objectMode: true})
```

## See Also

- [through2-map](https://github.com/brycebaril/through2-map) - Array.prototype.map analog for streams.
- [through2-filter](https://github.com/brycebaril/through2-filter) - Array.prototype.filter analog for streams.
- [through2-reduce](https://github.com/brycebaril/through2-reduce) - Array.prototype.reduce analog for streams.
- [through2-spy](https://github.com/brycebaril/through2-spy) - Wrapper for simple stream.PassThrough spies.

## License

**through2** is Copyright (c) 2013 Rod Vagg [@rvagg](https://twitter.com/rvagg) and licenced under the MIT licence. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE file for more details.
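Since gulp plugins such as gulp-concat are built on exactly this object-stream API, here is a minimal, hedged sketch of that pattern; the vinyl file objects and `file.path` are assumptions about gulp's streams, not something defined in this README:

```js
var through2 = require('through2');

// A do-nothing gulp-style plugin: an object-mode transform over vinyl files.
function logFileNames() {
  return through2.obj(function (file, enc, callback) {
    console.log('passing through:', file.path); // assumes a vinyl `path` property
    callback(null, file);                       // forward the file unchanged
  });
}
```

Inside a gulp task this would be used as `.pipe(logFileNames())` between `gulp.src()` and `gulp.dest()`.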
5 server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/.npmignore generated vendored Normal file
@@ -0,0 +1,5 @@
build/
test/
examples/
fs.js
zlib.js
18 server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/LICENSE generated vendored Normal file
@@ -0,0 +1,18 @@
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
15 server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/README.md generated vendored Normal file
@@ -0,0 +1,15 @@
# readable-stream

***Node-core streams for userland***

[](https://nodei.co/npm/readable-stream/)
[](https://nodei.co/npm/readable-stream/)

This package is a mirror of the Streams2 and Streams3 implementations in Node-core.

If you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core.

**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12.

**readable-stream** uses proper patch-level versioning so if you pin to `"~1.0.0"` you’ll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and when you’re ready to start using Streams3, pin to `"~1.1.0"`
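A small sketch of the substitution the README recommends (an assumption for illustration: the consumer was previously requiring the core module directly):

```js
// Before: var Transform = require('stream').Transform;
// After: pin readable-stream (e.g. "readable-stream": "~1.0.0" in package.json)
// and require it instead of the core module.
var Transform = require('readable-stream').Transform;
```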
1 server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/duplex.js generated vendored Normal file
@@ -0,0 +1 @@
module.exports = require("./lib/_stream_duplex.js")
89 server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/lib/_stream_duplex.js generated vendored Normal file
@@ -0,0 +1,89 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.

module.exports = Duplex;

/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) keys.push(key);
  return keys;
}
/*</replacement>*/


/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');

util.inherits(Duplex, Readable);

forEach(objectKeys(Writable.prototype), function(method) {
  if (!Duplex.prototype[method])
    Duplex.prototype[method] = Writable.prototype[method];
});

function Duplex(options) {
  if (!(this instanceof Duplex))
    return new Duplex(options);

  Readable.call(this, options);
  Writable.call(this, options);

  if (options && options.readable === false)
    this.readable = false;

  if (options && options.writable === false)
    this.writable = false;

  this.allowHalfOpen = true;
  if (options && options.allowHalfOpen === false)
    this.allowHalfOpen = false;

  this.once('end', onend);
}

// the no-half-open enforcer
function onend() {
  // if we allow half-open state, or if the writable side ended,
  // then we're ok.
  if (this.allowHalfOpen || this._writableState.ended)
    return;

  // no more data can be written.
  // But allow more writes to happen in this tick.
  process.nextTick(this.end.bind(this));
}

function forEach (xs, f) {
  for (var i = 0, l = xs.length; i < l; i++) {
    f(xs[i], i);
  }
}
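As a hedged aside (not part of the vendored file), the smallest useful Duplex built on this class might look like the following; the `Echo` name and behaviour are invented for illustration:

```js
var Duplex = require('readable-stream').Duplex;
var util = require('util');

// Echoes every written chunk back out of the readable side.
function Echo(options) {
  if (!(this instanceof Echo)) return new Echo(options);
  Duplex.call(this, options);
  var self = this;
  // once the writable side finishes, end the readable side too
  this.on('finish', function () { self.push(null); });
}
util.inherits(Echo, Duplex);

Echo.prototype._write = function (chunk, encoding, callback) {
  this.push(chunk); // hand written data straight to the readable side
  callback();
};

Echo.prototype._read = function (n) {
  // data is pushed from _write, so there is nothing to pull here
};
```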
46 server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/lib/_stream_passthrough.js generated vendored Normal file
@@ -0,0 +1,46 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.

module.exports = PassThrough;

var Transform = require('./_stream_transform');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

util.inherits(PassThrough, Transform);

function PassThrough(options) {
  if (!(this instanceof PassThrough))
    return new PassThrough(options);

  Transform.call(this, options);
}

PassThrough.prototype._transform = function(chunk, encoding, cb) {
  cb(null, chunk);
};
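A short usage sketch (an assumption, not from the vendored file): because PassThrough forwards chunks untouched, it works well as a tap on an existing pipeline:

```js
var PassThrough = require('readable-stream').PassThrough;

var tap = new PassThrough();
tap.on('data', function (chunk) {
  console.log('saw %d bytes', chunk.length);
});

process.stdin.pipe(tap).pipe(process.stdout);
```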
982 server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/lib/_stream_readable.js generated vendored Normal file
@@ -0,0 +1,982 @@
|
|||||||
|
// Copyright Joyent, Inc. and other Node contributors.
|
||||||
|
//
|
||||||
|
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||||
|
// copy of this software and associated documentation files (the
|
||||||
|
// "Software"), to deal in the Software without restriction, including
|
||||||
|
// without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||||
|
// persons to whom the Software is furnished to do so, subject to the
|
||||||
|
// following conditions:
|
||||||
|
//
|
||||||
|
// The above copyright notice and this permission notice shall be included
|
||||||
|
// in all copies or substantial portions of the Software.
|
||||||
|
//
|
||||||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||||
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||||
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||||
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||||
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||||
|
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
module.exports = Readable;
|
||||||
|
|
||||||
|
/*<replacement>*/
|
||||||
|
var isArray = require('isarray');
|
||||||
|
/*</replacement>*/
|
||||||
|
|
||||||
|
|
||||||
|
/*<replacement>*/
|
||||||
|
var Buffer = require('buffer').Buffer;
|
||||||
|
/*</replacement>*/
|
||||||
|
|
||||||
|
Readable.ReadableState = ReadableState;
|
||||||
|
|
||||||
|
var EE = require('events').EventEmitter;
|
||||||
|
|
||||||
|
/*<replacement>*/
|
||||||
|
if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
|
||||||
|
return emitter.listeners(type).length;
|
||||||
|
};
|
||||||
|
/*</replacement>*/
|
||||||
|
|
||||||
|
var Stream = require('stream');
|
||||||
|
|
||||||
|
/*<replacement>*/
|
||||||
|
var util = require('core-util-is');
|
||||||
|
util.inherits = require('inherits');
|
||||||
|
/*</replacement>*/
|
||||||
|
|
||||||
|
var StringDecoder;
|
||||||
|
|
||||||
|
util.inherits(Readable, Stream);
|
||||||
|
|
||||||
|
function ReadableState(options, stream) {
|
||||||
|
options = options || {};
|
||||||
|
|
||||||
|
// the point at which it stops calling _read() to fill the buffer
|
||||||
|
// Note: 0 is a valid value, means "don't call _read preemptively ever"
|
||||||
|
var hwm = options.highWaterMark;
|
||||||
|
this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
|
||||||
|
|
||||||
|
// cast to ints.
|
||||||
|
this.highWaterMark = ~~this.highWaterMark;
|
||||||
|
|
||||||
|
this.buffer = [];
|
||||||
|
this.length = 0;
|
||||||
|
this.pipes = null;
|
||||||
|
this.pipesCount = 0;
|
||||||
|
this.flowing = false;
|
||||||
|
this.ended = false;
|
||||||
|
this.endEmitted = false;
|
||||||
|
this.reading = false;
|
||||||
|
|
||||||
|
// In streams that never have any data, and do push(null) right away,
|
||||||
|
// the consumer can miss the 'end' event if they do some I/O before
|
||||||
|
// consuming the stream. So, we don't emit('end') until some reading
|
||||||
|
// happens.
|
||||||
|
this.calledRead = false;
|
||||||
|
|
||||||
|
// a flag to be able to tell if the onwrite cb is called immediately,
|
||||||
|
// or on a later tick. We set this to true at first, becuase any
|
||||||
|
// actions that shouldn't happen until "later" should generally also
|
||||||
|
// not happen before the first write call.
|
||||||
|
this.sync = true;
|
||||||
|
|
||||||
|
// whenever we return null, then we set a flag to say
|
||||||
|
// that we're awaiting a 'readable' event emission.
|
||||||
|
this.needReadable = false;
|
||||||
|
this.emittedReadable = false;
|
||||||
|
this.readableListening = false;
|
||||||
|
|
||||||
|
|
||||||
|
// object stream flag. Used to make read(n) ignore n and to
|
||||||
|
// make all the buffer merging and length checks go away
|
||||||
|
this.objectMode = !!options.objectMode;
|
||||||
|
|
||||||
|
// Crypto is kind of old and crusty. Historically, its default string
|
||||||
|
// encoding is 'binary' so we have to make this configurable.
|
||||||
|
// Everything else in the universe uses 'utf8', though.
|
||||||
|
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||||||
|
|
||||||
|
// when piping, we only care about 'readable' events that happen
|
||||||
|
// after read()ing all the bytes and not getting any pushback.
|
||||||
|
this.ranOut = false;
|
||||||
|
|
||||||
|
// the number of writers that are awaiting a drain event in .pipe()s
|
||||||
|
this.awaitDrain = 0;
|
||||||
|
|
||||||
|
// if true, a maybeReadMore has been scheduled
|
||||||
|
this.readingMore = false;
|
||||||
|
|
||||||
|
this.decoder = null;
|
||||||
|
this.encoding = null;
|
||||||
|
if (options.encoding) {
|
||||||
|
if (!StringDecoder)
|
||||||
|
StringDecoder = require('string_decoder/').StringDecoder;
|
||||||
|
this.decoder = new StringDecoder(options.encoding);
|
||||||
|
this.encoding = options.encoding;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function Readable(options) {
|
||||||
|
if (!(this instanceof Readable))
|
||||||
|
return new Readable(options);
|
||||||
|
|
||||||
|
this._readableState = new ReadableState(options, this);
|
||||||
|
|
||||||
|
// legacy
|
||||||
|
this.readable = true;
|
||||||
|
|
||||||
|
Stream.call(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Manually shove something into the read() buffer.
|
||||||
|
// This returns true if the highWaterMark has not been hit yet,
|
||||||
|
// similar to how Writable.write() returns true if you should
|
||||||
|
// write() some more.
|
||||||
|
Readable.prototype.push = function(chunk, encoding) {
|
||||||
|
var state = this._readableState;
|
||||||
|
|
||||||
|
if (typeof chunk === 'string' && !state.objectMode) {
|
||||||
|
encoding = encoding || state.defaultEncoding;
|
||||||
|
if (encoding !== state.encoding) {
|
||||||
|
chunk = new Buffer(chunk, encoding);
|
||||||
|
encoding = '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return readableAddChunk(this, state, chunk, encoding, false);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Unshift should *always* be something directly out of read()
|
||||||
|
Readable.prototype.unshift = function(chunk) {
|
||||||
|
var state = this._readableState;
|
||||||
|
return readableAddChunk(this, state, chunk, '', true);
|
||||||
|
};
|
||||||
|
|
||||||
|
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
|
||||||
|
var er = chunkInvalid(state, chunk);
|
||||||
|
if (er) {
|
||||||
|
stream.emit('error', er);
|
||||||
|
} else if (chunk === null || chunk === undefined) {
|
||||||
|
state.reading = false;
|
||||||
|
if (!state.ended)
|
||||||
|
onEofChunk(stream, state);
|
||||||
|
} else if (state.objectMode || chunk && chunk.length > 0) {
|
||||||
|
if (state.ended && !addToFront) {
|
||||||
|
var e = new Error('stream.push() after EOF');
|
||||||
|
stream.emit('error', e);
|
||||||
|
} else if (state.endEmitted && addToFront) {
|
||||||
|
var e = new Error('stream.unshift() after end event');
|
||||||
|
stream.emit('error', e);
|
||||||
|
} else {
|
||||||
|
if (state.decoder && !addToFront && !encoding)
|
||||||
|
chunk = state.decoder.write(chunk);
|
||||||
|
|
||||||
|
// update the buffer info.
|
||||||
|
state.length += state.objectMode ? 1 : chunk.length;
|
||||||
|
if (addToFront) {
|
||||||
|
state.buffer.unshift(chunk);
|
||||||
|
} else {
|
||||||
|
state.reading = false;
|
||||||
|
state.buffer.push(chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (state.needReadable)
|
||||||
|
emitReadable(stream);
|
||||||
|
|
||||||
|
maybeReadMore(stream, state);
|
||||||
|
}
|
||||||
|
} else if (!addToFront) {
|
||||||
|
state.reading = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return needMoreData(state);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// if it's past the high water mark, we can push in some more.
|
||||||
|
// Also, if we have no data yet, we can stand some
|
||||||
|
// more bytes. This is to work around cases where hwm=0,
|
||||||
|
// such as the repl. Also, if the push() triggered a
|
||||||
|
// readable event, and the user called read(largeNumber) such that
|
||||||
|
// needReadable was set, then we ought to push more, so that another
|
||||||
|
// 'readable' event will be triggered.
|
||||||
|
function needMoreData(state) {
|
||||||
|
return !state.ended &&
|
||||||
|
(state.needReadable ||
|
||||||
|
state.length < state.highWaterMark ||
|
||||||
|
state.length === 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
// backwards compatibility.
|
||||||
|
Readable.prototype.setEncoding = function(enc) {
|
||||||
|
if (!StringDecoder)
|
||||||
|
StringDecoder = require('string_decoder/').StringDecoder;
|
||||||
|
this._readableState.decoder = new StringDecoder(enc);
|
||||||
|
this._readableState.encoding = enc;
|
||||||
|
};
|
||||||
|
|
||||||
|
// Don't raise the hwm > 128MB
|
||||||
|
var MAX_HWM = 0x800000;
|
||||||
|
function roundUpToNextPowerOf2(n) {
|
||||||
|
if (n >= MAX_HWM) {
|
||||||
|
n = MAX_HWM;
|
||||||
|
} else {
|
||||||
|
// Get the next highest power of 2
|
||||||
|
n--;
|
||||||
|
for (var p = 1; p < 32; p <<= 1) n |= n >> p;
|
||||||
|
n++;
|
||||||
|
}
|
||||||
|
return n;
|
||||||
|
}
|
||||||
|
|
||||||
|
function howMuchToRead(n, state) {
|
||||||
|
if (state.length === 0 && state.ended)
|
||||||
|
return 0;
|
||||||
|
|
||||||
|
if (state.objectMode)
|
||||||
|
return n === 0 ? 0 : 1;
|
||||||
|
|
||||||
|
if (n === null || isNaN(n)) {
|
||||||
|
// only flow one buffer at a time
|
||||||
|
if (state.flowing && state.buffer.length)
|
||||||
|
return state.buffer[0].length;
|
||||||
|
else
|
||||||
|
return state.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (n <= 0)
|
||||||
|
return 0;
|
||||||
|
|
||||||
|
// If we're asking for more than the target buffer level,
|
||||||
|
// then raise the water mark. Bump up to the next highest
|
||||||
|
// power of 2, to prevent increasing it excessively in tiny
|
||||||
|
// amounts.
|
||||||
|
if (n > state.highWaterMark)
|
||||||
|
state.highWaterMark = roundUpToNextPowerOf2(n);
|
||||||
|
|
||||||
|
// don't have that much. return null, unless we've ended.
|
||||||
|
if (n > state.length) {
|
||||||
|
if (!state.ended) {
|
||||||
|
state.needReadable = true;
|
||||||
|
return 0;
|
||||||
|
} else
|
||||||
|
return state.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
return n;
|
||||||
|
}
|
||||||
|
|
||||||
|
// you can override either this method, or the async _read(n) below.
|
||||||
|
Readable.prototype.read = function(n) {
|
||||||
|
var state = this._readableState;
|
||||||
|
state.calledRead = true;
|
||||||
|
var nOrig = n;
|
||||||
|
var ret;
|
||||||
|
|
||||||
|
if (typeof n !== 'number' || n > 0)
|
||||||
|
state.emittedReadable = false;
|
||||||
|
|
||||||
|
// if we're doing read(0) to trigger a readable event, but we
|
||||||
|
// already have a bunch of data in the buffer, then just trigger
|
||||||
|
// the 'readable' event and move on.
|
||||||
|
if (n === 0 &&
|
||||||
|
state.needReadable &&
|
||||||
|
(state.length >= state.highWaterMark || state.ended)) {
|
||||||
|
emitReadable(this);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
n = howMuchToRead(n, state);
|
||||||
|
|
||||||
|
// if we've ended, and we're now clear, then finish it up.
|
||||||
|
if (n === 0 && state.ended) {
|
||||||
|
ret = null;
|
||||||
|
|
||||||
|
// In cases where the decoder did not receive enough data
|
||||||
|
// to produce a full chunk, then immediately received an
|
||||||
|
// EOF, state.buffer will contain [<Buffer >, <Buffer 00 ...>].
|
||||||
|
// howMuchToRead will see this and coerce the amount to
|
||||||
|
// read to zero (because it's looking at the length of the
|
||||||
|
// first <Buffer > in state.buffer), and we'll end up here.
|
||||||
|
//
|
||||||
|
// This can only happen via state.decoder -- no other venue
|
||||||
|
// exists for pushing a zero-length chunk into state.buffer
|
||||||
|
// and triggering this behavior. In this case, we return our
|
||||||
|
// remaining data and end the stream, if appropriate.
|
||||||
|
if (state.length > 0 && state.decoder) {
|
||||||
|
ret = fromList(n, state);
|
||||||
|
state.length -= ret.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (state.length === 0)
|
||||||
|
endReadable(this);
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
// All the actual chunk generation logic needs to be
|
||||||
|
// *below* the call to _read. The reason is that in certain
|
||||||
|
// synthetic stream cases, such as passthrough streams, _read
|
||||||
|
// may be a completely synchronous operation which may change
|
||||||
|
// the state of the read buffer, providing enough data when
|
||||||
|
// before there was *not* enough.
|
||||||
|
//
|
||||||
|
// So, the steps are:
|
||||||
|
// 1. Figure out what the state of things will be after we do
|
||||||
|
// a read from the buffer.
|
||||||
|
//
|
||||||
|
// 2. If that resulting state will trigger a _read, then call _read.
|
||||||
|
// Note that this may be asynchronous, or synchronous. Yes, it is
|
||||||
|
// deeply ugly to write APIs this way, but that still doesn't mean
|
||||||
|
// that the Readable class should behave improperly, as streams are
|
||||||
|
// designed to be sync/async agnostic.
|
||||||
|
// Take note if the _read call is sync or async (ie, if the read call
|
||||||
|
// has returned yet), so that we know whether or not it's safe to emit
|
||||||
|
// 'readable' etc.
|
||||||
|
//
|
||||||
|
// 3. Actually pull the requested chunks out of the buffer and return.
|
||||||
|
|
||||||
|
// if we need a readable event, then we need to do some reading.
|
||||||
|
var doRead = state.needReadable;
|
||||||
|
|
||||||
|
// if we currently have less than the highWaterMark, then also read some
|
||||||
|
if (state.length - n <= state.highWaterMark)
|
||||||
|
doRead = true;
|
||||||
|
|
||||||
|
// however, if we've ended, then there's no point, and if we're already
|
||||||
|
// reading, then it's unnecessary.
|
||||||
|
if (state.ended || state.reading)
|
||||||
|
doRead = false;
|
||||||
|
|
||||||
|
if (doRead) {
|
||||||
|
state.reading = true;
|
||||||
|
state.sync = true;
|
||||||
|
// if the length is currently zero, then we *need* a readable event.
|
||||||
|
if (state.length === 0)
|
||||||
|
state.needReadable = true;
|
||||||
|
// call internal read method
|
||||||
|
this._read(state.highWaterMark);
|
||||||
|
state.sync = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If _read called its callback synchronously, then `reading`
|
||||||
|
// will be false, and we need to re-evaluate how much data we
|
||||||
|
// can return to the user.
|
||||||
|
if (doRead && !state.reading)
|
||||||
|
n = howMuchToRead(nOrig, state);
|
||||||
|
|
||||||
|
if (n > 0)
|
||||||
|
ret = fromList(n, state);
|
||||||
|
else
|
||||||
|
ret = null;
|
||||||
|
|
||||||
|
if (ret === null) {
|
||||||
|
state.needReadable = true;
|
||||||
|
n = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
state.length -= n;
|
||||||
|
|
||||||
|
// If we have nothing in the buffer, then we want to know
|
||||||
|
// as soon as we *do* get something into the buffer.
|
||||||
|
if (state.length === 0 && !state.ended)
|
||||||
|
state.needReadable = true;
|
||||||
|
|
||||||
|
// If we happened to read() exactly the remaining amount in the
|
||||||
|
// buffer, and the EOF has been seen at this point, then make sure
|
||||||
|
// that we emit 'end' on the very next tick.
|
||||||
|
if (state.ended && !state.endEmitted && state.length === 0)
|
||||||
|
endReadable(this);
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
};
|
||||||
|
|
||||||
|
function chunkInvalid(state, chunk) {
|
||||||
|
var er = null;
|
||||||
|
if (!Buffer.isBuffer(chunk) &&
|
||||||
|
'string' !== typeof chunk &&
|
||||||
|
chunk !== null &&
|
||||||
|
chunk !== undefined &&
|
||||||
|
!state.objectMode) {
|
||||||
|
er = new TypeError('Invalid non-string/buffer chunk');
|
||||||
|
}
|
||||||
|
return er;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function onEofChunk(stream, state) {
|
||||||
|
if (state.decoder && !state.ended) {
|
||||||
|
var chunk = state.decoder.end();
|
||||||
|
if (chunk && chunk.length) {
|
||||||
|
state.buffer.push(chunk);
|
||||||
|
state.length += state.objectMode ? 1 : chunk.length;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
state.ended = true;
|
||||||
|
|
||||||
|
// if we've ended and we have some data left, then emit
|
||||||
|
// 'readable' now to make sure it gets picked up.
|
||||||
|
if (state.length > 0)
|
||||||
|
emitReadable(stream);
|
||||||
|
else
|
||||||
|
endReadable(stream);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Don't emit readable right away in sync mode, because this can trigger
|
||||||
|
// another read() call => stack overflow. This way, it might trigger
|
||||||
|
// a nextTick recursion warning, but that's not so bad.
|
||||||
|
function emitReadable(stream) {
|
||||||
|
var state = stream._readableState;
|
||||||
|
state.needReadable = false;
|
||||||
|
if (state.emittedReadable)
|
||||||
|
return;
|
||||||
|
|
||||||
|
state.emittedReadable = true;
|
||||||
|
if (state.sync)
|
||||||
|
process.nextTick(function() {
|
||||||
|
emitReadable_(stream);
|
||||||
|
});
|
||||||
|
else
|
||||||
|
emitReadable_(stream);
|
||||||
|
}
|
||||||
|
|
||||||
|
function emitReadable_(stream) {
|
||||||
|
stream.emit('readable');
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// at this point, the user has presumably seen the 'readable' event,
|
||||||
|
// and called read() to consume some data. that may have triggered
|
||||||
|
// in turn another _read(n) call, in which case reading = true if
|
||||||
|
// it's in progress.
|
||||||
|
// However, if we're not ended, or reading, and the length < hwm,
|
||||||
|
// then go ahead and try to read some more preemptively.
|
||||||
|
function maybeReadMore(stream, state) {
|
||||||
|
if (!state.readingMore) {
|
||||||
|
state.readingMore = true;
|
||||||
|
process.nextTick(function() {
|
||||||
|
maybeReadMore_(stream, state);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function maybeReadMore_(stream, state) {
|
||||||
|
var len = state.length;
|
||||||
|
while (!state.reading && !state.flowing && !state.ended &&
|
||||||
|
state.length < state.highWaterMark) {
|
||||||
|
stream.read(0);
|
||||||
|
if (len === state.length)
|
||||||
|
// didn't get any data, stop spinning.
|
||||||
|
break;
|
||||||
|
else
|
||||||
|
len = state.length;
|
||||||
|
}
|
||||||
|
state.readingMore = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// abstract method. to be overridden in specific implementation classes.
|
||||||
|
// call cb(er, data) where data is <= n in length.
|
||||||
|
// for virtual (non-string, non-buffer) streams, "length" is somewhat
|
||||||
|
// arbitrary, and perhaps not very meaningful.
|
||||||
|
Readable.prototype._read = function(n) {
|
||||||
|
this.emit('error', new Error('not implemented'));
|
||||||
|
};
|
||||||
|
|
||||||
|
Readable.prototype.pipe = function(dest, pipeOpts) {
|
||||||
|
var src = this;
|
||||||
|
var state = this._readableState;
|
||||||
|
|
||||||
|
switch (state.pipesCount) {
|
||||||
|
case 0:
|
||||||
|
state.pipes = dest;
|
||||||
|
break;
|
||||||
|
case 1:
|
||||||
|
state.pipes = [state.pipes, dest];
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
state.pipes.push(dest);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
state.pipesCount += 1;
|
||||||
|
|
||||||
|
var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
|
||||||
|
dest !== process.stdout &&
|
||||||
|
dest !== process.stderr;
|
||||||
|
|
||||||
|
var endFn = doEnd ? onend : cleanup;
|
||||||
|
if (state.endEmitted)
|
||||||
|
process.nextTick(endFn);
|
||||||
|
else
|
||||||
|
src.once('end', endFn);
|
||||||
|
|
||||||
|
dest.on('unpipe', onunpipe);
|
||||||
|
function onunpipe(readable) {
|
||||||
|
if (readable !== src) return;
|
||||||
|
cleanup();
|
||||||
|
}
|
||||||
|
|
||||||
|
function onend() {
|
||||||
|
dest.end();
|
||||||
|
}
|
||||||
|
|
||||||
|
// when the dest drains, it reduces the awaitDrain counter
|
||||||
|
// on the source. This would be more elegant with a .once()
|
||||||
|
// handler in flow(), but adding and removing repeatedly is
|
||||||
|
// too slow.
|
||||||
|
var ondrain = pipeOnDrain(src);
|
||||||
|
dest.on('drain', ondrain);
|
||||||
|
|
||||||
|
function cleanup() {
|
||||||
|
// cleanup event handlers once the pipe is broken
|
||||||
|
dest.removeListener('close', onclose);
|
||||||
|
dest.removeListener('finish', onfinish);
|
||||||
|
dest.removeListener('drain', ondrain);
|
||||||
|
dest.removeListener('error', onerror);
|
||||||
|
dest.removeListener('unpipe', onunpipe);
|
||||||
|
src.removeListener('end', onend);
|
||||||
|
src.removeListener('end', cleanup);
|
||||||
|
|
||||||
|
// if the reader is waiting for a drain event from this
|
||||||
|
// specific writer, then it would cause it to never start
|
||||||
|
// flowing again.
|
||||||
|
// So, if this is awaiting a drain, then we just call it now.
|
||||||
|
// If we don't know, then assume that we are waiting for one.
|
||||||
|
if (!dest._writableState || dest._writableState.needDrain)
|
||||||
|
ondrain();
|
||||||
|
}
|
||||||
|
|
||||||
|
// if the dest has an error, then stop piping into it.
|
||||||
|
// however, don't suppress the throwing behavior for this.
|
||||||
|
function onerror(er) {
|
||||||
|
unpipe();
|
||||||
|
dest.removeListener('error', onerror);
|
||||||
|
if (EE.listenerCount(dest, 'error') === 0)
|
||||||
|
dest.emit('error', er);
|
||||||
|
}
|
||||||
|
// This is a brutally ugly hack to make sure that our error handler
|
||||||
|
// is attached before any userland ones. NEVER DO THIS.
|
||||||
|
if (!dest._events || !dest._events.error)
|
||||||
|
dest.on('error', onerror);
|
||||||
|
else if (isArray(dest._events.error))
|
||||||
|
dest._events.error.unshift(onerror);
|
||||||
|
else
|
||||||
|
dest._events.error = [onerror, dest._events.error];
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// Both close and finish should trigger unpipe, but only once.
|
||||||
|
function onclose() {
|
||||||
|
dest.removeListener('finish', onfinish);
|
||||||
|
unpipe();
|
||||||
|
}
|
||||||
|
dest.once('close', onclose);
|
||||||
|
function onfinish() {
|
||||||
|
dest.removeListener('close', onclose);
|
||||||
|
unpipe();
|
||||||
|
}
|
||||||
|
dest.once('finish', onfinish);
|
||||||
|
|
||||||
|
function unpipe() {
|
||||||
|
src.unpipe(dest);
|
||||||
|
}
|
||||||
|
|
||||||
|
// tell the dest that it's being piped to
|
||||||
|
dest.emit('pipe', src);
|
||||||
|
|
||||||
|
// start the flow if it hasn't been started already.
|
||||||
|
if (!state.flowing) {
|
||||||
|
// the handler that waits for readable events after all
|
||||||
|
// the data gets sucked out in flow.
|
||||||
|
// This would be easier to follow with a .once() handler
|
||||||
|
// in flow(), but that is too slow.
|
||||||
|
this.on('readable', pipeOnReadable);
|
||||||
|
|
||||||
|
state.flowing = true;
|
||||||
|
process.nextTick(function() {
|
||||||
|
flow(src);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return dest;
|
||||||
|
};
|
||||||
|
|
||||||
|
function pipeOnDrain(src) {
|
||||||
|
return function() {
|
||||||
|
var dest = this;
|
||||||
|
var state = src._readableState;
|
||||||
|
state.awaitDrain--;
|
||||||
|
if (state.awaitDrain === 0)
|
||||||
|
flow(src);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function flow(src) {
|
||||||
|
var state = src._readableState;
|
||||||
|
var chunk;
|
||||||
|
state.awaitDrain = 0;
|
||||||
|
|
||||||
|
function write(dest, i, list) {
|
||||||
|
var written = dest.write(chunk);
|
||||||
|
if (false === written) {
|
||||||
|
state.awaitDrain++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
while (state.pipesCount && null !== (chunk = src.read())) {
|
||||||
|
|
||||||
|
if (state.pipesCount === 1)
|
||||||
|
write(state.pipes, 0, null);
|
||||||
|
else
|
||||||
|
forEach(state.pipes, write);
|
||||||
|
|
||||||
|
src.emit('data', chunk);
|
||||||
|
|
||||||
|
// if anyone needs a drain, then we have to wait for that.
|
||||||
|
if (state.awaitDrain > 0)
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// if every destination was unpiped, either before entering this
|
||||||
|
// function, or in the while loop, then stop flowing.
|
||||||
|
//
|
||||||
|
// NB: This is a pretty rare edge case.
|
||||||
|
if (state.pipesCount === 0) {
|
||||||
|
state.flowing = false;
|
||||||
|
|
||||||
|
// if there were data event listeners added, then switch to old mode.
|
||||||
|
if (EE.listenerCount(src, 'data') > 0)
|
||||||
|
emitDataEvents(src);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
  // at this point, no one needed a drain, so we just ran out of data
  // on the next readable event, start it over again.
  state.ranOut = true;
}

function pipeOnReadable() {
  if (this._readableState.ranOut) {
    this._readableState.ranOut = false;
    flow(this);
  }
}


Readable.prototype.unpipe = function(dest) {
  var state = this._readableState;

  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0)
    return this;

  // just one destination.  most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes)
      return this;

    if (!dest)
      dest = state.pipes;

    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }

  // slow case. multiple pipe destinations.

  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;

    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }

  // try to find the right one.
  var i = indexOf(state.pipes, dest);
  if (i === -1)
    return this;

  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];

  dest.emit('unpipe', this);

  return this;
};

// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function(ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);

  if (ev === 'data' && !this._readableState.flowing)
    emitDataEvents(this);

  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        this.read(0);
      } else if (state.length) {
        emitReadable(this, state);
      }
    }
  }

  return res;
};
Readable.prototype.addListener = Readable.prototype.on;

// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function() {
  emitDataEvents(this);
  this.read(0);
  this.emit('resume');
};

Readable.prototype.pause = function() {
  emitDataEvents(this, true);
  this.emit('pause');
};

function emitDataEvents(stream, startPaused) {
  var state = stream._readableState;

  if (state.flowing) {
    // https://github.com/isaacs/readable-stream/issues/16
    throw new Error('Cannot switch to old mode now.');
  }

  var paused = startPaused || false;
  var readable = false;

  // convert to an old-style stream.
  stream.readable = true;
  stream.pipe = Stream.prototype.pipe;
  stream.on = stream.addListener = Stream.prototype.on;

  stream.on('readable', function() {
    readable = true;

    var c;
    while (!paused && (null !== (c = stream.read())))
      stream.emit('data', c);

    if (c === null) {
      readable = false;
      stream._readableState.needReadable = true;
    }
  });

  stream.pause = function() {
    paused = true;
    this.emit('pause');
  };

  stream.resume = function() {
    paused = false;
    if (readable)
      process.nextTick(function() {
        stream.emit('readable');
      });
    else
      this.read(0);
    this.emit('resume');
  };

  // now make it start, just in case it hadn't already.
  stream.emit('readable');
}

// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;

  var self = this;
  stream.on('end', function() {
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }

    self.push(null);
  });

  stream.on('data', function(chunk) {
    if (state.decoder)
      chunk = state.decoder.write(chunk);

    // don't skip over falsy values in objectMode
    //if (state.objectMode && util.isNullOrUndefined(chunk))
    if (state.objectMode && (chunk === null || chunk === undefined))
      return;
    else if (!state.objectMode && (!chunk || !chunk.length))
      return;

    var ret = self.push(chunk);
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (typeof stream[i] === 'function' &&
        typeof this[i] === 'undefined') {
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }

  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach(events, function(ev) {
    stream.on(ev, self.emit.bind(self, ev));
  });

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return self;
};



// exposed for testing purposes only.
Readable._fromList = fromList;

// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
function fromList(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;
  var objectMode = !!state.objectMode;
  var ret;

  // nothing in the list, definitely empty.
  if (list.length === 0)
    return null;

  if (length === 0)
    ret = null;
  else if (objectMode)
    ret = list.shift();
  else if (!n || n >= length) {
    // read it all, truncate the array.
    if (stringMode)
      ret = list.join('');
    else
      ret = Buffer.concat(list, length);
    list.length = 0;
  } else {
    // read just some of it.
    if (n < list[0].length) {
      // just take a part of the first list item.
      // slice is the same for buffers and strings.
      var buf = list[0];
      ret = buf.slice(0, n);
      list[0] = buf.slice(n);
    } else if (n === list[0].length) {
      // first list is a perfect match
      ret = list.shift();
    } else {
      // complex case.
      // we have enough to cover it, but it spans past the first buffer.
      if (stringMode)
        ret = '';
      else
        ret = new Buffer(n);

      var c = 0;
      for (var i = 0, l = list.length; i < l && c < n; i++) {
        var buf = list[0];
        var cpy = Math.min(n - c, buf.length);

        if (stringMode)
          ret += buf.slice(0, cpy);
        else
          buf.copy(ret, c, 0, cpy);

        if (cpy < buf.length)
          list[0] = buf.slice(cpy);
        else
          list.shift();

        c += cpy;
      }
    }
  }

  return ret;
}

function endReadable(stream) {
  var state = stream._readableState;

  // If we get here before consuming all the bytes, then that is a
  // bug in node.  Should never happen.
  if (state.length > 0)
    throw new Error('endReadable called on non-empty stream');

  if (!state.endEmitted && state.calledRead) {
    state.ended = true;
    process.nextTick(function() {
      // Check that we didn't get one last unshift.
      if (!state.endEmitted && state.length === 0) {
        state.endEmitted = true;
        stream.readable = false;
        stream.emit('end');
      }
    });
  }
}

function forEach (xs, f) {
  for (var i = 0, l = xs.length; i < l; i++) {
    f(xs[i], i);
  }
}

function indexOf (xs, x) {
  for (var i = 0, l = xs.length; i < l; i++) {
    if (xs[i] === x) return i;
  }
  return -1;
}
210
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/lib/_stream_transform.js
generated
vendored
Normal file
@@ -0,0 +1,210 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.


// a transform stream is a readable/writable stream where you do
// something with the data.  Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored.  (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation.  For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes.  When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up.  When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer.  When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks.  If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk.  However,
// a pathological inflate type of transform can cause excessive buffering
// here.  For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output.  Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output.  In this case, you could write a very small
// amount of input, and end up with a very large amount of output.  In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform.  A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.

module.exports = Transform;

var Duplex = require('./_stream_duplex');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

util.inherits(Transform, Duplex);


function TransformState(options, stream) {
  this.afterTransform = function(er, data) {
    return afterTransform(stream, er, data);
  };

  this.needTransform = false;
  this.transforming = false;
  this.writecb = null;
  this.writechunk = null;
}

function afterTransform(stream, er, data) {
  var ts = stream._transformState;
  ts.transforming = false;

  var cb = ts.writecb;

  if (!cb)
    return stream.emit('error', new Error('no writecb in Transform class'));

  ts.writechunk = null;
  ts.writecb = null;

  if (data !== null && data !== undefined)
    stream.push(data);

  if (cb)
    cb(er);

  var rs = stream._readableState;
  rs.reading = false;
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    stream._read(rs.highWaterMark);
  }
}


function Transform(options) {
  if (!(this instanceof Transform))
    return new Transform(options);

  Duplex.call(this, options);

  var ts = this._transformState = new TransformState(options, this);

  // when the writable side finishes, then flush out anything remaining.
  var stream = this;

  // start out asking for a readable event once data is transformed.
  this._readableState.needReadable = true;

  // we have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false;

  this.once('finish', function() {
    if ('function' === typeof this._flush)
      this._flush(function(er) {
        done(stream, er);
      });
    else
      done(stream);
  });
}

Transform.prototype.push = function(chunk, encoding) {
  this._transformState.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};

// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side.  You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk.  If you pass
// an error, then that'll put the hurt on the whole operation.  If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function(chunk, encoding, cb) {
  throw new Error('not implemented');
};

Transform.prototype._write = function(chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (!ts.transforming) {
    var rs = this._readableState;
    if (ts.needTransform ||
        rs.needReadable ||
        rs.length < rs.highWaterMark)
      this._read(rs.highWaterMark);
  }
};

// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function(n) {
  var ts = this._transformState;

  if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
    ts.transforming = true;
    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
  } else {
    // mark that we need a transform, so that any data that comes in
    // will get processed, now that we've asked for it.
    ts.needTransform = true;
  }
};


function done(stream, er) {
  if (er)
    return stream.emit('error', er);

  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  var ws = stream._writableState;
  var rs = stream._readableState;
  var ts = stream._transformState;

  if (ws.length)
    throw new Error('calling transform done when ws.length != 0');

  if (ts.transforming)
    throw new Error('calling transform done when still transforming');

  return stream.push(null);
}
386
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/lib/_stream_writable.js
generated
vendored
Normal file
@@ -0,0 +1,386 @@
|
|||||||
|
// Copyright Joyent, Inc. and other Node contributors.
|
||||||
|
//
|
||||||
|
// Permission is hereby granted, free of charge, to any person obtaining a
|
||||||
|
// copy of this software and associated documentation files (the
|
||||||
|
// "Software"), to deal in the Software without restriction, including
|
||||||
|
// without limitation the rights to use, copy, modify, merge, publish,
|
||||||
|
// distribute, sublicense, and/or sell copies of the Software, and to permit
|
||||||
|
// persons to whom the Software is furnished to do so, subject to the
|
||||||
|
// following conditions:
|
||||||
|
//
|
||||||
|
// The above copyright notice and this permission notice shall be included
|
||||||
|
// in all copies or substantial portions of the Software.
|
||||||
|
//
|
||||||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||||
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||||
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||||
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
|
||||||
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
|
||||||
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||||
|
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
// A bit simpler than readable streams.
|
||||||
|
// Implement an async ._write(chunk, cb), and it'll handle all
|
||||||
|
// the drain event emission and buffering.
|
||||||
|
|
||||||
|
module.exports = Writable;
|
||||||
|
|
||||||
|
/*<replacement>*/
|
||||||
|
var Buffer = require('buffer').Buffer;
|
||||||
|
/*</replacement>*/
|
||||||
|
|
||||||
|
Writable.WritableState = WritableState;
|
||||||
|
|
||||||
|
|
||||||
|
/*<replacement>*/
|
||||||
|
var util = require('core-util-is');
|
||||||
|
util.inherits = require('inherits');
|
||||||
|
/*</replacement>*/
|
||||||
|
|
||||||
|
var Stream = require('stream');
|
||||||
|
|
||||||
|
util.inherits(Writable, Stream);
|
||||||
|
|
||||||
|
function WriteReq(chunk, encoding, cb) {
|
||||||
|
this.chunk = chunk;
|
||||||
|
this.encoding = encoding;
|
||||||
|
this.callback = cb;
|
||||||
|
}
|
||||||
|
|
||||||
|
function WritableState(options, stream) {
|
||||||
|
options = options || {};
|
||||||
|
|
||||||
|
// the point at which write() starts returning false
|
||||||
|
// Note: 0 is a valid value, means that we always return false if
|
||||||
|
// the entire buffer is not flushed immediately on write()
|
||||||
|
var hwm = options.highWaterMark;
|
||||||
|
this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
|
||||||
|
|
||||||
|
// object stream flag to indicate whether or not this stream
|
||||||
|
// contains buffers or objects.
|
||||||
|
this.objectMode = !!options.objectMode;
|
||||||
|
|
||||||
|
// cast to ints.
|
||||||
|
this.highWaterMark = ~~this.highWaterMark;
|
||||||
|
|
||||||
|
this.needDrain = false;
|
||||||
|
// at the start of calling end()
|
||||||
|
this.ending = false;
|
||||||
|
// when end() has been called, and returned
|
||||||
|
this.ended = false;
|
||||||
|
// when 'finish' is emitted
|
||||||
|
this.finished = false;
|
||||||
|
|
||||||
|
// should we decode strings into buffers before passing to _write?
|
||||||
|
// this is here so that some node-core streams can optimize string
|
||||||
|
// handling at a lower level.
|
||||||
|
var noDecode = options.decodeStrings === false;
|
||||||
|
this.decodeStrings = !noDecode;
|
||||||
|
|
||||||
|
// Crypto is kind of old and crusty. Historically, its default string
|
||||||
|
// encoding is 'binary' so we have to make this configurable.
|
||||||
|
// Everything else in the universe uses 'utf8', though.
|
||||||
|
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
||||||
|
|
||||||
|
// not an actual buffer we keep track of, but a measurement
|
||||||
|
// of how much we're waiting to get pushed to some underlying
|
||||||
|
// socket or file.
|
||||||
|
this.length = 0;
|
||||||
|
|
||||||
|
// a flag to see when we're in the middle of a write.
|
||||||
|
this.writing = false;
|
||||||
|
|
||||||
|
// a flag to be able to tell if the onwrite cb is called immediately,
|
||||||
|
// or on a later tick. We set this to true at first, becuase any
|
||||||
|
// actions that shouldn't happen until "later" should generally also
|
||||||
|
// not happen before the first write call.
|
||||||
|
this.sync = true;
|
||||||
|
|
||||||
|
// a flag to know if we're processing previously buffered items, which
|
||||||
|
// may call the _write() callback in the same tick, so that we don't
|
||||||
|
// end up in an overlapped onwrite situation.
|
||||||
|
this.bufferProcessing = false;
|
||||||
|
|
||||||
|
// the callback that's passed to _write(chunk,cb)
|
||||||
|
this.onwrite = function(er) {
|
||||||
|
onwrite(stream, er);
|
||||||
|
};
|
||||||
|
|
||||||
|
// the callback that the user supplies to write(chunk,encoding,cb)
|
||||||
|
this.writecb = null;
|
||||||
|
|
||||||
|
// the amount that is being written when _write is called.
|
||||||
|
this.writelen = 0;
|
||||||
|
|
||||||
|
this.buffer = [];
|
||||||
|
|
||||||
|
// True if the error was already emitted and should not be thrown again
|
||||||
|
this.errorEmitted = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
function Writable(options) {
|
||||||
|
var Duplex = require('./_stream_duplex');
|
||||||
|
|
||||||
|
// Writable ctor is applied to Duplexes, though they're not
|
||||||
|
// instanceof Writable, they're instanceof Readable.
|
||||||
|
if (!(this instanceof Writable) && !(this instanceof Duplex))
|
||||||
|
return new Writable(options);
|
||||||
|
|
||||||
|
this._writableState = new WritableState(options, this);
|
||||||
|
|
||||||
|
// legacy.
|
||||||
|
this.writable = true;
|
||||||
|
|
||||||
|
Stream.call(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise people can pipe Writable streams, which is just wrong.
|
||||||
|
Writable.prototype.pipe = function() {
|
||||||
|
this.emit('error', new Error('Cannot pipe. Not readable.'));
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
function writeAfterEnd(stream, state, cb) {
|
||||||
|
var er = new Error('write after end');
|
||||||
|
// TODO: defer error events consistently everywhere, not just the cb
|
||||||
|
stream.emit('error', er);
|
||||||
|
process.nextTick(function() {
|
||||||
|
cb(er);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we get something that is not a buffer, string, null, or undefined,
|
||||||
|
// and we're not in objectMode, then that's an error.
|
||||||
|
// Otherwise stream chunks are all considered to be of length=1, and the
|
||||||
|
// watermarks determine how many objects to keep in the buffer, rather than
|
||||||
|
// how many bytes or characters.
|
||||||
|
function validChunk(stream, state, chunk, cb) {
|
||||||
|
var valid = true;
|
||||||
|
if (!Buffer.isBuffer(chunk) &&
|
||||||
|
'string' !== typeof chunk &&
|
||||||
|
chunk !== null &&
|
||||||
|
chunk !== undefined &&
|
||||||
|
!state.objectMode) {
|
||||||
|
var er = new TypeError('Invalid non-string/buffer chunk');
|
||||||
|
stream.emit('error', er);
|
||||||
|
process.nextTick(function() {
|
||||||
|
cb(er);
|
||||||
|
});
|
||||||
|
valid = false;
|
||||||
|
}
|
||||||
|
return valid;
|
||||||
|
}
|
||||||
|
|
||||||
|
Writable.prototype.write = function(chunk, encoding, cb) {
|
||||||
|
var state = this._writableState;
|
||||||
|
var ret = false;
|
||||||
|
|
||||||
|
if (typeof encoding === 'function') {
|
||||||
|
cb = encoding;
|
||||||
|
encoding = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Buffer.isBuffer(chunk))
|
||||||
|
encoding = 'buffer';
|
||||||
|
else if (!encoding)
|
||||||
|
encoding = state.defaultEncoding;
|
||||||
|
|
||||||
|
if (typeof cb !== 'function')
|
||||||
|
cb = function() {};
|
||||||
|
|
||||||
|
if (state.ended)
|
||||||
|
writeAfterEnd(this, state, cb);
|
||||||
|
else if (validChunk(this, state, chunk, cb))
|
||||||
|
ret = writeOrBuffer(this, state, chunk, encoding, cb);
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
};
|
||||||
|
|
||||||
|
function decodeChunk(state, chunk, encoding) {
|
||||||
|
if (!state.objectMode &&
|
||||||
|
state.decodeStrings !== false &&
|
||||||
|
typeof chunk === 'string') {
|
||||||
|
chunk = new Buffer(chunk, encoding);
|
||||||
|
}
|
||||||
|
return chunk;
|
||||||
|
}
|
||||||
|
|
||||||
|
// if we're already writing something, then just put this
|
||||||
|
// in the queue, and wait our turn. Otherwise, call _write
|
||||||
|
// If we return false, then we need a drain event, so set that flag.
|
||||||
|
function writeOrBuffer(stream, state, chunk, encoding, cb) {
|
||||||
|
chunk = decodeChunk(state, chunk, encoding);
|
||||||
|
if (Buffer.isBuffer(chunk))
|
||||||
|
encoding = 'buffer';
|
||||||
|
var len = state.objectMode ? 1 : chunk.length;
|
||||||
|
|
||||||
|
state.length += len;
|
||||||
|
|
||||||
|
var ret = state.length < state.highWaterMark;
|
||||||
|
// we must ensure that previous needDrain will not be reset to false.
|
||||||
|
if (!ret)
|
||||||
|
state.needDrain = true;
|
||||||
|
|
||||||
|
if (state.writing)
|
||||||
|
state.buffer.push(new WriteReq(chunk, encoding, cb));
|
||||||
|
else
|
||||||
|
doWrite(stream, state, len, chunk, encoding, cb);
|
||||||
|
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
function doWrite(stream, state, len, chunk, encoding, cb) {
|
||||||
|
state.writelen = len;
|
||||||
|
state.writecb = cb;
|
||||||
|
state.writing = true;
|
||||||
|
state.sync = true;
|
||||||
|
stream._write(chunk, encoding, state.onwrite);
|
||||||
|
state.sync = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
function onwriteError(stream, state, sync, er, cb) {
|
||||||
|
if (sync)
|
||||||
|
process.nextTick(function() {
|
||||||
|
cb(er);
|
||||||
|
});
|
||||||
|
else
|
||||||
|
cb(er);
|
||||||
|
|
||||||
|
stream._writableState.errorEmitted = true;
|
||||||
|
stream.emit('error', er);
|
||||||
|
}
|
||||||
|
|
||||||
|
function onwriteStateUpdate(state) {
|
||||||
|
state.writing = false;
|
||||||
|
state.writecb = null;
|
||||||
|
state.length -= state.writelen;
|
||||||
|
state.writelen = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
function onwrite(stream, er) {
|
||||||
|
var state = stream._writableState;
|
||||||
|
var sync = state.sync;
|
||||||
|
var cb = state.writecb;
|
||||||
|
|
||||||
|
onwriteStateUpdate(state);
|
||||||
|
|
||||||
|
if (er)
|
||||||
|
onwriteError(stream, state, sync, er, cb);
|
||||||
|
else {
|
||||||
|
// Check if we're actually ready to finish, but don't emit yet
|
||||||
|
var finished = needFinish(stream, state);
|
||||||
|
|
||||||
|
if (!finished && !state.bufferProcessing && state.buffer.length)
|
||||||
|
clearBuffer(stream, state);
|
||||||
|
|
||||||
|
if (sync) {
|
||||||
|
process.nextTick(function() {
|
||||||
|
afterWrite(stream, state, finished, cb);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
afterWrite(stream, state, finished, cb);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function afterWrite(stream, state, finished, cb) {
|
||||||
|
if (!finished)
|
||||||
|
onwriteDrain(stream, state);
|
||||||
|
cb();
|
||||||
|
if (finished)
|
||||||
|
finishMaybe(stream, state);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Must force callback to be called on nextTick, so that we don't
|
||||||
|
// emit 'drain' before the write() consumer gets the 'false' return
|
||||||
|
// value, and has a chance to attach a 'drain' listener.
|
||||||
|
function onwriteDrain(stream, state) {
|
||||||
|
if (state.length === 0 && state.needDrain) {
|
||||||
|
state.needDrain = false;
|
||||||
|
stream.emit('drain');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// if there's something in the buffer waiting, then process it
|
||||||
|
function clearBuffer(stream, state) {
|
||||||
|
state.bufferProcessing = true;
|
||||||
|
|
||||||
|
for (var c = 0; c < state.buffer.length; c++) {
|
||||||
|
var entry = state.buffer[c];
|
||||||
|
var chunk = entry.chunk;
|
||||||
|
var encoding = entry.encoding;
|
||||||
|
var cb = entry.callback;
|
||||||
|
var len = state.objectMode ? 1 : chunk.length;
|
||||||
|
|
||||||
|
doWrite(stream, state, len, chunk, encoding, cb);
|
||||||
|
|
||||||
|
// if we didn't call the onwrite immediately, then
|
||||||
|
// it means that we need to wait until it does.
|
||||||
|
// also, that means that the chunk and cb are currently
|
||||||
|
// being processed, so move the buffer counter past them.
|
||||||
|
if (state.writing) {
|
||||||
|
c++;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
state.bufferProcessing = false;
|
||||||
|
if (c < state.buffer.length)
|
||||||
|
state.buffer = state.buffer.slice(c);
|
||||||
|
else
|
||||||
|
state.buffer.length = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
Writable.prototype._write = function(chunk, encoding, cb) {
|
||||||
|
cb(new Error('not implemented'));
|
||||||
|
};
|
||||||
|
|
||||||
|
Writable.prototype.end = function(chunk, encoding, cb) {
|
||||||
|
var state = this._writableState;
|
||||||
|
|
||||||
|
if (typeof chunk === 'function') {
|
||||||
|
cb = chunk;
|
||||||
|
chunk = null;
|
||||||
|
encoding = null;
|
||||||
|
} else if (typeof encoding === 'function') {
|
||||||
|
cb = encoding;
|
||||||
|
encoding = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof chunk !== 'undefined' && chunk !== null)
|
||||||
|
this.write(chunk, encoding);
|
||||||
|
|
||||||
|
// ignore unnecessary end() calls.
|
||||||
|
if (!state.ending && !state.finished)
|
||||||
|
endWritable(this, state, cb);
|
||||||
|
};
|
||||||
|
|
||||||
|
|
||||||
|
function needFinish(stream, state) {
|
||||||
|
return (state.ending &&
|
||||||
|
state.length === 0 &&
|
||||||
|
!state.finished &&
|
||||||
|
!state.writing);
|
||||||
|
}
|
||||||
|
|
||||||
|
function finishMaybe(stream, state) {
|
||||||
|
var need = needFinish(stream, state);
|
||||||
|
if (need) {
|
||||||
|
state.finished = true;
|
||||||
|
stream.emit('finish');
|
||||||
|
}
|
||||||
|
return need;
|
||||||
|
}
|
||||||
|
|
||||||
|
function endWritable(stream, state, cb) {
|
||||||
|
state.ending = true;
|
||||||
|
finishMaybe(stream, state);
|
||||||
|
if (cb) {
|
||||||
|
if (state.finished)
|
||||||
|
process.nextTick(cb);
|
||||||
|
else
|
||||||
|
stream.once('finish', cb);
|
||||||
|
}
|
||||||
|
state.ended = true;
|
||||||
|
}
|
||||||
3
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/README.md
generated
vendored
Normal file
@@ -0,0 +1,3 @@
# core-util-is

The `util.is*` functions introduced in Node v0.12.
604
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/float.patch
generated
vendored
Normal file
@@ -0,0 +1,604 @@
|
|||||||
|
diff --git a/lib/util.js b/lib/util.js
|
||||||
|
index a03e874..9074e8e 100644
|
||||||
|
--- a/lib/util.js
|
||||||
|
+++ b/lib/util.js
|
||||||
|
@@ -19,430 +19,6 @@
|
||||||
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
|
||||||
|
// USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
-var formatRegExp = /%[sdj%]/g;
|
||||||
|
-exports.format = function(f) {
|
||||||
|
- if (!isString(f)) {
|
||||||
|
- var objects = [];
|
||||||
|
- for (var i = 0; i < arguments.length; i++) {
|
||||||
|
- objects.push(inspect(arguments[i]));
|
||||||
|
- }
|
||||||
|
- return objects.join(' ');
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- var i = 1;
|
||||||
|
- var args = arguments;
|
||||||
|
- var len = args.length;
|
||||||
|
- var str = String(f).replace(formatRegExp, function(x) {
|
||||||
|
- if (x === '%%') return '%';
|
||||||
|
- if (i >= len) return x;
|
||||||
|
- switch (x) {
|
||||||
|
- case '%s': return String(args[i++]);
|
||||||
|
- case '%d': return Number(args[i++]);
|
||||||
|
- case '%j':
|
||||||
|
- try {
|
||||||
|
- return JSON.stringify(args[i++]);
|
||||||
|
- } catch (_) {
|
||||||
|
- return '[Circular]';
|
||||||
|
- }
|
||||||
|
- default:
|
||||||
|
- return x;
|
||||||
|
- }
|
||||||
|
- });
|
||||||
|
- for (var x = args[i]; i < len; x = args[++i]) {
|
||||||
|
- if (isNull(x) || !isObject(x)) {
|
||||||
|
- str += ' ' + x;
|
||||||
|
- } else {
|
||||||
|
- str += ' ' + inspect(x);
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
- return str;
|
||||||
|
-};
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-// Mark that a method should not be used.
|
||||||
|
-// Returns a modified function which warns once by default.
|
||||||
|
-// If --no-deprecation is set, then it is a no-op.
|
||||||
|
-exports.deprecate = function(fn, msg) {
|
||||||
|
- // Allow for deprecating things in the process of starting up.
|
||||||
|
- if (isUndefined(global.process)) {
|
||||||
|
- return function() {
|
||||||
|
- return exports.deprecate(fn, msg).apply(this, arguments);
|
||||||
|
- };
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- if (process.noDeprecation === true) {
|
||||||
|
- return fn;
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- var warned = false;
|
||||||
|
- function deprecated() {
|
||||||
|
- if (!warned) {
|
||||||
|
- if (process.throwDeprecation) {
|
||||||
|
- throw new Error(msg);
|
||||||
|
- } else if (process.traceDeprecation) {
|
||||||
|
- console.trace(msg);
|
||||||
|
- } else {
|
||||||
|
- console.error(msg);
|
||||||
|
- }
|
||||||
|
- warned = true;
|
||||||
|
- }
|
||||||
|
- return fn.apply(this, arguments);
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- return deprecated;
|
||||||
|
-};
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-var debugs = {};
|
||||||
|
-var debugEnviron;
|
||||||
|
-exports.debuglog = function(set) {
|
||||||
|
- if (isUndefined(debugEnviron))
|
||||||
|
- debugEnviron = process.env.NODE_DEBUG || '';
|
||||||
|
- set = set.toUpperCase();
|
||||||
|
- if (!debugs[set]) {
|
||||||
|
- if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) {
|
||||||
|
- var pid = process.pid;
|
||||||
|
- debugs[set] = function() {
|
||||||
|
- var msg = exports.format.apply(exports, arguments);
|
||||||
|
- console.error('%s %d: %s', set, pid, msg);
|
||||||
|
- };
|
||||||
|
- } else {
|
||||||
|
- debugs[set] = function() {};
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
- return debugs[set];
|
||||||
|
-};
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-/**
|
||||||
|
- * Echos the value of a value. Trys to print the value out
|
||||||
|
- * in the best way possible given the different types.
|
||||||
|
- *
|
||||||
|
- * @param {Object} obj The object to print out.
|
||||||
|
- * @param {Object} opts Optional options object that alters the output.
|
||||||
|
- */
|
||||||
|
-/* legacy: obj, showHidden, depth, colors*/
|
||||||
|
-function inspect(obj, opts) {
|
||||||
|
- // default options
|
||||||
|
- var ctx = {
|
||||||
|
- seen: [],
|
||||||
|
- stylize: stylizeNoColor
|
||||||
|
- };
|
||||||
|
- // legacy...
|
||||||
|
- if (arguments.length >= 3) ctx.depth = arguments[2];
|
||||||
|
- if (arguments.length >= 4) ctx.colors = arguments[3];
|
||||||
|
- if (isBoolean(opts)) {
|
||||||
|
- // legacy...
|
||||||
|
- ctx.showHidden = opts;
|
||||||
|
- } else if (opts) {
|
||||||
|
- // got an "options" object
|
||||||
|
- exports._extend(ctx, opts);
|
||||||
|
- }
|
||||||
|
- // set default options
|
||||||
|
- if (isUndefined(ctx.showHidden)) ctx.showHidden = false;
|
||||||
|
- if (isUndefined(ctx.depth)) ctx.depth = 2;
|
||||||
|
- if (isUndefined(ctx.colors)) ctx.colors = false;
|
||||||
|
- if (isUndefined(ctx.customInspect)) ctx.customInspect = true;
|
||||||
|
- if (ctx.colors) ctx.stylize = stylizeWithColor;
|
||||||
|
- return formatValue(ctx, obj, ctx.depth);
|
||||||
|
-}
|
||||||
|
-exports.inspect = inspect;
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics
|
||||||
|
-inspect.colors = {
|
||||||
|
- 'bold' : [1, 22],
|
||||||
|
- 'italic' : [3, 23],
|
||||||
|
- 'underline' : [4, 24],
|
||||||
|
- 'inverse' : [7, 27],
|
||||||
|
- 'white' : [37, 39],
|
||||||
|
- 'grey' : [90, 39],
|
||||||
|
- 'black' : [30, 39],
|
||||||
|
- 'blue' : [34, 39],
|
||||||
|
- 'cyan' : [36, 39],
|
||||||
|
- 'green' : [32, 39],
|
||||||
|
- 'magenta' : [35, 39],
|
||||||
|
- 'red' : [31, 39],
|
||||||
|
- 'yellow' : [33, 39]
|
||||||
|
-};
|
||||||
|
-
|
||||||
|
-// Don't use 'blue' not visible on cmd.exe
|
||||||
|
-inspect.styles = {
|
||||||
|
- 'special': 'cyan',
|
||||||
|
- 'number': 'yellow',
|
||||||
|
- 'boolean': 'yellow',
|
||||||
|
- 'undefined': 'grey',
|
||||||
|
- 'null': 'bold',
|
||||||
|
- 'string': 'green',
|
||||||
|
- 'date': 'magenta',
|
||||||
|
- // "name": intentionally not styling
|
||||||
|
- 'regexp': 'red'
|
||||||
|
-};
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function stylizeWithColor(str, styleType) {
|
||||||
|
- var style = inspect.styles[styleType];
|
||||||
|
-
|
||||||
|
- if (style) {
|
||||||
|
- return '\u001b[' + inspect.colors[style][0] + 'm' + str +
|
||||||
|
- '\u001b[' + inspect.colors[style][1] + 'm';
|
||||||
|
- } else {
|
||||||
|
- return str;
|
||||||
|
- }
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function stylizeNoColor(str, styleType) {
|
||||||
|
- return str;
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function arrayToHash(array) {
|
||||||
|
- var hash = {};
|
||||||
|
-
|
||||||
|
- array.forEach(function(val, idx) {
|
||||||
|
- hash[val] = true;
|
||||||
|
- });
|
||||||
|
-
|
||||||
|
- return hash;
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function formatValue(ctx, value, recurseTimes) {
|
||||||
|
- // Provide a hook for user-specified inspect functions.
|
||||||
|
- // Check that value is an object with an inspect function on it
|
||||||
|
- if (ctx.customInspect &&
|
||||||
|
- value &&
|
||||||
|
- isFunction(value.inspect) &&
|
||||||
|
- // Filter out the util module, it's inspect function is special
|
||||||
|
- value.inspect !== exports.inspect &&
|
||||||
|
- // Also filter out any prototype objects using the circular check.
|
||||||
|
- !(value.constructor && value.constructor.prototype === value)) {
|
||||||
|
- var ret = value.inspect(recurseTimes, ctx);
|
||||||
|
- if (!isString(ret)) {
|
||||||
|
- ret = formatValue(ctx, ret, recurseTimes);
|
||||||
|
- }
|
||||||
|
- return ret;
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- // Primitive types cannot have properties
|
||||||
|
- var primitive = formatPrimitive(ctx, value);
|
||||||
|
- if (primitive) {
|
||||||
|
- return primitive;
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- // Look up the keys of the object.
|
||||||
|
- var keys = Object.keys(value);
|
||||||
|
- var visibleKeys = arrayToHash(keys);
|
||||||
|
-
|
||||||
|
- if (ctx.showHidden) {
|
||||||
|
- keys = Object.getOwnPropertyNames(value);
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- // Some type of object without properties can be shortcutted.
|
||||||
|
- if (keys.length === 0) {
|
||||||
|
- if (isFunction(value)) {
|
||||||
|
- var name = value.name ? ': ' + value.name : '';
|
||||||
|
- return ctx.stylize('[Function' + name + ']', 'special');
|
||||||
|
- }
|
||||||
|
- if (isRegExp(value)) {
|
||||||
|
- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
|
||||||
|
- }
|
||||||
|
- if (isDate(value)) {
|
||||||
|
- return ctx.stylize(Date.prototype.toString.call(value), 'date');
|
||||||
|
- }
|
||||||
|
- if (isError(value)) {
|
||||||
|
- return formatError(value);
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- var base = '', array = false, braces = ['{', '}'];
|
||||||
|
-
|
||||||
|
- // Make Array say that they are Array
|
||||||
|
- if (isArray(value)) {
|
||||||
|
- array = true;
|
||||||
|
- braces = ['[', ']'];
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- // Make functions say that they are functions
|
||||||
|
- if (isFunction(value)) {
|
||||||
|
- var n = value.name ? ': ' + value.name : '';
|
||||||
|
- base = ' [Function' + n + ']';
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- // Make RegExps say that they are RegExps
|
||||||
|
- if (isRegExp(value)) {
|
||||||
|
- base = ' ' + RegExp.prototype.toString.call(value);
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- // Make dates with properties first say the date
|
||||||
|
- if (isDate(value)) {
|
||||||
|
- base = ' ' + Date.prototype.toUTCString.call(value);
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- // Make error with message first say the error
|
||||||
|
- if (isError(value)) {
|
||||||
|
- base = ' ' + formatError(value);
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- if (keys.length === 0 && (!array || value.length == 0)) {
|
||||||
|
- return braces[0] + base + braces[1];
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- if (recurseTimes < 0) {
|
||||||
|
- if (isRegExp(value)) {
|
||||||
|
- return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
|
||||||
|
- } else {
|
||||||
|
- return ctx.stylize('[Object]', 'special');
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- ctx.seen.push(value);
|
||||||
|
-
|
||||||
|
- var output;
|
||||||
|
- if (array) {
|
||||||
|
- output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);
|
||||||
|
- } else {
|
||||||
|
- output = keys.map(function(key) {
|
||||||
|
- return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array);
|
||||||
|
- });
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- ctx.seen.pop();
|
||||||
|
-
|
||||||
|
- return reduceToSingleString(output, base, braces);
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function formatPrimitive(ctx, value) {
|
||||||
|
- if (isUndefined(value))
|
||||||
|
- return ctx.stylize('undefined', 'undefined');
|
||||||
|
- if (isString(value)) {
|
||||||
|
- var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
|
||||||
|
- .replace(/'/g, "\\'")
|
||||||
|
- .replace(/\\"/g, '"') + '\'';
|
||||||
|
- return ctx.stylize(simple, 'string');
|
||||||
|
- }
|
||||||
|
- if (isNumber(value)) {
|
||||||
|
- // Format -0 as '-0'. Strict equality won't distinguish 0 from -0,
|
||||||
|
- // so instead we use the fact that 1 / -0 < 0 whereas 1 / 0 > 0 .
|
||||||
|
- if (value === 0 && 1 / value < 0)
|
||||||
|
- return ctx.stylize('-0', 'number');
|
||||||
|
- return ctx.stylize('' + value, 'number');
|
||||||
|
- }
|
||||||
|
- if (isBoolean(value))
|
||||||
|
- return ctx.stylize('' + value, 'boolean');
|
||||||
|
- // For some reason typeof null is "object", so special case here.
|
||||||
|
- if (isNull(value))
|
||||||
|
- return ctx.stylize('null', 'null');
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function formatError(value) {
|
||||||
|
- return '[' + Error.prototype.toString.call(value) + ']';
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function formatArray(ctx, value, recurseTimes, visibleKeys, keys) {
|
||||||
|
- var output = [];
|
||||||
|
- for (var i = 0, l = value.length; i < l; ++i) {
|
||||||
|
- if (hasOwnProperty(value, String(i))) {
|
||||||
|
- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
|
||||||
|
- String(i), true));
|
||||||
|
- } else {
|
||||||
|
- output.push('');
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
- keys.forEach(function(key) {
|
||||||
|
- if (!key.match(/^\d+$/)) {
|
||||||
|
- output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
|
||||||
|
- key, true));
|
||||||
|
- }
|
||||||
|
- });
|
||||||
|
- return output;
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {
|
||||||
|
- var name, str, desc;
|
||||||
|
- desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] };
|
||||||
|
- if (desc.get) {
|
||||||
|
- if (desc.set) {
|
||||||
|
- str = ctx.stylize('[Getter/Setter]', 'special');
|
||||||
|
- } else {
|
||||||
|
- str = ctx.stylize('[Getter]', 'special');
|
||||||
|
- }
|
||||||
|
- } else {
|
||||||
|
- if (desc.set) {
|
||||||
|
- str = ctx.stylize('[Setter]', 'special');
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
- if (!hasOwnProperty(visibleKeys, key)) {
|
||||||
|
- name = '[' + key + ']';
|
||||||
|
- }
|
||||||
|
- if (!str) {
|
||||||
|
- if (ctx.seen.indexOf(desc.value) < 0) {
|
||||||
|
- if (isNull(recurseTimes)) {
|
||||||
|
- str = formatValue(ctx, desc.value, null);
|
||||||
|
- } else {
|
||||||
|
- str = formatValue(ctx, desc.value, recurseTimes - 1);
|
||||||
|
- }
|
||||||
|
- if (str.indexOf('\n') > -1) {
|
||||||
|
- if (array) {
|
||||||
|
- str = str.split('\n').map(function(line) {
|
||||||
|
- return ' ' + line;
|
||||||
|
- }).join('\n').substr(2);
|
||||||
|
- } else {
|
||||||
|
- str = '\n' + str.split('\n').map(function(line) {
|
||||||
|
- return ' ' + line;
|
||||||
|
- }).join('\n');
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
- } else {
|
||||||
|
- str = ctx.stylize('[Circular]', 'special');
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
- if (isUndefined(name)) {
|
||||||
|
- if (array && key.match(/^\d+$/)) {
|
||||||
|
- return str;
|
||||||
|
- }
|
||||||
|
- name = JSON.stringify('' + key);
|
||||||
|
- if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
|
||||||
|
- name = name.substr(1, name.length - 2);
|
||||||
|
- name = ctx.stylize(name, 'name');
|
||||||
|
- } else {
|
||||||
|
- name = name.replace(/'/g, "\\'")
|
||||||
|
- .replace(/\\"/g, '"')
|
||||||
|
- .replace(/(^"|"$)/g, "'");
|
||||||
|
- name = ctx.stylize(name, 'string');
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- return name + ': ' + str;
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function reduceToSingleString(output, base, braces) {
|
||||||
|
- var numLinesEst = 0;
|
||||||
|
- var length = output.reduce(function(prev, cur) {
|
||||||
|
- numLinesEst++;
|
||||||
|
- if (cur.indexOf('\n') >= 0) numLinesEst++;
|
||||||
|
- return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1;
|
||||||
|
- }, 0);
|
||||||
|
-
|
||||||
|
- if (length > 60) {
|
||||||
|
- return braces[0] +
|
||||||
|
- (base === '' ? '' : base + '\n ') +
|
||||||
|
- ' ' +
|
||||||
|
- output.join(',\n ') +
|
||||||
|
- ' ' +
|
||||||
|
- braces[1];
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1];
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
// NOTE: These type checking functions intentionally don't use `instanceof`
|
||||||
|
// because it is fragile and can be easily faked with `Object.create()`.
|
||||||
|
function isArray(ar) {
|
||||||
|
@@ -522,166 +98,10 @@ function isPrimitive(arg) {
|
||||||
|
exports.isPrimitive = isPrimitive;
|
||||||
|
|
||||||
|
function isBuffer(arg) {
|
||||||
|
- return arg instanceof Buffer;
|
||||||
|
+ return Buffer.isBuffer(arg);
|
||||||
|
}
|
||||||
|
exports.isBuffer = isBuffer;
|
||||||
|
|
||||||
|
function objectToString(o) {
|
||||||
|
return Object.prototype.toString.call(o);
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-function pad(n) {
|
||||||
|
- return n < 10 ? '0' + n.toString(10) : n.toString(10);
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
|
||||||
|
- 'Oct', 'Nov', 'Dec'];
|
||||||
|
-
|
||||||
|
-// 26 Feb 16:19:34
|
||||||
|
-function timestamp() {
|
||||||
|
- var d = new Date();
|
||||||
|
- var time = [pad(d.getHours()),
|
||||||
|
- pad(d.getMinutes()),
|
||||||
|
- pad(d.getSeconds())].join(':');
|
||||||
|
- return [d.getDate(), months[d.getMonth()], time].join(' ');
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-// log is just a thin wrapper to console.log that prepends a timestamp
|
||||||
|
-exports.log = function() {
|
||||||
|
- console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments));
|
||||||
|
-};
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-/**
|
||||||
|
- * Inherit the prototype methods from one constructor into another.
|
||||||
|
- *
|
||||||
|
- * The Function.prototype.inherits from lang.js rewritten as a standalone
|
||||||
|
- * function (not on Function.prototype). NOTE: If this file is to be loaded
|
||||||
|
- * during bootstrapping this function needs to be rewritten using some native
|
||||||
|
- * functions as prototype setup using normal JavaScript does not work as
|
||||||
|
- * expected during bootstrapping (see mirror.js in r114903).
|
||||||
|
- *
|
||||||
|
- * @param {function} ctor Constructor function which needs to inherit the
|
||||||
|
- * prototype.
|
||||||
|
- * @param {function} superCtor Constructor function to inherit prototype from.
|
||||||
|
- */
|
||||||
|
-exports.inherits = function(ctor, superCtor) {
|
||||||
|
- ctor.super_ = superCtor;
|
||||||
|
- ctor.prototype = Object.create(superCtor.prototype, {
|
||||||
|
- constructor: {
|
||||||
|
- value: ctor,
|
||||||
|
- enumerable: false,
|
||||||
|
- writable: true,
|
||||||
|
- configurable: true
|
||||||
|
- }
|
||||||
|
- });
|
||||||
|
-};
|
||||||
|
-
|
||||||
|
-exports._extend = function(origin, add) {
|
||||||
|
- // Don't do anything if add isn't an object
|
||||||
|
- if (!add || !isObject(add)) return origin;
|
||||||
|
-
|
||||||
|
- var keys = Object.keys(add);
|
||||||
|
- var i = keys.length;
|
||||||
|
- while (i--) {
|
||||||
|
- origin[keys[i]] = add[keys[i]];
|
||||||
|
- }
|
||||||
|
- return origin;
|
||||||
|
-};
|
||||||
|
-
|
||||||
|
-function hasOwnProperty(obj, prop) {
|
||||||
|
- return Object.prototype.hasOwnProperty.call(obj, prop);
|
||||||
|
-}
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-// Deprecated old stuff.
|
||||||
|
-
|
||||||
|
-exports.p = exports.deprecate(function() {
|
||||||
|
- for (var i = 0, len = arguments.length; i < len; ++i) {
|
||||||
|
- console.error(exports.inspect(arguments[i]));
|
||||||
|
- }
|
||||||
|
-}, 'util.p: Use console.error() instead');
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-exports.exec = exports.deprecate(function() {
|
||||||
|
- return require('child_process').exec.apply(this, arguments);
|
||||||
|
-}, 'util.exec is now called `child_process.exec`.');
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-exports.print = exports.deprecate(function() {
|
||||||
|
- for (var i = 0, len = arguments.length; i < len; ++i) {
|
||||||
|
- process.stdout.write(String(arguments[i]));
|
||||||
|
- }
|
||||||
|
-}, 'util.print: Use console.log instead');
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-exports.puts = exports.deprecate(function() {
|
||||||
|
- for (var i = 0, len = arguments.length; i < len; ++i) {
|
||||||
|
- process.stdout.write(arguments[i] + '\n');
|
||||||
|
- }
|
||||||
|
-}, 'util.puts: Use console.log instead');
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-exports.debug = exports.deprecate(function(x) {
|
||||||
|
- process.stderr.write('DEBUG: ' + x + '\n');
|
||||||
|
-}, 'util.debug: Use console.error instead');
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-exports.error = exports.deprecate(function(x) {
|
||||||
|
- for (var i = 0, len = arguments.length; i < len; ++i) {
|
||||||
|
- process.stderr.write(arguments[i] + '\n');
|
||||||
|
- }
|
||||||
|
-}, 'util.error: Use console.error instead');
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-exports.pump = exports.deprecate(function(readStream, writeStream, callback) {
|
||||||
|
- var callbackCalled = false;
|
||||||
|
-
|
||||||
|
- function call(a, b, c) {
|
||||||
|
- if (callback && !callbackCalled) {
|
||||||
|
- callback(a, b, c);
|
||||||
|
- callbackCalled = true;
|
||||||
|
- }
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
- readStream.addListener('data', function(chunk) {
|
||||||
|
- if (writeStream.write(chunk) === false) readStream.pause();
|
||||||
|
- });
|
||||||
|
-
|
||||||
|
- writeStream.addListener('drain', function() {
|
||||||
|
- readStream.resume();
|
||||||
|
- });
|
||||||
|
-
|
||||||
|
- readStream.addListener('end', function() {
|
||||||
|
- writeStream.end();
|
||||||
|
- });
|
||||||
|
-
|
||||||
|
- readStream.addListener('close', function() {
|
||||||
|
- call();
|
||||||
|
- });
|
||||||
|
-
|
||||||
|
- readStream.addListener('error', function(err) {
|
||||||
|
- writeStream.end();
|
||||||
|
- call(err);
|
||||||
|
- });
|
||||||
|
-
|
||||||
|
- writeStream.addListener('error', function(err) {
|
||||||
|
- readStream.destroy();
|
||||||
|
- call(err);
|
||||||
|
- });
|
||||||
|
-}, 'util.pump(): Use readableStream.pipe() instead');
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-var uv;
|
||||||
|
-exports._errnoException = function(err, syscall) {
|
||||||
|
- if (isUndefined(uv)) uv = process.binding('uv');
|
||||||
|
- var errname = uv.errname(err);
|
||||||
|
- var e = new Error(syscall + ' ' + errname);
|
||||||
|
- e.code = errname;
|
||||||
|
- e.errno = errname;
|
||||||
|
- e.syscall = syscall;
|
||||||
|
- return e;
|
||||||
|
-};
|
||||||
|
+}
|
||||||
107
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/lib/util.js
generated
vendored
Normal file
@@ -0,0 +1,107 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.
function isArray(ar) {
  return Array.isArray(ar);
}
exports.isArray = isArray;

function isBoolean(arg) {
  return typeof arg === 'boolean';
}
exports.isBoolean = isBoolean;

function isNull(arg) {
  return arg === null;
}
exports.isNull = isNull;

function isNullOrUndefined(arg) {
  return arg == null;
}
exports.isNullOrUndefined = isNullOrUndefined;

function isNumber(arg) {
  return typeof arg === 'number';
}
exports.isNumber = isNumber;

function isString(arg) {
  return typeof arg === 'string';
}
exports.isString = isString;

function isSymbol(arg) {
  return typeof arg === 'symbol';
}
exports.isSymbol = isSymbol;

function isUndefined(arg) {
  return arg === void 0;
}
exports.isUndefined = isUndefined;

function isRegExp(re) {
  return isObject(re) && objectToString(re) === '[object RegExp]';
}
exports.isRegExp = isRegExp;

function isObject(arg) {
  return typeof arg === 'object' && arg !== null;
}
exports.isObject = isObject;

function isDate(d) {
  return isObject(d) && objectToString(d) === '[object Date]';
}
exports.isDate = isDate;

function isError(e) {
  return isObject(e) &&
      (objectToString(e) === '[object Error]' || e instanceof Error);
}
exports.isError = isError;

function isFunction(arg) {
  return typeof arg === 'function';
}
exports.isFunction = isFunction;

function isPrimitive(arg) {
  return arg === null ||
         typeof arg === 'boolean' ||
         typeof arg === 'number' ||
         typeof arg === 'string' ||
         typeof arg === 'symbol' ||  // ES6 symbol
         typeof arg === 'undefined';
}
exports.isPrimitive = isPrimitive;

function isBuffer(arg) {
  return Buffer.isBuffer(arg);
}
exports.isBuffer = isBuffer;

function objectToString(o) {
  return Object.prototype.toString.call(o);
}
34
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/package.json
generated
vendored
Normal file
@@ -0,0 +1,34 @@
{
  "name": "core-util-is",
  "version": "1.0.1",
  "description": "The `util.is*` functions introduced in Node v0.12.",
  "main": "lib/util.js",
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/core-util-is"
  },
  "keywords": [
    "util",
    "isBuffer",
    "isArray",
    "isNumber",
    "isString",
    "isRegExp",
    "isThis",
    "isThat",
    "polyfill"
  ],
  "author": {
    "name": "Isaac Z. Schlueter",
    "email": "i@izs.me",
    "url": "http://blog.izs.me/"
  },
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/isaacs/core-util-is/issues"
  },
  "readme": "# core-util-is\n\nThe `util.is*` functions introduced in Node v0.12.\n",
  "readmeFilename": "README.md",
  "_id": "core-util-is@1.0.1",
  "_from": "core-util-is@~1.0.0"
}
106
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/core-util-is/util.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.
function isArray(ar) {
  return Array.isArray(ar);
}
exports.isArray = isArray;

function isBoolean(arg) {
  return typeof arg === 'boolean';
}
exports.isBoolean = isBoolean;

function isNull(arg) {
  return arg === null;
}
exports.isNull = isNull;

function isNullOrUndefined(arg) {
  return arg == null;
}
exports.isNullOrUndefined = isNullOrUndefined;

function isNumber(arg) {
  return typeof arg === 'number';
}
exports.isNumber = isNumber;

function isString(arg) {
  return typeof arg === 'string';
}
exports.isString = isString;

function isSymbol(arg) {
  return typeof arg === 'symbol';
}
exports.isSymbol = isSymbol;

function isUndefined(arg) {
  return arg === void 0;
}
exports.isUndefined = isUndefined;

function isRegExp(re) {
  return isObject(re) && objectToString(re) === '[object RegExp]';
}
exports.isRegExp = isRegExp;

function isObject(arg) {
  return typeof arg === 'object' && arg !== null;
}
exports.isObject = isObject;

function isDate(d) {
  return isObject(d) && objectToString(d) === '[object Date]';
}
exports.isDate = isDate;

function isError(e) {
  return isObject(e) && objectToString(e) === '[object Error]';
}
exports.isError = isError;

function isFunction(arg) {
  return typeof arg === 'function';
}
exports.isFunction = isFunction;

function isPrimitive(arg) {
  return arg === null ||
         typeof arg === 'boolean' ||
         typeof arg === 'number' ||
         typeof arg === 'string' ||
         typeof arg === 'symbol' ||  // ES6 symbol
         typeof arg === 'undefined';
}
exports.isPrimitive = isPrimitive;

function isBuffer(arg) {
  return arg instanceof Buffer;
}
exports.isBuffer = isBuffer;

function objectToString(o) {
  return Object.prototype.toString.call(o);
}
16
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/inherits/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,16 @@
The ISC License

Copyright (c) Isaac Z. Schlueter

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.

42
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/inherits/README.md
generated
vendored
Normal file
@@ -0,0 +1,42 @@
Browser-friendly inheritance fully compatible with standard node.js
[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).

This package exports standard `inherits` from node.js `util` module in
node environment, but also provides alternative browser-friendly
implementation through [browser
field](https://gist.github.com/shtylman/4339901). Alternative
implementation is a literal copy of standard one located in standalone
module to avoid requiring of `util`. It also has a shim for old
browsers with no `Object.create` support.

While keeping you sure you are using standard `inherits`
implementation in node.js environment, it allows bundlers such as
[browserify](https://github.com/substack/node-browserify) to not
include full `util` package to your client code if all you need is
just `inherits` function. It worth, because browser shim for `util`
package is large and `inherits` is often the single function you need
from it.

It's recommended to use this package instead of
`require('util').inherits` for any code that has chances to be used
not only in node.js but in browser too.

## usage

```js
var inherits = require('inherits');
// then use exactly as the standard one
```

## note on version ~1.0

Version ~1.0 had completely different motivation and is not compatible
neither with 2.0 nor with standard node.js `inherits`.

If you are using version ~1.0 and planning to switch to ~2.0, be
careful:

* new version uses `super_` instead of `super` for referencing
  superclass
* new version overwrites current prototype while old one preserves any
  existing fields on it
1
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/inherits/inherits.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require('util').inherits
23
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/inherits/inherits_browser.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
if (typeof Object.create === 'function') {
  // implementation from standard node.js 'util' module
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    ctor.prototype = Object.create(superCtor.prototype, {
      constructor: {
        value: ctor,
        enumerable: false,
        writable: true,
        configurable: true
      }
    });
  };
} else {
  // old school shim for old browsers
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    var TempCtor = function () {}
    TempCtor.prototype = superCtor.prototype
    ctor.prototype = new TempCtor()
    ctor.prototype.constructor = ctor
  }
}
32
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/inherits/package.json
generated
vendored
Normal file
@@ -0,0 +1,32 @@
{
  "name": "inherits",
  "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()",
  "version": "2.0.1",
  "keywords": [
    "inheritance",
    "class",
    "klass",
    "oop",
    "object-oriented",
    "inherits",
    "browser",
    "browserify"
  ],
  "main": "./inherits.js",
  "browser": "./inherits_browser.js",
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/inherits"
  },
  "license": "ISC",
  "scripts": {
    "test": "node test"
  },
  "readme": "Browser-friendly inheritance fully compatible with standard node.js\n[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).\n\nThis package exports standard `inherits` from node.js `util` module in\nnode environment, but also provides alternative browser-friendly\nimplementation through [browser\nfield](https://gist.github.com/shtylman/4339901). Alternative\nimplementation is a literal copy of standard one located in standalone\nmodule to avoid requiring of `util`. It also has a shim for old\nbrowsers with no `Object.create` support.\n\nWhile keeping you sure you are using standard `inherits`\nimplementation in node.js environment, it allows bundlers such as\n[browserify](https://github.com/substack/node-browserify) to not\ninclude full `util` package to your client code if all you need is\njust `inherits` function. It worth, because browser shim for `util`\npackage is large and `inherits` is often the single function you need\nfrom it.\n\nIt's recommended to use this package instead of\n`require('util').inherits` for any code that has chances to be used\nnot only in node.js but in browser too.\n\n## usage\n\n```js\nvar inherits = require('inherits');\n// then use exactly as the standard one\n```\n\n## note on version ~1.0\n\nVersion ~1.0 had completely different motivation and is not compatible\nneither with 2.0 nor with standard node.js `inherits`.\n\nIf you are using version ~1.0 and planning to switch to ~2.0, be\ncareful:\n\n* new version uses `super_` instead of `super` for referencing\n  superclass\n* new version overwrites current prototype while old one preserves any\n  existing fields on it\n",
  "readmeFilename": "README.md",
  "bugs": {
    "url": "https://github.com/isaacs/inherits/issues"
  },
  "_id": "inherits@2.0.1",
  "_from": "inherits@~2.0.1"
}
25
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/inherits/test.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
var inherits = require('./inherits.js')
var assert = require('assert')

function test(c) {
  assert(c.constructor === Child)
  assert(c.constructor.super_ === Parent)
  assert(Object.getPrototypeOf(c) === Child.prototype)
  assert(Object.getPrototypeOf(Object.getPrototypeOf(c)) === Parent.prototype)
  assert(c instanceof Child)
  assert(c instanceof Parent)
}

function Child() {
  Parent.call(this)
  test(this)
}

function Parent() {}

inherits(Child, Parent)

var c = new Child
test(c)

console.log('ok')
54
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/isarray/README.md
generated
vendored
Normal file
@@ -0,0 +1,54 @@

# isarray

`Array#isArray` for older browsers.

## Usage

```js
var isArray = require('isarray');

console.log(isArray([])); // => true
console.log(isArray({})); // => false
```

## Installation

With [npm](http://npmjs.org) do

```bash
$ npm install isarray
```

Then bundle for the browser with
[browserify](https://github.com/substack/browserify).

With [component](http://component.io) do

```bash
$ component install juliangruber/isarray
```

## License

(MIT)

Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
209
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/isarray/build/build.js
generated
vendored
Normal file
@@ -0,0 +1,209 @@

/**
 * Require the given path.
 *
 * @param {String} path
 * @return {Object} exports
 * @api public
 */

function require(path, parent, orig) {
  var resolved = require.resolve(path);

  // lookup failed
  if (null == resolved) {
    orig = orig || path;
    parent = parent || 'root';
    var err = new Error('Failed to require "' + orig + '" from "' + parent + '"');
    err.path = orig;
    err.parent = parent;
    err.require = true;
    throw err;
  }

  var module = require.modules[resolved];

  // perform real require()
  // by invoking the module's
  // registered function
  if (!module.exports) {
    module.exports = {};
    module.client = module.component = true;
    module.call(this, module.exports, require.relative(resolved), module);
  }

  return module.exports;
}

/**
 * Registered modules.
 */

require.modules = {};

/**
 * Registered aliases.
 */

require.aliases = {};

/**
 * Resolve `path`.
 *
 * Lookup:
 *
 *   - PATH/index.js
 *   - PATH.js
 *   - PATH
 *
 * @param {String} path
 * @return {String} path or null
 * @api private
 */

require.resolve = function(path) {
  if (path.charAt(0) === '/') path = path.slice(1);
  var index = path + '/index.js';

  var paths = [
    path,
    path + '.js',
    path + '.json',
    path + '/index.js',
    path + '/index.json'
  ];

  for (var i = 0; i < paths.length; i++) {
    var path = paths[i];
    if (require.modules.hasOwnProperty(path)) return path;
  }

  if (require.aliases.hasOwnProperty(index)) {
    return require.aliases[index];
  }
};

/**
 * Normalize `path` relative to the current path.
 *
 * @param {String} curr
 * @param {String} path
 * @return {String}
 * @api private
 */

require.normalize = function(curr, path) {
  var segs = [];

  if ('.' != path.charAt(0)) return path;

  curr = curr.split('/');
  path = path.split('/');

  for (var i = 0; i < path.length; ++i) {
    if ('..' == path[i]) {
      curr.pop();
    } else if ('.' != path[i] && '' != path[i]) {
      segs.push(path[i]);
    }
  }

  return curr.concat(segs).join('/');
};

/**
 * Register module at `path` with callback `definition`.
 *
 * @param {String} path
 * @param {Function} definition
 * @api private
 */

require.register = function(path, definition) {
  require.modules[path] = definition;
};

/**
 * Alias a module definition.
 *
 * @param {String} from
 * @param {String} to
 * @api private
 */

require.alias = function(from, to) {
  if (!require.modules.hasOwnProperty(from)) {
    throw new Error('Failed to alias "' + from + '", it does not exist');
  }
  require.aliases[to] = from;
};

/**
 * Return a require function relative to the `parent` path.
 *
 * @param {String} parent
 * @return {Function}
 * @api private
 */

require.relative = function(parent) {
  var p = require.normalize(parent, '..');

  /**
   * lastIndexOf helper.
   */

  function lastIndexOf(arr, obj) {
    var i = arr.length;
    while (i--) {
      if (arr[i] === obj) return i;
    }
    return -1;
  }

  /**
   * The relative require() itself.
   */

  function localRequire(path) {
    var resolved = localRequire.resolve(path);
    return require(resolved, parent, path);
  }

  /**
   * Resolve relative to the parent.
   */

  localRequire.resolve = function(path) {
    var c = path.charAt(0);
    if ('/' == c) return path.slice(1);
    if ('.' == c) return require.normalize(p, path);

    // resolve deps by returning
    // the dep in the nearest "deps"
    // directory
    var segs = parent.split('/');
    var i = lastIndexOf(segs, 'deps') + 1;
    if (!i) i = 0;
    path = segs.slice(0, i + 1).join('/') + '/deps/' + path;
    return path;
  };

  /**
   * Check if module is defined at `path`.
   */

  localRequire.exists = function(path) {
    return require.modules.hasOwnProperty(localRequire.resolve(path));
  };

  return localRequire;
};
require.register("isarray/index.js", function(exports, require, module){
module.exports = Array.isArray || function (arr) {
  return Object.prototype.toString.call(arr) == '[object Array]';
};

});
require.alias("isarray/index.js", "isarray/index.js");

19
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/isarray/component.json
generated
vendored
Normal file
@@ -0,0 +1,19 @@
{
  "name" : "isarray",
  "description" : "Array#isArray for older browsers",
  "version" : "0.0.1",
  "repository" : "juliangruber/isarray",
  "homepage": "https://github.com/juliangruber/isarray",
  "main" : "index.js",
  "scripts" : [
    "index.js"
  ],
  "dependencies" : {},
  "keywords": ["browser","isarray","array"],
  "author": {
    "name": "Julian Gruber",
    "email": "mail@juliangruber.com",
    "url": "http://juliangruber.com"
  },
  "license": "MIT"
}
3
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/isarray/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
module.exports = Array.isArray || function (arr) {
  return Object.prototype.toString.call(arr) == '[object Array]';
};
36
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/isarray/package.json
generated
vendored
Normal file
@@ -0,0 +1,36 @@
{
  "name": "isarray",
  "description": "Array#isArray for older browsers",
  "version": "0.0.1",
  "repository": {
    "type": "git",
    "url": "git://github.com/juliangruber/isarray.git"
  },
  "homepage": "https://github.com/juliangruber/isarray",
  "main": "index.js",
  "scripts": {
    "test": "tap test/*.js"
  },
  "dependencies": {},
  "devDependencies": {
    "tap": "*"
  },
  "keywords": [
    "browser",
    "isarray",
    "array"
  ],
  "author": {
    "name": "Julian Gruber",
    "email": "mail@juliangruber.com",
    "url": "http://juliangruber.com"
  },
  "license": "MIT",
  "readme": "\n# isarray\n\n`Array#isArray` for older browsers.\n\n## Usage\n\n```js\nvar isArray = require('isarray');\n\nconsole.log(isArray([])); // => true\nconsole.log(isArray({})); // => false\n```\n\n## Installation\n\nWith [npm](http://npmjs.org) do\n\n```bash\n$ npm install isarray\n```\n\nThen bundle for the browser with\n[browserify](https://github.com/substack/browserify).\n\nWith [component](http://component.io) do\n\n```bash\n$ component install juliangruber/isarray\n```\n\n## License\n\n(MIT)\n\nCopyright (c) 2013 Julian Gruber <julian@juliangruber.com>\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of\nthis software and associated documentation files (the \"Software\"), to deal in\nthe Software without restriction, including without limitation the rights to\nuse, copy, modify, merge, publish, distribute, sublicense, and/or sell copies\nof the Software, and to permit persons to whom the Software is furnished to do\nso, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n",
  "readmeFilename": "README.md",
  "bugs": {
    "url": "https://github.com/juliangruber/isarray/issues"
  },
  "_id": "isarray@0.0.1",
  "_from": "isarray@0.0.1"
}
2
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,2 @@
build
test
20
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,20 @@
Copyright Joyent, Inc. and other Node contributors.

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the
following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
7
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/README.md
generated
vendored
Normal file
@@ -0,0 +1,7 @@
**string_decoder.js** (`require('string_decoder')`) from Node.js core

Copyright Joyent, Inc. and other Node contributors. See LICENCE file for details.

Version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.**

The *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version.
221
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/index.js
generated
vendored
Normal file
@@ -0,0 +1,221 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

var Buffer = require('buffer').Buffer;

var isBufferEncoding = Buffer.isEncoding
  || function(encoding) {
       switch (encoding && encoding.toLowerCase()) {
         case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true;
         default: return false;
       }
     }


function assertEncoding(encoding) {
  if (encoding && !isBufferEncoding(encoding)) {
    throw new Error('Unknown encoding: ' + encoding);
  }
}

// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters. CESU-8 is handled as part of the UTF-8 encoding.
//
// @TODO Handling all encodings inside a single object makes it very difficult
// to reason about this code, so it should be split up in the future.
// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
// points as used by CESU-8.
var StringDecoder = exports.StringDecoder = function(encoding) {
  this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
  assertEncoding(encoding);
  switch (this.encoding) {
    case 'utf8':
      // CESU-8 represents each of Surrogate Pair by 3-bytes
      this.surrogateSize = 3;
      break;
    case 'ucs2':
    case 'utf16le':
      // UTF-16 represents each of Surrogate Pair by 2-bytes
      this.surrogateSize = 2;
      this.detectIncompleteChar = utf16DetectIncompleteChar;
      break;
    case 'base64':
      // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
      this.surrogateSize = 3;
      this.detectIncompleteChar = base64DetectIncompleteChar;
      break;
    default:
      this.write = passThroughWrite;
      return;
  }

  // Enough space to store all bytes of a single character. UTF-8 needs 4
  // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
  this.charBuffer = new Buffer(6);
  // Number of bytes received for the current incomplete multi-byte character.
  this.charReceived = 0;
  // Number of bytes expected for the current incomplete multi-byte character.
  this.charLength = 0;
};


// write decodes the given buffer and returns it as JS string that is
// guaranteed to not contain any partial multi-byte characters. Any partial
// character found at the end of the buffer is buffered up, and will be
// returned when calling write again with the remaining bytes.
//
// Note: Converting a Buffer containing an orphan surrogate to a String
// currently works, but converting a String to a Buffer (via `new Buffer`, or
// Buffer#write) will replace incomplete surrogates with the unicode
// replacement character. See https://codereview.chromium.org/121173009/ .
StringDecoder.prototype.write = function(buffer) {
  var charStr = '';
  // if our last write ended with an incomplete multibyte character
  while (this.charLength) {
    // determine how many remaining bytes this buffer has to offer for this char
    var available = (buffer.length >= this.charLength - this.charReceived) ?
        this.charLength - this.charReceived :
        buffer.length;

    // add the new bytes to the char buffer
    buffer.copy(this.charBuffer, this.charReceived, 0, available);
    this.charReceived += available;

    if (this.charReceived < this.charLength) {
      // still not enough chars in this buffer? wait for more ...
      return '';
    }

    // remove bytes belonging to the current character from the buffer
    buffer = buffer.slice(available, buffer.length);

    // get the character that was split
    charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);

    // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
    var charCode = charStr.charCodeAt(charStr.length - 1);
    if (charCode >= 0xD800 && charCode <= 0xDBFF) {
      this.charLength += this.surrogateSize;
      charStr = '';
      continue;
    }
    this.charReceived = this.charLength = 0;

    // if there are no more bytes in this buffer, just emit our char
    if (buffer.length === 0) {
      return charStr;
    }
    break;
  }

  // determine and set charLength / charReceived
  this.detectIncompleteChar(buffer);

  var end = buffer.length;
  if (this.charLength) {
    // buffer the incomplete character bytes we got
    buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
    end -= this.charReceived;
  }

  charStr += buffer.toString(this.encoding, 0, end);

  var end = charStr.length - 1;
  var charCode = charStr.charCodeAt(end);
  // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
  if (charCode >= 0xD800 && charCode <= 0xDBFF) {
    var size = this.surrogateSize;
    this.charLength += size;
    this.charReceived += size;
    this.charBuffer.copy(this.charBuffer, size, 0, size);
    buffer.copy(this.charBuffer, 0, 0, size);
    return charStr.substring(0, end);
  }

  // or just emit the charStr
  return charStr;
};

// detectIncompleteChar determines if there is an incomplete UTF-8 character at
// the end of the given buffer. If so, it sets this.charLength to the byte
// length that character, and sets this.charReceived to the number of bytes
// that are available for this character.
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // determine how many bytes we have to check at the end of this buffer
  var i = (buffer.length >= 3) ? 3 : buffer.length;

  // Figure out if one of the last i bytes of our buffer announces an
  // incomplete char.
  for (; i > 0; i--) {
    var c = buffer[buffer.length - i];

    // See http://en.wikipedia.org/wiki/UTF-8#Description

    // 110XXXXX
    if (i == 1 && c >> 5 == 0x06) {
      this.charLength = 2;
      break;
    }

    // 1110XXXX
    if (i <= 2 && c >> 4 == 0x0E) {
      this.charLength = 3;
      break;
    }

    // 11110XXX
    if (i <= 3 && c >> 3 == 0x1E) {
      this.charLength = 4;
      break;
    }
  }
  this.charReceived = i;
};

StringDecoder.prototype.end = function(buffer) {
  var res = '';
  if (buffer && buffer.length)
    res = this.write(buffer);

  if (this.charReceived) {
    var cr = this.charReceived;
    var buf = this.charBuffer;
    var enc = this.encoding;
    res += buf.slice(0, cr).toString(enc);
  }

  return res;
};

function passThroughWrite(buffer) {
  return buffer.toString(this.encoding);
}

function utf16DetectIncompleteChar(buffer) {
  this.charReceived = buffer.length % 2;
  this.charLength = this.charReceived ? 2 : 0;
}

function base64DetectIncompleteChar(buffer) {
  this.charReceived = buffer.length % 3;
  this.charLength = this.charReceived ? 3 : 0;
}
32
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/node_modules/string_decoder/package.json
generated
vendored
Normal file
@@ -0,0 +1,32 @@
{
  "name": "string_decoder",
  "version": "0.10.31",
  "description": "The string_decoder module from Node core",
  "main": "index.js",
  "dependencies": {},
  "devDependencies": {
    "tap": "~0.4.8"
  },
  "scripts": {
    "test": "tap test/simple/*.js"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/rvagg/string_decoder.git"
  },
  "homepage": "https://github.com/rvagg/string_decoder",
  "keywords": [
    "string",
    "decoder",
    "browser",
    "browserify"
  ],
  "license": "MIT",
  "readme": "**string_decoder.js** (`require('string_decoder')`) from Node.js core\n\nCopyright Joyent, Inc. and other Node contributors. See LICENCE file for details.\n\nVersion numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. **Prefer the stable version over the unstable.**\n\nThe *build/* directory contains a build script that will scrape the source from the [joyent/node](https://github.com/joyent/node) repo given a specific Node version.",
  "readmeFilename": "README.md",
  "bugs": {
    "url": "https://github.com/rvagg/string_decoder/issues"
  },
  "_id": "string_decoder@0.10.31",
  "_from": "string_decoder@~0.10.x"
}
43
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/package.json
generated
vendored
Normal file
@@ -0,0 +1,43 @@
{
  "name": "readable-stream",
  "version": "1.0.33",
  "description": "Streams2, a user-land copy of the stream library from Node.js v0.10.x",
  "main": "readable.js",
  "dependencies": {
    "core-util-is": "~1.0.0",
    "isarray": "0.0.1",
    "string_decoder": "~0.10.x",
    "inherits": "~2.0.1"
  },
  "devDependencies": {
    "tap": "~0.2.6"
  },
  "scripts": {
    "test": "tap test/simple/*.js"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/readable-stream"
  },
  "keywords": [
    "readable",
    "stream",
    "pipe"
  ],
  "browser": {
    "util": false
  },
  "author": {
    "name": "Isaac Z. Schlueter",
    "email": "i@izs.me",
    "url": "http://blog.izs.me/"
  },
  "license": "MIT",
  "readme": "# readable-stream\n\n***Node-core streams for userland***\n\n[](https://nodei.co/npm/readable-stream/)\n[](https://nodei.co/npm/readable-stream/)\n\nThis package is a mirror of the Streams2 and Streams3 implementations in Node-core.\n\nIf you want to guarantee a stable streams base, regardless of what version of Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *\"stream\"* module in Node-core.\n\n**readable-stream** comes in two major versions, v1.0.x and v1.1.x. The former tracks the Streams2 implementation in Node 0.10, including bug-fixes and minor improvements as they are added. The latter tracks Streams3 as it develops in Node 0.11; we will likely see a v1.2.x branch for Node 0.12.\n\n**readable-stream** uses proper patch-level versioning so if you pin to `\"~1.0.0\"` you’ll get the latest Node 0.10 Streams2 implementation, including any fixes and minor non-breaking improvements. The patch-level versions of 1.0.x and 1.1.x should mirror the patch-level versions of Node-core releases. You should prefer the **1.0.x** releases for now and when you’re ready to start using Streams3, pin to `\"~1.1.0\"`\n\n",
  "readmeFilename": "README.md",
  "bugs": {
    "url": "https://github.com/isaacs/readable-stream/issues"
  },
  "_id": "readable-stream@1.0.33",
  "_from": "readable-stream@>=1.0.33-1 <1.1.0-0"
}
1
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/passthrough.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require("./lib/_stream_passthrough.js")
8
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/readable.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
var Stream = require('stream'); // hack to fix a circular dependency issue when used with browserify
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = Stream;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
1
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/transform.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require("./lib/_stream_transform.js")
1
server/node_modules/gulp-concat/node_modules/through2/node_modules/readable-stream/writable.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require("./lib/_stream_writable.js")
30
server/node_modules/gulp-concat/node_modules/through2/node_modules/xtend/.jshintrc
generated
vendored
Normal file
@@ -0,0 +1,30 @@
{
  "maxdepth": 4,
  "maxstatements": 200,
  "maxcomplexity": 12,
  "maxlen": 80,
  "maxparams": 5,

  "curly": true,
  "eqeqeq": true,
  "immed": true,
  "latedef": false,
  "noarg": true,
  "noempty": true,
  "nonew": true,
  "undef": true,
  "unused": "vars",
  "trailing": true,

  "quotmark": true,
  "expr": true,
  "asi": true,

  "browser": false,
  "esnext": true,
  "devel": false,
  "node": false,
  "nonstandard": false,

  "predef": ["require", "module", "__dirname", "__filename"]
}
1
server/node_modules/gulp-concat/node_modules/through2/node_modules/xtend/.npmignore
generated
vendored
Normal file
@@ -0,0 +1 @@
node_modules
19
server/node_modules/gulp-concat/node_modules/through2/node_modules/xtend/LICENCE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
Copyright (c) 2012-2014 Raynos.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
4
server/node_modules/gulp-concat/node_modules/through2/node_modules/xtend/Makefile
generated
vendored
Normal file
@@ -0,0 +1,4 @@
browser:
	node ./support/compile

.PHONY: browser
32
server/node_modules/gulp-concat/node_modules/through2/node_modules/xtend/README.md
generated
vendored
Normal file
@@ -0,0 +1,32 @@
# xtend

[![browser support][3]][4]

[](http://github.com/badges/stability-badges)

Extend like a boss

xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence.

## Examples

```js
var extend = require("xtend")

// extend returns a new object. Does not mutate arguments
var combination = extend({
    a: "a",
    b: 'c'
}, {
    b: "b"
})
// { a: "a", b: "b" }
```

## Stability status: Locked

## MIT Licenced


[3]: http://ci.testling.com/Raynos/xtend.png
[4]: http://ci.testling.com/Raynos/xtend
17
server/node_modules/gulp-concat/node_modules/through2/node_modules/xtend/immutable.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
module.exports = extend

function extend() {
    var target = {}

    for (var i = 0; i < arguments.length; i++) {
        var source = arguments[i]

        for (var key in source) {
            if (source.hasOwnProperty(key)) {
                target[key] = source[key]
            }
        }
    }

    return target
}
15
server/node_modules/gulp-concat/node_modules/through2/node_modules/xtend/mutable.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
module.exports = extend

function extend(target) {
    for (var i = 1; i < arguments.length; i++) {
        var source = arguments[i]

        for (var key in source) {
            if (source.hasOwnProperty(key)) {
                target[key] = source[key]
            }
        }
    }

    return target
}
Some files were not shown because too many files have changed in this diff.