Mirror of https://github.com/KevinMidboe/zoff.git, synced 2025-10-29 18:00:23 +00:00
Added gulp and started using the file built by gulp. Fixed some issues with remote control changing. Implemented clients being able to change the password.
18 server/node_modules/gulp-uglifyjs/.jshintrc (generated, vendored, normal file)
@@ -0,0 +1,18 @@
{
  "bitwise": true,
  "camelcase": true,
  "curly": false,
  "devel": true,
  "eqeqeq": true,
  "indent": 2,
  "latedef": true,
  "newcap": true,
  "node": true,
  "noempty": true,
  "nonew": true,
  "quotmark": "single",
  "strict": true,
  "trailing": true,
  "undef": true,
  "unused": true
}
2 server/node_modules/gulp-uglifyjs/.npmignore (generated, vendored, normal file)
@@ -0,0 +1,2 @@
node_modules/
npm-debug.log
4 server/node_modules/gulp-uglifyjs/.travis.yml (generated, vendored, normal file)
@@ -0,0 +1,4 @@
language: node_js
node_js:
  - "0.11"
  - "0.10"
60 server/node_modules/gulp-uglifyjs/CHANGELOG.md (generated, vendored, normal file)
@@ -0,0 +1,60 @@
# gulp-uglifyjs changelog

## 0.6.2
- Remove dependency on lodash, path
- Update dependencies
- Update deprecation notice

## 0.6.1
- Don't break pipe when no files given [1791bbf](https://github.com/craigjennings11/gulp-uglifyjs/commit/1791bbf979754d60c26ce205c58c9bfe746eed3c)

## 0.6.0
- Add support for mangle options [f35998c](https://github.com/craigjennings11/gulp-uglifyjs/commit/f35998c7a73dcd2ea24661dc801e34b6bfbe092c)

## 0.5.0

- Adding support for inSourceMap option [12460aa](https://github.com/craigjennings11/gulp-uglifyjs/commit/12460aa1bdcb373d0055ab323077bda5ebd76d35)
- Adding support for sourceRoot option to mirror Uglify.minify() API more closely

## 0.4.4

- Using correct filename for source_map.file option by default [#15](https://github.com/craigjennings11/gulp-uglifyjs/issues/15)

## 0.4.3

- Fixing problem where source wouldn't load in browser [#11](https://github.com/craigjennings11/gulp-uglifyjs/issues/11)

## 0.4.2

- Adding more details to error reporting

## 0.4.1

- Added better reporting when UglifyJS throws an error

## 0.4.0

- Added `sourceMapIncludeSources` option [da7684e](https://github.com/craigjennings11/gulp-uglifyjs/commit/da7684ea23475f5fc78f142ddb1556d9795309ba)

## 0.3.0

- Added `wrap` and `enclose` options [4ca8c29](https://github.com/craigjennings11/gulp-uglifyjs/commit/4ca8c2979fc08d85649056535d0dcb00eff9bb7a)

## 0.2.1

- No longer errors out when no files are passed in

## 0.2.0

- Refactored interaction with UglifyJS to have more granular control over
  compression.
- Fixed [#1](https://github.com/craigjennings11/gulp-uglifyjs/issues/1)

## 0.1.1

- Added tests
- Clarified portions of README

## 0.1.0

- Initial release
20 server/node_modules/gulp-uglifyjs/LICENSE (generated, vendored, normal file)
@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2014 Craig Jennings

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
146 server/node_modules/gulp-uglifyjs/README.md (generated, vendored, normal file)
@@ -0,0 +1,146 @@
# gulp-uglifyjs [![NPM version][npm-image]][npm-url] [![Build Status][travis-image]][travis-url]

__DEPRECATED__: This plugin has been blacklisted as it relies on Uglify to concat
the files instead of using [gulp-concat][concat-link], which breaks the "It
should do one thing" paradigm. When I created this plugin, there was no way to
get source maps to work with gulp, however now there is a [gulp-sourcemaps][sourcemaps-link]
plugin that achieves the same goal. gulp-uglifyjs still works great and gives
very granular control over the Uglify execution, I'm just giving you a heads up
that other options now exist.
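
For context, a minimal sketch of the pipeline that deprecation notice points to, assuming gulp-concat, gulp-uglify and gulp-sourcemaps are installed (task and path names are illustrative):

```js
var gulp = require('gulp');
var concat = require('gulp-concat');
var sourcemaps = require('gulp-sourcemaps');
var uglify = require('gulp-uglify');

gulp.task('scripts', function() {
  return gulp.src('public/js/*.js')
    .pipe(sourcemaps.init())      // collect per-file source maps
    .pipe(concat('app.min.js'))   // gulp-concat joins the files
    .pipe(uglify())               // gulp-uglify minifies the bundle
    .pipe(sourcemaps.write('.'))  // writes app.min.js.map next to the bundle
    .pipe(gulp.dest('dist/js'));
});
```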

> Minify multiple JavaScript files with UglifyJS2.

This plugin is based off of [gulp-uglify][uglify-link] but allows you to
directly use all of Uglify's options. The main difference between these two
plugins is that passing in an array of files (or glob pattern that matches
multiple files) will pass that array directly to Uglify. Uglify will handle both
concatenating and minifying those files.

Letting Uglify handle the concatenation and minification allows for the
generated source map to be able to separate the files in the browser for easier
debugging, and it avoids the intermediate step of `concat()`. Using `concat()`
and then `uglify()` would result in a source map for the already concatenated
file which has the possibility of being quite large, making it hard to find the
code you are wanting to debug.

[sourcemaps-link]: https://github.com/floridoo/gulp-sourcemaps
[concat-link]: https://github.com/wearefractal/gulp-concat
[uglify-link]: https://github.com/terinjokes/gulp-uglify

## Usage

```javascript
var uglify = require('gulp-uglifyjs');

gulp.task('uglify', function() {
  gulp.src('public/js/*.js')
    .pipe(uglify())
    .pipe(gulp.dest('dist/js'))
});
```

This will concatenate and minify all files found by `public/js/*.js` and write
it to a file with the same name as the first file found.

## API

`uglify([filename], [options])`

- filename - optional

  Override the default output filename.

- options - optional

  These options are directly passed to Uglify. Below are some examples of what
  is available. To see a full list, read UglifyJS's [documentation](https://github.com/mishoo/UglifyJS2/#api-reference).

- `outSourceMap`

  (default `false`) Give a `string` to be the name of the source map. Set to
  `true` and the source map name will be the same as the output file name
  suffixed with `.map`.

  Note: if you would like your source map to point to relative paths instead
  of absolute paths (which you probably will if it is hosted on a server), set
  the `basePath` option to be the base of where the content is served (a
  combined sketch follows this list).

- `inSourceMap`

  If you're compressing compiled JavaScript and have a source map for it, you
  can use the inSourceMap argument. This can only be used in conjunction with
  `outSourceMap`.

- `sourceRoot`

  Sets the root location for where the source files should be found.

- `mangle`

  (default `{}`) Set to `false` to skip mangling names. See [options](https://github.com/mishoo/UglifyJS2/blob/e37b67d013c4537a36bb3c24f4f99e72efbf6d4b/lib/scope.js#L328) for possible key/value pairs.

- `output`

  (default `null`) Pass an Object if you wish to specify additional [output options](http://lisperator.net/uglifyjs/codegen).
  The defaults are optimized for best compression.

- `compress`

  (default `{}`) Pass an Object to specify custom [compressor options](http://lisperator.net/uglifyjs/compress).
  Pass `false` to skip compression completely.

- `enclose`

  (default `undefined`) Pass an Object to wrap all of the code in a closure
  with a configurable arguments/parameters list. Each key-value pair in the
  enclose object is effectively an argument-parameter pair.

- `wrap`

  (default `undefined`) Pass a String to wrap all of the code in a closure.
  This is an easy way to make sure nothing is leaking. Variables that need to
  be public `exports` and `global` are made available. The value of wrap is
  the global variable exports will be available as.

- `exportAll`

  (default `false`) Set to `true` to make all global functions and variables
  available via the `export` variable. Only available when using `wrap`.
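
A combined sketch of the options above (illustrative values only; the output name, the paths and the `Zoff` wrap name are made up):

```js
gulp.task('uglify-wrapped', function() {
  gulp.src('public/js/*.js')
    .pipe(uglify('app.min.js', {
      outSourceMap: true,   // emits app.min.js.map alongside the output
      basePath: 'public',   // keep source map paths relative to the served root
      sourceRoot: '/',      // where the browser should resolve the sources from
      wrap: 'Zoff',         // expose exports through a single global
      exportAll: true,      // only meaningful together with wrap
      mangle: false
    }))
    .pipe(gulp.dest('dist/js'));
});
```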

### Examples

To minify multiple files into 'app.min.js' and create a source map:

```js
gulp.task('uglify', function() {
  gulp.src('public/js/*.js')
    .pipe(uglify('app.min.js', {
      outSourceMap: true
    }))
    .pipe(gulp.dest('dist/js'))
});
```

To minify multiple files from different paths to 'app.js', but skip mangling
and beautify the results:

```js
gulp.task('uglify', function() {
  gulp.src(['public/js/*.js', 'bower_components/**/*.js'])
    .pipe(uglify('app.js', {
      mangle: false,
      output: {
        beautify: true
      }
    }))
    .pipe(gulp.dest('dist/js'))
});
```

[npm-image]: http://img.shields.io/npm/v/gulp-uglifyjs.svg
[npm-url]: https://www.npmjs.org/package/gulp-uglifyjs/

[downloads-image]: http://img.shields.io/npm/dm/gulp-uglifyjs.svg

[travis-url]: https://travis-ci.org/craigjennings11/gulp-uglifyjs
[travis-image]: https://travis-ci.org/craigjennings11/gulp-uglifyjs.svg?branch=master
170 server/node_modules/gulp-uglifyjs/index.js (generated, vendored, normal file)
@@ -0,0 +1,170 @@
var fs = require('fs');
var gutil = require('gulp-util');
var path = require('path');
var through = require('through');
var UglifyJS = require('uglify-js');

var File = gutil.File;
var PluginError = gutil.PluginError;

module.exports = function(filename, options) {
  'use strict';

  var baseFile = null;
  var basePath = '.';
  var sourcesContent = {};
  var toplevel = null;

  if (typeof filename === 'object') {
    // options given, but no filename
    options = filename;
    filename = null;
  }

  // Assign default values to options
  options = options || {};
  if (options.compress !== false) options.compress = options.compress || { warnings: false };
  if (options.mangle !== false) options.mangle = options.mangle || {};
  options.output = options.output || {};

  // Needed to get the relative paths correct in the source map
  if (options.basePath) {
    basePath = process.cwd() + path.sep + options.basePath;
  }

  function bufferFiles(file) {
    /* jshint validthis: true */
    if (file.isNull()) return;

    if (file.isStream()) {
      return this.emit('error', new PluginError('gulp-uglifyjs', 'Streaming not supported'));
    }

    if (!baseFile) {
      baseFile = file;

      // Set the filename if one wasn't given
      if (!filename) {
        filename = baseFile.relative;
      }

      // Set the outSourceMap filename if one was requested
      if (options.outSourceMap === true) {
        options.outSourceMap = filename + '.map';
      }
    }

    var code = file.contents.toString();

    sourcesContent[file.path] = code;

    try {
      toplevel = UglifyJS.parse(code, {
        filename: path.relative(basePath, file.path),
        toplevel: toplevel
      });
    } catch(e) {
      gutil.log('gulp-uglifyjs - UglifyJS threw an error');
      gutil.log(gutil.colors.red('file: ') + file.path + ':' + e.line + ',' + e.col);
      gutil.log(gutil.colors.red('error: ') + e.message);

      return this.emit('error', new PluginError('gulp-uglifyjs', 'Aborting due to previous errors'));
    }
  }

  function minify() {
    /* jshint validthis: true, camelcase: false */
    if(!toplevel) {
      gutil.log('gulp-uglifyjs - No files given; aborting minification');
      this.emit('end');
      return;
    }

    if (options.wrap) {
      toplevel = toplevel.wrap_commonjs(options.wrap, options.exportAll);
    }

    if (options.enclose) {
      var argParameterList = [];

      if (options.enclose !== true) {
        Object.keys(options.enclose).forEach(function(key) {
          argParameterList.push(key + ':' + options.enclose[key]);
        });
      }

      toplevel = toplevel.wrap_enclose(argParameterList);
    }

    toplevel.figure_out_scope();

    if (options.compress !== false) {
      var compressor = UglifyJS.Compressor(options.compress);
      toplevel = toplevel.transform(compressor);

      toplevel.figure_out_scope();
    }

    if (options.mangle !== false) {
      toplevel.compute_char_frequency();
      toplevel.mangle_names(options.mangle);
    }

    // Setup source map if one was requested
    if (options.outSourceMap) {
      options.output.source_map = options.output.source_map || {
        file: filename,
        root: options.sourceRoot || baseFile.cwd,
      };

      if (options.inSourceMap) {
        options.output.source_map.orig = fs.readFileSync(options.inSourceMap).toString();
      }

      var map = UglifyJS.SourceMap(options.output.source_map);
      options.output.source_map = map;

      if (options.sourceMapIncludeSources) {
        for (var file in sourcesContent) {
          if (sourcesContent.hasOwnProperty(file)) {
            options.output.source_map.get().setSourceContent(file, sourcesContent[file]);
          }
        }
      }
    }

    // Output the minified code
    var stream = UglifyJS.OutputStream(options.output);
    toplevel.print(stream);
    var min = stream.get();

    if (options.outSourceMap) {
      // Manually add source map comment to uglified code
      min += '\r\n//# sourceMappingURL=' + options.outSourceMap;
    }

    var compressedFile = new File({
      cwd: baseFile.cwd,
      base: baseFile.base,
      path: path.join(baseFile.base, filename),
      contents: new Buffer(min)
    });

    this.push(compressedFile);

    if (options.outSourceMap) {
      var sourceMap = new File({
        cwd: baseFile.cwd,
        base: baseFile.base,
        path: path.join(baseFile.base, options.outSourceMap),
        contents: new Buffer(options.output.source_map.toString())
      });

      this.push(sourceMap);
    }

    this.emit('end');
  }

  return through(bufferFiles, minify);
};
1 server/node_modules/gulp-uglifyjs/node_modules/.bin/uglifyjs (generated, vendored, symbolic link)
@@ -0,0 +1 @@
../uglify-js/bin/uglifyjs
5 server/node_modules/gulp-uglifyjs/node_modules/through/.travis.yml (generated, vendored, normal file)
@@ -0,0 +1,5 @@
language: node_js
node_js:
  - 0.6
  - 0.8
  - "0.10"
15 server/node_modules/gulp-uglifyjs/node_modules/through/LICENSE.APACHE2 (generated, vendored, normal file)
@@ -0,0 +1,15 @@
|
||||
Apache License, Version 2.0
|
||||
|
||||
Copyright (c) 2011 Dominic Tarr
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
24 server/node_modules/gulp-uglifyjs/node_modules/through/LICENSE.MIT (generated, vendored, normal file)
@@ -0,0 +1,24 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2011 Dominic Tarr
|
||||
|
||||
Permission is hereby granted, free of charge,
|
||||
to any person obtaining a copy of this software and
|
||||
associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify,
|
||||
merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom
|
||||
the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
|
||||
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
108 server/node_modules/gulp-uglifyjs/node_modules/through/index.js (generated, vendored, normal file)
@@ -0,0 +1,108 @@
|
||||
var Stream = require('stream')
|
||||
|
||||
// through
|
||||
//
|
||||
// a stream that does nothing but re-emit the input.
|
||||
// useful for aggregating a series of changing but not ending streams into one stream)
|
||||
|
||||
exports = module.exports = through
|
||||
through.through = through
|
||||
|
||||
//create a readable writable stream.
|
||||
|
||||
function through (write, end, opts) {
|
||||
write = write || function (data) { this.queue(data) }
|
||||
end = end || function () { this.queue(null) }
|
||||
|
||||
var ended = false, destroyed = false, buffer = [], _ended = false
|
||||
var stream = new Stream()
|
||||
stream.readable = stream.writable = true
|
||||
stream.paused = false
|
||||
|
||||
// stream.autoPause = !(opts && opts.autoPause === false)
|
||||
stream.autoDestroy = !(opts && opts.autoDestroy === false)
|
||||
|
||||
stream.write = function (data) {
|
||||
write.call(this, data)
|
||||
return !stream.paused
|
||||
}
|
||||
|
||||
function drain() {
|
||||
while(buffer.length && !stream.paused) {
|
||||
var data = buffer.shift()
|
||||
if(null === data)
|
||||
return stream.emit('end')
|
||||
else
|
||||
stream.emit('data', data)
|
||||
}
|
||||
}
|
||||
|
||||
stream.queue = stream.push = function (data) {
|
||||
// console.error(ended)
|
||||
if(_ended) return stream
|
||||
if(data === null) _ended = true
|
||||
buffer.push(data)
|
||||
drain()
|
||||
return stream
|
||||
}
|
||||
|
||||
//this will be registered as the first 'end' listener
|
||||
//must call destroy next tick, to make sure we're after any
|
||||
//stream piped from here.
|
||||
//this is only a problem if end is not emitted synchronously.
|
||||
//a nicer way to do this is to make sure this is the last listener for 'end'
|
||||
|
||||
stream.on('end', function () {
|
||||
stream.readable = false
|
||||
if(!stream.writable && stream.autoDestroy)
|
||||
process.nextTick(function () {
|
||||
stream.destroy()
|
||||
})
|
||||
})
|
||||
|
||||
function _end () {
|
||||
stream.writable = false
|
||||
end.call(stream)
|
||||
if(!stream.readable && stream.autoDestroy)
|
||||
stream.destroy()
|
||||
}
|
||||
|
||||
stream.end = function (data) {
|
||||
if(ended) return
|
||||
ended = true
|
||||
if(arguments.length) stream.write(data)
|
||||
_end() // will emit or queue
|
||||
return stream
|
||||
}
|
||||
|
||||
stream.destroy = function () {
|
||||
if(destroyed) return
|
||||
destroyed = true
|
||||
ended = true
|
||||
buffer.length = 0
|
||||
stream.writable = stream.readable = false
|
||||
stream.emit('close')
|
||||
return stream
|
||||
}
|
||||
|
||||
stream.pause = function () {
|
||||
if(stream.paused) return
|
||||
stream.paused = true
|
||||
return stream
|
||||
}
|
||||
|
||||
stream.resume = function () {
|
||||
if(stream.paused) {
|
||||
stream.paused = false
|
||||
stream.emit('resume')
|
||||
}
|
||||
drain()
|
||||
//may have become paused again,
|
||||
//as drain emits 'data'.
|
||||
if(!stream.paused)
|
||||
stream.emit('drain')
|
||||
return stream
|
||||
}
|
||||
return stream
|
||||
}
|
||||
|
||||
51 server/node_modules/gulp-uglifyjs/node_modules/through/package.json (generated, vendored, normal file)
@@ -0,0 +1,51 @@
|
||||
{
|
||||
"name": "through",
|
||||
"version": "2.3.7",
|
||||
"description": "simplified stream construction",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "set -e; for t in test/*.js; do node $t; done"
|
||||
},
|
||||
"devDependencies": {
|
||||
"stream-spec": "~0.3.5",
|
||||
"tape": "~2.3.2",
|
||||
"from": "~0.1.3"
|
||||
},
|
||||
"keywords": [
|
||||
"stream",
|
||||
"streams",
|
||||
"user-streams",
|
||||
"pipe"
|
||||
],
|
||||
"author": {
|
||||
"name": "Dominic Tarr",
|
||||
"email": "dominic.tarr@gmail.com",
|
||||
"url": "dominictarr.com"
|
||||
},
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/dominictarr/through.git"
|
||||
},
|
||||
"homepage": "http://github.com/dominictarr/through",
|
||||
"testling": {
|
||||
"browsers": [
|
||||
"ie/8..latest",
|
||||
"ff/15..latest",
|
||||
"chrome/20..latest",
|
||||
"safari/5.1..latest"
|
||||
],
|
||||
"files": "test/*.js"
|
||||
},
|
||||
"readme": "#through\n\n[](http://travis-ci.org/dominictarr/through)\n[](https://ci.testling.com/dominictarr/through)\n\nEasy way to create a `Stream` that is both `readable` and `writable`. \n\n* Pass in optional `write` and `end` methods.\n* `through` takes care of pause/resume logic if you use `this.queue(data)` instead of `this.emit('data', data)`.\n* Use `this.pause()` and `this.resume()` to manage flow.\n* Check `this.paused` to see current flow state. (`write` always returns `!this.paused`).\n\nThis function is the basis for most of the synchronous streams in \n[event-stream](http://github.com/dominictarr/event-stream).\n\n``` js\nvar through = require('through')\n\nthrough(function write(data) {\n this.queue(data) //data *must* not be null\n },\n function end () { //optional\n this.queue(null)\n })\n```\n\nOr, can also be used _without_ buffering on pause, use `this.emit('data', data)`,\nand this.emit('end')\n\n``` js\nvar through = require('through')\n\nthrough(function write(data) {\n this.emit('data', data)\n //this.pause() \n },\n function end () { //optional\n this.emit('end')\n })\n```\n\n## Extended Options\n\nYou will probably not need these 99% of the time.\n\n### autoDestroy=false\n\nBy default, `through` emits close when the writable\nand readable side of the stream has ended.\nIf that is not desired, set `autoDestroy=false`.\n\n``` js\nvar through = require('through')\n\n//like this\nvar ts = through(write, end, {autoDestroy: false})\n//or like this\nvar ts = through(write, end)\nts.autoDestroy = false\n```\n\n## License\n\nMIT / Apache2\n",
|
||||
"readmeFilename": "readme.markdown",
|
||||
"bugs": {
|
||||
"url": "https://github.com/dominictarr/through/issues"
|
||||
},
|
||||
"_id": "through@2.3.7",
|
||||
"dist": {
|
||||
"shasum": "678553f9fafbd66316500a4cf41f9a7aba087e5d"
|
||||
},
|
||||
"_from": "through@^2.3.4",
|
||||
"_resolved": "https://registry.npmjs.org/through/-/through-2.3.7.tgz"
|
||||
}
|
||||
64 server/node_modules/gulp-uglifyjs/node_modules/through/readme.markdown (generated, vendored, normal file)
@@ -0,0 +1,64 @@
|
||||
#through
|
||||
|
||||
[](http://travis-ci.org/dominictarr/through)
|
||||
[](https://ci.testling.com/dominictarr/through)
|
||||
|
||||
Easy way to create a `Stream` that is both `readable` and `writable`.
|
||||
|
||||
* Pass in optional `write` and `end` methods.
|
||||
* `through` takes care of pause/resume logic if you use `this.queue(data)` instead of `this.emit('data', data)`.
|
||||
* Use `this.pause()` and `this.resume()` to manage flow.
|
||||
* Check `this.paused` to see current flow state. (`write` always returns `!this.paused`).
|
||||
|
||||
This function is the basis for most of the synchronous streams in
|
||||
[event-stream](http://github.com/dominictarr/event-stream).
|
||||
|
||||
``` js
|
||||
var through = require('through')
|
||||
|
||||
through(function write(data) {
|
||||
this.queue(data) //data *must* not be null
|
||||
},
|
||||
function end () { //optional
|
||||
this.queue(null)
|
||||
})
|
||||
```
|
||||
|
||||
Or, can also be used _without_ buffering on pause, use `this.emit('data', data)`,
|
||||
and this.emit('end')
|
||||
|
||||
``` js
|
||||
var through = require('through')
|
||||
|
||||
through(function write(data) {
|
||||
this.emit('data', data)
|
||||
//this.pause()
|
||||
},
|
||||
function end () { //optional
|
||||
this.emit('end')
|
||||
})
|
||||
```
|
||||
|
||||
## Extended Options
|
||||
|
||||
You will probably not need these 99% of the time.
|
||||
|
||||
### autoDestroy=false
|
||||
|
||||
By default, `through` emits close when the writable
|
||||
and readable side of the stream has ended.
|
||||
If that is not desired, set `autoDestroy=false`.
|
||||
|
||||
``` js
|
||||
var through = require('through')
|
||||
|
||||
//like this
|
||||
var ts = through(write, end, {autoDestroy: false})
|
||||
//or like this
|
||||
var ts = through(write, end)
|
||||
ts.autoDestroy = false
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT / Apache2
|
||||
28 server/node_modules/gulp-uglifyjs/node_modules/through/test/async.js (generated, vendored, normal file)
@@ -0,0 +1,28 @@
|
||||
var from = require('from')
|
||||
var through = require('../')
|
||||
|
||||
var tape = require('tape')
|
||||
|
||||
tape('simple async example', function (t) {
|
||||
|
||||
var n = 0, expected = [1,2,3,4,5], actual = []
|
||||
from(expected)
|
||||
.pipe(through(function(data) {
|
||||
this.pause()
|
||||
n ++
|
||||
setTimeout(function(){
|
||||
console.log('pushing data', data)
|
||||
this.push(data)
|
||||
this.resume()
|
||||
}.bind(this), 300)
|
||||
})).pipe(through(function(data) {
|
||||
console.log('pushing data second time', data);
|
||||
this.push(data)
|
||||
})).on('data', function (d) {
|
||||
actual.push(d)
|
||||
}).on('end', function() {
|
||||
t.deepEqual(actual, expected)
|
||||
t.end()
|
||||
})
|
||||
|
||||
})
|
||||
30 server/node_modules/gulp-uglifyjs/node_modules/through/test/auto-destroy.js (generated, vendored, normal file)
@@ -0,0 +1,30 @@
|
||||
var test = require('tape')
|
||||
var through = require('../')
|
||||
|
||||
// must emit end before close.
|
||||
|
||||
test('end before close', function (assert) {
|
||||
var ts = through()
|
||||
ts.autoDestroy = false
|
||||
var ended = false, closed = false
|
||||
|
||||
ts.on('end', function () {
|
||||
assert.ok(!closed)
|
||||
ended = true
|
||||
})
|
||||
ts.on('close', function () {
|
||||
assert.ok(ended)
|
||||
closed = true
|
||||
})
|
||||
|
||||
ts.write(1)
|
||||
ts.write(2)
|
||||
ts.write(3)
|
||||
ts.end()
|
||||
assert.ok(ended)
|
||||
assert.notOk(closed)
|
||||
ts.destroy()
|
||||
assert.ok(closed)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
71 server/node_modules/gulp-uglifyjs/node_modules/through/test/buffering.js (generated, vendored, normal file)
@@ -0,0 +1,71 @@
|
||||
var test = require('tape')
|
||||
var through = require('../')
|
||||
|
||||
// must emit end before close.
|
||||
|
||||
test('buffering', function(assert) {
|
||||
var ts = through(function (data) {
|
||||
this.queue(data)
|
||||
}, function () {
|
||||
this.queue(null)
|
||||
})
|
||||
|
||||
var ended = false, actual = []
|
||||
|
||||
ts.on('data', actual.push.bind(actual))
|
||||
ts.on('end', function () {
|
||||
ended = true
|
||||
})
|
||||
|
||||
ts.write(1)
|
||||
ts.write(2)
|
||||
ts.write(3)
|
||||
assert.deepEqual(actual, [1, 2, 3])
|
||||
ts.pause()
|
||||
ts.write(4)
|
||||
ts.write(5)
|
||||
ts.write(6)
|
||||
assert.deepEqual(actual, [1, 2, 3])
|
||||
ts.resume()
|
||||
assert.deepEqual(actual, [1, 2, 3, 4, 5, 6])
|
||||
ts.pause()
|
||||
ts.end()
|
||||
assert.ok(!ended)
|
||||
ts.resume()
|
||||
assert.ok(ended)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('buffering has data in queue, when ends', function (assert) {
|
||||
|
||||
/*
|
||||
* If stream ends while paused with data in the queue,
|
||||
* stream should still emit end after all data is written
|
||||
* on resume.
|
||||
*/
|
||||
|
||||
var ts = through(function (data) {
|
||||
this.queue(data)
|
||||
}, function () {
|
||||
this.queue(null)
|
||||
})
|
||||
|
||||
var ended = false, actual = []
|
||||
|
||||
ts.on('data', actual.push.bind(actual))
|
||||
ts.on('end', function () {
|
||||
ended = true
|
||||
})
|
||||
|
||||
ts.pause()
|
||||
ts.write(1)
|
||||
ts.write(2)
|
||||
ts.write(3)
|
||||
ts.end()
|
||||
assert.deepEqual(actual, [], 'no data written yet, still paused')
|
||||
assert.ok(!ended, 'end not emitted yet, still paused')
|
||||
ts.resume()
|
||||
assert.deepEqual(actual, [1, 2, 3], 'resumed, all data should be delivered')
|
||||
assert.ok(ended, 'end should be emitted once all data was delivered')
|
||||
assert.end();
|
||||
})
|
||||
45 server/node_modules/gulp-uglifyjs/node_modules/through/test/end.js (generated, vendored, normal file)
@@ -0,0 +1,45 @@
|
||||
var test = require('tape')
|
||||
var through = require('../')
|
||||
|
||||
// must emit end before close.
|
||||
|
||||
test('end before close', function (assert) {
|
||||
var ts = through()
|
||||
var ended = false, closed = false
|
||||
|
||||
ts.on('end', function () {
|
||||
assert.ok(!closed)
|
||||
ended = true
|
||||
})
|
||||
ts.on('close', function () {
|
||||
assert.ok(ended)
|
||||
closed = true
|
||||
})
|
||||
|
||||
ts.write(1)
|
||||
ts.write(2)
|
||||
ts.write(3)
|
||||
ts.end()
|
||||
assert.ok(ended)
|
||||
assert.ok(closed)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
test('end only once', function (t) {
|
||||
|
||||
var ts = through()
|
||||
var ended = false, closed = false
|
||||
|
||||
ts.on('end', function () {
|
||||
t.equal(ended, false)
|
||||
ended = true
|
||||
})
|
||||
|
||||
ts.queue(null)
|
||||
ts.queue(null)
|
||||
ts.queue(null)
|
||||
|
||||
ts.resume()
|
||||
|
||||
t.end()
|
||||
})
|
||||
133 server/node_modules/gulp-uglifyjs/node_modules/through/test/index.js (generated, vendored, normal file)
@@ -0,0 +1,133 @@
|
||||
|
||||
var test = require('tape')
|
||||
var spec = require('stream-spec')
|
||||
var through = require('../')
|
||||
|
||||
/*
|
||||
I'm using these two functions, and not streams and pipe
|
||||
so there is less to break. if this test fails it must be
|
||||
the implementation of _through_
|
||||
*/
|
||||
|
||||
function write(array, stream) {
|
||||
array = array.slice()
|
||||
function next() {
|
||||
while(array.length)
|
||||
if(stream.write(array.shift()) === false)
|
||||
return stream.once('drain', next)
|
||||
|
||||
stream.end()
|
||||
}
|
||||
|
||||
next()
|
||||
}
|
||||
|
||||
function read(stream, callback) {
|
||||
var actual = []
|
||||
stream.on('data', function (data) {
|
||||
actual.push(data)
|
||||
})
|
||||
stream.once('end', function () {
|
||||
callback(null, actual)
|
||||
})
|
||||
stream.once('error', function (err) {
|
||||
callback(err)
|
||||
})
|
||||
}
|
||||
|
||||
test('simple defaults', function(assert) {
|
||||
|
||||
var l = 1000
|
||||
, expected = []
|
||||
|
||||
while(l--) expected.push(l * Math.random())
|
||||
|
||||
var t = through()
|
||||
var s = spec(t).through().pausable()
|
||||
|
||||
read(t, function (err, actual) {
|
||||
assert.ifError(err)
|
||||
assert.deepEqual(actual, expected)
|
||||
assert.end()
|
||||
})
|
||||
|
||||
t.on('close', s.validate)
|
||||
|
||||
write(expected, t)
|
||||
});
|
||||
|
||||
test('simple functions', function(assert) {
|
||||
|
||||
var l = 1000
|
||||
, expected = []
|
||||
|
||||
while(l--) expected.push(l * Math.random())
|
||||
|
||||
var t = through(function (data) {
|
||||
this.emit('data', data*2)
|
||||
})
|
||||
var s = spec(t).through().pausable()
|
||||
|
||||
|
||||
read(t, function (err, actual) {
|
||||
assert.ifError(err)
|
||||
assert.deepEqual(actual, expected.map(function (data) {
|
||||
return data*2
|
||||
}))
|
||||
assert.end()
|
||||
})
|
||||
|
||||
t.on('close', s.validate)
|
||||
|
||||
write(expected, t)
|
||||
})
|
||||
|
||||
test('pauses', function(assert) {
|
||||
|
||||
var l = 1000
|
||||
, expected = []
|
||||
|
||||
while(l--) expected.push(l) //Math.random())
|
||||
|
||||
var t = through()
|
||||
|
||||
var s = spec(t)
|
||||
.through()
|
||||
.pausable()
|
||||
|
||||
t.on('data', function () {
|
||||
if(Math.random() > 0.1) return
|
||||
t.pause()
|
||||
process.nextTick(function () {
|
||||
t.resume()
|
||||
})
|
||||
})
|
||||
|
||||
read(t, function (err, actual) {
|
||||
assert.ifError(err)
|
||||
assert.deepEqual(actual, expected)
|
||||
})
|
||||
|
||||
t.on('close', function () {
|
||||
s.validate()
|
||||
assert.end()
|
||||
})
|
||||
|
||||
write(expected, t)
|
||||
})
|
||||
|
||||
test('does not soft-end on `undefined`', function(assert) {
|
||||
var stream = through()
|
||||
, count = 0
|
||||
|
||||
stream.on('data', function (data) {
|
||||
count++
|
||||
})
|
||||
|
||||
stream.write(undefined)
|
||||
stream.write(undefined)
|
||||
|
||||
assert.equal(count, 2)
|
||||
|
||||
assert.end()
|
||||
})
|
||||
29 server/node_modules/gulp-uglifyjs/node_modules/uglify-js/LICENSE (generated, vendored, normal file)
@@ -0,0 +1,29 @@
|
||||
UglifyJS is released under the BSD license:
|
||||
|
||||
Copyright 2012-2013 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
779 server/node_modules/gulp-uglifyjs/node_modules/uglify-js/README.md (generated, vendored, normal file)
@@ -0,0 +1,779 @@
|
||||
UglifyJS 2
|
||||
==========
|
||||
[](https://travis-ci.org/mishoo/UglifyJS2)
|
||||
|
||||
UglifyJS is a JavaScript parser, minifier, compressor or beautifier toolkit.
|
||||
|
||||
This page documents the command line utility. For
|
||||
[API and internals documentation see my website](http://lisperator.net/uglifyjs/).
|
||||
There's also an
|
||||
[in-browser online demo](http://lisperator.net/uglifyjs/#demo) (for Firefox,
|
||||
Chrome and probably Safari).
|
||||
|
||||
Install
|
||||
-------
|
||||
|
||||
First make sure you have installed the latest version of [node.js](http://nodejs.org/)
|
||||
(You may need to restart your computer after this step).
|
||||
|
||||
From NPM for use as a command line app:
|
||||
|
||||
npm install uglify-js -g
|
||||
|
||||
From NPM for programmatic use:
|
||||
|
||||
npm install uglify-js
|
||||
|
||||
From Git:
|
||||
|
||||
git clone git://github.com/mishoo/UglifyJS2.git
|
||||
cd UglifyJS2
|
||||
npm link .
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
uglifyjs [input files] [options]
|
||||
|
||||
UglifyJS2 can take multiple input files. It's recommended that you pass the
|
||||
input files first, then pass the options. UglifyJS will parse input files
|
||||
in sequence and apply any compression options. The files are parsed in the
|
||||
same global scope, that is, a reference from a file to some
|
||||
variable/function declared in another file will be matched properly.
|
||||
|
||||
If you want to read from STDIN instead, pass a single dash instead of input
|
||||
files.
|
||||
|
||||
If you wish to pass your options before the input files, separate the two with
|
||||
a double dash to prevent input files being used as option arguments:
|
||||
|
||||
uglifyjs --compress --mangle -- input.js
|
||||
|
||||
The available options are:
|
||||
|
||||
```
|
||||
--source-map Specify an output file where to generate source
|
||||
map.
|
||||
--source-map-root The path to the original source to be included
|
||||
in the source map.
|
||||
--source-map-url The path to the source map to be added in //#
|
||||
sourceMappingURL. Defaults to the value passed
|
||||
with --source-map.
|
||||
--source-map-include-sources Pass this flag if you want to include the
|
||||
content of source files in the source map as
|
||||
sourcesContent property.
|
||||
--in-source-map Input source map, useful if you're compressing
|
||||
JS that was generated from some other original
|
||||
code.
|
||||
--screw-ie8 Pass this flag if you don't care about full
|
||||
compliance with Internet Explorer 6-8 quirks
|
||||
(by default UglifyJS will try to be IE-proof).
|
||||
--expr Parse a single expression, rather than a
|
||||
program (for parsing JSON)
|
||||
-p, --prefix Skip prefix for original filenames that appear
|
||||
in source maps. For example -p 3 will drop 3
|
||||
directories from file names and ensure they are
|
||||
relative paths. You can also specify -p
|
||||
relative, which will make UglifyJS figure out
|
||||
itself the relative paths between original
|
||||
sources, the source map and the output file.
|
||||
-o, --output Output file (default STDOUT).
|
||||
-b, --beautify Beautify output/specify output options.
|
||||
-m, --mangle Mangle names/pass mangler options.
|
||||
-r, --reserved Reserved names to exclude from mangling.
|
||||
-c, --compress Enable compressor/pass compressor options. Pass
|
||||
options like -c
|
||||
hoist_vars=false,if_return=false. Use -c with
|
||||
no argument to use the default compression
|
||||
options.
|
||||
-d, --define Global definitions
|
||||
-e, --enclose Embed everything in a big function, with a
|
||||
configurable parameter/argument list.
|
||||
--comments Preserve copyright comments in the output. By
|
||||
default this works like Google Closure, keeping
|
||||
JSDoc-style comments that contain "@license" or
|
||||
"@preserve". You can optionally pass one of the
|
||||
following arguments to this flag:
|
||||
- "all" to keep all comments
|
||||
- a valid JS regexp (needs to start with a
|
||||
slash) to keep only comments that match.
|
||||
Note that currently not *all* comments can be
|
||||
kept when compression is on, because of dead
|
||||
code removal or cascading statements into
|
||||
sequences.
|
||||
--preamble Preamble to prepend to the output. You can use
|
||||
this to insert a comment, for example for
|
||||
licensing information. This will not be
|
||||
parsed, but the source map will adjust for its
|
||||
presence.
|
||||
--stats Display operations run time on STDERR.
|
||||
--acorn Use Acorn for parsing.
|
||||
--spidermonkey Assume input files are SpiderMonkey AST format
|
||||
(as JSON).
|
||||
--self Build itself (UglifyJS2) as a library (implies
|
||||
--wrap=UglifyJS --export-all)
|
||||
--wrap Embed everything in a big function, making the
|
||||
“exports” and “global” variables available. You
|
||||
need to pass an argument to this option to
|
||||
specify the name that your module will take
|
||||
when included in, say, a browser.
|
||||
--export-all Only used when --wrap, this tells UglifyJS to
|
||||
add code to automatically export all globals.
|
||||
--lint Display some scope warnings
|
||||
-v, --verbose Verbose
|
||||
-V, --version Print version number and exit.
|
||||
--noerr Don't throw an error for unknown options in -c,
|
||||
-b or -m.
|
||||
--bare-returns Allow return outside of functions. Useful when
|
||||
minifying CommonJS modules.
|
||||
--keep-fnames Do not mangle/drop function names. Useful for
|
||||
code relying on Function.prototype.name.
|
||||
--reserved-file File containing reserved names
|
||||
--reserve-domprops Make (most?) DOM properties reserved for
|
||||
--mangle-props
|
||||
--mangle-props Mangle property names
|
||||
--name-cache File to hold mangled names mappings
|
||||
```
|
||||
|
||||
Specify `--output` (`-o`) to declare the output file. Otherwise the output
|
||||
goes to STDOUT.
|
||||
|
||||
## Source map options
|
||||
|
||||
UglifyJS2 can generate a source map file, which is highly useful for
|
||||
debugging your compressed JavaScript. To get a source map, pass
|
||||
`--source-map output.js.map` (full path to the file where you want the
|
||||
source map dumped).
|
||||
|
||||
Additionally you might need `--source-map-root` to pass the URL where the
|
||||
original files can be found. In case you are passing full paths to input
|
||||
files to UglifyJS, you can use `--prefix` (`-p`) to specify the number of
|
||||
directories to drop from the path prefix when declaring files in the source
|
||||
map.
|
||||
|
||||
For example:
|
||||
|
||||
uglifyjs /home/doe/work/foo/src/js/file1.js \
|
||||
/home/doe/work/foo/src/js/file2.js \
|
||||
-o foo.min.js \
|
||||
--source-map foo.min.js.map \
|
||||
--source-map-root http://foo.com/src \
|
||||
-p 5 -c -m
|
||||
|
||||
The above will compress and mangle `file1.js` and `file2.js`, will drop the
|
||||
output in `foo.min.js` and the source map in `foo.min.js.map`. The source
|
||||
mapping will refer to `http://foo.com/src/js/file1.js` and
|
||||
`http://foo.com/src/js/file2.js` (in fact it will list `http://foo.com/src`
|
||||
as the source map root, and the original files as `js/file1.js` and
|
||||
`js/file2.js`).
|
||||
|
||||
### Composed source map
|
||||
|
||||
When you're compressing JS code that was output by a compiler such as
|
||||
CoffeeScript, mapping to the JS code won't be too helpful. Instead, you'd
|
||||
like to map back to the original code (i.e. CoffeeScript). UglifyJS has an
|
||||
option to take an input source map. Assuming you have a mapping from
|
||||
CoffeeScript → compiled JS, UglifyJS can generate a map from CoffeeScript →
|
||||
compressed JS by mapping every token in the compiled JS to its original
|
||||
location.
|
||||
|
||||
To use this feature you need to pass `--in-source-map
|
||||
/path/to/input/source.map`. Normally the input source map should also point
|
||||
to the file containing the generated JS, so if that's correct you can omit
|
||||
input files from the command line.
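
For illustration, a hypothetical invocation of the composed source map workflow described above (the file names are made up; the flags are the ones listed in the option table earlier):

    uglifyjs compiled.js --in-source-map compiled.js.map \
        --source-map compiled.min.js.map -o compiled.min.js -c -m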
|
||||
|
||||
## Mangler options
|
||||
|
||||
To enable the mangler you need to pass `--mangle` (`-m`). The following
|
||||
(comma-separated) options are supported:
|
||||
|
||||
- `sort` — to assign shorter names to most frequently used variables. This
|
||||
saves a few hundred bytes on jQuery before gzip, but the output is
|
||||
_bigger_ after gzip (and seems to happen for other libraries I tried it
|
||||
on) therefore it's not enabled by default.
|
||||
|
||||
- `toplevel` — mangle names declared in the toplevel scope (disabled by
|
||||
default).
|
||||
|
||||
- `eval` — mangle names visible in scopes where `eval` or `with` are used
|
||||
(disabled by default).
|
||||
|
||||
When mangling is enabled but you want to prevent certain names from being
|
||||
mangled, you can declare those names with `--reserved` (`-r`) — pass a
|
||||
comma-separated list of names. For example:
|
||||
|
||||
uglifyjs ... -m -r '$,require,exports'
|
||||
|
||||
to prevent the `require`, `exports` and `$` names from being changed.
|
||||
|
||||
### Mangling property names (`--mangle-props`)
|
||||
|
||||
**Note:** this will probably break your code. Mangling property names is a
|
||||
separate step, different from variable name mangling. Pass
|
||||
`--mangle-props`. It will mangle all properties that are seen in some
|
||||
object literal, or that are assigned to. For example:
|
||||
|
||||
```js
|
||||
var x = {
|
||||
foo: 1
|
||||
};
|
||||
|
||||
x.bar = 2;
|
||||
x["baz"] = 3;
|
||||
x[condition ? "moo" : "boo"] = 4;
|
||||
console.log(x.something());
|
||||
```
|
||||
|
||||
In the above code, `foo`, `bar`, `baz`, `moo` and `boo` will be replaced
|
||||
with single characters, while `something()` will be left as is.
|
||||
|
||||
In order for this to be of any use, we should avoid mangling standard JS
|
||||
names. For instance, if your code would contain `x.length = 10`, then
|
||||
`length` becomes a candidate for mangling and it will be mangled throughout
|
||||
the code, regardless if it's being used as part of your own objects or
|
||||
accessing an array's length. To avoid that, you can use `--reserved-file`
|
||||
to pass a filename that should contain the names to be excluded from
|
||||
mangling. This file can be used both for excluding variable names and
|
||||
property names. It could look like this, for example:
|
||||
|
||||
```js
|
||||
{
|
||||
"vars": [ "define", "require", ... ],
|
||||
"props": [ "length", "prototype", ... ]
|
||||
}
|
||||
```
|
||||
|
||||
`--reserved-file` can be an array of file names (either a single
|
||||
comma-separated argument, or you can pass multiple `--reserved-file`
|
||||
arguments) — in this case it will exclude names from all those files.
|
||||
|
||||
A default exclusion file is provided in `tools/domprops.json` which should
|
||||
cover most standard JS and DOM properties defined in various browsers. Pass
|
||||
`--reserve-domprops` to read that in.
|
||||
|
||||
When you compress multiple files using this option, in order for them to
|
||||
work together in the end we need to ensure somehow that one property gets
|
||||
mangled to the same name in all of them. For this, pass `--name-cache
|
||||
filename.json` and UglifyJS will maintain these mappings in a file which can
|
||||
then be reused. It should be initially empty. Example:
|
||||
|
||||
```
|
||||
rm -f /tmp/cache.json # start fresh
|
||||
uglifyjs file1.js file2.js --mangle-props --name-cache /tmp/cache.json -o part1.js
|
||||
uglifyjs file3.js file4.js --mangle-props --name-cache /tmp/cache.json -o part2.js
|
||||
```
|
||||
|
||||
Now, `part1.js` and `part2.js` will be consistent with each other in terms
|
||||
of mangled property names.
|
||||
|
||||
Using the name cache is not necessary if you compress all your files in a
|
||||
single call to UglifyJS.
|
||||
|
||||
## Compressor options
|
||||
|
||||
You need to pass `--compress` (`-c`) to enable the compressor. Optionally
|
||||
you can pass a comma-separated list of options. Options are in the form
|
||||
`foo=bar`, or just `foo` (the latter implies a boolean option that you want
|
||||
to set `true`; it's effectively a shortcut for `foo=true`).
|
||||
|
||||
- `sequences` -- join consecutive simple statements using the comma operator
|
||||
|
||||
- `properties` -- rewrite property access using the dot notation, for
|
||||
example `foo["bar"] → foo.bar`
|
||||
|
||||
- `dead_code` -- remove unreachable code
|
||||
|
||||
- `drop_debugger` -- remove `debugger;` statements
|
||||
|
||||
- `unsafe` (default: false) -- apply "unsafe" transformations (discussion below)
|
||||
|
||||
- `conditionals` -- apply optimizations for `if`-s and conditional
|
||||
expressions
|
||||
|
||||
- `comparisons` -- apply certain optimizations to binary nodes, for example:
|
||||
`!(a <= b) → a > b` (only when `unsafe`), attempts to negate binary nodes,
|
||||
e.g. `a = !b && !c && !d && !e → a=!(b||c||d||e)` etc.
|
||||
|
||||
- `evaluate` -- attempt to evaluate constant expressions
|
||||
|
||||
- `booleans` -- various optimizations for boolean context, for example `!!a
|
||||
? b : c → a ? b : c`
|
||||
|
||||
- `loops` -- optimizations for `do`, `while` and `for` loops when we can
|
||||
statically determine the condition
|
||||
|
||||
- `unused` -- drop unreferenced functions and variables
|
||||
|
||||
- `hoist_funs` -- hoist function declarations
|
||||
|
||||
- `hoist_vars` (default: false) -- hoist `var` declarations (this is `false`
|
||||
by default because it seems to increase the size of the output in general)
|
||||
|
||||
- `if_return` -- optimizations for if/return and if/continue
|
||||
|
||||
- `join_vars` -- join consecutive `var` statements
|
||||
|
||||
- `cascade` -- small optimization for sequences, transform `x, x` into `x`
|
||||
and `x = something(), x` into `x = something()`
|
||||
|
||||
- `warnings` -- display warnings when dropping unreachable code or unused
|
||||
declarations etc.
|
||||
|
||||
- `negate_iife` -- negate "Immediately-Called Function Expressions"
|
||||
where the return value is discarded, to avoid the parens that the
|
||||
code generator would insert.
|
||||
|
||||
- `pure_getters` -- the default is `false`. If you pass `true` for
|
||||
this, UglifyJS will assume that object property access
|
||||
(e.g. `foo.bar` or `foo["bar"]`) doesn't have any side effects.
|
||||
|
||||
- `pure_funcs` -- default `null`. You can pass an array of names and
|
||||
UglifyJS will assume that those functions do not produce side
|
||||
effects. DANGER: will not check if the name is redefined in scope.
|
||||
An example case here, for instance `var q = Math.floor(a/b)`. If
|
||||
variable `q` is not used elsewhere, UglifyJS will drop it, but will
|
||||
still keep the `Math.floor(a/b)`, not knowing what it does. You can
|
||||
pass `pure_funcs: [ 'Math.floor' ]` to let it know that this
|
||||
function won't produce any side effect, in which case the whole
|
||||
statement would get discarded. The current implementation adds some
|
||||
overhead (compression will be slower).
|
||||
|
||||
- `drop_console` -- default `false`. Pass `true` to discard calls to
|
||||
`console.*` functions.
|
||||
|
||||
- `keep_fargs` -- default `false`. Pass `true` to prevent the
|
||||
compressor from discarding unused function arguments. You need this
|
||||
for code which relies on `Function.length`.
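
As an illustrative command (not from the original README) showing the comma-separated syntax with a few of the options listed above:

    uglifyjs app.js -o app.min.js -m -c drop_console=true,unused=false,warnings=false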
|
||||
|
||||
### The `unsafe` option
|
||||
|
||||
It enables some transformations that *might* break code logic in certain
|
||||
contrived cases, but should be fine for most code. You might want to try it
|
||||
on your own code, it should reduce the minified size. Here's what happens
|
||||
when this flag is on:
|
||||
|
||||
- `new Array(1, 2, 3)` or `Array(1, 2, 3)` → `[ 1, 2, 3 ]`
|
||||
- `new Object()` → `{}`
|
||||
- `String(exp)` or `exp.toString()` → `"" + exp`
|
||||
- `new Object/RegExp/Function/Error/Array (...)` → we discard the `new`
|
||||
- `typeof foo == "undefined"` → `foo === void 0`
|
||||
- `void 0` → `undefined` (if there is a variable named "undefined" in
|
||||
scope; we do it because the variable name will be mangled, typically
|
||||
reduced to a single character)
|
||||
- discards unused function arguments (affects `function.length`)
|
||||
|
||||
### Conditional compilation
|
||||
|
||||
You can use the `--define` (`-d`) switch in order to declare global
|
||||
variables that UglifyJS will assume to be constants (unless defined in
|
||||
scope). For example if you pass `--define DEBUG=false` then, coupled with
|
||||
dead code removal UglifyJS will discard the following from the output:
|
||||
```javascript
|
||||
if (DEBUG) {
|
||||
console.log("debug stuff");
|
||||
}
|
||||
```
|
||||
|
||||
UglifyJS will warn about the condition being always false and about dropping
|
||||
unreachable code; for now there is no option to turn off only this specific
|
||||
warning, you can pass `warnings=false` to turn off *all* warnings.
|
||||
|
||||
Another way of doing that is to declare your globals as constants in a
|
||||
separate file and include it into the build. For example you can have a
|
||||
`build/defines.js` file with the following:
|
||||
```javascript
|
||||
const DEBUG = false;
|
||||
const PRODUCTION = true;
|
||||
// etc.
|
||||
```
|
||||
|
||||
and build your code like this:
|
||||
|
||||
uglifyjs build/defines.js js/foo.js js/bar.js... -c
|
||||
|
||||
UglifyJS will notice the constants and, since they cannot be altered, it
|
||||
will evaluate references to them to the value itself and drop unreachable
|
||||
code as usual. The possible downside of this approach is that the build
|
||||
will contain the `const` declarations.
|
||||
|
||||
<a name="codegen-options"></a>
## Beautifier options

The code generator tries to output the shortest code possible by default. In
case you want beautified output, pass `--beautify` (`-b`). Optionally you
can pass additional arguments that control the code output:

- `beautify` (default `true`) -- whether to actually beautify the output.
  Passing `-b` will set this to true, but you might need to pass `-b` even
  when you want to generate minified code, in order to specify additional
  arguments, so you can use `-b beautify=false` to override it.
- `indent-level` (default 4)
- `indent-start` (default 0) -- prefix all lines by that many spaces
- `quote-keys` (default `false`) -- pass `true` to quote all keys in literal
  objects
- `space-colon` (default `true`) -- insert a space after colon signs
- `ascii-only` (default `false`) -- escape Unicode characters in strings and
  regexps
- `inline-script` (default `false`) -- escape the slash in occurrences of
  `</script` in strings
- `width` (default 80) -- only takes effect when beautification is on; this
  specifies an (approximate) line width that the beautifier will try to
  obey. It refers to the width of the line text (excluding indentation).
  It doesn't work very well currently, but it does make the code generated
  by UglifyJS more readable.
- `max-line-len` (default 32000) -- maximum line length (for uglified code)
- `bracketize` (default `false`) -- always insert brackets in `if`, `for`,
  `do`, `while` or `with` statements, even if their body is a single
  statement.
- `semicolons` (default `true`) -- separate statements with semicolons. If
  you pass `false` then whenever possible we will use a newline instead of a
  semicolon, leading to more readable output of uglified code (size before
  gzip could be smaller; size after gzip insignificantly larger).
- `preamble` (default `null`) -- when passed it must be a string and
  it will be prepended to the output literally. The source map will
  adjust for this text. Can be used to insert a comment containing
  licensing information, for example.
- `quote_style` (default `0`) -- preferred quote style for strings (affects
  quoted property names and directives as well):
  - `0` -- prefers double quotes, switches to single quotes when there are
    more double quotes in the string itself.
  - `1` -- always use single quotes
  - `2` -- always use double quotes
  - `3` -- always use the original quotes

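When generating output through the API (see "Generating output" below), similar options are passed to the output stream, spelled with underscores instead of dashes; this spelling is an assumption here, mirroring `quote_style` above. A small sketch, assuming `compressed_ast` is an AST you have already built:

```javascript
// Underscore spellings of the flags above are assumed for the API form.
var code = compressed_ast.print_to_string({
    beautify     : true,
    indent_level : 2,
    quote_keys   : true,
    max_line_len : 500
});
```
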
### Keeping copyright notices or other comments

You can pass `--comments` to retain certain comments in the output. By
default it will keep JSDoc-style comments that contain "@preserve",
"@license" or "@cc_on" (conditional compilation for IE). You can pass
`--comments all` to keep all the comments, or a valid JavaScript regexp to
keep only comments that match this regexp. For example `--comments
'/foo|bar/'` will keep only comments that contain "foo" or "bar".

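For example, to keep only comments that start with a bang or contain a license tag (the regexp is just an illustration), you could run:

    uglifyjs file.js -c -m --comments '/^!|@license|@preserve/' -o file.min.js
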
Note, however, that there might be situations where comments are lost. For
example:
```javascript
function f() {
    /** @preserve Foo Bar */
    function g() {
        // this function is never called
    }
    return something();
}
```

Even though it has "@preserve", the comment will be lost because the inner
function `g` (which is the AST node to which the comment is attached) is
discarded by the compressor as not referenced.

The safest place for copyright information (or other info that needs to be
kept in the output) is in comments attached to toplevel nodes.

## Support for the SpiderMonkey AST

UglifyJS2 has its own abstract syntax tree format; for
[practical reasons](http://lisperator.net/blog/uglifyjs-why-not-switching-to-spidermonkey-ast/)
we can't easily change to using the SpiderMonkey AST internally. However,
UglifyJS now has a converter which can import a SpiderMonkey AST.

For example [Acorn][acorn] is a super-fast parser that produces a
SpiderMonkey AST. It has a small CLI utility that parses one file and dumps
the AST in JSON on the standard output. To use UglifyJS to mangle and
compress that:

    acorn file.js | uglifyjs --spidermonkey -m -c

The `--spidermonkey` option tells UglifyJS that the input files are not
JavaScript, but JS code described in SpiderMonkey AST in JSON. Therefore we
don't use our own parser in this case, but just transform that AST into our
internal AST.

### Use Acorn for parsing

More for fun, I added the `--acorn` option which will use Acorn to do all
the parsing. If you pass this option, UglifyJS will `require("acorn")`.

Acorn is really fast (e.g. 250ms instead of 380ms on some 650K of code), but
converting the SpiderMonkey tree that Acorn produces takes another 150ms, so
in total it's a bit more than just using UglifyJS's own parser.

### Using UglifyJS to transform SpiderMonkey AST

Now you can use UglifyJS as any other intermediate tool for transforming
JavaScript ASTs in SpiderMonkey format.

Example:

```javascript
function uglify(ast, options, mangle) {
    // Conversion from SpiderMonkey AST to internal format
    var uAST = UglifyJS.AST_Node.from_mozilla_ast(ast);

    // Compression
    uAST.figure_out_scope();
    uAST = uAST.transform(UglifyJS.Compressor(options));

    // Mangling (optional)
    if (mangle) {
        uAST.figure_out_scope();
        uAST.compute_char_frequency();
        uAST.mangle_names();
    }

    // Back-conversion to SpiderMonkey AST
    return uAST.to_mozilla_ast();
}
```

Check out the
[original blog post](http://rreverser.com/using-mozilla-ast-with-uglifyjs/)
for details.

API Reference
-------------

Assuming installation via NPM, you can load UglifyJS in your application
like this:
```javascript
var UglifyJS = require("uglify-js");
```

It exports a lot of names, but I'll discuss here the basics that are needed
for parsing, mangling and compressing a piece of code. The sequence is (1)
parse, (2) compress, (3) mangle, (4) generate output code.

### The simple way

There's a single toplevel function which combines all the steps. If you
don't need additional customization, you might want to go with `minify`.
Example:
```javascript
var result = UglifyJS.minify("/path/to/file.js");
console.log(result.code); // minified output
// if you need to pass code instead of a file name
var result = UglifyJS.minify("var b = function () {};", {fromString: true});
```

You can also compress multiple files:
```javascript
var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ]);
console.log(result.code);
```

To generate a source map:
```javascript
var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ], {
    outSourceMap: "out.js.map"
});
console.log(result.code); // minified output
console.log(result.map);
```

Note that the source map is not saved in a file, it's just returned in
`result.map`. The value passed for `outSourceMap` is only used to set the
`file` attribute in the source map (see [the spec][sm-spec]).

You can also specify the `sourceRoot` property to be included in the source map:
```javascript
var result = UglifyJS.minify([ "file1.js", "file2.js", "file3.js" ], {
    outSourceMap: "out.js.map",
    sourceRoot: "http://example.com/src"
});
```

If you're compressing compiled JavaScript and have a source map for it, you
can use the `inSourceMap` argument:
```javascript
var result = UglifyJS.minify("compiled.js", {
    inSourceMap: "compiled.js.map",
    outSourceMap: "minified.js.map"
});
// same as before, it returns `code` and `map`
```

If your input source map is not in a file, you can pass it in as an object
using the `inSourceMap` argument:

```javascript
var result = UglifyJS.minify("compiled.js", {
    inSourceMap: JSON.parse(my_source_map_string),
    outSourceMap: "minified.js.map"
});
```

The `inSourceMap` is only used if you also request `outSourceMap` (it makes
no sense otherwise).

Other options:

- `warnings` (default `false`) — pass `true` to display compressor warnings.

- `fromString` (default `false`) — if you pass `true` then you can pass
  JavaScript source code, rather than file names.

- `mangle` — pass `false` to skip mangling names.

- `output` (default `null`) — pass an object if you wish to specify
  additional [output options][codegen]. The defaults are optimized
  for best compression.

- `compress` (default `{}`) — pass `false` to skip compressing entirely.
  Pass an object to specify custom [compressor options][compressor].

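A combined sketch using several of these options at once (the inline source string is only an illustration):

```javascript
var result = UglifyJS.minify("function square(x) { console.log('called'); return x * x; }", {
    fromString : true,                    // treat the first argument as source code
    warnings   : true,                    // display compressor warnings
    mangle     : false,                   // keep the original names
    compress   : { drop_console: true },  // custom compressor options
    output     : { beautify: true }       // custom output options
});
console.log(result.code);
```
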
We could add more options to `UglifyJS.minify` — if you need additional
functionality please suggest!

### The hard way

What follows is more detailed API info, in case the `minify` function is
too simple for your needs.

#### The parser
```javascript
var toplevel_ast = UglifyJS.parse(code, options);
```

`options` is optional and if present it must be an object. The following
properties are available:

- `strict` — disable automatic semicolon insertion and support for trailing
  comma in arrays and objects
- `filename` — the name of the file where this code is coming from
- `toplevel` — a `toplevel` node (as returned by a previous invocation of
  `parse`)

The last two options are useful when you'd like to minify multiple files and
get a single file as the output and a proper source map. Our CLI tool does
something like this:
```javascript
var toplevel = null;
files.forEach(function(file){
    var code = fs.readFileSync(file, "utf8");
    toplevel = UglifyJS.parse(code, {
        filename: file,
        toplevel: toplevel
    });
});
```

After this, we have in `toplevel` a big AST containing all our files, with
each token having proper information about where it came from.

#### Scope information

UglifyJS contains a scope analyzer that you need to call manually before
compressing or mangling. Basically it augments various nodes in the AST
with information about where a name is defined, how many times a name is
referenced, whether it is a global or not, whether a function uses `eval` or
the `with` statement, etc. I will discuss this some place else; for now
what's important to know is that you need to call the following before doing
anything with the tree:
```javascript
toplevel.figure_out_scope()
```

#### Compression

Like this:
```javascript
var compressor = UglifyJS.Compressor(options);
var compressed_ast = toplevel.transform(compressor);
```

The `options` argument can be missing. Available options are discussed above
in "Compressor options". Defaults should lead to best compression in most
scripts.

The compressor is destructive, so don't rely on `toplevel` remaining the
original tree.

#### Mangling

After compression it is a good idea to call `figure_out_scope` again (since
the compressor might drop unused variables / unreachable code and this might
change the number of identifiers or their position). Optionally, you can
apply a trick that helps after gzip (counting character frequency in
non-mangleable words). Example:
```javascript
compressed_ast.figure_out_scope();
compressed_ast.compute_char_frequency();
compressed_ast.mangle_names();
```

#### Generating output

AST nodes have a `print` method that takes an output stream. Essentially,
to generate code you do this:
```javascript
var stream = UglifyJS.OutputStream(options);
compressed_ast.print(stream);
var code = stream.toString(); // this is your minified code
```

or, as a shortcut, you can do:
```javascript
var code = compressed_ast.print_to_string(options);
```

As usual, `options` is optional. The output stream accepts a lot of options,
most of them documented above in section "Beautifier options". The two
which we care about here are `source_map` and `comments`.

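Putting the previous steps together, here is a minimal end-to-end sketch of the "hard way" for a single hypothetical input file:

```javascript
var UglifyJS = require("uglify-js");
var fs = require("fs");

var code = fs.readFileSync("input.js", "utf8"); // placeholder file name

// 1. parse
var toplevel = UglifyJS.parse(code, { filename: "input.js" });

// 2. compress (scope information is needed first)
toplevel.figure_out_scope();
var compressed_ast = toplevel.transform(UglifyJS.Compressor());

// 3. mangle
compressed_ast.figure_out_scope();
compressed_ast.compute_char_frequency();
compressed_ast.mangle_names();

// 4. generate output
fs.writeFileSync("input.min.js", compressed_ast.print_to_string(), "utf8");
```
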
#### Keeping comments in the output

In order to keep certain comments in the output you need to pass the
`comments` option. Pass a RegExp or a function. If you pass a RegExp, only
those comments whose body matches the regexp will be kept. Note that the
body does not include the initial `//` or `/*`. If you pass a function, it
will be called for every comment in the tree and will receive two arguments:
the node that the comment is attached to, and the comment token itself.

The comment token has these properties:

- `type`: "comment1" for single-line comments or "comment2" for multi-line
  comments
- `value`: the comment body
- `pos` and `endpos`: the start/end positions (zero-based indexes) in the
  original code where this comment appears
- `line` and `col`: the line and column where this comment appears in the
  original code
- `file`: the file name of the original file
- `nlb`: true if there was a newline before this comment in the original
  code, or if this comment contains a newline

Your function should return `true` to keep the comment, or a falsy value
otherwise.

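A sketch of such a filter function, keeping only JSDoc-style license comments (this mirrors what the bundled CLI does by default):

```javascript
var code = compressed_ast.print_to_string({
    comments: function(node, comment) {
        if (comment.type == "comment2") {
            // multi-line comment: keep it only if it looks like a license header
            return /@preserve|@license|@cc_on/i.test(comment.value);
        }
        return false; // drop single-line comments
    }
});
```
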
#### Generating a source mapping

You need to pass the `source_map` argument when calling `print`. It needs
to be a `SourceMap` object (which is a thin wrapper on top of the
[source-map][source-map] library).

Example:
```javascript
var source_map = UglifyJS.SourceMap(source_map_options);
var stream = UglifyJS.OutputStream({
    ...
    source_map: source_map
});
compressed_ast.print(stream);

var code = stream.toString();
var map = source_map.toString(); // JSON output for your source map
```

The `source_map_options` (optional) can contain the following properties:

- `file`: the name of the JavaScript output file that this mapping refers to
- `root`: the `sourceRoot` property (see the [spec][sm-spec])
- `orig`: the "original source map", handy when you compress generated JS
  and want to map the minified output back to the original code where it
  came from. It can be simply a string in JSON, or a JSON object containing
  the original source map.

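For instance, to chain onto an existing map produced by an earlier compile step (the file names are placeholders):

```javascript
var fs = require("fs");

var source_map = UglifyJS.SourceMap({
    file : "minified.js",                             // output file this map describes
    root : "http://example.com/src",                  // optional sourceRoot
    orig : fs.readFileSync("compiled.js.map", "utf8") // original map, as a JSON string
});
```
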
[acorn]: https://github.com/marijnh/acorn
[source-map]: https://github.com/mozilla/source-map
[sm-spec]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
[codegen]: http://lisperator.net/uglifyjs/codegen
[compressor]: http://lisperator.net/uglifyjs/compress

77
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/bin/extract-props.js
generated
vendored
Executable file
@@ -0,0 +1,77 @@
|
||||
#! /usr/bin/env node
|
||||
|
||||
"use strict";
|
||||
|
||||
var U2 = require("../tools/node");
|
||||
var fs = require("fs");
|
||||
var yargs = require("yargs");
|
||||
var ARGS = yargs
|
||||
.describe("o", "Output file")
|
||||
.argv;
|
||||
var files = ARGS._.slice();
|
||||
var output = {
|
||||
vars: {},
|
||||
props: {}
|
||||
};
|
||||
|
||||
if (ARGS.o) try {
|
||||
output = JSON.parse(fs.readFileSync(ARGS.o, "utf8"));
|
||||
} catch(ex) {}
|
||||
|
||||
files.forEach(getProps);
|
||||
|
||||
if (ARGS.o) {
|
||||
fs.writeFileSync(ARGS.o, JSON.stringify(output, null, 2), "utf8");
|
||||
} else {
|
||||
console.log("%s", JSON.stringify(output, null, 2));
|
||||
}
|
||||
|
||||
function getProps(filename) {
|
||||
var code = fs.readFileSync(filename, "utf8");
|
||||
var ast = U2.parse(code);
|
||||
|
||||
ast.walk(new U2.TreeWalker(function(node){
|
||||
if (node instanceof U2.AST_ObjectKeyVal) {
|
||||
add(node.key);
|
||||
}
|
||||
else if (node instanceof U2.AST_ObjectProperty) {
|
||||
add(node.key.name);
|
||||
}
|
||||
else if (node instanceof U2.AST_Dot) {
|
||||
add(node.property);
|
||||
}
|
||||
else if (node instanceof U2.AST_Sub) {
|
||||
addStrings(node.property);
|
||||
}
|
||||
}));
|
||||
|
||||
function addStrings(node) {
|
||||
var out = {};
|
||||
try {
|
||||
(function walk(node){
|
||||
node.walk(new U2.TreeWalker(function(node){
|
||||
if (node instanceof U2.AST_Seq) {
|
||||
walk(node.cdr);
|
||||
return true;
|
||||
}
|
||||
if (node instanceof U2.AST_String) {
|
||||
add(node.value);
|
||||
return true;
|
||||
}
|
||||
if (node instanceof U2.AST_Conditional) {
|
||||
walk(node.consequent);
|
||||
walk(node.alternative);
|
||||
return true;
|
||||
}
|
||||
throw out;
|
||||
}));
|
||||
})(node);
|
||||
} catch(ex) {
|
||||
if (ex !== out) throw ex;
|
||||
}
|
||||
}
|
||||
|
||||
function add(name) {
|
||||
output.props[name] = true;
|
||||
}
|
||||
}
|
||||
541
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/bin/uglifyjs
generated
vendored
Executable file
@@ -0,0 +1,541 @@
|
||||
#! /usr/bin/env node
|
||||
// -*- js -*-
|
||||
|
||||
"use strict";
|
||||
|
||||
var UglifyJS = require("../tools/node");
|
||||
var sys = require("util");
|
||||
var yargs = require("yargs");
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
var async = require("async");
|
||||
var acorn;
|
||||
var ARGS = yargs
|
||||
.usage("$0 input1.js [input2.js ...] [options]\n\
|
||||
Use a single dash to read input from the standard input.\
|
||||
\n\n\
|
||||
NOTE: by default there is no mangling/compression.\n\
|
||||
Without [options] it will simply parse input files and dump the AST\n\
|
||||
with whitespace and comments discarded. To achieve compression and\n\
|
||||
mangling you need to use `-c` and `-m`.\
|
||||
")
|
||||
.describe("source-map", "Specify an output file where to generate source map.")
|
||||
.describe("source-map-root", "The path to the original source to be included in the source map.")
|
||||
.describe("source-map-url", "The path to the source map to be added in //# sourceMappingURL. Defaults to the value passed with --source-map.")
|
||||
.describe("source-map-include-sources", "Pass this flag if you want to include the content of source files in the source map as sourcesContent property.")
|
||||
.describe("in-source-map", "Input source map, useful if you're compressing JS that was generated from some other original code.")
|
||||
.describe("screw-ie8", "Pass this flag if you don't care about full compliance with Internet Explorer 6-8 quirks (by default UglifyJS will try to be IE-proof).")
|
||||
.describe("expr", "Parse a single expression, rather than a program (for parsing JSON)")
|
||||
.describe("p", "Skip prefix for original filenames that appear in source maps. \
|
||||
For example -p 3 will drop 3 directories from file names and ensure they are relative paths. \
|
||||
You can also specify -p relative, which will make UglifyJS figure out itself the relative paths between original sources, \
|
||||
the source map and the output file.")
|
||||
.describe("o", "Output file (default STDOUT).")
|
||||
.describe("b", "Beautify output/specify output options.")
|
||||
.describe("m", "Mangle names/pass mangler options.")
|
||||
.describe("r", "Reserved names to exclude from mangling.")
|
||||
.describe("c", "Enable compressor/pass compressor options. \
|
||||
Pass options like -c hoist_vars=false,if_return=false. \
|
||||
Use -c with no argument to use the default compression options.")
|
||||
.describe("d", "Global definitions")
|
||||
.describe("e", "Embed everything in a big function, with a configurable parameter/argument list.")
|
||||
|
||||
.describe("comments", "Preserve copyright comments in the output. \
|
||||
By default this works like Google Closure, keeping JSDoc-style comments that contain \"@license\" or \"@preserve\". \
|
||||
You can optionally pass one of the following arguments to this flag:\n\
|
||||
- \"all\" to keep all comments\n\
|
||||
- a valid JS regexp (needs to start with a slash) to keep only comments that match.\n\
|
||||
\
|
||||
Note that currently not *all* comments can be kept when compression is on, \
|
||||
because of dead code removal or cascading statements into sequences.")
|
||||
|
||||
.describe("preamble", "Preamble to prepend to the output. You can use this to insert a \
|
||||
comment, for example for licensing information. This will not be \
|
||||
parsed, but the source map will adjust for its presence.")
|
||||
|
||||
.describe("stats", "Display operations run time on STDERR.")
|
||||
.describe("acorn", "Use Acorn for parsing.")
|
||||
.describe("spidermonkey", "Assume input files are SpiderMonkey AST format (as JSON).")
|
||||
.describe("self", "Build itself (UglifyJS2) as a library (implies --wrap=UglifyJS --export-all)")
|
||||
.describe("wrap", "Embed everything in a big function, making the “exports” and “global” variables available. \
|
||||
You need to pass an argument to this option to specify the name that your module will take when included in, say, a browser.")
|
||||
.describe("export-all", "Only used when --wrap, this tells UglifyJS to add code to automatically export all globals.")
|
||||
.describe("lint", "Display some scope warnings")
|
||||
.describe("v", "Verbose")
|
||||
.describe("V", "Print version number and exit.")
|
||||
.describe("noerr", "Don't throw an error for unknown options in -c, -b or -m.")
|
||||
.describe("bare-returns", "Allow return outside of functions. Useful when minifying CommonJS modules.")
|
||||
.describe("keep-fnames", "Do not mangle/drop function names. Useful for code relying on Function.prototype.name.")
|
||||
.describe("quotes", "Quote style (0 - auto, 1 - single, 2 - double, 3 - original)")
|
||||
.describe("reserved-file", "File containing reserved names")
|
||||
.describe("reserve-domprops", "Make (most?) DOM properties reserved for --mangle-props")
|
||||
.describe("mangle-props", "Mangle property names")
|
||||
.describe("name-cache", "File to hold mangled names mappings")
|
||||
|
||||
.alias("p", "prefix")
|
||||
.alias("o", "output")
|
||||
.alias("v", "verbose")
|
||||
.alias("b", "beautify")
|
||||
.alias("m", "mangle")
|
||||
.alias("c", "compress")
|
||||
.alias("d", "define")
|
||||
.alias("r", "reserved")
|
||||
.alias("V", "version")
|
||||
.alias("e", "enclose")
|
||||
.alias("q", "quotes")
|
||||
|
||||
.string("source-map")
|
||||
.string("source-map-root")
|
||||
.string("source-map-url")
|
||||
.string("b")
|
||||
.string("beautify")
|
||||
.string("m")
|
||||
.string("mangle")
|
||||
.string("c")
|
||||
.string("compress")
|
||||
.string("d")
|
||||
.string("define")
|
||||
.string("e")
|
||||
.string("enclose")
|
||||
.string("comments")
|
||||
.string("wrap")
|
||||
.string("p")
|
||||
.string("prefix")
|
||||
.string("name-cache")
|
||||
.array("reserved-file")
|
||||
|
||||
.boolean("expr")
|
||||
.boolean("source-map-include-sources")
|
||||
.boolean("screw-ie8")
|
||||
.boolean("export-all")
|
||||
.boolean("self")
|
||||
.boolean("v")
|
||||
.boolean("verbose")
|
||||
.boolean("stats")
|
||||
.boolean("acorn")
|
||||
.boolean("spidermonkey")
|
||||
.boolean("lint")
|
||||
.boolean("V")
|
||||
.boolean("version")
|
||||
.boolean("noerr")
|
||||
.boolean("bare-returns")
|
||||
.boolean("keep-fnames")
|
||||
.boolean("mangle-props")
|
||||
.boolean("reserve-domprops")
|
||||
|
||||
.wrap(80)
|
||||
|
||||
.argv
|
||||
;
|
||||
|
||||
normalize(ARGS);
|
||||
|
||||
if (ARGS.noerr) {
|
||||
UglifyJS.DefaultsError.croak = function(msg, defs) {
|
||||
print_error("WARN: " + msg);
|
||||
};
|
||||
}
|
||||
|
||||
if (ARGS.version || ARGS.V) {
|
||||
var json = require("../package.json");
|
||||
print(json.name + ' ' + json.version);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (ARGS.ast_help) {
|
||||
var desc = UglifyJS.describe_ast();
|
||||
print(typeof desc == "string" ? desc : JSON.stringify(desc, null, 2));
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (ARGS.h || ARGS.help) {
|
||||
print(yargs.help());
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (ARGS.acorn) {
|
||||
acorn = require("acorn");
|
||||
}
|
||||
|
||||
var COMPRESS = getOptions("c", true);
|
||||
var MANGLE = getOptions("m", true);
|
||||
var BEAUTIFY = getOptions("b", true);
|
||||
var RESERVED = null;
|
||||
|
||||
if (ARGS.reserved_file) ARGS.reserved_file.forEach(function(filename){
|
||||
RESERVED = UglifyJS.readReservedFile(filename, RESERVED);
|
||||
});
|
||||
|
||||
if (ARGS.reserve_domprops) {
|
||||
RESERVED = UglifyJS.readDefaultReservedFile(RESERVED);
|
||||
}
|
||||
|
||||
if (ARGS.d) {
|
||||
if (COMPRESS) COMPRESS.global_defs = getOptions("d");
|
||||
}
|
||||
|
||||
if (ARGS.r) {
|
||||
if (MANGLE) MANGLE.except = ARGS.r.replace(/^\s+|\s+$/g).split(/\s*,+\s*/);
|
||||
}
|
||||
|
||||
if (RESERVED && MANGLE) {
|
||||
if (!MANGLE.except) MANGLE.except = RESERVED.vars;
|
||||
else MANGLE.except = MANGLE.except.concat(RESERVED.vars);
|
||||
}
|
||||
|
||||
function readNameCache(key) {
|
||||
return UglifyJS.readNameCache(ARGS.name_cache, key);
|
||||
}
|
||||
|
||||
function writeNameCache(key, cache) {
|
||||
return UglifyJS.writeNameCache(ARGS.name_cache, key, cache);
|
||||
}
|
||||
|
||||
if (ARGS.quotes === true) {
|
||||
ARGS.quotes = 3;
|
||||
}
|
||||
|
||||
var OUTPUT_OPTIONS = {
|
||||
beautify : BEAUTIFY ? true : false,
|
||||
preamble : ARGS.preamble || null,
|
||||
quote_style : ARGS.quotes != null ? ARGS.quotes : 0
|
||||
};
|
||||
|
||||
if (ARGS.screw_ie8) {
|
||||
if (COMPRESS) COMPRESS.screw_ie8 = true;
|
||||
if (MANGLE) MANGLE.screw_ie8 = true;
|
||||
OUTPUT_OPTIONS.screw_ie8 = true;
|
||||
}
|
||||
|
||||
if (ARGS.keep_fnames) {
|
||||
if (COMPRESS) COMPRESS.keep_fnames = true;
|
||||
if (MANGLE) MANGLE.keep_fnames = true;
|
||||
}
|
||||
|
||||
if (BEAUTIFY)
|
||||
UglifyJS.merge(OUTPUT_OPTIONS, BEAUTIFY);
|
||||
|
||||
if (ARGS.comments != null) {
|
||||
if (/^\/.*\/[a-zA-Z]*$/.test(ARGS.comments)) {
|
||||
var regex_pos = ARGS.comments.lastIndexOf("/");
|
||||
try {
|
||||
OUTPUT_OPTIONS.comments = new RegExp(ARGS.comments.substr(1, regex_pos - 1), ARGS.comments.substr(regex_pos + 1));
|
||||
} catch (e) {
|
||||
print_error("ERROR: Invalid --comments: " + e.message);
|
||||
}
|
||||
} else if (ARGS.comments == "all") {
|
||||
OUTPUT_OPTIONS.comments = true;
|
||||
} else {
|
||||
OUTPUT_OPTIONS.comments = function(node, comment) {
|
||||
var text = comment.value;
|
||||
var type = comment.type;
|
||||
if (type == "comment2") {
|
||||
// multiline comment
|
||||
return /@preserve|@license|@cc_on/i.test(text);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var files = ARGS._.slice();
|
||||
|
||||
if (ARGS.self) {
|
||||
if (files.length > 0) {
|
||||
print_error("WARN: Ignoring input files since --self was passed");
|
||||
}
|
||||
files = UglifyJS.FILES;
|
||||
if (!ARGS.wrap) ARGS.wrap = "UglifyJS";
|
||||
ARGS.export_all = true;
|
||||
}
|
||||
|
||||
var ORIG_MAP = ARGS.in_source_map;
|
||||
|
||||
if (ORIG_MAP) {
|
||||
ORIG_MAP = JSON.parse(fs.readFileSync(ORIG_MAP));
|
||||
if (files.length == 0) {
|
||||
print_error("INFO: Using file from the input source map: " + ORIG_MAP.file);
|
||||
files = [ ORIG_MAP.file ];
|
||||
}
|
||||
if (ARGS.source_map_root == null) {
|
||||
ARGS.source_map_root = ORIG_MAP.sourceRoot;
|
||||
}
|
||||
}
|
||||
|
||||
if (files.length == 0) {
|
||||
files = [ "-" ];
|
||||
}
|
||||
|
||||
if (files.indexOf("-") >= 0 && ARGS.source_map) {
|
||||
print_error("ERROR: Source map doesn't work with input from STDIN");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (files.filter(function(el){ return el == "-" }).length > 1) {
|
||||
print_error("ERROR: Can read a single file from STDIN (two or more dashes specified)");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
var STATS = {};
|
||||
var OUTPUT_FILE = ARGS.o;
|
||||
var TOPLEVEL = null;
|
||||
var P_RELATIVE = ARGS.p && ARGS.p == "relative";
|
||||
var SOURCES_CONTENT = {};
|
||||
|
||||
var SOURCE_MAP = ARGS.source_map ? UglifyJS.SourceMap({
|
||||
file: P_RELATIVE ? path.relative(path.dirname(ARGS.source_map), OUTPUT_FILE) : OUTPUT_FILE,
|
||||
root: ARGS.source_map_root,
|
||||
orig: ORIG_MAP,
|
||||
}) : null;
|
||||
|
||||
OUTPUT_OPTIONS.source_map = SOURCE_MAP;
|
||||
|
||||
try {
|
||||
var output = UglifyJS.OutputStream(OUTPUT_OPTIONS);
|
||||
var compressor = COMPRESS && UglifyJS.Compressor(COMPRESS);
|
||||
} catch(ex) {
|
||||
if (ex instanceof UglifyJS.DefaultsError) {
|
||||
print_error(ex.msg);
|
||||
print_error("Supported options:");
|
||||
print_error(sys.inspect(ex.defs));
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
async.eachLimit(files, 1, function (file, cb) {
|
||||
read_whole_file(file, function (err, code) {
|
||||
if (err) {
|
||||
print_error("ERROR: can't read file: " + file);
|
||||
process.exit(1);
|
||||
}
|
||||
if (ARGS.p != null) {
|
||||
if (P_RELATIVE) {
|
||||
file = path.relative(path.dirname(ARGS.source_map), file).replace(/\\/g, '/');
|
||||
} else {
|
||||
var p = parseInt(ARGS.p, 10);
|
||||
if (!isNaN(p)) {
|
||||
file = file.replace(/^\/+/, "").split(/\/+/).slice(ARGS.p).join("/");
|
||||
}
|
||||
}
|
||||
}
|
||||
SOURCES_CONTENT[file] = code;
|
||||
time_it("parse", function(){
|
||||
if (ARGS.spidermonkey) {
|
||||
var program = JSON.parse(code);
|
||||
if (!TOPLEVEL) TOPLEVEL = program;
|
||||
else TOPLEVEL.body = TOPLEVEL.body.concat(program.body);
|
||||
}
|
||||
else if (ARGS.acorn) {
|
||||
TOPLEVEL = acorn.parse(code, {
|
||||
locations : true,
|
||||
sourceFile : file,
|
||||
program : TOPLEVEL
|
||||
});
|
||||
}
|
||||
else {
|
||||
try {
|
||||
TOPLEVEL = UglifyJS.parse(code, {
|
||||
filename : file,
|
||||
toplevel : TOPLEVEL,
|
||||
expression : ARGS.expr,
|
||||
bare_returns : ARGS.bare_returns,
|
||||
});
|
||||
} catch(ex) {
|
||||
if (ex instanceof UglifyJS.JS_Parse_Error) {
|
||||
print_error("Parse error at " + file + ":" + ex.line + "," + ex.col);
|
||||
print_error(ex.message);
|
||||
print_error(ex.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
throw ex;
|
||||
}
|
||||
};
|
||||
});
|
||||
cb();
|
||||
});
|
||||
}, function () {
|
||||
if (ARGS.acorn || ARGS.spidermonkey) time_it("convert_ast", function(){
|
||||
TOPLEVEL = UglifyJS.AST_Node.from_mozilla_ast(TOPLEVEL);
|
||||
});
|
||||
|
||||
if (ARGS.wrap != null) {
|
||||
TOPLEVEL = TOPLEVEL.wrap_commonjs(ARGS.wrap, ARGS.export_all);
|
||||
}
|
||||
|
||||
if (ARGS.enclose != null) {
|
||||
var arg_parameter_list = ARGS.enclose;
|
||||
if (arg_parameter_list === true) {
|
||||
arg_parameter_list = [];
|
||||
}
|
||||
else if (!(arg_parameter_list instanceof Array)) {
|
||||
arg_parameter_list = [arg_parameter_list];
|
||||
}
|
||||
TOPLEVEL = TOPLEVEL.wrap_enclose(arg_parameter_list);
|
||||
}
|
||||
|
||||
if (ARGS.mangle_props || ARGS.name_cache) (function(){
|
||||
var reserved = RESERVED ? RESERVED.props : null;
|
||||
var cache = readNameCache("props");
|
||||
TOPLEVEL = UglifyJS.mangle_properties(TOPLEVEL, {
|
||||
reserved : reserved,
|
||||
cache : cache,
|
||||
only_cache : !ARGS.mangle_props
|
||||
});
|
||||
writeNameCache("props", cache);
|
||||
})();
|
||||
|
||||
var SCOPE_IS_NEEDED = COMPRESS || MANGLE || ARGS.lint;
|
||||
var TL_CACHE = readNameCache("vars");
|
||||
|
||||
if (SCOPE_IS_NEEDED) {
|
||||
time_it("scope", function(){
|
||||
TOPLEVEL.figure_out_scope({ screw_ie8: ARGS.screw_ie8, cache: TL_CACHE });
|
||||
if (ARGS.lint) {
|
||||
TOPLEVEL.scope_warnings();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (COMPRESS) {
|
||||
time_it("squeeze", function(){
|
||||
TOPLEVEL = TOPLEVEL.transform(compressor);
|
||||
});
|
||||
}
|
||||
|
||||
if (SCOPE_IS_NEEDED) {
|
||||
time_it("scope", function(){
|
||||
TOPLEVEL.figure_out_scope({ screw_ie8: ARGS.screw_ie8, cache: TL_CACHE });
|
||||
if (MANGLE && !TL_CACHE) {
|
||||
TOPLEVEL.compute_char_frequency(MANGLE);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (MANGLE) time_it("mangle", function(){
|
||||
MANGLE.cache = TL_CACHE;
|
||||
TOPLEVEL.mangle_names(MANGLE);
|
||||
});
|
||||
|
||||
writeNameCache("vars", TL_CACHE);
|
||||
|
||||
if (ARGS.source_map_include_sources) {
|
||||
for (var file in SOURCES_CONTENT) {
|
||||
if (SOURCES_CONTENT.hasOwnProperty(file)) {
|
||||
SOURCE_MAP.get().setSourceContent(file, SOURCES_CONTENT[file]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
time_it("generate", function(){
|
||||
TOPLEVEL.print(output);
|
||||
});
|
||||
|
||||
output = output.get();
|
||||
|
||||
if (SOURCE_MAP) {
|
||||
fs.writeFileSync(ARGS.source_map, SOURCE_MAP, "utf8");
|
||||
var source_map_url = ARGS.source_map_url || (
|
||||
P_RELATIVE
|
||||
? path.relative(path.dirname(OUTPUT_FILE), ARGS.source_map)
|
||||
: ARGS.source_map
|
||||
);
|
||||
output += "\n//# sourceMappingURL=" + source_map_url;
|
||||
}
|
||||
|
||||
if (OUTPUT_FILE) {
|
||||
fs.writeFileSync(OUTPUT_FILE, output, "utf8");
|
||||
} else {
|
||||
print(output);
|
||||
}
|
||||
|
||||
if (ARGS.stats) {
|
||||
print_error(UglifyJS.string_template("Timing information (compressed {count} files):", {
|
||||
count: files.length
|
||||
}));
|
||||
for (var i in STATS) if (STATS.hasOwnProperty(i)) {
|
||||
print_error(UglifyJS.string_template("- {name}: {time}s", {
|
||||
name: i,
|
||||
time: (STATS[i] / 1000).toFixed(3)
|
||||
}));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/* -----[ functions ]----- */
|
||||
|
||||
function normalize(o) {
|
||||
for (var i in o) if (o.hasOwnProperty(i) && /-/.test(i)) {
|
||||
o[i.replace(/-/g, "_")] = o[i];
|
||||
delete o[i];
|
||||
}
|
||||
}
|
||||
|
||||
function getOptions(x, constants) {
|
||||
x = ARGS[x];
|
||||
if (x == null) return null;
|
||||
var ret = {};
|
||||
if (x !== "") {
|
||||
var ast;
|
||||
try {
|
||||
ast = UglifyJS.parse(x, { expression: true });
|
||||
} catch(ex) {
|
||||
if (ex instanceof UglifyJS.JS_Parse_Error) {
|
||||
print_error("Error parsing arguments in: " + x);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
ast.walk(new UglifyJS.TreeWalker(function(node){
|
||||
if (node instanceof UglifyJS.AST_Seq) return; // descend
|
||||
if (node instanceof UglifyJS.AST_Assign) {
|
||||
var name = node.left.print_to_string({ beautify: false }).replace(/-/g, "_");
|
||||
var value = node.right;
|
||||
if (constants)
|
||||
value = new Function("return (" + value.print_to_string() + ")")();
|
||||
ret[name] = value;
|
||||
return true; // no descend
|
||||
}
|
||||
if (node instanceof UglifyJS.AST_Symbol || node instanceof UglifyJS.AST_Binary) {
|
||||
var name = node.print_to_string({ beautify: false }).replace(/-/g, "_");
|
||||
ret[name] = true;
|
||||
return true; // no descend
|
||||
}
|
||||
print_error(node.TYPE)
|
||||
print_error("Error parsing arguments in: " + x);
|
||||
process.exit(1);
|
||||
}));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
function read_whole_file(filename, cb) {
|
||||
if (filename == "-") {
|
||||
var chunks = [];
|
||||
process.stdin.setEncoding('utf-8');
|
||||
process.stdin.on('data', function (chunk) {
|
||||
chunks.push(chunk);
|
||||
}).on('end', function () {
|
||||
cb(null, chunks.join(""));
|
||||
});
|
||||
process.openStdin();
|
||||
} else {
|
||||
fs.readFile(filename, "utf-8", cb);
|
||||
}
|
||||
}
|
||||
|
||||
function time_it(name, cont) {
|
||||
var t1 = new Date().getTime();
|
||||
var ret = cont();
|
||||
if (ARGS.stats) {
|
||||
var spent = new Date().getTime() - t1;
|
||||
if (STATS[name]) STATS[name] += spent;
|
||||
else STATS[name] = spent;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
function print_error(msg) {
|
||||
console.error("%s", msg);
|
||||
}
|
||||
|
||||
function print(txt) {
|
||||
console.log("%s", txt);
|
||||
}
|
||||
995
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/ast.js
generated
vendored
Normal file
@@ -0,0 +1,995 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
function DEFNODE(type, props, methods, base) {
|
||||
if (arguments.length < 4) base = AST_Node;
|
||||
if (!props) props = [];
|
||||
else props = props.split(/\s+/);
|
||||
var self_props = props;
|
||||
if (base && base.PROPS)
|
||||
props = props.concat(base.PROPS);
|
||||
var code = "return function AST_" + type + "(props){ if (props) { ";
|
||||
for (var i = props.length; --i >= 0;) {
|
||||
code += "this." + props[i] + " = props." + props[i] + ";";
|
||||
}
|
||||
var proto = base && new base;
|
||||
if (proto && proto.initialize || (methods && methods.initialize))
|
||||
code += "this.initialize();";
|
||||
code += "}}";
|
||||
var ctor = new Function(code)();
|
||||
if (proto) {
|
||||
ctor.prototype = proto;
|
||||
ctor.BASE = base;
|
||||
}
|
||||
if (base) base.SUBCLASSES.push(ctor);
|
||||
ctor.prototype.CTOR = ctor;
|
||||
ctor.PROPS = props || null;
|
||||
ctor.SELF_PROPS = self_props;
|
||||
ctor.SUBCLASSES = [];
|
||||
if (type) {
|
||||
ctor.prototype.TYPE = ctor.TYPE = type;
|
||||
}
|
||||
if (methods) for (i in methods) if (methods.hasOwnProperty(i)) {
|
||||
if (/^\$/.test(i)) {
|
||||
ctor[i.substr(1)] = methods[i];
|
||||
} else {
|
||||
ctor.prototype[i] = methods[i];
|
||||
}
|
||||
}
|
||||
ctor.DEFMETHOD = function(name, method) {
|
||||
this.prototype[name] = method;
|
||||
};
|
||||
return ctor;
|
||||
};
|
||||
|
||||
var AST_Token = DEFNODE("Token", "type value line col pos endline endcol endpos nlb comments_before file", {
|
||||
}, null);
|
||||
|
||||
var AST_Node = DEFNODE("Node", "start end", {
|
||||
clone: function() {
|
||||
return new this.CTOR(this);
|
||||
},
|
||||
$documentation: "Base class of all AST nodes",
|
||||
$propdoc: {
|
||||
start: "[AST_Token] The first token of this node",
|
||||
end: "[AST_Token] The last token of this node"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this);
|
||||
},
|
||||
walk: function(visitor) {
|
||||
return this._walk(visitor); // not sure the indirection will be any help
|
||||
}
|
||||
}, null);
|
||||
|
||||
AST_Node.warn_function = null;
|
||||
AST_Node.warn = function(txt, props) {
|
||||
if (AST_Node.warn_function)
|
||||
AST_Node.warn_function(string_template(txt, props));
|
||||
};
|
||||
|
||||
/* -----[ statements ]----- */
|
||||
|
||||
var AST_Statement = DEFNODE("Statement", null, {
|
||||
$documentation: "Base class of all statements",
|
||||
});
|
||||
|
||||
var AST_Debugger = DEFNODE("Debugger", null, {
|
||||
$documentation: "Represents a debugger statement",
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_Directive = DEFNODE("Directive", "value scope quote", {
|
||||
$documentation: "Represents a directive, like \"use strict\";",
|
||||
$propdoc: {
|
||||
value: "[string] The value of this directive as a plain string (it's not an AST_String!)",
|
||||
scope: "[AST_Scope/S] The scope that this directive affects",
|
||||
quote: "[string] the original quote character"
|
||||
},
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_SimpleStatement = DEFNODE("SimpleStatement", "body", {
|
||||
$documentation: "A statement consisting of an expression, i.e. a = 1 + 2",
|
||||
$propdoc: {
|
||||
body: "[AST_Node] an expression node (should not be instanceof AST_Statement)"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Statement);
|
||||
|
||||
function walk_body(node, visitor) {
|
||||
if (node.body instanceof AST_Statement) {
|
||||
node.body._walk(visitor);
|
||||
}
|
||||
else node.body.forEach(function(stat){
|
||||
stat._walk(visitor);
|
||||
});
|
||||
};
|
||||
|
||||
var AST_Block = DEFNODE("Block", "body", {
|
||||
$documentation: "A body of statements (usually bracketed)",
|
||||
$propdoc: {
|
||||
body: "[AST_Statement*] an array of statements"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_BlockStatement = DEFNODE("BlockStatement", null, {
|
||||
$documentation: "A block statement",
|
||||
}, AST_Block);
|
||||
|
||||
var AST_EmptyStatement = DEFNODE("EmptyStatement", null, {
|
||||
$documentation: "The empty statement (empty block or simply a semicolon)",
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this);
|
||||
}
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_StatementWithBody = DEFNODE("StatementWithBody", "body", {
|
||||
$documentation: "Base class for all statements that contain one nested body: `For`, `ForIn`, `Do`, `While`, `With`",
|
||||
$propdoc: {
|
||||
body: "[AST_Statement] the body; this should always be present, even if it's an AST_EmptyStatement"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_LabeledStatement = DEFNODE("LabeledStatement", "label", {
|
||||
$documentation: "Statement with a label",
|
||||
$propdoc: {
|
||||
label: "[AST_Label] a label definition"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.label._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
var AST_IterationStatement = DEFNODE("IterationStatement", null, {
|
||||
$documentation: "Internal class. All loops inherit from it."
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
var AST_DWLoop = DEFNODE("DWLoop", "condition", {
|
||||
$documentation: "Base class for do/while statements",
|
||||
$propdoc: {
|
||||
condition: "[AST_Node] the loop condition. Should not be instanceof AST_Statement"
|
||||
}
|
||||
}, AST_IterationStatement);
|
||||
|
||||
var AST_Do = DEFNODE("Do", null, {
|
||||
$documentation: "A `do` statement",
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.body._walk(visitor);
|
||||
this.condition._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_DWLoop);
|
||||
|
||||
var AST_While = DEFNODE("While", null, {
|
||||
$documentation: "A `while` statement",
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.condition._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_DWLoop);
|
||||
|
||||
var AST_For = DEFNODE("For", "init condition step", {
|
||||
$documentation: "A `for` statement",
|
||||
$propdoc: {
|
||||
init: "[AST_Node?] the `for` initialization code, or null if empty",
|
||||
condition: "[AST_Node?] the `for` termination clause, or null if empty",
|
||||
step: "[AST_Node?] the `for` update clause, or null if empty"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
if (this.init) this.init._walk(visitor);
|
||||
if (this.condition) this.condition._walk(visitor);
|
||||
if (this.step) this.step._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_IterationStatement);
|
||||
|
||||
var AST_ForIn = DEFNODE("ForIn", "init name object", {
|
||||
$documentation: "A `for ... in` statement",
|
||||
$propdoc: {
|
||||
init: "[AST_Node] the `for/in` initialization code",
|
||||
name: "[AST_SymbolRef?] the loop variable, only if `init` is AST_Var",
|
||||
object: "[AST_Node] the object that we're looping through"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.init._walk(visitor);
|
||||
this.object._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_IterationStatement);
|
||||
|
||||
var AST_With = DEFNODE("With", "expression", {
|
||||
$documentation: "A `with` statement",
|
||||
$propdoc: {
|
||||
expression: "[AST_Node] the `with` expression"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
/* -----[ scope and functions ]----- */
|
||||
|
||||
var AST_Scope = DEFNODE("Scope", "directives variables functions uses_with uses_eval parent_scope enclosed cname", {
|
||||
$documentation: "Base class for all statements introducing a lexical scope",
|
||||
$propdoc: {
|
||||
directives: "[string*/S] an array of directives declared in this scope",
|
||||
variables: "[Object/S] a map of name -> SymbolDef for all variables/functions defined in this scope",
|
||||
functions: "[Object/S] like `variables`, but only lists function declarations",
|
||||
uses_with: "[boolean/S] tells whether this scope uses the `with` statement",
|
||||
uses_eval: "[boolean/S] tells whether this scope contains a direct call to the global `eval`",
|
||||
parent_scope: "[AST_Scope?/S] link to the parent scope",
|
||||
enclosed: "[SymbolDef*/S] a list of all symbol definitions that are accessed from this scope or any subscopes",
|
||||
cname: "[integer/S] current index for mangling variables (used internally by the mangler)",
|
||||
},
|
||||
}, AST_Block);
|
||||
|
||||
var AST_Toplevel = DEFNODE("Toplevel", "globals", {
|
||||
$documentation: "The toplevel scope",
|
||||
$propdoc: {
|
||||
globals: "[Object/S] a map of name -> SymbolDef for all undeclared names",
|
||||
},
|
||||
wrap_enclose: function(arg_parameter_pairs) {
|
||||
var self = this;
|
||||
var args = [];
|
||||
var parameters = [];
|
||||
|
||||
arg_parameter_pairs.forEach(function(pair) {
|
||||
var splitAt = pair.lastIndexOf(":");
|
||||
|
||||
args.push(pair.substr(0, splitAt));
|
||||
parameters.push(pair.substr(splitAt + 1));
|
||||
});
|
||||
|
||||
var wrapped_tl = "(function(" + parameters.join(",") + "){ '$ORIG'; })(" + args.join(",") + ")";
|
||||
wrapped_tl = parse(wrapped_tl);
|
||||
wrapped_tl = wrapped_tl.transform(new TreeTransformer(function before(node){
|
||||
if (node instanceof AST_Directive && node.value == "$ORIG") {
|
||||
return MAP.splice(self.body);
|
||||
}
|
||||
}));
|
||||
return wrapped_tl;
|
||||
},
|
||||
wrap_commonjs: function(name, export_all) {
|
||||
var self = this;
|
||||
var to_export = [];
|
||||
if (export_all) {
|
||||
self.figure_out_scope();
|
||||
self.walk(new TreeWalker(function(node){
|
||||
if (node instanceof AST_SymbolDeclaration && node.definition().global) {
|
||||
if (!find_if(function(n){ return n.name == node.name }, to_export))
|
||||
to_export.push(node);
|
||||
}
|
||||
}));
|
||||
}
|
||||
var wrapped_tl = "(function(exports, global){ global['" + name + "'] = exports; '$ORIG'; '$EXPORTS'; }({}, (function(){return this}())))";
|
||||
wrapped_tl = parse(wrapped_tl);
|
||||
wrapped_tl = wrapped_tl.transform(new TreeTransformer(function before(node){
|
||||
if (node instanceof AST_SimpleStatement) {
|
||||
node = node.body;
|
||||
if (node instanceof AST_String) switch (node.getValue()) {
|
||||
case "$ORIG":
|
||||
return MAP.splice(self.body);
|
||||
case "$EXPORTS":
|
||||
var body = [];
|
||||
to_export.forEach(function(sym){
|
||||
body.push(new AST_SimpleStatement({
|
||||
body: new AST_Assign({
|
||||
left: new AST_Sub({
|
||||
expression: new AST_SymbolRef({ name: "exports" }),
|
||||
property: new AST_String({ value: sym.name }),
|
||||
}),
|
||||
operator: "=",
|
||||
right: new AST_SymbolRef(sym),
|
||||
}),
|
||||
}));
|
||||
});
|
||||
return MAP.splice(body);
|
||||
}
|
||||
}
|
||||
}));
|
||||
return wrapped_tl;
|
||||
}
|
||||
}, AST_Scope);
|
||||
|
||||
var AST_Lambda = DEFNODE("Lambda", "name argnames uses_arguments", {
|
||||
$documentation: "Base class for functions",
|
||||
$propdoc: {
|
||||
name: "[AST_SymbolDeclaration?] the name of this function",
|
||||
argnames: "[AST_SymbolFunarg*] array of function arguments",
|
||||
uses_arguments: "[boolean/S] tells whether this function accesses the arguments array"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
if (this.name) this.name._walk(visitor);
|
||||
this.argnames.forEach(function(arg){
|
||||
arg._walk(visitor);
|
||||
});
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Scope);
|
||||
|
||||
var AST_Accessor = DEFNODE("Accessor", null, {
|
||||
$documentation: "A setter/getter function. The `name` property is always null."
|
||||
}, AST_Lambda);
|
||||
|
||||
var AST_Function = DEFNODE("Function", null, {
|
||||
$documentation: "A function expression"
|
||||
}, AST_Lambda);
|
||||
|
||||
var AST_Defun = DEFNODE("Defun", null, {
|
||||
$documentation: "A function definition"
|
||||
}, AST_Lambda);
|
||||
|
||||
/* -----[ JUMPS ]----- */
|
||||
|
||||
var AST_Jump = DEFNODE("Jump", null, {
|
||||
$documentation: "Base class for “jumps” (for now that's `return`, `throw`, `break` and `continue`)"
|
||||
}, AST_Statement);
|
||||
|
||||
var AST_Exit = DEFNODE("Exit", "value", {
|
||||
$documentation: "Base class for “exits” (`return` and `throw`)",
|
||||
$propdoc: {
|
||||
value: "[AST_Node?] the value returned or thrown by this statement; could be null for AST_Return"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, this.value && function(){
|
||||
this.value._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Jump);
|
||||
|
||||
var AST_Return = DEFNODE("Return", null, {
|
||||
$documentation: "A `return` statement"
|
||||
}, AST_Exit);
|
||||
|
||||
var AST_Throw = DEFNODE("Throw", null, {
|
||||
$documentation: "A `throw` statement"
|
||||
}, AST_Exit);
|
||||
|
||||
var AST_LoopControl = DEFNODE("LoopControl", "label", {
|
||||
$documentation: "Base class for loop control statements (`break` and `continue`)",
|
||||
$propdoc: {
|
||||
label: "[AST_LabelRef?] the label, or null if none",
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, this.label && function(){
|
||||
this.label._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Jump);
|
||||
|
||||
var AST_Break = DEFNODE("Break", null, {
|
||||
$documentation: "A `break` statement"
|
||||
}, AST_LoopControl);
|
||||
|
||||
var AST_Continue = DEFNODE("Continue", null, {
|
||||
$documentation: "A `continue` statement"
|
||||
}, AST_LoopControl);
|
||||
|
||||
/* -----[ IF ]----- */
|
||||
|
||||
var AST_If = DEFNODE("If", "condition alternative", {
|
||||
$documentation: "A `if` statement",
|
||||
$propdoc: {
|
||||
condition: "[AST_Node] the `if` condition",
|
||||
alternative: "[AST_Statement?] the `else` part, or null if not present"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.condition._walk(visitor);
|
||||
this.body._walk(visitor);
|
||||
if (this.alternative) this.alternative._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_StatementWithBody);
|
||||
|
||||
/* -----[ SWITCH ]----- */
|
||||
|
||||
var AST_Switch = DEFNODE("Switch", "expression", {
|
||||
$documentation: "A `switch` statement",
|
||||
$propdoc: {
|
||||
expression: "[AST_Node] the `switch` “discriminant”"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Block);
|
||||
|
||||
var AST_SwitchBranch = DEFNODE("SwitchBranch", null, {
|
||||
$documentation: "Base class for `switch` branches",
|
||||
}, AST_Block);
|
||||
|
||||
var AST_Default = DEFNODE("Default", null, {
|
||||
$documentation: "A `default` switch branch",
|
||||
}, AST_SwitchBranch);
|
||||
|
||||
var AST_Case = DEFNODE("Case", "expression", {
|
||||
$documentation: "A `case` switch branch",
|
||||
$propdoc: {
|
||||
expression: "[AST_Node] the `case` expression"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.expression._walk(visitor);
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_SwitchBranch);
|
||||
|
||||
/* -----[ EXCEPTIONS ]----- */
|
||||
|
||||
var AST_Try = DEFNODE("Try", "bcatch bfinally", {
|
||||
$documentation: "A `try` statement",
|
||||
$propdoc: {
|
||||
bcatch: "[AST_Catch?] the catch block, or null if not present",
|
||||
bfinally: "[AST_Finally?] the finally block, or null if not present"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
walk_body(this, visitor);
|
||||
if (this.bcatch) this.bcatch._walk(visitor);
|
||||
if (this.bfinally) this.bfinally._walk(visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Block);
|
||||
|
||||
var AST_Catch = DEFNODE("Catch", "argname", {
|
||||
$documentation: "A `catch` node; only makes sense as part of a `try` statement",
|
||||
$propdoc: {
|
||||
argname: "[AST_SymbolCatch] symbol for the exception"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.argname._walk(visitor);
|
||||
walk_body(this, visitor);
|
||||
});
|
||||
}
|
||||
}, AST_Block);
|
||||
|
||||
var AST_Finally = DEFNODE("Finally", null, {
|
||||
$documentation: "A `finally` node; only makes sense as part of a `try` statement"
|
||||
}, AST_Block);

/* -----[ VAR/CONST ]----- */

var AST_Definitions = DEFNODE("Definitions", "definitions", {
    $documentation: "Base class for `var` or `const` nodes (variable declarations/initializations)",
    $propdoc: {
        definitions: "[AST_VarDef*] array of variable definitions"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.definitions.forEach(function(def){
                def._walk(visitor);
            });
        });
    }
}, AST_Statement);

var AST_Var = DEFNODE("Var", null, {
    $documentation: "A `var` statement"
}, AST_Definitions);

var AST_Const = DEFNODE("Const", null, {
    $documentation: "A `const` statement"
}, AST_Definitions);

var AST_VarDef = DEFNODE("VarDef", "name value", {
    $documentation: "A variable declaration; only appears in an AST_Definitions node",
    $propdoc: {
        name: "[AST_SymbolVar|AST_SymbolConst] name of the variable",
        value: "[AST_Node?] initializer, or null if there's no initializer"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.name._walk(visitor);
            if (this.value) this.value._walk(visitor);
        });
    }
});
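// Usage sketch, assuming parse() from parse.js is in scope: one `var`
// statement with several declarations becomes a single AST_Var holding one
// AST_VarDef per declared name; a name without an initializer has value == null.
//
//     var decl = parse("var a = 1, b;").body[0];
//     // decl instanceof AST_Var; decl.definitions.length == 2
//     // decl.definitions[0].value instanceof AST_Number
//     // decl.definitions[1].value == null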

/* -----[ OTHER ]----- */

var AST_Call = DEFNODE("Call", "expression args", {
    $documentation: "A function call expression",
    $propdoc: {
        expression: "[AST_Node] expression to invoke as function",
        args: "[AST_Node*] array of arguments"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.expression._walk(visitor);
            this.args.forEach(function(arg){
                arg._walk(visitor);
            });
        });
    }
});

var AST_New = DEFNODE("New", null, {
    $documentation: "An object instantiation. Derives from a function call since it has exactly the same properties"
}, AST_Call);

var AST_Seq = DEFNODE("Seq", "car cdr", {
    $documentation: "A sequence expression (two comma-separated expressions)",
    $propdoc: {
        car: "[AST_Node] first element in sequence",
        cdr: "[AST_Node] second element in sequence"
    },
    $cons: function(x, y) {
        var seq = new AST_Seq(x);
        seq.car = x;
        seq.cdr = y;
        return seq;
    },
    $from_array: function(array) {
        if (array.length == 0) return null;
        if (array.length == 1) return array[0].clone();
        var list = null;
        for (var i = array.length; --i >= 0;) {
            list = AST_Seq.cons(array[i], list);
        }
        var p = list;
        while (p) {
            if (p.cdr && !p.cdr.cdr) {
                p.cdr = p.cdr.car;
                break;
            }
            p = p.cdr;
        }
        return list;
    },
    to_array: function() {
        var p = this, a = [];
        while (p) {
            a.push(p.car);
            if (p.cdr && !(p.cdr instanceof AST_Seq)) {
                a.push(p.cdr);
                break;
            }
            p = p.cdr;
        }
        return a;
    },
    add: function(node) {
        var p = this;
        while (p) {
            if (!(p.cdr instanceof AST_Seq)) {
                var cell = AST_Seq.cons(p.cdr, node);
                return p.cdr = cell;
            }
            p = p.cdr;
        }
    },
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.car._walk(visitor);
            if (this.cdr) this.cdr._walk(visitor);
        });
    }
});
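// Usage sketch (a, b, c stand for arbitrary AST_Node expressions): the comma
// operator is stored as cons cells, and from_array() collapses the last cell
// so the final `cdr` is the expression itself rather than another AST_Seq;
// to_array() flattens it back.
//
//     var seq = AST_Seq.from_array([a, b, c]);
//     // seq.car === a; seq.cdr.car === b; seq.cdr.cdr === c
//     // seq.to_array() -> [a, b, c]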

var AST_PropAccess = DEFNODE("PropAccess", "expression property", {
    $documentation: "Base class for property access expressions, i.e. `a.foo` or `a[\"foo\"]`",
    $propdoc: {
        expression: "[AST_Node] the “container” expression",
        property: "[AST_Node|string] the property to access. For AST_Dot this is always a plain string, while for AST_Sub it's an arbitrary AST_Node"
    }
});

var AST_Dot = DEFNODE("Dot", null, {
    $documentation: "A dotted property access expression",
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.expression._walk(visitor);
        });
    }
}, AST_PropAccess);

var AST_Sub = DEFNODE("Sub", null, {
    $documentation: "Index-style property access, i.e. `a[\"foo\"]`",
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.expression._walk(visitor);
            this.property._walk(visitor);
        });
    }
}, AST_PropAccess);

var AST_Unary = DEFNODE("Unary", "operator expression", {
    $documentation: "Base class for unary expressions",
    $propdoc: {
        operator: "[string] the operator",
        expression: "[AST_Node] expression that this unary operator applies to"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.expression._walk(visitor);
        });
    }
});

var AST_UnaryPrefix = DEFNODE("UnaryPrefix", null, {
    $documentation: "Unary prefix expression, i.e. `typeof i` or `++i`"
}, AST_Unary);

var AST_UnaryPostfix = DEFNODE("UnaryPostfix", null, {
    $documentation: "Unary postfix expression, i.e. `i++`"
}, AST_Unary);

var AST_Binary = DEFNODE("Binary", "left operator right", {
    $documentation: "Binary expression, i.e. `a + b`",
    $propdoc: {
        left: "[AST_Node] left-hand side expression",
        operator: "[string] the operator",
        right: "[AST_Node] right-hand side expression"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.left._walk(visitor);
            this.right._walk(visitor);
        });
    }
});

var AST_Conditional = DEFNODE("Conditional", "condition consequent alternative", {
    $documentation: "Conditional expression using the ternary operator, i.e. `a ? b : c`",
    $propdoc: {
        condition: "[AST_Node]",
        consequent: "[AST_Node]",
        alternative: "[AST_Node]"
    },
    _walk: function(visitor) {
        return visitor._visit(this, function(){
            this.condition._walk(visitor);
            this.consequent._walk(visitor);
            this.alternative._walk(visitor);
        });
    }
});

var AST_Assign = DEFNODE("Assign", null, {
    $documentation: "An assignment expression — `a = b + 5`",
}, AST_Binary);
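// Usage sketch, assuming parse() from parse.js is in scope: compound
// assignments reuse the binary machinery, so `a += 1` is an AST_Assign
// (a subclass of AST_Binary) whose operator is "+=".
//
//     var st = parse("a += 1;").body[0];   // AST_SimpleStatement
//     // st.body instanceof AST_Assign; st.body.operator == "+="
//     // st.body.left instanceof AST_SymbolRef; st.body.right instanceof AST_Number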
|
||||
|
||||
/* -----[ LITERALS ]----- */
|
||||
|
||||
var AST_Array = DEFNODE("Array", "elements", {
|
||||
$documentation: "An array literal",
|
||||
$propdoc: {
|
||||
elements: "[AST_Node*] array of elements"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.elements.forEach(function(el){
|
||||
el._walk(visitor);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_Object = DEFNODE("Object", "properties", {
|
||||
$documentation: "An object literal",
|
||||
$propdoc: {
|
||||
properties: "[AST_ObjectProperty*] array of properties"
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.properties.forEach(function(prop){
|
||||
prop._walk(visitor);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_ObjectProperty = DEFNODE("ObjectProperty", "key value", {
|
||||
$documentation: "Base class for literal object properties",
|
||||
$propdoc: {
|
||||
key: "[string] the property name converted to a string for ObjectKeyVal. For setters and getters this is an arbitrary AST_Node.",
|
||||
value: "[AST_Node] property value. For setters and getters this is an AST_Function."
|
||||
},
|
||||
_walk: function(visitor) {
|
||||
return visitor._visit(this, function(){
|
||||
this.value._walk(visitor);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
var AST_ObjectKeyVal = DEFNODE("ObjectKeyVal", "quote", {
|
||||
$documentation: "A key: value object property",
|
||||
$propdoc: {
|
||||
quote: "[string] the original quote character"
|
||||
}
|
||||
}, AST_ObjectProperty);
|
||||
|
||||
var AST_ObjectSetter = DEFNODE("ObjectSetter", null, {
|
||||
$documentation: "An object setter property",
|
||||
}, AST_ObjectProperty);
|
||||
|
||||
var AST_ObjectGetter = DEFNODE("ObjectGetter", null, {
|
||||
$documentation: "An object getter property",
|
||||
}, AST_ObjectProperty);
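// Usage sketch, assuming parse() from parse.js is in scope: plain `key: value`
// pairs become AST_ObjectKeyVal (key is a string), while getters and setters
// become AST_ObjectGetter / AST_ObjectSetter whose value is an AST_Function.
//
//     var obj = parse("x = { a: 1, get b() { return 2; } };").body[0].body.right;
//     // obj instanceof AST_Object
//     // obj.properties[0] instanceof AST_ObjectKeyVal   (key == "a")
//     // obj.properties[1] instanceof AST_ObjectGetter   (value instanceof AST_Function)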
|
||||
|
||||
var AST_Symbol = DEFNODE("Symbol", "scope name thedef", {
|
||||
$propdoc: {
|
||||
name: "[string] name of this symbol",
|
||||
scope: "[AST_Scope/S] the current scope (not necessarily the definition scope)",
|
||||
thedef: "[SymbolDef/S] the definition of this symbol"
|
||||
},
|
||||
$documentation: "Base class for all symbols",
|
||||
});
|
||||
|
||||
var AST_SymbolAccessor = DEFNODE("SymbolAccessor", null, {
|
||||
$documentation: "The name of a property accessor (setter/getter function)"
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_SymbolDeclaration = DEFNODE("SymbolDeclaration", "init", {
|
||||
$documentation: "A declaration symbol (symbol in var/const, function name or argument, symbol in catch)",
|
||||
$propdoc: {
|
||||
init: "[AST_Node*/S] array of initializers for this declaration."
|
||||
}
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_SymbolVar = DEFNODE("SymbolVar", null, {
|
||||
$documentation: "Symbol defining a variable",
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_SymbolConst = DEFNODE("SymbolConst", null, {
|
||||
$documentation: "A constant declaration"
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_SymbolFunarg = DEFNODE("SymbolFunarg", null, {
|
||||
$documentation: "Symbol naming a function argument",
|
||||
}, AST_SymbolVar);
|
||||
|
||||
var AST_SymbolDefun = DEFNODE("SymbolDefun", null, {
|
||||
$documentation: "Symbol defining a function",
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_SymbolLambda = DEFNODE("SymbolLambda", null, {
|
||||
$documentation: "Symbol naming a function expression",
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_SymbolCatch = DEFNODE("SymbolCatch", null, {
|
||||
$documentation: "Symbol naming the exception in catch",
|
||||
}, AST_SymbolDeclaration);
|
||||
|
||||
var AST_Label = DEFNODE("Label", "references", {
|
||||
$documentation: "Symbol naming a label (declaration)",
|
||||
$propdoc: {
|
||||
references: "[AST_LoopControl*] a list of nodes referring to this label"
|
||||
},
|
||||
initialize: function() {
|
||||
this.references = [];
|
||||
this.thedef = this;
|
||||
}
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_SymbolRef = DEFNODE("SymbolRef", null, {
|
||||
$documentation: "Reference to some symbol (not definition/declaration)",
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_LabelRef = DEFNODE("LabelRef", null, {
|
||||
$documentation: "Reference to a label symbol",
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_This = DEFNODE("This", null, {
|
||||
$documentation: "The `this` symbol",
|
||||
}, AST_Symbol);
|
||||
|
||||
var AST_Constant = DEFNODE("Constant", null, {
|
||||
$documentation: "Base class for all constants",
|
||||
getValue: function() {
|
||||
return this.value;
|
||||
}
|
||||
});
|
||||
|
||||
var AST_String = DEFNODE("String", "value quote", {
|
||||
$documentation: "A string literal",
|
||||
$propdoc: {
|
||||
value: "[string] the contents of this string",
|
||||
quote: "[string] the original quote character"
|
||||
}
|
||||
}, AST_Constant);
|
||||
|
||||
var AST_Number = DEFNODE("Number", "value", {
|
||||
$documentation: "A number literal",
|
||||
$propdoc: {
|
||||
value: "[number] the numeric value"
|
||||
}
|
||||
}, AST_Constant);
|
||||
|
||||
var AST_RegExp = DEFNODE("RegExp", "value", {
|
||||
$documentation: "A regexp literal",
|
||||
$propdoc: {
|
||||
value: "[RegExp] the actual regexp"
|
||||
}
|
||||
}, AST_Constant);
|
||||
|
||||
var AST_Atom = DEFNODE("Atom", null, {
|
||||
$documentation: "Base class for atoms",
|
||||
}, AST_Constant);
|
||||
|
||||
var AST_Null = DEFNODE("Null", null, {
|
||||
$documentation: "The `null` atom",
|
||||
value: null
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_NaN = DEFNODE("NaN", null, {
|
||||
$documentation: "The impossible value",
|
||||
value: 0/0
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_Undefined = DEFNODE("Undefined", null, {
|
||||
$documentation: "The `undefined` value",
|
||||
value: (function(){}())
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_Hole = DEFNODE("Hole", null, {
|
||||
$documentation: "A hole in an array",
|
||||
value: (function(){}())
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_Infinity = DEFNODE("Infinity", null, {
|
||||
$documentation: "The `Infinity` value",
|
||||
value: 1/0
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_Boolean = DEFNODE("Boolean", null, {
|
||||
$documentation: "Base class for booleans",
|
||||
}, AST_Atom);
|
||||
|
||||
var AST_False = DEFNODE("False", null, {
|
||||
$documentation: "The `false` atom",
|
||||
value: false
|
||||
}, AST_Boolean);
|
||||
|
||||
var AST_True = DEFNODE("True", null, {
|
||||
$documentation: "The `true` atom",
|
||||
value: true
|
||||
}, AST_Boolean);
|
||||
|
||||
/* -----[ TreeWalker ]----- */

function TreeWalker(callback) {
    this.visit = callback;
    this.stack = [];
};
TreeWalker.prototype = {
    _visit: function(node, descend) {
        this.stack.push(node);
        var ret = this.visit(node, descend ? function(){
            descend.call(node);
        } : noop);
        if (!ret && descend) {
            descend.call(node);
        }
        this.stack.pop();
        return ret;
    },
    parent: function(n) {
        return this.stack[this.stack.length - 2 - (n || 0)];
    },
    push: function (node) {
        this.stack.push(node);
    },
    pop: function() {
        return this.stack.pop();
    },
    self: function() {
        return this.stack[this.stack.length - 1];
    },
    find_parent: function(type) {
        var stack = this.stack;
        for (var i = stack.length; --i >= 0;) {
            var x = stack[i];
            if (x instanceof type) return x;
        }
    },
    has_directive: function(type) {
        return this.find_parent(AST_Scope).has_directive(type);
    },
    in_boolean_context: function() {
        var stack = this.stack;
        var i = stack.length, self = stack[--i];
        while (i > 0) {
            var p = stack[--i];
            if ((p instanceof AST_If && p.condition === self) ||
                (p instanceof AST_Conditional && p.condition === self) ||
                (p instanceof AST_DWLoop && p.condition === self) ||
                (p instanceof AST_For && p.condition === self) ||
                (p instanceof AST_UnaryPrefix && p.operator == "!" && p.expression === self))
            {
                return true;
            }
            if (!(p instanceof AST_Binary && (p.operator == "&&" || p.operator == "||")))
                return false;
            self = p;
        }
    },
    loopcontrol_target: function(label) {
        var stack = this.stack;
        if (label) for (var i = stack.length; --i >= 0;) {
            var x = stack[i];
            if (x instanceof AST_LabeledStatement && x.label.name == label.name) {
                return x.body;
            }
        } else for (var i = stack.length; --i >= 0;) {
            var x = stack[i];
            if (x instanceof AST_Switch || x instanceof AST_IterationStatement)
                return x;
        }
    }
};
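// Usage sketch, assuming parse() from parse.js is in scope: a TreeWalker keeps
// the path from the toplevel down to the current node on `stack`, which is
// what parent(), find_parent() and in_boolean_context() consult during a walk.
//
//     var ast = parse("function f() { return x * 2; }");
//     ast.walk(new TreeWalker(function(node){
//         if (node instanceof AST_SymbolRef && node.name == "x") {
//             this.find_parent(AST_Lambda);   // the enclosing AST_Defun
//             this.parent();                  // the AST_Binary `x * 2`
//         }
//     }));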
|
||||
2486
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/compress.js
generated
vendored
Normal file
File diff suppressed because it is too large
507
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/mozilla-ast.js
generated
vendored
Normal file
@@ -0,0 +1,507 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
(function(){
|
||||
|
||||
var MOZ_TO_ME = {
|
||||
ExpressionStatement: function(M) {
|
||||
var expr = M.expression;
|
||||
if (expr.type === "Literal" && typeof expr.value === "string") {
|
||||
return new AST_Directive({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
value: expr.value
|
||||
});
|
||||
}
|
||||
return new AST_SimpleStatement({
|
||||
start: my_start_token(M),
|
||||
end: my_end_token(M),
|
||||
body: from_moz(expr)
|
||||
});
|
||||
},
|
||||
TryStatement: function(M) {
|
||||
var handlers = M.handlers || [M.handler];
|
||||
if (handlers.length > 1 || M.guardedHandlers && M.guardedHandlers.length) {
|
||||
throw new Error("Multiple catch clauses are not supported.");
|
||||
}
|
||||
return new AST_Try({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
body : from_moz(M.block).body,
|
||||
bcatch : from_moz(handlers[0]),
|
||||
bfinally : M.finalizer ? new AST_Finally(from_moz(M.finalizer)) : null
|
||||
});
|
||||
},
|
||||
Property: function(M) {
|
||||
var key = M.key;
|
||||
var name = key.type == "Identifier" ? key.name : key.value;
|
||||
var args = {
|
||||
start : my_start_token(key),
|
||||
end : my_end_token(M.value),
|
||||
key : name,
|
||||
value : from_moz(M.value)
|
||||
};
|
||||
switch (M.kind) {
|
||||
case "init":
|
||||
return new AST_ObjectKeyVal(args);
|
||||
case "set":
|
||||
args.value.name = from_moz(key);
|
||||
return new AST_ObjectSetter(args);
|
||||
case "get":
|
||||
args.value.name = from_moz(key);
|
||||
return new AST_ObjectGetter(args);
|
||||
}
|
||||
},
|
||||
ObjectExpression: function(M) {
|
||||
return new AST_Object({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
properties : M.properties.map(function(prop){
|
||||
prop.type = "Property";
|
||||
return from_moz(prop)
|
||||
})
|
||||
});
|
||||
},
|
||||
SequenceExpression: function(M) {
|
||||
return AST_Seq.from_array(M.expressions.map(from_moz));
|
||||
},
|
||||
MemberExpression: function(M) {
|
||||
return new (M.computed ? AST_Sub : AST_Dot)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
property : M.computed ? from_moz(M.property) : M.property.name,
|
||||
expression : from_moz(M.object)
|
||||
});
|
||||
},
|
||||
SwitchCase: function(M) {
|
||||
return new (M.test ? AST_Case : AST_Default)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
expression : from_moz(M.test),
|
||||
body : M.consequent.map(from_moz)
|
||||
});
|
||||
},
|
||||
VariableDeclaration: function(M) {
|
||||
return new (M.kind === "const" ? AST_Const : AST_Var)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
definitions : M.declarations.map(from_moz)
|
||||
});
|
||||
},
|
||||
Literal: function(M) {
|
||||
var val = M.value, args = {
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M)
|
||||
};
|
||||
if (val === null) return new AST_Null(args);
|
||||
switch (typeof val) {
|
||||
case "string":
|
||||
args.value = val;
|
||||
return new AST_String(args);
|
||||
case "number":
|
||||
args.value = val;
|
||||
return new AST_Number(args);
|
||||
case "boolean":
|
||||
return new (val ? AST_True : AST_False)(args);
|
||||
default:
|
||||
args.value = val;
|
||||
return new AST_RegExp(args);
|
||||
}
|
||||
},
|
||||
Identifier: function(M) {
|
||||
var p = FROM_MOZ_STACK[FROM_MOZ_STACK.length - 2];
|
||||
return new ( p.type == "LabeledStatement" ? AST_Label
|
||||
: p.type == "VariableDeclarator" && p.id === M ? (p.kind == "const" ? AST_SymbolConst : AST_SymbolVar)
|
||||
: p.type == "FunctionExpression" ? (p.id === M ? AST_SymbolLambda : AST_SymbolFunarg)
|
||||
: p.type == "FunctionDeclaration" ? (p.id === M ? AST_SymbolDefun : AST_SymbolFunarg)
|
||||
: p.type == "CatchClause" ? AST_SymbolCatch
|
||||
: p.type == "BreakStatement" || p.type == "ContinueStatement" ? AST_LabelRef
|
||||
: AST_SymbolRef)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
name : M.name
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
MOZ_TO_ME.UpdateExpression =
|
||||
MOZ_TO_ME.UnaryExpression = function To_Moz_Unary(M) {
|
||||
var prefix = "prefix" in M ? M.prefix
|
||||
: M.type == "UnaryExpression" ? true : false;
|
||||
return new (prefix ? AST_UnaryPrefix : AST_UnaryPostfix)({
|
||||
start : my_start_token(M),
|
||||
end : my_end_token(M),
|
||||
operator : M.operator,
|
||||
expression : from_moz(M.argument)
|
||||
});
|
||||
};
|
||||
|
||||
map("Program", AST_Toplevel, "body@body");
|
||||
map("EmptyStatement", AST_EmptyStatement);
|
||||
map("BlockStatement", AST_BlockStatement, "body@body");
|
||||
map("IfStatement", AST_If, "test>condition, consequent>body, alternate>alternative");
|
||||
map("LabeledStatement", AST_LabeledStatement, "label>label, body>body");
|
||||
map("BreakStatement", AST_Break, "label>label");
|
||||
map("ContinueStatement", AST_Continue, "label>label");
|
||||
map("WithStatement", AST_With, "object>expression, body>body");
|
||||
map("SwitchStatement", AST_Switch, "discriminant>expression, cases@body");
|
||||
map("ReturnStatement", AST_Return, "argument>value");
|
||||
map("ThrowStatement", AST_Throw, "argument>value");
|
||||
map("WhileStatement", AST_While, "test>condition, body>body");
|
||||
map("DoWhileStatement", AST_Do, "test>condition, body>body");
|
||||
map("ForStatement", AST_For, "init>init, test>condition, update>step, body>body");
|
||||
map("ForInStatement", AST_ForIn, "left>init, right>object, body>body");
|
||||
map("DebuggerStatement", AST_Debugger);
|
||||
map("FunctionDeclaration", AST_Defun, "id>name, params@argnames, body%body");
|
||||
map("VariableDeclarator", AST_VarDef, "id>name, init>value");
|
||||
map("CatchClause", AST_Catch, "param>argname, body%body");
|
||||
|
||||
map("ThisExpression", AST_This);
|
||||
map("ArrayExpression", AST_Array, "elements@elements");
|
||||
map("FunctionExpression", AST_Function, "id>name, params@argnames, body%body");
|
||||
map("BinaryExpression", AST_Binary, "operator=operator, left>left, right>right");
|
||||
map("LogicalExpression", AST_Binary, "operator=operator, left>left, right>right");
|
||||
map("AssignmentExpression", AST_Assign, "operator=operator, left>left, right>right");
|
||||
map("ConditionalExpression", AST_Conditional, "test>condition, consequent>consequent, alternate>alternative");
|
||||
map("NewExpression", AST_New, "callee>expression, arguments@args");
|
||||
map("CallExpression", AST_Call, "callee>expression, arguments@args");
|
||||
|
||||
def_to_moz(AST_Directive, function To_Moz_Directive(M) {
|
||||
return {
|
||||
type: "ExpressionStatement",
|
||||
expression: {
|
||||
type: "Literal",
|
||||
value: M.value
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_SimpleStatement, function To_Moz_ExpressionStatement(M) {
|
||||
return {
|
||||
type: "ExpressionStatement",
|
||||
expression: to_moz(M.body)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_SwitchBranch, function To_Moz_SwitchCase(M) {
|
||||
return {
|
||||
type: "SwitchCase",
|
||||
test: to_moz(M.expression),
|
||||
consequent: M.body.map(to_moz)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Try, function To_Moz_TryStatement(M) {
|
||||
return {
|
||||
type: "TryStatement",
|
||||
block: to_moz_block(M),
|
||||
handler: to_moz(M.bcatch),
|
||||
guardedHandlers: [],
|
||||
finalizer: to_moz(M.bfinally)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Catch, function To_Moz_CatchClause(M) {
|
||||
return {
|
||||
type: "CatchClause",
|
||||
param: to_moz(M.argname),
|
||||
guard: null,
|
||||
body: to_moz_block(M)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Definitions, function To_Moz_VariableDeclaration(M) {
|
||||
return {
|
||||
type: "VariableDeclaration",
|
||||
kind: M instanceof AST_Const ? "const" : "var",
|
||||
declarations: M.definitions.map(to_moz)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Seq, function To_Moz_SequenceExpression(M) {
|
||||
return {
|
||||
type: "SequenceExpression",
|
||||
expressions: M.to_array().map(to_moz)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_PropAccess, function To_Moz_MemberExpression(M) {
|
||||
var isComputed = M instanceof AST_Sub;
|
||||
return {
|
||||
type: "MemberExpression",
|
||||
object: to_moz(M.expression),
|
||||
computed: isComputed,
|
||||
property: isComputed ? to_moz(M.property) : {type: "Identifier", name: M.property}
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Unary, function To_Moz_Unary(M) {
|
||||
return {
|
||||
type: M.operator == "++" || M.operator == "--" ? "UpdateExpression" : "UnaryExpression",
|
||||
operator: M.operator,
|
||||
prefix: M instanceof AST_UnaryPrefix,
|
||||
argument: to_moz(M.expression)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Binary, function To_Moz_BinaryExpression(M) {
|
||||
return {
|
||||
type: M.operator == "&&" || M.operator == "||" ? "LogicalExpression" : "BinaryExpression",
|
||||
left: to_moz(M.left),
|
||||
operator: M.operator,
|
||||
right: to_moz(M.right)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Object, function To_Moz_ObjectExpression(M) {
|
||||
return {
|
||||
type: "ObjectExpression",
|
||||
properties: M.properties.map(to_moz)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_ObjectProperty, function To_Moz_Property(M) {
|
||||
var key = (
|
||||
is_identifier(M.key)
|
||||
? {type: "Identifier", name: M.key}
|
||||
: {type: "Literal", value: M.key}
|
||||
);
|
||||
var kind;
|
||||
if (M instanceof AST_ObjectKeyVal) {
|
||||
kind = "init";
|
||||
} else
|
||||
if (M instanceof AST_ObjectGetter) {
|
||||
kind = "get";
|
||||
} else
|
||||
if (M instanceof AST_ObjectSetter) {
|
||||
kind = "set";
|
||||
}
|
||||
return {
|
||||
type: "Property",
|
||||
kind: kind,
|
||||
key: key,
|
||||
value: to_moz(M.value)
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Symbol, function To_Moz_Identifier(M) {
|
||||
var def = M.definition();
|
||||
return {
|
||||
type: "Identifier",
|
||||
name: def ? def.mangled_name || def.name : M.name
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Constant, function To_Moz_Literal(M) {
|
||||
var value = M.value;
|
||||
if (typeof value === 'number' && (value < 0 || (value === 0 && 1 / value < 0))) {
|
||||
return {
|
||||
type: "UnaryExpression",
|
||||
operator: "-",
|
||||
prefix: true,
|
||||
argument: {
|
||||
type: "Literal",
|
||||
value: -value
|
||||
}
|
||||
};
|
||||
}
|
||||
return {
|
||||
type: "Literal",
|
||||
value: value
|
||||
};
|
||||
});
|
||||
|
||||
def_to_moz(AST_Atom, function To_Moz_Atom(M) {
|
||||
return {
|
||||
type: "Identifier",
|
||||
name: String(M.value)
|
||||
};
|
||||
});
|
||||
|
||||
AST_Boolean.DEFMETHOD("to_mozilla_ast", AST_Constant.prototype.to_mozilla_ast);
|
||||
AST_Null.DEFMETHOD("to_mozilla_ast", AST_Constant.prototype.to_mozilla_ast);
|
||||
AST_Hole.DEFMETHOD("to_mozilla_ast", function To_Moz_ArrayHole() { return null });
|
||||
|
||||
AST_Block.DEFMETHOD("to_mozilla_ast", AST_BlockStatement.prototype.to_mozilla_ast);
|
||||
AST_Lambda.DEFMETHOD("to_mozilla_ast", AST_Function.prototype.to_mozilla_ast);
|
||||
|
||||
/* -----[ tools ]----- */
|
||||
|
||||
function my_start_token(moznode) {
|
||||
var loc = moznode.loc, start = loc && loc.start;
|
||||
var range = moznode.range;
|
||||
return new AST_Token({
|
||||
file : loc && loc.source,
|
||||
line : start && start.line,
|
||||
col : start && start.column,
|
||||
pos : range ? range[0] : moznode.start,
|
||||
endline : start && start.line,
|
||||
endcol : start && start.column,
|
||||
endpos : range ? range[0] : moznode.start
|
||||
});
|
||||
};
|
||||
|
||||
function my_end_token(moznode) {
|
||||
var loc = moznode.loc, end = loc && loc.end;
|
||||
var range = moznode.range;
|
||||
return new AST_Token({
|
||||
file : loc && loc.source,
|
||||
line : end && end.line,
|
||||
col : end && end.column,
|
||||
pos : range ? range[1] : moznode.end,
|
||||
endline : end && end.line,
|
||||
endcol : end && end.column,
|
||||
endpos : range ? range[1] : moznode.end
|
||||
});
|
||||
};
|
||||
|
||||
function map(moztype, mytype, propmap) {
|
||||
var moz_to_me = "function From_Moz_" + moztype + "(M){\n";
|
||||
moz_to_me += "return new " + mytype.name + "({\n" +
|
||||
"start: my_start_token(M),\n" +
|
||||
"end: my_end_token(M)";
|
||||
|
||||
var me_to_moz = "function To_Moz_" + moztype + "(M){\n";
|
||||
me_to_moz += "return {\n" +
|
||||
"type: " + JSON.stringify(moztype);
|
||||
|
||||
if (propmap) propmap.split(/\s*,\s*/).forEach(function(prop){
|
||||
var m = /([a-z0-9$_]+)(=|@|>|%)([a-z0-9$_]+)/i.exec(prop);
|
||||
if (!m) throw new Error("Can't understand property map: " + prop);
|
||||
var moz = m[1], how = m[2], my = m[3];
|
||||
moz_to_me += ",\n" + my + ": ";
|
||||
me_to_moz += ",\n" + moz + ": ";
|
||||
switch (how) {
|
||||
case "@":
|
||||
moz_to_me += "M." + moz + ".map(from_moz)";
|
||||
me_to_moz += "M." + my + ".map(to_moz)";
|
||||
break;
|
||||
case ">":
|
||||
moz_to_me += "from_moz(M." + moz + ")";
|
||||
me_to_moz += "to_moz(M." + my + ")";
|
||||
break;
|
||||
case "=":
|
||||
moz_to_me += "M." + moz;
|
||||
me_to_moz += "M." + my;
|
||||
break;
|
||||
case "%":
|
||||
moz_to_me += "from_moz(M." + moz + ").body";
|
||||
me_to_moz += "to_moz_block(M)";
|
||||
break;
|
||||
default:
|
||||
throw new Error("Can't understand operator in propmap: " + prop);
|
||||
}
|
||||
});
|
||||
|
||||
moz_to_me += "\n})\n}";
|
||||
me_to_moz += "\n}\n}";
|
||||
|
||||
//moz_to_me = parse(moz_to_me).print_to_string({ beautify: true });
|
||||
//me_to_moz = parse(me_to_moz).print_to_string({ beautify: true });
|
||||
//console.log(moz_to_me);
|
||||
|
||||
moz_to_me = new Function("my_start_token", "my_end_token", "from_moz", "return(" + moz_to_me + ")")(
|
||||
my_start_token, my_end_token, from_moz
|
||||
);
|
||||
me_to_moz = new Function("to_moz", "to_moz_block", "return(" + me_to_moz + ")")(
|
||||
to_moz, to_moz_block
|
||||
);
|
||||
MOZ_TO_ME[moztype] = moz_to_me;
|
||||
def_to_moz(mytype, me_to_moz);
|
||||
};
|
||||
|
||||
var FROM_MOZ_STACK = null;
|
||||
|
||||
function from_moz(node) {
|
||||
FROM_MOZ_STACK.push(node);
|
||||
var ret = node != null ? MOZ_TO_ME[node.type](node) : null;
|
||||
FROM_MOZ_STACK.pop();
|
||||
return ret;
|
||||
};
|
||||
|
||||
AST_Node.from_mozilla_ast = function(node){
|
||||
var save_stack = FROM_MOZ_STACK;
|
||||
FROM_MOZ_STACK = [];
|
||||
var ast = from_moz(node);
|
||||
FROM_MOZ_STACK = save_stack;
|
||||
return ast;
|
||||
};
|
||||
|
||||
function set_moz_loc(mynode, moznode, myparent) {
|
||||
var start = mynode.start;
|
||||
var end = mynode.end;
|
||||
if (start.pos != null && end.endpos != null) {
|
||||
moznode.range = [start.pos, end.endpos];
|
||||
}
|
||||
if (start.line) {
|
||||
moznode.loc = {
|
||||
start: {line: start.line, column: start.col},
|
||||
end: end.endline ? {line: end.endline, column: end.endcol} : null
|
||||
};
|
||||
if (start.file) {
|
||||
moznode.loc.source = start.file;
|
||||
}
|
||||
}
|
||||
return moznode;
|
||||
};
|
||||
|
||||
function def_to_moz(mytype, handler) {
|
||||
mytype.DEFMETHOD("to_mozilla_ast", function() {
|
||||
return set_moz_loc(this, handler(this));
|
||||
});
|
||||
};
|
||||
|
||||
function to_moz(node) {
|
||||
return node != null ? node.to_mozilla_ast() : null;
|
||||
};
|
||||
|
||||
function to_moz_block(node) {
|
||||
return {
|
||||
type: "BlockStatement",
|
||||
body: node.body.map(to_moz)
|
||||
};
|
||||
};
|
||||
|
||||
})();
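// Usage sketch: from_mozilla_ast() accepts a SpiderMonkey/ESTree-style object
// (e.g. as produced by acorn or esprima) and builds the UglifyJS AST;
// to_mozilla_ast() converts a node back to that representation.
//
//     var moz = { type: "Program", body: [ { type: "ExpressionStatement",
//                   expression: { type: "Literal", value: 42 } } ] };
//     var ast = AST_Node.from_mozilla_ast(moz);   // AST_Toplevel
//     var back = ast.body[0].to_mozilla_ast();    // { type: "ExpressionStatement", ... }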
|
||||
1326
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/output.js
generated
vendored
Normal file
File diff suppressed because it is too large
1486
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/parse.js
generated
vendored
Normal file
File diff suppressed because it is too large
217
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/propmangle.js
generated
vendored
Normal file
@@ -0,0 +1,217 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
function find_builtins() {
    var a = [];
    [ Object, Array, Function, Number,
      String, Boolean, Error, Math,
      Date, RegExp
    ].forEach(function(ctor){
        Object.getOwnPropertyNames(ctor).map(add);
        if (ctor.prototype) {
            Object.getOwnPropertyNames(ctor.prototype).map(add);
        }
    });
    function add(name) {
        push_uniq(a, name);
    }
    return a;
}
|
||||
|
||||
function mangle_properties(ast, options) {
|
||||
options = defaults(options, {
|
||||
reserved : null,
|
||||
cache : null,
|
||||
only_cache : false
|
||||
});
|
||||
|
||||
var reserved = options.reserved;
|
||||
if (reserved == null)
|
||||
reserved = find_builtins();
|
||||
|
||||
var cache = options.cache;
|
||||
if (cache == null) {
|
||||
cache = {
|
||||
cname: -1,
|
||||
props: new Dictionary()
|
||||
};
|
||||
}
|
||||
|
||||
var names_to_mangle = [];
|
||||
|
||||
// step 1: find candidates to mangle
|
||||
ast.walk(new TreeWalker(function(node){
|
||||
if (node instanceof AST_ObjectKeyVal) {
|
||||
add(node.key);
|
||||
}
|
||||
else if (node instanceof AST_ObjectProperty) {
|
||||
// setter or getter, since KeyVal is handled above
|
||||
add(node.key.name);
|
||||
}
|
||||
else if (node instanceof AST_Dot) {
|
||||
if (this.parent() instanceof AST_Assign) {
|
||||
add(node.property);
|
||||
}
|
||||
}
|
||||
else if (node instanceof AST_Sub) {
|
||||
if (this.parent() instanceof AST_Assign) {
|
||||
addStrings(node.property);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
// step 2: transform the tree, renaming properties
|
||||
return ast.transform(new TreeTransformer(function(node){
|
||||
if (node instanceof AST_ObjectKeyVal) {
|
||||
if (should_mangle(node.key)) {
|
||||
node.key = mangle(node.key);
|
||||
}
|
||||
}
|
||||
else if (node instanceof AST_ObjectProperty) {
|
||||
// setter or getter
|
||||
if (should_mangle(node.key.name)) {
|
||||
node.key.name = mangle(node.key.name);
|
||||
}
|
||||
}
|
||||
else if (node instanceof AST_Dot) {
|
||||
if (should_mangle(node.property)) {
|
||||
node.property = mangle(node.property);
|
||||
}
|
||||
}
|
||||
else if (node instanceof AST_Sub) {
|
||||
node.property = mangleStrings(node.property);
|
||||
}
|
||||
// else if (node instanceof AST_String) {
|
||||
// if (should_mangle(node.value)) {
|
||||
// AST_Node.warn(
|
||||
// "Found \"{prop}\" property candidate for mangling in an arbitrary string [{file}:{line},{col}]", {
|
||||
// file : node.start.file,
|
||||
// line : node.start.line,
|
||||
// col : node.start.col,
|
||||
// prop : node.value
|
||||
// }
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
}));
|
||||
|
||||
// only function declarations after this line
|
||||
|
||||
function can_mangle(name) {
|
||||
if (reserved.indexOf(name) >= 0) return false;
|
||||
if (options.only_cache) {
|
||||
return cache.props.has(name);
|
||||
}
|
||||
if (/^[0-9.]+$/.test(name)) return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
function should_mangle(name) {
|
||||
if (reserved.indexOf(name) >= 0) return false;
|
||||
return cache.props.has(name)
|
||||
|| names_to_mangle.indexOf(name) >= 0;
|
||||
}
|
||||
|
||||
function add(name) {
|
||||
if (can_mangle(name))
|
||||
push_uniq(names_to_mangle, name);
|
||||
}
|
||||
|
||||
function mangle(name) {
|
||||
var mangled = cache.props.get(name);
|
||||
if (!mangled) {
|
||||
do {
|
||||
mangled = base54(++cache.cname);
|
||||
} while (!can_mangle(mangled));
|
||||
cache.props.set(name, mangled);
|
||||
}
|
||||
return mangled;
|
||||
}
|
||||
|
||||
function addStrings(node) {
|
||||
var out = {};
|
||||
try {
|
||||
(function walk(node){
|
||||
node.walk(new TreeWalker(function(node){
|
||||
if (node instanceof AST_Seq) {
|
||||
walk(node.cdr);
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_String) {
|
||||
add(node.value);
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_Conditional) {
|
||||
walk(node.consequent);
|
||||
walk(node.alternative);
|
||||
return true;
|
||||
}
|
||||
throw out;
|
||||
}));
|
||||
})(node);
|
||||
} catch(ex) {
|
||||
if (ex !== out) throw ex;
|
||||
}
|
||||
}
|
||||
|
||||
function mangleStrings(node) {
|
||||
return node.transform(new TreeTransformer(function(node){
|
||||
if (node instanceof AST_Seq) {
|
||||
node.cdr = mangleStrings(node.cdr);
|
||||
}
|
||||
else if (node instanceof AST_String) {
|
||||
if (should_mangle(node.value)) {
|
||||
node.value = mangle(node.value);
|
||||
}
|
||||
}
|
||||
else if (node instanceof AST_Conditional) {
|
||||
node.consequent = mangleStrings(node.consequent);
|
||||
node.alternative = mangleStrings(node.alternative);
|
||||
}
|
||||
return node;
|
||||
}));
|
||||
}
|
||||
|
||||
}
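// Usage sketch (ast standing for an AST_Toplevel produced by parse()):
// mangle_properties() can be run directly on a parsed tree; when `reserved`
// is omitted it falls back to the built-in names from find_builtins(), and a
// shared `cache` keeps renaming consistent across several files.
//
//     ast = mangle_properties(ast, { reserved: ["jQuery", "$"] });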
|
||||
601
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/scope.js
generated
vendored
Normal file
@@ -0,0 +1,601 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
function SymbolDef(scope, index, orig) {
    this.name = orig.name;
    this.orig = [ orig ];
    this.scope = scope;
    this.references = [];
    this.global = false;
    this.mangled_name = null;
    this.undeclared = false;
    this.constant = false;
    this.index = index;
};
|
||||
|
||||
SymbolDef.prototype = {
|
||||
unmangleable: function(options) {
|
||||
if (!options) options = {};
|
||||
|
||||
return (this.global && !options.toplevel)
|
||||
|| this.undeclared
|
||||
|| (!options.eval && (this.scope.uses_eval || this.scope.uses_with))
|
||||
|| (options.keep_fnames
|
||||
&& (this.orig[0] instanceof AST_SymbolLambda
|
||||
|| this.orig[0] instanceof AST_SymbolDefun));
|
||||
},
|
||||
mangle: function(options) {
|
||||
var cache = options.cache && options.cache.props;
|
||||
if (this.global && cache && cache.has(this.name)) {
|
||||
this.mangled_name = cache.get(this.name);
|
||||
}
|
||||
else if (!this.mangled_name && !this.unmangleable(options)) {
|
||||
var s = this.scope;
|
||||
if (!options.screw_ie8 && this.orig[0] instanceof AST_SymbolLambda)
|
||||
s = s.parent_scope;
|
||||
this.mangled_name = s.next_mangled(options, this);
|
||||
if (this.global && cache) {
|
||||
cache.set(this.name, this.mangled_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
AST_Toplevel.DEFMETHOD("figure_out_scope", function(options){
|
||||
options = defaults(options, {
|
||||
screw_ie8: false,
|
||||
cache: null
|
||||
});
|
||||
|
||||
// pass 1: setup scope chaining and handle definitions
|
||||
var self = this;
|
||||
var scope = self.parent_scope = null;
|
||||
var defun = null;
|
||||
var nesting = 0;
|
||||
var tw = new TreeWalker(function(node, descend){
|
||||
if (options.screw_ie8 && node instanceof AST_Catch) {
|
||||
var save_scope = scope;
|
||||
scope = new AST_Scope(node);
|
||||
scope.init_scope_vars(nesting);
|
||||
scope.parent_scope = save_scope;
|
||||
descend();
|
||||
scope = save_scope;
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_Scope) {
|
||||
node.init_scope_vars(nesting);
|
||||
var save_scope = node.parent_scope = scope;
|
||||
var save_defun = defun;
|
||||
defun = scope = node;
|
||||
++nesting; descend(); --nesting;
|
||||
scope = save_scope;
|
||||
defun = save_defun;
|
||||
return true; // don't descend again in TreeWalker
|
||||
}
|
||||
if (node instanceof AST_Directive) {
|
||||
node.scope = scope;
|
||||
push_uniq(scope.directives, node.value);
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_With) {
|
||||
for (var s = scope; s; s = s.parent_scope)
|
||||
s.uses_with = true;
|
||||
return;
|
||||
}
|
||||
if (node instanceof AST_Symbol) {
|
||||
node.scope = scope;
|
||||
}
|
||||
if (node instanceof AST_SymbolLambda) {
|
||||
defun.def_function(node);
|
||||
}
|
||||
else if (node instanceof AST_SymbolDefun) {
|
||||
// Careful here, the scope where this should be defined is
|
||||
// the parent scope. The reason is that we enter a new
|
||||
// scope when we encounter the AST_Defun node (which is
|
||||
// instanceof AST_Scope) but we get to the symbol a bit
|
||||
// later.
|
||||
(node.scope = defun.parent_scope).def_function(node);
|
||||
}
|
||||
else if (node instanceof AST_SymbolVar
|
||||
|| node instanceof AST_SymbolConst) {
|
||||
var def = defun.def_variable(node);
|
||||
def.constant = node instanceof AST_SymbolConst;
|
||||
def.init = tw.parent().value;
|
||||
}
|
||||
else if (node instanceof AST_SymbolCatch) {
|
||||
(options.screw_ie8 ? scope : defun)
|
||||
.def_variable(node);
|
||||
}
|
||||
});
|
||||
self.walk(tw);
|
||||
|
||||
// pass 2: find back references and eval
|
||||
var func = null;
|
||||
var globals = self.globals = new Dictionary();
|
||||
var tw = new TreeWalker(function(node, descend){
|
||||
if (node instanceof AST_Lambda) {
|
||||
var prev_func = func;
|
||||
func = node;
|
||||
descend();
|
||||
func = prev_func;
|
||||
return true;
|
||||
}
|
||||
if (node instanceof AST_SymbolRef) {
|
||||
var name = node.name;
|
||||
var sym = node.scope.find_variable(name);
|
||||
if (!sym) {
|
||||
var g;
|
||||
if (globals.has(name)) {
|
||||
g = globals.get(name);
|
||||
} else {
|
||||
g = new SymbolDef(self, globals.size(), node);
|
||||
g.undeclared = true;
|
||||
g.global = true;
|
||||
globals.set(name, g);
|
||||
}
|
||||
node.thedef = g;
|
||||
if (name == "eval" && tw.parent() instanceof AST_Call) {
|
||||
for (var s = node.scope; s && !s.uses_eval; s = s.parent_scope)
|
||||
s.uses_eval = true;
|
||||
}
|
||||
if (func && name == "arguments") {
|
||||
func.uses_arguments = true;
|
||||
}
|
||||
} else {
|
||||
node.thedef = sym;
|
||||
}
|
||||
node.reference();
|
||||
return true;
|
||||
}
|
||||
});
|
||||
self.walk(tw);
|
||||
|
||||
if (options.cache) {
|
||||
this.cname = options.cache.cname;
|
||||
}
|
||||
});
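// Usage sketch, assuming parse() from parse.js is in scope: figure_out_scope()
// must run before any pass that needs symbol resolution; afterwards every
// AST_SymbolRef carries a `thedef`, and undeclared names end up in `globals`.
//
//     var ast = parse("var a = 1; b = a;");
//     ast.figure_out_scope();
//     // ast.globals.has("b") == true    (assignment to an undeclared name)
//     // ast.globals.has("a") == false   (declared by the `var`)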
|
||||
|
||||
AST_Scope.DEFMETHOD("init_scope_vars", function(nesting){
|
||||
this.directives = []; // contains the directives defined in this scope, i.e. "use strict"
|
||||
this.variables = new Dictionary(); // map name to AST_SymbolVar (variables defined in this scope; includes functions)
|
||||
this.functions = new Dictionary(); // map name to AST_SymbolDefun (functions defined in this scope)
|
||||
this.uses_with = false; // will be set to true if this or some nested scope uses the `with` statement
|
||||
this.uses_eval = false; // will be set to true if this or nested scope uses the global `eval`
|
||||
this.parent_scope = null; // the parent scope
|
||||
this.enclosed = []; // a list of variables from this or outer scope(s) that are referenced from this or inner scopes
|
||||
this.cname = -1; // the current index for mangling functions/variables
|
||||
this.nesting = nesting; // the nesting level of this scope (0 means toplevel)
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("strict", function(){
|
||||
return this.has_directive("use strict");
|
||||
});
|
||||
|
||||
AST_Lambda.DEFMETHOD("init_scope_vars", function(){
|
||||
AST_Scope.prototype.init_scope_vars.apply(this, arguments);
|
||||
this.uses_arguments = false;
|
||||
});
|
||||
|
||||
AST_SymbolRef.DEFMETHOD("reference", function() {
|
||||
var def = this.definition();
|
||||
def.references.push(this);
|
||||
var s = this.scope;
|
||||
while (s) {
|
||||
push_uniq(s.enclosed, def);
|
||||
if (s === def.scope) break;
|
||||
s = s.parent_scope;
|
||||
}
|
||||
this.frame = this.scope.nesting - def.scope.nesting;
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("find_variable", function(name){
|
||||
if (name instanceof AST_Symbol) name = name.name;
|
||||
return this.variables.get(name)
|
||||
|| (this.parent_scope && this.parent_scope.find_variable(name));
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("has_directive", function(value){
|
||||
return this.parent_scope && this.parent_scope.has_directive(value)
|
||||
|| (this.directives.indexOf(value) >= 0 ? this : null);
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("def_function", function(symbol){
|
||||
this.functions.set(symbol.name, this.def_variable(symbol));
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("def_variable", function(symbol){
|
||||
var def;
|
||||
if (!this.variables.has(symbol.name)) {
|
||||
def = new SymbolDef(this, this.variables.size(), symbol);
|
||||
this.variables.set(symbol.name, def);
|
||||
def.global = !this.parent_scope;
|
||||
} else {
|
||||
def = this.variables.get(symbol.name);
|
||||
def.orig.push(symbol);
|
||||
}
|
||||
return symbol.thedef = def;
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("next_mangled", function(options){
|
||||
var ext = this.enclosed;
|
||||
out: while (true) {
|
||||
var m = base54(++this.cname);
|
||||
if (!is_identifier(m)) continue; // skip over "do"
|
||||
|
||||
// https://github.com/mishoo/UglifyJS2/issues/242 -- do not
|
||||
// shadow a name excepted from mangling.
|
||||
if (options.except.indexOf(m) >= 0) continue;
|
||||
|
||||
// we must ensure that the mangled name does not shadow a name
|
||||
// from some parent scope that is referenced in this or in
|
||||
// inner scopes.
|
||||
for (var i = ext.length; --i >= 0;) {
|
||||
var sym = ext[i];
|
||||
var name = sym.mangled_name || (sym.unmangleable(options) && sym.name);
|
||||
if (m == name) continue out;
|
||||
}
|
||||
return m;
|
||||
}
|
||||
});
|
||||
|
||||
AST_Function.DEFMETHOD("next_mangled", function(options, def){
|
||||
// #179, #326
|
||||
// in Safari strict mode, something like (function x(x){...}) is a syntax error;
|
||||
// a function expression's argument cannot shadow the function expression's name
|
||||
|
||||
var tricky_def = def.orig[0] instanceof AST_SymbolFunarg && this.name && this.name.definition();
|
||||
while (true) {
|
||||
var name = AST_Lambda.prototype.next_mangled.call(this, options, def);
|
||||
if (!(tricky_def && tricky_def.mangled_name == name))
|
||||
return name;
|
||||
}
|
||||
});
|
||||
|
||||
AST_Scope.DEFMETHOD("references", function(sym){
|
||||
if (sym instanceof AST_Symbol) sym = sym.definition();
|
||||
return this.enclosed.indexOf(sym) < 0 ? null : sym;
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("unmangleable", function(options){
|
||||
return this.definition().unmangleable(options);
|
||||
});
|
||||
|
||||
// property accessors are not mangleable
|
||||
AST_SymbolAccessor.DEFMETHOD("unmangleable", function(){
|
||||
return true;
|
||||
});
|
||||
|
||||
// labels are always mangleable
|
||||
AST_Label.DEFMETHOD("unmangleable", function(){
|
||||
return false;
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("unreferenced", function(){
|
||||
return this.definition().references.length == 0
|
||||
&& !(this.scope.uses_eval || this.scope.uses_with);
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("undeclared", function(){
|
||||
return this.definition().undeclared;
|
||||
});
|
||||
|
||||
AST_LabelRef.DEFMETHOD("undeclared", function(){
|
||||
return false;
|
||||
});
|
||||
|
||||
AST_Label.DEFMETHOD("undeclared", function(){
|
||||
return false;
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("definition", function(){
|
||||
return this.thedef;
|
||||
});
|
||||
|
||||
AST_Symbol.DEFMETHOD("global", function(){
|
||||
return this.definition().global;
|
||||
});
|
||||
|
||||
AST_Toplevel.DEFMETHOD("_default_mangler_options", function(options){
|
||||
return defaults(options, {
|
||||
except : [],
|
||||
eval : false,
|
||||
sort : false,
|
||||
toplevel : false,
|
||||
screw_ie8 : false,
|
||||
keep_fnames : false
|
||||
});
|
||||
});
|
||||
|
||||
AST_Toplevel.DEFMETHOD("mangle_names", function(options){
|
||||
options = this._default_mangler_options(options);
|
||||
// We only need to mangle declaration nodes. Special logic wired
|
||||
// into the code generator will display the mangled name if it's
|
||||
// present (and for AST_SymbolRef-s it'll use the mangled name of
|
||||
// the AST_SymbolDeclaration that it points to).
|
||||
var lname = -1;
|
||||
var to_mangle = [];
|
||||
|
||||
if (options.cache) {
|
||||
this.globals.each(function(symbol){
|
||||
if (options.except.indexOf(symbol.name) < 0) {
|
||||
to_mangle.push(symbol);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
var tw = new TreeWalker(function(node, descend){
|
||||
if (node instanceof AST_LabeledStatement) {
|
||||
// lname is incremented when we get to the AST_Label
|
||||
var save_nesting = lname;
|
||||
descend();
|
||||
lname = save_nesting;
|
||||
return true; // don't descend again in TreeWalker
|
||||
}
|
||||
if (node instanceof AST_Scope) {
|
||||
var p = tw.parent(), a = [];
|
||||
node.variables.each(function(symbol){
|
||||
if (options.except.indexOf(symbol.name) < 0) {
|
||||
a.push(symbol);
|
||||
}
|
||||
});
|
||||
if (options.sort) a.sort(function(a, b){
|
||||
return b.references.length - a.references.length;
|
||||
});
|
||||
to_mangle.push.apply(to_mangle, a);
|
||||
return;
|
||||
}
|
||||
if (node instanceof AST_Label) {
|
||||
var name;
|
||||
do name = base54(++lname); while (!is_identifier(name));
|
||||
node.mangled_name = name;
|
||||
return true;
|
||||
}
|
||||
if (options.screw_ie8 && node instanceof AST_SymbolCatch) {
|
||||
to_mangle.push(node.definition());
|
||||
return;
|
||||
}
|
||||
});
|
||||
this.walk(tw);
|
||||
to_mangle.forEach(function(def){ def.mangle(options) });
|
||||
|
||||
if (options.cache) {
|
||||
options.cache.cname = this.cname;
|
||||
}
|
||||
});
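// Usage sketch (ast standing for an AST_Toplevel): the usual renaming pipeline
// is scope analysis, then optionally character-frequency analysis so base54
// hands out the most common characters first, then the mangling pass itself.
//
//     ast.figure_out_scope();
//     ast.compute_char_frequency();
//     ast.mangle_names({ except: ["$"], screw_ie8: true });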
|
||||
|
||||
AST_Toplevel.DEFMETHOD("compute_char_frequency", function(options){
|
||||
options = this._default_mangler_options(options);
|
||||
var tw = new TreeWalker(function(node){
|
||||
if (node instanceof AST_Constant)
|
||||
base54.consider(node.print_to_string());
|
||||
else if (node instanceof AST_Return)
|
||||
base54.consider("return");
|
||||
else if (node instanceof AST_Throw)
|
||||
base54.consider("throw");
|
||||
else if (node instanceof AST_Continue)
|
||||
base54.consider("continue");
|
||||
else if (node instanceof AST_Break)
|
||||
base54.consider("break");
|
||||
else if (node instanceof AST_Debugger)
|
||||
base54.consider("debugger");
|
||||
else if (node instanceof AST_Directive)
|
||||
base54.consider(node.value);
|
||||
else if (node instanceof AST_While)
|
||||
base54.consider("while");
|
||||
else if (node instanceof AST_Do)
|
||||
base54.consider("do while");
|
||||
else if (node instanceof AST_If) {
|
||||
base54.consider("if");
|
||||
if (node.alternative) base54.consider("else");
|
||||
}
|
||||
else if (node instanceof AST_Var)
|
||||
base54.consider("var");
|
||||
else if (node instanceof AST_Const)
|
||||
base54.consider("const");
|
||||
else if (node instanceof AST_Lambda)
|
||||
base54.consider("function");
|
||||
else if (node instanceof AST_For)
|
||||
base54.consider("for");
|
||||
else if (node instanceof AST_ForIn)
|
||||
base54.consider("for in");
|
||||
else if (node instanceof AST_Switch)
|
||||
base54.consider("switch");
|
||||
else if (node instanceof AST_Case)
|
||||
base54.consider("case");
|
||||
else if (node instanceof AST_Default)
|
||||
base54.consider("default");
|
||||
else if (node instanceof AST_With)
|
||||
base54.consider("with");
|
||||
else if (node instanceof AST_ObjectSetter)
|
||||
base54.consider("set" + node.key);
|
||||
else if (node instanceof AST_ObjectGetter)
|
||||
base54.consider("get" + node.key);
|
||||
else if (node instanceof AST_ObjectKeyVal)
|
||||
base54.consider(node.key);
|
||||
else if (node instanceof AST_New)
|
||||
base54.consider("new");
|
||||
else if (node instanceof AST_This)
|
||||
base54.consider("this");
|
||||
else if (node instanceof AST_Try)
|
||||
base54.consider("try");
|
||||
else if (node instanceof AST_Catch)
|
||||
base54.consider("catch");
|
||||
else if (node instanceof AST_Finally)
|
||||
base54.consider("finally");
|
||||
else if (node instanceof AST_Symbol && node.unmangleable(options))
|
||||
base54.consider(node.name);
|
||||
else if (node instanceof AST_Unary || node instanceof AST_Binary)
|
||||
base54.consider(node.operator);
|
||||
else if (node instanceof AST_Dot)
|
||||
base54.consider(node.property);
|
||||
});
|
||||
this.walk(tw);
|
||||
base54.sort();
|
||||
});
|
||||
|
||||
var base54 = (function() {
|
||||
var string = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_0123456789";
|
||||
var chars, frequency;
|
||||
function reset() {
|
||||
frequency = Object.create(null);
|
||||
chars = string.split("").map(function(ch){ return ch.charCodeAt(0) });
|
||||
chars.forEach(function(ch){ frequency[ch] = 0 });
|
||||
}
|
||||
base54.consider = function(str){
|
||||
for (var i = str.length; --i >= 0;) {
|
||||
var code = str.charCodeAt(i);
|
||||
if (code in frequency) ++frequency[code];
|
||||
}
|
||||
};
|
||||
base54.sort = function() {
|
||||
chars = mergeSort(chars, function(a, b){
|
||||
if (is_digit(a) && !is_digit(b)) return 1;
|
||||
if (is_digit(b) && !is_digit(a)) return -1;
|
||||
return frequency[b] - frequency[a];
|
||||
});
|
||||
};
|
||||
base54.reset = reset;
|
||||
reset();
|
||||
base54.get = function(){ return chars };
|
||||
base54.freq = function(){ return frequency };
|
||||
function base54(num) {
|
||||
var ret = "", base = 54;
|
||||
num++;
|
||||
do {
|
||||
num--;
|
||||
ret += String.fromCharCode(chars[num % base]);
|
||||
num = Math.floor(num / base);
|
||||
base = 64;
|
||||
} while (num > 0);
|
||||
return ret;
|
||||
};
|
||||
return base54;
|
||||
})();
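// --- Illustrative sketch (not part of the original UglifyJS source) ---
// Shows how the base54 helper above is typically driven: feed it the character
// frequencies observed in the output, sort, then request names in order so the
// most frequent characters land in the shortest mangled identifiers.
// The function name is hypothetical and is never called by the library.
function example_base54_usage() {
    base54.reset();
    base54.consider("return return return"); // bias frequencies toward r/e/t/u/n
    base54.sort();                            // reorder chars by observed frequency
    return [ base54(0), base54(1), base54(2) ]; // likely [ "r", "e", "n" ]; ties may differ
}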
|
||||
|
||||
AST_Toplevel.DEFMETHOD("scope_warnings", function(options){
|
||||
options = defaults(options, {
|
||||
undeclared : false, // this makes a lot of noise
|
||||
unreferenced : true,
|
||||
assign_to_global : true,
|
||||
func_arguments : true,
|
||||
nested_defuns : true,
|
||||
eval : true
|
||||
});
|
||||
var tw = new TreeWalker(function(node){
|
||||
if (options.undeclared
|
||||
&& node instanceof AST_SymbolRef
|
||||
&& node.undeclared())
|
||||
{
|
||||
// XXX: this also warns about JS standard names,
|
||||
// e.g. Object, Array, parseInt etc. Should add a list of
|
||||
// exceptions.
|
||||
AST_Node.warn("Undeclared symbol: {name} [{file}:{line},{col}]", {
|
||||
name: node.name,
|
||||
file: node.start.file,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
});
|
||||
}
|
||||
if (options.assign_to_global)
|
||||
{
|
||||
var sym = null;
|
||||
if (node instanceof AST_Assign && node.left instanceof AST_SymbolRef)
|
||||
sym = node.left;
|
||||
else if (node instanceof AST_ForIn && node.init instanceof AST_SymbolRef)
|
||||
sym = node.init;
|
||||
if (sym
|
||||
&& (sym.undeclared()
|
||||
|| (sym.global() && sym.scope !== sym.definition().scope))) {
|
||||
AST_Node.warn("{msg}: {name} [{file}:{line},{col}]", {
|
||||
msg: sym.undeclared() ? "Accidental global?" : "Assignment to global",
|
||||
name: sym.name,
|
||||
file: sym.start.file,
|
||||
line: sym.start.line,
|
||||
col: sym.start.col
|
||||
});
|
||||
}
|
||||
}
|
||||
if (options.eval
|
||||
&& node instanceof AST_SymbolRef
|
||||
&& node.undeclared()
|
||||
&& node.name == "eval") {
|
||||
AST_Node.warn("Eval is used [{file}:{line},{col}]", node.start);
|
||||
}
|
||||
if (options.unreferenced
|
||||
&& (node instanceof AST_SymbolDeclaration || node instanceof AST_Label)
|
||||
&& !(node instanceof AST_SymbolCatch)
|
||||
&& node.unreferenced()) {
|
||||
AST_Node.warn("{type} {name} is declared but not referenced [{file}:{line},{col}]", {
|
||||
type: node instanceof AST_Label ? "Label" : "Symbol",
|
||||
name: node.name,
|
||||
file: node.start.file,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
});
|
||||
}
|
||||
if (options.func_arguments
|
||||
&& node instanceof AST_Lambda
|
||||
&& node.uses_arguments) {
|
||||
AST_Node.warn("arguments used in function {name} [{file}:{line},{col}]", {
|
||||
name: node.name ? node.name.name : "anonymous",
|
||||
file: node.start.file,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
});
|
||||
}
|
||||
if (options.nested_defuns
|
||||
&& node instanceof AST_Defun
|
||||
&& !(tw.parent() instanceof AST_Scope)) {
|
||||
AST_Node.warn("Function {name} declared in nested statement \"{type}\" [{file}:{line},{col}]", {
|
||||
name: node.name.name,
|
||||
type: tw.parent().TYPE,
|
||||
file: node.start.file,
|
||||
line: node.start.line,
|
||||
col: node.start.col
|
||||
});
|
||||
}
|
||||
});
|
||||
this.walk(tw);
|
||||
});
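// --- Illustrative sketch (not part of the original UglifyJS source) ---
// scope_warnings() is meant to be called on a parsed toplevel after scope
// analysis; the option names mirror the defaults above. This hypothetical
// example assumes the usual UglifyJS entry points (parse / figure_out_scope)
// are available elsewhere in this bundle; it is never invoked by the library.
function example_scope_warnings_usage(code) {
    var toplevel = parse(code);          // build the AST
    toplevel.figure_out_scope();         // resolve symbols so undeclared()/unreferenced() work
    toplevel.scope_warnings({            // warnings are emitted through AST_Node.warn
        undeclared   : true,             // noisy: also flags Object, Array, parseInt, ...
        unreferenced : true
    });
    return toplevel;
}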
|
||||
92
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/sourcemap.js
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
// a small wrapper around fitzgen's source-map library
|
||||
function SourceMap(options) {
|
||||
options = defaults(options, {
|
||||
file : null,
|
||||
root : null,
|
||||
orig : null,
|
||||
|
||||
orig_line_diff : 0,
|
||||
dest_line_diff : 0,
|
||||
});
|
||||
var orig_map = options.orig && new MOZ_SourceMap.SourceMapConsumer(options.orig);
|
||||
var generator;
|
||||
if (orig_map) {
|
||||
generator = MOZ_SourceMap.SourceMapGenerator.fromSourceMap(orig_map);
|
||||
} else {
|
||||
generator = new MOZ_SourceMap.SourceMapGenerator({
|
||||
file : options.file,
|
||||
sourceRoot : options.root
|
||||
});
|
||||
}
|
||||
function add(source, gen_line, gen_col, orig_line, orig_col, name) {
|
||||
if (orig_map) {
|
||||
var info = orig_map.originalPositionFor({
|
||||
line: orig_line,
|
||||
column: orig_col
|
||||
});
|
||||
if (info.source === null) {
|
||||
return;
|
||||
}
|
||||
source = info.source;
|
||||
orig_line = info.line;
|
||||
orig_col = info.column;
|
||||
name = info.name || name;
|
||||
}
|
||||
generator.addMapping({
|
||||
generated : { line: gen_line + options.dest_line_diff, column: gen_col },
|
||||
original : { line: orig_line + options.orig_line_diff, column: orig_col },
|
||||
source : source,
|
||||
name : name
|
||||
});
|
||||
}
|
||||
return {
|
||||
add : add,
|
||||
get : function() { return generator },
|
||||
toString : function() { return JSON.stringify(generator.toJSON()); }
|
||||
};
|
||||
};
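// --- Illustrative sketch (not part of the original UglifyJS source) ---
// Minimal usage of the wrapper above: create a generator, record a few
// generated->original position mappings, then serialize. The function name is
// hypothetical; the library itself drives add() from its output stream.
function example_sourcemap_usage() {
    var map = SourceMap({ file: "out.min.js", root: null, orig: null });
    // add(source, gen_line, gen_col, orig_line, orig_col, name)
    map.add("in.js", 1, 0, 1, 0, "foo");
    map.add("in.js", 1, 10, 2, 4, "bar");
    return map.toString(); // JSON string of the generated source map
}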
|
||||
218
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/transform.js
generated
vendored
Normal file
@@ -0,0 +1,218 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
// Tree transformer helpers.
|
||||
|
||||
function TreeTransformer(before, after) {
|
||||
TreeWalker.call(this);
|
||||
this.before = before;
|
||||
this.after = after;
|
||||
}
|
||||
TreeTransformer.prototype = new TreeWalker;
|
||||
|
||||
(function(undefined){
|
||||
|
||||
function _(node, descend) {
|
||||
node.DEFMETHOD("transform", function(tw, in_list){
|
||||
var x, y;
|
||||
tw.push(this);
|
||||
if (tw.before) x = tw.before(this, descend, in_list);
|
||||
if (x === undefined) {
|
||||
if (!tw.after) {
|
||||
x = this;
|
||||
descend(x, tw);
|
||||
} else {
|
||||
tw.stack[tw.stack.length - 1] = x = this.clone();
|
||||
descend(x, tw);
|
||||
y = tw.after(x, in_list);
|
||||
if (y !== undefined) x = y;
|
||||
}
|
||||
}
|
||||
tw.pop();
|
||||
return x;
|
||||
});
|
||||
};
|
||||
|
||||
function do_list(list, tw) {
|
||||
return MAP(list, function(node){
|
||||
return node.transform(tw, true);
|
||||
});
|
||||
};
|
||||
|
||||
_(AST_Node, noop);
|
||||
|
||||
_(AST_LabeledStatement, function(self, tw){
|
||||
self.label = self.label.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_SimpleStatement, function(self, tw){
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Block, function(self, tw){
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_DWLoop, function(self, tw){
|
||||
self.condition = self.condition.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_For, function(self, tw){
|
||||
if (self.init) self.init = self.init.transform(tw);
|
||||
if (self.condition) self.condition = self.condition.transform(tw);
|
||||
if (self.step) self.step = self.step.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_ForIn, function(self, tw){
|
||||
self.init = self.init.transform(tw);
|
||||
self.object = self.object.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_With, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Exit, function(self, tw){
|
||||
if (self.value) self.value = self.value.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_LoopControl, function(self, tw){
|
||||
if (self.label) self.label = self.label.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_If, function(self, tw){
|
||||
self.condition = self.condition.transform(tw);
|
||||
self.body = self.body.transform(tw);
|
||||
if (self.alternative) self.alternative = self.alternative.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Switch, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_Case, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_Try, function(self, tw){
|
||||
self.body = do_list(self.body, tw);
|
||||
if (self.bcatch) self.bcatch = self.bcatch.transform(tw);
|
||||
if (self.bfinally) self.bfinally = self.bfinally.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Catch, function(self, tw){
|
||||
self.argname = self.argname.transform(tw);
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_Definitions, function(self, tw){
|
||||
self.definitions = do_list(self.definitions, tw);
|
||||
});
|
||||
|
||||
_(AST_VarDef, function(self, tw){
|
||||
self.name = self.name.transform(tw);
|
||||
if (self.value) self.value = self.value.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Lambda, function(self, tw){
|
||||
if (self.name) self.name = self.name.transform(tw);
|
||||
self.argnames = do_list(self.argnames, tw);
|
||||
self.body = do_list(self.body, tw);
|
||||
});
|
||||
|
||||
_(AST_Call, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.args = do_list(self.args, tw);
|
||||
});
|
||||
|
||||
_(AST_Seq, function(self, tw){
|
||||
self.car = self.car.transform(tw);
|
||||
self.cdr = self.cdr.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Dot, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Sub, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
self.property = self.property.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Unary, function(self, tw){
|
||||
self.expression = self.expression.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Binary, function(self, tw){
|
||||
self.left = self.left.transform(tw);
|
||||
self.right = self.right.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Conditional, function(self, tw){
|
||||
self.condition = self.condition.transform(tw);
|
||||
self.consequent = self.consequent.transform(tw);
|
||||
self.alternative = self.alternative.transform(tw);
|
||||
});
|
||||
|
||||
_(AST_Array, function(self, tw){
|
||||
self.elements = do_list(self.elements, tw);
|
||||
});
|
||||
|
||||
_(AST_Object, function(self, tw){
|
||||
self.properties = do_list(self.properties, tw);
|
||||
});
|
||||
|
||||
_(AST_ObjectProperty, function(self, tw){
|
||||
self.value = self.value.transform(tw);
|
||||
});
|
||||
|
||||
})();
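// --- Illustrative sketch (not part of the original UglifyJS source) ---
// How the transform() methods defined above are typically used: pass a
// TreeTransformer whose `before` callback may return a replacement node.
// This hypothetical example rewrites every `debugger` statement into an empty
// statement; it assumes the AST node classes from ast.js and is never invoked.
function example_transform_usage(toplevel) {
    return toplevel.transform(new TreeTransformer(function before(node) {
        if (node instanceof AST_Debugger) {
            return new AST_EmptyStatement({ start: node.start, end: node.end });
        }
        // returning undefined lets the default descend/after logic run
    }));
}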
|
||||
310
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/lib/utils.js
generated
vendored
Normal file
@@ -0,0 +1,310 @@
|
||||
/***********************************************************************
|
||||
|
||||
A JavaScript tokenizer / parser / beautifier / compressor.
|
||||
https://github.com/mishoo/UglifyJS2
|
||||
|
||||
-------------------------------- (C) ---------------------------------
|
||||
|
||||
Author: Mihai Bazon
|
||||
<mihai.bazon@gmail.com>
|
||||
http://mihai.bazon.net/blog
|
||||
|
||||
Distributed under the BSD license:
|
||||
|
||||
Copyright 2012 (c) Mihai Bazon <mihai.bazon@gmail.com>
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY
|
||||
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
|
||||
***********************************************************************/
|
||||
|
||||
"use strict";
|
||||
|
||||
function array_to_hash(a) {
|
||||
var ret = Object.create(null);
|
||||
for (var i = 0; i < a.length; ++i)
|
||||
ret[a[i]] = true;
|
||||
return ret;
|
||||
};
|
||||
|
||||
function slice(a, start) {
|
||||
return Array.prototype.slice.call(a, start || 0);
|
||||
};
|
||||
|
||||
function characters(str) {
|
||||
return str.split("");
|
||||
};
|
||||
|
||||
function member(name, array) {
|
||||
for (var i = array.length; --i >= 0;)
|
||||
if (array[i] == name)
|
||||
return true;
|
||||
return false;
|
||||
};
|
||||
|
||||
function find_if(func, array) {
|
||||
for (var i = 0, n = array.length; i < n; ++i) {
|
||||
if (func(array[i]))
|
||||
return array[i];
|
||||
}
|
||||
};
|
||||
|
||||
function repeat_string(str, i) {
|
||||
if (i <= 0) return "";
|
||||
if (i == 1) return str;
|
||||
var d = repeat_string(str, i >> 1);
|
||||
d += d;
|
||||
if (i & 1) d += str;
|
||||
return d;
|
||||
};
|
||||
|
||||
function DefaultsError(msg, defs) {
|
||||
Error.call(this, msg);
|
||||
this.msg = msg;
|
||||
this.defs = defs;
|
||||
};
|
||||
DefaultsError.prototype = Object.create(Error.prototype);
|
||||
DefaultsError.prototype.constructor = DefaultsError;
|
||||
|
||||
DefaultsError.croak = function(msg, defs) {
|
||||
throw new DefaultsError(msg, defs);
|
||||
};
|
||||
|
||||
function defaults(args, defs, croak) {
|
||||
if (args === true)
|
||||
args = {};
|
||||
var ret = args || {};
|
||||
if (croak) for (var i in ret) if (ret.hasOwnProperty(i) && !defs.hasOwnProperty(i))
|
||||
DefaultsError.croak("`" + i + "` is not a supported option", defs);
|
||||
for (var i in defs) if (defs.hasOwnProperty(i)) {
|
||||
ret[i] = (args && args.hasOwnProperty(i)) ? args[i] : defs[i];
|
||||
}
|
||||
return ret;
|
||||
};
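// --- Illustrative sketch (not part of the original UglifyJS source) ---
// defaults() fills in missing options and, when `croak` is set, rejects
// unknown ones with a DefaultsError. Hypothetical example, never called here:
function example_defaults_usage() {
    var opts = defaults({ warnings: true }, { warnings: false, sort: false });
    // opts => { warnings: true, sort: false }
    // defaults({ bogus: 1 }, { warnings: false }, true) would throw a DefaultsError
    return opts;
}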
|
||||
|
||||
function merge(obj, ext) {
|
||||
var count = 0;
|
||||
for (var i in ext) if (ext.hasOwnProperty(i)) {
|
||||
obj[i] = ext[i];
|
||||
count++;
|
||||
}
|
||||
return count;
|
||||
};
|
||||
|
||||
function noop() {};
|
||||
|
||||
var MAP = (function(){
|
||||
function MAP(a, f, backwards) {
|
||||
var ret = [], top = [], i;
|
||||
function doit() {
|
||||
var val = f(a[i], i);
|
||||
var is_last = val instanceof Last;
|
||||
if (is_last) val = val.v;
|
||||
if (val instanceof AtTop) {
|
||||
val = val.v;
|
||||
if (val instanceof Splice) {
|
||||
top.push.apply(top, backwards ? val.v.slice().reverse() : val.v);
|
||||
} else {
|
||||
top.push(val);
|
||||
}
|
||||
}
|
||||
else if (val !== skip) {
|
||||
if (val instanceof Splice) {
|
||||
ret.push.apply(ret, backwards ? val.v.slice().reverse() : val.v);
|
||||
} else {
|
||||
ret.push(val);
|
||||
}
|
||||
}
|
||||
return is_last;
|
||||
};
|
||||
if (a instanceof Array) {
|
||||
if (backwards) {
|
||||
for (i = a.length; --i >= 0;) if (doit()) break;
|
||||
ret.reverse();
|
||||
top.reverse();
|
||||
} else {
|
||||
for (i = 0; i < a.length; ++i) if (doit()) break;
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (i in a) if (a.hasOwnProperty(i)) if (doit()) break;
|
||||
}
|
||||
return top.concat(ret);
|
||||
};
|
||||
MAP.at_top = function(val) { return new AtTop(val) };
|
||||
MAP.splice = function(val) { return new Splice(val) };
|
||||
MAP.last = function(val) { return new Last(val) };
|
||||
var skip = MAP.skip = {};
|
||||
function AtTop(val) { this.v = val };
|
||||
function Splice(val) { this.v = val };
|
||||
function Last(val) { this.v = val };
|
||||
return MAP;
|
||||
})();
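// --- Illustrative sketch (not part of the original UglifyJS source) ---
// MAP() is map() with control values: MAP.skip drops an element, MAP.splice
// replaces one element with several, MAP.at_top hoists results to the front,
// and MAP.last stops iteration. Hypothetical example, never called here:
function example_MAP_usage() {
    return MAP([ 1, 2, 3, 4 ], function(x) {
        if (x === 2) return MAP.skip;               // drop it
        if (x === 3) return MAP.splice([ 30, 31 ]); // expand into two results
        return x * 10;
    });
    // => [ 10, 30, 31, 40 ]
}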
|
||||
|
||||
function push_uniq(array, el) {
|
||||
if (array.indexOf(el) < 0)
|
||||
array.push(el);
|
||||
};
|
||||
|
||||
function string_template(text, props) {
|
||||
return text.replace(/\{(.+?)\}/g, function(str, p){
|
||||
return props[p];
|
||||
});
|
||||
};
|
||||
|
||||
function remove(array, el) {
|
||||
for (var i = array.length; --i >= 0;) {
|
||||
if (array[i] === el) array.splice(i, 1);
|
||||
}
|
||||
};
|
||||
|
||||
function mergeSort(array, cmp) {
|
||||
if (array.length < 2) return array.slice();
|
||||
function merge(a, b) {
|
||||
var r = [], ai = 0, bi = 0, i = 0;
|
||||
while (ai < a.length && bi < b.length) {
|
||||
cmp(a[ai], b[bi]) <= 0
|
||||
? r[i++] = a[ai++]
|
||||
: r[i++] = b[bi++];
|
||||
}
|
||||
if (ai < a.length) r.push.apply(r, a.slice(ai));
|
||||
if (bi < b.length) r.push.apply(r, b.slice(bi));
|
||||
return r;
|
||||
};
|
||||
function _ms(a) {
|
||||
if (a.length <= 1)
|
||||
return a;
|
||||
var m = Math.floor(a.length / 2), left = a.slice(0, m), right = a.slice(m);
|
||||
left = _ms(left);
|
||||
right = _ms(right);
|
||||
return merge(left, right);
|
||||
};
|
||||
return _ms(array);
|
||||
};
|
||||
|
||||
function set_difference(a, b) {
|
||||
return a.filter(function(el){
|
||||
return b.indexOf(el) < 0;
|
||||
});
|
||||
};
|
||||
|
||||
function set_intersection(a, b) {
|
||||
return a.filter(function(el){
|
||||
return b.indexOf(el) >= 0;
|
||||
});
|
||||
};
|
||||
|
||||
// this function is taken from Acorn [1], written by Marijn Haverbeke
|
||||
// [1] https://github.com/marijnh/acorn
|
||||
function makePredicate(words) {
|
||||
if (!(words instanceof Array)) words = words.split(" ");
|
||||
var f = "", cats = [];
|
||||
out: for (var i = 0; i < words.length; ++i) {
|
||||
for (var j = 0; j < cats.length; ++j)
|
||||
if (cats[j][0].length == words[i].length) {
|
||||
cats[j].push(words[i]);
|
||||
continue out;
|
||||
}
|
||||
cats.push([words[i]]);
|
||||
}
|
||||
function compareTo(arr) {
|
||||
if (arr.length == 1) return f += "return str === " + JSON.stringify(arr[0]) + ";";
|
||||
f += "switch(str){";
|
||||
for (var i = 0; i < arr.length; ++i) f += "case " + JSON.stringify(arr[i]) + ":";
|
||||
f += "return true}return false;";
|
||||
}
|
||||
// When there are more than three length categories, an outer
|
||||
// switch first dispatches on the lengths, to save on comparisons.
|
||||
if (cats.length > 3) {
|
||||
cats.sort(function(a, b) {return b.length - a.length;});
|
||||
f += "switch(str.length){";
|
||||
for (var i = 0; i < cats.length; ++i) {
|
||||
var cat = cats[i];
|
||||
f += "case " + cat[0].length + ":";
|
||||
compareTo(cat);
|
||||
}
|
||||
f += "}";
|
||||
// Otherwise, simply generate a flat `switch` statement.
|
||||
} else {
|
||||
compareTo(words);
|
||||
}
|
||||
return new Function("str", f);
|
||||
};
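// --- Illustrative sketch (not part of the original UglifyJS source) ---
// makePredicate() compiles a word list into a fast membership test, dispatching
// on string length first when there are many length groups (see the comments
// above). Hypothetical example, never called by the library:
function example_makePredicate_usage() {
    var is_loop_keyword = makePredicate("do while for in");
    return [ is_loop_keyword("while"), is_loop_keyword("if") ]; // [ true, false ]
}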
|
||||
|
||||
function all(array, predicate) {
|
||||
for (var i = array.length; --i >= 0;)
|
||||
if (!predicate(array[i]))
|
||||
return false;
|
||||
return true;
|
||||
};
|
||||
|
||||
function Dictionary() {
|
||||
this._values = Object.create(null);
|
||||
this._size = 0;
|
||||
};
|
||||
Dictionary.prototype = {
|
||||
set: function(key, val) {
|
||||
if (!this.has(key)) ++this._size;
|
||||
this._values["$" + key] = val;
|
||||
return this;
|
||||
},
|
||||
add: function(key, val) {
|
||||
if (this.has(key)) {
|
||||
this.get(key).push(val);
|
||||
} else {
|
||||
this.set(key, [ val ]);
|
||||
}
|
||||
return this;
|
||||
},
|
||||
get: function(key) { return this._values["$" + key] },
|
||||
del: function(key) {
|
||||
if (this.has(key)) {
|
||||
--this._size;
|
||||
delete this._values["$" + key];
|
||||
}
|
||||
return this;
|
||||
},
|
||||
has: function(key) { return ("$" + key) in this._values },
|
||||
each: function(f) {
|
||||
for (var i in this._values)
|
||||
f(this._values[i], i.substr(1));
|
||||
},
|
||||
size: function() {
|
||||
return this._size;
|
||||
},
|
||||
map: function(f) {
|
||||
var ret = [];
|
||||
for (var i in this._values)
|
||||
ret.push(f(this._values[i], i.substr(1)));
|
||||
return ret;
|
||||
},
|
||||
toObject: function() { return this._values }
|
||||
};
|
||||
Dictionary.fromObject = function(obj) {
|
||||
var dict = new Dictionary();
|
||||
dict._size = merge(dict._values, obj);
|
||||
return dict;
|
||||
};
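// --- Illustrative sketch (not part of the original UglifyJS source) ---
// Dictionary is a small string-keyed map that avoids prototype clashes by
// prefixing keys with "$". Hypothetical example, never called by the library:
function example_Dictionary_usage() {
    var d = new Dictionary();
    d.set("answer", 42);
    d.add("refs", "a").add("refs", "b"); // add() collects values into an array
    return [ d.get("answer"), d.get("refs"), d.size() ]; // [ 42, ["a","b"], 2 ]
}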
|
||||
19
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/async/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
Copyright (c) 2010 Caolan McMahon
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
1425
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/async/README.md
generated
vendored
Normal file
File diff suppressed because it is too large
11
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/async/component.json
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"name": "async",
|
||||
"repo": "caolan/async",
|
||||
"description": "Higher-order functions and common patterns for asynchronous code",
|
||||
"version": "0.1.23",
|
||||
"keywords": [],
|
||||
"dependencies": {},
|
||||
"development": {},
|
||||
"main": "lib/async.js",
|
||||
"scripts": [ "lib/async.js" ]
|
||||
}
|
||||
958
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/async/lib/async.js
generated
vendored
Executable file
@@ -0,0 +1,958 @@
|
||||
/*global setImmediate: false, setTimeout: false, console: false */
|
||||
(function () {
|
||||
|
||||
var async = {};
|
||||
|
||||
// global on the server, window in the browser
|
||||
var root, previous_async;
|
||||
|
||||
root = this;
|
||||
if (root != null) {
|
||||
previous_async = root.async;
|
||||
}
|
||||
|
||||
async.noConflict = function () {
|
||||
root.async = previous_async;
|
||||
return async;
|
||||
};
|
||||
|
||||
function only_once(fn) {
|
||||
var called = false;
|
||||
return function() {
|
||||
if (called) throw new Error("Callback was already called.");
|
||||
called = true;
|
||||
fn.apply(root, arguments);
|
||||
}
|
||||
}
|
||||
|
||||
//// cross-browser compatibility functions ////
|
||||
|
||||
var _each = function (arr, iterator) {
|
||||
if (arr.forEach) {
|
||||
return arr.forEach(iterator);
|
||||
}
|
||||
for (var i = 0; i < arr.length; i += 1) {
|
||||
iterator(arr[i], i, arr);
|
||||
}
|
||||
};
|
||||
|
||||
var _map = function (arr, iterator) {
|
||||
if (arr.map) {
|
||||
return arr.map(iterator);
|
||||
}
|
||||
var results = [];
|
||||
_each(arr, function (x, i, a) {
|
||||
results.push(iterator(x, i, a));
|
||||
});
|
||||
return results;
|
||||
};
|
||||
|
||||
var _reduce = function (arr, iterator, memo) {
|
||||
if (arr.reduce) {
|
||||
return arr.reduce(iterator, memo);
|
||||
}
|
||||
_each(arr, function (x, i, a) {
|
||||
memo = iterator(memo, x, i, a);
|
||||
});
|
||||
return memo;
|
||||
};
|
||||
|
||||
var _keys = function (obj) {
|
||||
if (Object.keys) {
|
||||
return Object.keys(obj);
|
||||
}
|
||||
var keys = [];
|
||||
for (var k in obj) {
|
||||
if (obj.hasOwnProperty(k)) {
|
||||
keys.push(k);
|
||||
}
|
||||
}
|
||||
return keys;
|
||||
};
|
||||
|
||||
//// exported async module functions ////
|
||||
|
||||
//// nextTick implementation with browser-compatible fallback ////
|
||||
if (typeof process === 'undefined' || !(process.nextTick)) {
|
||||
if (typeof setImmediate === 'function') {
|
||||
async.nextTick = function (fn) {
|
||||
// not a direct alias for IE10 compatibility
|
||||
setImmediate(fn);
|
||||
};
|
||||
async.setImmediate = async.nextTick;
|
||||
}
|
||||
else {
|
||||
async.nextTick = function (fn) {
|
||||
setTimeout(fn, 0);
|
||||
};
|
||||
async.setImmediate = async.nextTick;
|
||||
}
|
||||
}
|
||||
else {
|
||||
async.nextTick = process.nextTick;
|
||||
if (typeof setImmediate !== 'undefined') {
|
||||
async.setImmediate = function (fn) {
|
||||
// not a direct alias for IE10 compatibility
|
||||
setImmediate(fn);
|
||||
};
|
||||
}
|
||||
else {
|
||||
async.setImmediate = async.nextTick;
|
||||
}
|
||||
}
|
||||
|
||||
async.each = function (arr, iterator, callback) {
|
||||
callback = callback || function () {};
|
||||
if (!arr.length) {
|
||||
return callback();
|
||||
}
|
||||
var completed = 0;
|
||||
_each(arr, function (x) {
|
||||
iterator(x, only_once(function (err) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
completed += 1;
|
||||
if (completed >= arr.length) {
|
||||
callback(null);
|
||||
}
|
||||
}
|
||||
}));
|
||||
});
|
||||
};
|
||||
async.forEach = async.each;
|
||||
|
||||
async.eachSeries = function (arr, iterator, callback) {
|
||||
callback = callback || function () {};
|
||||
if (!arr.length) {
|
||||
return callback();
|
||||
}
|
||||
var completed = 0;
|
||||
var iterate = function () {
|
||||
iterator(arr[completed], function (err) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
completed += 1;
|
||||
if (completed >= arr.length) {
|
||||
callback(null);
|
||||
}
|
||||
else {
|
||||
iterate();
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
iterate();
|
||||
};
|
||||
async.forEachSeries = async.eachSeries;
|
||||
|
||||
async.eachLimit = function (arr, limit, iterator, callback) {
|
||||
var fn = _eachLimit(limit);
|
||||
fn.apply(null, [arr, iterator, callback]);
|
||||
};
|
||||
async.forEachLimit = async.eachLimit;
|
||||
|
||||
var _eachLimit = function (limit) {
|
||||
|
||||
return function (arr, iterator, callback) {
|
||||
callback = callback || function () {};
|
||||
if (!arr.length || limit <= 0) {
|
||||
return callback();
|
||||
}
|
||||
var completed = 0;
|
||||
var started = 0;
|
||||
var running = 0;
|
||||
|
||||
(function replenish () {
|
||||
if (completed >= arr.length) {
|
||||
return callback();
|
||||
}
|
||||
|
||||
while (running < limit && started < arr.length) {
|
||||
started += 1;
|
||||
running += 1;
|
||||
iterator(arr[started - 1], function (err) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
completed += 1;
|
||||
running -= 1;
|
||||
if (completed >= arr.length) {
|
||||
callback();
|
||||
}
|
||||
else {
|
||||
replenish();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
})();
|
||||
};
|
||||
};
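    // --- Illustrative sketch (not part of the original async source) ---
    // eachLimit() runs the iterator over the array with at most `limit`
    // invocations in flight at once. Hypothetical example, never called here:
    function example_eachLimit_usage(done) {
        async.eachLimit([ 1, 2, 3, 4, 5 ], 2, function (n, cb) {
            setTimeout(function () { cb(null); }, n); // pretend async work
        }, function (err) {
            done(err); // called once, after all items (or on the first error)
        });
    }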
|
||||
|
||||
|
||||
var doParallel = function (fn) {
|
||||
return function () {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
return fn.apply(null, [async.each].concat(args));
|
||||
};
|
||||
};
|
||||
var doParallelLimit = function(limit, fn) {
|
||||
return function () {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
return fn.apply(null, [_eachLimit(limit)].concat(args));
|
||||
};
|
||||
};
|
||||
var doSeries = function (fn) {
|
||||
return function () {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
return fn.apply(null, [async.eachSeries].concat(args));
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
var _asyncMap = function (eachfn, arr, iterator, callback) {
|
||||
var results = [];
|
||||
arr = _map(arr, function (x, i) {
|
||||
return {index: i, value: x};
|
||||
});
|
||||
eachfn(arr, function (x, callback) {
|
||||
iterator(x.value, function (err, v) {
|
||||
results[x.index] = v;
|
||||
callback(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, results);
|
||||
});
|
||||
};
|
||||
async.map = doParallel(_asyncMap);
|
||||
async.mapSeries = doSeries(_asyncMap);
|
||||
async.mapLimit = function (arr, limit, iterator, callback) {
|
||||
return _mapLimit(limit)(arr, iterator, callback);
|
||||
};
|
||||
|
||||
var _mapLimit = function(limit) {
|
||||
return doParallelLimit(limit, _asyncMap);
|
||||
};
|
||||
|
||||
// reduce only has a series version, as doing reduce in parallel won't
|
||||
// work in many situations.
|
||||
async.reduce = function (arr, memo, iterator, callback) {
|
||||
async.eachSeries(arr, function (x, callback) {
|
||||
iterator(memo, x, function (err, v) {
|
||||
memo = v;
|
||||
callback(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, memo);
|
||||
});
|
||||
};
|
||||
// inject alias
|
||||
async.inject = async.reduce;
|
||||
// foldl alias
|
||||
async.foldl = async.reduce;
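    // --- Illustrative sketch (not part of the original async source) ---
    // reduce() folds the array strictly in series, threading the memo through
    // each async step. Hypothetical example, never called by the library:
    function example_reduce_usage(done) {
        async.reduce([ 1, 2, 3 ], 0, function (memo, item, cb) {
            async.setImmediate(function () { cb(null, memo + item); });
        }, function (err, result) {
            done(err, result); // result === 6
        });
    }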
|
||||
|
||||
async.reduceRight = function (arr, memo, iterator, callback) {
|
||||
var reversed = _map(arr, function (x) {
|
||||
return x;
|
||||
}).reverse();
|
||||
async.reduce(reversed, memo, iterator, callback);
|
||||
};
|
||||
// foldr alias
|
||||
async.foldr = async.reduceRight;
|
||||
|
||||
var _filter = function (eachfn, arr, iterator, callback) {
|
||||
var results = [];
|
||||
arr = _map(arr, function (x, i) {
|
||||
return {index: i, value: x};
|
||||
});
|
||||
eachfn(arr, function (x, callback) {
|
||||
iterator(x.value, function (v) {
|
||||
if (v) {
|
||||
results.push(x);
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, function (err) {
|
||||
callback(_map(results.sort(function (a, b) {
|
||||
return a.index - b.index;
|
||||
}), function (x) {
|
||||
return x.value;
|
||||
}));
|
||||
});
|
||||
};
|
||||
async.filter = doParallel(_filter);
|
||||
async.filterSeries = doSeries(_filter);
|
||||
// select alias
|
||||
async.select = async.filter;
|
||||
async.selectSeries = async.filterSeries;
|
||||
|
||||
var _reject = function (eachfn, arr, iterator, callback) {
|
||||
var results = [];
|
||||
arr = _map(arr, function (x, i) {
|
||||
return {index: i, value: x};
|
||||
});
|
||||
eachfn(arr, function (x, callback) {
|
||||
iterator(x.value, function (v) {
|
||||
if (!v) {
|
||||
results.push(x);
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, function (err) {
|
||||
callback(_map(results.sort(function (a, b) {
|
||||
return a.index - b.index;
|
||||
}), function (x) {
|
||||
return x.value;
|
||||
}));
|
||||
});
|
||||
};
|
||||
async.reject = doParallel(_reject);
|
||||
async.rejectSeries = doSeries(_reject);
|
||||
|
||||
var _detect = function (eachfn, arr, iterator, main_callback) {
|
||||
eachfn(arr, function (x, callback) {
|
||||
iterator(x, function (result) {
|
||||
if (result) {
|
||||
main_callback(x);
|
||||
main_callback = function () {};
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
}, function (err) {
|
||||
main_callback();
|
||||
});
|
||||
};
|
||||
async.detect = doParallel(_detect);
|
||||
async.detectSeries = doSeries(_detect);
|
||||
|
||||
async.some = function (arr, iterator, main_callback) {
|
||||
async.each(arr, function (x, callback) {
|
||||
iterator(x, function (v) {
|
||||
if (v) {
|
||||
main_callback(true);
|
||||
main_callback = function () {};
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, function (err) {
|
||||
main_callback(false);
|
||||
});
|
||||
};
|
||||
// any alias
|
||||
async.any = async.some;
|
||||
|
||||
async.every = function (arr, iterator, main_callback) {
|
||||
async.each(arr, function (x, callback) {
|
||||
iterator(x, function (v) {
|
||||
if (!v) {
|
||||
main_callback(false);
|
||||
main_callback = function () {};
|
||||
}
|
||||
callback();
|
||||
});
|
||||
}, function (err) {
|
||||
main_callback(true);
|
||||
});
|
||||
};
|
||||
// all alias
|
||||
async.all = async.every;
|
||||
|
||||
async.sortBy = function (arr, iterator, callback) {
|
||||
async.map(arr, function (x, callback) {
|
||||
iterator(x, function (err, criteria) {
|
||||
if (err) {
|
||||
callback(err);
|
||||
}
|
||||
else {
|
||||
callback(null, {value: x, criteria: criteria});
|
||||
}
|
||||
});
|
||||
}, function (err, results) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
else {
|
||||
var fn = function (left, right) {
|
||||
var a = left.criteria, b = right.criteria;
|
||||
return a < b ? -1 : a > b ? 1 : 0;
|
||||
};
|
||||
callback(null, _map(results.sort(fn), function (x) {
|
||||
return x.value;
|
||||
}));
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
async.auto = function (tasks, callback) {
|
||||
callback = callback || function () {};
|
||||
var keys = _keys(tasks);
|
||||
if (!keys.length) {
|
||||
return callback(null);
|
||||
}
|
||||
|
||||
var results = {};
|
||||
|
||||
var listeners = [];
|
||||
var addListener = function (fn) {
|
||||
listeners.unshift(fn);
|
||||
};
|
||||
var removeListener = function (fn) {
|
||||
for (var i = 0; i < listeners.length; i += 1) {
|
||||
if (listeners[i] === fn) {
|
||||
listeners.splice(i, 1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
};
|
||||
var taskComplete = function () {
|
||||
_each(listeners.slice(0), function (fn) {
|
||||
fn();
|
||||
});
|
||||
};
|
||||
|
||||
addListener(function () {
|
||||
if (_keys(results).length === keys.length) {
|
||||
callback(null, results);
|
||||
callback = function () {};
|
||||
}
|
||||
});
|
||||
|
||||
_each(keys, function (k) {
|
||||
var task = (tasks[k] instanceof Function) ? [tasks[k]]: tasks[k];
|
||||
var taskCallback = function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
if (err) {
|
||||
var safeResults = {};
|
||||
_each(_keys(results), function(rkey) {
|
||||
safeResults[rkey] = results[rkey];
|
||||
});
|
||||
safeResults[k] = args;
|
||||
callback(err, safeResults);
|
||||
// stop subsequent errors hitting callback multiple times
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
results[k] = args;
|
||||
async.setImmediate(taskComplete);
|
||||
}
|
||||
};
|
||||
var requires = task.slice(0, Math.abs(task.length - 1)) || [];
|
||||
var ready = function () {
|
||||
return _reduce(requires, function (a, x) {
|
||||
return (a && results.hasOwnProperty(x));
|
||||
}, true) && !results.hasOwnProperty(k);
|
||||
};
|
||||
if (ready()) {
|
||||
task[task.length - 1](taskCallback, results);
|
||||
}
|
||||
else {
|
||||
var listener = function () {
|
||||
if (ready()) {
|
||||
removeListener(listener);
|
||||
task[task.length - 1](taskCallback, results);
|
||||
}
|
||||
};
|
||||
addListener(listener);
|
||||
}
|
||||
});
|
||||
};
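    // --- Illustrative sketch (not part of the original async source) ---
    // auto() runs each task as soon as its named dependencies have produced
    // results; in this version the task function receives (callback, results).
    // Hypothetical example, never called by the library:
    function example_auto_usage(done) {
        async.auto({
            get_data: function (cb) { cb(null, "data"); },
            make_folder: function (cb) { cb(null, "folder"); },
            write_file: [ "get_data", "make_folder", function (cb, results) {
                cb(null, results.get_data + " written to " + results.make_folder);
            } ]
        }, function (err, results) {
            done(err, results && results.write_file);
        });
    }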
|
||||
|
||||
async.waterfall = function (tasks, callback) {
|
||||
callback = callback || function () {};
|
||||
if (tasks.constructor !== Array) {
|
||||
var err = new Error('First argument to waterfall must be an array of functions');
|
||||
return callback(err);
|
||||
}
|
||||
if (!tasks.length) {
|
||||
return callback();
|
||||
}
|
||||
var wrapIterator = function (iterator) {
|
||||
return function (err) {
|
||||
if (err) {
|
||||
callback.apply(null, arguments);
|
||||
callback = function () {};
|
||||
}
|
||||
else {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
var next = iterator.next();
|
||||
if (next) {
|
||||
args.push(wrapIterator(next));
|
||||
}
|
||||
else {
|
||||
args.push(callback);
|
||||
}
|
||||
async.setImmediate(function () {
|
||||
iterator.apply(null, args);
|
||||
});
|
||||
}
|
||||
};
|
||||
};
|
||||
wrapIterator(async.iterator(tasks))();
|
||||
};
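    // --- Illustrative sketch (not part of the original async source) ---
    // waterfall() passes each task's results on to the next task as arguments.
    // Hypothetical example, never called by the library:
    function example_waterfall_usage(done) {
        async.waterfall([
            function (cb)       { cb(null, 1, 2); },
            function (a, b, cb) { cb(null, a + b); },   // receives 1, 2
            function (sum, cb)  { cb(null, sum * 10); } // receives 3
        ], function (err, result) {
            done(err, result); // result === 30
        });
    }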
|
||||
|
||||
var _parallel = function(eachfn, tasks, callback) {
|
||||
callback = callback || function () {};
|
||||
if (tasks.constructor === Array) {
|
||||
eachfn.map(tasks, function (fn, callback) {
|
||||
if (fn) {
|
||||
fn(function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
callback.call(null, err, args);
|
||||
});
|
||||
}
|
||||
}, callback);
|
||||
}
|
||||
else {
|
||||
var results = {};
|
||||
eachfn.each(_keys(tasks), function (k, callback) {
|
||||
tasks[k](function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
results[k] = args;
|
||||
callback(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, results);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
async.parallel = function (tasks, callback) {
|
||||
_parallel({ map: async.map, each: async.each }, tasks, callback);
|
||||
};
|
||||
|
||||
async.parallelLimit = function(tasks, limit, callback) {
|
||||
_parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback);
|
||||
};
|
||||
|
||||
async.series = function (tasks, callback) {
|
||||
callback = callback || function () {};
|
||||
if (tasks.constructor === Array) {
|
||||
async.mapSeries(tasks, function (fn, callback) {
|
||||
if (fn) {
|
||||
fn(function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
callback.call(null, err, args);
|
||||
});
|
||||
}
|
||||
}, callback);
|
||||
}
|
||||
else {
|
||||
var results = {};
|
||||
async.eachSeries(_keys(tasks), function (k, callback) {
|
||||
tasks[k](function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (args.length <= 1) {
|
||||
args = args[0];
|
||||
}
|
||||
results[k] = args;
|
||||
callback(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, results);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
async.iterator = function (tasks) {
|
||||
var makeCallback = function (index) {
|
||||
var fn = function () {
|
||||
if (tasks.length) {
|
||||
tasks[index].apply(null, arguments);
|
||||
}
|
||||
return fn.next();
|
||||
};
|
||||
fn.next = function () {
|
||||
return (index < tasks.length - 1) ? makeCallback(index + 1): null;
|
||||
};
|
||||
return fn;
|
||||
};
|
||||
return makeCallback(0);
|
||||
};
|
||||
|
||||
async.apply = function (fn) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
return function () {
|
||||
return fn.apply(
|
||||
null, args.concat(Array.prototype.slice.call(arguments))
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
var _concat = function (eachfn, arr, fn, callback) {
|
||||
var r = [];
|
||||
eachfn(arr, function (x, cb) {
|
||||
fn(x, function (err, y) {
|
||||
r = r.concat(y || []);
|
||||
cb(err);
|
||||
});
|
||||
}, function (err) {
|
||||
callback(err, r);
|
||||
});
|
||||
};
|
||||
async.concat = doParallel(_concat);
|
||||
async.concatSeries = doSeries(_concat);
|
||||
|
||||
async.whilst = function (test, iterator, callback) {
|
||||
if (test()) {
|
||||
iterator(function (err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
async.whilst(test, iterator, callback);
|
||||
});
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
};
|
||||
|
||||
async.doWhilst = function (iterator, test, callback) {
|
||||
iterator(function (err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
if (test()) {
|
||||
async.doWhilst(iterator, test, callback);
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
async.until = function (test, iterator, callback) {
|
||||
if (!test()) {
|
||||
iterator(function (err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
async.until(test, iterator, callback);
|
||||
});
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
};
|
||||
|
||||
async.doUntil = function (iterator, test, callback) {
|
||||
iterator(function (err) {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
if (!test()) {
|
||||
async.doUntil(iterator, test, callback);
|
||||
}
|
||||
else {
|
||||
callback();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
async.queue = function (worker, concurrency) {
|
||||
if (concurrency === undefined) {
|
||||
concurrency = 1;
|
||||
}
|
||||
function _insert(q, data, pos, callback) {
|
||||
if(data.constructor !== Array) {
|
||||
data = [data];
|
||||
}
|
||||
_each(data, function(task) {
|
||||
var item = {
|
||||
data: task,
|
||||
callback: typeof callback === 'function' ? callback : null
|
||||
};
|
||||
|
||||
if (pos) {
|
||||
q.tasks.unshift(item);
|
||||
} else {
|
||||
q.tasks.push(item);
|
||||
}
|
||||
|
||||
if (q.saturated && q.tasks.length === concurrency) {
|
||||
q.saturated();
|
||||
}
|
||||
async.setImmediate(q.process);
|
||||
});
|
||||
}
|
||||
|
||||
var workers = 0;
|
||||
var q = {
|
||||
tasks: [],
|
||||
concurrency: concurrency,
|
||||
saturated: null,
|
||||
empty: null,
|
||||
drain: null,
|
||||
push: function (data, callback) {
|
||||
_insert(q, data, false, callback);
|
||||
},
|
||||
unshift: function (data, callback) {
|
||||
_insert(q, data, true, callback);
|
||||
},
|
||||
process: function () {
|
||||
if (workers < q.concurrency && q.tasks.length) {
|
||||
var task = q.tasks.shift();
|
||||
if (q.empty && q.tasks.length === 0) {
|
||||
q.empty();
|
||||
}
|
||||
workers += 1;
|
||||
var next = function () {
|
||||
workers -= 1;
|
||||
if (task.callback) {
|
||||
task.callback.apply(task, arguments);
|
||||
}
|
||||
if (q.drain && q.tasks.length + workers === 0) {
|
||||
q.drain();
|
||||
}
|
||||
q.process();
|
||||
};
|
||||
var cb = only_once(next);
|
||||
worker(task.data, cb);
|
||||
}
|
||||
},
|
||||
length: function () {
|
||||
return q.tasks.length;
|
||||
},
|
||||
running: function () {
|
||||
return workers;
|
||||
}
|
||||
};
|
||||
return q;
|
||||
};
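    // --- Illustrative sketch (not part of the original async source) ---
    // queue() processes pushed tasks with a fixed concurrency; drain fires once
    // the last task finishes. Hypothetical example, never called by the library:
    function example_queue_usage(done) {
        var q = async.queue(function (task, cb) {
            async.setImmediate(function () { cb(null); }); // pretend to handle task.name
        }, 2);                                             // at most 2 workers
        q.drain = function () { done(null); };
        q.push({ name: "a" });
        q.push([ { name: "b" }, { name: "c" } ]);          // arrays are accepted too
    }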
|
||||
|
||||
async.cargo = function (worker, payload) {
|
||||
var working = false,
|
||||
tasks = [];
|
||||
|
||||
var cargo = {
|
||||
tasks: tasks,
|
||||
payload: payload,
|
||||
saturated: null,
|
||||
empty: null,
|
||||
drain: null,
|
||||
push: function (data, callback) {
|
||||
if(data.constructor !== Array) {
|
||||
data = [data];
|
||||
}
|
||||
_each(data, function(task) {
|
||||
tasks.push({
|
||||
data: task,
|
||||
callback: typeof callback === 'function' ? callback : null
|
||||
});
|
||||
if (cargo.saturated && tasks.length === payload) {
|
||||
cargo.saturated();
|
||||
}
|
||||
});
|
||||
async.setImmediate(cargo.process);
|
||||
},
|
||||
process: function process() {
|
||||
if (working) return;
|
||||
if (tasks.length === 0) {
|
||||
if(cargo.drain) cargo.drain();
|
||||
return;
|
||||
}
|
||||
|
||||
var ts = typeof payload === 'number'
|
||||
? tasks.splice(0, payload)
|
||||
: tasks.splice(0);
|
||||
|
||||
var ds = _map(ts, function (task) {
|
||||
return task.data;
|
||||
});
|
||||
|
||||
if(cargo.empty) cargo.empty();
|
||||
working = true;
|
||||
worker(ds, function () {
|
||||
working = false;
|
||||
|
||||
var args = arguments;
|
||||
_each(ts, function (data) {
|
||||
if (data.callback) {
|
||||
data.callback.apply(null, args);
|
||||
}
|
||||
});
|
||||
|
||||
process();
|
||||
});
|
||||
},
|
||||
length: function () {
|
||||
return tasks.length;
|
||||
},
|
||||
running: function () {
|
||||
return working;
|
||||
}
|
||||
};
|
||||
return cargo;
|
||||
};
|
||||
|
||||
var _console_fn = function (name) {
|
||||
return function (fn) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
fn.apply(null, args.concat([function (err) {
|
||||
var args = Array.prototype.slice.call(arguments, 1);
|
||||
if (typeof console !== 'undefined') {
|
||||
if (err) {
|
||||
if (console.error) {
|
||||
console.error(err);
|
||||
}
|
||||
}
|
||||
else if (console[name]) {
|
||||
_each(args, function (x) {
|
||||
console[name](x);
|
||||
});
|
||||
}
|
||||
}
|
||||
}]));
|
||||
};
|
||||
};
|
||||
async.log = _console_fn('log');
|
||||
async.dir = _console_fn('dir');
|
||||
/*async.info = _console_fn('info');
|
||||
async.warn = _console_fn('warn');
|
||||
async.error = _console_fn('error');*/
|
||||
|
||||
async.memoize = function (fn, hasher) {
|
||||
var memo = {};
|
||||
var queues = {};
|
||||
hasher = hasher || function (x) {
|
||||
return x;
|
||||
};
|
||||
var memoized = function () {
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
var callback = args.pop();
|
||||
var key = hasher.apply(null, args);
|
||||
if (key in memo) {
|
||||
callback.apply(null, memo[key]);
|
||||
}
|
||||
else if (key in queues) {
|
||||
queues[key].push(callback);
|
||||
}
|
||||
else {
|
||||
queues[key] = [callback];
|
||||
fn.apply(null, args.concat([function () {
|
||||
memo[key] = arguments;
|
||||
var q = queues[key];
|
||||
delete queues[key];
|
||||
for (var i = 0, l = q.length; i < l; i++) {
|
||||
q[i].apply(null, arguments);
|
||||
}
|
||||
}]));
|
||||
}
|
||||
};
|
||||
memoized.memo = memo;
|
||||
memoized.unmemoized = fn;
|
||||
return memoized;
|
||||
};
|
||||
|
||||
async.unmemoize = function (fn) {
|
||||
return function () {
|
||||
return (fn.unmemoized || fn).apply(null, arguments);
|
||||
};
|
||||
};
|
||||
|
||||
async.times = function (count, iterator, callback) {
|
||||
var counter = [];
|
||||
for (var i = 0; i < count; i++) {
|
||||
counter.push(i);
|
||||
}
|
||||
return async.map(counter, iterator, callback);
|
||||
};
|
||||
|
||||
async.timesSeries = function (count, iterator, callback) {
|
||||
var counter = [];
|
||||
for (var i = 0; i < count; i++) {
|
||||
counter.push(i);
|
||||
}
|
||||
return async.mapSeries(counter, iterator, callback);
|
||||
};
|
||||
|
||||
async.compose = function (/* functions... */) {
|
||||
var fns = Array.prototype.reverse.call(arguments);
|
||||
return function () {
|
||||
var that = this;
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
var callback = args.pop();
|
||||
async.reduce(fns, args, function (newargs, fn, cb) {
|
||||
fn.apply(that, newargs.concat([function () {
|
||||
var err = arguments[0];
|
||||
var nextargs = Array.prototype.slice.call(arguments, 1);
|
||||
cb(err, nextargs);
|
||||
}]))
|
||||
},
|
||||
function (err, results) {
|
||||
callback.apply(that, [err].concat(results));
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
var _applyEach = function (eachfn, fns /*args...*/) {
|
||||
var go = function () {
|
||||
var that = this;
|
||||
var args = Array.prototype.slice.call(arguments);
|
||||
var callback = args.pop();
|
||||
return eachfn(fns, function (fn, cb) {
|
||||
fn.apply(that, args.concat([cb]));
|
||||
},
|
||||
callback);
|
||||
};
|
||||
if (arguments.length > 2) {
|
||||
var args = Array.prototype.slice.call(arguments, 2);
|
||||
return go.apply(this, args);
|
||||
}
|
||||
else {
|
||||
return go;
|
||||
}
|
||||
};
|
||||
async.applyEach = doParallel(_applyEach);
|
||||
async.applyEachSeries = doSeries(_applyEach);
|
||||
|
||||
async.forever = function (fn, callback) {
|
||||
function next(err) {
|
||||
if (err) {
|
||||
if (callback) {
|
||||
return callback(err);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
fn(next);
|
||||
}
|
||||
next();
|
||||
};
|
||||
|
||||
// AMD / RequireJS
|
||||
if (typeof define !== 'undefined' && define.amd) {
|
||||
define([], function () {
|
||||
return async;
|
||||
});
|
||||
}
|
||||
// Node.js
|
||||
else if (typeof module !== 'undefined' && module.exports) {
|
||||
module.exports = async;
|
||||
}
|
||||
// included directly via <script> tag
|
||||
else {
|
||||
root.async = async;
|
||||
}
|
||||
|
||||
}());
|
||||
42
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/async/package.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
dist/*
|
||||
node_modules/*
|
||||
1
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/.tern-port
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
55494
|
||||
4
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- 0.8
|
||||
- "0.10"
|
||||
137
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
# Change Log
|
||||
|
||||
## 0.1.34
|
||||
|
||||
* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103.
|
||||
|
||||
* Fix bug involving source contents and the
|
||||
`SourceMapGenerator.prototype.applySourceMap`. Issue #100.
|
||||
|
||||
## 0.1.33
|
||||
|
||||
* Fix some edge cases surrounding path joining and URL resolution.
|
||||
|
||||
* Add a third parameter for relative path to
|
||||
`SourceMapGenerator.prototype.applySourceMap`.
|
||||
|
||||
* Fix issues with mappings and EOLs.
|
||||
|
||||
## 0.1.32
|
||||
|
||||
* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns
|
||||
(issue 92).
|
||||
|
||||
* Fixed test runner to actually report number of failed tests as its process
|
||||
exit code.
|
||||
|
||||
* Fixed a typo when reporting bad mappings (issue 87).
|
||||
|
||||
## 0.1.31
|
||||
|
||||
* Delay parsing the mappings in SourceMapConsumer until queried for a source
|
||||
location.
|
||||
|
||||
* Support Sass source maps (which at the time of writing deviate from the spec
|
||||
in small ways) in SourceMapConsumer.
|
||||
|
||||
## 0.1.30
|
||||
|
||||
* Do not join source root with a source, when the source is a data URI.
|
||||
|
||||
* Extend the test runner to allow running single specific test files at a time.
|
||||
|
||||
* Performance improvements in `SourceNode.prototype.walk` and
|
||||
`SourceMapConsumer.prototype.eachMapping`.
|
||||
|
||||
* Source map browser builds will now work inside Workers.
|
||||
|
||||
* Better error messages when attempting to add an invalid mapping to a
|
||||
`SourceMapGenerator`.
|
||||
|
||||
## 0.1.29
|
||||
|
||||
* Allow duplicate entries in the `names` and `sources` arrays of source maps
|
||||
(usually from TypeScript) we are parsing. Fixes github issue 72.
|
||||
|
||||
## 0.1.28
|
||||
|
||||
* Skip duplicate mappings when creating source maps from SourceNode; github
|
||||
issue 75.
|
||||
|
||||
## 0.1.27
|
||||
|
||||
* Don't throw an error when the `file` property is missing in SourceMapConsumer,
|
||||
we don't use it anyway.
|
||||
|
||||
## 0.1.26
|
||||
|
||||
* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70.
|
||||
|
||||
## 0.1.25
|
||||
|
||||
* Make compatible with browserify
|
||||
|
||||
## 0.1.24
|
||||
|
||||
* Fix issue with absolute paths and `file://` URIs. See
|
||||
https://bugzilla.mozilla.org/show_bug.cgi?id=885597
|
||||
|
||||
## 0.1.23
|
||||
|
||||
* Fix issue with absolute paths and sourcesContent, github issue 64.
|
||||
|
||||
## 0.1.22
|
||||
|
||||
* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21.
|
||||
|
||||
## 0.1.21
|
||||
|
||||
* Fixed handling of sources that start with a slash so that they are relative to
|
||||
the source root's host.
|
||||
|
||||
## 0.1.20
|
||||
|
||||
* Fixed github issue #43: absolute URLs aren't joined with the source root
|
||||
anymore.
|
||||
|
||||
## 0.1.19
|
||||
|
||||
* Using Travis CI to run tests.
|
||||
|
||||
## 0.1.18
|
||||
|
||||
* Fixed a bug in the handling of sourceRoot.
|
||||
|
||||
## 0.1.17
|
||||
|
||||
* Added SourceNode.fromStringWithSourceMap.
|
||||
|
||||
## 0.1.16
|
||||
|
||||
* Added missing documentation.
|
||||
|
||||
* Fixed the generating of empty mappings in SourceNode.
|
||||
|
||||
## 0.1.15
|
||||
|
||||
* Added SourceMapGenerator.applySourceMap.
|
||||
|
||||
## 0.1.14
|
||||
|
||||
* The sourceRoot is now handled consistently.
|
||||
|
||||
## 0.1.13
|
||||
|
||||
* Added SourceMapGenerator.fromSourceMap.
|
||||
|
||||
## 0.1.12
|
||||
|
||||
* SourceNode now generates empty mappings too.
|
||||
|
||||
## 0.1.11
|
||||
|
||||
* Added name support to SourceNode.
|
||||
|
||||
## 0.1.10
|
||||
|
||||
* Added sourcesContent support to the consumer and generator.
|
||||
28
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
|
||||
Copyright (c) 2009-2011, Mozilla Foundation and contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the names of the Mozilla Foundation nor the names of project
|
||||
contributors may be used to endorse or promote products derived from this
|
||||
software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
166
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/Makefile.dryice.js
generated
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var copy = require('dryice').copy;
|
||||
|
||||
function removeAmdefine(src) {
|
||||
src = String(src).replace(
|
||||
/if\s*\(typeof\s*define\s*!==\s*'function'\)\s*{\s*var\s*define\s*=\s*require\('amdefine'\)\(module,\s*require\);\s*}\s*/g,
|
||||
'');
|
||||
src = src.replace(
|
||||
/\b(define\(.*)('amdefine',?)/gm,
|
||||
'$1');
|
||||
return src;
|
||||
}
|
||||
removeAmdefine.onRead = true;
|
||||
|
||||
function makeNonRelative(src) {
|
||||
return src
|
||||
.replace(/require\('.\//g, 'require(\'source-map/')
|
||||
.replace(/\.\.\/\.\.\/lib\//g, '');
|
||||
}
|
||||
makeNonRelative.onRead = true;
|
||||
|
||||
function buildBrowser() {
|
||||
console.log('\nCreating dist/source-map.js');
|
||||
|
||||
var project = copy.createCommonJsProject({
|
||||
roots: [ path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/mini-require.js',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'source-map/source-map-generator',
|
||||
'source-map/source-map-consumer',
|
||||
'source-map/source-node']
|
||||
},
|
||||
'build/suffix-browser.js'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine
|
||||
],
|
||||
dest: 'dist/source-map.js'
|
||||
});
|
||||
}
|
||||
|
||||
function buildBrowserMin() {
|
||||
console.log('\nCreating dist/source-map.min.js');
|
||||
|
||||
copy({
|
||||
source: 'dist/source-map.js',
|
||||
filter: copy.filter.uglifyjs,
|
||||
dest: 'dist/source-map.min.js'
|
||||
});
|
||||
}
|
||||
|
||||
function buildFirefox() {
|
||||
console.log('\nCreating dist/SourceMap.jsm');
|
||||
|
||||
var project = copy.createCommonJsProject({
|
||||
roots: [ path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/prefix-source-map.jsm',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'source-map/source-map-consumer',
|
||||
'source-map/source-map-generator',
|
||||
'source-map/source-node' ]
|
||||
},
|
||||
'build/suffix-source-map.jsm'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine,
|
||||
makeNonRelative
|
||||
],
|
||||
dest: 'dist/SourceMap.jsm'
|
||||
});
|
||||
|
||||
// Create dist/test/Utils.jsm
|
||||
console.log('\nCreating dist/test/Utils.jsm');
|
||||
|
||||
project = copy.createCommonJsProject({
|
||||
roots: [ __dirname, path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/prefix-utils.jsm',
|
||||
'build/assert-shim.js',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'test/source-map/util' ]
|
||||
},
|
||||
'build/suffix-utils.jsm'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine,
|
||||
makeNonRelative
|
||||
],
|
||||
dest: 'dist/test/Utils.jsm'
|
||||
});
|
||||
|
||||
function isTestFile(f) {
|
||||
return /^test\-.*?\.js/.test(f);
|
||||
}
|
||||
|
||||
var testFiles = fs.readdirSync(path.join(__dirname, 'test', 'source-map')).filter(isTestFile);
|
||||
|
||||
testFiles.forEach(function (testFile) {
|
||||
console.log('\nCreating', path.join('dist', 'test', testFile.replace(/\-/g, '_')));
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/test-prefix.js',
|
||||
path.join('test', 'source-map', testFile),
|
||||
'build/test-suffix.js'
|
||||
],
|
||||
filter: [
|
||||
removeAmdefine,
|
||||
makeNonRelative,
|
||||
function (input, source) {
|
||||
return input.replace('define(',
|
||||
'define("'
|
||||
+ path.join('test', 'source-map', testFile.replace(/\.js$/, ''))
|
||||
+ '", ["require", "exports", "module"], ');
|
||||
},
|
||||
function (input, source) {
|
||||
return input.replace('{THIS_MODULE}', function () {
|
||||
return "test/source-map/" + testFile.replace(/\.js$/, '');
|
||||
});
|
||||
}
|
||||
],
|
||||
dest: path.join('dist', 'test', testFile.replace(/\-/g, '_'))
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function ensureDir(name) {
|
||||
var dirExists = false;
|
||||
try {
|
||||
dirExists = fs.statSync(name).isDirectory();
|
||||
} catch (err) {}
|
||||
|
||||
if (!dirExists) {
|
||||
fs.mkdirSync(name, 0777);
|
||||
}
|
||||
}
|
||||
|
||||
ensureDir("dist");
|
||||
ensureDir("dist/test");
|
||||
buildFirefox();
|
||||
buildBrowser();
|
||||
buildBrowserMin();
|
||||
446
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/README.md
generated
vendored
Normal file
@@ -0,0 +1,446 @@
|
||||
# Source Map
|
||||
|
||||
This is a library to generate and consume the source map format
|
||||
[described here][format].
|
||||
|
||||
This library is written in the Asynchronous Module Definition format, and works
|
||||
in the following environments:
|
||||
|
||||
* Modern Browsers supporting ECMAScript 5 (either after the build, or with an
|
||||
AMD loader such as RequireJS)
|
||||
|
||||
* Inside Firefox (as a JSM file, after the build)
|
||||
|
||||
* With NodeJS versions 0.8.X and higher
|
||||
|
||||
## Node
|
||||
|
||||
$ npm install source-map
|
||||
|
||||
## Building from Source (for everywhere else)
|
||||
|
||||
Install Node and then run
|
||||
|
||||
$ git clone https://fitzgen@github.com/mozilla/source-map.git
|
||||
$ cd source-map
|
||||
$ npm link .
|
||||
|
||||
Next, run
|
||||
|
||||
$ node Makefile.dryice.js
|
||||
|
||||
This should spew a bunch of stuff to stdout, and create the following files:
|
||||
|
||||
* `dist/source-map.js` - The unminified browser version.
|
||||
|
||||
* `dist/source-map.min.js` - The minified browser version.
|
||||
|
||||
* `dist/SourceMap.jsm` - The JavaScript Module for inclusion in Firefox source.
|
||||
|
||||
## Examples
|
||||
|
||||
### Consuming a source map
|
||||
|
||||
var rawSourceMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourceRoot: 'http://example.com/www/js/',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
|
||||
var smc = new SourceMapConsumer(rawSourceMap);
|
||||
|
||||
console.log(smc.sources);
|
||||
// [ 'http://example.com/www/js/one.js',
|
||||
// 'http://example.com/www/js/two.js' ]
|
||||
|
||||
console.log(smc.originalPositionFor({
|
||||
line: 2,
|
||||
column: 28
|
||||
}));
|
||||
// { source: 'http://example.com/www/js/two.js',
|
||||
// line: 2,
|
||||
// column: 10,
|
||||
// name: 'n' }
|
||||
|
||||
console.log(smc.generatedPositionFor({
|
||||
source: 'http://example.com/www/js/two.js',
|
||||
line: 2,
|
||||
column: 10
|
||||
}));
|
||||
// { line: 2, column: 28 }
|
||||
|
||||
smc.eachMapping(function (m) {
|
||||
// ...
|
||||
});
|
||||
|
||||
### Generating a source map
|
||||
|
||||
In depth guide:
|
||||
[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/)
|
||||
|
||||
#### With SourceNode (high level API)
|
||||
|
||||
function compile(ast) {
|
||||
switch (ast.type) {
|
||||
case 'BinaryExpression':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
[compile(ast.left), " + ", compile(ast.right)]
|
||||
);
|
||||
case 'Literal':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
String(ast.value)
|
||||
);
|
||||
// ...
|
||||
default:
|
||||
throw new Error("Bad AST");
|
||||
}
|
||||
}
|
||||
|
||||
var ast = parse("40 + 2", "add.js");
|
||||
console.log(compile(ast).toStringWithSourceMap({
|
||||
file: 'add.js'
|
||||
}));
|
||||
// { code: '40 + 2',
|
||||
// map: [object SourceMapGenerator] }
|
||||
|
||||
#### With SourceMapGenerator (low level API)
|
||||
|
||||
var map = new SourceMapGenerator({
|
||||
file: "source-mapped.js"
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: 10,
|
||||
column: 35
|
||||
},
|
||||
source: "foo.js",
|
||||
original: {
|
||||
line: 33,
|
||||
column: 2
|
||||
},
|
||||
name: "christopher"
|
||||
});
|
||||
|
||||
console.log(map.toString());
|
||||
// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}'
|
||||
|
||||
## API
|
||||
|
||||
Get a reference to the module:
|
||||
|
||||
// NodeJS
|
||||
var sourceMap = require('source-map');
|
||||
|
||||
// Browser builds
|
||||
var sourceMap = window.sourceMap;
|
||||
|
||||
// Inside Firefox
|
||||
let sourceMap = {};
|
||||
Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
|
||||
|
||||
### SourceMapConsumer
|
||||
|
||||
A SourceMapConsumer instance represents a parsed source map which we can query
|
||||
for information about the original file positions by giving it a file position
|
||||
in the generated source.
|
||||
|
||||
#### new SourceMapConsumer(rawSourceMap)
|
||||
|
||||
The only parameter is the raw source map (either as a string which can be
|
||||
`JSON.parse`'d, or an object). According to the spec, source maps have the
|
||||
following attributes:
|
||||
|
||||
* `version`: Which version of the source map spec this map is following.
|
||||
|
||||
* `sources`: An array of URLs to the original source files.
|
||||
|
||||
* `names`: An array of identifiers which can be referenced by individual
|
||||
mappings.
|
||||
|
||||
* `sourceRoot`: Optional. The URL root from which all sources are relative.
|
||||
|
||||
* `sourcesContent`: Optional. An array of contents of the original source files.
|
||||
|
||||
* `mappings`: A string of base64 VLQs which contain the actual mappings.
|
||||
|
||||
* `file`: Optional. The generated filename this source map is associated with.
|
||||
|
||||
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
|
||||
|
||||
Returns the original source, line, and column information for the generated
|
||||
source's line and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `line`: The line number in the generated source.
|
||||
|
||||
* `column`: The column number in the generated source.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `source`: The original source file, or null if this information is not
|
||||
available.
|
||||
|
||||
* `line`: The line number in the original source, or null if this information is
|
||||
not available.
|
||||
|
||||
* `column`: The column number in the original source, or null if this
|
||||
information is not available.
|
||||
|
||||
* `name`: The original identifier, or null if this information is not available.
|
||||
|
||||
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
|
||||
|
||||
Returns the generated line and column information for the original source,
|
||||
line, and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `source`: The filename of the original source.
|
||||
|
||||
* `line`: The line number in the original source.
|
||||
|
||||
* `column`: The column number in the original source.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `line`: The line number in the generated source, or null.
|
||||
|
||||
* `column`: The column number in the generated source, or null.
|
||||
|
||||
#### SourceMapConsumer.prototype.sourceContentFor(source)
|
||||
|
||||
Returns the original source content for the source provided. The only
|
||||
argument is the URL of the original source file.
|
||||
|
||||
#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
|
||||
|
||||
Iterate over each mapping between an original source/line/column and a
|
||||
generated line/column in this source map.
|
||||
|
||||
* `callback`: The function that is called with each mapping. Mappings have the
|
||||
form `{ source, generatedLine, generatedColumn, originalLine, originalColumn,
|
||||
name }`
|
||||
|
||||
* `context`: Optional. If specified, this object will be the value of `this`
|
||||
every time that `callback` is called.
|
||||
|
||||
* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
`SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
|
||||
the mappings sorted by the generated file's line/column order or the
|
||||
original's source/line/column order, respectively. Defaults to
|
||||
`SourceMapConsumer.GENERATED_ORDER`.
|
||||
|
||||
### SourceMapGenerator
|
||||
|
||||
An instance of the SourceMapGenerator represents a source map which is being
|
||||
built incrementally.
|
||||
|
||||
#### new SourceMapGenerator([startOfSourceMap])
|
||||
|
||||
You may pass an object with the following properties:
|
||||
|
||||
* `file`: The filename of the generated source that this source map is
|
||||
associated with.
|
||||
|
||||
* `sourceRoot`: A root for all relative URLs in this source map.
|
||||
|
||||
#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)
|
||||
|
||||
Creates a new SourceMapGenerator based on a SourceMapConsumer.
|
||||
|
||||
* `sourceMapConsumer` The SourceMap.
|
||||
|
||||
#### SourceMapGenerator.prototype.addMapping(mapping)
|
||||
|
||||
Add a single mapping from original source line and column to the generated
|
||||
source's line and column for this source map being created. The mapping object
|
||||
should have the following properties:
|
||||
|
||||
* `generated`: An object with the generated line and column positions.
|
||||
|
||||
* `original`: An object with the original line and column positions.
|
||||
|
||||
* `source`: The original source file (relative to the sourceRoot).
|
||||
|
||||
* `name`: An optional original token name for this mapping.
|
||||
|
||||
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
|
||||
|
||||
Set the source content for an original source file.
|
||||
|
||||
* `sourceFile` the URL of the original source file.
|
||||
|
||||
* `sourceContent` the content of the source file.
|
||||
|
||||
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
|
||||
|
||||
Applies a SourceMap for a source file to the SourceMap.
|
||||
Each mapping to the supplied source file is rewritten using the
|
||||
supplied SourceMap. Note: The resolution for the resulting mappings
|
||||
is the minimum of this map and the supplied map.
|
||||
|
||||
* `sourceMapConsumer`: The SourceMap to be applied.
|
||||
|
||||
* `sourceFile`: Optional. The filename of the source file.
|
||||
If omitted, sourceMapConsumer.file will be used, if it exists.
|
||||
Otherwise an error will be thrown.
|
||||
|
||||
* `sourceMapPath`: Optional. The dirname of the path to the SourceMap
|
||||
to be applied. If relative, it is relative to the SourceMap.
|
||||
|
||||
This parameter is needed when the two SourceMaps aren't in the same
|
||||
directory, and the SourceMap to be applied contains relative source
|
||||
paths. If so, those relative source paths need to be rewritten
|
||||
relative to the SourceMap.
|
||||
|
||||
If omitted, it is assumed that both SourceMaps are in the same directory,
|
||||
thus not needing any rewriting. (Supplying `'.'` has the same effect.)
|
||||
|
||||
#### SourceMapGenerator.prototype.toString()
|
||||
|
||||
Renders the source map being generated to a string.
|
||||
|
||||
### SourceNode
|
||||
|
||||
SourceNodes provide a way to abstract over interpolating and/or concatenating
|
||||
snippets of generated JavaScript source code, while maintaining the line and
|
||||
column information associated between those snippets and the original source
|
||||
code. This is useful as the final intermediate representation a compiler might
|
||||
use before outputting the generated JS and source map.
|
||||
|
||||
#### new SourceNode([line, column, source[, chunk[, name]]])
|
||||
|
||||
* `line`: The original line number associated with this source node, or null if
|
||||
it isn't associated with an original line.
|
||||
|
||||
* `column`: The original column number associated with this source node, or null
|
||||
if it isn't associated with an original column.
|
||||
|
||||
* `source`: The original source's filename; null if no filename is provided.
|
||||
|
||||
* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
|
||||
below.
|
||||
|
||||
* `name`: Optional. The original identifier.
|
||||
|
||||
#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer)
|
||||
|
||||
Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||
|
||||
* `code`: The generated code
|
||||
|
||||
* `sourceMapConsumer` The SourceMap for the generated code
|
||||
|
||||
#### SourceNode.prototype.add(chunk)
|
||||
|
||||
Add a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
#### SourceNode.prototype.prepend(chunk)
|
||||
|
||||
Prepend a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)
|
||||
|
||||
Set the source content for a source file. This will be added to the
|
||||
`SourceMap` in the `sourcesContent` field.
|
||||
|
||||
* `sourceFile`: The filename of the source file
|
||||
|
||||
* `sourceContent`: The content of the source file
|
||||
|
||||
#### SourceNode.prototype.walk(fn)
|
||||
|
||||
Walk over the tree of JS snippets in this node and its children. The walking
|
||||
function is called once for each snippet of JS and is passed that snippet and
|
||||
its original associated source's line/column location.
|
||||
|
||||
* `fn`: The traversal function.
|
||||
|
||||
#### SourceNode.prototype.walkSourceContents(fn)
|
||||
|
||||
Walk over the tree of SourceNodes. The walking function is called for each
|
||||
source file content and is passed the filename and source content.
|
||||
|
||||
* `fn`: The traversal function.
|
||||
|
||||
#### SourceNode.prototype.join(sep)
|
||||
|
||||
Like `Array.prototype.join` except for SourceNodes. Inserts the separator
|
||||
between each of this source node's children.
|
||||
|
||||
* `sep`: The separator.
|
||||
|
||||
#### SourceNode.prototype.replaceRight(pattern, replacement)
|
||||
|
||||
Call `String.prototype.replace` on the very right-most source snippet. Useful
|
||||
for trimming whitespace from the end of a source node, etc.
|
||||
|
||||
* `pattern`: The pattern to replace.
|
||||
|
||||
* `replacement`: The thing to replace the pattern with.
|
||||
|
||||
#### SourceNode.prototype.toString()
|
||||
|
||||
Return the string representation of this source node. Walks over the tree and
|
||||
concatenates all the various snippets together to one string.
|
||||
|
||||
### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])
|
||||
|
||||
Returns the string representation of this tree of source nodes, plus a
|
||||
SourceMapGenerator which contains all the mappings between the generated and
|
||||
original sources.
|
||||
|
||||
The arguments are the same as those to `new SourceMapGenerator`.
|
||||
|
||||
## Tests
|
||||
|
||||
[](https://travis-ci.org/mozilla/source-map)
|
||||
|
||||
Install NodeJS version 0.8.0 or greater, then run `node test/run-tests.js`.
|
||||
|
||||
To add new tests, create a new file named `test/test-<your new test name>.js`
|
||||
and export your test functions with names that start with "test", for example
|
||||
|
||||
exports["test doing the foo bar"] = function (assert, util) {
|
||||
...
|
||||
};
|
||||
|
||||
The new test will be located automatically when you run the suite.
|
||||
|
||||
The `util` argument is the test utility module located at `test/source-map/util`.
|
||||
|
||||
The `assert` argument is a cut down version of node's assert module. You have
|
||||
access to the following assertion functions:
|
||||
|
||||
* `doesNotThrow`
|
||||
|
||||
* `equal`
|
||||
|
||||
* `ok`
|
||||
|
||||
* `strictEqual`
|
||||
|
||||
* `throws`
|
||||
|
||||
(The reason for the restricted set of test functions is that we need the
|
||||
tests to run inside Firefox's test suite as well and so the assert module is
|
||||
shimmed in that environment. See `build/assert-shim.js`.)
|
||||
|
||||
[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
|
||||
[feature]: https://wiki.mozilla.org/DevTools/Features/SourceMap
|
||||
[Dryice]: https://github.com/mozilla/dryice
|
||||
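A minimal sketch of the iteration API documented above, reusing the rawSourceMap object from the "Consuming a source map" example and the Node require shown earlier in the README (illustrative only):

    var sourceMap = require('source-map');
    var consumer = new sourceMap.SourceMapConsumer(rawSourceMap);

    // Walk every mapping in original-source order and print an
    // "original -> generated" line for each one.
    consumer.eachMapping(function (m) {
      console.log(m.source + ':' + m.originalLine + ':' + m.originalColumn +
                  ' -> ' + m.generatedLine + ':' + m.generatedColumn);
    }, null, sourceMap.SourceMapConsumer.ORIGINAL_ORDER);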
56
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/build/assert-shim.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
define('test/source-map/assert', ['exports'], function (exports) {
|
||||
|
||||
let do_throw = function (msg) {
|
||||
throw new Error(msg);
|
||||
};
|
||||
|
||||
exports.init = function (throw_fn) {
|
||||
do_throw = throw_fn;
|
||||
};
|
||||
|
||||
exports.doesNotThrow = function (fn) {
|
||||
try {
|
||||
fn();
|
||||
}
|
||||
catch (e) {
|
||||
do_throw(e.message);
|
||||
}
|
||||
};
|
||||
|
||||
exports.equal = function (actual, expected, msg) {
|
||||
msg = msg || String(actual) + ' != ' + String(expected);
|
||||
if (actual != expected) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.ok = function (val, msg) {
|
||||
msg = msg || String(val) + ' is falsey';
|
||||
if (!Boolean(val)) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.strictEqual = function (actual, expected, msg) {
|
||||
msg = msg || String(actual) + ' !== ' + String(expected);
|
||||
if (actual !== expected) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.throws = function (fn) {
|
||||
try {
|
||||
fn();
|
||||
do_throw('Expected an error to be thrown, but it wasn\'t.');
|
||||
}
|
||||
catch (e) {
|
||||
}
|
||||
};
|
||||
|
||||
});
|
||||
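A hypothetical snippet showing how a test would use the shim above once the build's define/require loader (mini-require.js, next file) has registered it; do_throw is the reporter supplied by the Firefox harness:

    var assert = require('test/source-map/assert');

    // Route assertion failures to the harness' reporter first.
    assert.init(do_throw);

    assert.equal(1 + 1, 2);
    assert.strictEqual('a' + 'b', 'ab');
    assert.throws(function () { throw new Error('expected failure'); });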
152
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/build/mini-require.js
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/**
|
||||
* Define a module along with a payload.
|
||||
* @param {string} moduleName Name for the payload
|
||||
* @param {ignored} deps Ignored. For compatibility with CommonJS AMD Spec
|
||||
* @param {function} payload Function with (require, exports, module) params
|
||||
*/
|
||||
function define(moduleName, deps, payload) {
|
||||
if (typeof moduleName != "string") {
|
||||
throw new TypeError('Expected string, got: ' + moduleName);
|
||||
}
|
||||
|
||||
if (arguments.length == 2) {
|
||||
payload = deps;
|
||||
}
|
||||
|
||||
if (moduleName in define.modules) {
|
||||
throw new Error("Module already defined: " + moduleName);
|
||||
}
|
||||
define.modules[moduleName] = payload;
|
||||
};
|
||||
|
||||
/**
|
||||
* The global store of un-instantiated modules
|
||||
*/
|
||||
define.modules = {};
|
||||
|
||||
|
||||
/**
|
||||
* We invoke require() in the context of a Domain so we can have multiple
|
||||
* sets of modules running separate from each other.
|
||||
* This contrasts with JSMs which are singletons, Domains allows us to
|
||||
* optionally load a CommonJS module twice with separate data each time.
|
||||
* Perhaps you want 2 command lines with a different set of commands in each,
|
||||
* for example.
|
||||
*/
|
||||
function Domain() {
|
||||
this.modules = {};
|
||||
this._currentModule = null;
|
||||
}
|
||||
|
||||
(function () {
|
||||
|
||||
/**
|
||||
* Lookup module names and resolve them by calling the definition function if
|
||||
* needed.
|
||||
* There are 2 ways to call this, either with an array of dependencies and a
|
||||
* callback to call when the dependencies are found (which can happen
|
||||
 * asynchronously in an in-page context) or with a single string and no callback
|
||||
* where the dependency is resolved synchronously and returned.
|
||||
* The API is designed to be compatible with the CommonJS AMD spec and
|
||||
* RequireJS.
|
||||
* @param {string[]|string} deps A name, or names for the payload
|
||||
* @param {function|undefined} callback Function to call when the dependencies
|
||||
* are resolved
|
||||
* @return {undefined|object} The module required or undefined for
|
||||
* array/callback method
|
||||
*/
|
||||
Domain.prototype.require = function(deps, callback) {
|
||||
if (Array.isArray(deps)) {
|
||||
var params = deps.map(function(dep) {
|
||||
return this.lookup(dep);
|
||||
}, this);
|
||||
if (callback) {
|
||||
callback.apply(null, params);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
else {
|
||||
return this.lookup(deps);
|
||||
}
|
||||
};
|
||||
|
||||
function normalize(path) {
|
||||
var bits = path.split('/');
|
||||
var i = 1;
|
||||
while (i < bits.length) {
|
||||
if (bits[i] === '..') {
|
||||
bits.splice(i-1, 1);
|
||||
} else if (bits[i] === '.') {
|
||||
bits.splice(i, 1);
|
||||
} else {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
return bits.join('/');
|
||||
}
|
||||
|
||||
function join(a, b) {
|
||||
a = a.trim();
|
||||
b = b.trim();
|
||||
if (/^\//.test(b)) {
|
||||
return b;
|
||||
} else {
|
||||
return a.replace(/\/*$/, '/') + b;
|
||||
}
|
||||
}
|
||||
|
||||
function dirname(path) {
|
||||
var bits = path.split('/');
|
||||
bits.pop();
|
||||
return bits.join('/');
|
||||
}
|
||||
|
||||
/**
|
||||
* Lookup module names and resolve them by calling the definition function if
|
||||
* needed.
|
||||
* @param {string} moduleName A name for the payload to lookup
|
||||
* @return {object} The module specified by aModuleName or null if not found.
|
||||
*/
|
||||
Domain.prototype.lookup = function(moduleName) {
|
||||
if (/^\./.test(moduleName)) {
|
||||
moduleName = normalize(join(dirname(this._currentModule), moduleName));
|
||||
}
|
||||
|
||||
if (moduleName in this.modules) {
|
||||
var module = this.modules[moduleName];
|
||||
return module;
|
||||
}
|
||||
|
||||
if (!(moduleName in define.modules)) {
|
||||
throw new Error("Module not defined: " + moduleName);
|
||||
}
|
||||
|
||||
var module = define.modules[moduleName];
|
||||
|
||||
if (typeof module == "function") {
|
||||
var exports = {};
|
||||
var previousModule = this._currentModule;
|
||||
this._currentModule = moduleName;
|
||||
module(this.require.bind(this), exports, { id: moduleName, uri: "" });
|
||||
this._currentModule = previousModule;
|
||||
module = exports;
|
||||
}
|
||||
|
||||
// cache the resulting module object for next time
|
||||
this.modules[moduleName] = module;
|
||||
|
||||
return module;
|
||||
};
|
||||
|
||||
}());
|
||||
|
||||
define.Domain = Domain;
|
||||
define.globalDomain = new Domain();
|
||||
var require = define.globalDomain.require.bind(define.globalDomain);
|
||||
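A small sketch of the loader above in use; the module id 'demo/greeter' is made up for illustration:

    // Register a module. With two arguments the payload takes the place
    // of the (ignored) dependency list.
    define('demo/greeter', function (require, exports, module) {
      exports.hello = function (name) {
        return 'Hello, ' + name;
      };
    });

    // Resolve it through the global Domain; the payload runs once and the
    // resulting exports object is cached for later lookups.
    var greeter = require('demo/greeter');
    console.log(greeter.hello('source-map')); // "Hello, source-map"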
20
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/build/prefix-source-map.jsm
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
this.EXPORTED_SYMBOLS = [ "SourceMapConsumer", "SourceMapGenerator", "SourceNode" ];
|
||||
|
||||
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
|
||||
18
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/build/prefix-utils.jsm
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
|
||||
Components.utils.import('resource://gre/modules/devtools/SourceMap.jsm');
|
||||
|
||||
this.EXPORTED_SYMBOLS = [ "define", "runSourceMapTests" ];
|
||||
8
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/build/suffix-browser.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
this.sourceMap = {
|
||||
SourceMapConsumer: require('source-map/source-map-consumer').SourceMapConsumer,
|
||||
SourceMapGenerator: require('source-map/source-map-generator').SourceMapGenerator,
|
||||
SourceNode: require('source-map/source-node').SourceNode
|
||||
};
|
||||
6
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/build/suffix-source-map.jsm
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
this.SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
|
||||
this.SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;
|
||||
this.SourceNode = require('source-map/source-node').SourceNode;
|
||||
21
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/build/suffix-utils.jsm
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
function runSourceMapTests(modName, do_throw) {
|
||||
let mod = require(modName);
|
||||
let assert = require('test/source-map/assert');
|
||||
let util = require('test/source-map/util');
|
||||
|
||||
assert.init(do_throw);
|
||||
|
||||
for (let k in mod) {
|
||||
if (/^test/.test(k)) {
|
||||
mod[k](assert, util);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
this.runSourceMapTests = runSourceMapTests;
|
||||
8
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/build/test-prefix.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
Components.utils.import('resource://test/Utils.jsm');
|
||||
3
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/build/test-suffix.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
function run_test() {
|
||||
runSourceMapTests('{THIS_MODULE}', do_throw);
|
||||
}
|
||||
8
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
* Copyright 2009-2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE.txt or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
exports.SourceMapGenerator = require('./source-map/source-map-generator').SourceMapGenerator;
|
||||
exports.SourceMapConsumer = require('./source-map/source-map-consumer').SourceMapConsumer;
|
||||
exports.SourceNode = require('./source-map/source-node').SourceNode;
|
||||
97
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/array-set.js
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
|
||||
/**
|
||||
* A data structure which is a combination of an array and a set. Adding a new
|
||||
* member is O(1), testing for membership is O(1), and finding the index of an
|
||||
* element is O(1). Removing elements from the set is not supported. Only
|
||||
* strings are supported for membership.
|
||||
*/
|
||||
function ArraySet() {
|
||||
this._array = [];
|
||||
this._set = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Static method for creating ArraySet instances from an existing array.
|
||||
*/
|
||||
ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {
|
||||
var set = new ArraySet();
|
||||
for (var i = 0, len = aArray.length; i < len; i++) {
|
||||
set.add(aArray[i], aAllowDuplicates);
|
||||
}
|
||||
return set;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add the given string to this set.
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {
|
||||
var isDuplicate = this.has(aStr);
|
||||
var idx = this._array.length;
|
||||
if (!isDuplicate || aAllowDuplicates) {
|
||||
this._array.push(aStr);
|
||||
}
|
||||
if (!isDuplicate) {
|
||||
this._set[util.toSetString(aStr)] = idx;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Is the given string a member of this set?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.has = function ArraySet_has(aStr) {
|
||||
return Object.prototype.hasOwnProperty.call(this._set,
|
||||
util.toSetString(aStr));
|
||||
};
|
||||
|
||||
/**
|
||||
* What is the index of the given string in the array?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {
|
||||
if (this.has(aStr)) {
|
||||
return this._set[util.toSetString(aStr)];
|
||||
}
|
||||
throw new Error('"' + aStr + '" is not in the set.');
|
||||
};
|
||||
|
||||
/**
|
||||
* What is the element at the given index?
|
||||
*
|
||||
* @param Number aIdx
|
||||
*/
|
||||
ArraySet.prototype.at = function ArraySet_at(aIdx) {
|
||||
if (aIdx >= 0 && aIdx < this._array.length) {
|
||||
return this._array[aIdx];
|
||||
}
|
||||
throw new Error('No element indexed by ' + aIdx);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the array representation of this set (which has the proper indices
|
||||
* indicated by indexOf). Note that this is a copy of the internal array used
|
||||
* for storing the members so that no one can mess with internal state.
|
||||
*/
|
||||
ArraySet.prototype.toArray = function ArraySet_toArray() {
|
||||
return this._array.slice();
|
||||
};
|
||||
|
||||
exports.ArraySet = ArraySet;
|
||||
|
||||
});
|
||||
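A brief usage sketch of the ArraySet just defined, assuming it is loaded the same way the other lib files load it:

    var ArraySet = require('./array-set').ArraySet;

    var set = ArraySet.fromArray(['one.js', 'two.js']);
    set.add('three.js');
    set.add('one.js');        // duplicate: ignored unless aAllowDuplicates is set

    set.has('two.js');        // true
    set.indexOf('three.js');  // 2
    set.at(0);                // 'one.js'
    set.toArray();            // ['one.js', 'two.js', 'three.js']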
144
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/base64-vlq.js
generated
vendored
Normal file
@@ -0,0 +1,144 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*
|
||||
* Based on the Base 64 VLQ implementation in Closure Compiler:
|
||||
* https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
|
||||
*
|
||||
* Copyright 2011 The Closure Compiler Authors. All rights reserved.
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following
|
||||
* disclaimer in the documentation and/or other materials provided
|
||||
* with the distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64 = require('./base64');
|
||||
|
||||
// A single base 64 digit can contain 6 bits of data. For the base 64 variable
|
||||
// length quantities we use in the source map spec, the first bit is the sign,
|
||||
// the next four bits are the actual value, and the 6th bit is the
|
||||
// continuation bit. The continuation bit tells us whether there are more
|
||||
// digits in this value following this digit.
|
||||
//
|
||||
// Continuation
|
||||
// | Sign
|
||||
// | |
|
||||
// V V
|
||||
// 101011
|
||||
|
||||
var VLQ_BASE_SHIFT = 5;
|
||||
|
||||
// binary: 100000
|
||||
var VLQ_BASE = 1 << VLQ_BASE_SHIFT;
|
||||
|
||||
// binary: 011111
|
||||
var VLQ_BASE_MASK = VLQ_BASE - 1;
|
||||
|
||||
// binary: 100000
|
||||
var VLQ_CONTINUATION_BIT = VLQ_BASE;
|
||||
|
||||
/**
|
||||
 * Converts from a two's complement value to a value where the sign bit
|
||||
* is placed in the least significant bit. For example, as decimals:
|
||||
* 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
|
||||
* 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
|
||||
*/
|
||||
function toVLQSigned(aValue) {
|
||||
return aValue < 0
|
||||
? ((-aValue) << 1) + 1
|
||||
: (aValue << 1) + 0;
|
||||
}
|
||||
|
||||
/**
|
||||
 * Converts to a two's complement value from a value where the sign bit
|
||||
* is placed in the least significant bit. For example, as decimals:
|
||||
* 2 (10 binary) becomes 1, 3 (11 binary) becomes -1
|
||||
* 4 (100 binary) becomes 2, 5 (101 binary) becomes -2
|
||||
*/
|
||||
function fromVLQSigned(aValue) {
|
||||
var isNegative = (aValue & 1) === 1;
|
||||
var shifted = aValue >> 1;
|
||||
return isNegative
|
||||
? -shifted
|
||||
: shifted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the base 64 VLQ encoded value.
|
||||
*/
|
||||
exports.encode = function base64VLQ_encode(aValue) {
|
||||
var encoded = "";
|
||||
var digit;
|
||||
|
||||
var vlq = toVLQSigned(aValue);
|
||||
|
||||
do {
|
||||
digit = vlq & VLQ_BASE_MASK;
|
||||
vlq >>>= VLQ_BASE_SHIFT;
|
||||
if (vlq > 0) {
|
||||
// There are still more digits in this value, so we must make sure the
|
||||
// continuation bit is marked.
|
||||
digit |= VLQ_CONTINUATION_BIT;
|
||||
}
|
||||
encoded += base64.encode(digit);
|
||||
} while (vlq > 0);
|
||||
|
||||
return encoded;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes the next base 64 VLQ value from the given string and returns the
|
||||
* value and the rest of the string.
|
||||
*/
|
||||
exports.decode = function base64VLQ_decode(aStr) {
|
||||
var i = 0;
|
||||
var strLen = aStr.length;
|
||||
var result = 0;
|
||||
var shift = 0;
|
||||
var continuation, digit;
|
||||
|
||||
do {
|
||||
if (i >= strLen) {
|
||||
throw new Error("Expected more digits in base 64 VLQ value.");
|
||||
}
|
||||
digit = base64.decode(aStr.charAt(i++));
|
||||
continuation = !!(digit & VLQ_CONTINUATION_BIT);
|
||||
digit &= VLQ_BASE_MASK;
|
||||
result = result + (digit << shift);
|
||||
shift += VLQ_BASE_SHIFT;
|
||||
} while (continuation);
|
||||
|
||||
return {
|
||||
value: fromVLQSigned(result),
|
||||
rest: aStr.slice(i)
|
||||
};
|
||||
};
|
||||
|
||||
});
|
||||
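A short worked sketch of the VLQ scheme above (values traced by hand from the code; the require path mirrors the other lib files):

    var base64VLQ = require('./base64-vlq');

    // The sign ends up in the least significant bit, so small magnitudes
    // still fit in a single base 64 digit.
    base64VLQ.encode(0);    // "A"   (0  -> 0)
    base64VLQ.encode(1);    // "C"   (1  -> 2)
    base64VLQ.encode(-1);   // "D"   (-1 -> 3)
    base64VLQ.encode(16);   // "gB"  (16 -> 32, needs a continuation digit)

    base64VLQ.decode('gB'); // { value: 16, rest: '' }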
42
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/base64.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var charToIntMap = {};
|
||||
var intToCharMap = {};
|
||||
|
||||
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
|
||||
.split('')
|
||||
.forEach(function (ch, index) {
|
||||
charToIntMap[ch] = index;
|
||||
intToCharMap[index] = ch;
|
||||
});
|
||||
|
||||
/**
|
||||
* Encode an integer in the range of 0 to 63 to a single base 64 digit.
|
||||
*/
|
||||
exports.encode = function base64_encode(aNumber) {
|
||||
if (aNumber in intToCharMap) {
|
||||
return intToCharMap[aNumber];
|
||||
}
|
||||
throw new TypeError("Must be between 0 and 63: " + aNumber);
|
||||
};
|
||||
|
||||
/**
|
||||
* Decode a single base 64 digit to an integer.
|
||||
*/
|
||||
exports.decode = function base64_decode(aChar) {
|
||||
if (aChar in charToIntMap) {
|
||||
return charToIntMap[aChar];
|
||||
}
|
||||
throw new TypeError("Not a valid base 64 digit: " + aChar);
|
||||
};
|
||||
|
||||
});
|
||||
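The mapping above is the standard base64 alphabet, so a quick sanity check looks like this (sketch; path as used by base64-vlq.js):

    var base64 = require('./base64');

    base64.encode(0);   // 'A'
    base64.encode(26);  // 'a'
    base64.encode(63);  // '/'
    base64.decode('g'); // 32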
81
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/binary-search.js
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
/**
|
||||
* Recursive implementation of binary search.
|
||||
*
|
||||
* @param aLow Indices here and lower do not contain the needle.
|
||||
* @param aHigh Indices here and higher do not contain the needle.
|
||||
* @param aNeedle The element being searched for.
|
||||
* @param aHaystack The non-empty array being searched.
|
||||
* @param aCompare Function which takes two elements and returns -1, 0, or 1.
|
||||
*/
|
||||
function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare) {
|
||||
// This function terminates when one of the following is true:
|
||||
//
|
||||
// 1. We find the exact element we are looking for.
|
||||
//
|
||||
// 2. We did not find the exact element, but we can return the next
|
||||
// closest element that is less than that element.
|
||||
//
|
||||
// 3. We did not find the exact element, and there is no next-closest
|
||||
// element which is less than the one we are searching for, so we
|
||||
// return null.
|
||||
var mid = Math.floor((aHigh - aLow) / 2) + aLow;
|
||||
var cmp = aCompare(aNeedle, aHaystack[mid], true);
|
||||
if (cmp === 0) {
|
||||
// Found the element we are looking for.
|
||||
return aHaystack[mid];
|
||||
}
|
||||
else if (cmp > 0) {
|
||||
// aHaystack[mid] is greater than our needle.
|
||||
if (aHigh - mid > 1) {
|
||||
// The element is in the upper half.
|
||||
return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare);
|
||||
}
|
||||
// We did not find an exact match, return the next closest one
|
||||
// (termination case 2).
|
||||
return aHaystack[mid];
|
||||
}
|
||||
else {
|
||||
// aHaystack[mid] is less than our needle.
|
||||
if (mid - aLow > 1) {
|
||||
// The element is in the lower half.
|
||||
return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare);
|
||||
}
|
||||
// The exact needle element was not found in this haystack. Determine if
|
||||
// we are in termination case (2) or (3) and return the appropriate thing.
|
||||
return aLow < 0
|
||||
? null
|
||||
: aHaystack[aLow];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This is an implementation of binary search which will always try and return
|
||||
* the next lowest value checked if there is no exact hit. This is because
|
||||
* mappings between original and generated line/col pairs are single points,
|
||||
* and there is an implicit region between each of them, so a miss just means
|
||||
* that you aren't on the very start of a region.
|
||||
*
|
||||
* @param aNeedle The element you are looking for.
|
||||
* @param aHaystack The array that is being searched.
|
||||
* @param aCompare A function which takes the needle and an element in the
|
||||
* array and returns -1, 0, or 1 depending on whether the needle is less
|
||||
* than, equal to, or greater than the element, respectively.
|
||||
*/
|
||||
exports.search = function search(aNeedle, aHaystack, aCompare) {
|
||||
return aHaystack.length > 0
|
||||
? recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, aCompare)
|
||||
: null;
|
||||
};
|
||||
|
||||
});
|
||||
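A small sketch of the lower-bound behaviour described above, using a plain numeric comparator (the extra third argument passed to aCompare is simply ignored here):

    var search = require('./binary-search').search;

    function cmp(a, b) { return a - b; }

    search(8, [2, 5, 8], cmp); // 8    (exact hit)
    search(6, [2, 5, 8], cmp); // 5    (closest element below the needle)
    search(1, [2, 5, 8], cmp); // null (nothing below the needle)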
478
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-map-consumer.js
generated
vendored
Normal file
@@ -0,0 +1,478 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
var binarySearch = require('./binary-search');
|
||||
var ArraySet = require('./array-set').ArraySet;
|
||||
var base64VLQ = require('./base64-vlq');
|
||||
|
||||
/**
|
||||
* A SourceMapConsumer instance represents a parsed source map which we can
|
||||
* query for information about the original file positions by giving it a file
|
||||
* position in the generated source.
|
||||
*
|
||||
* The only parameter is the raw source map (either as a JSON string, or
|
||||
* already parsed to an object). According to the spec, source maps have the
|
||||
* following attributes:
|
||||
*
|
||||
* - version: Which version of the source map spec this map is following.
|
||||
* - sources: An array of URLs to the original source files.
|
||||
 *   - names: An array of identifiers which can be referenced by individual mappings.
|
||||
* - sourceRoot: Optional. The URL root from which all sources are relative.
|
||||
* - sourcesContent: Optional. An array of contents of the original source files.
|
||||
* - mappings: A string of base64 VLQs which contain the actual mappings.
|
||||
* - file: Optional. The generated file this source map is associated with.
|
||||
*
|
||||
* Here is an example source map, taken from the source map spec[0]:
|
||||
*
|
||||
* {
|
||||
* version : 3,
|
||||
* file: "out.js",
|
||||
* sourceRoot : "",
|
||||
* sources: ["foo.js", "bar.js"],
|
||||
* names: ["src", "maps", "are", "fun"],
|
||||
* mappings: "AA,AB;;ABCDE;"
|
||||
* }
|
||||
*
|
||||
* [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#
|
||||
*/
|
||||
function SourceMapConsumer(aSourceMap) {
|
||||
var sourceMap = aSourceMap;
|
||||
if (typeof aSourceMap === 'string') {
|
||||
sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
|
||||
}
|
||||
|
||||
var version = util.getArg(sourceMap, 'version');
|
||||
var sources = util.getArg(sourceMap, 'sources');
|
||||
// Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which
|
||||
// requires the array) to play nice here.
|
||||
var names = util.getArg(sourceMap, 'names', []);
|
||||
var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);
|
||||
var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);
|
||||
var mappings = util.getArg(sourceMap, 'mappings');
|
||||
var file = util.getArg(sourceMap, 'file', null);
|
||||
|
||||
// Once again, Sass deviates from the spec and supplies the version as a
|
||||
// string rather than a number, so we use loose equality checking here.
|
||||
if (version != this._version) {
|
||||
throw new Error('Unsupported version: ' + version);
|
||||
}
|
||||
|
||||
// Pass `true` below to allow duplicate names and sources. While source maps
|
||||
// are intended to be compressed and deduplicated, the TypeScript compiler
|
||||
// sometimes generates source maps with duplicates in them. See Github issue
|
||||
// #72 and bugzil.la/889492.
|
||||
this._names = ArraySet.fromArray(names, true);
|
||||
this._sources = ArraySet.fromArray(sources, true);
|
||||
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sourcesContent = sourcesContent;
|
||||
this._mappings = mappings;
|
||||
this.file = file;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a SourceMapConsumer from a SourceMapGenerator.
|
||||
*
|
||||
* @param SourceMapGenerator aSourceMap
|
||||
* The source map that will be consumed.
|
||||
* @returns SourceMapConsumer
|
||||
*/
|
||||
SourceMapConsumer.fromSourceMap =
|
||||
function SourceMapConsumer_fromSourceMap(aSourceMap) {
|
||||
var smc = Object.create(SourceMapConsumer.prototype);
|
||||
|
||||
smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);
|
||||
smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);
|
||||
smc.sourceRoot = aSourceMap._sourceRoot;
|
||||
smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),
|
||||
smc.sourceRoot);
|
||||
smc.file = aSourceMap._file;
|
||||
|
||||
smc.__generatedMappings = aSourceMap._mappings.slice()
|
||||
.sort(util.compareByGeneratedPositions);
|
||||
smc.__originalMappings = aSourceMap._mappings.slice()
|
||||
.sort(util.compareByOriginalPositions);
|
||||
|
||||
return smc;
|
||||
};
|
||||
|
||||
/**
|
||||
* The version of the source mapping spec that we are consuming.
|
||||
*/
|
||||
SourceMapConsumer.prototype._version = 3;
|
||||
|
||||
/**
|
||||
* The list of original sources.
|
||||
*/
|
||||
Object.defineProperty(SourceMapConsumer.prototype, 'sources', {
|
||||
get: function () {
|
||||
return this._sources.toArray().map(function (s) {
|
||||
return this.sourceRoot ? util.join(this.sourceRoot, s) : s;
|
||||
}, this);
|
||||
}
|
||||
});
|
||||
|
||||
// `__generatedMappings` and `__originalMappings` are arrays that hold the
|
||||
// parsed mapping coordinates from the source map's "mappings" attribute. They
|
||||
// are lazily instantiated, accessed via the `_generatedMappings` and
|
||||
// `_originalMappings` getters respectively, and we only parse the mappings
|
||||
// and create these arrays once queried for a source location. We jump through
|
||||
// these hoops because there can be many thousands of mappings, and parsing
|
||||
// them is expensive, so we only want to do it if we must.
|
||||
//
|
||||
// Each object in the arrays is of the form:
|
||||
//
|
||||
// {
|
||||
// generatedLine: The line number in the generated code,
|
||||
// generatedColumn: The column number in the generated code,
|
||||
// source: The path to the original source file that generated this
|
||||
// chunk of code,
|
||||
// originalLine: The line number in the original source that
|
||||
// corresponds to this chunk of generated code,
|
||||
// originalColumn: The column number in the original source that
|
||||
// corresponds to this chunk of generated code,
|
||||
// name: The name of the original symbol which generated this chunk of
|
||||
// code.
|
||||
// }
|
||||
//
|
||||
// All properties except for `generatedLine` and `generatedColumn` can be
|
||||
// `null`.
|
||||
//
|
||||
// `_generatedMappings` is ordered by the generated positions.
|
||||
//
|
||||
// `_originalMappings` is ordered by the original positions.
|
||||
|
||||
SourceMapConsumer.prototype.__generatedMappings = null;
|
||||
Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {
|
||||
get: function () {
|
||||
if (!this.__generatedMappings) {
|
||||
this.__generatedMappings = [];
|
||||
this.__originalMappings = [];
|
||||
this._parseMappings(this._mappings, this.sourceRoot);
|
||||
}
|
||||
|
||||
return this.__generatedMappings;
|
||||
}
|
||||
});
|
||||
|
||||
SourceMapConsumer.prototype.__originalMappings = null;
|
||||
Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {
|
||||
get: function () {
|
||||
if (!this.__originalMappings) {
|
||||
this.__generatedMappings = [];
|
||||
this.__originalMappings = [];
|
||||
this._parseMappings(this._mappings, this.sourceRoot);
|
||||
}
|
||||
|
||||
return this.__originalMappings;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Parse the mappings in a string in to a data structure which we can easily
|
||||
* query (the ordered arrays in the `this.__generatedMappings` and
|
||||
* `this.__originalMappings` properties).
|
||||
*/
|
||||
SourceMapConsumer.prototype._parseMappings =
|
||||
function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
|
||||
var generatedLine = 1;
|
||||
var previousGeneratedColumn = 0;
|
||||
var previousOriginalLine = 0;
|
||||
var previousOriginalColumn = 0;
|
||||
var previousSource = 0;
|
||||
var previousName = 0;
|
||||
var mappingSeparator = /^[,;]/;
|
||||
var str = aStr;
|
||||
var mapping;
|
||||
var temp;
|
||||
|
||||
while (str.length > 0) {
|
||||
if (str.charAt(0) === ';') {
|
||||
generatedLine++;
|
||||
str = str.slice(1);
|
||||
previousGeneratedColumn = 0;
|
||||
}
|
||||
else if (str.charAt(0) === ',') {
|
||||
str = str.slice(1);
|
||||
}
|
||||
else {
|
||||
mapping = {};
|
||||
mapping.generatedLine = generatedLine;
|
||||
|
||||
// Generated column.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.generatedColumn = previousGeneratedColumn + temp.value;
|
||||
previousGeneratedColumn = mapping.generatedColumn;
|
||||
str = temp.rest;
|
||||
|
||||
if (str.length > 0 && !mappingSeparator.test(str.charAt(0))) {
|
||||
// Original source.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.source = this._sources.at(previousSource + temp.value);
|
||||
previousSource += temp.value;
|
||||
str = temp.rest;
|
||||
if (str.length === 0 || mappingSeparator.test(str.charAt(0))) {
|
||||
throw new Error('Found a source, but no line and column');
|
||||
}
|
||||
|
||||
// Original line.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.originalLine = previousOriginalLine + temp.value;
|
||||
previousOriginalLine = mapping.originalLine;
|
||||
// Lines are stored 0-based
|
||||
mapping.originalLine += 1;
|
||||
str = temp.rest;
|
||||
if (str.length === 0 || mappingSeparator.test(str.charAt(0))) {
|
||||
throw new Error('Found a source and line, but no column');
|
||||
}
|
||||
|
||||
// Original column.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.originalColumn = previousOriginalColumn + temp.value;
|
||||
previousOriginalColumn = mapping.originalColumn;
|
||||
str = temp.rest;
|
||||
|
||||
if (str.length > 0 && !mappingSeparator.test(str.charAt(0))) {
|
||||
// Original name.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.name = this._names.at(previousName + temp.value);
|
||||
previousName += temp.value;
|
||||
str = temp.rest;
|
||||
}
|
||||
}
|
||||
|
||||
this.__generatedMappings.push(mapping);
|
||||
if (typeof mapping.originalLine === 'number') {
|
||||
this.__originalMappings.push(mapping);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.__generatedMappings.sort(util.compareByGeneratedPositions);
|
||||
this.__originalMappings.sort(util.compareByOriginalPositions);
|
||||
};
|
||||
|
||||
/**
|
||||
* Find the mapping that best matches the hypothetical "needle" mapping that
|
||||
* we are searching for in the given "haystack" of mappings.
|
||||
*/
|
||||
SourceMapConsumer.prototype._findMapping =
|
||||
function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,
|
||||
aColumnName, aComparator) {
|
||||
// To return the position we are searching for, we must first find the
|
||||
// mapping for the given position and then return the opposite position it
|
||||
// points to. Because the mappings are sorted, we can use binary search to
|
||||
// find the best mapping.
|
||||
|
||||
if (aNeedle[aLineName] <= 0) {
|
||||
throw new TypeError('Line must be greater than or equal to 1, got '
|
||||
+ aNeedle[aLineName]);
|
||||
}
|
||||
if (aNeedle[aColumnName] < 0) {
|
||||
throw new TypeError('Column must be greater than or equal to 0, got '
|
||||
+ aNeedle[aColumnName]);
|
||||
}
|
||||
|
||||
return binarySearch.search(aNeedle, aMappings, aComparator);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the original source, line, and column information for the generated
|
||||
* source's line and column positions provided. The only argument is an object
|
||||
* with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source.
|
||||
* - column: The column number in the generated source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - source: The original source file, or null.
|
||||
* - line: The line number in the original source, or null.
|
||||
* - column: The column number in the original source, or null.
|
||||
* - name: The original identifier, or null.
|
||||
*/
|
||||
SourceMapConsumer.prototype.originalPositionFor =
|
||||
function SourceMapConsumer_originalPositionFor(aArgs) {
|
||||
var needle = {
|
||||
generatedLine: util.getArg(aArgs, 'line'),
|
||||
generatedColumn: util.getArg(aArgs, 'column')
|
||||
};
|
||||
|
||||
var mapping = this._findMapping(needle,
|
||||
this._generatedMappings,
|
||||
"generatedLine",
|
||||
"generatedColumn",
|
||||
util.compareByGeneratedPositions);
|
||||
|
||||
if (mapping && mapping.generatedLine === needle.generatedLine) {
|
||||
var source = util.getArg(mapping, 'source', null);
|
||||
if (source && this.sourceRoot) {
|
||||
source = util.join(this.sourceRoot, source);
|
||||
}
|
||||
return {
|
||||
source: source,
|
||||
line: util.getArg(mapping, 'originalLine', null),
|
||||
column: util.getArg(mapping, 'originalColumn', null),
|
||||
name: util.getArg(mapping, 'name', null)
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
source: null,
|
||||
line: null,
|
||||
column: null,
|
||||
name: null
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the original source content. The only argument is the url of the
|
||||
* original source file. Returns null if no original source content is
|
||||
* availible.
|
||||
*/
|
||||
SourceMapConsumer.prototype.sourceContentFor =
|
||||
function SourceMapConsumer_sourceContentFor(aSource) {
|
||||
if (!this.sourcesContent) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (this.sourceRoot) {
|
||||
aSource = util.relative(this.sourceRoot, aSource);
|
||||
}
|
||||
|
||||
if (this._sources.has(aSource)) {
|
||||
return this.sourcesContent[this._sources.indexOf(aSource)];
|
||||
}
|
||||
|
||||
var url;
|
||||
if (this.sourceRoot
|
||||
&& (url = util.urlParse(this.sourceRoot))) {
|
||||
// XXX: file:// URIs and absolute paths lead to unexpected behavior for
|
||||
// many users. We can help them out when they expect file:// URIs to
|
||||
// behave like it would if they were running a local HTTP server. See
|
||||
// https://bugzilla.mozilla.org/show_bug.cgi?id=885597.
|
||||
var fileUriAbsPath = aSource.replace(/^file:\/\//, "");
|
||||
if (url.scheme == "file"
|
||||
&& this._sources.has(fileUriAbsPath)) {
|
||||
return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]
|
||||
}
|
||||
|
||||
if ((!url.path || url.path == "/")
|
||||
&& this._sources.has("/" + aSource)) {
|
||||
return this.sourcesContent[this._sources.indexOf("/" + aSource)];
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error('"' + aSource + '" is not in the SourceMap.');
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the generated line and column information for the original source,
|
||||
* line, and column positions provided. The only argument is an object with
|
||||
* the following properties:
|
||||
*
|
||||
* - source: The filename of the original source.
|
||||
* - line: The line number in the original source.
|
||||
* - column: The column number in the original source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source, or null.
|
||||
* - column: The column number in the generated source, or null.
|
||||
*/
|
||||
SourceMapConsumer.prototype.generatedPositionFor =
|
||||
function SourceMapConsumer_generatedPositionFor(aArgs) {
|
||||
var needle = {
|
||||
source: util.getArg(aArgs, 'source'),
|
||||
originalLine: util.getArg(aArgs, 'line'),
|
||||
originalColumn: util.getArg(aArgs, 'column')
|
||||
};
|
||||
|
||||
if (this.sourceRoot) {
|
||||
needle.source = util.relative(this.sourceRoot, needle.source);
|
||||
}
|
||||
|
||||
var mapping = this._findMapping(needle,
|
||||
this._originalMappings,
|
||||
"originalLine",
|
||||
"originalColumn",
|
||||
util.compareByOriginalPositions);
|
||||
|
||||
if (mapping) {
|
||||
return {
|
||||
line: util.getArg(mapping, 'generatedLine', null),
|
||||
column: util.getArg(mapping, 'generatedColumn', null)
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
line: null,
|
||||
column: null
|
||||
};
|
||||
};
|
||||
|
||||
SourceMapConsumer.GENERATED_ORDER = 1;
|
||||
SourceMapConsumer.ORIGINAL_ORDER = 2;
|
||||
|
||||
/**
|
||||
* Iterate over each mapping between an original source/line/column and a
|
||||
* generated line/column in this source map.
|
||||
*
|
||||
* @param Function aCallback
|
||||
* The function that is called with each mapping.
|
||||
* @param Object aContext
|
||||
* Optional. If specified, this object will be the value of `this` every
|
||||
* time that `aCallback` is called.
|
||||
* @param aOrder
|
||||
* Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
* `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
|
||||
* iterate over the mappings sorted by the generated file's line/column
|
||||
* order or the original's source/line/column order, respectively. Defaults to
|
||||
* `SourceMapConsumer.GENERATED_ORDER`.
|
||||
*/
|
||||
SourceMapConsumer.prototype.eachMapping =
|
||||
function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {
|
||||
var context = aContext || null;
|
||||
var order = aOrder || SourceMapConsumer.GENERATED_ORDER;
|
||||
|
||||
var mappings;
|
||||
switch (order) {
|
||||
case SourceMapConsumer.GENERATED_ORDER:
|
||||
mappings = this._generatedMappings;
|
||||
break;
|
||||
case SourceMapConsumer.ORIGINAL_ORDER:
|
||||
mappings = this._originalMappings;
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unknown order of iteration.");
|
||||
}
|
||||
|
||||
var sourceRoot = this.sourceRoot;
|
||||
mappings.map(function (mapping) {
|
||||
var source = mapping.source;
|
||||
if (source && sourceRoot) {
|
||||
source = util.join(sourceRoot, source);
|
||||
}
|
||||
return {
|
||||
source: source,
|
||||
generatedLine: mapping.generatedLine,
|
||||
generatedColumn: mapping.generatedColumn,
|
||||
originalLine: mapping.originalLine,
|
||||
originalColumn: mapping.originalColumn,
|
||||
name: mapping.name
|
||||
};
|
||||
}).forEach(aCallback, context);
|
||||
};
|
||||
|
||||
exports.SourceMapConsumer = SourceMapConsumer;
|
||||
|
||||
});
|
||||
400
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-map-generator.js
generated
vendored
Normal file
400
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-map-generator.js
generated
vendored
Normal file
@@ -0,0 +1,400 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64VLQ = require('./base64-vlq');
|
||||
var util = require('./util');
|
||||
var ArraySet = require('./array-set').ArraySet;
|
||||
|
||||
/**
|
||||
* An instance of the SourceMapGenerator represents a source map which is
|
||||
* being built incrementally. You may pass an object with the following
|
||||
* properties:
|
||||
*
|
||||
* - file: The filename of the generated source.
|
||||
* - sourceRoot: A root for all relative URLs in this source map.
|
||||
*/
|
||||
function SourceMapGenerator(aArgs) {
|
||||
if (!aArgs) {
|
||||
aArgs = {};
|
||||
}
|
||||
this._file = util.getArg(aArgs, 'file', null);
|
||||
this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);
|
||||
this._sources = new ArraySet();
|
||||
this._names = new ArraySet();
|
||||
this._mappings = [];
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
|
||||
SourceMapGenerator.prototype._version = 3;
|
||||
|
||||
/**
|
||||
* Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||
*
|
||||
* @param aSourceMapConsumer The SourceMap.
|
||||
*/
|
||||
SourceMapGenerator.fromSourceMap =
|
||||
function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {
|
||||
var sourceRoot = aSourceMapConsumer.sourceRoot;
|
||||
var generator = new SourceMapGenerator({
|
||||
file: aSourceMapConsumer.file,
|
||||
sourceRoot: sourceRoot
|
||||
});
|
||||
aSourceMapConsumer.eachMapping(function (mapping) {
|
||||
var newMapping = {
|
||||
generated: {
|
||||
line: mapping.generatedLine,
|
||||
column: mapping.generatedColumn
|
||||
}
|
||||
};
|
||||
|
||||
if (mapping.source) {
|
||||
newMapping.source = mapping.source;
|
||||
if (sourceRoot) {
|
||||
newMapping.source = util.relative(sourceRoot, newMapping.source);
|
||||
}
|
||||
|
||||
newMapping.original = {
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
};
|
||||
|
||||
if (mapping.name) {
|
||||
newMapping.name = mapping.name;
|
||||
}
|
||||
}
|
||||
|
||||
generator.addMapping(newMapping);
|
||||
});
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content) {
|
||||
generator.setSourceContent(sourceFile, content);
|
||||
}
|
||||
});
|
||||
return generator;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a single mapping from original source line and column to the generated
|
||||
* source's line and column for this source map being created. The mapping
|
||||
* object should have the following properties:
|
||||
*
|
||||
* - generated: An object with the generated line and column positions.
|
||||
* - original: An object with the original line and column positions.
|
||||
* - source: The original source file (relative to the sourceRoot).
|
||||
* - name: An optional original token name for this mapping.
|
||||
*/
|
||||
SourceMapGenerator.prototype.addMapping =
|
||||
function SourceMapGenerator_addMapping(aArgs) {
|
||||
var generated = util.getArg(aArgs, 'generated');
|
||||
var original = util.getArg(aArgs, 'original', null);
|
||||
var source = util.getArg(aArgs, 'source', null);
|
||||
var name = util.getArg(aArgs, 'name', null);
|
||||
|
||||
this._validateMapping(generated, original, source, name);
|
||||
|
||||
if (source && !this._sources.has(source)) {
|
||||
this._sources.add(source);
|
||||
}
|
||||
|
||||
if (name && !this._names.has(name)) {
|
||||
this._names.add(name);
|
||||
}
|
||||
|
||||
this._mappings.push({
|
||||
generatedLine: generated.line,
|
||||
generatedColumn: generated.column,
|
||||
originalLine: original != null && original.line,
|
||||
originalColumn: original != null && original.column,
|
||||
source: source,
|
||||
name: name
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the source content for a source file.
|
||||
*/
|
||||
SourceMapGenerator.prototype.setSourceContent =
|
||||
function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {
|
||||
var source = aSourceFile;
|
||||
if (this._sourceRoot) {
|
||||
source = util.relative(this._sourceRoot, source);
|
||||
}
|
||||
|
||||
if (aSourceContent !== null) {
|
||||
// Add the source content to the _sourcesContents map.
|
||||
// Create a new _sourcesContents map if the property is null.
|
||||
if (!this._sourcesContents) {
|
||||
this._sourcesContents = {};
|
||||
}
|
||||
this._sourcesContents[util.toSetString(source)] = aSourceContent;
|
||||
} else {
|
||||
// Remove the source file from the _sourcesContents map.
|
||||
// If the _sourcesContents map is empty, set the property to null.
|
||||
delete this._sourcesContents[util.toSetString(source)];
|
||||
if (Object.keys(this._sourcesContents).length === 0) {
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Applies the mappings of a sub-source-map for a specific source file to the
|
||||
* source map being generated. Each mapping to the supplied source file is
|
||||
* rewritten using the supplied source map. Note: The resolution for the
|
||||
* resulting mappings is the minimium of this map and the supplied map.
|
||||
*
|
||||
* @param aSourceMapConsumer The source map to be applied.
|
||||
* @param aSourceFile Optional. The filename of the source file.
|
||||
* If omitted, SourceMapConsumer's file property will be used.
|
||||
* @param aSourceMapPath Optional. The dirname of the path to the source map
|
||||
* to be applied. If relative, it is relative to the SourceMapConsumer.
|
||||
* This parameter is needed when the two source maps aren't in the same
|
||||
* directory, and the source map to be applied contains relative source
|
||||
* paths. If so, those relative source paths need to be rewritten
|
||||
* relative to the SourceMapGenerator.
|
||||
*/
|
||||
SourceMapGenerator.prototype.applySourceMap =
|
||||
function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {
|
||||
// If aSourceFile is omitted, we will use the file property of the SourceMap
|
||||
if (!aSourceFile) {
|
||||
if (!aSourceMapConsumer.file) {
|
||||
throw new Error(
|
||||
'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +
|
||||
'or the source map\'s "file" property. Both were omitted.'
|
||||
);
|
||||
}
|
||||
aSourceFile = aSourceMapConsumer.file;
|
||||
}
|
||||
var sourceRoot = this._sourceRoot;
|
||||
// Make "aSourceFile" relative if an absolute Url is passed.
|
||||
if (sourceRoot) {
|
||||
aSourceFile = util.relative(sourceRoot, aSourceFile);
|
||||
}
|
||||
// Applying the SourceMap can add and remove items from the sources and
|
||||
// the names array.
|
||||
var newSources = new ArraySet();
|
||||
var newNames = new ArraySet();
|
||||
|
||||
// Find mappings for the "aSourceFile"
|
||||
this._mappings.forEach(function (mapping) {
|
||||
if (mapping.source === aSourceFile && mapping.originalLine) {
|
||||
// Check if it can be mapped by the source map, then update the mapping.
|
||||
var original = aSourceMapConsumer.originalPositionFor({
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
});
|
||||
if (original.source !== null) {
|
||||
// Copy mapping
|
||||
mapping.source = original.source;
|
||||
if (aSourceMapPath) {
|
||||
mapping.source = util.join(aSourceMapPath, mapping.source)
|
||||
}
|
||||
if (sourceRoot) {
|
||||
mapping.source = util.relative(sourceRoot, mapping.source);
|
||||
}
|
||||
mapping.originalLine = original.line;
|
||||
mapping.originalColumn = original.column;
|
||||
if (original.name !== null && mapping.name !== null) {
|
||||
// Only use the identifier name if it's an identifier
|
||||
// in both SourceMaps
|
||||
mapping.name = original.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var source = mapping.source;
|
||||
if (source && !newSources.has(source)) {
|
||||
newSources.add(source);
|
||||
}
|
||||
|
||||
var name = mapping.name;
|
||||
if (name && !newNames.has(name)) {
|
||||
newNames.add(name);
|
||||
}
|
||||
|
||||
}, this);
|
||||
this._sources = newSources;
|
||||
this._names = newNames;
|
||||
|
||||
// Copy sourcesContents of applied map.
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content) {
|
||||
if (aSourceMapPath) {
|
||||
sourceFile = util.join(aSourceMapPath, sourceFile);
|
||||
}
|
||||
if (sourceRoot) {
|
||||
sourceFile = util.relative(sourceRoot, sourceFile);
|
||||
}
|
||||
this.setSourceContent(sourceFile, content);
|
||||
}
|
||||
}, this);
|
||||
};
|
||||
|
||||
/**
|
||||
* A mapping can have one of the three levels of data:
|
||||
*
|
||||
* 1. Just the generated position.
|
||||
* 2. The Generated position, original position, and original source.
|
||||
* 3. Generated and original position, original source, as well as a name
|
||||
* token.
|
||||
*
|
||||
* To maintain consistency, we validate that any new mapping being added falls
|
||||
* in to one of these categories.
|
||||
*/
|
||||
SourceMapGenerator.prototype._validateMapping =
|
||||
function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,
|
||||
aName) {
|
||||
if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& !aOriginal && !aSource && !aName) {
|
||||
// Case 1.
|
||||
return;
|
||||
}
|
||||
else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
||||
&& aOriginal && 'line' in aOriginal && 'column' in aOriginal
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& aOriginal.line > 0 && aOriginal.column >= 0
|
||||
&& aSource) {
|
||||
// Cases 2 and 3.
|
||||
return;
|
||||
}
|
||||
else {
|
||||
throw new Error('Invalid mapping: ' + JSON.stringify({
|
||||
generated: aGenerated,
|
||||
source: aSource,
|
||||
original: aOriginal,
|
||||
name: aName
|
||||
}));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Serialize the accumulated mappings in to the stream of base 64 VLQs
|
||||
* specified by the source map format.
|
||||
*/
|
||||
SourceMapGenerator.prototype._serializeMappings =
|
||||
function SourceMapGenerator_serializeMappings() {
|
||||
var previousGeneratedColumn = 0;
|
||||
var previousGeneratedLine = 1;
|
||||
var previousOriginalColumn = 0;
|
||||
var previousOriginalLine = 0;
|
||||
var previousName = 0;
|
||||
var previousSource = 0;
|
||||
var result = '';
|
||||
var mapping;
|
||||
|
||||
// The mappings must be guaranteed to be in sorted order before we start
|
||||
// serializing them or else the generated line numbers (which are defined
|
||||
// via the ';' separators) will be all messed up. Note: it might be more
|
||||
// performant to maintain the sorting as we insert them, rather than as we
|
||||
// serialize them, but the big O is the same either way.
|
||||
this._mappings.sort(util.compareByGeneratedPositions);
|
||||
|
||||
for (var i = 0, len = this._mappings.length; i < len; i++) {
|
||||
mapping = this._mappings[i];
|
||||
|
||||
if (mapping.generatedLine !== previousGeneratedLine) {
|
||||
previousGeneratedColumn = 0;
|
||||
while (mapping.generatedLine !== previousGeneratedLine) {
|
||||
result += ';';
|
||||
previousGeneratedLine++;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (i > 0) {
|
||||
if (!util.compareByGeneratedPositions(mapping, this._mappings[i - 1])) {
|
||||
continue;
|
||||
}
|
||||
result += ',';
|
||||
}
|
||||
}
|
||||
|
||||
result += base64VLQ.encode(mapping.generatedColumn
|
||||
- previousGeneratedColumn);
|
||||
previousGeneratedColumn = mapping.generatedColumn;
|
||||
|
||||
if (mapping.source) {
|
||||
result += base64VLQ.encode(this._sources.indexOf(mapping.source)
|
||||
- previousSource);
|
||||
previousSource = this._sources.indexOf(mapping.source);
|
||||
|
||||
// lines are stored 0-based in SourceMap spec version 3
|
||||
result += base64VLQ.encode(mapping.originalLine - 1
|
||||
- previousOriginalLine);
|
||||
previousOriginalLine = mapping.originalLine - 1;
|
||||
|
||||
result += base64VLQ.encode(mapping.originalColumn
|
||||
- previousOriginalColumn);
|
||||
previousOriginalColumn = mapping.originalColumn;
|
||||
|
||||
if (mapping.name) {
|
||||
result += base64VLQ.encode(this._names.indexOf(mapping.name)
|
||||
- previousName);
|
||||
previousName = this._names.indexOf(mapping.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
SourceMapGenerator.prototype._generateSourcesContent =
|
||||
function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {
|
||||
return aSources.map(function (source) {
|
||||
if (!this._sourcesContents) {
|
||||
return null;
|
||||
}
|
||||
if (aSourceRoot) {
|
||||
source = util.relative(aSourceRoot, source);
|
||||
}
|
||||
var key = util.toSetString(source);
|
||||
return Object.prototype.hasOwnProperty.call(this._sourcesContents,
|
||||
key)
|
||||
? this._sourcesContents[key]
|
||||
: null;
|
||||
}, this);
|
||||
};
|
||||
|
||||
/**
|
||||
* Externalize the source map.
|
||||
*/
|
||||
SourceMapGenerator.prototype.toJSON =
|
||||
function SourceMapGenerator_toJSON() {
|
||||
var map = {
|
||||
version: this._version,
|
||||
file: this._file,
|
||||
sources: this._sources.toArray(),
|
||||
names: this._names.toArray(),
|
||||
mappings: this._serializeMappings()
|
||||
};
|
||||
if (this._sourceRoot) {
|
||||
map.sourceRoot = this._sourceRoot;
|
||||
}
|
||||
if (this._sourcesContents) {
|
||||
map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);
|
||||
}
|
||||
|
||||
return map;
|
||||
};
|
||||
|
||||
/**
|
||||
* Render the source map being generated to a string.
|
||||
*/
|
||||
SourceMapGenerator.prototype.toString =
|
||||
function SourceMapGenerator_toString() {
|
||||
return JSON.stringify(this);
|
||||
};
|
||||
|
||||
exports.SourceMapGenerator = SourceMapGenerator;
|
||||
|
||||
});
|
||||
400
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-node.js
generated
vendored
Normal file
400
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/source-node.js
generated
vendored
Normal file
@@ -0,0 +1,400 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;
|
||||
var util = require('./util');
|
||||
|
||||
// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other
|
||||
// operating systems these days (capturing the result).
|
||||
var REGEX_NEWLINE = /(\r?\n)/g;
|
||||
|
||||
// Matches a Windows-style newline, or any character.
|
||||
var REGEX_CHARACTER = /\r\n|[\s\S]/g;
|
||||
|
||||
/**
|
||||
* SourceNodes provide a way to abstract over interpolating/concatenating
|
||||
* snippets of generated JavaScript source code while maintaining the line and
|
||||
* column information associated with the original source code.
|
||||
*
|
||||
* @param aLine The original line number.
|
||||
* @param aColumn The original column number.
|
||||
* @param aSource The original source's filename.
|
||||
* @param aChunks Optional. An array of strings which are snippets of
|
||||
* generated JS, or other SourceNodes.
|
||||
* @param aName The original identifier.
|
||||
*/
|
||||
function SourceNode(aLine, aColumn, aSource, aChunks, aName) {
|
||||
this.children = [];
|
||||
this.sourceContents = {};
|
||||
this.line = aLine === undefined ? null : aLine;
|
||||
this.column = aColumn === undefined ? null : aColumn;
|
||||
this.source = aSource === undefined ? null : aSource;
|
||||
this.name = aName === undefined ? null : aName;
|
||||
if (aChunks != null) this.add(aChunks);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||
*
|
||||
* @param aGeneratedCode The generated code
|
||||
* @param aSourceMapConsumer The SourceMap for the generated code
|
||||
*/
|
||||
SourceNode.fromStringWithSourceMap =
|
||||
function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer) {
|
||||
// The SourceNode we want to fill with the generated code
|
||||
// and the SourceMap
|
||||
var node = new SourceNode();
|
||||
|
||||
// All even indices of this array are one line of the generated code,
|
||||
// while all odd indices are the newlines between two adjacent lines
|
||||
// (since `REGEX_NEWLINE` captures its match).
|
||||
// Processed fragments are removed from this array, by calling `shiftNextLine`.
|
||||
var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);
|
||||
var shiftNextLine = function() {
|
||||
var lineContents = remainingLines.shift();
|
||||
// The last line of a file might not have a newline.
|
||||
var newLine = remainingLines.shift() || "";
|
||||
return lineContents + newLine;
|
||||
};
|
||||
|
||||
// We need to remember the position of "remainingLines"
|
||||
var lastGeneratedLine = 1, lastGeneratedColumn = 0;
|
||||
|
||||
// The generate SourceNodes we need a code range.
|
||||
// To extract it current and last mapping is used.
|
||||
// Here we store the last mapping.
|
||||
var lastMapping = null;
|
||||
|
||||
aSourceMapConsumer.eachMapping(function (mapping) {
|
||||
if (lastMapping !== null) {
|
||||
// We add the code from "lastMapping" to "mapping":
|
||||
// First check if there is a new line in between.
|
||||
if (lastGeneratedLine < mapping.generatedLine) {
|
||||
var code = "";
|
||||
// Associate first line with "lastMapping"
|
||||
addMappingWithCode(lastMapping, shiftNextLine());
|
||||
lastGeneratedLine++;
|
||||
lastGeneratedColumn = 0;
|
||||
// The remaining code is added without mapping
|
||||
} else {
|
||||
// There is no new line in between.
|
||||
// Associate the code between "lastGeneratedColumn" and
|
||||
// "mapping.generatedColumn" with "lastMapping"
|
||||
var nextLine = remainingLines[0];
|
||||
var code = nextLine.substr(0, mapping.generatedColumn -
|
||||
lastGeneratedColumn);
|
||||
remainingLines[0] = nextLine.substr(mapping.generatedColumn -
|
||||
lastGeneratedColumn);
|
||||
lastGeneratedColumn = mapping.generatedColumn;
|
||||
addMappingWithCode(lastMapping, code);
|
||||
// No more remaining code, continue
|
||||
lastMapping = mapping;
|
||||
return;
|
||||
}
|
||||
}
|
||||
// We add the generated code until the first mapping
|
||||
// to the SourceNode without any mapping.
|
||||
// Each line is added as separate string.
|
||||
while (lastGeneratedLine < mapping.generatedLine) {
|
||||
node.add(shiftNextLine());
|
||||
lastGeneratedLine++;
|
||||
}
|
||||
if (lastGeneratedColumn < mapping.generatedColumn) {
|
||||
var nextLine = remainingLines[0];
|
||||
node.add(nextLine.substr(0, mapping.generatedColumn));
|
||||
remainingLines[0] = nextLine.substr(mapping.generatedColumn);
|
||||
lastGeneratedColumn = mapping.generatedColumn;
|
||||
}
|
||||
lastMapping = mapping;
|
||||
}, this);
|
||||
// We have processed all mappings.
|
||||
if (remainingLines.length > 0) {
|
||||
if (lastMapping) {
|
||||
// Associate the remaining code in the current line with "lastMapping"
|
||||
addMappingWithCode(lastMapping, shiftNextLine());
|
||||
}
|
||||
// and add the remaining lines without any mapping
|
||||
node.add(remainingLines.join(""));
|
||||
}
|
||||
|
||||
// Copy sourcesContent into SourceNode
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content) {
|
||||
node.setSourceContent(sourceFile, content);
|
||||
}
|
||||
});
|
||||
|
||||
return node;
|
||||
|
||||
function addMappingWithCode(mapping, code) {
|
||||
if (mapping === null || mapping.source === undefined) {
|
||||
node.add(code);
|
||||
} else {
|
||||
node.add(new SourceNode(mapping.originalLine,
|
||||
mapping.originalColumn,
|
||||
mapping.source,
|
||||
code,
|
||||
mapping.name));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
SourceNode.prototype.add = function SourceNode_add(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
aChunk.forEach(function (chunk) {
|
||||
this.add(chunk);
|
||||
}, this);
|
||||
}
|
||||
else if (aChunk instanceof SourceNode || typeof aChunk === "string") {
|
||||
if (aChunk) {
|
||||
this.children.push(aChunk);
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to the beginning of this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
for (var i = aChunk.length-1; i >= 0; i--) {
|
||||
this.prepend(aChunk[i]);
|
||||
}
|
||||
}
|
||||
else if (aChunk instanceof SourceNode || typeof aChunk === "string") {
|
||||
this.children.unshift(aChunk);
|
||||
}
|
||||
else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Walk over the tree of JS snippets in this node and its children. The
|
||||
* walking function is called once for each snippet of JS and is passed that
|
||||
* snippet and the its original associated source's line/column location.
|
||||
*
|
||||
* @param aFn The traversal function.
|
||||
*/
|
||||
SourceNode.prototype.walk = function SourceNode_walk(aFn) {
|
||||
var chunk;
|
||||
for (var i = 0, len = this.children.length; i < len; i++) {
|
||||
chunk = this.children[i];
|
||||
if (chunk instanceof SourceNode) {
|
||||
chunk.walk(aFn);
|
||||
}
|
||||
else {
|
||||
if (chunk !== '') {
|
||||
aFn(chunk, { source: this.source,
|
||||
line: this.line,
|
||||
column: this.column,
|
||||
name: this.name });
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between
|
||||
* each of `this.children`.
|
||||
*
|
||||
* @param aSep The separator.
|
||||
*/
|
||||
SourceNode.prototype.join = function SourceNode_join(aSep) {
|
||||
var newChildren;
|
||||
var i;
|
||||
var len = this.children.length;
|
||||
if (len > 0) {
|
||||
newChildren = [];
|
||||
for (i = 0; i < len-1; i++) {
|
||||
newChildren.push(this.children[i]);
|
||||
newChildren.push(aSep);
|
||||
}
|
||||
newChildren.push(this.children[i]);
|
||||
this.children = newChildren;
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Call String.prototype.replace on the very right-most source snippet. Useful
|
||||
* for trimming whitespace from the end of a source node, etc.
|
||||
*
|
||||
* @param aPattern The pattern to replace.
|
||||
* @param aReplacement The thing to replace the pattern with.
|
||||
*/
|
||||
SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {
|
||||
var lastChild = this.children[this.children.length - 1];
|
||||
if (lastChild instanceof SourceNode) {
|
||||
lastChild.replaceRight(aPattern, aReplacement);
|
||||
}
|
||||
else if (typeof lastChild === 'string') {
|
||||
this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
|
||||
}
|
||||
else {
|
||||
this.children.push(''.replace(aPattern, aReplacement));
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the source content for a source file. This will be added to the SourceMapGenerator
|
||||
* in the sourcesContent field.
|
||||
*
|
||||
* @param aSourceFile The filename of the source file
|
||||
* @param aSourceContent The content of the source file
|
||||
*/
|
||||
SourceNode.prototype.setSourceContent =
|
||||
function SourceNode_setSourceContent(aSourceFile, aSourceContent) {
|
||||
this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;
|
||||
};
|
||||
|
||||
/**
|
||||
* Walk over the tree of SourceNodes. The walking function is called for each
|
||||
* source file content and is passed the filename and source content.
|
||||
*
|
||||
* @param aFn The traversal function.
|
||||
*/
|
||||
SourceNode.prototype.walkSourceContents =
|
||||
function SourceNode_walkSourceContents(aFn) {
|
||||
for (var i = 0, len = this.children.length; i < len; i++) {
|
||||
if (this.children[i] instanceof SourceNode) {
|
||||
this.children[i].walkSourceContents(aFn);
|
||||
}
|
||||
}
|
||||
|
||||
var sources = Object.keys(this.sourceContents);
|
||||
for (var i = 0, len = sources.length; i < len; i++) {
|
||||
aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the string representation of this source node. Walks over the tree
|
||||
* and concatenates all the various snippets together to one string.
|
||||
*/
|
||||
SourceNode.prototype.toString = function SourceNode_toString() {
|
||||
var str = "";
|
||||
this.walk(function (chunk) {
|
||||
str += chunk;
|
||||
});
|
||||
return str;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the string representation of this source node along with a source
|
||||
* map.
|
||||
*/
|
||||
SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {
|
||||
var generated = {
|
||||
code: "",
|
||||
line: 1,
|
||||
column: 0
|
||||
};
|
||||
var map = new SourceMapGenerator(aArgs);
|
||||
var sourceMappingActive = false;
|
||||
var lastOriginalSource = null;
|
||||
var lastOriginalLine = null;
|
||||
var lastOriginalColumn = null;
|
||||
var lastOriginalName = null;
|
||||
this.walk(function (chunk, original) {
|
||||
generated.code += chunk;
|
||||
if (original.source !== null
|
||||
&& original.line !== null
|
||||
&& original.column !== null) {
|
||||
if(lastOriginalSource !== original.source
|
||||
|| lastOriginalLine !== original.line
|
||||
|| lastOriginalColumn !== original.column
|
||||
|| lastOriginalName !== original.name) {
|
||||
map.addMapping({
|
||||
source: original.source,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.column
|
||||
},
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
}
|
||||
lastOriginalSource = original.source;
|
||||
lastOriginalLine = original.line;
|
||||
lastOriginalColumn = original.column;
|
||||
lastOriginalName = original.name;
|
||||
sourceMappingActive = true;
|
||||
} else if (sourceMappingActive) {
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
}
|
||||
});
|
||||
lastOriginalSource = null;
|
||||
sourceMappingActive = false;
|
||||
}
|
||||
chunk.match(REGEX_CHARACTER).forEach(function (ch, idx, array) {
|
||||
if (REGEX_NEWLINE.test(ch)) {
|
||||
generated.line++;
|
||||
generated.column = 0;
|
||||
// Mappings end at eol
|
||||
if (idx + 1 === array.length) {
|
||||
lastOriginalSource = null;
|
||||
sourceMappingActive = false;
|
||||
} else if (sourceMappingActive) {
|
||||
map.addMapping({
|
||||
source: original.source,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.column
|
||||
},
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
}
|
||||
} else {
|
||||
generated.column += ch.length;
|
||||
}
|
||||
});
|
||||
});
|
||||
this.walkSourceContents(function (sourceFile, sourceContent) {
|
||||
map.setSourceContent(sourceFile, sourceContent);
|
||||
});
|
||||
|
||||
return { code: generated.code, map: map };
|
||||
};
|
||||
|
||||
exports.SourceNode = SourceNode;
|
||||
|
||||
});
|
||||
302
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/util.js
generated
vendored
Normal file
302
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/lib/source-map/util.js
generated
vendored
Normal file
@@ -0,0 +1,302 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
/**
|
||||
* This is a helper function for getting values from parameter/options
|
||||
* objects.
|
||||
*
|
||||
* @param args The object we are extracting values from
|
||||
* @param name The name of the property we are getting.
|
||||
* @param defaultValue An optional value to return if the property is missing
|
||||
* from the object. If this is not specified and the property is missing, an
|
||||
* error will be thrown.
|
||||
*/
|
||||
function getArg(aArgs, aName, aDefaultValue) {
|
||||
if (aName in aArgs) {
|
||||
return aArgs[aName];
|
||||
} else if (arguments.length === 3) {
|
||||
return aDefaultValue;
|
||||
} else {
|
||||
throw new Error('"' + aName + '" is a required argument.');
|
||||
}
|
||||
}
|
||||
exports.getArg = getArg;
|
||||
|
||||
var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/;
|
||||
var dataUrlRegexp = /^data:.+\,.+$/;
|
||||
|
||||
function urlParse(aUrl) {
|
||||
var match = aUrl.match(urlRegexp);
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
scheme: match[1],
|
||||
auth: match[2],
|
||||
host: match[3],
|
||||
port: match[4],
|
||||
path: match[5]
|
||||
};
|
||||
}
|
||||
exports.urlParse = urlParse;
|
||||
|
||||
function urlGenerate(aParsedUrl) {
|
||||
var url = '';
|
||||
if (aParsedUrl.scheme) {
|
||||
url += aParsedUrl.scheme + ':';
|
||||
}
|
||||
url += '//';
|
||||
if (aParsedUrl.auth) {
|
||||
url += aParsedUrl.auth + '@';
|
||||
}
|
||||
if (aParsedUrl.host) {
|
||||
url += aParsedUrl.host;
|
||||
}
|
||||
if (aParsedUrl.port) {
|
||||
url += ":" + aParsedUrl.port
|
||||
}
|
||||
if (aParsedUrl.path) {
|
||||
url += aParsedUrl.path;
|
||||
}
|
||||
return url;
|
||||
}
|
||||
exports.urlGenerate = urlGenerate;
|
||||
|
||||
/**
|
||||
* Normalizes a path, or the path portion of a URL:
|
||||
*
|
||||
* - Replaces consequtive slashes with one slash.
|
||||
* - Removes unnecessary '.' parts.
|
||||
* - Removes unnecessary '<dir>/..' parts.
|
||||
*
|
||||
* Based on code in the Node.js 'path' core module.
|
||||
*
|
||||
* @param aPath The path or url to normalize.
|
||||
*/
|
||||
function normalize(aPath) {
|
||||
var path = aPath;
|
||||
var url = urlParse(aPath);
|
||||
if (url) {
|
||||
if (!url.path) {
|
||||
return aPath;
|
||||
}
|
||||
path = url.path;
|
||||
}
|
||||
var isAbsolute = (path.charAt(0) === '/');
|
||||
|
||||
var parts = path.split(/\/+/);
|
||||
for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {
|
||||
part = parts[i];
|
||||
if (part === '.') {
|
||||
parts.splice(i, 1);
|
||||
} else if (part === '..') {
|
||||
up++;
|
||||
} else if (up > 0) {
|
||||
if (part === '') {
|
||||
// The first part is blank if the path is absolute. Trying to go
|
||||
// above the root is a no-op. Therefore we can remove all '..' parts
|
||||
// directly after the root.
|
||||
parts.splice(i + 1, up);
|
||||
up = 0;
|
||||
} else {
|
||||
parts.splice(i, 2);
|
||||
up--;
|
||||
}
|
||||
}
|
||||
}
|
||||
path = parts.join('/');
|
||||
|
||||
if (path === '') {
|
||||
path = isAbsolute ? '/' : '.';
|
||||
}
|
||||
|
||||
if (url) {
|
||||
url.path = path;
|
||||
return urlGenerate(url);
|
||||
}
|
||||
return path;
|
||||
}
|
||||
exports.normalize = normalize;
|
||||
|
||||
/**
|
||||
* Joins two paths/URLs.
|
||||
*
|
||||
* @param aRoot The root path or URL.
|
||||
* @param aPath The path or URL to be joined with the root.
|
||||
*
|
||||
* - If aPath is a URL or a data URI, aPath is returned, unless aPath is a
|
||||
* scheme-relative URL: Then the scheme of aRoot, if any, is prepended
|
||||
* first.
|
||||
* - Otherwise aPath is a path. If aRoot is a URL, then its path portion
|
||||
* is updated with the result and aRoot is returned. Otherwise the result
|
||||
* is returned.
|
||||
* - If aPath is absolute, the result is aPath.
|
||||
* - Otherwise the two paths are joined with a slash.
|
||||
* - Joining for example 'http://' and 'www.example.com' is also supported.
|
||||
*/
|
||||
function join(aRoot, aPath) {
|
||||
var aPathUrl = urlParse(aPath);
|
||||
var aRootUrl = urlParse(aRoot);
|
||||
if (aRootUrl) {
|
||||
aRoot = aRootUrl.path || '/';
|
||||
}
|
||||
|
||||
// `join(foo, '//www.example.org')`
|
||||
if (aPathUrl && !aPathUrl.scheme) {
|
||||
if (aRootUrl) {
|
||||
aPathUrl.scheme = aRootUrl.scheme;
|
||||
}
|
||||
return urlGenerate(aPathUrl);
|
||||
}
|
||||
|
||||
if (aPathUrl || aPath.match(dataUrlRegexp)) {
|
||||
return aPath;
|
||||
}
|
||||
|
||||
// `join('http://', 'www.example.com')`
|
||||
if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {
|
||||
aRootUrl.host = aPath;
|
||||
return urlGenerate(aRootUrl);
|
||||
}
|
||||
|
||||
var joined = aPath.charAt(0) === '/'
|
||||
? aPath
|
||||
: normalize(aRoot.replace(/\/+$/, '') + '/' + aPath);
|
||||
|
||||
if (aRootUrl) {
|
||||
aRootUrl.path = joined;
|
||||
return urlGenerate(aRootUrl);
|
||||
}
|
||||
return joined;
|
||||
}
|
||||
exports.join = join;
|
||||
|
||||
/**
|
||||
* Because behavior goes wacky when you set `__proto__` on objects, we
|
||||
* have to prefix all the strings in our set with an arbitrary character.
|
||||
*
|
||||
* See https://github.com/mozilla/source-map/pull/31 and
|
||||
* https://github.com/mozilla/source-map/issues/30
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
function toSetString(aStr) {
|
||||
return '$' + aStr;
|
||||
}
|
||||
exports.toSetString = toSetString;
|
||||
|
||||
function fromSetString(aStr) {
|
||||
return aStr.substr(1);
|
||||
}
|
||||
exports.fromSetString = fromSetString;
|
||||
|
||||
function relative(aRoot, aPath) {
|
||||
aRoot = aRoot.replace(/\/$/, '');
|
||||
|
||||
var url = urlParse(aRoot);
|
||||
if (aPath.charAt(0) == "/" && url && url.path == "/") {
|
||||
return aPath.slice(1);
|
||||
}
|
||||
|
||||
return aPath.indexOf(aRoot + '/') === 0
|
||||
? aPath.substr(aRoot.length + 1)
|
||||
: aPath;
|
||||
}
|
||||
exports.relative = relative;
|
||||
|
||||
function strcmp(aStr1, aStr2) {
|
||||
var s1 = aStr1 || "";
|
||||
var s2 = aStr2 || "";
|
||||
return (s1 > s2) - (s1 < s2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Comparator between two mappings where the original positions are compared.
|
||||
*
|
||||
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
||||
* mappings with the same original source/line/column, but different generated
|
||||
* line and column the same. Useful when searching for a mapping with a
|
||||
* stubbed out mapping.
|
||||
*/
|
||||
function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {
|
||||
var cmp;
|
||||
|
||||
cmp = strcmp(mappingA.source, mappingB.source);
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||
if (cmp || onlyCompareOriginal) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = strcmp(mappingA.name, mappingB.name);
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
};
|
||||
exports.compareByOriginalPositions = compareByOriginalPositions;
|
||||
|
||||
/**
|
||||
* Comparator between two mappings where the generated positions are
|
||||
* compared.
|
||||
*
|
||||
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
||||
* mappings with the same generated line and column, but different
|
||||
* source/name/original line and column the same. Useful when searching for a
|
||||
* mapping with a stubbed out mapping.
|
||||
*/
|
||||
function compareByGeneratedPositions(mappingA, mappingB, onlyCompareGenerated) {
|
||||
var cmp;
|
||||
|
||||
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
if (cmp || onlyCompareGenerated) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = strcmp(mappingA.source, mappingB.source);
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return strcmp(mappingA.name, mappingB.name);
|
||||
};
|
||||
exports.compareByGeneratedPositions = compareByGeneratedPositions;
|
||||
|
||||
});
|
||||
58
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/LICENSE
generated
vendored
Normal file
58
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
amdefine is released under two licenses: new BSD, and MIT. You may pick the
|
||||
license that best suits your development needs. The text of both licenses are
|
||||
provided below.
|
||||
|
||||
|
||||
The "New" BSD License:
|
||||
----------------------
|
||||
|
||||
Copyright (c) 2011, The Dojo Foundation
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
* Neither the name of the Dojo Foundation nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
|
||||
MIT License
|
||||
-----------
|
||||
|
||||
Copyright (c) 2011, The Dojo Foundation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
171
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/README.md
generated
vendored
Normal file
171
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/README.md
generated
vendored
Normal file
@@ -0,0 +1,171 @@
# amdefine

A module that can be used to implement AMD's define() in Node. This allows you to code to the AMD API and have the module work in node programs without requiring those other programs to use AMD.

## Usage

**1)** Update your package.json to indicate amdefine as a dependency:

```javascript
"dependencies": {
    "amdefine": ">=0.1.0"
}
```

Then run `npm install` to get amdefine into your project.

**2)** At the top of each module that uses define(), place this code:

```javascript
if (typeof define !== 'function') { var define = require('amdefine')(module) }
```

**Only use these snippets** when loading amdefine. If you preserve the basic structure, with the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).

You can add spaces, line breaks and even require amdefine with a local path, but keep the rest of the structure to get the stripping behavior.

As you may know, because `if` statements in JavaScript don't have their own scope, the var declaration in the above snippet is made whether the `if` expression is truthy or not. If RequireJS is loaded, the declaration is superfluous because `define` is already declared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var` declarations of the same variable in the same scope gracefully.

If you want to deliver amdefine.js with your code rather than specifying it as a dependency with npm, then just download the latest release and refer to it using a relative path:

[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)

### amdefine/intercept

Consider this very experimental.

Instead of pasting the piece of text for the amdefine setup of a `define` variable in each module you create or consume, you can use `amdefine/intercept` instead. It will automatically insert the above snippet in each .js file loaded by Node.

**Warning**: you should only use this if you are creating an application that is consuming AMD style define()'d modules that are distributed via npm and want to run that code in Node.

For library code where you are not sure if it will be used by others in Node or in the browser, explicitly depending on amdefine and placing the code snippet above is the suggested path, instead of using `amdefine/intercept`. The intercept module affects all .js files loaded in the Node app, and it is inconsiderate to modify global state like that unless you are also controlling the top level app.

#### Why distribute AMD-style modules via npm?

npm has a lot of weaknesses for front-end use (installed layout is not great, should have better support for the `baseUrl + moduleID + '.js'` style of loading, single file JS installs), but some people want a JS package manager and are willing to live with those constraints. If that is you, but still want to author in AMD style modules to get dynamic require([]), better direct source usage and powerful loader plugin support in the browser, then this tool can help.

#### amdefine/intercept usage

Just require it in your top level app module (for example index.js, server.js):

```javascript
require('amdefine/intercept');
```

The module does not return a value, so no need to assign the result to a local variable.

Then just require() code as you normally would with Node's require(). Any .js loaded after the intercept require will have the amdefine check injected in the .js source as it is loaded. It does not modify the source on disk, just prepends some content to the text of the module as it is loaded by Node.

#### How amdefine/intercept works

It overrides the `Module._extensions['.js']` in Node to automatically prepend the amdefine snippet above. So, it will affect any .js file loaded by your app.

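For reference, here is a condensed sketch of that override. The full implementation ships as `intercept.js` (included later in this diff); this trimmed version skips the BOM handling, so treat it as illustrative only:

```javascript
// Sketch of the technique used by amdefine/intercept: replace Node's .js
// loader so every file gets the amdefine bootstrap prepended before compile.
var Module = require('module');
var fs = require('fs');

var snippet = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";

Module._extensions['.js'] = function (module, filename) {
    var content = fs.readFileSync(filename, 'utf8');
    // Don't prepend the snippet to amdefine's own file.
    if (!/amdefine\.js/.test(module.id)) {
        content = snippet + content;
    }
    module._compile(content, filename);
};
```
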
## define() usage

It is best if you use the anonymous forms of define() in your module:

```javascript
define(function (require) {
    var dependency = require('dependency');
});
```

or

```javascript
define(['dependency'], function (dependency) {

});
```

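Named modules also work: amdefine caches `define('id', ...)` calls and exposes a `define.require()` helper that resolves them (see the `defineCache` handling in `amdefine.js` later in this diff). A small sketch, with illustrative module names:

```javascript
if (typeof define !== 'function') { var define = require('amdefine')(module) }

// Named defines are held in amdefine's cache until something requires them.
define('adder', function () {
    return function add(a, b) { return a + b; };
});

define('calculator', ['adder'], function (add) {
    return { add: add };
});

// define.require() resolves a cached named module so this file can export it to Node.
module.exports = define.require('calculator');
```
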
## RequireJS optimizer integration. <a name="optimizer"></a>

Version 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html) will have support for stripping the `if (typeof define !== 'function')` check mentioned above, so you can include this snippet for code that runs in the browser, but avoid taking the cost of the if() statement once the code is optimized for deployment.

## Node 0.4 Support

If you want to support Node 0.4, then add `require` as the second parameter to amdefine:

```javascript
//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.
if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
```

## Limitations

### Synchronous vs Asynchronous

amdefine creates a define() function that is callable by your code. It will execute and trace dependencies and call the factory function *synchronously*, to keep the behavior in line with Node's synchronous dependency tracing.

The exception: calling AMD's callback-style require() from inside a factory function. The require callback is called on process.nextTick():

```javascript
define(function (require) {
    require(['a'], function(a) {
        //'a' is loaded synchronously, but
        //this callback is called on process.nextTick().
    });
});
```

### Loader Plugins

Loader plugins are supported as long as they call their load() callbacks synchronously. So ones that do network requests will not work. However plugins like [text](http://requirejs.org/docs/api.html#text) can load text files locally.

The plugin API's `load.fromText()` is **not supported** in amdefine, so this means transpiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs) will not work. This may be fixable, but it is a bit complex, and I do not have enough node-fu to figure it out yet. See the source for amdefine.js if you want to get an idea of the issues involved.

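As an illustration of a plugin that does qualify, here is a sketch of a hypothetical `upper!` plugin whose load() callback fires synchronously (all names here are made up for the example):

```javascript
if (typeof define !== 'function') { var define = require('amdefine')(module) }

// A toy loader plugin: amdefine calls load(name, req, onload, config) and
// this plugin invokes onload() synchronously, so it works without load.fromText().
define('upper', function () {
    return {
        load: function (name, req, onload, config) {
            onload(name.toUpperCase()); // no async work, so this is allowed
        }
    };
});

// Consuming the plugin: the 'upper!' prefix routes the resource through it.
define(['upper!hello'], function (shouted) {
    console.log(shouted); // "HELLO"
});
```
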
## Tests

To run the tests, cd to **tests** and run:

```
node all.js
node all-intercept.js
```

## License

New BSD and MIT. Check the LICENSE file for all the details.
301
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/amdefine.js
generated
vendored
Normal file
@@ -0,0 +1,301 @@
|
||||
/** vim: et:ts=4:sw=4:sts=4
|
||||
* @license amdefine 0.1.0 Copyright (c) 2011, The Dojo Foundation All Rights Reserved.
|
||||
* Available via the MIT or new BSD license.
|
||||
* see: http://github.com/jrburke/amdefine for details
|
||||
*/
|
||||
|
||||
/*jslint node: true */
|
||||
/*global module, process */
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Creates a define for node.
|
||||
* @param {Object} module the "module" object that is defined by Node for the
|
||||
* current module.
|
||||
* @param {Function} [requireFn]. Node's require function for the current module.
|
||||
* It only needs to be passed in Node versions before 0.5, when module.require
|
||||
* did not exist.
|
||||
* @returns {Function} a define function that is usable for the current node
|
||||
* module.
|
||||
*/
|
||||
function amdefine(module, requireFn) {
|
||||
'use strict';
|
||||
var defineCache = {},
|
||||
loaderCache = {},
|
||||
alreadyCalled = false,
|
||||
path = require('path'),
|
||||
makeRequire, stringRequire;
|
||||
|
||||
/**
|
||||
* Trims the . and .. from an array of path segments.
|
||||
* It will keep a leading path segment if a .. will become
|
||||
* the first path segment, to help with module name lookups,
|
||||
* which act like paths, but can be remapped. But the end result,
|
||||
* all paths that use this function should look normalized.
|
||||
* NOTE: this method MODIFIES the input array.
|
||||
* @param {Array} ary the array of path segments.
|
||||
*/
|
||||
function trimDots(ary) {
|
||||
var i, part;
|
||||
for (i = 0; ary[i]; i+= 1) {
|
||||
part = ary[i];
|
||||
if (part === '.') {
|
||||
ary.splice(i, 1);
|
||||
i -= 1;
|
||||
} else if (part === '..') {
|
||||
if (i === 1 && (ary[2] === '..' || ary[0] === '..')) {
|
||||
//End of the line. Keep at least one non-dot
|
||||
//path segment at the front so it can be mapped
|
||||
//correctly to disk. Otherwise, there is likely
|
||||
//no path mapping for a path starting with '..'.
|
||||
//This can still fail, but catches the most reasonable
|
||||
//uses of ..
|
||||
break;
|
||||
} else if (i > 0) {
|
||||
ary.splice(i - 1, 2);
|
||||
i -= 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function normalize(name, baseName) {
|
||||
var baseParts;
|
||||
|
||||
//Adjust any relative paths.
|
||||
if (name && name.charAt(0) === '.') {
|
||||
//If have a base name, try to normalize against it,
|
||||
//otherwise, assume it is a top-level require that will
|
||||
//be relative to baseUrl in the end.
|
||||
if (baseName) {
|
||||
baseParts = baseName.split('/');
|
||||
baseParts = baseParts.slice(0, baseParts.length - 1);
|
||||
baseParts = baseParts.concat(name.split('/'));
|
||||
trimDots(baseParts);
|
||||
name = baseParts.join('/');
|
||||
}
|
||||
}
|
||||
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the normalize() function passed to a loader plugin's
|
||||
* normalize method.
|
||||
*/
|
||||
function makeNormalize(relName) {
|
||||
return function (name) {
|
||||
return normalize(name, relName);
|
||||
};
|
||||
}
|
||||
|
||||
function makeLoad(id) {
|
||||
function load(value) {
|
||||
loaderCache[id] = value;
|
||||
}
|
||||
|
||||
load.fromText = function (id, text) {
|
||||
//This one is difficult because the text can/probably uses
|
||||
//define, and any relative paths and requires should be relative
|
||||
//to that id was it would be found on disk. But this would require
|
||||
//bootstrapping a module/require fairly deeply from node core.
|
||||
//Not sure how best to go about that yet.
|
||||
throw new Error('amdefine does not implement load.fromText');
|
||||
};
|
||||
|
||||
return load;
|
||||
}
|
||||
|
||||
makeRequire = function (systemRequire, exports, module, relId) {
|
||||
function amdRequire(deps, callback) {
|
||||
if (typeof deps === 'string') {
|
||||
//Synchronous, single module require('')
|
||||
return stringRequire(systemRequire, exports, module, deps, relId);
|
||||
} else {
|
||||
//Array of dependencies with a callback.
|
||||
|
||||
//Convert the dependencies to modules.
|
||||
deps = deps.map(function (depName) {
|
||||
return stringRequire(systemRequire, exports, module, depName, relId);
|
||||
});
|
||||
|
||||
//Wait for next tick to call back the require call.
|
||||
if (callback) {
|
||||
process.nextTick(function () {
|
||||
callback.apply(null, deps);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
amdRequire.toUrl = function (filePath) {
|
||||
if (filePath.indexOf('.') === 0) {
|
||||
return normalize(filePath, path.dirname(module.filename));
|
||||
} else {
|
||||
return filePath;
|
||||
}
|
||||
};
|
||||
|
||||
return amdRequire;
|
||||
};
|
||||
|
||||
//Favor explicit value, passed in if the module wants to support Node 0.4.
|
||||
requireFn = requireFn || function req() {
|
||||
return module.require.apply(module, arguments);
|
||||
};
|
||||
|
||||
function runFactory(id, deps, factory) {
|
||||
var r, e, m, result;
|
||||
|
||||
if (id) {
|
||||
e = loaderCache[id] = {};
|
||||
m = {
|
||||
id: id,
|
||||
uri: __filename,
|
||||
exports: e
|
||||
};
|
||||
r = makeRequire(requireFn, e, m, id);
|
||||
} else {
|
||||
//Only support one define call per file
|
||||
if (alreadyCalled) {
|
||||
throw new Error('amdefine with no module ID cannot be called more than once per file.');
|
||||
}
|
||||
alreadyCalled = true;
|
||||
|
||||
//Use the real variables from node
|
||||
//Use module.exports for exports, since
|
||||
//the exports in here is amdefine exports.
|
||||
e = module.exports;
|
||||
m = module;
|
||||
r = makeRequire(requireFn, e, m, module.id);
|
||||
}
|
||||
|
||||
//If there are dependencies, they are strings, so need
|
||||
//to convert them to dependency values.
|
||||
if (deps) {
|
||||
deps = deps.map(function (depName) {
|
||||
return r(depName);
|
||||
});
|
||||
}
|
||||
|
||||
//Call the factory with the right dependencies.
|
||||
if (typeof factory === 'function') {
|
||||
result = factory.apply(m.exports, deps);
|
||||
} else {
|
||||
result = factory;
|
||||
}
|
||||
|
||||
if (result !== undefined) {
|
||||
m.exports = result;
|
||||
if (id) {
|
||||
loaderCache[id] = m.exports;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stringRequire = function (systemRequire, exports, module, id, relId) {
|
||||
//Split the ID by a ! so that
|
||||
var index = id.indexOf('!'),
|
||||
originalId = id,
|
||||
prefix, plugin;
|
||||
|
||||
if (index === -1) {
|
||||
id = normalize(id, relId);
|
||||
|
||||
//Straight module lookup. If it is one of the special dependencies,
|
||||
//deal with it, otherwise, delegate to node.
|
||||
if (id === 'require') {
|
||||
return makeRequire(systemRequire, exports, module, relId);
|
||||
} else if (id === 'exports') {
|
||||
return exports;
|
||||
} else if (id === 'module') {
|
||||
return module;
|
||||
} else if (loaderCache.hasOwnProperty(id)) {
|
||||
return loaderCache[id];
|
||||
} else if (defineCache[id]) {
|
||||
runFactory.apply(null, defineCache[id]);
|
||||
return loaderCache[id];
|
||||
} else {
|
||||
if(systemRequire) {
|
||||
return systemRequire(originalId);
|
||||
} else {
|
||||
throw new Error('No module with ID: ' + id);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//There is a plugin in play.
|
||||
prefix = id.substring(0, index);
|
||||
id = id.substring(index + 1, id.length);
|
||||
|
||||
plugin = stringRequire(systemRequire, exports, module, prefix, relId);
|
||||
|
||||
if (plugin.normalize) {
|
||||
id = plugin.normalize(id, makeNormalize(relId));
|
||||
} else {
|
||||
//Normalize the ID normally.
|
||||
id = normalize(id, relId);
|
||||
}
|
||||
|
||||
if (loaderCache[id]) {
|
||||
return loaderCache[id];
|
||||
} else {
|
||||
plugin.load(id, makeRequire(systemRequire, exports, module, relId), makeLoad(id), {});
|
||||
|
||||
return loaderCache[id];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Create a define function specific to the module asking for amdefine.
|
||||
function define(id, deps, factory) {
|
||||
if (Array.isArray(id)) {
|
||||
factory = deps;
|
||||
deps = id;
|
||||
id = undefined;
|
||||
} else if (typeof id !== 'string') {
|
||||
factory = id;
|
||||
id = deps = undefined;
|
||||
}
|
||||
|
||||
if (deps && !Array.isArray(deps)) {
|
||||
factory = deps;
|
||||
deps = undefined;
|
||||
}
|
||||
|
||||
if (!deps) {
|
||||
deps = ['require', 'exports', 'module'];
|
||||
}
|
||||
|
||||
//Set up properties for this module. If an ID, then use
|
||||
//internal cache. If no ID, then use the external variables
|
||||
//for this node module.
|
||||
if (id) {
|
||||
//Put the module in deep freeze until there is a
|
||||
//require call for it.
|
||||
defineCache[id] = [id, deps, factory];
|
||||
} else {
|
||||
runFactory(id, deps, factory);
|
||||
}
|
||||
}
|
||||
|
||||
//define.require, which has access to all the values in the
|
||||
//cache. Useful for AMD modules that all have IDs in the file,
|
||||
//but need to finally export a value to node based on one of those
|
||||
//IDs.
|
||||
define.require = function (id) {
|
||||
if (loaderCache[id]) {
|
||||
return loaderCache[id];
|
||||
}
|
||||
|
||||
if (defineCache[id]) {
|
||||
runFactory.apply(null, defineCache[id]);
|
||||
return loaderCache[id];
|
||||
}
|
||||
};
|
||||
|
||||
define.amd = {};
|
||||
|
||||
return define;
|
||||
}
|
||||
|
||||
module.exports = amdefine;
|
||||
36
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/intercept.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
/*jshint node: true */
var inserted,
    Module = require('module'),
    fs = require('fs'),
    existingExtFn = Module._extensions['.js'],
    amdefineRegExp = /amdefine\.js/;

inserted = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";

//From the node/lib/module.js source:
function stripBOM(content) {
    // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
    // because the buffer-to-string conversion in `fs.readFileSync()`
    // translates it to FEFF, the UTF-16 BOM.
    if (content.charCodeAt(0) === 0xFEFF) {
        content = content.slice(1);
    }
    return content;
}

//Also adapted from the node/lib/module.js source:
function intercept(module, filename) {
    var content = stripBOM(fs.readFileSync(filename, 'utf8'));

    if (!amdefineRegExp.test(module.id)) {
        content = inserted + content;
    }

    module._compile(content, filename);
}

intercept._id = 'amdefine/intercept';

if (!existingExtFn._id || existingExtFn._id !== intercept._id) {
    Module._extensions['.js'] = intercept;
}
27
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/node_modules/amdefine/package.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
130
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/package.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
62
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/run-tests.js
generated
vendored
Executable file
@@ -0,0 +1,62 @@
#!/usr/bin/env node
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
var assert = require('assert');
var fs = require('fs');
var path = require('path');
var util = require('./source-map/util');

function run(tests) {
  var total = 0;
  var passed = 0;

  for (var i = 0; i < tests.length; i++) {
    for (var k in tests[i].testCase) {
      if (/^test/.test(k)) {
        total++;
        try {
          tests[i].testCase[k](assert, util);
          passed++;
        }
        catch (e) {
          console.log('FAILED ' + tests[i].name + ': ' + k + '!');
          console.log(e.stack);
        }
      }
    }
  }

  console.log('');
  console.log(passed + ' / ' + total + ' tests passed.');
  console.log('');

  return total - passed;
}

function isTestFile(f) {
  var testToRun = process.argv[2];
  return testToRun
    ? path.basename(testToRun) === f
    : /^test\-.*?\.js/.test(f);
}

function toModule(f) {
  return './source-map/' + f.replace(/\.js$/, '');
}

var requires = fs.readdirSync(path.join(__dirname, 'source-map'))
  .filter(isTestFile)
  .map(toModule);

var code = run(requires.map(require).map(function (mod, i) {
  return {
    name: requires[i],
    testCase: mod
  };
}));

process.exit(code);
26
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-api.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2012 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var sourceMap;
  try {
    sourceMap = require('../../lib/source-map');
  } catch (e) {
    sourceMap = {};
    Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
  }

  exports['test that the api is properly exposed in the top level'] = function (assert, util) {
    assert.equal(typeof sourceMap.SourceMapGenerator, "function");
    assert.equal(typeof sourceMap.SourceMapConsumer, "function");
    assert.equal(typeof sourceMap.SourceNode, "function");
  };

});
104
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-array-set.js
generated
vendored
Normal file
@@ -0,0 +1,104 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var ArraySet = require('../../lib/source-map/array-set').ArraySet;
|
||||
|
||||
function makeTestSet() {
|
||||
var set = new ArraySet();
|
||||
for (var i = 0; i < 100; i++) {
|
||||
set.add(String(i));
|
||||
}
|
||||
return set;
|
||||
}
|
||||
|
||||
exports['test .has() membership'] = function (assert, util) {
|
||||
var set = makeTestSet();
|
||||
for (var i = 0; i < 100; i++) {
|
||||
assert.ok(set.has(String(i)));
|
||||
}
|
||||
};
|
||||
|
||||
exports['test .indexOf() elements'] = function (assert, util) {
|
||||
var set = makeTestSet();
|
||||
for (var i = 0; i < 100; i++) {
|
||||
assert.strictEqual(set.indexOf(String(i)), i);
|
||||
}
|
||||
};
|
||||
|
||||
exports['test .at() indexing'] = function (assert, util) {
|
||||
var set = makeTestSet();
|
||||
for (var i = 0; i < 100; i++) {
|
||||
assert.strictEqual(set.at(i), String(i));
|
||||
}
|
||||
};
|
||||
|
||||
exports['test creating from an array'] = function (assert, util) {
|
||||
var set = ArraySet.fromArray(['foo', 'bar', 'baz', 'quux', 'hasOwnProperty']);
|
||||
|
||||
assert.ok(set.has('foo'));
|
||||
assert.ok(set.has('bar'));
|
||||
assert.ok(set.has('baz'));
|
||||
assert.ok(set.has('quux'));
|
||||
assert.ok(set.has('hasOwnProperty'));
|
||||
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.indexOf('bar'), 1);
|
||||
assert.strictEqual(set.indexOf('baz'), 2);
|
||||
assert.strictEqual(set.indexOf('quux'), 3);
|
||||
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.at(1), 'bar');
|
||||
assert.strictEqual(set.at(2), 'baz');
|
||||
assert.strictEqual(set.at(3), 'quux');
|
||||
};
|
||||
|
||||
exports['test that you can add __proto__; see github issue #30'] = function (assert, util) {
|
||||
var set = new ArraySet();
|
||||
set.add('__proto__');
|
||||
assert.ok(set.has('__proto__'));
|
||||
assert.strictEqual(set.at(0), '__proto__');
|
||||
assert.strictEqual(set.indexOf('__proto__'), 0);
|
||||
};
|
||||
|
||||
exports['test .fromArray() with duplicates'] = function (assert, util) {
|
||||
var set = ArraySet.fromArray(['foo', 'foo']);
|
||||
assert.ok(set.has('foo'));
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.toArray().length, 1);
|
||||
|
||||
set = ArraySet.fromArray(['foo', 'foo'], true);
|
||||
assert.ok(set.has('foo'));
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.at(1), 'foo');
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.toArray().length, 2);
|
||||
};
|
||||
|
||||
exports['test .add() with duplicates'] = function (assert, util) {
|
||||
var set = new ArraySet();
|
||||
set.add('foo');
|
||||
|
||||
set.add('foo');
|
||||
assert.ok(set.has('foo'));
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.toArray().length, 1);
|
||||
|
||||
set.add('foo', true);
|
||||
assert.ok(set.has('foo'));
|
||||
assert.strictEqual(set.at(0), 'foo');
|
||||
assert.strictEqual(set.at(1), 'foo');
|
||||
assert.strictEqual(set.indexOf('foo'), 0);
|
||||
assert.strictEqual(set.toArray().length, 2);
|
||||
};
|
||||
|
||||
});
|
||||
24
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-base64-vlq.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var base64VLQ = require('../../lib/source-map/base64-vlq');

  exports['test normal encoding and decoding'] = function (assert, util) {
    var result;
    for (var i = -255; i < 256; i++) {
      result = base64VLQ.decode(base64VLQ.encode(i));
      assert.ok(result);
      assert.equal(result.value, i);
      assert.equal(result.rest, "");
    }
  };

});
35
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-base64.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var base64 = require('../../lib/source-map/base64');

  exports['test out of range encoding'] = function (assert, util) {
    assert.throws(function () {
      base64.encode(-1);
    });
    assert.throws(function () {
      base64.encode(64);
    });
  };

  exports['test out of range decoding'] = function (assert, util) {
    assert.throws(function () {
      base64.decode('=');
    });
  };

  exports['test normal encoding and decoding'] = function (assert, util) {
    for (var i = 0; i < 64; i++) {
      assert.equal(base64.decode(base64.encode(i)), i);
    }
  };

});
54
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-binary-search.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var binarySearch = require('../../lib/source-map/binary-search');

  function numberCompare(a, b) {
    return a - b;
  }

  exports['test too high'] = function (assert, util) {
    var needle = 30;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.doesNotThrow(function () {
      binarySearch.search(needle, haystack, numberCompare);
    });

    assert.equal(binarySearch.search(needle, haystack, numberCompare), 20);
  };

  exports['test too low'] = function (assert, util) {
    var needle = 1;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.doesNotThrow(function () {
      binarySearch.search(needle, haystack, numberCompare);
    });

    assert.equal(binarySearch.search(needle, haystack, numberCompare), null);
  };

  exports['test exact search'] = function (assert, util) {
    var needle = 4;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.equal(binarySearch.search(needle, haystack, numberCompare), 4);
  };

  exports['test fuzzy search'] = function (assert, util) {
    var needle = 19;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.equal(binarySearch.search(needle, haystack, numberCompare), 18);
  };

});
84
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-dog-fooding.js
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
|
||||
exports['test eating our own dog food'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({
|
||||
file: 'testing.js',
|
||||
sourceRoot: '/wu/tang'
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 1, column: 0 },
|
||||
generated: { line: 2, column: 2 }
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 2, column: 0 },
|
||||
generated: { line: 3, column: 2 }
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 3, column: 0 },
|
||||
generated: { line: 4, column: 2 }
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 4, column: 0 },
|
||||
generated: { line: 5, column: 2 }
|
||||
});
|
||||
|
||||
smg.addMapping({
|
||||
source: 'gza.coffee',
|
||||
original: { line: 5, column: 10 },
|
||||
generated: { line: 6, column: 12 }
|
||||
});
|
||||
|
||||
var smc = new SourceMapConsumer(smg.toString());
|
||||
|
||||
// Exact
|
||||
util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 0, null, smc, assert);
|
||||
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 0, null, smc, assert);
|
||||
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 0, null, smc, assert);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 0, null, smc, assert);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 10, null, smc, assert);
|
||||
|
||||
// Fuzzy
|
||||
|
||||
// Generated to original
|
||||
util.assertMapping(2, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(2, 9, '/wu/tang/gza.coffee', 1, 0, null, smc, assert, true);
|
||||
util.assertMapping(3, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(3, 9, '/wu/tang/gza.coffee', 2, 0, null, smc, assert, true);
|
||||
util.assertMapping(4, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(4, 9, '/wu/tang/gza.coffee', 3, 0, null, smc, assert, true);
|
||||
util.assertMapping(5, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(5, 9, '/wu/tang/gza.coffee', 4, 0, null, smc, assert, true);
|
||||
util.assertMapping(6, 0, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(6, 9, null, null, null, null, smc, assert, true);
|
||||
util.assertMapping(6, 13, '/wu/tang/gza.coffee', 5, 10, null, smc, assert, true);
|
||||
|
||||
// Original to generated
|
||||
util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 1, null, smc, assert, null, true);
|
||||
util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 3, null, smc, assert, null, true);
|
||||
util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 6, null, smc, assert, null, true);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 9, null, smc, assert, null, true);
|
||||
util.assertMapping(5, 2, '/wu/tang/gza.coffee', 5, 9, null, smc, assert, null, true);
|
||||
util.assertMapping(6, 12, '/wu/tang/gza.coffee', 6, 19, null, smc, assert, null, true);
|
||||
};
|
||||
|
||||
});
|
||||
475
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-map-consumer.js
generated
vendored
Normal file
@@ -0,0 +1,475 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
|
||||
exports['test that we can instantiate with a string or an object'] = function (assert, util) {
|
||||
assert.doesNotThrow(function () {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
});
|
||||
assert.doesNotThrow(function () {
|
||||
var map = new SourceMapConsumer(JSON.stringify(util.testMap));
|
||||
});
|
||||
};
|
||||
|
||||
exports['test that the `sources` field has the original sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var sources = map.sources;
|
||||
|
||||
assert.equal(sources[0], '/the/root/one.js');
|
||||
assert.equal(sources[1], '/the/root/two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
};
|
||||
|
||||
exports['test that the source root is reflected in a mapping\'s source field'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var mapping;
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, '/the/root/two.js');
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, '/the/root/one.js');
|
||||
};
|
||||
|
||||
exports['test mapping tokens back exactly'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
|
||||
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
|
||||
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
|
||||
|
||||
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
|
||||
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
|
||||
};
|
||||
|
||||
exports['test mapping tokens fuzzy'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
// Finding original positions
|
||||
util.assertMapping(1, 20, '/the/root/one.js', 1, 21, 'bar', map, assert, true);
|
||||
util.assertMapping(1, 30, '/the/root/one.js', 2, 10, 'baz', map, assert, true);
|
||||
util.assertMapping(2, 12, '/the/root/two.js', 1, 11, null, map, assert, true);
|
||||
|
||||
// Finding generated positions
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 22, 'bar', map, assert, null, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 13, 'baz', map, assert, null, true);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 16, null, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test mappings and end of lines'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
|
||||
var map = SourceMapConsumer.fromSourceMap(smg);
|
||||
|
||||
// When finding original positions, mappings end at the end of the line.
|
||||
util.assertMapping(2, 1, null, null, null, null, map, assert, true)
|
||||
|
||||
// When finding generated positions, mappings do not end at the end of the line.
|
||||
util.assertMapping(1, 1, 'bar.js', 2, 1, null, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test creating source map consumers with )]}\' prefix'] = function (assert, util) {
|
||||
assert.doesNotThrow(function () {
|
||||
var map = new SourceMapConsumer(")]}'" + JSON.stringify(util.testMap));
|
||||
});
|
||||
};
|
||||
|
||||
exports['test eachMapping'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.generatedLine >= previousLine);
|
||||
|
||||
if (mapping.source) {
|
||||
assert.equal(mapping.source.indexOf(util.testMap.sourceRoot), 0);
|
||||
}
|
||||
|
||||
if (mapping.generatedLine === previousLine) {
|
||||
assert.ok(mapping.generatedColumn >= previousColumn);
|
||||
previousColumn = mapping.generatedColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.generatedLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
exports['test iterating over mappings in a different order'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
var previousSource = "";
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.source >= previousSource);
|
||||
|
||||
if (mapping.source === previousSource) {
|
||||
assert.ok(mapping.originalLine >= previousLine);
|
||||
|
||||
if (mapping.originalLine === previousLine) {
|
||||
assert.ok(mapping.originalColumn >= previousColumn);
|
||||
previousColumn = mapping.originalColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.originalLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}
|
||||
else {
|
||||
previousSource = mapping.source;
|
||||
previousLine = -Infinity;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}, null, SourceMapConsumer.ORIGINAL_ORDER);
|
||||
};
|
||||
|
||||
exports['test that we can set the context for `this` in eachMapping'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var context = {};
|
||||
map.eachMapping(function () {
|
||||
assert.equal(this, context);
|
||||
}, context);
|
||||
};
|
||||
|
||||
exports['test that the `sourcesContent` field has the original sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
|
||||
var sourcesContent = map.sourcesContent;
|
||||
|
||||
assert.equal(sourcesContent[0], ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(sourcesContent[1], ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(sourcesContent.length, 2);
|
||||
};
|
||||
|
||||
exports['test that we can get the original sources for the sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
|
||||
var sources = map.sources;
|
||||
|
||||
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("/the/root/three.js");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("three.js");
|
||||
}, Error);
|
||||
};
|
||||
|
||||
exports['test sourceRoot + generatedPositionFor'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'foo/bar',
|
||||
file: 'baz.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 5, column: 5 },
|
||||
generated: { line: 6, column: 6 },
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
// Should handle without sourceRoot.
|
||||
var pos = map.generatedPositionFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
|
||||
// Should handle with sourceRoot.
|
||||
var pos = map.generatedPositionFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'foo/bar/bang.coffee'
|
||||
});
|
||||
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
};
|
||||
|
||||
exports['test sourceRoot + originalPositionFor'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'foo/bar',
|
||||
file: 'baz.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2,
|
||||
});
|
||||
|
||||
// Should always have the prepended source root
|
||||
assert.equal(pos.source, 'foo/bar/bang.coffee');
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
};
|
||||
|
||||
exports['test github issue #56'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'http://',
|
||||
file: 'www.example.com/foo.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'www.example.com/original.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var sources = map.sources;
|
||||
assert.equal(sources.length, 1);
|
||||
assert.equal(sources[0], 'http://www.example.com/original.js');
|
||||
};
|
||||
|
||||
exports['test github issue #43'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'http://example.com',
|
||||
file: 'foo.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'http://cdn.example.com/original.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var sources = map.sources;
|
||||
assert.equal(sources.length, 1,
|
||||
'Should only be one source.');
|
||||
assert.equal(sources[0], 'http://cdn.example.com/original.js',
|
||||
'Should not be joined with the sourceRoot.');
|
||||
};
|
||||
|
||||
exports['test absolute path, but same host sources'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'http://example.com/foo/bar',
|
||||
file: 'foo.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: '/original.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var sources = map.sources;
|
||||
assert.equal(sources.length, 1,
|
||||
'Should only be one source.');
|
||||
assert.equal(sources[0], 'http://example.com/original.js',
|
||||
'Source should be relative the host of the source root.');
|
||||
};
|
||||
|
||||
exports['test github issue #64'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sourceRoot": "http://example.com/",
|
||||
"sources": ["/a"],
|
||||
"names": [],
|
||||
"mappings": "AACA",
|
||||
"sourcesContent": ["foo"]
|
||||
});
|
||||
|
||||
assert.equal(map.sourceContentFor("a"), "foo");
|
||||
assert.equal(map.sourceContentFor("/a"), "foo");
|
||||
};
|
||||
|
||||
exports['test bug 885597'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sourceRoot": "file:///Users/AlGore/Invented/The/Internet/",
|
||||
"sources": ["/a"],
|
||||
"names": [],
|
||||
"mappings": "AACA",
|
||||
"sourcesContent": ["foo"]
|
||||
});
|
||||
|
||||
var s = map.sources[0];
|
||||
assert.equal(map.sourceContentFor(s), "foo");
|
||||
};
|
||||
|
||||
exports['test github issue #72, duplicate sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sources": ["source1.js", "source1.js", "source3.js"],
|
||||
"names": [],
|
||||
"mappings": ";EAAC;;IAEE;;MEEE",
|
||||
"sourceRoot": "http://example.com"
|
||||
});
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(pos.source, 'http://example.com/source1.js');
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 4,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(pos.source, 'http://example.com/source1.js');
|
||||
assert.equal(pos.line, 3);
|
||||
assert.equal(pos.column, 3);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 6,
|
||||
column: 6
|
||||
});
|
||||
assert.equal(pos.source, 'http://example.com/source3.js');
|
||||
assert.equal(pos.line, 5);
|
||||
assert.equal(pos.column, 5);
|
||||
};
|
||||
|
||||
exports['test github issue #72, duplicate names'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sources": ["source.js"],
|
||||
"names": ["name1", "name1", "name3"],
|
||||
"mappings": ";EAACA;;IAEEA;;MAEEE",
|
||||
"sourceRoot": "http://example.com"
|
||||
});
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(pos.name, 'name1');
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 4,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(pos.name, 'name1');
|
||||
assert.equal(pos.line, 3);
|
||||
assert.equal(pos.column, 3);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 6,
|
||||
column: 6
|
||||
});
|
||||
assert.equal(pos.name, 'name3');
|
||||
assert.equal(pos.line, 5);
|
||||
assert.equal(pos.column, 5);
|
||||
};
|
||||
|
||||
exports['test SourceMapConsumer.fromSourceMap'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({
|
||||
sourceRoot: 'http://example.com/',
|
||||
file: 'foo.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 4, column: 4 },
|
||||
source: 'baz.js',
|
||||
name: 'dirtMcGirt'
|
||||
});
|
||||
smg.setSourceContent('baz.js', 'baz.js content');
|
||||
|
||||
var smc = SourceMapConsumer.fromSourceMap(smg);
|
||||
assert.equal(smc.file, 'foo.js');
|
||||
assert.equal(smc.sourceRoot, 'http://example.com/');
|
||||
assert.equal(smc.sources.length, 2);
|
||||
assert.equal(smc.sources[0], 'http://example.com/bar.js');
|
||||
assert.equal(smc.sources[1], 'http://example.com/baz.js');
|
||||
assert.equal(smc.sourceContentFor('baz.js'), 'baz.js content');
|
||||
|
||||
var pos = smc.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
assert.equal(pos.source, 'http://example.com/bar.js');
|
||||
assert.equal(pos.name, null);
|
||||
|
||||
pos = smc.generatedPositionFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'http://example.com/bar.js'
|
||||
});
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
|
||||
pos = smc.originalPositionFor({
|
||||
line: 4,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
assert.equal(pos.source, 'http://example.com/baz.js');
|
||||
assert.equal(pos.name, 'dirtMcGirt');
|
||||
|
||||
pos = smc.generatedPositionFor({
|
||||
line: 2,
|
||||
column: 2,
|
||||
source: 'http://example.com/baz.js'
|
||||
});
|
||||
assert.equal(pos.line, 4);
|
||||
assert.equal(pos.column, 4);
|
||||
};
|
||||
});
|
||||
549
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-map-generator.js
generated
vendored
Normal file
@@ -0,0 +1,549 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var SourceNode = require('../../lib/source-map/source-node').SourceNode;
|
||||
var util = require('./util');
|
||||
|
||||
exports['test some simple stuff'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
assert.ok(true);
|
||||
|
||||
var map = new SourceMapGenerator();
|
||||
assert.ok(true);
|
||||
};
|
||||
|
||||
exports['test JSON serialization'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
assert.equal(map.toString(), JSON.stringify(map));
|
||||
};
|
||||
|
||||
exports['test adding mappings (case 1)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings (case 2)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js',
|
||||
original: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings (case 3)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js',
|
||||
original: { line: 1, column: 1 },
|
||||
name: 'someToken'
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings (invalid)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
// Not enough info.
|
||||
assert.throws(function () {
|
||||
map.addMapping({});
|
||||
});
|
||||
|
||||
// Original file position, but no source.
|
||||
assert.throws(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test that the correct mappings are being generated'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'min.js',
|
||||
sourceRoot: '/the/root'
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 5 },
|
||||
original: { line: 1, column: 5 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 9 },
|
||||
original: { line: 1, column: 11 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 18 },
|
||||
original: { line: 1, column: 21 },
|
||||
source: 'one.js',
|
||||
name: 'bar'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 21 },
|
||||
original: { line: 2, column: 3 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 28 },
|
||||
original: { line: 2, column: 10 },
|
||||
source: 'one.js',
|
||||
name: 'baz'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 32 },
|
||||
original: { line: 2, column: 14 },
|
||||
source: 'one.js',
|
||||
name: 'bar'
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 5 },
|
||||
original: { line: 1, column: 5 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 9 },
|
||||
original: { line: 1, column: 11 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 18 },
|
||||
original: { line: 1, column: 21 },
|
||||
source: 'two.js',
|
||||
name: 'n'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 21 },
|
||||
original: { line: 2, column: 3 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 28 },
|
||||
original: { line: 2, column: 10 },
|
||||
source: 'two.js',
|
||||
name: 'n'
|
||||
});
|
||||
|
||||
map = JSON.parse(map.toString());
|
||||
|
||||
util.assertEqualMaps(assert, map, util.testMap);
|
||||
};
|
||||
|
||||
exports['test that source content can be set'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'min.js',
|
||||
sourceRoot: '/the/root'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.setSourceContent('one.js', 'one file content');
|
||||
|
||||
map = JSON.parse(map.toString());
|
||||
assert.equal(map.sources[0], 'one.js');
|
||||
assert.equal(map.sources[1], 'two.js');
|
||||
assert.equal(map.sourcesContent[0], 'one file content');
|
||||
assert.equal(map.sourcesContent[1], null);
|
||||
};
|
||||
|
||||
exports['test .fromSourceMap'] = function (assert, util) {
|
||||
var map = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(util.testMap));
|
||||
util.assertEqualMaps(assert, map.toJSON(), util.testMap);
|
||||
};
|
||||
|
||||
exports['test .fromSourceMap with sourcesContent'] = function (assert, util) {
|
||||
var map = SourceMapGenerator.fromSourceMap(
|
||||
new SourceMapConsumer(util.testMapWithSourcesContent));
|
||||
util.assertEqualMaps(assert, map.toJSON(), util.testMapWithSourcesContent);
|
||||
};
|
||||
|
||||
exports['test applySourceMap'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null, [
|
||||
new SourceNode(2, 0, 'fileX', 'lineX2\n'),
|
||||
'genA1\n',
|
||||
new SourceNode(2, 0, 'fileY', 'lineY2\n'),
|
||||
'genA2\n',
|
||||
new SourceNode(1, 0, 'fileX', 'lineX1\n'),
|
||||
'genA3\n',
|
||||
new SourceNode(1, 0, 'fileY', 'lineY1\n')
|
||||
]);
|
||||
var mapStep1 = node.toStringWithSourceMap({
|
||||
file: 'fileA'
|
||||
}).map;
|
||||
mapStep1.setSourceContent('fileX', 'lineX1\nlineX2\n');
|
||||
mapStep1 = mapStep1.toJSON();
|
||||
|
||||
node = new SourceNode(null, null, null, [
|
||||
'gen1\n',
|
||||
new SourceNode(1, 0, 'fileA', 'lineA1\n'),
|
||||
new SourceNode(2, 0, 'fileA', 'lineA2\n'),
|
||||
new SourceNode(3, 0, 'fileA', 'lineA3\n'),
|
||||
new SourceNode(4, 0, 'fileA', 'lineA4\n'),
|
||||
new SourceNode(1, 0, 'fileB', 'lineB1\n'),
|
||||
new SourceNode(2, 0, 'fileB', 'lineB2\n'),
|
||||
'gen2\n'
|
||||
]);
|
||||
var mapStep2 = node.toStringWithSourceMap({
|
||||
file: 'fileGen'
|
||||
}).map;
|
||||
mapStep2.setSourceContent('fileB', 'lineB1\nlineB2\n');
|
||||
mapStep2 = mapStep2.toJSON();
|
||||
|
||||
node = new SourceNode(null, null, null, [
|
||||
'gen1\n',
|
||||
new SourceNode(2, 0, 'fileX', 'lineA1\n'),
|
||||
new SourceNode(2, 0, 'fileA', 'lineA2\n'),
|
||||
new SourceNode(2, 0, 'fileY', 'lineA3\n'),
|
||||
new SourceNode(4, 0, 'fileA', 'lineA4\n'),
|
||||
new SourceNode(1, 0, 'fileB', 'lineB1\n'),
|
||||
new SourceNode(2, 0, 'fileB', 'lineB2\n'),
|
||||
'gen2\n'
|
||||
]);
|
||||
var expectedMap = node.toStringWithSourceMap({
|
||||
file: 'fileGen'
|
||||
}).map;
|
||||
expectedMap.setSourceContent('fileX', 'lineX1\nlineX2\n');
|
||||
expectedMap.setSourceContent('fileB', 'lineB1\nlineB2\n');
|
||||
expectedMap = expectedMap.toJSON();
|
||||
|
||||
// apply source map "mapStep1" to "mapStep2"
|
||||
var generator = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(mapStep2));
|
||||
generator.applySourceMap(new SourceMapConsumer(mapStep1));
|
||||
var actualMap = generator.toJSON();
|
||||
|
||||
util.assertEqualMaps(assert, actualMap, expectedMap);
|
||||
};
|
||||
|
||||
exports['test applySourceMap throws when file is missing'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
var map2 = new SourceMapGenerator();
|
||||
assert.throws(function() {
|
||||
map.applySourceMap(new SourceMapConsumer(map2.toJSON()));
|
||||
});
|
||||
};
|
||||
|
||||
exports['test the two additional parameters of applySourceMap'] = function (assert, util) {
|
||||
// Assume the following directory structure:
|
||||
//
|
||||
// http://foo.org/
|
||||
// bar.coffee
|
||||
// app/
|
||||
// coffee/
|
||||
// foo.coffee
|
||||
// temp/
|
||||
// bundle.js
|
||||
// temp_maps/
|
||||
// bundle.js.map
|
||||
// public/
|
||||
// bundle.min.js
|
||||
// bundle.min.js.map
|
||||
//
|
||||
// http://www.example.com/
|
||||
// baz.coffee
|
||||
|
||||
var bundleMap = new SourceMapGenerator({
|
||||
file: 'bundle.js'
|
||||
});
|
||||
bundleMap.addMapping({
|
||||
generated: { line: 3, column: 3 },
|
||||
original: { line: 2, column: 2 },
|
||||
source: '../coffee/foo.coffee'
|
||||
});
|
||||
bundleMap.setSourceContent('../coffee/foo.coffee', 'foo coffee');
|
||||
bundleMap.addMapping({
|
||||
generated: { line: 13, column: 13 },
|
||||
original: { line: 12, column: 12 },
|
||||
source: '/bar.coffee'
|
||||
});
|
||||
bundleMap.setSourceContent('/bar.coffee', 'bar coffee');
|
||||
bundleMap.addMapping({
|
||||
generated: { line: 23, column: 23 },
|
||||
original: { line: 22, column: 22 },
|
||||
source: 'http://www.example.com/baz.coffee'
|
||||
});
|
||||
bundleMap.setSourceContent(
|
||||
'http://www.example.com/baz.coffee',
|
||||
'baz coffee'
|
||||
);
|
||||
bundleMap = new SourceMapConsumer(bundleMap.toJSON());
|
||||
|
||||
var minifiedMap = new SourceMapGenerator({
|
||||
file: 'bundle.min.js',
|
||||
sourceRoot: '..'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 3, column: 3 },
|
||||
source: 'temp/bundle.js'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 11, column: 11 },
|
||||
original: { line: 13, column: 13 },
|
||||
source: 'temp/bundle.js'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 21, column: 21 },
|
||||
original: { line: 23, column: 23 },
|
||||
source: 'temp/bundle.js'
|
||||
});
|
||||
minifiedMap = new SourceMapConsumer(minifiedMap.toJSON());
|
||||
|
||||
var expectedMap = function (sources) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'bundle.min.js',
|
||||
sourceRoot: '..'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 2, column: 2 },
|
||||
source: sources[0]
|
||||
});
|
||||
map.setSourceContent(sources[0], 'foo coffee');
|
||||
map.addMapping({
|
||||
generated: { line: 11, column: 11 },
|
||||
original: { line: 12, column: 12 },
|
||||
source: sources[1]
|
||||
});
|
||||
map.setSourceContent(sources[1], 'bar coffee');
|
||||
map.addMapping({
|
||||
generated: { line: 21, column: 21 },
|
||||
original: { line: 22, column: 22 },
|
||||
source: sources[2]
|
||||
});
|
||||
map.setSourceContent(sources[2], 'baz coffee');
|
||||
return map.toJSON();
|
||||
}
|
||||
|
||||
var actualMap = function (aSourceMapPath) {
|
||||
var map = SourceMapGenerator.fromSourceMap(minifiedMap);
|
||||
// Note that relying on `bundleMap.file` (which is simply 'bundle.js')
|
||||
// instead of supplying the second parameter wouldn't work here.
|
||||
map.applySourceMap(bundleMap, '../temp/bundle.js', aSourceMapPath);
|
||||
return map.toJSON();
|
||||
}
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('../temp_maps'), expectedMap([
|
||||
'coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('/app/temp_maps'), expectedMap([
|
||||
'/app/coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('http://foo.org/app/temp_maps'), expectedMap([
|
||||
'http://foo.org/app/coffee/foo.coffee',
|
||||
'http://foo.org/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
};
|
||||
|
||||
exports['test sorting with duplicate generated mappings'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
original: { line: 2, column: 0 },
|
||||
source: 'a.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 0 }
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 0 }
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 0 },
|
||||
original: { line: 1, column: 0 },
|
||||
source: 'a.js'
|
||||
});
|
||||
|
||||
util.assertEqualMaps(assert, map.toJSON(), {
|
||||
version: 3,
|
||||
file: 'test.js',
|
||||
sources: ['a.js'],
|
||||
names: [],
|
||||
mappings: 'AAAA;A;AACA'
|
||||
});
|
||||
};
|
||||
|
||||
exports['test ignore duplicate mappings.'] = function (assert, util) {
|
||||
var init = { file: 'min.js', sourceRoot: '/the/root' };
|
||||
var map1, map2;
|
||||
|
||||
// null original source location
|
||||
var nullMapping1 = {
|
||||
generated: { line: 1, column: 0 }
|
||||
};
|
||||
var nullMapping2 = {
|
||||
generated: { line: 2, column: 2 }
|
||||
};
|
||||
|
||||
map1 = new SourceMapGenerator(init);
|
||||
map2 = new SourceMapGenerator(init);
|
||||
|
||||
map1.addMapping(nullMapping1);
|
||||
map1.addMapping(nullMapping1);
|
||||
|
||||
map2.addMapping(nullMapping1);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
map1.addMapping(nullMapping2);
|
||||
map1.addMapping(nullMapping1);
|
||||
|
||||
map2.addMapping(nullMapping2);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
// original source location
|
||||
var srcMapping1 = {
|
||||
generated: { line: 1, column: 0 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'srcMapping1.js'
|
||||
};
|
||||
var srcMapping2 = {
|
||||
generated: { line: 2, column: 2 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'srcMapping2.js'
|
||||
};
|
||||
|
||||
map1 = new SourceMapGenerator(init);
|
||||
map2 = new SourceMapGenerator(init);
|
||||
|
||||
map1.addMapping(srcMapping1);
|
||||
map1.addMapping(srcMapping1);
|
||||
|
||||
map2.addMapping(srcMapping1);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
map1.addMapping(srcMapping2);
|
||||
map1.addMapping(srcMapping1);
|
||||
|
||||
map2.addMapping(srcMapping2);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
// full original source and name information
|
||||
var fullMapping1 = {
|
||||
generated: { line: 1, column: 0 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'fullMapping1.js',
|
||||
name: 'fullMapping1'
|
||||
};
|
||||
var fullMapping2 = {
|
||||
generated: { line: 2, column: 2 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'fullMapping2.js',
|
||||
name: 'fullMapping2'
|
||||
};
|
||||
|
||||
map1 = new SourceMapGenerator(init);
|
||||
map2 = new SourceMapGenerator(init);
|
||||
|
||||
map1.addMapping(fullMapping1);
|
||||
map1.addMapping(fullMapping1);
|
||||
|
||||
map2.addMapping(fullMapping1);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
map1.addMapping(fullMapping2);
|
||||
map1.addMapping(fullMapping1);
|
||||
|
||||
map2.addMapping(fullMapping2);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
};
|
||||
|
||||
exports['test github issue #72, check for duplicate names or sources'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 2, column: 2 },
|
||||
source: 'a.js',
|
||||
name: 'foo'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 3, column: 3 },
|
||||
original: { line: 4, column: 4 },
|
||||
source: 'a.js',
|
||||
name: 'foo'
|
||||
});
|
||||
util.assertEqualMaps(assert, map.toJSON(), {
|
||||
version: 3,
|
||||
file: 'test.js',
|
||||
sources: ['a.js'],
|
||||
names: ['foo'],
|
||||
mappings: 'CACEA;;GAEEA'
|
||||
});
|
||||
};
|
||||
|
||||
});
|
||||
487
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-source-node.js
generated
vendored
Normal file
@@ -0,0 +1,487 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var SourceNode = require('../../lib/source-map/source-node').SourceNode;
|
||||
|
||||
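// Wraps a test so it runs once per newline style ('\n' and '\r\n'),
// passing the newline in use as the argument after assert and util.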
function forEachNewline(fn) {
|
||||
return function (assert, util) {
|
||||
['\n', '\r\n'].forEach(fn.bind(null, assert, util));
|
||||
}
|
||||
}
|
||||
|
||||
exports['test .add()'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null);
|
||||
|
||||
// Adding a string works.
|
||||
node.add('function noop() {}');
|
||||
|
||||
// Adding another source node works.
|
||||
node.add(new SourceNode(null, null, null));
|
||||
|
||||
// Adding an array works.
|
||||
node.add(['function foo() {',
|
||||
new SourceNode(null, null, null,
|
||||
'return 10;'),
|
||||
'}']);
|
||||
|
||||
// Adding other stuff doesn't.
|
||||
assert.throws(function () {
|
||||
node.add({});
|
||||
});
|
||||
assert.throws(function () {
|
||||
node.add(function () {});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test .prepend()'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null);
|
||||
|
||||
// Prepending a string works.
|
||||
node.prepend('function noop() {}');
|
||||
assert.equal(node.children[0], 'function noop() {}');
|
||||
assert.equal(node.children.length, 1);
|
||||
|
||||
// Prepending another source node works.
|
||||
node.prepend(new SourceNode(null, null, null));
|
||||
assert.equal(node.children[0], '');
|
||||
assert.equal(node.children[1], 'function noop() {}');
|
||||
assert.equal(node.children.length, 2);
|
||||
|
||||
// Prepending an array works.
|
||||
node.prepend(['function foo() {',
|
||||
new SourceNode(null, null, null,
|
||||
'return 10;'),
|
||||
'}']);
|
||||
assert.equal(node.children[0], 'function foo() {');
|
||||
assert.equal(node.children[1], 'return 10;');
|
||||
assert.equal(node.children[2], '}');
|
||||
assert.equal(node.children[3], '');
|
||||
assert.equal(node.children[4], 'function noop() {}');
|
||||
assert.equal(node.children.length, 5);
|
||||
|
||||
// Prepending other stuff doesn't.
|
||||
assert.throws(function () {
|
||||
node.prepend({});
|
||||
});
|
||||
assert.throws(function () {
|
||||
node.prepend(function () {});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test .toString()'] = function (assert, util) {
|
||||
assert.equal((new SourceNode(null, null, null,
|
||||
['function foo() {',
|
||||
new SourceNode(null, null, null, 'return 10;'),
|
||||
'}'])).toString(),
|
||||
'function foo() {return 10;}');
|
||||
};
|
||||
|
||||
exports['test .join()'] = function (assert, util) {
|
||||
assert.equal((new SourceNode(null, null, null,
|
||||
['a', 'b', 'c', 'd'])).join(', ').toString(),
|
||||
'a, b, c, d');
|
||||
};
|
||||
|
||||
exports['test .walk()'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {\n',
|
||||
' ', new SourceNode(1, 0, 'a.js', ['someCall()']), ';\n',
|
||||
' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';\n',
|
||||
'}());']);
|
||||
var expected = [
|
||||
{ str: '(function () {\n', source: null, line: null, column: null },
|
||||
{ str: ' ', source: null, line: null, column: null },
|
||||
{ str: 'someCall()', source: 'a.js', line: 1, column: 0 },
|
||||
{ str: ';\n', source: null, line: null, column: null },
|
||||
{ str: ' ', source: null, line: null, column: null },
|
||||
{ str: 'if (foo) bar()', source: 'b.js', line: 2, column: 0 },
|
||||
{ str: ';\n', source: null, line: null, column: null },
|
||||
{ str: '}());', source: null, line: null, column: null },
|
||||
];
|
||||
var i = 0;
|
||||
node.walk(function (chunk, loc) {
|
||||
assert.equal(expected[i].str, chunk);
|
||||
assert.equal(expected[i].source, loc.source);
|
||||
assert.equal(expected[i].line, loc.line);
|
||||
assert.equal(expected[i].column, loc.column);
|
||||
i++;
|
||||
});
|
||||
};
|
||||
|
||||
exports['test .replaceRight'] = function (assert, util) {
|
||||
var node;
|
||||
|
||||
// Not nested
|
||||
node = new SourceNode(null, null, null, 'hello world');
|
||||
node.replaceRight(/world/, 'universe');
|
||||
assert.equal(node.toString(), 'hello universe');
|
||||
|
||||
// Nested
|
||||
node = new SourceNode(null, null, null,
|
||||
[new SourceNode(null, null, null, 'hey sexy mama, '),
|
||||
new SourceNode(null, null, null, 'want to kill all humans?')]);
|
||||
node.replaceRight(/kill all humans/, 'watch Futurama');
|
||||
assert.equal(node.toString(), 'hey sexy mama, want to watch Futurama?');
|
||||
};
|
||||
|
||||
exports['test .toStringWithSourceMap()'] = forEachNewline(function (assert, util, nl) {
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {' + nl,
|
||||
' ',
|
||||
new SourceNode(1, 0, 'a.js', 'someCall', 'originalCall'),
|
||||
new SourceNode(1, 8, 'a.js', '()'),
|
||||
';' + nl,
|
||||
' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';' + nl,
|
||||
'}());']);
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(result.code, [
|
||||
'(function () {',
|
||||
' someCall();',
|
||||
' if (foo) bar();',
|
||||
'}());'
|
||||
].join(nl));
|
||||
|
||||
var map = result.map;
|
||||
var mapWithoutOptions = node.toStringWithSourceMap().map;
|
||||
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
assert.ok(mapWithoutOptions instanceof SourceMapGenerator, 'mapWithoutOptions instanceof SourceMapGenerator');
|
||||
mapWithoutOptions._file = 'foo.js';
|
||||
util.assertEqualMaps(assert, map.toJSON(), mapWithoutOptions.toJSON());
|
||||
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var actual;
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(actual.source, null);
|
||||
assert.equal(actual.line, null);
|
||||
assert.equal(actual.column, null);
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(actual.source, 'a.js');
|
||||
assert.equal(actual.line, 1);
|
||||
assert.equal(actual.column, 0);
|
||||
assert.equal(actual.name, 'originalCall');
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 3,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(actual.source, 'b.js');
|
||||
assert.equal(actual.line, 2);
|
||||
assert.equal(actual.column, 0);
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 3,
|
||||
column: 16
|
||||
});
|
||||
assert.equal(actual.source, null);
|
||||
assert.equal(actual.line, null);
|
||||
assert.equal(actual.column, null);
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 4,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(actual.source, null);
|
||||
assert.equal(actual.line, null);
|
||||
assert.equal(actual.column, null);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap()'] = forEachNewline(function (assert, util, nl) {
|
||||
var testCode = util.testGeneratedCode.replace(/\n/g, nl);
|
||||
var node = SourceNode.fromStringWithSourceMap(
|
||||
testCode,
|
||||
new SourceMapConsumer(util.testMap));
|
||||
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'min.js'
|
||||
});
|
||||
var map = result.map;
|
||||
var code = result.code;
|
||||
|
||||
assert.equal(code, testCode);
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = map.toJSON();
|
||||
assert.equal(map.version, util.testMap.version);
|
||||
assert.equal(map.file, util.testMap.file);
|
||||
assert.equal(map.mappings, util.testMap.mappings);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap() empty map'] = forEachNewline(function (assert, util, nl) {
|
||||
var node = SourceNode.fromStringWithSourceMap(
|
||||
util.testGeneratedCode.replace(/\n/g, nl),
|
||||
new SourceMapConsumer(util.emptyMap));
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'min.js'
|
||||
});
|
||||
var map = result.map;
|
||||
var code = result.code;
|
||||
|
||||
assert.equal(code, util.testGeneratedCode.replace(/\n/g, nl));
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = map.toJSON();
|
||||
assert.equal(map.version, util.emptyMap.version);
|
||||
assert.equal(map.file, util.emptyMap.file);
|
||||
assert.equal(map.mappings.length, util.emptyMap.mappings.length);
|
||||
assert.equal(map.mappings, util.emptyMap.mappings);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap() complex version'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
"(function() {" + nl,
|
||||
" var Test = {};" + nl,
|
||||
" ", new SourceNode(1, 0, "a.js", "Test.A = { value: 1234 };" + nl),
|
||||
" ", new SourceNode(2, 0, "a.js", "Test.A.x = 'xyz';"), nl,
|
||||
"}());" + nl,
|
||||
"/* Generated Source */"]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
var node = SourceNode.fromStringWithSourceMap(
|
||||
input.code,
|
||||
new SourceMapConsumer(input.map.toString()));
|
||||
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
var map = result.map;
|
||||
var code = result.code;
|
||||
|
||||
assert.equal(code, input.code);
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = map.toJSON();
|
||||
var inputMap = input.map.toJSON();
|
||||
util.assertEqualMaps(assert, map, inputMap);
|
||||
});
|
||||
|
||||
exports['test .toStringWithSourceMap() merging duplicate mappings'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
new SourceNode(1, 0, "a.js", "(function"),
|
||||
new SourceNode(1, 0, "a.js", "() {" + nl),
|
||||
" ",
|
||||
new SourceNode(1, 0, "a.js", "var Test = "),
|
||||
new SourceNode(1, 0, "b.js", "{};" + nl),
|
||||
new SourceNode(2, 0, "b.js", "Test"),
|
||||
new SourceNode(2, 0, "b.js", ".A", "A"),
|
||||
new SourceNode(2, 20, "b.js", " = { value: ", "A"),
|
||||
"1234",
|
||||
new SourceNode(2, 40, "b.js", " };" + nl, "A"),
|
||||
"}());" + nl,
|
||||
"/* Generated Source */"
|
||||
]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(input.code, [
|
||||
"(function() {",
|
||||
" var Test = {};",
|
||||
"Test.A = { value: 1234 };",
|
||||
"}());",
|
||||
"/* Generated Source */"
|
||||
].join(nl))
|
||||
|
||||
var correctMap = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 1, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
// There is no need for an empty mapping here,
|
||||
// because the mappings end at the end of the line
|
||||
correctMap.addMapping({
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 2, column: 13 },
|
||||
source: 'b.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 4 },
|
||||
source: 'b.js',
|
||||
name: 'A',
|
||||
original: { line: 2, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 6 },
|
||||
source: 'b.js',
|
||||
name: 'A',
|
||||
original: { line: 2, column: 20 }
|
||||
});
|
||||
// This empty mapping is required,
|
||||
// because there is a hole in the middle of the line
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 18 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 22 },
|
||||
source: 'b.js',
|
||||
name: 'A',
|
||||
original: { line: 2, column: 40 }
|
||||
});
|
||||
// There is no need for an empty mapping here,
|
||||
// because the mappings end at the end of the line
|
||||
|
||||
var inputMap = input.map.toJSON();
|
||||
correctMap = correctMap.toJSON();
|
||||
util.assertEqualMaps(assert, inputMap, correctMap);
|
||||
});
|
||||
|
||||
exports['test .toStringWithSourceMap() multi-line SourceNodes'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
new SourceNode(1, 0, "a.js", "(function() {" + nl + "var nextLine = 1;" + nl + "anotherLine();" + nl),
|
||||
new SourceNode(2, 2, "b.js", "Test.call(this, 123);" + nl),
|
||||
new SourceNode(2, 2, "b.js", "this['stuff'] = 'v';" + nl),
|
||||
new SourceNode(2, 2, "b.js", "anotherLine();" + nl),
|
||||
"/*" + nl + "Generated" + nl + "Source" + nl + "*/" + nl,
|
||||
new SourceNode(3, 4, "c.js", "anotherLine();" + nl),
|
||||
"/*" + nl + "Generated" + nl + "Source" + nl + "*/"
|
||||
]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(input.code, [
|
||||
"(function() {",
|
||||
"var nextLine = 1;",
|
||||
"anotherLine();",
|
||||
"Test.call(this, 123);",
|
||||
"this['stuff'] = 'v';",
|
||||
"anotherLine();",
|
||||
"/*",
|
||||
"Generated",
|
||||
"Source",
|
||||
"*/",
|
||||
"anotherLine();",
|
||||
"/*",
|
||||
"Generated",
|
||||
"Source",
|
||||
"*/"
|
||||
].join(nl));
|
||||
|
||||
var correctMap = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 1, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 2, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 4, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 2 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 5, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 2 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 6, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 2 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 11, column: 0 },
|
||||
source: 'c.js',
|
||||
original: { line: 3, column: 4 }
|
||||
});
|
||||
|
||||
var inputMap = input.map.toJSON();
|
||||
correctMap = correctMap.toJSON();
|
||||
util.assertEqualMaps(assert, inputMap, correctMap);
|
||||
});
|
||||
|
||||
exports['test .toStringWithSourceMap() with empty string'] = function (assert, util) {
|
||||
var node = new SourceNode(1, 0, 'empty.js', '');
|
||||
var result = node.toStringWithSourceMap();
|
||||
assert.equal(result.code, '');
|
||||
};
|
||||
|
||||
exports['test setSourceContent with toStringWithSourceMap'] = function (assert, util) {
|
||||
var aNode = new SourceNode(1, 1, 'a.js', 'a');
|
||||
aNode.setSourceContent('a.js', 'someContent');
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {\n',
|
||||
' ', aNode,
|
||||
' ', new SourceNode(1, 1, 'b.js', 'b'),
|
||||
'}());']);
|
||||
node.setSourceContent('b.js', 'otherContent');
|
||||
var map = node.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
}).map;
|
||||
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
assert.equal(map.sources.length, 2);
|
||||
assert.equal(map.sources[0], 'a.js');
|
||||
assert.equal(map.sources[1], 'b.js');
|
||||
assert.equal(map.sourcesContent.length, 2);
|
||||
assert.equal(map.sourcesContent[0], 'someContent');
|
||||
assert.equal(map.sourcesContent[1], 'otherContent');
|
||||
};
|
||||
|
||||
exports['test walkSourceContents'] = function (assert, util) {
|
||||
var aNode = new SourceNode(1, 1, 'a.js', 'a');
|
||||
aNode.setSourceContent('a.js', 'someContent');
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {\n',
|
||||
' ', aNode,
|
||||
' ', new SourceNode(1, 1, 'b.js', 'b'),
|
||||
'}());']);
|
||||
node.setSourceContent('b.js', 'otherContent');
|
||||
var results = [];
|
||||
node.walkSourceContents(function (sourceFile, sourceContent) {
|
||||
results.push([sourceFile, sourceContent]);
|
||||
});
|
||||
assert.equal(results.length, 2);
|
||||
assert.equal(results[0][0], 'a.js');
|
||||
assert.equal(results[0][1], 'someContent');
|
||||
assert.equal(results[1][0], 'b.js');
|
||||
assert.equal(results[1][1], 'otherContent');
|
||||
};
|
||||
});
|
||||
127
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/test-util.js
generated
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2014 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var libUtil = require('../../lib/source-map/util');
|
||||
|
||||
exports['test urls'] = function (assert, util) {
|
||||
var assertUrl = function (url) {
|
||||
assert.equal(url, libUtil.urlGenerate(libUtil.urlParse(url)));
|
||||
};
|
||||
assertUrl('http://');
|
||||
assertUrl('http://www.example.com');
|
||||
assertUrl('http://user:pass@www.example.com');
|
||||
assertUrl('http://www.example.com:80');
|
||||
assertUrl('http://www.example.com/');
|
||||
assertUrl('http://www.example.com/foo/bar');
|
||||
assertUrl('http://www.example.com/foo/bar/');
|
||||
assertUrl('http://user:pass@www.example.com:80/foo/bar/');
|
||||
|
||||
assertUrl('//');
|
||||
assertUrl('//www.example.com');
|
||||
assertUrl('file:///www.example.com');
|
||||
|
||||
assert.equal(libUtil.urlParse('a//b'), null);
|
||||
};
|
||||
|
||||
exports['test normalize()'] = function (assert, util) {
|
||||
assert.equal(libUtil.normalize('/..'), '/');
|
||||
assert.equal(libUtil.normalize('/../'), '/');
|
||||
assert.equal(libUtil.normalize('/../../../..'), '/');
|
||||
assert.equal(libUtil.normalize('/../../../../a/b/c'), '/a/b/c');
|
||||
assert.equal(libUtil.normalize('/a/b/c/../../../d/../../e'), '/e');
|
||||
|
||||
assert.equal(libUtil.normalize('..'), '..');
|
||||
assert.equal(libUtil.normalize('../'), '../');
|
||||
assert.equal(libUtil.normalize('../../a/'), '../../a/');
|
||||
assert.equal(libUtil.normalize('a/..'), '.');
|
||||
assert.equal(libUtil.normalize('a/../../..'), '../..');
|
||||
|
||||
assert.equal(libUtil.normalize('/.'), '/');
|
||||
assert.equal(libUtil.normalize('/./'), '/');
|
||||
assert.equal(libUtil.normalize('/./././.'), '/');
|
||||
assert.equal(libUtil.normalize('/././././a/b/c'), '/a/b/c');
|
||||
assert.equal(libUtil.normalize('/a/b/c/./././d/././e'), '/a/b/c/d/e');
|
||||
|
||||
assert.equal(libUtil.normalize('.'), '.');
|
||||
assert.equal(libUtil.normalize('./'), '.');
|
||||
assert.equal(libUtil.normalize('././a'), 'a');
|
||||
assert.equal(libUtil.normalize('a/./'), 'a/');
|
||||
assert.equal(libUtil.normalize('a/././.'), 'a');
|
||||
|
||||
assert.equal(libUtil.normalize('/a/b//c////d/////'), '/a/b/c/d/');
|
||||
assert.equal(libUtil.normalize('///a/b//c////d/////'), '///a/b/c/d/');
|
||||
assert.equal(libUtil.normalize('a/b//c////d'), 'a/b/c/d');
|
||||
|
||||
assert.equal(libUtil.normalize('.///.././../a/b//./..'), '../../a')
|
||||
|
||||
assert.equal(libUtil.normalize('http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.normalize('http://www.example.com/'), 'http://www.example.com/');
|
||||
assert.equal(libUtil.normalize('http://www.example.com/./..//a/b/c/.././d//'), 'http://www.example.com/a/b/d/');
|
||||
};
|
||||
|
||||
exports['test join()'] = function (assert, util) {
|
||||
assert.equal(libUtil.join('a', 'b'), 'a/b');
|
||||
assert.equal(libUtil.join('a/', 'b'), 'a/b');
|
||||
assert.equal(libUtil.join('a//', 'b'), 'a/b');
|
||||
assert.equal(libUtil.join('a', 'b/'), 'a/b/');
|
||||
assert.equal(libUtil.join('a', 'b//'), 'a/b/');
|
||||
assert.equal(libUtil.join('a/', '/b'), '/b');
|
||||
assert.equal(libUtil.join('a//', '//b'), '//b');
|
||||
|
||||
assert.equal(libUtil.join('a', '..'), '.');
|
||||
assert.equal(libUtil.join('a', '../b'), 'b');
|
||||
assert.equal(libUtil.join('a/b', '../c'), 'a/c');
|
||||
|
||||
assert.equal(libUtil.join('a', '.'), 'a');
|
||||
assert.equal(libUtil.join('a', './b'), 'a/b');
|
||||
assert.equal(libUtil.join('a/b', './c'), 'a/b/c');
|
||||
|
||||
assert.equal(libUtil.join('a', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('a', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', 'b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', 'b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'b/'), 'http://foo.org/a/b/');
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'b//'), 'http://foo.org/a/b/');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', '/b'), 'http://foo.org/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', '//b'), 'http://b');
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', '..'), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org/a', '../b'), 'http://foo.org/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a/b', '../c'), 'http://foo.org/a/c');
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', '.'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a', './b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a/b', './c'), 'http://foo.org/a/b/c');
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org', 'a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/', 'a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org//', 'a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org', '/a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/', '/a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org//', '/a'), 'http://foo.org/a');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('http://', 'www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('file:///', 'www.example.com'), 'file:///www.example.com');
|
||||
assert.equal(libUtil.join('http://', 'ftp://example.com'), 'ftp://example.com');
|
||||
|
||||
assert.equal(libUtil.join('http://www.example.com', '//foo.org/bar'), 'http://foo.org/bar');
|
||||
assert.equal(libUtil.join('//www.example.com', '//foo.org/bar'), '//foo.org/bar');
|
||||
};
|
||||
|
||||
});
|
||||
161
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/source-map/test/source-map/util.js
generated
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('../../lib/source-map/util');
|
||||
|
||||
// This is a test mapping which maps functions from two different files
|
||||
// (one.js and two.js) to a minified generated source.
|
||||
//
|
||||
// Here is one.js:
|
||||
//
|
||||
// ONE.foo = function (bar) {
|
||||
// return baz(bar);
|
||||
// };
|
||||
//
|
||||
// Here is two.js:
|
||||
//
|
||||
// TWO.inc = function (n) {
|
||||
// return n + 1;
|
||||
// };
|
||||
//
|
||||
// And here is the generated code (min.js):
|
||||
//
|
||||
// ONE.foo=function(a){return baz(a);};
|
||||
// TWO.inc=function(a){return a+1;};
|
||||
exports.testGeneratedCode = " ONE.foo=function(a){return baz(a);};\n"+
|
||||
" TWO.inc=function(a){return a+1;};";
|
||||
exports.testMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourceRoot: '/the/root',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.testMapWithSourcesContent = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourcesContent: [
|
||||
' ONE.foo = function (bar) {\n' +
|
||||
' return baz(bar);\n' +
|
||||
' };',
|
||||
' TWO.inc = function (n) {\n' +
|
||||
' return n + 1;\n' +
|
||||
' };'
|
||||
],
|
||||
sourceRoot: '/the/root',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.emptyMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: [],
|
||||
sources: [],
|
||||
mappings: ''
|
||||
};
|
||||
|
||||
|
||||
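// Asserts a single mapping in both directions: checks map.originalPositionFor()
// for the generated position and map.generatedPositionFor() for the original
// position, unless the corresponding dontTest* flag is set.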
function assertMapping(generatedLine, generatedColumn, originalSource,
|
||||
originalLine, originalColumn, name, map, assert,
|
||||
dontTestGenerated, dontTestOriginal) {
|
||||
if (!dontTestOriginal) {
|
||||
var origMapping = map.originalPositionFor({
|
||||
line: generatedLine,
|
||||
column: generatedColumn
|
||||
});
|
||||
assert.equal(origMapping.name, name,
|
||||
'Incorrect name, expected ' + JSON.stringify(name)
|
||||
+ ', got ' + JSON.stringify(origMapping.name));
|
||||
assert.equal(origMapping.line, originalLine,
|
||||
'Incorrect line, expected ' + JSON.stringify(originalLine)
|
||||
+ ', got ' + JSON.stringify(origMapping.line));
|
||||
assert.equal(origMapping.column, originalColumn,
|
||||
'Incorrect column, expected ' + JSON.stringify(originalColumn)
|
||||
+ ', got ' + JSON.stringify(origMapping.column));
|
||||
|
||||
var expectedSource;
|
||||
|
||||
if (originalSource && map.sourceRoot && originalSource.indexOf(map.sourceRoot) === 0) {
|
||||
expectedSource = originalSource;
|
||||
} else if (originalSource) {
|
||||
expectedSource = map.sourceRoot
|
||||
? util.join(map.sourceRoot, originalSource)
|
||||
: originalSource;
|
||||
} else {
|
||||
expectedSource = null;
|
||||
}
|
||||
|
||||
assert.equal(origMapping.source, expectedSource,
|
||||
'Incorrect source, expected ' + JSON.stringify(expectedSource)
|
||||
+ ', got ' + JSON.stringify(origMapping.source));
|
||||
}
|
||||
|
||||
if (!dontTestGenerated) {
|
||||
var genMapping = map.generatedPositionFor({
|
||||
source: originalSource,
|
||||
line: originalLine,
|
||||
column: originalColumn
|
||||
});
|
||||
assert.equal(genMapping.line, generatedLine,
|
||||
'Incorrect line, expected ' + JSON.stringify(generatedLine)
|
||||
+ ', got ' + JSON.stringify(genMapping.line));
|
||||
assert.equal(genMapping.column, generatedColumn,
|
||||
'Incorrect column, expected ' + JSON.stringify(generatedColumn)
|
||||
+ ', got ' + JSON.stringify(genMapping.column));
|
||||
}
|
||||
}
|
||||
exports.assertMapping = assertMapping;
|
||||
|
||||
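// Compares two source map objects field by field (version, file, names,
// sources, sourceRoot, mappings, sourcesContent) with descriptive messages.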
function assertEqualMaps(assert, actualMap, expectedMap) {
|
||||
assert.equal(actualMap.version, expectedMap.version, "version mismatch");
|
||||
assert.equal(actualMap.file, expectedMap.file, "file mismatch");
|
||||
assert.equal(actualMap.names.length,
|
||||
expectedMap.names.length,
|
||||
"names length mismatch: " +
|
||||
actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
|
||||
for (var i = 0; i < actualMap.names.length; i++) {
|
||||
assert.equal(actualMap.names[i],
|
||||
expectedMap.names[i],
|
||||
"names[" + i + "] mismatch: " +
|
||||
actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
|
||||
}
|
||||
assert.equal(actualMap.sources.length,
|
||||
expectedMap.sources.length,
|
||||
"sources length mismatch: " +
|
||||
actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
|
||||
for (var i = 0; i < actualMap.sources.length; i++) {
|
||||
assert.equal(actualMap.sources[i],
|
||||
expectedMap.sources[i],
|
||||
"sources[" + i + "] length mismatch: " +
|
||||
actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
|
||||
}
|
||||
assert.equal(actualMap.sourceRoot,
|
||||
expectedMap.sourceRoot,
|
||||
"sourceRoot mismatch: " +
|
||||
actualMap.sourceRoot + " != " + expectedMap.sourceRoot);
|
||||
assert.equal(actualMap.mappings, expectedMap.mappings,
|
||||
"mappings mismatch:\nActual: " + actualMap.mappings + "\nExpected: " + expectedMap.mappings);
|
||||
if (actualMap.sourcesContent) {
|
||||
assert.equal(actualMap.sourcesContent.length,
|
||||
expectedMap.sourcesContent.length,
|
||||
"sourcesContent length mismatch");
|
||||
for (var i = 0; i < actualMap.sourcesContent.length; i++) {
|
||||
assert.equal(actualMap.sourcesContent[i],
|
||||
expectedMap.sourcesContent[i],
|
||||
"sourcesContent[" + i + "] mismatch");
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.assertEqualMaps = assertEqualMaps;
|
||||
|
||||
});
|
||||
14
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/uglify-to-browserify/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
lib-cov
|
||||
*.seed
|
||||
*.log
|
||||
*.csv
|
||||
*.dat
|
||||
*.out
|
||||
*.pid
|
||||
*.gz
|
||||
pids
|
||||
logs
|
||||
results
|
||||
npm-debug.log
|
||||
node_modules
|
||||
/test/output.js
|
||||
3
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/uglify-to-browserify/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- "0.10"
|
||||
19
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/uglify-to-browserify/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
Copyright (c) 2013 Forbes Lindesay
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
15
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/uglify-to-browserify/README.md
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
# uglify-to-browserify
|
||||
|
||||
A transform to make UglifyJS work in browserify.
|
||||
|
||||
[Build Status](https://travis-ci.org/ForbesLindesay/uglify-to-browserify)
|
||||
[Dependency Status](https://gemnasium.com/ForbesLindesay/uglify-to-browserify)
|
||||
[NPM version](http://badge.fury.io/js/uglify-to-browserify)
|
||||
|
||||
## Installation
|
||||
|
||||
npm install uglify-to-browserify
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
49
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/uglify-to-browserify/index.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
'use strict'
|
||||
|
||||
var fs = require('fs')
|
||||
var PassThrough = require('stream').PassThrough
|
||||
var Transform = require('stream').Transform
|
||||
|
||||
if (typeof Transform === 'undefined') {
|
||||
throw new Error('UglifyJS only supports browserify when using node >= 0.10.x')
|
||||
}
|
||||
|
||||
var cache = {}
|
||||
module.exports = transform
|
||||
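// Browserify transform: for uglify-js's tools/node.js it replaces the module
// body with a self-contained, concatenated UglifyJS source (cached per file);
// every other file is passed through unchanged.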
function transform(file) {
|
||||
if (!/tools\/node\.js$/.test(file.replace(/\\/g,'/'))) return new PassThrough();
|
||||
if (cache[file]) return makeStream(cache[file])
|
||||
var uglify = require(file)
|
||||
var src = 'var sys = require("util");\nvar MOZ_SourceMap = require("source-map");\nvar UglifyJS = exports;\n' + uglify.FILES.map(function (path) { return fs.readFileSync(path, 'utf8') }).join('\n')
|
||||
|
||||
var ast = uglify.parse(src)
|
||||
ast.figure_out_scope()
|
||||
|
||||
var variables = ast.variables
|
||||
.map(function (node, name) {
|
||||
return name
|
||||
})
|
||||
|
||||
src += '\n\n' + variables.map(function (v) { return 'exports.' + v + ' = ' + v + ';' }).join('\n') + '\n\n'
|
||||
|
||||
src += 'exports.AST_Node.warn_function = function (txt) { if (typeof console != "undefined" && typeof console.warn === "function") console.warn(txt) }\n\n'
|
||||
|
||||
src += 'exports.minify = ' + uglify.minify.toString() + ';\n\n'
|
||||
src += 'exports.describe_ast = ' + uglify.describe_ast.toString() + ';'
|
||||
|
||||
// TODO: remove once https://github.com/substack/node-browserify/issues/631 is resolved
|
||||
src = src.replace(/"for"/g, '"fo" + "r"')
|
||||
|
||||
cache[file] = src
|
||||
return makeStream(src);
|
||||
}
|
||||
|
||||
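// Returns a Transform stream that ignores its input and emits `src` once the
// stream is flushed.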
function makeStream(src) {
|
||||
var res = new Transform();
|
||||
res._transform = function (chunk, encoding, callback) { callback() }
|
||||
res._flush = function (callback) {
|
||||
res.push(src)
|
||||
callback()
|
||||
}
|
||||
return res;
|
||||
}
|
||||
29
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/uglify-to-browserify/package.json
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "uglify-to-browserify",
|
||||
"version": "1.0.2",
|
||||
"description": "A transform to make UglifyJS work in browserify.",
|
||||
"keywords": [],
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"uglify-js": "~2.4.0",
|
||||
"source-map": "~0.1.27"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "node test/index.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/ForbesLindesay/uglify-to-browserify.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "ForbesLindesay"
|
||||
},
|
||||
"license": "MIT",
|
||||
"readme": "# uglify-to-browserify\r\n\r\nA transform to make UglifyJS work in browserify.\r\n\r\n[](https://travis-ci.org/ForbesLindesay/uglify-to-browserify)\r\n[](https://gemnasium.com/ForbesLindesay/uglify-to-browserify)\r\n[](http://badge.fury.io/js/uglify-to-browserify)\r\n\r\n## Installation\r\n\r\n npm install uglify-to-browserify\r\n\r\n## License\r\n\r\n MIT",
|
||||
"readmeFilename": "README.md",
|
||||
"bugs": {
|
||||
"url": "https://github.com/ForbesLindesay/uglify-to-browserify/issues"
|
||||
},
|
||||
"_id": "uglify-to-browserify@1.0.2",
|
||||
"_from": "uglify-to-browserify@~1.0.0"
|
||||
}
|
||||
22
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/uglify-to-browserify/test/index.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
var fs = require('fs')
|
||||
var br = require('../')
|
||||
var test = fs.readFileSync(require.resolve('uglify-js/test/run-tests.js'), 'utf8')
|
||||
.replace(/^#.*\n/, '')
|
||||
|
||||
var transform = br(require.resolve('uglify-js'))
|
||||
transform.pipe(fs.createWriteStream(__dirname + '/output.js'))
|
||||
.on('close', function () {
|
||||
Function('module,require', test)({
|
||||
filename: require.resolve('uglify-js/test/run-tests.js')
|
||||
},
|
||||
function (name) {
|
||||
if (name === '../tools/node') {
|
||||
return require('./output.js')
|
||||
} else if (/^[a-z]+$/.test(name)) {
|
||||
return require(name)
|
||||
} else {
|
||||
throw new Error('I didn\'t expect you to require ' + name)
|
||||
}
|
||||
})
|
||||
})
|
||||
transform.end(fs.readFileSync(require.resolve('uglify-js'), 'utf8'))
|
||||
21
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
Copyright 2010 James Halliday (mail@substack.net)
|
||||
|
||||
This project is free software released under the MIT/X11 license:
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
901
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/README.md
generated
vendored
Normal file
@@ -0,0 +1,901 @@
|
||||
yargs
|
||||
========
|
||||
|
||||
Yargs be a node.js library fer hearties tryin' ter parse optstrings.
|
||||
|
||||
With yargs, ye be havin' a map that leads straight to yer treasure! Treasure of course, being a simple option hash.
|
||||
|
||||
[Build Status](https://travis-ci.org/bcoe/yargs)
|
||||
[Dependency Status](https://gemnasium.com/bcoe/yargs)
|
||||
[Coverage Status](https://coveralls.io/r/bcoe/yargs?branch=)
|
||||
[NPM version](https://www.npmjs.com/package/yargs)
|
||||
|
||||
> Yargs is the official successor to optimist. Please feel free to submit issues and pull requests. If you'd like to contribute and don't know where to start, have a look at [the issue list](https://github.com/bcoe/yargs/issues) :)
|
||||
|
||||
examples
|
||||
========
|
||||
|
||||
With yargs, the options be just a hash!
|
||||
-------------------------------------------------------------------
|
||||
|
||||
xup.js:
|
||||
|
||||
````javascript
|
||||
#!/usr/bin/env node
|
||||
var argv = require('yargs').argv;
|
||||
|
||||
if (argv.rif - 5 * argv.xup > 7.138) {
|
||||
console.log('Plunder more riffiwobbles!');
|
||||
}
|
||||
else {
|
||||
console.log('Drop the xupptumblers!');
|
||||
}
|
||||
````
|
||||
|
||||
***
|
||||
|
||||
$ ./xup.js --rif=55 --xup=9.52
|
||||
Plunder more riffiwobbles!
|
||||
|
||||
$ ./xup.js --rif 12 --xup 8.1
|
||||
Drop the xupptumblers!
|
||||
|
||||

|
||||
|
||||
But don't walk the plank just yet! There be more! You can do short options:
|
||||
-------------------------------------------------
|
||||
|
||||
short.js:
|
||||
|
||||
````javascript
|
||||
#!/usr/bin/env node
|
||||
var argv = require('yargs').argv;
|
||||
console.log('(%d,%d)', argv.x, argv.y);
|
||||
````
|
||||
|
||||
***
|
||||
|
||||
$ ./short.js -x 10 -y 21
|
||||
(10,21)
|
||||
|
||||
And booleans, both long, short, and even grouped:
|
||||
----------------------------------
|
||||
|
||||
bool.js:
|
||||
|
||||
````javascript
|
||||
#!/usr/bin/env node
|
||||
var util = require('util');
|
||||
var argv = require('yargs').argv;
|
||||
|
||||
if (argv.s) {
|
||||
util.print(argv.fr ? 'Le perroquet dit: ' : 'The parrot says: ');
|
||||
}
|
||||
console.log(
|
||||
(argv.fr ? 'couac' : 'squawk') + (argv.p ? '!' : '')
|
||||
);
|
||||
````
|
||||
|
||||
***
|
||||
|
||||
$ ./bool.js -s
|
||||
The parrot says: squawk
|
||||
|
||||
$ ./bool.js -sp
|
||||
The parrot says: squawk!
|
||||
|
||||
$ ./bool.js -sp --fr
|
||||
Le perroquet dit: couac!
|
||||
|
||||
And non-hyphenated options too! Just use `argv._`!
|
||||
-------------------------------------------------
|
||||
|
||||
nonopt.js:
|
||||
|
||||
````javascript
|
||||
#!/usr/bin/env node
|
||||
var argv = require('yargs').argv;
|
||||
console.log('(%d,%d)', argv.x, argv.y);
|
||||
console.log(argv._);
|
||||
````
|
||||
|
||||
***
|
||||
|
||||
$ ./nonopt.js -x 6.82 -y 3.35 rum
|
||||
(6.82,3.35)
|
||||
[ 'rum' ]
|
||||
|
||||
$ ./nonopt.js "me hearties" -x 0.54 yo -y 1.12 ho
|
||||
(0.54,1.12)
|
||||
[ 'me hearties', 'yo', 'ho' ]
|
||||
|
||||
Yargs even counts your booleans!
|
||||
----------------------------------------------------------------------
|
||||
|
||||
count.js
|
||||
|
||||
````javascript
|
||||
#!/usr/bin/env node
|
||||
var argv = require('yargs')
|
||||
.count('verbose')
|
||||
.alias('v', 'verbose')
|
||||
.argv;
|
||||
|
||||
VERBOSE_LEVEL = argv.verbose;
|
||||
|
||||
function WARN() { VERBOSE_LEVEL >= 0 && console.log.apply(console, arguments); }
|
||||
function INFO() { VERBOSE_LEVEL >= 1 && console.log.apply(console, arguments); }
|
||||
function DEBUG() { VERBOSE_LEVEL >= 2 && console.log.apply(console, arguments); }
|
||||
|
||||
WARN("Showing only important stuff");
|
||||
INFO("Showing semi-mportant stuff too");
|
||||
DEBUG("Extra chatty mode");
|
||||
````
|
||||
|
||||
***
|
||||
$ node count.js
|
||||
Showing only important stuff
|
||||
|
||||
$ node count.js -v
|
||||
Showing only important stuff
|
||||
Showing semi-important stuff too
|
||||
|
||||
$ node count.js -vv
|
||||
Showing only important stuff
|
||||
Showing semi-important stuff too
|
||||
Extra chatty mode
|
||||
|
||||
$ node count.js -v --verbose
|
||||
Showing only important stuff
|
||||
Showing semi-important stuff too
|
||||
Extra chatty mode
|
||||
|
||||
Tell users how to use yer options and make demands.
-------------------------------------------------

divide.js:

````javascript
#!/usr/bin/env node
var argv = require('yargs')
    .usage('Usage: $0 -x [num] -y [num]')
    .demand(['x','y'])
    .argv;

console.log(argv.x / argv.y);
````

***

    $ ./divide.js -x 55 -y 11
    5

    $ node ./divide.js -x 4.91 -z 2.51
    Usage: node ./divide.js -x [num] -y [num]

    Options:
      -x  [required]
      -y  [required]

    Missing required arguments: y

After yer demands have been met, demand more! Ask for non-hyphenated arguments!
-----------------------------------------

demand_count.js:

````javascript
#!/usr/bin/env node
var argv = require('yargs')
    .demand(2)
    .argv;
console.dir(argv);
````

***

    $ ./demand_count.js a
    Not enough arguments, expected 2, but only found 1
    $ ./demand_count.js a b
    { _: [ 'a', 'b' ], '$0': 'node ./demand_count.js' }
    $ ./demand_count.js a b c
    { _: [ 'a', 'b', 'c' ], '$0': 'node ./demand_count.js' }

EVEN MORE SHIVER ME TIMBERS!
------------------

default_singles.js:

````javascript
#!/usr/bin/env node
var argv = require('yargs')
    .default('x', 10)
    .default('y', 10)
    .argv
;
console.log(argv.x + argv.y);
````

***

    $ ./default_singles.js -x 5
    15

default_hash.js:

````javascript
#!/usr/bin/env node
var argv = require('yargs')
    .default({ x : 10, y : 10 })
    .argv
;
console.log(argv.x + argv.y);
````

***

    $ ./default_hash.js -y 7
    17

And if you really want to get all descriptive about it...
---------------------------------------------------------

boolean_single.js:

````javascript
#!/usr/bin/env node
var argv = require('yargs')
    .boolean('v')
    .argv
;
console.dir(argv.v);
console.dir(argv._);
````

***

    $ ./boolean_single.js -v "me hearties" yo ho
    true
    [ 'me hearties', 'yo', 'ho' ]


boolean_double.js:

````javascript
#!/usr/bin/env node
var argv = require('yargs')
    .boolean(['x','y','z'])
    .argv
;
console.dir([ argv.x, argv.y, argv.z ]);
console.dir(argv._);
````

***

    $ ./boolean_double.js -x -z one two three
    [ true, false, true ]
    [ 'one', 'two', 'three' ]

Yargs is here to help you...
---------------------------

Ye can describe parameters fer help messages and set aliases. Yargs figures
out how ter format a handy help string automatically.

line_count.js:

````javascript
#!/usr/bin/env node
var argv = require('yargs')
    .usage('Usage: $0 <command> [options]')
    .command('count', 'Count the lines in a file')
    .demand(1)
    .example('$0 count -f foo.js', 'count the lines in the given file')
    .demand('f')
    .alias('f', 'file')
    .nargs('f', 1)
    .describe('f', 'Load a file')
    .help('h')
    .alias('h', 'help')
    .epilog('copyright 2015')
    .argv;

var fs = require('fs');
var s = fs.createReadStream(argv.file);

var lines = 0;
s.on('data', function (buf) {
    lines += buf.toString().match(/\n/g).length;
});

s.on('end', function () {
    console.log(lines);
});
````

***

    $ node line_count.js count
    Usage: node test.js <command> [options]

    Commands:
      count    Count the lines in a file

    Options:
      -f, --file  Load a file    [required]
      -h, --help  Show help

    Examples:
      node test.js count -f foo.js    count the lines in the given file

    copyright 2015

    Missing required arguments: f

    $ node line_count.js count --file line_count.js
    20

    $ node line_count.js count -f line_count.js
    20

methods
=======

By itself,

````javascript
require('yargs').argv
````

will use the `process.argv` array to construct the `argv` object.

You can pass in the `process.argv` yourself:

````javascript
require('yargs')([ '-x', '1', '-y', '2' ]).argv
````

or use `.parse()` to do the same thing:

````javascript
require('yargs').parse([ '-x', '1', '-y', '2' ])
````

The rest of these methods below come in just before the terminating `.argv`.

.alias(key, alias)
------------------

Set key names as equivalent such that updates to a key will propagate to aliases
and vice-versa.

Optionally `.alias()` can take an object that maps keys to aliases.
Each key of this object should be the canonical version of the option, and each
value should be a string or an array of strings.

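For instance, a small sketch (option names invented for illustration) showing both the pair form and the object form:

```js
var argv = require('yargs')
    .alias('f', 'file')                   // -f and --file update together
    .alias({ v: ['verbose', 'loud'] })    // object form: canonical key -> alias(es)
    .argv;

// --file foo.txt also populates argv.f; -v also sets argv.verbose and argv.loud.
console.log(argv.file, argv.f);
```
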
.default(key, value, [description])
--------------------

Set `argv[key]` to `value` if no option was specified on `process.argv`.

Optionally `.default()` can take an object that maps keys to default values.

But wait, there's more! The default value can be a `function` which returns
a value. The name of the function will be used in the usage string:

```js
var argv = require('yargs')
    .default('random', function randomValue() {
        return Math.random() * 256;
    }).argv;
```

Optionally, `description` can also be provided and will take precedence over
displaying the value in the usage instructions:

```js
.default('timeout', 60000, '(one-minute)');
```

.demand(key, [msg | boolean])
-----------------------------
.require(key, [msg | boolean])
------------------------------
.required(key, [msg | boolean])
-------------------------------

If `key` is a string, show the usage information and exit if `key` wasn't
specified in `process.argv`.

If `key` is a number, demand at least as many non-option arguments, which show
up in `argv._`.

If `key` is an Array, demand each element.

If a `msg` string is given, it will be printed when the argument is missing,
instead of the standard error message. This is especially helpful for the non-option arguments in `argv._`.

If a `boolean` value is given, it controls whether the option is demanded;
this is useful when using `.options()` to specify command line parameters.

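As a quick, hypothetical illustration, the three forms can be mixed freely:

```js
var argv = require('yargs')
    .demand(1, 'must pass at least one non-option argument')  // number, with a custom message
    .demand('outfile')                                        // string: --outfile is required
    .demand(['user', 'pass'])                                 // array: demand each element
    .argv;
```
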
.requiresArg(key)
-----------------

Specifies either a single option key (string), or an array of options that
must be followed by option values. If any option value is missing, show the
usage information and exit.

The default behaviour is to set the value of any key not followed by an
option value to `true`.

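A small sketch (hypothetical option names); with `.requiresArg()`, a bare `--outfile` becomes an error instead of `true`:

```js
var argv = require('yargs')
    .requiresArg('outfile')             // a single key...
    .requiresArg(['infile', 'level'])   // ...or an array of keys
    .argv;

// `node app.js --outfile` now prints the usage information and exits
// instead of setting argv.outfile = true.
```
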
.implies(x, y)
--------------

Given the key `x` is set, it is required that the key `y` is set.

`.implies()` can also accept an object specifying multiple implications.

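A minimal sketch of both forms (the option names are made up):

```js
var argv = require('yargs')
    .implies('ssl', 'cert')                      // --ssl requires --cert
    .implies({ min: 'max', retry: 'timeout' })   // object form: several implications at once
    .argv;
```
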
.describe(key, desc)
--------------------

Describe a `key` for the generated usage information.

Optionally `.describe()` can take an object that maps keys to descriptions.

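For example, with a couple of invented keys, both calling styles look like this:

```js
var argv = require('yargs')
    .describe('f', 'Load a file')
    .describe({ x: 'width in pixels', y: 'height in pixels' })
    .argv;
```
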
.option(key, opt)
-----------------
.options(key, opt)
------------------

Instead of chaining together `.alias().demand().default().describe().string()`, you can specify
keys in `opt` for each of the chainable methods.

For example:

````javascript
var argv = require('yargs')
    .option('f', {
        alias : 'file',
        demand: true,
        default: '/etc/passwd',
        describe: 'x marks the spot',
        type: 'string'
    })
    .argv
;
````

is the same as

````javascript
var argv = require('yargs')
    .alias('f', 'file')
    .demand('f')
    .default('f', '/etc/passwd')
    .describe('f', 'x marks the spot')
    .string('f')
    .argv
;
````

Optionally `.options()` can take an object that maps keys to `opt` parameters.

````javascript
var argv = require('yargs')
    .options({
        'f': {
            alias: 'file',
            demand: true,
            default: '/etc/passwd',
            describe: 'x marks the spot',
            type: 'string'
        }
    })
    .argv
;
````

.usage(message, opts)
---------------------

Set a usage message to show which commands to use. Inside `message`, the string
`$0` will get interpolated to the current script name or node command for the
present script similar to how `$0` works in bash or perl.

`opts` is optional and acts like calling `.options(opts)`.

.command(cmd, desc)
-------------------

Document the commands exposed by your application (stored in the `_` variable).

As an example, here's how the npm cli might document some of its commands:

```js
var argv = require('yargs')
  .usage('npm <command>')
  .command('install', 'tis a mighty fine package to install')
  .command('publish', 'shiver me timbers, should you be sharing all that')
  .argv;
```

.example(cmd, desc)
-------------------

Give some example invocations of your program. Inside `cmd`, the string
`$0` will get interpolated to the current script name or node command for the
present script similar to how `$0` works in bash or perl.
Examples will be printed out as part of the help message.

.epilogue(str)
--------------
.epilog(str)
------------

A message to print at the end of the usage instructions, e.g.,

```js
var argv = require('yargs')
  .epilogue('for more information, find our manual at http://example.com');
```

.check(fn)
----------

Check that certain conditions are met in the provided arguments.

`fn` is called with two arguments, the parsed `argv` hash and an array of options and their aliases.

If `fn` throws or returns a non-truthy value, show the thrown error, usage information, and
exit.

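A short sketch of a custom check (the `--port` option is just an example):

```js
var argv = require('yargs')
    .check(function (argv, aliases) {
        // throw (or return a non-truthy value) to reject the arguments and show usage.
        if (argv.port < 1 || argv.port > 65535) {
            throw new Error('--port must be between 1 and 65535');
        }
        return true;
    })
    .argv;
```
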
.fail(fn)
---------

Method to execute when a failure occurs, rather than printing the failure message.

`fn` is called with the failure message that would have been printed.

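For instance, a minimal sketch that reports the failure its own way and exits:

```js
var argv = require('yargs')
    .demand('f')
    .fail(function (msg) {
        // called with the message that would otherwise have been printed.
        console.error('Arrr, ye be missing something: ' + msg);
        process.exit(1);
    })
    .argv;
```
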
.boolean(key)
-------------

Interpret `key` as a boolean. If a non-flag option follows `key` in
`process.argv`, that string won't get set as the value of `key`.

`key` will default to `false`, unless a `.default(key, undefined)` is
explicitly set.

If `key` is an Array, interpret all the elements as booleans.

.string(key)
------------

Tell the parser logic not to interpret `key` as a number or boolean.
This can be useful if you need to preserve leading zeros in an input.

If `key` is an Array, interpret all the elements as strings.

`.string('_')` will result in non-hyphenated arguments being interpreted as strings,
regardless of whether they resemble numbers.

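A quick sketch of the leading-zero case (the `--zipcode` option is invented for the example):

```js
var argv = require('yargs')
    .string('zipcode')
    .parse(['--zipcode', '02134']);

console.log(argv.zipcode); // '02134' as a string; without .string() it parses as the number 2134
```
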
.array(key)
----------

Tell the parser to interpret `key` as an array. If `.array('foo')` is set,
`--foo bar` will be parsed as `['bar']` rather than as `'bar'`.

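For example:

```js
var argv = require('yargs')
    .array('foo')
    .parse(['--foo', 'bar']);

console.log(argv.foo); // [ 'bar' ] rather than 'bar'
```
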
.nargs(key, count)
-----------

The number of arguments that should be consumed after a key. This can be a
useful hint to prevent parsing ambiguity:

```js
var argv = require('yargs')
    .nargs('token', 1)
    .parse(['--token', '-my-token']);
```

parses as:

`{ _: [], token: '-my-token', '$0': 'node test' }`

Optionally `.nargs()` can take an object of `key`/`narg` pairs.

.config(key)
------------

Tells the parser to interpret `key` as a path to a JSON config file. The file
is loaded and parsed, and its properties are set as arguments.

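A minimal sketch, assuming a `settings.json` file sitting next to the script:

```js
// settings.json: { "foo": "bar", "retries": 3 }
var argv = require('yargs')
    .config('settings')
    .argv;

// `node app.js --settings settings.json` sets argv.foo = 'bar' and argv.retries = 3;
// options passed on the command line still take precedence over values from the file.
```
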
.wrap(columns)
--------------

Format usage output to wrap at `columns` many columns.

By default wrap will be set to `Math.min(80, windowWidth)`. Use `.wrap(null)` to
specify no column limit.

.strict()
---------

Any command-line argument given that is not demanded, or does not have a
corresponding description, will be reported as an error.

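A small sketch (option names invented); with `.strict()`, stray flags become errors instead of being silently accepted:

```js
var argv = require('yargs')
    .usage('Usage: $0 [options]')
    .describe('port', 'port to listen on')
    .strict()
    .argv;

// `node app.js --prot 8080` (a typo) is now reported as an unknown argument
// rather than quietly parsed into argv.prot.
```
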
.help([option, [description]])
------------------------------

Add an option (e.g., `--help`) that displays the usage string and exits the
process. If present, the `description` parameter customises the description of
the help option in the usage string.

If invoked without parameters, `.help()` returns the generated usage string.

Example:

```
var yargs = require("yargs")
  .usage("$0 -operand1 number -operand2 number -operation [add|subtract]");
console.log(yargs.help());
```

Later on, `argv` can be retrieved with `yargs.argv`.

.version(version, [option], [description])
----------------------------------------

Add an option (e.g., `--version`) that displays the version number (given by the
`version` parameter) and exits the process. If present, the `description`
parameter customizes the description of the version option in the usage string.

You can provide a `function` for version, rather than a string.
This is useful if you want to use the version from your package.json:

```js
var argv = require('yargs')
  .version(function() {
    return require('../package').version;
  })
  .argv;
```

.showHelpOnFail(enable, [message])
----------------------------------

By default, yargs outputs a usage string if any error is detected. Use the
`.showHelpOnFail` method to customize this behaviour. If `enable` is `false`,
the usage string is not output. If the `message` parameter is present, this
message is output after the error message.

line_count.js:

````javascript
#!/usr/bin/env node
var argv = require('yargs')
    .usage('Count the lines in a file.\nUsage: $0')
    .demand('f')
    .alias('f', 'file')
    .describe('f', 'Load a file')
    .showHelpOnFail(false, "Specify --help for available options")
    .argv;

// etc.
````

***

    $ node line_count.js --file
    Missing argument value: f

    Specify --help for available options

.showHelp(fn=console.error)
---------------------------

Print the usage data using `fn` for printing.

Example:

```
var yargs = require("yargs")
  .usage("$0 -operand1 number -operand2 number -operation [add|subtract]");
yargs.showHelp();
```

Later on, `argv` can be retrieved with `yargs.argv`.

.completion(cmd, [description], [fn])
-------------

Enable bash-completion shortcuts for commands and options.

`cmd`: when present in `argv._`, will result in the `.bashrc` completion script
being outputted. To enable bash completions, concat the generated script to your
`.bashrc` or `.bash_profile`.

`description`: provide a description in your usage instructions for the command
that generates bash completion scripts.

`fn`: rather than relying on yargs' default completion functionality, which
(shiver me timbers!) is pretty awesome, you can provide your own completion
method.

```js
var argv = require('yargs')
  .completion('completion', function(current, argv) {
    // 'current' is the current command being completed.
    // 'argv' is the parsed arguments so far.
    // simply return an array of completions.
    return [
      'foo',
      'bar'
    ];
  })
  .argv;
```

But wait, there's more! You can provide asynchronous completions.

```js
var argv = require('yargs')
  .completion('completion', function(current, argv, done) {
    setTimeout(function() {
      done([
        'apple',
        'banana'
      ]);
    }, 500);
  })
  .argv;
```

.showCompletionScript()
----------------------

Generate a bash completion script. Users of your application can install this
script in their `.bashrc`, and yargs will provide completion shortcuts for
commands and options.

.exitProcess(enable)
----------------------------------

By default, yargs exits the process when the user passes a help flag, uses the `.version` functionality, or when validation fails. Calling `.exitProcess(false)` disables this behavior, enabling further actions after yargs has run validation.

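A sketch of why you might turn this off, e.g. when embedding yargs in a long-running host process (that errors are thrown instead of exiting is an assumption based on this version's validation handling):

```js
var yargs = require('yargs')
    .usage('Usage: $0 --level [num]')
    .demand('level')
    .exitProcess(false);   // don't call process.exit() on help/version/validation failure

try {
    var argv = yargs.parse(['--wrong-flag']);
} catch (err) {
    // with exitProcess disabled, a validation failure surfaces here instead of
    // terminating the process, so the host application can decide what to do next.
    console.error('could not parse arguments: ' + err.message);
}
```
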
.parse(args)
------------

Parse `args` instead of `process.argv`. Returns the `argv` object.

.reset()
--------

Reset the argument object built up so far. This is useful for
creating nested command line interfaces.

```js
var yargs = require('yargs')
      .usage('$0 command')
      .command('hello', 'hello command')
      .command('world', 'world command')
      .demand(1, 'must provide a valid command'),
    argv = yargs.argv,
    command = argv._[0];

if (command === 'hello') {
  yargs.reset()
    .usage('$0 hello')
    .help('h')
    .example('$0 hello', 'print the hello message!')
    .argv

  console.log('hello!');
} else if (command === 'world') {
  yargs.reset()
    .usage('$0 world')
    .help('h')
    .example('$0 world', 'print the world message!')
    .argv

  console.log('world!');
} else {
  yargs.showHelp();
}
```

.argv
-----

Get the arguments as a plain old object.

Arguments without a corresponding flag show up in the `argv._` array.

The script name or node command is available at `argv.$0` similarly to how `$0`
works in bash or perl.

parsing tricks
==============

stop parsing
------------

Use `--` to stop parsing flags and stuff the remainder into `argv._`.

    $ node examples/reflect.js -a 1 -b 2 -- -c 3 -d 4
    { _: [ '-c', '3', '-d', '4' ],
      '$0': 'node ./examples/reflect.js',
      a: 1,
      b: 2 }

negate fields
-------------

If you want to explicitly set a field to false, instead of just leaving it
undefined or to override a default, you can do `--no-key`.

    $ node examples/reflect.js -a --no-b
    { _: [],
      '$0': 'node ./examples/reflect.js',
      a: true,
      b: false }

numbers
-------

Every argument that looks like a number (`!isNaN(Number(arg))`) is converted to
one. This way you can just `net.createConnection(argv.port)` and you can add
numbers out of `argv` with `+` without having that mean concatenation,
which is super frustrating.

duplicates
----------

If you specify a flag multiple times it will get turned into an array containing
all the values in order.

    $ node examples/reflect.js -x 5 -x 8 -x 0
    { _: [],
      '$0': 'node ./examples/reflect.js',
      x: [ 5, 8, 0 ] }

dot notation
------------

When you use dots (`.`s) in argument names, an implicit object path is assumed.
This lets you organize arguments into nested objects.

    $ node examples/reflect.js --foo.bar.baz=33 --foo.quux=5
    { _: [],
      '$0': 'node ./examples/reflect.js',
      foo: { bar: { baz: 33 }, quux: 5 } }

short numbers
-------------

Short numeric `head -n5` style arguments work too:

    $ node reflect.js -n123 -m456
    { '3': true,
      '6': true,
      _: [],
      '$0': 'node ./reflect.js',
      n: 123,
      m: 456 }

installation
============

With [npm](http://github.com/isaacs/npm), just do:

    npm install yargs

or clone this project on github:

    git clone http://github.com/bcoe/yargs.git

To run the tests with npm, just do:

    npm test

inspired by
===========

This module is loosely inspired by Perl's
[Getopt::Casual](http://search.cpan.org/~photo/Getopt-Casual-0.13.1/Casual.pm).
22
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/completion.sh.hbs
generated
vendored
Normal file
22
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/completion.sh.hbs
generated
vendored
Normal file
@@ -0,0 +1,22 @@
###-begin-{{app_name}}-completions-###
#
# yargs command completion script
#
# Installation: {{app_path}} completion >> ~/.bashrc
# or {{app_path}} completion >> ~/.bash_profile on OSX.
#
_yargs_completions()
{
    local cur_word args type_list

    cur_word="${COMP_WORDS[COMP_CWORD]}"
    args=$(printf "%s " "${COMP_WORDS[@]}")

    # ask yargs to generate completions.
    type_list=`{{app_path}} --get-yargs-completions $args`

    COMPREPLY=( $(compgen -W "${type_list}" -- ${cur_word}) )
    return 0
}
complete -F _yargs_completions {{app_name}}
###-end-{{app_name}}-completions-###
489
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/index.js
generated
vendored
Normal file
489
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/index.js
generated
vendored
Normal file
@@ -0,0 +1,489 @@
|
||||
var assert = require('assert'),
|
||||
path = require('path'),
|
||||
Completion = require('./lib/completion'),
|
||||
Parser = require('./lib/parser'),
|
||||
Usage = require('./lib/usage'),
|
||||
Validation = require('./lib/validation');
|
||||
|
||||
Argv(process.argv.slice(2));
|
||||
|
||||
var exports = module.exports = Argv;
|
||||
function Argv (processArgs, cwd) {
|
||||
processArgs = processArgs || []; // handle calling yargs().
|
||||
|
||||
var self = {};
|
||||
var completion = null;
|
||||
var usage = null;
|
||||
var validation = null;
|
||||
|
||||
if (!cwd) cwd = process.cwd();
|
||||
|
||||
self.$0 = process.argv
|
||||
.slice(0,2)
|
||||
.map(function (x) {
|
||||
// ignore the node bin, specify this in your
|
||||
// bin file with #!/usr/bin/env node
|
||||
if (~x.indexOf('node')) return;
|
||||
var b = rebase(cwd, x);
|
||||
return x.match(/^\//) && b.length < x.length
|
||||
? b : x
|
||||
})
|
||||
.join(' ').trim();
|
||||
;
|
||||
|
||||
if (process.env._ != undefined && process.argv[1] == process.env._) {
|
||||
self.$0 = process.env._.replace(
|
||||
path.dirname(process.execPath) + '/', ''
|
||||
);
|
||||
}
|
||||
|
||||
var options;
|
||||
self.resetOptions = self.reset = function () {
|
||||
// put yargs back into its initial
|
||||
// state, this is useful for creating a
|
||||
// nested CLI.
|
||||
options = {
|
||||
array: [],
|
||||
boolean: [],
|
||||
string: [],
|
||||
narg: {},
|
||||
key: {},
|
||||
alias: {},
|
||||
default: {},
|
||||
defaultDescription: {},
|
||||
requiresArg: [],
|
||||
count: [],
|
||||
normalize: [],
|
||||
config: []
|
||||
};
|
||||
|
||||
usage = Usage(self); // handle usage output.
|
||||
validation = Validation(self, usage); // handle arg validation.
|
||||
completion = Completion(self, usage);
|
||||
|
||||
demanded = {};
|
||||
|
||||
exitProcess = true;
|
||||
strict = false;
|
||||
helpOpt = null;
|
||||
versionOpt = null;
|
||||
completionOpt = null;
|
||||
|
||||
return self;
|
||||
};
|
||||
self.resetOptions();
|
||||
|
||||
self.boolean = function (bools) {
|
||||
options.boolean.push.apply(options.boolean, [].concat(bools));
|
||||
return self;
|
||||
};
|
||||
|
||||
self.array = function (arrays) {
|
||||
options.array.push.apply(options.array, [].concat(arrays));
|
||||
return self;
|
||||
}
|
||||
|
||||
self.nargs = function (key, n) {
|
||||
if (typeof key === 'object') {
|
||||
Object.keys(key).forEach(function(k) {
|
||||
self.nargs(k, key[k]);
|
||||
});
|
||||
} else {
|
||||
options.narg[key] = n;
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
self.normalize = function (strings) {
|
||||
options.normalize.push.apply(options.normalize, [].concat(strings));
|
||||
return self;
|
||||
};
|
||||
|
||||
self.config = function (configs) {
|
||||
options.config.push.apply(options.config, [].concat(configs));
|
||||
return self;
|
||||
};
|
||||
|
||||
self.example = function (cmd, description) {
|
||||
usage.example(cmd, description);
|
||||
return self;
|
||||
};
|
||||
|
||||
self.command = function (cmd, description) {
|
||||
usage.command(cmd, description);
|
||||
return self;
|
||||
};
|
||||
|
||||
self.string = function (strings) {
|
||||
options.string.push.apply(options.string, [].concat(strings));
|
||||
return self;
|
||||
};
|
||||
|
||||
self.default = function (key, value, defaultDescription) {
|
||||
if (typeof key === 'object') {
|
||||
Object.keys(key).forEach(function (k) {
|
||||
self.default(k, key[k]);
|
||||
});
|
||||
}
|
||||
else {
|
||||
options.defaultDescription[key] = defaultDescription;
|
||||
options.default[key] = value;
|
||||
}
|
||||
return self;
|
||||
};
|
||||
|
||||
self.alias = function (x, y) {
|
||||
if (typeof x === 'object') {
|
||||
Object.keys(x).forEach(function (key) {
|
||||
self.alias(key, x[key]);
|
||||
});
|
||||
}
|
||||
else {
|
||||
options.alias[x] = (options.alias[x] || []).concat(y);
|
||||
}
|
||||
return self;
|
||||
};
|
||||
|
||||
self.count = function(counts) {
|
||||
options.count.push.apply(options.count, [].concat(counts));
|
||||
return self;
|
||||
};
|
||||
|
||||
var demanded = {};
|
||||
self.demand = self.required = self.require = function (keys, msg) {
|
||||
if (typeof keys == 'number') {
|
||||
if (!demanded._) demanded._ = { count: 0, msg: null };
|
||||
demanded._.count += keys;
|
||||
demanded._.msg = msg;
|
||||
}
|
||||
else if (Array.isArray(keys)) {
|
||||
keys.forEach(function (key) {
|
||||
self.demand(key, msg);
|
||||
});
|
||||
}
|
||||
else {
|
||||
if (typeof msg === 'string') {
|
||||
demanded[keys] = { msg: msg };
|
||||
}
|
||||
else if (msg === true || typeof msg === 'undefined') {
|
||||
demanded[keys] = { msg: null };
|
||||
}
|
||||
}
|
||||
|
||||
return self;
|
||||
};
|
||||
self.getDemanded = function() {
|
||||
return demanded;
|
||||
};
|
||||
|
||||
self.requiresArg = function (requiresArgs) {
|
||||
options.requiresArg.push.apply(options.requiresArg, [].concat(requiresArgs));
|
||||
return self;
|
||||
};
|
||||
|
||||
self.implies = function (key, value) {
|
||||
validation.implies(key, value);
|
||||
return self;
|
||||
};
|
||||
|
||||
self.usage = function (msg, opts) {
|
||||
if (!opts && typeof msg === 'object') {
|
||||
opts = msg;
|
||||
msg = null;
|
||||
}
|
||||
|
||||
usage.usage(msg);
|
||||
|
||||
if (opts) self.options(opts);
|
||||
|
||||
return self;
|
||||
};
|
||||
|
||||
self.epilogue = self.epilog = function (msg) {
|
||||
usage.epilog(msg);
|
||||
return self;
|
||||
};
|
||||
|
||||
self.fail = function (f) {
|
||||
usage.failFn(f);
|
||||
return self;
|
||||
};
|
||||
|
||||
self.check = function (f) {
|
||||
validation.check(f);
|
||||
return self;
|
||||
};
|
||||
|
||||
self.defaults = self.default;
|
||||
|
||||
self.describe = function (key, desc) {
|
||||
usage.describe(key, desc);
|
||||
return self;
|
||||
};
|
||||
|
||||
self.parse = function (args) {
|
||||
return parseArgs(args);
|
||||
};
|
||||
|
||||
self.option = self.options = function (key, opt) {
|
||||
if (typeof key === 'object') {
|
||||
Object.keys(key).forEach(function (k) {
|
||||
self.options(k, key[k]);
|
||||
});
|
||||
}
|
||||
else {
|
||||
assert(typeof opt === 'object', 'second argument to option must be an object');
|
||||
|
||||
options.key[key] = true; // track manually set keys.
|
||||
|
||||
if (opt.alias) self.alias(key, opt.alias);
|
||||
|
||||
var demand = opt.demand || opt.required || opt.require;
|
||||
|
||||
if (demand) {
|
||||
self.demand(key, demand);
|
||||
}
|
||||
if ('default' in opt) {
|
||||
self.default(key, opt.default);
|
||||
}
|
||||
if ('nargs' in opt) {
|
||||
self.nargs(key, opt.nargs);
|
||||
}
|
||||
if (opt.boolean || opt.type === 'boolean') {
|
||||
self.boolean(key);
|
||||
if (opt.alias) self.boolean(opt.alias);
|
||||
}
|
||||
if (opt.array || opt.type === 'array') {
|
||||
self.array(key);
|
||||
if (opt.alias) self.array(opt.alias);
|
||||
}
|
||||
if (opt.string || opt.type === 'string') {
|
||||
self.string(key);
|
||||
if (opt.alias) self.string(opt.alias);
|
||||
}
|
||||
if (opt.count || opt.type === 'count') {
|
||||
self.count(key);
|
||||
}
|
||||
|
||||
var desc = opt.describe || opt.description || opt.desc;
|
||||
if (desc) {
|
||||
self.describe(key, desc);
|
||||
}
|
||||
|
||||
if (opt.requiresArg) {
|
||||
self.requiresArg(key);
|
||||
}
|
||||
}
|
||||
|
||||
return self;
|
||||
};
|
||||
self.getOptions = function() {
|
||||
return options;
|
||||
};
|
||||
|
||||
self.wrap = function (cols) {
|
||||
usage.wrap(cols);
|
||||
return self;
|
||||
};
|
||||
|
||||
var strict = false;
|
||||
self.strict = function () {
|
||||
strict = true;
|
||||
return self;
|
||||
};
|
||||
self.getStrict = function () {
|
||||
return strict;
|
||||
}
|
||||
|
||||
self.showHelp = function (fn) {
|
||||
usage.showHelp(fn);
|
||||
return self;
|
||||
};
|
||||
|
||||
var versionOpt = null;
|
||||
self.version = function (ver, opt, msg) {
|
||||
versionOpt = opt || 'version';
|
||||
usage.version(ver);
|
||||
self.describe(versionOpt, msg || 'Show version number');
|
||||
return self;
|
||||
};
|
||||
|
||||
var helpOpt = null;
|
||||
self.addHelpOpt = function (opt, msg) {
|
||||
helpOpt = opt;
|
||||
self.describe(opt, msg || 'Show help');
|
||||
return self;
|
||||
};
|
||||
|
||||
self.showHelpOnFail = function (enabled, message) {
|
||||
usage.showHelpOnFail(enabled, message);
|
||||
return self;
|
||||
};
|
||||
|
||||
var exitProcess = true;
|
||||
self.exitProcess = function (enabled) {
|
||||
if (typeof enabled !== 'boolean') {
|
||||
enabled = true;
|
||||
}
|
||||
exitProcess = enabled;
|
||||
return self;
|
||||
};
|
||||
self.getExitProcess = function () {
|
||||
return exitProcess;
|
||||
}
|
||||
|
||||
self.help = function () {
|
||||
if (arguments.length > 0) return self.addHelpOpt.apply(self, arguments);
|
||||
|
||||
if (!self.parsed) parseArgs(processArgs); // run parser, if it has not already been executed.
|
||||
|
||||
return usage.help();
|
||||
};
|
||||
|
||||
var completionOpt = null,
|
||||
completionCommand = null;
|
||||
self.completion = function(cmd, desc, fn) {
|
||||
// a function to execute when generating
|
||||
// completions can be provided as the second
|
||||
// or third argument to completion.
|
||||
if (typeof desc === 'function') {
|
||||
fn = desc;
|
||||
desc = null;
|
||||
}
|
||||
|
||||
// register the completion command.
|
||||
completionCommand = cmd;
|
||||
completionOpt = completion.completionKey;
|
||||
self.command(completionCommand, desc || 'generate bash completion script');
|
||||
|
||||
// a function can be provided
|
||||
if (fn) completion.registerFunction(fn);
|
||||
|
||||
return self;
|
||||
};
|
||||
|
||||
self.showCompletionScript = function($0) {
|
||||
$0 = $0 || self.$0;
|
||||
console.log(completion.generateCompletionScript($0));
|
||||
return self;
|
||||
};
|
||||
|
||||
self.getUsageInstance = function () {
|
||||
return usage;
|
||||
};
|
||||
|
||||
self.getValidationInstance = function () {
|
||||
return validation;
|
||||
}
|
||||
|
||||
Object.defineProperty(self, 'argv', {
|
||||
get : function () {
|
||||
var args = null;
|
||||
|
||||
try {
|
||||
args = parseArgs(processArgs);
|
||||
} catch (err) {
|
||||
usage.fail(err.message);
|
||||
}
|
||||
|
||||
return args;
|
||||
},
|
||||
enumerable : true
|
||||
});
|
||||
|
||||
function parseArgs (args) {
|
||||
var parsed = Parser(args, options),
|
||||
argv = parsed.argv,
|
||||
aliases = parsed.aliases;
|
||||
|
||||
argv.$0 = self.$0;
|
||||
|
||||
self.parsed = parsed;
|
||||
|
||||
// generate a completion script for adding to ~/.bashrc.
|
||||
if (completionCommand && ~argv._.indexOf(completionCommand)) {
|
||||
self.showCompletionScript();
|
||||
if (exitProcess){
|
||||
process.exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
Object.keys(argv).forEach(function(key) {
|
||||
if (key === helpOpt) {
|
||||
self.showHelp('log');
|
||||
if (exitProcess){
|
||||
process.exit(0);
|
||||
}
|
||||
}
|
||||
else if (key === versionOpt) {
|
||||
usage.showVersion();
|
||||
if (exitProcess){
|
||||
process.exit(0);
|
||||
}
|
||||
}
|
||||
else if (key === completionOpt) {
|
||||
// we allow for asynchronous completions,
|
||||
// e.g., loading in a list of commands from an API.
|
||||
completion.getCompletion(function(completions) {
|
||||
(completions || []).forEach(function(completion) {
|
||||
console.log(completion);
|
||||
});
|
||||
|
||||
if (exitProcess){
|
||||
process.exit(0);
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
});
|
||||
|
||||
validation.nonOptionCount(argv);
|
||||
validation.missingArgumentValue(argv);
|
||||
validation.requiredArguments(argv);
|
||||
|
||||
if (strict) {
|
||||
validation.unknownArguments(argv, aliases);
|
||||
}
|
||||
|
||||
validation.customChecks(argv, aliases);
|
||||
validation.implications(argv);
|
||||
setPlaceholderKeys(argv);
|
||||
|
||||
return argv;
|
||||
}
|
||||
|
||||
function setPlaceholderKeys (argv) {
|
||||
Object.keys(options.key).forEach(function(key) {
|
||||
if (typeof argv[key] === 'undefined') argv[key] = undefined;
|
||||
});
|
||||
}
|
||||
|
||||
sigletonify(self);
|
||||
return self;
|
||||
};
|
||||
|
||||
// rebase an absolute path to a relative one with respect to a base directory
|
||||
// exported for tests
|
||||
exports.rebase = rebase;
|
||||
function rebase (base, dir) {
|
||||
return path.relative(base, dir);
|
||||
};
|
||||
|
||||
/* Hack an instance of Argv with process.argv into Argv
|
||||
so people can do
|
||||
require('yargs')(['--beeble=1','-z','zizzle']).argv
|
||||
to parse a list of args and
|
||||
require('yargs').argv
|
||||
to get a parsed version of process.argv.
|
||||
*/
|
||||
function sigletonify(inst) {
|
||||
Object.keys(inst).forEach(function (key) {
|
||||
if (key === 'argv') {
|
||||
Argv.__defineGetter__(key, inst.__lookupGetter__(key));
|
||||
} else {
|
||||
Argv[key] = typeof inst[key] == 'function'
|
||||
? inst[key].bind(inst)
|
||||
: inst[key];
|
||||
}
|
||||
});
|
||||
}
|
||||
71
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/lib/completion.js
generated
vendored
Normal file
71
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/lib/completion.js
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
var fs = require('fs'),
|
||||
path = require('path');
|
||||
|
||||
// add bash completions to your
|
||||
// yargs-powered applications.
|
||||
module.exports = function (yargs, usage) {
|
||||
var self = {
|
||||
completionKey: 'get-yargs-completions'
|
||||
};
|
||||
|
||||
// get a list of completion commands.
|
||||
self.getCompletion = function (done) {
|
||||
var completions = [],
|
||||
current = process.argv[process.argv.length - 1],
|
||||
previous = process.argv.slice(process.argv.indexOf('--' + self.completionKey) + 1),
|
||||
argv = yargs.parse(previous);
|
||||
|
||||
// a custom completion function can be provided
|
||||
// to completion().
|
||||
if (completionFunction) {
|
||||
if (completionFunction.length < 3) {
|
||||
// synchronous completion function.
|
||||
return done(completionFunction(current, argv));
|
||||
} else {
|
||||
// asynchronous completion function
|
||||
return completionFunction(current, argv, function(completions) {
|
||||
done(completions);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!current.match(/^-/)) {
|
||||
usage.getCommands().forEach(function(command) {
|
||||
completions.push(command[0]);
|
||||
});
|
||||
}
|
||||
|
||||
if (current.match(/^-/)) {
|
||||
Object.keys(yargs.getOptions().key).forEach(function(key) {
|
||||
completions.push('--' + key);
|
||||
});
|
||||
}
|
||||
|
||||
done(completions);
|
||||
};
|
||||
|
||||
// generate the completion script to add to your .bashrc.
|
||||
self.generateCompletionScript = function ($0) {
|
||||
var script = fs.readFileSync(
|
||||
path.resolve(__dirname, '../completion.sh.hbs'),
|
||||
'utf-8'
|
||||
),
|
||||
name = path.basename($0);
|
||||
|
||||
// add ./to applications not yet installed as bin.
|
||||
if ($0.match(/\.js$/)) $0 = './' + $0;
|
||||
|
||||
script = script.replace(/{{app_name}}/g, name);
|
||||
return script.replace(/{{app_path}}/g, $0);
|
||||
};
|
||||
|
||||
// register a function to perform your own custom
|
||||
// completions., this function can be either
|
||||
// synchrnous or asynchronous.
|
||||
var completionFunction = null;
|
||||
self.registerFunction = function (fn) {
|
||||
completionFunction = fn;
|
||||
}
|
||||
|
||||
return self;
|
||||
};
|
||||
407
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/lib/parser.js
generated
vendored
Normal file
407
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/lib/parser.js
generated
vendored
Normal file
@@ -0,0 +1,407 @@
|
||||
// fancy-pants parsing of argv, originally forked
|
||||
// from minimist: https://www.npmjs.com/package/minimist
|
||||
var camelCase = require('camelcase'),
|
||||
fs = require('fs'),
|
||||
path = require('path');
|
||||
|
||||
module.exports = function (args, opts) {
|
||||
if (!opts) opts = {};
|
||||
var flags = { arrays: {}, bools : {}, strings : {}, counts: {}, normalize: {}, configs: {} };
|
||||
|
||||
[].concat(opts['array']).filter(Boolean).forEach(function (key) {
|
||||
flags.arrays[key] = true;
|
||||
});
|
||||
|
||||
[].concat(opts['boolean']).filter(Boolean).forEach(function (key) {
|
||||
flags.bools[key] = true;
|
||||
});
|
||||
|
||||
[].concat(opts.string).filter(Boolean).forEach(function (key) {
|
||||
flags.strings[key] = true;
|
||||
});
|
||||
|
||||
[].concat(opts.count).filter(Boolean).forEach(function (key) {
|
||||
flags.counts[key] = true;
|
||||
});
|
||||
|
||||
[].concat(opts.normalize).filter(Boolean).forEach(function (key) {
|
||||
flags.normalize[key] = true;
|
||||
});
|
||||
|
||||
[].concat(opts.config).filter(Boolean).forEach(function (key) {
|
||||
flags.configs[key] = true;
|
||||
});
|
||||
|
||||
var aliases = {},
|
||||
newAliases = {};
|
||||
|
||||
extendAliases(opts.key);
|
||||
extendAliases(opts.alias);
|
||||
|
||||
var defaults = opts['default'] || {};
|
||||
Object.keys(defaults).forEach(function (key) {
|
||||
if (/-/.test(key) && !opts.alias[key]) {
|
||||
var c = camelCase(key);
|
||||
aliases[key] = aliases[key] || [];
|
||||
// don't allow the same key to be added multiple times.
|
||||
if (aliases[key].indexOf(c) === -1) {
|
||||
aliases[key] = (aliases[key] || []).concat(c);
|
||||
newAliases[c] = true;
|
||||
}
|
||||
}
|
||||
(aliases[key] || []).forEach(function (alias) {
|
||||
defaults[alias] = defaults[key];
|
||||
});
|
||||
});
|
||||
|
||||
var argv = { _ : [] };
|
||||
Object.keys(flags.bools).forEach(function (key) {
|
||||
setArg(key, !(key in defaults) ? false : defaults[key]);
|
||||
});
|
||||
|
||||
var notFlags = [];
|
||||
if (args.indexOf('--') !== -1) {
|
||||
notFlags = args.slice(args.indexOf('--')+1);
|
||||
args = args.slice(0, args.indexOf('--'));
|
||||
}
|
||||
|
||||
for (var i = 0; i < args.length; i++) {
|
||||
var arg = args[i];
|
||||
|
||||
// -- seperated by =
|
||||
if (arg.match(/^--.+=/)) {
|
||||
// Using [\s\S] instead of . because js doesn't support the
|
||||
// 'dotall' regex modifier. See:
|
||||
// http://stackoverflow.com/a/1068308/13216
|
||||
var m = arg.match(/^--([^=]+)=([\s\S]*)$/);
|
||||
setArg(m[1], m[2]);
|
||||
}
|
||||
else if (arg.match(/^--no-.+/)) {
|
||||
var key = arg.match(/^--no-(.+)/)[1];
|
||||
setArg(key, false);
|
||||
}
|
||||
// -- seperated by space.
|
||||
else if (arg.match(/^--.+/)) {
|
||||
var key = arg.match(/^--(.+)/)[1];
|
||||
|
||||
if (checkAllAliases(key, opts.narg)) {
|
||||
i = eatNargs(i, key, args);
|
||||
} else {
|
||||
var next = args[i + 1];
|
||||
|
||||
if (next !== undefined && !next.match(/^-/)
|
||||
&& !checkAllAliases(key, flags.bools)
|
||||
&& !checkAllAliases(key, flags.counts)) {
|
||||
setArg(key, next);
|
||||
i++;
|
||||
}
|
||||
else if (/^(true|false)$/.test(next)) {
|
||||
setArg(key, next);
|
||||
i++;
|
||||
}
|
||||
else {
|
||||
setArg(key, defaultForType(guessType(key, flags)));
|
||||
}
|
||||
}
|
||||
}
|
||||
// dot-notation flag seperated by '='.
|
||||
else if (arg.match(/^-.\..+=/)) {
|
||||
var m = arg.match(/^-([^=]+)=([\s\S]*)$/);
|
||||
setArg(m[1], m[2]);
|
||||
}
|
||||
// dot-notation flag seperated by space.
|
||||
else if (arg.match(/^-.\..+/)) {
|
||||
var key = arg.match(/^-(.\..+)/)[1];
|
||||
var next = args[i + 1];
|
||||
if (next !== undefined && !next.match(/^-/)
|
||||
&& !checkAllAliases(key, flags.bools)
|
||||
&& !checkAllAliases(key, flags.counts)) {
|
||||
setArg(key, next);
|
||||
i++;
|
||||
}
|
||||
else {
|
||||
setArg(key, defaultForType(guessType(key, flags)));
|
||||
}
|
||||
}
|
||||
else if (arg.match(/^-[^-]+/)) {
|
||||
var letters = arg.slice(1,-1).split('');
|
||||
|
||||
var broken = false;
|
||||
for (var j = 0; j < letters.length; j++) {
|
||||
var next = arg.slice(j+2);
|
||||
|
||||
if (letters[j+1] && letters[j+1] === '=') {
|
||||
setArg(letters[j], arg.slice(j+3));
|
||||
broken = true;
|
||||
break;
|
||||
}
|
||||
|
||||
if (next === '-') {
|
||||
setArg(letters[j], next)
|
||||
continue;
|
||||
}
|
||||
if (/[A-Za-z]/.test(letters[j])
|
||||
&& /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) {
|
||||
setArg(letters[j], next);
|
||||
broken = true;
|
||||
break;
|
||||
}
|
||||
if (letters[j+1] && letters[j+1].match(/\W/)) {
|
||||
setArg(letters[j], arg.slice(j+2));
|
||||
broken = true;
|
||||
break;
|
||||
}
|
||||
else {
|
||||
setArg(letters[j], defaultForType(guessType(letters[j], flags)));
|
||||
}
|
||||
}
|
||||
|
||||
var key = arg.slice(-1)[0];
|
||||
|
||||
if (!broken && key !== '-') {
|
||||
if (checkAllAliases(key, opts.narg)) {
|
||||
i = eatNargs(i, key, args);
|
||||
} else {
|
||||
if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1])
|
||||
&& !checkAllAliases(key, flags.bools)
|
||||
&& !checkAllAliases(key, flags.counts)) {
|
||||
setArg(key, args[i+1]);
|
||||
i++;
|
||||
}
|
||||
else if (args[i+1] && /true|false/.test(args[i+1])) {
|
||||
setArg(key, args[i+1]);
|
||||
i++;
|
||||
}
|
||||
else {
|
||||
setArg(key, defaultForType(guessType(key, flags)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
argv._.push(
|
||||
flags.strings['_'] || !isNumber(arg) ? arg : Number(arg)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
setConfig(argv);
|
||||
applyDefaultsAndAliases(argv, aliases, defaults);
|
||||
|
||||
Object.keys(flags.counts).forEach(function (key) {
|
||||
setArg(key, defaults[key]);
|
||||
});
|
||||
|
||||
notFlags.forEach(function(key) {
|
||||
argv._.push(key);
|
||||
});
|
||||
|
||||
// how many arguments should we consume, based
|
||||
// on the nargs option?
|
||||
function eatNargs (i, key, args) {
|
||||
var toEat = checkAllAliases(key, opts.narg);
|
||||
|
||||
if (args.length - (i + 1) < toEat) throw Error('not enough arguments following: ' + key);
|
||||
|
||||
for (var ii = i + 1; ii < (toEat + i + 1); ii++) {
|
||||
setArg(key, args[ii]);
|
||||
}
|
||||
|
||||
return (i + toEat);
|
||||
}
|
||||
|
||||
function setArg (key, val) {
|
||||
// handle parsing boolean arguments --foo=true --bar false.
|
||||
if (checkAllAliases(key, flags.bools) || checkAllAliases(key, flags.counts)) {
|
||||
if (typeof val === 'string') val = val === 'true';
|
||||
}
|
||||
|
||||
if (/-/.test(key) && !(aliases[key] && aliases[key].length)) {
|
||||
var c = camelCase(key);
|
||||
aliases[key] = [c];
|
||||
newAliases[c] = true;
|
||||
}
|
||||
|
||||
var value = !checkAllAliases(key, flags.strings) && isNumber(val) ? Number(val) : val;
|
||||
|
||||
if (checkAllAliases(key, flags.counts)) {
|
||||
value = function(orig) { return orig !== undefined ? orig + 1 : 0; };
|
||||
}
|
||||
|
||||
var splitKey = key.split('.');
|
||||
setKey(argv, splitKey, value);
|
||||
|
||||
(aliases[splitKey[0]] || []).forEach(function (x) {
|
||||
x = x.split('.');
|
||||
|
||||
// handle populating dot notation for both
|
||||
// the key and its aliases.
|
||||
if (splitKey.length > 1) {
|
||||
var a = [].concat(splitKey);
|
||||
a.shift(); // nuke the old key.
|
||||
x = x.concat(a);
|
||||
}
|
||||
|
||||
setKey(argv, x, value);
|
||||
});
|
||||
|
||||
var keys = [key].concat(aliases[key] || []);
|
||||
for (var i = 0, l = keys.length; i < l; i++) {
|
||||
if (flags.normalize[keys[i]]) {
|
||||
keys.forEach(function(key) {
|
||||
argv.__defineSetter__(key, function(v) {
|
||||
val = path.normalize(v);
|
||||
});
|
||||
|
||||
argv.__defineGetter__(key, function () {
|
||||
return typeof val === 'string' ?
|
||||
path.normalize(val) : val;
|
||||
});
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// set args from config.json file, this should be
|
||||
// applied last so that defaults can be applied.
|
||||
function setConfig (argv) {
|
||||
var configLookup = {};
|
||||
|
||||
// expand defaults/aliases, in-case any happen to reference
|
||||
// the config.json file.
|
||||
applyDefaultsAndAliases(configLookup, aliases, defaults);
|
||||
|
||||
Object.keys(flags.configs).forEach(function(configKey) {
|
||||
var configPath = argv[configKey] || configLookup[configKey];
|
||||
if (configPath) {
|
||||
try {
|
||||
var config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
|
||||
Object.keys(config).forEach(function (key) {
|
||||
// setting arguments via CLI takes precedence over
|
||||
// values within the config file.
|
||||
if (argv[key] === undefined) {
|
||||
delete argv[key];
|
||||
setArg(key, config[key]);
|
||||
}
|
||||
});
|
||||
} catch (ex) {
|
||||
throw Error('invalid json config file: ' + configPath);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function applyDefaultsAndAliases(obj, aliases, defaults) {
|
||||
Object.keys(defaults).forEach(function (key) {
|
||||
if (!hasKey(obj, key.split('.'))) {
|
||||
setKey(obj, key.split('.'), defaults[key]);
|
||||
|
||||
(aliases[key] || []).forEach(function (x) {
|
||||
setKey(obj, x.split('.'), defaults[key]);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function hasKey (obj, keys) {
|
||||
var o = obj;
|
||||
keys.slice(0,-1).forEach(function (key) {
|
||||
o = (o[key] || {});
|
||||
});
|
||||
|
||||
var key = keys[keys.length - 1];
|
||||
return key in o;
|
||||
}
|
||||
|
||||
function setKey (obj, keys, value) {
|
||||
var o = obj;
|
||||
keys.slice(0,-1).forEach(function (key) {
|
||||
if (o[key] === undefined) o[key] = {};
|
||||
o = o[key];
|
||||
});
|
||||
|
||||
var key = keys[keys.length - 1];
|
||||
if (typeof value === 'function') {
|
||||
o[key] = value(o[key]);
|
||||
}
|
||||
else if (o[key] === undefined && checkAllAliases(key, flags.arrays)) {
|
||||
o[key] = Array.isArray(value) ? value : [value];
|
||||
}
|
||||
else if (o[key] === undefined || typeof o[key] === 'boolean') {
|
||||
o[key] = value;
|
||||
}
|
||||
else if (Array.isArray(o[key])) {
|
||||
o[key].push(value);
|
||||
}
|
||||
else {
|
||||
o[key] = [ o[key], value ];
|
||||
}
|
||||
}
|
||||
|
||||
// extend the aliases list with inferred aliases.
|
||||
function extendAliases (obj) {
|
||||
Object.keys(obj || {}).forEach(function(key) {
|
||||
aliases[key] = [].concat(opts.alias[key] || []);
|
||||
// For "--option-name", also set argv.optionName
|
||||
aliases[key].concat(key).forEach(function (x) {
|
||||
if (/-/.test(x)) {
|
||||
var c = camelCase(x);
|
||||
aliases[key].push(c);
|
||||
newAliases[c] = true;
|
||||
}
|
||||
});
|
||||
aliases[key].forEach(function (x) {
|
||||
aliases[x] = [key].concat(aliases[key].filter(function (y) {
|
||||
return x !== y;
|
||||
}));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// check if a flag is set for any of a key's aliases.
|
||||
function checkAllAliases (key, flag) {
|
||||
var isSet = false,
|
||||
toCheck = [].concat(aliases[key] || [], key);
|
||||
|
||||
toCheck.forEach(function(key) {
|
||||
if (flag[key]) isSet = flag[key];
|
||||
});
|
||||
|
||||
return isSet;
|
||||
};
|
||||
|
||||
// return a default value, given the type of a flag.,
|
||||
// e.g., key of type 'string' will default to '', rather than 'true'.
|
||||
function defaultForType (type) {
|
||||
var def = {
|
||||
boolean: true,
|
||||
string: '',
|
||||
array: []
|
||||
};
|
||||
|
||||
return def[type];
|
||||
}
|
||||
|
||||
// given a flag, enforce a default type.
|
||||
function guessType (key, flags) {
|
||||
var type = 'boolean';
|
||||
|
||||
if (flags.strings && flags.strings[key]) type = 'string';
|
||||
else if (flags.arrays && flags.arrays[key]) type = 'array';
|
||||
|
||||
return type;
|
||||
}
|
||||
|
||||
function isNumber (x) {
|
||||
if (typeof x === 'number') return true;
|
||||
if (/^0x[0-9a-f]+$/i.test(x)) return true;
|
||||
return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
|
||||
}
|
||||
|
||||
return {
|
||||
argv: argv,
|
||||
aliases: aliases,
|
||||
newAliases: newAliases
|
||||
};
|
||||
};
|
||||
372
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/lib/usage.js
generated
vendored
Normal file
372
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/lib/usage.js
generated
vendored
Normal file
@@ -0,0 +1,372 @@
|
||||
// this file handles outputting usage instructions,
|
||||
// failures, etc. keeps logging in one place.
|
||||
var decamelize = require('decamelize'),
|
||||
wordwrap = require('wordwrap'),
|
||||
wsize = require('window-size');
|
||||
|
||||
module.exports = function (yargs) {
|
||||
var self = {};
|
||||
|
||||
// methods for ouputting/building failure message.
|
||||
var fails = [];
|
||||
self.failFn = function (f) {
|
||||
fails.push(f);
|
||||
};
|
||||
|
||||
var failMessage = null;
|
||||
var showHelpOnFail = true;
|
||||
self.showHelpOnFail = function (enabled, message) {
|
||||
if (typeof enabled === 'string') {
|
||||
message = enabled;
|
||||
enabled = true;
|
||||
}
|
||||
else if (typeof enabled === 'undefined') {
|
||||
enabled = true;
|
||||
}
|
||||
failMessage = message;
|
||||
showHelpOnFail = enabled;
|
||||
return self;
|
||||
};
|
||||
|
||||
self.fail = function (msg) {
|
||||
if (fails.length) {
|
||||
fails.forEach(function (f) {
|
||||
f(msg);
|
||||
});
|
||||
} else {
|
||||
if (showHelpOnFail) yargs.showHelp("error");
|
||||
if (msg) console.error(msg);
|
||||
if (failMessage) {
|
||||
if (msg) console.error("");
|
||||
console.error(failMessage);
|
||||
}
|
||||
if (yargs.getExitProcess()){
|
||||
process.exit(1);
|
||||
}else{
|
||||
throw new Error(msg);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// methods for ouputting/building help (usage) message.
|
||||
var usage;
|
||||
self.usage = function (msg) {
|
||||
usage = msg;
|
||||
};
|
||||
|
||||
var examples = [];
|
||||
self.example = function (cmd, description) {
|
||||
examples.push([cmd, description || '']);
|
||||
};
|
||||
|
||||
var commands = [];
|
||||
self.command = function (cmd, description) {
|
||||
commands.push([cmd, description || '']);
|
||||
};
|
||||
self.getCommands = function () {
|
||||
return commands;
|
||||
};
|
||||
|
||||
var descriptions = {};
|
||||
self.describe = function (key, desc) {
|
||||
if (typeof key === 'object') {
|
||||
Object.keys(key).forEach(function (k) {
|
||||
self.describe(k, key[k]);
|
||||
});
|
||||
}
|
||||
else {
|
||||
descriptions[key] = desc;
|
||||
}
|
||||
};
|
||||
self.getDescriptions = function() {
|
||||
return descriptions;
|
||||
}
|
||||
|
||||
var epilog;
|
||||
self.epilog = function (msg) {
|
||||
epilog = msg;
|
||||
};
|
||||
|
||||
var wrap = windowWidth();
|
||||
self.wrap = function (cols) {
|
||||
wrap = cols;
|
||||
};
|
||||
|
||||
self.help = function () {
|
||||
var demanded = yargs.getDemanded(),
|
||||
options = yargs.getOptions(),
|
||||
keys = Object.keys(
|
||||
Object.keys(descriptions)
|
||||
.concat(Object.keys(demanded))
|
||||
.concat(Object.keys(options.default))
|
||||
.reduce(function (acc, key) {
|
||||
if (key !== '_') acc[key] = true;
|
||||
return acc;
|
||||
}, {})
|
||||
);
|
||||
|
||||
var help = keys.length ? [ 'Options:' ] : [];
|
||||
|
||||
// your application's commands, i.e., non-option
|
||||
// arguments populated in '_'.
|
||||
if (commands.length) {
|
||||
help.unshift('');
|
||||
|
||||
var commandsTable = {};
|
||||
commands.forEach(function(command) {
|
||||
commandsTable[command[0]] = {
|
||||
desc: command[1],
|
||||
extra: ''
|
||||
};
|
||||
});
|
||||
|
||||
help = ['Commands:'].concat(formatTable(commandsTable, 5), help);
|
||||
}
|
||||
|
||||
// the usage string.
|
||||
if (usage) {
|
||||
var u = usage.replace(/\$0/g, yargs.$0);
|
||||
if (wrap) u = wordwrap(0, wrap)(u);
|
||||
help.unshift(u, '');
|
||||
}
|
||||
|
||||
// the options table.
|
||||
var aliasKeys = (Object.keys(options.alias) || [])
|
||||
.concat(Object.keys(yargs.parsed.newAliases) || []);
|
||||
|
||||
keys = keys.filter(function(key) {
|
||||
return !yargs.parsed.newAliases[key] && aliasKeys.every(function(alias) {
|
||||
return -1 == (options.alias[alias] || []).indexOf(key);
|
||||
});
|
||||
});
|
||||
|
||||
var switches = keys.reduce(function (acc, key) {
|
||||
acc[key] = [ key ].concat(options.alias[key] || [])
|
||||
.map(function (sw) {
|
||||
return (sw.length > 1 ? '--' : '-') + sw
|
||||
})
|
||||
.join(', ')
|
||||
;
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
var switchTable = {};
|
||||
keys.forEach(function (key) {
|
||||
var kswitch = switches[key];
|
||||
var desc = descriptions[key] || '';
|
||||
var type = null;
|
||||
|
||||
if (options.boolean[key]) type = '[boolean]';
|
||||
if (options.count[key]) type = '[count]';
|
||||
if (options.string[key]) type = '[string]';
|
||||
if (options.normalize[key]) type = '[string]';
|
||||
|
||||
var extra = [
|
||||
type,
|
||||
demanded[key]
|
||||
? '[required]'
|
||||
: null
|
||||
,
|
||||
defaultString(options.default[key], options.defaultDescription[key])
|
||||
].filter(Boolean).join(' ');
|
||||
|
||||
switchTable[kswitch] = {
|
||||
desc: desc,
|
||||
extra: extra
|
||||
};
|
||||
});
|
||||
help.push.apply(help, formatTable(switchTable, 3));
|
||||
|
||||
if (keys.length) help.push('');
|
||||
|
||||
// describe some common use-cases for your application.
|
||||
if (examples.length) {
|
||||
examples.forEach(function (example) {
|
||||
example[0] = example[0].replace(/\$0/g, yargs.$0);
|
||||
});
|
||||
|
||||
var examplesTable = {};
|
||||
examples.forEach(function(example) {
|
||||
examplesTable[example[0]] = {
|
||||
desc: example[1],
|
||||
extra: ''
|
||||
};
|
||||
});
|
||||
|
||||
help.push.apply(help, ['Examples:'].concat(formatTable(examplesTable, 5), ''));
|
||||
}
|
||||
|
||||

    // the epilog string.
    if (epilog) {
      var e = epilog.replace(/\$0/g, yargs.$0);
      if (wrap) e = wordwrap(0, wrap)(e);
      help.push(e, '');
    }

    return help.join('\n');
  };

  self.showHelp = function (level) {
    level = level || 'error';
    console[level](self.help());
  }

  // format the default-value-string displayed in
  // the right-hand column.
  function defaultString(value, defaultDescription) {
    var string = '[default: ';

    if (value === undefined) return null;

    if (defaultDescription) {
      string += defaultDescription;
    } else {
      switch (typeof value) {
        case 'string':
          string += JSON.stringify(value);
          break;
        case 'function':
          string += '(' + (value.name.length ? decamelize(value.name, '-') : 'generated-value') + ')'
          break;
        default:
          string += value;
      }
    }

    return string + ']';
  }
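
  // Worked examples of what defaultString() produces (illustrative only; the
  // sample values are invented, the behaviour follows the branches above):
  //
  //   defaultString(undefined)                  // => null, so no default is shown
  //   defaultString('build')                    // => '[default: "build"]'
  //   defaultString(8080)                       // => '[default: 8080]'
  //   defaultString(8080, 'a common dev port')  // => '[default: a common dev port]'
  //   defaultString(function getPort() {})      // => '[default: (get-port)]'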

  // word-wrapped two-column layout used by
  // examples, options, commands.
  function formatTable (table, padding) {
    var output = [];

    // size of left-hand-column.
    var llen = longest(Object.keys(table));

    // don't allow the left-column to take up
    // more than half of the screen.
    if (wrap) {
      llen = Math.min(llen, parseInt(wrap / 2));
    }

    // size of right-column.
    var desclen = longest(Object.keys(table).map(function (k) {
      return table[k].desc;
    }));

    Object.keys(table).forEach(function(left) {
      var desc = table[left].desc,
        extra = table[left].extra,
        leftLines = null;

      if (wrap) {
        desc = wordwrap(llen + padding + 1, wrap)(desc)
          .slice(llen + padding + 1);
      }

      // if we need to wrap the left-hand-column,
      // split it on to multiple lines.
      if (wrap && left.length > llen) {
        leftLines = wordwrap(2, llen)(left.trim()).split('\n');
        left = '';
      }

      var lpadding = new Array(
        Math.max(llen - left.length + padding, 0)
      ).join(' ');

      var dpadding = new Array(
        Math.max(desclen - desc.length + 1, 0)
      ).join(' ');

      if (!wrap && dpadding.length > 0) {
        desc += dpadding;
      }

      var prelude = ' ' + left + lpadding;

      var body = [ desc, extra ].filter(Boolean).join(' ');

      if (wrap) {
        var dlines = desc.split('\n');
        var dlen = dlines.slice(-1)[0].length
          + (dlines.length === 1 ? prelude.length : 0)

        if (extra.length > wrap) {
          body = desc + '\n' + wordwrap(llen + 4, wrap)(extra)
        } else {
          body = desc + (dlen + extra.length > wrap - 2
            ? '\n'
              + new Array(wrap - extra.length + 1).join(' ')
              + extra
            : new Array(wrap - extra.length - dlen + 1).join(' ')
              + extra
          );
        }
      }

      if (leftLines) { // handle word-wrapping the left-hand-column.
        var rightLines = body.split('\n'),
          firstLine = prelude + rightLines[0],
          lineCount = Math.max(leftLines.length, rightLines.length);

        for (var i = 0; i < lineCount; i++) {
          var left = leftLines[i],
            right = i ? rightLines[i] : firstLine;

          output.push(strcpy(left, right, firstLine.length));
        }
      } else {
        output.push(prelude + body);
      }
    });

    return output;
  }

  // find longest string in array of strings.
  function longest (xs) {
    return Math.max.apply(
      null,
      xs.map(function (x) { return x.length })
    );
  }

  // copy one string into another, used when
  // formatting usage table.
  function strcpy (source, destination, width) {
    var str = ''

    source = source || '';
    destination = destination || new Array(width).join(' ');

    for (var i = 0; i < destination.length; i++) {
      var char = destination.charAt(i);

      if (char === ' ') char = source.charAt(i) || char;

      str += char;
    }

    return str;
  }
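
  // Worked example (illustrative; the strings are invented): strcpy overlays
  // `source` onto the blank positions of `destination`, which is how a wrapped
  // left-hand column gets merged back into the padded right-hand column:
  //
  //   strcpy('--foo', '          some description', 30)
  //   // => '--foo     some description'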

  // guess the width of the console window, max-width 80.
  function windowWidth() {
    return wsize.width ? Math.min(80, wsize.width) : null;
  }

  // logic for displaying application version.
  var version = null;
  self.version = function (ver, opt, msg) {
    version = ver;
  };

  self.showVersion = function() {
    if (typeof version === 'function') console.log(version());
    else console.log(version);
  };

  return self;
}
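
A minimal sketch of how the helpers above are typically driven from application code (illustrative only; `dist/app.min.js` and the option names are invented, but `usage`, `demand`, `alias`, `describe`, `example`, `epilog`, `wrap` and `showHelp` are the public yargs methods that feed this module):

```js
var yargs = require('yargs')
  .usage('Usage: $0 <command> [options]')
  .demand('o')
  .alias('o', 'output')
  .describe('o', 'file to write the minified bundle to')
  .example('$0 -o dist/app.min.js src/*.js', 'minify the sources into one file')
  .epilog('Copyright 2015')
  .wrap(80);

var argv = yargs.argv;

// prints the generated usage/options/examples/epilog text (via console.error by default)
if (argv.help) yargs.showHelp();
```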
193
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/lib/validation.js
generated
vendored
Normal file
@@ -0,0 +1,193 @@
// validation-type-stuff, missing params,
// bad implications, custom checks.
module.exports = function (yargs, usage) {
  var self = {};

  // validate appropriate # of non-option
  // arguments were provided, i.e., '_'.
  self.nonOptionCount = function(argv) {
    var demanded = yargs.getDemanded();

    if (demanded._ && argv._.length < demanded._.count) {
      if (demanded._.msg !== undefined) {
        usage.fail(demanded._.msg);
      } else {
        usage.fail('Not enough non-option arguments: got '
          + argv._.length + ', need at least ' + demanded._.count
        );
      }
    }
  };

  // make sure that any args that require an
  // value (--foo=bar), have a value.
  self.missingArgumentValue = function(argv) {
    var options = yargs.getOptions();

    if (options.requiresArg.length > 0) {
      var missingRequiredArgs = [];

      options.requiresArg.forEach(function(key) {
        var value = argv[key];

        // parser sets --foo value to true / --no-foo to false
        if (value === true || value === false) {
          missingRequiredArgs.push(key);
        }
      });

      if (missingRequiredArgs.length == 1) {
        usage.fail("Missing argument value: " + missingRequiredArgs[0]);
      }
      else if (missingRequiredArgs.length > 1) {
        var message = "Missing argument values: " + missingRequiredArgs.join(", ");
        usage.fail(message);
      }
    }
  };

  // make sure all the required arguments are present.
  self.requiredArguments = function(argv) {
    var demanded = yargs.getDemanded(),
      missing = null;

    Object.keys(demanded).forEach(function (key) {
      if (!argv.hasOwnProperty(key)) {
        missing = missing || {};
        missing[key] = demanded[key];
      }
    });

    if (missing) {
      var customMsgs = [];
      Object.keys(missing).forEach(function(key) {
        var msg = missing[key].msg;
        if (msg && customMsgs.indexOf(msg) < 0) {
          customMsgs.push(msg);
        }
      });
      var customMsg = customMsgs.length ? '\n' + customMsgs.join('\n') : '';

      usage.fail('Missing required arguments: ' + Object.keys(missing).join(', ') + customMsg);
    }
  };

  // check for unknown arguments (strict-mode).
  self.unknownArguments = function(argv, aliases) {
    var descriptions = usage.getDescriptions(),
      demanded = yargs.getDemanded(),
      unknown = [],
      aliasLookup = {};

    Object.keys(aliases).forEach(function (key) {
      aliases[key].forEach(function (alias) {
        aliasLookup[alias] = key;
      });
    });

    Object.keys(argv).forEach(function (key) {
      if (key !== "$0" && key !== "_" &&
        !descriptions.hasOwnProperty(key) &&
        !demanded.hasOwnProperty(key) &&
        !aliasLookup.hasOwnProperty(key)) {
        unknown.push(key);
      }
    });

    if (unknown.length == 1) {
      usage.fail("Unknown argument: " + unknown[0]);
    }
    else if (unknown.length > 1) {
      usage.fail("Unknown arguments: " + unknown.join(", "));
    }
  };

  // custom checks, added using the `check` option on yargs.
  var checks = [];
  self.check = function (f) {
    checks.push(f);
  };

  self.customChecks = function(argv, aliases) {
    checks.forEach(function (f) {
      try {
        var result = f(argv, aliases);
        if (!result) {
          usage.fail('Argument check failed: ' + f.toString());
        } else if (typeof result === 'string') {
          usage.fail(result);
        }
      }
      catch (err) {
        usage.fail(err)
      }
    });
  };

  // check implications, argument foo implies => argument bar.
  var implied = {};
  self.implies = function (key, value) {
    if (typeof key === 'object') {
      Object.keys(key).forEach(function (k) {
        self.implies(k, key[k]);
      });
    } else {
      implied[key] = value;
    }
  };
  self.getImplied = function() {
    return implied;
  }

  self.implications = function(argv) {
    var implyFail = [];

    Object.keys(implied).forEach(function (key) {
      var num, origKey = key, value = implied[key];

      // convert string '1' to number 1
      var num = Number(key);
      key = isNaN(num) ? key : num;

      if (typeof key === 'number') {
        // check length of argv._
        key = argv._.length >= key;
      } else if (key.match(/^--no-.+/)) {
        // check if key doesn't exist
        key = key.match(/^--no-(.+)/)[1];
        key = !argv[key];
      } else {
        // check if key exists
        key = argv[key];
      }

      num = Number(value);
      value = isNaN(num) ? value : num;

      if (typeof value === 'number') {
        value = argv._.length >= value;
      } else if (value.match(/^--no-.+/)) {
        value = value.match(/^--no-(.+)/)[1];
        value = !argv[value];
      } else {
        value = argv[value];
      }

      if (key && !value) {
        implyFail.push(origKey);
      }
    });

    if (implyFail.length) {
      var msg = 'Implications failed:\n';

      implyFail.forEach(function (key) {
        msg += (' ' + key + ' -> ' + implied[key]);
      });

      usage.fail(msg);
    }
  }

  return self;
}
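
These validators sit behind yargs' `demand`, `strict`, `check` and `implies` options. A minimal sketch of the user-facing side (illustrative; the option names are invented for the example):

```js
var argv = require('yargs')
  .demand(1, 'please pass at least one input file')   // nonOptionCount
  .demand('out')                                       // requiredArguments
  .implies('min', 'out')                               // implications: --min requires --out
  .check(function (argv) {                             // customChecks
    if (argv.out && !/\.js$/.test(argv.out)) return 'out must be a .js file';
    return true;
  })
  .strict()                                            // unknownArguments
  .argv;
```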
19
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/node_modules/camelcase/index.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
'use strict';
module.exports = function (str) {
  str = str.trim();

  if (str.length === 1 || !(/[_.\- ]+/).test(str) ) {
    if (str[0] === str[0].toLowerCase() && str.slice(1) !== str.slice(1).toLowerCase()) {
      return str;
    }

    return str.toLowerCase();
  }

  return str
    .replace(/^[_.\- ]+/, '')
    .toLowerCase()
    .replace(/[_.\- ]+(\w|$)/g, function (m, p1) {
      return p1.toUpperCase();
    });
};
48
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/node_modules/camelcase/package.json
generated
vendored
Normal file
@@ -0,0 +1,48 @@
{
  "name": "camelcase",
  "version": "1.1.0",
  "description": "Convert a dash/dot/underscore/space separated string to camelCase: foo-bar → fooBar",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "git://github.com/sindresorhus/camelcase"
  },
  "author": {
    "name": "Sindre Sorhus",
    "email": "sindresorhus@gmail.com",
    "url": "http://sindresorhus.com"
  },
  "engines": {
    "node": ">=0.10.0"
  },
  "scripts": {
    "test": "node test.js"
  },
  "files": [
    "index.js"
  ],
  "keywords": [
    "camelcase",
    "camel-case",
    "camel",
    "case",
    "dash",
    "hyphen",
    "dot",
    "underscore",
    "separator",
    "string",
    "text",
    "convert"
  ],
  "devDependencies": {
    "ava": "0.0.4"
  },
  "readme": "# camelcase [](https://travis-ci.org/sindresorhus/camelcase)\n\n> Convert a dash/dot/underscore/space separated string to camelCase: `foo-bar` → `fooBar`\n\n\n## Install\n\n```sh\n$ npm install --save camelcase\n```\n\n\n## Usage\n\n```js\nvar camelCase = require('camelcase');\n\ncamelCase('foo-bar');\n//=> fooBar\n\ncamelCase('foo_bar');\n//=> fooBar\n\ncamelCase('Foo-Bar');\n//=> fooBar\n\ncamelCase('--foo.bar');\n//=> fooBar\n\ncamelCase('__foo__bar__');\n//=> fooBar\n\ncamelCase('foo bar');\n//=> fooBar\n\nconsole.log(process.argv[3]);\n//=> --foo-bar\ncamelCase(process.argv[3]);\n//=> fooBar\n```\n\n\n## Related\n\nSee [`decamelize`](https://github.com/sindresorhus/decamelize) for the inverse.\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
  "readmeFilename": "readme.md",
  "bugs": {
    "url": "https://github.com/sindresorhus/camelcase/issues"
  },
  "_id": "camelcase@1.1.0",
  "_from": "camelcase@^1.0.2"
}
50
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/node_modules/camelcase/readme.md
generated
vendored
Normal file
@@ -0,0 +1,50 @@
# camelcase [](https://travis-ci.org/sindresorhus/camelcase)

> Convert a dash/dot/underscore/space separated string to camelCase: `foo-bar` → `fooBar`


## Install

```sh
$ npm install --save camelcase
```


## Usage

```js
var camelCase = require('camelcase');

camelCase('foo-bar');
//=> fooBar

camelCase('foo_bar');
//=> fooBar

camelCase('Foo-Bar');
//=> fooBar

camelCase('--foo.bar');
//=> fooBar

camelCase('__foo__bar__');
//=> fooBar

camelCase('foo bar');
//=> fooBar

console.log(process.argv[3]);
//=> --foo-bar
camelCase(process.argv[3]);
//=> fooBar
```


## Related

See [`decamelize`](https://github.com/sindresorhus/decamelize) for the inverse.


## License

MIT © [Sindre Sorhus](http://sindresorhus.com)
8
server/node_modules/gulp-uglifyjs/node_modules/uglify-js/node_modules/yargs/node_modules/decamelize/index.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
'use strict';
module.exports = function (str, sep) {
  if (typeof str !== 'string') {
    throw new TypeError('Expected a string');
  }

  return str.replace(/([a-z\d])([A-Z])/g, '$1' + (sep || '_') + '$2').toLowerCase();
};
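
For reference, a quick sketch of what `decamelize` produces (illustrative; it is the inverse of the `camelcase` module above, and the custom-separator form is what usage.js uses when printing function-valued defaults):

```js
var decamelize = require('decamelize');

decamelize('unicornRainbow');
//=> unicorn_rainbow

decamelize('unicornRainbow', '-');
//=> unicorn-rainbow
```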
Some files were not shown because too many files have changed in this diff.