Add additional exercises for the JS training
300
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/README.md
generated
vendored
Normal file
@@ -0,0 +1,300 @@
# grunt-contrib-concat v0.5.1 [](https://travis-ci.org/gruntjs/grunt-contrib-concat) [](https://ci.appveyor.com/project/gruntjs/grunt-contrib-concat/branch/master)

> Concatenate files.



## Getting Started
This plugin requires Grunt `>=0.4.0`

If you haven't used [Grunt](http://gruntjs.com/) before, be sure to check out the [Getting Started](http://gruntjs.com/getting-started) guide, as it explains how to create a [Gruntfile](http://gruntjs.com/sample-gruntfile) as well as install and use Grunt plugins. Once you're familiar with that process, you may install this plugin with this command:

```shell
npm install grunt-contrib-concat --save-dev
```

Once the plugin has been installed, it may be enabled inside your Gruntfile with this line of JavaScript:

```js
grunt.loadNpmTasks('grunt-contrib-concat');
```



## Concat task
_Run this task with the `grunt concat` command._

Task targets, files and options may be specified according to the Grunt [Configuring tasks](http://gruntjs.com/configuring-tasks) guide.

### Options

#### separator
Type: `String`
Default: `grunt.util.linefeed`

Concatenated files will be joined on this string. If you're post-processing concatenated JavaScript files with a minifier, you may need to use a semicolon `';'` as the separator.

#### banner
Type: `String`
Default: empty string

This string will be prepended to the beginning of the concatenated output. It is processed using [grunt.template.process][], using the default options.

_(Default processing options are explained in the [grunt.template.process][] documentation)_

#### footer
Type: `String`
Default: empty string

This string will be appended to the end of the concatenated output. It is processed using [grunt.template.process][], using the default options.

_(Default processing options are explained in the [grunt.template.process][] documentation)_

#### stripBanners
Type: `Boolean` `Object`
Default: `false`

Strip JavaScript banner comments from source files.

* `false` - No comments are stripped.
* `true` - `/* ... */` block comments are stripped, but _NOT_ `/*! ... */` comments.
* `options` object:
  * By default, behaves as if `true` were specified.
  * `block` - If true, _all_ block comments are stripped.
  * `line` - If true, any contiguous _leading_ `//` line comments are stripped.
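
The object form of `stripBanners` is not exercised by the usage examples further down, so here is a minimal, hypothetical sketch; the `all` target name and the file paths are placeholders rather than anything taken from the plugin's documentation:

```js
// Minimal sketch (placeholder target and paths): strip all block comments,
// including /*! ... */ banners, plus any leading // line comments.
grunt.initConfig({
  concat: {
    all: {
      options: {
        stripBanners: { block: true, line: true },
      },
      src: ['src/a.js', 'src/b.js'],
      dest: 'dist/combined.js',
    },
  },
});
```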

#### process
Type: `Boolean` `Object` `Function`
Default: `false`

Process source files before concatenating, either as [templates][] or with a custom function.

* `false` - No processing will occur.
* `true` - Process source files using [grunt.template.process][] defaults.
* `data` object - Process source files using [grunt.template.process][], using the specified options.
* `function(src, filepath)` - Process source files using the given function, called once for each file. The returned value will be used as source code.

_(Default processing options are explained in the [grunt.template.process][] documentation)_

[templates]: https://github.com/gruntjs/grunt-docs/blob/master/grunt.template.md
[grunt.template.process]: https://github.com/gruntjs/grunt-docs/blob/master/grunt.template.md#grunttemplateprocess

#### sourceMap
Type: `Boolean`
Default: `false`

Set to true to create a source map. The source map will be created alongside the destination file, and share the same file name with the `.map` extension appended to it.

#### sourceMapName
Type: `String` `Function`
Default: `undefined`

To customize the name or location of the generated source map, pass a string to indicate where to write the source map to. If a function is provided, the concat destination is passed as the argument and the return value will be used as the file name.

#### sourceMapStyle
Type: `String`
Default: `embed`

Determines the type of source map that is generated. The default value, `embed`, places the content of the sources directly into the map. `link` will reference the original sources in the map as links. `inline` will store the entire map as a data URI in the destination file.
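
None of the usage examples below exercise the three source map options above, so here is a minimal, hypothetical sketch; the target name and every path in it are placeholders:

```js
// Minimal sketch (placeholder target and paths): emit a source map that
// links back to the original sources instead of embedding their content.
grunt.initConfig({
  concat: {
    dist: {
      options: {
        sourceMap: true,
        sourceMapName: 'dist/maps/built.js.map',
        sourceMapStyle: 'link',
      },
      src: ['src/intro.js', 'src/project.js', 'src/outro.js'],
      dest: 'dist/built.js',
    },
  },
});
```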

### Usage Examples

#### Concatenating with a custom separator

In this example, running `grunt concat:dist` (or `grunt concat` because `concat` is a [multi task][multitask]) will concatenate the three specified source files (in order), joining files with `;` and writing the output to `dist/built.js`.

```js
// Project configuration.
grunt.initConfig({
  concat: {
    options: {
      separator: ';',
    },
    dist: {
      src: ['src/intro.js', 'src/project.js', 'src/outro.js'],
      dest: 'dist/built.js',
    },
  },
});
```

#### Banner comments

In this example, running `grunt concat:dist` will first strip any preexisting banner comment from the `src/project.js` file, then concatenate the result with a newly-generated banner comment, writing the output to `dist/built.js`.

This generated banner will be the contents of the `banner` template string interpolated with the config object. In this case, those properties are the values imported from the `package.json` file (which are available via the `pkg` config property) plus today's date.

_Note: you don't have to use an external JSON file. It's also valid to create the `pkg` object inline in the config. That being said, if you already have a JSON file, you might as well reference it._

```js
// Project configuration.
grunt.initConfig({
  pkg: grunt.file.readJSON('package.json'),
  concat: {
    options: {
      stripBanners: true,
      banner: '/*! <%= pkg.name %> - v<%= pkg.version %> - ' +
        '<%= grunt.template.today("yyyy-mm-dd") %> */',
    },
    dist: {
      src: ['src/project.js'],
      dest: 'dist/built.js',
    },
  },
});
```

#### Multiple targets

In this example, running `grunt concat` will build two separate files. One "basic" version, with the main file essentially just copied to `dist/basic.js`, and another "with_extras" concatenated version written to `dist/with_extras.js`.

While each concat target can be built individually by running `grunt concat:basic` or `grunt concat:extras`, running `grunt concat` will build all concat targets. This is because `concat` is a [multi task][multitask].

```js
// Project configuration.
grunt.initConfig({
  concat: {
    basic: {
      src: ['src/main.js'],
      dest: 'dist/basic.js',
    },
    extras: {
      src: ['src/main.js', 'src/extras.js'],
      dest: 'dist/with_extras.js',
    },
  },
});
```

#### Multiple files per target

Like the previous example, in this example running `grunt concat` will build two separate files. One "basic" version, with the main file essentially just copied to `dist/basic.js`, and another "with_extras" concatenated version written to `dist/with_extras.js`.

This example differs in that both files are built under the same target.

Using the `files` object, you can list any number of source-destination pairs.

```js
// Project configuration.
grunt.initConfig({
  concat: {
    basic_and_extras: {
      files: {
        'dist/basic.js': ['src/main.js'],
        'dist/with_extras.js': ['src/main.js', 'src/extras.js'],
      },
    },
  },
});
```

#### Dynamic filenames

Filenames can be generated dynamically by using `<%= %>` delimited underscore templates as filenames.

In this example, running `grunt concat:dist` generates a destination file whose name is generated from the `name` and `version` properties of the referenced `package.json` file (via the `pkg` config property).

```js
// Project configuration.
grunt.initConfig({
  pkg: grunt.file.readJSON('package.json'),
  concat: {
    dist: {
      src: ['src/main.js'],
      dest: 'dist/<%= pkg.name %>-<%= pkg.version %>.js',
    },
  },
});
```

#### Advanced dynamic filenames

In this more involved example, running `grunt concat` will build two separate files (because `concat` is a [multi task][multitask]). The destination file paths will be expanded dynamically based on the specified templates, recursively if necessary.

For example, if the `package.json` file contained `{"name": "awesome", "version": "1.0.0"}`, the files `dist/awesome/1.0.0/basic.js` and `dist/awesome/1.0.0/with_extras.js` would be generated.

```js
// Project configuration.
grunt.initConfig({
  pkg: grunt.file.readJSON('package.json'),
  dirs: {
    src: 'src/files',
    dest: 'dist/<%= pkg.name %>/<%= pkg.version %>',
  },
  concat: {
    basic: {
      src: ['<%= dirs.src %>/main.js'],
      dest: '<%= dirs.dest %>/basic.js',
    },
    extras: {
      src: ['<%= dirs.src %>/main.js', '<%= dirs.src %>/extras.js'],
      dest: '<%= dirs.dest %>/with_extras.js',
    },
  },
});
```

#### Invalid or Missing Files Warning

If you would like the `concat` task to warn if a given file is missing or invalid, be sure to set `nonull` to `true`:

```js
grunt.initConfig({
  concat: {
    missing: {
      src: ['src/invalid_or_missing_file'],
      dest: 'compiled.js',
      nonull: true,
    },
  },
});
```

See [configuring files for a task](http://gruntjs.com/configuring-tasks#files) for how to configure file globbing in Grunt.

#### Custom process function

If you would like to do any custom processing before concatenating, use a custom process function:

```js
grunt.initConfig({
  concat: {
    dist: {
      options: {
        // Replace all 'use strict' statements in the code with a single one at the top
        banner: "'use strict';\n",
        process: function(src, filepath) {
          return '// Source: ' + filepath + '\n' +
            src.replace(/(^|\n)[ \t]*('use strict'|"use strict");?\s*/g, '$1');
        },
      },
      files: {
        'dist/built.js': ['src/project.js'],
      },
    },
  },
});
```

[multitask]: http://gruntjs.com/creating-tasks#multi-tasks


## Release History

* 2015-02-20 v0.5.1 Fix path issues with Source Maps on Windows.
* 2014-07-19 v0.5.0 Adds sourceMap option.
* 2014-03-21 v0.4.0 README updates. Output updates.
* 2013-04-25 v0.3.0 Add option to process files with a custom function.
* 2013-04-08 v0.2.0 Don't normalize separator to allow user to set LF even on a Windows environment.
* 2013-02-22 v0.1.3 Support footer option.
* 2013-02-15 v0.1.2 First official release for Grunt 0.4.0.
* 2013-01-18 v0.1.2rc6 Updating grunt/gruntplugin dependencies to rc6. Changing in-development grunt/gruntplugin dependency versions from tilde version ranges to specific versions.
* 2013-01-09 v0.1.2rc5 Updating to work with grunt v0.4.0rc5. Switching back to this.files api.
* 2012-11-13 v0.1.1 Switch to this.file api internally.
* 2012-10-03 v0.1.0 Work in progress, not yet officially released.

---

Task submitted by ["Cowboy" Ben Alman](http://benalman.com/)

*This file was generated on Fri Feb 20 2015 10:39:55.*
95
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/index.js
generated
vendored
Normal file
@ -0,0 +1,95 @@
|
||||
'use strict';
|
||||
var escapeStringRegexp = require('escape-string-regexp');
|
||||
var ansiStyles = require('ansi-styles');
|
||||
var stripAnsi = require('strip-ansi');
|
||||
var hasAnsi = require('has-ansi');
|
||||
var supportsColor = require('supports-color');
|
||||
var defineProps = Object.defineProperties;
|
||||
var chalk = module.exports;
|
||||
|
||||
function build(_styles) {
|
||||
var builder = function builder() {
|
||||
return applyStyle.apply(builder, arguments);
|
||||
};
|
||||
builder._styles = _styles;
|
||||
// __proto__ is used because we must return a function, but there is
|
||||
// no way to create a function with a different prototype.
|
||||
builder.__proto__ = proto;
|
||||
return builder;
|
||||
}
|
||||
|
||||
var styles = (function () {
|
||||
var ret = {};
|
||||
|
||||
ansiStyles.grey = ansiStyles.gray;
|
||||
|
||||
Object.keys(ansiStyles).forEach(function (key) {
|
||||
ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g');
|
||||
|
||||
ret[key] = {
|
||||
get: function () {
|
||||
return build(this._styles.concat(key));
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
return ret;
|
||||
})();
|
||||
|
||||
var proto = defineProps(function chalk() {}, styles);
|
||||
|
||||
function applyStyle() {
|
||||
// support varags, but simply cast to string in case there's only one arg
|
||||
var args = arguments;
|
||||
var argsLen = args.length;
|
||||
var str = argsLen !== 0 && String(arguments[0]);
|
||||
if (argsLen > 1) {
|
||||
// don't slice `arguments`, it prevents v8 optimizations
|
||||
for (var a = 1; a < argsLen; a++) {
|
||||
str += ' ' + args[a];
|
||||
}
|
||||
}
|
||||
|
||||
if (!chalk.enabled || !str) {
|
||||
return str;
|
||||
}
|
||||
|
||||
/*jshint validthis: true*/
|
||||
var nestedStyles = this._styles;
|
||||
|
||||
for (var i = 0; i < nestedStyles.length; i++) {
|
||||
var code = ansiStyles[nestedStyles[i]];
|
||||
// Replace any instances already present with a re-opening code
|
||||
// otherwise only the part of the string until said closing code
|
||||
// will be colored, and the rest will simply be 'plain'.
|
||||
str = code.open + str.replace(code.closeRe, code.open) + code.close;
|
||||
}
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
function init() {
|
||||
var ret = {};
|
||||
|
||||
Object.keys(styles).forEach(function (name) {
|
||||
ret[name] = {
|
||||
get: function () {
|
||||
return build([name]);
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
defineProps(chalk, init());
|
||||
|
||||
chalk.styles = ansiStyles;
|
||||
chalk.hasColor = hasAnsi;
|
||||
chalk.stripColor = stripAnsi;
|
||||
chalk.supportsColor = supportsColor;
|
||||
|
||||
// detect mode if not set manually
|
||||
if (chalk.enabled === undefined) {
|
||||
chalk.enabled = chalk.supportsColor;
|
||||
}
|
1
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/.bin/has-ansi
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
||||
../has-ansi/cli.js
|
1
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/.bin/strip-ansi
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
||||
../strip-ansi/cli.js
|
1
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/.bin/supports-color
generated
vendored
Symbolic link
@ -0,0 +1 @@
|
||||
../supports-color/cli.js
|
40
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/ansi-styles/index.js
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
'use strict';
|
||||
var styles = module.exports;
|
||||
|
||||
var codes = {
|
||||
reset: [0, 0],
|
||||
|
||||
bold: [1, 22], // 21 isn't widely supported and 22 does the same thing
|
||||
dim: [2, 22],
|
||||
italic: [3, 23],
|
||||
underline: [4, 24],
|
||||
inverse: [7, 27],
|
||||
hidden: [8, 28],
|
||||
strikethrough: [9, 29],
|
||||
|
||||
black: [30, 39],
|
||||
red: [31, 39],
|
||||
green: [32, 39],
|
||||
yellow: [33, 39],
|
||||
blue: [34, 39],
|
||||
magenta: [35, 39],
|
||||
cyan: [36, 39],
|
||||
white: [37, 39],
|
||||
gray: [90, 39],
|
||||
|
||||
bgBlack: [40, 49],
|
||||
bgRed: [41, 49],
|
||||
bgGreen: [42, 49],
|
||||
bgYellow: [43, 49],
|
||||
bgBlue: [44, 49],
|
||||
bgMagenta: [45, 49],
|
||||
bgCyan: [46, 49],
|
||||
bgWhite: [47, 49]
|
||||
};
|
||||
|
||||
Object.keys(codes).forEach(function (key) {
|
||||
var val = codes[key];
|
||||
var style = styles[key] = {};
|
||||
style.open = '\u001b[' + val[0] + 'm';
|
||||
style.close = '\u001b[' + val[1] + 'm';
|
||||
});
|
74
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/ansi-styles/package.json
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
{
|
||||
"name": "ansi-styles",
|
||||
"version": "1.1.0",
|
||||
"description": "ANSI escape codes for styling strings in the terminal",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/sindresorhus/ansi-styles"
|
||||
},
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "http://sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"ansi",
|
||||
"styles",
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"string",
|
||||
"tty",
|
||||
"escape",
|
||||
"formatting",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"log",
|
||||
"logging",
|
||||
"command-line",
|
||||
"text"
|
||||
],
|
||||
"devDependencies": {
|
||||
"mocha": "*"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/sindresorhus/ansi-styles/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sindresorhus/ansi-styles",
|
||||
"_id": "ansi-styles@1.1.0",
|
||||
"_shasum": "eaecbf66cd706882760b2f4691582b8f55d7a7de",
|
||||
"_from": "ansi-styles@^1.1.0",
|
||||
"_npmVersion": "1.4.9",
|
||||
"_npmUser": {
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
}
|
||||
],
|
||||
"dist": {
|
||||
"shasum": "eaecbf66cd706882760b2f4691582b8f55d7a7de",
|
||||
"tarball": "http://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz"
|
||||
},
|
||||
"directories": {},
|
||||
"_resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-1.1.0.tgz",
|
||||
"readme": "ERROR: No README data found!"
|
||||
}
|
70
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/ansi-styles/readme.md
generated
vendored
Normal file
@ -0,0 +1,70 @@
|
||||
# ansi-styles [](https://travis-ci.org/sindresorhus/ansi-styles)
|
||||
|
||||
> [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal
|
||||
|
||||
You probably want the higher-level [chalk](https://github.com/sindresorhus/chalk) module for styling your strings.
|
||||
|
||||

|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
$ npm install --save ansi-styles
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var ansi = require('ansi-styles');
|
||||
|
||||
console.log(ansi.green.open + 'Hello world!' + ansi.green.close);
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
Each style has an `open` and `close` property.
|
||||
|
||||
|
||||
## Styles
|
||||
|
||||
### General
|
||||
|
||||
- `reset`
|
||||
- `bold`
|
||||
- `dim`
|
||||
- `italic` *(not widely supported)*
|
||||
- `underline`
|
||||
- `inverse`
|
||||
- `hidden`
|
||||
- `strikethrough` *(not widely supported)*
|
||||
|
||||
### Text colors
|
||||
|
||||
- `black`
|
||||
- `red`
|
||||
- `green`
|
||||
- `yellow`
|
||||
- `blue`
|
||||
- `magenta`
|
||||
- `cyan`
|
||||
- `white`
|
||||
- `gray`
|
||||
|
||||
### Background colors
|
||||
|
||||
- `bgBlack`
|
||||
- `bgRed`
|
||||
- `bgGreen`
|
||||
- `bgYellow`
|
||||
- `bgBlue`
|
||||
- `bgMagenta`
|
||||
- `bgCyan`
|
||||
- `bgWhite`
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
11
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/escape-string-regexp/index.js
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
'use strict';
|
||||
|
||||
var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g;
|
||||
|
||||
module.exports = function (str) {
|
||||
if (typeof str !== 'string') {
|
||||
throw new TypeError('Expected a string');
|
||||
}
|
||||
|
||||
return str.replace(matchOperatorsRe, '\\$&');
|
||||
};
|
21
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/escape-string-regexp/license
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
@ -0,0 +1,70 @@
|
||||
{
|
||||
"name": "escape-string-regexp",
|
||||
"version": "1.0.3",
|
||||
"description": "Escape RegExp special characters",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/sindresorhus/escape-string-regexp"
|
||||
},
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "http://sindresorhus.com"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "jbnicolai",
|
||||
"email": "jappelman@xebia.com"
|
||||
}
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"regex",
|
||||
"regexp",
|
||||
"re",
|
||||
"regular",
|
||||
"expression",
|
||||
"escape",
|
||||
"string",
|
||||
"str",
|
||||
"special",
|
||||
"characters"
|
||||
],
|
||||
"devDependencies": {
|
||||
"mocha": "*"
|
||||
},
|
||||
"gitHead": "1e446e6b4449b5f1f8868cd31bf8fd25ee37fb4b",
|
||||
"bugs": {
|
||||
"url": "https://github.com/sindresorhus/escape-string-regexp/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sindresorhus/escape-string-regexp",
|
||||
"_id": "escape-string-regexp@1.0.3",
|
||||
"_shasum": "9e2d8b25bc2555c3336723750e03f099c2735bb5",
|
||||
"_from": "escape-string-regexp@^1.0.0",
|
||||
"_npmVersion": "2.1.16",
|
||||
"_nodeVersion": "0.10.35",
|
||||
"_npmUser": {
|
||||
"name": "jbnicolai",
|
||||
"email": "jappelman@xebia.com"
|
||||
},
|
||||
"dist": {
|
||||
"shasum": "9e2d8b25bc2555c3336723750e03f099c2735bb5",
|
||||
"tarball": "http://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz"
|
||||
},
|
||||
"directories": {},
|
||||
"_resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.3.tgz",
|
||||
"readme": "ERROR: No README data found!"
|
||||
}
|
@ -0,0 +1,27 @@
|
||||
# escape-string-regexp [](https://travis-ci.org/sindresorhus/escape-string-regexp)
|
||||
|
||||
> Escape RegExp special characters
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
$ npm install --save escape-string-regexp
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var escapeStringRegexp = require('escape-string-regexp');
|
||||
|
||||
var escapedString = escapeStringRegexp('how much $ for a unicorn?');
|
||||
//=> how much \$ for a unicorn\?
|
||||
|
||||
new RegExp(escapedString);
|
||||
```
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
53
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/has-ansi/cli.js
generated
vendored
Executable file
@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
var pkg = require('./package.json');
|
||||
var hasAnsi = require('./');
|
||||
var input = process.argv[2];
|
||||
|
||||
function stdin(cb) {
|
||||
var ret = '';
|
||||
process.stdin.setEncoding('utf8');
|
||||
process.stdin.on('data', function (data) {
|
||||
ret += data;
|
||||
});
|
||||
process.stdin.on('end', function () {
|
||||
cb(ret);
|
||||
});
|
||||
}
|
||||
|
||||
function help() {
|
||||
console.log([
|
||||
pkg.description,
|
||||
'',
|
||||
'Usage',
|
||||
' $ has-ansi <string>',
|
||||
' $ echo <string> | has-ansi',
|
||||
'',
|
||||
'Exits with code 0 if input has ANSI escape codes and 1 if not'
|
||||
].join('\n'));
|
||||
}
|
||||
|
||||
function init(data) {
|
||||
process.exit(hasAnsi(data) ? 0 : 1);
|
||||
}
|
||||
|
||||
if (process.argv.indexOf('--help') !== -1) {
|
||||
help();
|
||||
return;
|
||||
}
|
||||
|
||||
if (process.argv.indexOf('--version') !== -1) {
|
||||
console.log(pkg.version);
|
||||
return;
|
||||
}
|
||||
|
||||
if (process.stdin.isTTY) {
|
||||
if (!input) {
|
||||
help();
|
||||
return;
|
||||
}
|
||||
|
||||
init(input);
|
||||
} else {
|
||||
stdin(init);
|
||||
}
|
4
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/has-ansi/index.js
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
'use strict';
|
||||
var ansiRegex = require('ansi-regex');
|
||||
var re = new RegExp(ansiRegex().source); // remove the `g` flag
|
||||
module.exports = re.test.bind(re);
|
@ -0,0 +1,4 @@
|
||||
'use strict';
|
||||
module.exports = function () {
|
||||
return /\u001b\[(?:[0-9]{1,3}(?:;[0-9]{1,3})*)?[m|K]/g;
|
||||
};
|
@ -0,0 +1,79 @@
|
||||
{
|
||||
"name": "ansi-regex",
|
||||
"version": "0.2.1",
|
||||
"description": "Regular expression for matching ANSI escape codes",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/sindresorhus/ansi-regex"
|
||||
},
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "http://sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"ansi",
|
||||
"styles",
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"string",
|
||||
"tty",
|
||||
"escape",
|
||||
"formatting",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"command-line",
|
||||
"text",
|
||||
"regex",
|
||||
"regexp",
|
||||
"re",
|
||||
"match",
|
||||
"test",
|
||||
"find",
|
||||
"pattern"
|
||||
],
|
||||
"devDependencies": {
|
||||
"mocha": "*"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/sindresorhus/ansi-regex/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sindresorhus/ansi-regex",
|
||||
"_id": "ansi-regex@0.2.1",
|
||||
"_shasum": "0d8e946967a3d8143f93e24e298525fc1b2235f9",
|
||||
"_from": "ansi-regex@^0.2.0",
|
||||
"_npmVersion": "1.4.9",
|
||||
"_npmUser": {
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
}
|
||||
],
|
||||
"dist": {
|
||||
"shasum": "0d8e946967a3d8143f93e24e298525fc1b2235f9",
|
||||
"tarball": "http://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz"
|
||||
},
|
||||
"directories": {},
|
||||
"_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz",
|
||||
"readme": "ERROR: No README data found!"
|
||||
}
|
@ -0,0 +1,33 @@
|
||||
# ansi-regex [](https://travis-ci.org/sindresorhus/ansi-regex)
|
||||
|
||||
> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
$ npm install --save ansi-regex
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var ansiRegex = require('ansi-regex');
|
||||
|
||||
ansiRegex().test('\u001b[4mcake\u001b[0m');
|
||||
//=> true
|
||||
|
||||
ansiRegex().test('cake');
|
||||
//=> false
|
||||
|
||||
'\u001b[4mcake\u001b[0m'.match(ansiRegex());
|
||||
//=> ['\u001b[4m', '\u001b[0m']
|
||||
```
|
||||
|
||||
*It's a function so you can create multiple instances. Regexes with the global flag will have the `.lastIndex` property changed for each call to methods on the instance. Therefore reusing the instance with multiple calls will not work as expected for `.test()`.*
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
85
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/has-ansi/package.json
generated
vendored
Normal file
@ -0,0 +1,85 @@
|
||||
{
|
||||
"name": "has-ansi",
|
||||
"version": "0.1.0",
|
||||
"description": "Check if a string has ANSI escape codes",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/sindresorhus/has-ansi"
|
||||
},
|
||||
"bin": {
|
||||
"has-ansi": "cli.js"
|
||||
},
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "http://sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"cli.js"
|
||||
],
|
||||
"keywords": [
|
||||
"cli",
|
||||
"bin",
|
||||
"ansi",
|
||||
"styles",
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"string",
|
||||
"tty",
|
||||
"escape",
|
||||
"shell",
|
||||
"xterm",
|
||||
"command-line",
|
||||
"text",
|
||||
"regex",
|
||||
"regexp",
|
||||
"re",
|
||||
"match",
|
||||
"test",
|
||||
"find",
|
||||
"pattern",
|
||||
"has"
|
||||
],
|
||||
"dependencies": {
|
||||
"ansi-regex": "^0.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mocha": "*"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/sindresorhus/has-ansi/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sindresorhus/has-ansi",
|
||||
"_id": "has-ansi@0.1.0",
|
||||
"_shasum": "84f265aae8c0e6a88a12d7022894b7568894c62e",
|
||||
"_from": "has-ansi@^0.1.0",
|
||||
"_npmVersion": "1.4.9",
|
||||
"_npmUser": {
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
}
|
||||
],
|
||||
"dist": {
|
||||
"shasum": "84f265aae8c0e6a88a12d7022894b7568894c62e",
|
||||
"tarball": "http://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz"
|
||||
},
|
||||
"directories": {},
|
||||
"_resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-0.1.0.tgz",
|
||||
"readme": "ERROR: No README data found!"
|
||||
}
|
45
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/has-ansi/readme.md
generated
vendored
Normal file
@ -0,0 +1,45 @@
|
||||
# has-ansi [](https://travis-ci.org/sindresorhus/has-ansi)
|
||||
|
||||
> Check if a string has [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
$ npm install --save has-ansi
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var hasAnsi = require('has-ansi');
|
||||
|
||||
hasAnsi('\u001b[4mcake\u001b[0m');
|
||||
//=> true
|
||||
|
||||
hasAnsi('cake');
|
||||
//=> false
|
||||
```
|
||||
|
||||
|
||||
## CLI
|
||||
|
||||
```sh
|
||||
$ npm install --global has-ansi
|
||||
```
|
||||
|
||||
```
|
||||
$ has-ansi --help
|
||||
|
||||
Usage
|
||||
$ has-ansi <string>
|
||||
$ echo <string> | has-ansi
|
||||
|
||||
Exits with code 0 if input has ANSI escape codes and 1 if not
|
||||
```
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
39
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/strip-ansi/cli.js
generated
vendored
Executable file
@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
var fs = require('fs');
|
||||
var pkg = require('./package.json');
|
||||
var strip = require('./');
|
||||
var input = process.argv[2];
|
||||
|
||||
function help() {
|
||||
console.log([
|
||||
pkg.description,
|
||||
'',
|
||||
'Usage',
|
||||
' $ strip-ansi <input-file> > <output-file>',
|
||||
' $ cat <input-file> | strip-ansi > <output-file>',
|
||||
'',
|
||||
'Example',
|
||||
' $ strip-ansi unicorn.txt > unicorn-stripped.txt'
|
||||
].join('\n'));
|
||||
}
|
||||
|
||||
if (process.argv.indexOf('--help') !== -1) {
|
||||
help();
|
||||
return;
|
||||
}
|
||||
|
||||
if (process.argv.indexOf('--version') !== -1) {
|
||||
console.log(pkg.version);
|
||||
return;
|
||||
}
|
||||
|
||||
if (input) {
|
||||
process.stdout.write(strip(fs.readFileSync(input, 'utf8')));
|
||||
return;
|
||||
}
|
||||
|
||||
process.stdin.setEncoding('utf8');
|
||||
process.stdin.on('data', function (data) {
|
||||
process.stdout.write(strip(data));
|
||||
});
|
6
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/strip-ansi/index.js
generated
vendored
Normal file
@ -0,0 +1,6 @@
|
||||
'use strict';
|
||||
var ansiRegex = require('ansi-regex')();
|
||||
|
||||
module.exports = function (str) {
|
||||
return typeof str === 'string' ? str.replace(ansiRegex, '') : str;
|
||||
};
|
@ -0,0 +1,4 @@
|
||||
'use strict';
|
||||
module.exports = function () {
|
||||
return /\u001b\[(?:[0-9]{1,3}(?:;[0-9]{1,3})*)?[m|K]/g;
|
||||
};
|
@ -0,0 +1,79 @@
|
||||
{
|
||||
"name": "ansi-regex",
|
||||
"version": "0.2.1",
|
||||
"description": "Regular expression for matching ANSI escape codes",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/sindresorhus/ansi-regex"
|
||||
},
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "http://sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"ansi",
|
||||
"styles",
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"string",
|
||||
"tty",
|
||||
"escape",
|
||||
"formatting",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"command-line",
|
||||
"text",
|
||||
"regex",
|
||||
"regexp",
|
||||
"re",
|
||||
"match",
|
||||
"test",
|
||||
"find",
|
||||
"pattern"
|
||||
],
|
||||
"devDependencies": {
|
||||
"mocha": "*"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/sindresorhus/ansi-regex/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sindresorhus/ansi-regex",
|
||||
"_id": "ansi-regex@0.2.1",
|
||||
"_shasum": "0d8e946967a3d8143f93e24e298525fc1b2235f9",
|
||||
"_from": "ansi-regex@^0.2.0",
|
||||
"_npmVersion": "1.4.9",
|
||||
"_npmUser": {
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
}
|
||||
],
|
||||
"dist": {
|
||||
"shasum": "0d8e946967a3d8143f93e24e298525fc1b2235f9",
|
||||
"tarball": "http://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz"
|
||||
},
|
||||
"directories": {},
|
||||
"_resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-0.2.1.tgz",
|
||||
"readme": "ERROR: No README data found!"
|
||||
}
|
@ -0,0 +1,33 @@
|
||||
# ansi-regex [](https://travis-ci.org/sindresorhus/ansi-regex)
|
||||
|
||||
> Regular expression for matching [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
$ npm install --save ansi-regex
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var ansiRegex = require('ansi-regex');
|
||||
|
||||
ansiRegex().test('\u001b[4mcake\u001b[0m');
|
||||
//=> true
|
||||
|
||||
ansiRegex().test('cake');
|
||||
//=> false
|
||||
|
||||
'\u001b[4mcake\u001b[0m'.match(ansiRegex());
|
||||
//=> ['\u001b[4m', '\u001b[0m']
|
||||
```
|
||||
|
||||
*It's a function so you can create multiple instances. Regexes with the global flag will have the `.lastIndex` property changed for each call to methods on the instance. Therefore reusing the instance with multiple calls will not work as expected for `.test()`.*
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
84
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/strip-ansi/package.json
generated
vendored
Normal file
@ -0,0 +1,84 @@
|
||||
{
|
||||
"name": "strip-ansi",
|
||||
"version": "0.3.0",
|
||||
"description": "Strip ANSI escape codes",
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"strip-ansi": "cli.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/sindresorhus/strip-ansi"
|
||||
},
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "http://sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"cli.js"
|
||||
],
|
||||
"keywords": [
|
||||
"strip",
|
||||
"trim",
|
||||
"remove",
|
||||
"ansi",
|
||||
"styles",
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"string",
|
||||
"tty",
|
||||
"escape",
|
||||
"formatting",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"log",
|
||||
"logging",
|
||||
"command-line",
|
||||
"text"
|
||||
],
|
||||
"dependencies": {
|
||||
"ansi-regex": "^0.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mocha": "*"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/sindresorhus/strip-ansi/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sindresorhus/strip-ansi",
|
||||
"_id": "strip-ansi@0.3.0",
|
||||
"_shasum": "25f48ea22ca79187f3174a4db8759347bb126220",
|
||||
"_from": "strip-ansi@^0.3.0",
|
||||
"_npmVersion": "1.4.9",
|
||||
"_npmUser": {
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
}
|
||||
],
|
||||
"dist": {
|
||||
"shasum": "25f48ea22ca79187f3174a4db8759347bb126220",
|
||||
"tarball": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz"
|
||||
},
|
||||
"directories": {},
|
||||
"_resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.3.0.tgz",
|
||||
"readme": "ERROR: No README data found!"
|
||||
}
|
43
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/strip-ansi/readme.md
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
||||
# strip-ansi [](https://travis-ci.org/sindresorhus/strip-ansi)
|
||||
|
||||
> Strip [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
$ npm install --save strip-ansi
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var stripAnsi = require('strip-ansi');
|
||||
|
||||
stripAnsi('\x1b[4mcake\x1b[0m');
|
||||
//=> 'cake'
|
||||
```
|
||||
|
||||
|
||||
## CLI
|
||||
|
||||
```sh
|
||||
$ npm install --global strip-ansi
|
||||
```
|
||||
|
||||
```sh
|
||||
$ strip-ansi --help
|
||||
|
||||
Usage
|
||||
$ strip-ansi <input-file> > <output-file>
|
||||
$ cat <input-file> | strip-ansi > <output-file>
|
||||
|
||||
Example
|
||||
$ strip-ansi unicorn.txt > unicorn-stripped.txt
|
||||
```
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
28
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/supports-color/cli.js
generated
vendored
Executable file
@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env node
|
||||
'use strict';
|
||||
var pkg = require('./package.json');
|
||||
var supportsColor = require('./');
|
||||
var input = process.argv[2];
|
||||
|
||||
function help() {
|
||||
console.log([
|
||||
pkg.description,
|
||||
'',
|
||||
'Usage',
|
||||
' $ supports-color',
|
||||
'',
|
||||
'Exits with code 0 if color is supported and 1 if not'
|
||||
].join('\n'));
|
||||
}
|
||||
|
||||
if (!input || process.argv.indexOf('--help') !== -1) {
|
||||
help();
|
||||
return;
|
||||
}
|
||||
|
||||
if (process.argv.indexOf('--version') !== -1) {
|
||||
console.log(pkg.version);
|
||||
return;
|
||||
}
|
||||
|
||||
process.exit(supportsColor ? 0 : 1);
|
32
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/supports-color/index.js
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
'use strict';
|
||||
module.exports = (function () {
|
||||
if (process.argv.indexOf('--no-color') !== -1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (process.argv.indexOf('--color') !== -1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (process.stdout && !process.stdout.isTTY) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
return true;
|
||||
}
|
||||
|
||||
if ('COLORTERM' in process.env) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (process.env.TERM === 'dumb') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(process.env.TERM)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
})();
|
78
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/supports-color/package.json
generated
vendored
Normal file
@ -0,0 +1,78 @@
|
||||
{
|
||||
"name": "supports-color",
|
||||
"version": "0.2.0",
|
||||
"description": "Detect whether a terminal supports color",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/sindresorhus/supports-color"
|
||||
},
|
||||
"bin": {
|
||||
"supports-color": "cli.js"
|
||||
},
|
||||
"author": {
|
||||
"name": "Sindre Sorhus",
|
||||
"email": "sindresorhus@gmail.com",
|
||||
"url": "http://sindresorhus.com"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"cli.js"
|
||||
],
|
||||
"keywords": [
|
||||
"cli",
|
||||
"bin",
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"ansi",
|
||||
"styles",
|
||||
"tty",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"command-line",
|
||||
"support",
|
||||
"supports",
|
||||
"capability",
|
||||
"detect"
|
||||
],
|
||||
"devDependencies": {
|
||||
"mocha": "*"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/sindresorhus/supports-color/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sindresorhus/supports-color",
|
||||
"_id": "supports-color@0.2.0",
|
||||
"_shasum": "d92de2694eb3f67323973d7ae3d8b55b4c22190a",
|
||||
"_from": "supports-color@^0.2.0",
|
||||
"_npmVersion": "1.4.9",
|
||||
"_npmUser": {
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
}
|
||||
],
|
||||
"dist": {
|
||||
"shasum": "d92de2694eb3f67323973d7ae3d8b55b4c22190a",
|
||||
"tarball": "http://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz"
|
||||
},
|
||||
"directories": {},
|
||||
"_resolved": "https://registry.npmjs.org/supports-color/-/supports-color-0.2.0.tgz",
|
||||
"readme": "ERROR: No README data found!"
|
||||
}
|
44
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/node_modules/supports-color/readme.md
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
# supports-color [](https://travis-ci.org/sindresorhus/supports-color)
|
||||
|
||||
> Detect whether a terminal supports color
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
$ npm install --save supports-color
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
var supportsColor = require('supports-color');
|
||||
|
||||
if (supportsColor) {
|
||||
console.log('Terminal supports color');
|
||||
}
|
||||
```
|
||||
|
||||
It obeys the `--color` and `--no-color` CLI flags.
|
||||
|
||||
|
||||
## CLI
|
||||
|
||||
```sh
|
||||
$ npm install --global supports-color
|
||||
```
|
||||
|
||||
```sh
|
||||
$ supports-color --help
|
||||
|
||||
Usage
|
||||
$ supports-color
|
||||
|
||||
# Exits with code 0 if color is supported and 1 if not
|
||||
```
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
82
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/package.json
generated
vendored
Normal file
@ -0,0 +1,82 @@
|
||||
{
|
||||
"name": "chalk",
|
||||
"version": "0.5.1",
|
||||
"description": "Terminal string styling done right. Created because the `colors` module does some really horrible things.",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/sindresorhus/chalk"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "sindresorhus",
|
||||
"email": "sindresorhus@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "jbnicolai",
|
||||
"email": "jappelman@xebia.com"
|
||||
}
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "mocha",
|
||||
"bench": "matcha benchmark.js"
|
||||
},
|
||||
"files": [
|
||||
"index.js"
|
||||
],
|
||||
"keywords": [
|
||||
"color",
|
||||
"colour",
|
||||
"colors",
|
||||
"terminal",
|
||||
"console",
|
||||
"cli",
|
||||
"string",
|
||||
"ansi",
|
||||
"styles",
|
||||
"tty",
|
||||
"formatting",
|
||||
"rgb",
|
||||
"256",
|
||||
"shell",
|
||||
"xterm",
|
||||
"log",
|
||||
"logging",
|
||||
"command-line",
|
||||
"text"
|
||||
],
|
||||
"dependencies": {
|
||||
"ansi-styles": "^1.1.0",
|
||||
"escape-string-regexp": "^1.0.0",
|
||||
"has-ansi": "^0.1.0",
|
||||
"strip-ansi": "^0.3.0",
|
||||
"supports-color": "^0.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"matcha": "^0.5.0",
|
||||
"mocha": "*"
|
||||
},
|
||||
"gitHead": "994758f01293f1fdcf63282e9917cb9f2cfbdaac",
|
||||
"bugs": {
|
||||
"url": "https://github.com/sindresorhus/chalk/issues"
|
||||
},
|
||||
"homepage": "https://github.com/sindresorhus/chalk",
|
||||
"_id": "chalk@0.5.1",
|
||||
"_shasum": "663b3a648b68b55d04690d49167aa837858f2174",
|
||||
"_from": "chalk@^0.5.1",
|
||||
"_npmVersion": "1.4.14",
|
||||
"_npmUser": {
|
||||
"name": "jbnicolai",
|
||||
"email": "jappelman@xebia.com"
|
||||
},
|
||||
"dist": {
|
||||
"shasum": "663b3a648b68b55d04690d49167aa837858f2174",
|
||||
"tarball": "http://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz"
|
||||
},
|
||||
"directories": {},
|
||||
"_resolved": "https://registry.npmjs.org/chalk/-/chalk-0.5.1.tgz",
|
||||
"readme": "ERROR: No README data found!"
|
||||
}
|
175
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/chalk/readme.md
generated
vendored
Normal file
@ -0,0 +1,175 @@
|
||||
# <img width="300" src="https://cdn.rawgit.com/sindresorhus/chalk/77ae94f63ab1ac61389b190e5a59866569d1a376/logo.svg" alt="chalk">
|
||||
|
||||
> Terminal string styling done right
|
||||
|
||||
[](https://travis-ci.org/sindresorhus/chalk)
|
||||

|
||||
|
||||
[colors.js](https://github.com/Marak/colors.js) is currently the most popular string styling module, but it has serious deficiencies like extending String.prototype which causes all kinds of [problems](https://github.com/yeoman/yo/issues/68). Although there are other ones, they either do too much or not enough.
|
||||
|
||||
**Chalk is a clean and focused alternative.**
|
||||
|
||||

|
||||
|
||||
|
||||
## Why
|
||||
|
||||
- Highly performant
|
||||
- Doesn't extend String.prototype
|
||||
- Expressive API
|
||||
- Ability to nest styles
|
||||
- Clean and focused
|
||||
- Auto-detects color support
|
||||
- Actively maintained
|
||||
- [Used by 1000+ modules](https://npmjs.org/browse/depended/chalk)
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
```sh
|
||||
$ npm install --save chalk
|
||||
```
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
Chalk comes with an easy to use composable API where you just chain and nest the styles you want.
|
||||
|
||||
```js
|
||||
var chalk = require('chalk');
|
||||
|
||||
// style a string
|
||||
console.log( chalk.blue('Hello world!') );
|
||||
|
||||
// combine styled and normal strings
|
||||
console.log( chalk.blue('Hello'), 'World' + chalk.red('!') );
|
||||
|
||||
// compose multiple styles using the chainable API
|
||||
console.log( chalk.blue.bgRed.bold('Hello world!') );
|
||||
|
||||
// pass in multiple arguments
|
||||
console.log( chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz') );
|
||||
|
||||
// nest styles
|
||||
console.log( chalk.red('Hello', chalk.underline.bgBlue('world') + '!') );
|
||||
|
||||
// nest styles of the same type even (color, underline, background)
|
||||
console.log( chalk.green('I am a green line ' + chalk.blue('with a blue substring') + ' that becomes green again!') );
|
||||
```
|
||||
|
||||
Easily define your own themes.
|
||||
|
||||
```js
|
||||
var chalk = require('chalk');
|
||||
var error = chalk.bold.red;
|
||||
console.log(error('Error!'));
|
||||
```
|
||||
|
||||
Take advantage of console.log [string substitution](http://nodejs.org/docs/latest/api/console.html#console_console_log_data).
|
||||
|
||||
```js
|
||||
var name = 'Sindre';
|
||||
console.log(chalk.green('Hello %s'), name);
|
||||
//=> Hello Sindre
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
### chalk.`<style>[.<style>...](string, [string...])`
|
||||
|
||||
Example: `chalk.red.bold.underline('Hello', 'world');`
|
||||
|
||||
Chain [styles](#styles) and call the last one as a method with a string argument. Order doesn't matter.
|
||||
|
||||
Multiple arguments will be separated by space.
|
||||
|
||||
### chalk.enabled
|
||||
|
||||
Color support is automatically detected, but you can override it.
|
||||
|
||||
### chalk.supportsColor
|
||||
|
||||
Detect whether the terminal [supports color](https://github.com/sindresorhus/supports-color).
|
||||
|
||||
Can be overridden by the user with the flags `--color` and `--no-color`.
|
||||
|
||||
Used internally and handled for you, but exposed for convenience.
|
||||
|
||||
### chalk.styles
|
||||
|
||||
Exposes the styles as [ANSI escape codes](https://github.com/sindresorhus/ansi-styles).
|
||||
|
||||
Generally not useful, but you might need just the `.open` or `.close` escape code if you're mixing externally styled strings with yours.
|
||||
|
||||
```js
|
||||
var chalk = require('chalk');
|
||||
|
||||
console.log(chalk.styles.red);
|
||||
//=> {open: '\u001b[31m', close: '\u001b[39m'}
|
||||
|
||||
console.log(chalk.styles.red.open + 'Hello' + chalk.styles.red.close);
|
||||
```
|
||||
|
||||
### chalk.hasColor(string)
|
||||
|
||||
Check whether a string [has color](https://github.com/sindresorhus/has-ansi).
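For example:

```js
var chalk = require('chalk');

chalk.hasColor(chalk.blue('Hi'));
//=> true

chalk.hasColor('Hi');
//=> false
```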
|
||||
|
||||
### chalk.stripColor(string)
|
||||
|
||||
[Strip color](https://github.com/sindresorhus/strip-ansi) from a string.
|
||||
|
||||
Can be useful in combination with `.supportsColor` to strip color on externally styled text when it's not supported.
|
||||
|
||||
Example:
|
||||
|
||||
```js
|
||||
var chalk = require('chalk');
|
||||
var styledString = getText();
|
||||
|
||||
if (!chalk.supportsColor) {
|
||||
styledString = chalk.stripColor(styledString);
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Styles
|
||||
|
||||
### General
|
||||
|
||||
- `reset`
|
||||
- `bold`
|
||||
- `dim`
|
||||
- `italic` *(not widely supported)*
|
||||
- `underline`
|
||||
- `inverse`
|
||||
- `hidden`
|
||||
- `strikethrough` *(not widely supported)*
|
||||
|
||||
### Text colors
|
||||
|
||||
- `black`
|
||||
- `red`
|
||||
- `green`
|
||||
- `yellow`
|
||||
- `blue`
|
||||
- `magenta`
|
||||
- `cyan`
|
||||
- `white`
|
||||
- `gray`
|
||||
|
||||
### Background colors
|
||||
|
||||
- `bgBlack`
|
||||
- `bgRed`
|
||||
- `bgGreen`
|
||||
- `bgYellow`
|
||||
- `bgBlue`
|
||||
- `bgMagenta`
|
||||
- `bgCyan`
|
||||
- `bgWhite`
|
||||
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Sindre Sorhus](http://sindresorhus.com)
|
2
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
dist/*
|
||||
node_modules/*
|
4
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- 0.8
|
||||
- "0.10"
|
208
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,208 @@
|
||||
# Change Log
|
||||
|
||||
## 0.3.0
|
||||
|
||||
* Change the default direction that searching for positions fuzzes when there is
|
||||
not an exact match. See #154.
|
||||
|
||||
* Support for environments using json2.js for JSON serialization. See #156.
|
||||
|
||||
## 0.2.0
|
||||
|
||||
* Support for consuming "indexed" source maps which do not have any remote
|
||||
sections. See pull request #127. This introduces a minor backwards
|
||||
incompatibility if you are monkey patching `SourceMapConsumer.prototype`
|
||||
methods.
|
||||
|
||||
## 0.1.43
|
||||
|
||||
* Performance improvements for `SourceMapGenerator` and `SourceNode`. See issue
|
||||
#148 for some discussion and issues #150, #151, and #152 for implementations.
|
||||
|
||||
## 0.1.42
|
||||
|
||||
* Fix an issue where `SourceNode`s from different versions of the source-map
|
||||
library couldn't be used in conjunction with each other. See issue #142.
|
||||
|
||||
## 0.1.41
|
||||
|
||||
* Fix a bug with getting the source content of relative sources with a "./"
|
||||
prefix. See issue #145 and [Bug 1090768](bugzil.la/1090768).
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.computeColumnSpans` method to compute the
|
||||
column span of each mapping.
|
||||
|
||||
* Add the `SourceMapConsumer.prototype.allGeneratedPositionsFor` method to find
|
||||
all generated positions associated with a given original source and line.
|
||||
|
||||
## 0.1.40
|
||||
|
||||
* Performance improvements for parsing source maps in SourceMapConsumer.
|
||||
|
||||
## 0.1.39
|
||||
|
||||
* Fix a bug where setting a source's contents to null before any source content
|
||||
had been set threw a TypeError. See issue #131.
|
||||
|
||||
## 0.1.38
|
||||
|
||||
* Fix a bug where finding relative paths from an empty path were creating
|
||||
absolute paths. See issue #129.
|
||||
|
||||
## 0.1.37
|
||||
|
||||
* Fix a bug where if the source root was an empty string, relative source paths
|
||||
would turn into absolute source paths. Issue #124.
|
||||
|
||||
## 0.1.36
|
||||
|
||||
* Allow the `names` mapping property to be an empty string. Issue #121.
|
||||
|
||||
## 0.1.35
|
||||
|
||||
* A third optional parameter was added to `SourceNode.fromStringWithSourceMap`
|
||||
to specify a path that relative sources in the second parameter should be
|
||||
relative to. Issue #105.
|
||||
|
||||
* If no file property is given to a `SourceMapGenerator`, then the resulting
|
||||
source map will no longer have a `null` file property. The property will
|
||||
simply not exist. Issue #104.
|
||||
|
||||
* Fixed a bug where consecutive newlines were ignored in `SourceNode`s.
|
||||
Issue #116.
|
||||
|
||||
## 0.1.34
|
||||
|
||||
* Make `SourceNode` work with windows style ("\r\n") newlines. Issue #103.
|
||||
|
||||
* Fix bug involving source contents and the
|
||||
`SourceMapGenerator.prototype.applySourceMap`. Issue #100.
|
||||
|
||||
## 0.1.33
|
||||
|
||||
* Fix some edge cases surrounding path joining and URL resolution.
|
||||
|
||||
* Add a third parameter for relative path to
|
||||
`SourceMapGenerator.prototype.applySourceMap`.
|
||||
|
||||
* Fix issues with mappings and EOLs.
|
||||
|
||||
## 0.1.32
|
||||
|
||||
* Fixed a bug where SourceMapConsumer couldn't handle negative relative columns
|
||||
(issue 92).
|
||||
|
||||
* Fixed test runner to actually report number of failed tests as its process
|
||||
exit code.
|
||||
|
||||
* Fixed a typo when reporting bad mappings (issue 87).
|
||||
|
||||
## 0.1.31
|
||||
|
||||
* Delay parsing the mappings in SourceMapConsumer until queried for a source
|
||||
location.
|
||||
|
||||
* Support Sass source maps (which at the time of writing deviate from the spec
|
||||
in small ways) in SourceMapConsumer.
|
||||
|
||||
## 0.1.30
|
||||
|
||||
* Do not join source root with a source, when the source is a data URI.
|
||||
|
||||
* Extend the test runner to allow running single specific test files at a time.
|
||||
|
||||
* Performance improvements in `SourceNode.prototype.walk` and
|
||||
`SourceMapConsumer.prototype.eachMapping`.
|
||||
|
||||
* Source map browser builds will now work inside Workers.
|
||||
|
||||
* Better error messages when attempting to add an invalid mapping to a
|
||||
`SourceMapGenerator`.
|
||||
|
||||
## 0.1.29
|
||||
|
||||
* Allow duplicate entries in the `names` and `sources` arrays of source maps
|
||||
(usually from TypeScript) we are parsing. Fixes github issue 72.
|
||||
|
||||
## 0.1.28
|
||||
|
||||
* Skip duplicate mappings when creating source maps from SourceNode; github
|
||||
issue 75.
|
||||
|
||||
## 0.1.27
|
||||
|
||||
* Don't throw an error when the `file` property is missing in SourceMapConsumer,
|
||||
we don't use it anyway.
|
||||
|
||||
## 0.1.26
|
||||
|
||||
* Fix SourceNode.fromStringWithSourceMap for empty maps. Fixes github issue 70.
|
||||
|
||||
## 0.1.25
|
||||
|
||||
* Make compatible with browserify
|
||||
|
||||
## 0.1.24
|
||||
|
||||
* Fix issue with absolute paths and `file://` URIs. See
|
||||
https://bugzilla.mozilla.org/show_bug.cgi?id=885597
|
||||
|
||||
## 0.1.23
|
||||
|
||||
* Fix issue with absolute paths and sourcesContent, github issue 64.
|
||||
|
||||
## 0.1.22
|
||||
|
||||
* Ignore duplicate mappings in SourceMapGenerator. Fixes github issue 21.
|
||||
|
||||
## 0.1.21
|
||||
|
||||
* Fixed handling of sources that start with a slash so that they are relative to
|
||||
the source root's host.
|
||||
|
||||
## 0.1.20
|
||||
|
||||
* Fixed github issue #43: absolute URLs aren't joined with the source root
|
||||
anymore.
|
||||
|
||||
## 0.1.19
|
||||
|
||||
* Using Travis CI to run tests.
|
||||
|
||||
## 0.1.18
|
||||
|
||||
* Fixed a bug in the handling of sourceRoot.
|
||||
|
||||
## 0.1.17
|
||||
|
||||
* Added SourceNode.fromStringWithSourceMap.
|
||||
|
||||
## 0.1.16
|
||||
|
||||
* Added missing documentation.
|
||||
|
||||
* Fixed the generating of empty mappings in SourceNode.
|
||||
|
||||
## 0.1.15
|
||||
|
||||
* Added SourceMapGenerator.applySourceMap.
|
||||
|
||||
## 0.1.14
|
||||
|
||||
* The sourceRoot is now handled consistently.
|
||||
|
||||
## 0.1.13
|
||||
|
||||
* Added SourceMapGenerator.fromSourceMap.
|
||||
|
||||
## 0.1.12
|
||||
|
||||
* SourceNode now generates empty mappings too.
|
||||
|
||||
## 0.1.11
|
||||
|
||||
* Added name support to SourceNode.
|
||||
|
||||
## 0.1.10
|
||||
|
||||
* Added sourcesContent support to the consumer and generator.
|
28
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
|
||||
Copyright (c) 2009-2011, Mozilla Foundation and contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the names of the Mozilla Foundation nor the names of project
|
||||
contributors may be used to endorse or promote products derived from this
|
||||
software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
166
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/Makefile.dryice.js
generated
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var copy = require('dryice').copy;
|
||||
|
||||
function removeAmdefine(src) {
|
||||
src = String(src).replace(
|
||||
/if\s*\(typeof\s*define\s*!==\s*'function'\)\s*{\s*var\s*define\s*=\s*require\('amdefine'\)\(module,\s*require\);\s*}\s*/g,
|
||||
'');
|
||||
src = src.replace(
|
||||
/\b(define\(.*)('amdefine',?)/gm,
|
||||
'$1');
|
||||
return src;
|
||||
}
|
||||
removeAmdefine.onRead = true;
|
||||
|
||||
function makeNonRelative(src) {
|
||||
return src
|
||||
.replace(/require\('.\//g, 'require(\'source-map/')
|
||||
.replace(/\.\.\/\.\.\/lib\//g, '');
|
||||
}
|
||||
makeNonRelative.onRead = true;
|
||||
|
||||
function buildBrowser() {
|
||||
console.log('\nCreating dist/source-map.js');
|
||||
|
||||
var project = copy.createCommonJsProject({
|
||||
roots: [ path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/mini-require.js',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'source-map/source-map-generator',
|
||||
'source-map/source-map-consumer',
|
||||
'source-map/source-node']
|
||||
},
|
||||
'build/suffix-browser.js'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine
|
||||
],
|
||||
dest: 'dist/source-map.js'
|
||||
});
|
||||
}
|
||||
|
||||
function buildBrowserMin() {
|
||||
console.log('\nCreating dist/source-map.min.js');
|
||||
|
||||
copy({
|
||||
source: 'dist/source-map.js',
|
||||
filter: copy.filter.uglifyjs,
|
||||
dest: 'dist/source-map.min.js'
|
||||
});
|
||||
}
|
||||
|
||||
function buildFirefox() {
|
||||
console.log('\nCreating dist/SourceMap.jsm');
|
||||
|
||||
var project = copy.createCommonJsProject({
|
||||
roots: [ path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/prefix-source-map.jsm',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'source-map/source-map-consumer',
|
||||
'source-map/source-map-generator',
|
||||
'source-map/source-node' ]
|
||||
},
|
||||
'build/suffix-source-map.jsm'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine,
|
||||
makeNonRelative
|
||||
],
|
||||
dest: 'dist/SourceMap.jsm'
|
||||
});
|
||||
|
||||
// Create dist/test/Utils.jsm
|
||||
console.log('\nCreating dist/test/Utils.jsm');
|
||||
|
||||
project = copy.createCommonJsProject({
|
||||
roots: [ __dirname, path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/prefix-utils.jsm',
|
||||
'build/assert-shim.js',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'test/source-map/util' ]
|
||||
},
|
||||
'build/suffix-utils.jsm'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine,
|
||||
makeNonRelative
|
||||
],
|
||||
dest: 'dist/test/Utils.jsm'
|
||||
});
|
||||
|
||||
function isTestFile(f) {
|
||||
return /^test\-.*?\.js/.test(f);
|
||||
}
|
||||
|
||||
var testFiles = fs.readdirSync(path.join(__dirname, 'test', 'source-map')).filter(isTestFile);
|
||||
|
||||
testFiles.forEach(function (testFile) {
|
||||
console.log('\nCreating', path.join('dist', 'test', testFile.replace(/\-/g, '_')));
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/test-prefix.js',
|
||||
path.join('test', 'source-map', testFile),
|
||||
'build/test-suffix.js'
|
||||
],
|
||||
filter: [
|
||||
removeAmdefine,
|
||||
makeNonRelative,
|
||||
function (input, source) {
|
||||
return input.replace('define(',
|
||||
'define("'
|
||||
+ path.join('test', 'source-map', testFile.replace(/\.js$/, ''))
|
||||
+ '", ["require", "exports", "module"], ');
|
||||
},
|
||||
function (input, source) {
|
||||
return input.replace('{THIS_MODULE}', function () {
|
||||
return "test/source-map/" + testFile.replace(/\.js$/, '');
|
||||
});
|
||||
}
|
||||
],
|
||||
dest: path.join('dist', 'test', testFile.replace(/\-/g, '_'))
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function ensureDir(name) {
|
||||
var dirExists = false;
|
||||
try {
|
||||
dirExists = fs.statSync(name).isDirectory();
|
||||
} catch (err) {}
|
||||
|
||||
if (!dirExists) {
|
||||
fs.mkdirSync(name, 0777);
|
||||
}
|
||||
}
|
||||
|
||||
ensureDir("dist");
|
||||
ensureDir("dist/test");
|
||||
buildFirefox();
|
||||
buildBrowser();
|
||||
buildBrowserMin();
|
489
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/README.md
generated
vendored
Normal file
@@ -0,0 +1,489 @@
|
||||
# Source Map
|
||||
|
||||
This is a library to generate and consume the source map format
|
||||
[described here][format].
|
||||
|
||||
This library is written in the Asynchronous Module Definition format, and works
|
||||
in the following environments:
|
||||
|
||||
* Modern Browsers supporting ECMAScript 5 (either after the build, or with an
|
||||
AMD loader such as RequireJS)
|
||||
|
||||
* Inside Firefox (as a JSM file, after the build)
|
||||
|
||||
* With NodeJS versions 0.8.X and higher
|
||||
|
||||
## Node
|
||||
|
||||
$ npm install source-map
|
||||
|
||||
## Building from Source (for everywhere else)
|
||||
|
||||
Install Node and then run
|
||||
|
||||
$ git clone https://fitzgen@github.com/mozilla/source-map.git
|
||||
$ cd source-map
|
||||
$ npm link .
|
||||
|
||||
Next, run
|
||||
|
||||
$ node Makefile.dryice.js
|
||||
|
||||
This should spew a bunch of stuff to stdout, and create the following files:
|
||||
|
||||
* `dist/source-map.js` - The unminified browser version.
|
||||
|
||||
* `dist/source-map.min.js` - The minified browser version.
|
||||
|
||||
* `dist/SourceMap.jsm` - The JavaScript Module for inclusion in Firefox source.
|
||||
|
||||
## Examples
|
||||
|
||||
### Consuming a source map
|
||||
|
||||
```js
|
||||
var rawSourceMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourceRoot: 'http://example.com/www/js/',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
|
||||
var smc = new SourceMapConsumer(rawSourceMap);
|
||||
|
||||
console.log(smc.sources);
|
||||
// [ 'http://example.com/www/js/one.js',
|
||||
// 'http://example.com/www/js/two.js' ]
|
||||
|
||||
console.log(smc.originalPositionFor({
|
||||
line: 2,
|
||||
column: 28
|
||||
}));
|
||||
// { source: 'http://example.com/www/js/two.js',
|
||||
// line: 2,
|
||||
// column: 10,
|
||||
// name: 'n' }
|
||||
|
||||
console.log(smc.generatedPositionFor({
|
||||
source: 'http://example.com/www/js/two.js',
|
||||
line: 2,
|
||||
column: 10
|
||||
}));
|
||||
// { line: 2, column: 28 }
|
||||
|
||||
smc.eachMapping(function (m) {
|
||||
// ...
|
||||
});
|
||||
```
|
||||
|
||||
### Generating a source map
|
||||
|
||||
In depth guide:
|
||||
[**Compiling to JavaScript, and Debugging with Source Maps**](https://hacks.mozilla.org/2013/05/compiling-to-javascript-and-debugging-with-source-maps/)
|
||||
|
||||
#### With SourceNode (high level API)
|
||||
|
||||
```js
|
||||
function compile(ast) {
|
||||
switch (ast.type) {
|
||||
case 'BinaryExpression':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
[compile(ast.left), " + ", compile(ast.right)]
|
||||
);
|
||||
case 'Literal':
|
||||
return new SourceNode(
|
||||
ast.location.line,
|
||||
ast.location.column,
|
||||
ast.location.source,
|
||||
String(ast.value)
|
||||
);
|
||||
// ...
|
||||
default:
|
||||
throw new Error("Bad AST");
|
||||
}
|
||||
}
|
||||
|
||||
var ast = parse("40 + 2", "add.js");
|
||||
console.log(compile(ast).toStringWithSourceMap({
|
||||
file: 'add.js'
|
||||
}));
|
||||
// { code: '40 + 2',
|
||||
// map: [object SourceMapGenerator] }
|
||||
```
|
||||
|
||||
#### With SourceMapGenerator (low level API)
|
||||
|
||||
```js
|
||||
var map = new SourceMapGenerator({
|
||||
file: "source-mapped.js"
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: 10,
|
||||
column: 35
|
||||
},
|
||||
source: "foo.js",
|
||||
original: {
|
||||
line: 33,
|
||||
column: 2
|
||||
},
|
||||
name: "christopher"
|
||||
});
|
||||
|
||||
console.log(map.toString());
|
||||
// '{"version":3,"file":"source-mapped.js","sources":["foo.js"],"names":["christopher"],"mappings":";;;;;;;;;mCAgCEA"}'
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
Get a reference to the module:
|
||||
|
||||
```js
|
||||
// NodeJS
|
||||
var sourceMap = require('source-map');
|
||||
|
||||
// Browser builds
|
||||
var sourceMap = window.sourceMap;
|
||||
|
||||
// Inside Firefox
|
||||
let sourceMap = {};
|
||||
Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
|
||||
```
|
||||
|
||||
### SourceMapConsumer
|
||||
|
||||
A SourceMapConsumer instance represents a parsed source map which we can query
|
||||
for information about the original file positions by giving it a file position
|
||||
in the generated source.
|
||||
|
||||
#### new SourceMapConsumer(rawSourceMap)
|
||||
|
||||
The only parameter is the raw source map (either as a string which can be
|
||||
`JSON.parse`'d, or an object). According to the spec, source maps have the
|
||||
following attributes:
|
||||
|
||||
* `version`: Which version of the source map spec this map is following.
|
||||
|
||||
* `sources`: An array of URLs to the original source files.
|
||||
|
||||
* `names`: An array of identifiers which can be referenced by individual
|
||||
mappings.
|
||||
|
||||
* `sourceRoot`: Optional. The URL root from which all sources are relative.
|
||||
|
||||
* `sourcesContent`: Optional. An array of contents of the original source files.
|
||||
|
||||
* `mappings`: A string of base64 VLQs which contain the actual mappings.
|
||||
|
||||
* `file`: Optional. The generated filename this source map is associated with.
|
||||
|
||||
#### SourceMapConsumer.prototype.computeColumnSpans()
|
||||
|
||||
Compute the last column for each generated mapping. The last column is
|
||||
inclusive.
|
||||
|
||||
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
|
||||
|
||||
Returns the original source, line, and column information for the generated
|
||||
source's line and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `line`: The line number in the generated source.
|
||||
|
||||
* `column`: The column number in the generated source.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `source`: The original source file, or null if this information is not
|
||||
available.
|
||||
|
||||
* `line`: The line number in the original source, or null if this information is
|
||||
not available.
|
||||
|
||||
* `column`: The column number in the original source, or null if this
|
||||
information is not available.
|
||||
|
||||
* `name`: The original identifier, or null if this information is not available.
|
||||
|
||||
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
|
||||
|
||||
Returns the generated line and column information for the original source,
|
||||
line, and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `source`: The filename of the original source.
|
||||
|
||||
* `line`: The line number in the original source.
|
||||
|
||||
* `column`: The column number in the original source.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `line`: The line number in the generated source, or null.
|
||||
|
||||
* `column`: The column number in the generated source, or null.
|
||||
|
||||
#### SourceMapConsumer.prototype.allGeneratedPositionsFor(originalPosition)
|
||||
|
||||
Returns all generated line and column information for the original source
|
||||
and line provided. The only argument is an object with the following
|
||||
properties:
|
||||
|
||||
* `source`: The filename of the original source.
|
||||
|
||||
* `line`: The line number in the original source.
|
||||
|
||||
and an array of objects is returned, each with the following properties:
|
||||
|
||||
* `line`: The line number in the generated source, or null.
|
||||
|
||||
* `column`: The column number in the generated source, or null.
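A brief sketch, reusing the `smc` consumer from the earlier example (the exact results depend on the map):

```js
smc.allGeneratedPositionsFor({
  source: 'http://example.com/www/js/two.js',
  line: 2
});
// => e.g. [ { line: 2, column: 28 } ]
```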
|
||||
|
||||
#### SourceMapConsumer.prototype.sourceContentFor(source[, returnNullOnMissing])
|
||||
|
||||
Returns the original source content for the source provided. The only
|
||||
argument is the URL of the original source file.
|
||||
|
||||
If the source content for the given source is not found, then an error is
|
||||
thrown. Optionally, pass `true` as the second param to have `null` returned
|
||||
instead.
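A short sketch, again using the `smc` consumer from the earlier example (that raw map embeds no `sourcesContent`, so treat this as illustrative only):

```js
// Returns the embedded content, or throws if the map has none for this source
var content = smc.sourceContentFor('http://example.com/www/js/one.js');

// With the second argument, a missing source yields null instead of an error
var maybeContent = smc.sourceContentFor('http://example.com/www/js/one.js', true);
```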
|
||||
|
||||
#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
|
||||
|
||||
Iterate over each mapping between an original source/line/column and a
|
||||
generated line/column in this source map.
|
||||
|
||||
* `callback`: The function that is called with each mapping. Mappings have the
|
||||
form `{ source, generatedLine, generatedColumn, originalLine, originalColumn,
|
||||
name }`
|
||||
|
||||
* `context`: Optional. If specified, this object will be the value of `this`
|
||||
every time that `callback` is called.
|
||||
|
||||
* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
`SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
|
||||
the mappings sorted by the generated file's line/column order or the
|
||||
original's source/line/column order, respectively. Defaults to
|
||||
`SourceMapConsumer.GENERATED_ORDER`.
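For example, iterating in original-source order (reusing `smc` from above):

```js
smc.eachMapping(function (mapping) {
  console.log(mapping.source + ':' + mapping.originalLine +
              ' -> generated line ' + mapping.generatedLine);
}, null, SourceMapConsumer.ORIGINAL_ORDER);
```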
|
||||
|
||||
### SourceMapGenerator
|
||||
|
||||
An instance of the SourceMapGenerator represents a source map which is being
|
||||
built incrementally.
|
||||
|
||||
#### new SourceMapGenerator([startOfSourceMap])
|
||||
|
||||
You may pass an object with the following properties:
|
||||
|
||||
* `file`: The filename of the generated source that this source map is
|
||||
associated with.
|
||||
|
||||
* `sourceRoot`: A root for all relative URLs in this source map.
|
||||
|
||||
* `skipValidation`: Optional. When `true`, disables validation of mappings as
|
||||
they are added. This can improve performance but should be used with
|
||||
discretion, as a last resort. Even then, one should avoid using this flag when
|
||||
running tests, if possible.
|
||||
|
||||
#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)
|
||||
|
||||
Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||
|
||||
* `sourceMapConsumer` The SourceMap.
|
||||
|
||||
#### SourceMapGenerator.prototype.addMapping(mapping)
|
||||
|
||||
Add a single mapping from original source line and column to the generated
|
||||
source's line and column for this source map being created. The mapping object
|
||||
should have the following properties:
|
||||
|
||||
* `generated`: An object with the generated line and column positions.
|
||||
|
||||
* `original`: An object with the original line and column positions.
|
||||
|
||||
* `source`: The original source file (relative to the sourceRoot).
|
||||
|
||||
* `name`: An optional original token name for this mapping.
|
||||
|
||||
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
|
||||
|
||||
Set the source content for an original source file.
|
||||
|
||||
* `sourceFile` the URL of the original source file.
|
||||
|
||||
* `sourceContent` the content of the source file.
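A small sketch continuing the `map` generator from the low-level example above (the file content here is made up for illustration):

```js
map.setSourceContent('foo.js', 'var christopher = 33;');

// The content is emitted in the map's `sourcesContent` field,
// aligned with the `sources` array.
console.log(JSON.parse(map.toString()).sourcesContent);
// => [ 'var christopher = 33;' ]
```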
|
||||
|
||||
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile[, sourceMapPath]])
|
||||
|
||||
Applies a SourceMap for a source file to the SourceMap.
|
||||
Each mapping to the supplied source file is rewritten using the
|
||||
supplied SourceMap. Note: The resolution for the resulting mappings
|
||||
is the minimum of this map and the supplied map.
|
||||
|
||||
* `sourceMapConsumer`: The SourceMap to be applied.
|
||||
|
||||
* `sourceFile`: Optional. The filename of the source file.
|
||||
If omitted, sourceMapConsumer.file will be used, if it exists.
|
||||
Otherwise an error will be thrown.
|
||||
|
||||
* `sourceMapPath`: Optional. The dirname of the path to the SourceMap
|
||||
to be applied. If relative, it is relative to the SourceMap.
|
||||
|
||||
This parameter is needed when the two SourceMaps aren't in the same
|
||||
directory, and the SourceMap to be applied contains relative source
|
||||
paths. If so, those relative source paths need to be rewritten
|
||||
relative to the SourceMap.
|
||||
|
||||
If omitted, it is assumed that both SourceMaps are in the same directory,
|
||||
thus not needing any rewriting. (Supplying `'.'` has the same effect.)
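An illustrative sketch of the common compiler-plus-minifier case; `minifiedToCompiledMap` and `compiledToOriginalMap` are hypothetical raw maps, not part of the examples above:

```js
// `generator` currently maps minified positions to 'compiled.js'
var generator = SourceMapGenerator.fromSourceMap(
  new SourceMapConsumer(minifiedToCompiledMap));

// Rewrite every mapping that points at 'compiled.js' so it points
// at the original sources instead
generator.applySourceMap(
  new SourceMapConsumer(compiledToOriginalMap), 'compiled.js');

console.log(generator.toString()); // minified positions -> original sources
```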
|
||||
|
||||
#### SourceMapGenerator.prototype.toString()
|
||||
|
||||
Renders the source map being generated to a string.
|
||||
|
||||
### SourceNode
|
||||
|
||||
SourceNodes provide a way to abstract over interpolating and/or concatenating
|
||||
snippets of generated JavaScript source code, while maintaining the line and
|
||||
column information associated between those snippets and the original source
|
||||
code. This is useful as the final intermediate representation a compiler might
|
||||
use before outputting the generated JS and source map.
|
||||
|
||||
#### new SourceNode([line, column, source[, chunk[, name]]])
|
||||
|
||||
* `line`: The original line number associated with this source node, or null if
|
||||
it isn't associated with an original line.
|
||||
|
||||
* `column`: The original column number associated with this source node, or null
|
||||
if it isn't associated with an original column.
|
||||
|
||||
* `source`: The original source's filename; null if no filename is provided.
|
||||
|
||||
* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
|
||||
below.
|
||||
|
||||
* `name`: Optional. The original identifier.
|
||||
|
||||
#### SourceNode.fromStringWithSourceMap(code, sourceMapConsumer[, relativePath])
|
||||
|
||||
Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||
|
||||
* `code`: The generated code
|
||||
|
||||
* `sourceMapConsumer` The SourceMap for the generated code
|
||||
|
||||
* `relativePath` The optional path that relative sources in `sourceMapConsumer`
|
||||
should be relative to.
|
||||
|
||||
#### SourceNode.prototype.add(chunk)
|
||||
|
||||
Add a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
#### SourceNode.prototype.prepend(chunk)
|
||||
|
||||
Prepend a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
#### SourceNode.prototype.setSourceContent(sourceFile, sourceContent)
|
||||
|
||||
Set the source content for a source file. This will be added to the
|
||||
`SourceMap` in the `sourcesContent` field.
|
||||
|
||||
* `sourceFile`: The filename of the source file
|
||||
|
||||
* `sourceContent`: The content of the source file
|
||||
|
||||
#### SourceNode.prototype.walk(fn)
|
||||
|
||||
Walk over the tree of JS snippets in this node and its children. The walking
|
||||
function is called once for each snippet of JS and is passed that snippet and
|
||||
its original associated source's line/column location.
|
||||
|
||||
* `fn`: The traversal function.
|
||||
|
||||
#### SourceNode.prototype.walkSourceContents(fn)
|
||||
|
||||
Walk over the tree of SourceNodes. The walking function is called for each
|
||||
source file content and is passed the filename and source content.
|
||||
|
||||
* `fn`: The traversal function.
|
||||
|
||||
#### SourceNode.prototype.join(sep)
|
||||
|
||||
Like `Array.prototype.join` except for SourceNodes. Inserts the separator
|
||||
between each of this source node's children.
|
||||
|
||||
* `sep`: The separator.
|
||||
|
||||
#### SourceNode.prototype.replaceRight(pattern, replacement)
|
||||
|
||||
Call `String.prototype.replace` on the very right-most source snippet. Useful
|
||||
for trimming whitespace from the end of a source node, etc.
|
||||
|
||||
* `pattern`: The pattern to replace.
|
||||
|
||||
* `replacement`: The thing to replace the pattern with.
|
||||
|
||||
#### SourceNode.prototype.toString()
|
||||
|
||||
Return the string representation of this source node. Walks over the tree and
|
||||
concatenates all the various snippets together to one string.
|
||||
|
||||
#### SourceNode.prototype.toStringWithSourceMap([startOfSourceMap])
|
||||
|
||||
Returns the string representation of this tree of source nodes, plus a
|
||||
SourceMapGenerator which contains all the mappings between the generated and
|
||||
original sources.
|
||||
|
||||
The arguments are the same as those to `new SourceMapGenerator`.
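A minimal end-to-end sketch tying the `SourceNode` methods above together (file names are made up):

```js
var SourceNode = require('source-map').SourceNode;

var node = new SourceNode(null, null, null, [
  new SourceNode(1, 0, 'a.js', 'var a = 1;'),
  ' ',
  new SourceNode(1, 0, 'b.js', 'var b = 2;')
]);

var result = node.toStringWithSourceMap({ file: 'bundle.js' });
console.log(result.code);           // 'var a = 1; var b = 2;'
console.log(result.map.toString()); // the serialized map for bundle.js
```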
|
||||
|
||||
## Tests
|
||||
|
||||
[](https://travis-ci.org/mozilla/source-map)
|
||||
|
||||
Install NodeJS version 0.8.0 or greater, then run `node test/run-tests.js`.
|
||||
|
||||
To add new tests, create a new file named `test/test-<your new test name>.js`
|
||||
and export your test functions with names that start with "test", for example
|
||||
|
||||
```js
|
||||
exports["test doing the foo bar"] = function (assert, util) {
|
||||
...
|
||||
};
|
||||
```
|
||||
|
||||
The new test will be located automatically when you run the suite.
|
||||
|
||||
The `util` argument is the test utility module located at `test/source-map/util`.
|
||||
|
||||
The `assert` argument is a cut-down version of Node's assert module. You have
|
||||
access to the following assertion functions:
|
||||
|
||||
* `doesNotThrow`
|
||||
|
||||
* `equal`
|
||||
|
||||
* `ok`
|
||||
|
||||
* `strictEqual`
|
||||
|
||||
* `throws`
|
||||
|
||||
(The reason for the restricted set of test functions is that we need the
|
||||
tests to run inside Firefox's test suite as well and so the assert module is
|
||||
shimmed in that environment. See `build/assert-shim.js`.)
|
||||
|
||||
[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
|
||||
[feature]: https://wiki.mozilla.org/DevTools/Features/SourceMap
|
||||
[Dryice]: https://github.com/mozilla/dryice
|
56
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/build/assert-shim.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
define('test/source-map/assert', ['exports'], function (exports) {
|
||||
|
||||
let do_throw = function (msg) {
|
||||
throw new Error(msg);
|
||||
};
|
||||
|
||||
exports.init = function (throw_fn) {
|
||||
do_throw = throw_fn;
|
||||
};
|
||||
|
||||
exports.doesNotThrow = function (fn) {
|
||||
try {
|
||||
fn();
|
||||
}
|
||||
catch (e) {
|
||||
do_throw(e.message);
|
||||
}
|
||||
};
|
||||
|
||||
exports.equal = function (actual, expected, msg) {
|
||||
msg = msg || String(actual) + ' != ' + String(expected);
|
||||
if (actual != expected) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.ok = function (val, msg) {
|
||||
msg = msg || String(val) + ' is falsey';
|
||||
if (!Boolean(val)) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.strictEqual = function (actual, expected, msg) {
|
||||
msg = msg || String(actual) + ' !== ' + String(expected);
|
||||
if (actual !== expected) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.throws = function (fn) {
|
||||
try {
|
||||
fn();
|
||||
do_throw('Expected an error to be thrown, but it wasn\'t.');
|
||||
}
|
||||
catch (e) {
|
||||
}
|
||||
};
|
||||
|
||||
});
|
152
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/build/mini-require.js
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/**
|
||||
* Define a module along with a payload.
|
||||
* @param {string} moduleName Name for the payload
|
||||
* @param {ignored} deps Ignored. For compatibility with CommonJS AMD Spec
|
||||
* @param {function} payload Function with (require, exports, module) params
|
||||
*/
|
||||
function define(moduleName, deps, payload) {
|
||||
if (typeof moduleName != "string") {
|
||||
throw new TypeError('Expected string, got: ' + moduleName);
|
||||
}
|
||||
|
||||
if (arguments.length == 2) {
|
||||
payload = deps;
|
||||
}
|
||||
|
||||
if (moduleName in define.modules) {
|
||||
throw new Error("Module already defined: " + moduleName);
|
||||
}
|
||||
define.modules[moduleName] = payload;
|
||||
};
|
||||
|
||||
/**
|
||||
* The global store of un-instantiated modules
|
||||
*/
|
||||
define.modules = {};
|
||||
|
||||
|
||||
/**
|
||||
* We invoke require() in the context of a Domain so we can have multiple
|
||||
* sets of modules running separate from each other.
|
||||
* This contrasts with JSMs which are singletons, Domains allows us to
|
||||
* optionally load a CommonJS module twice with separate data each time.
|
||||
* Perhaps you want 2 command lines with a different set of commands in each,
|
||||
* for example.
|
||||
*/
|
||||
function Domain() {
|
||||
this.modules = {};
|
||||
this._currentModule = null;
|
||||
}
|
||||
|
||||
(function () {
|
||||
|
||||
/**
|
||||
* Lookup module names and resolve them by calling the definition function if
|
||||
* needed.
|
||||
* There are 2 ways to call this, either with an array of dependencies and a
|
||||
* callback to call when the dependencies are found (which can happen
|
||||
* asynchronously in an in-page context) or with a single string and no callback
|
||||
* where the dependency is resolved synchronously and returned.
|
||||
* The API is designed to be compatible with the CommonJS AMD spec and
|
||||
* RequireJS.
|
||||
* @param {string[]|string} deps A name, or names for the payload
|
||||
* @param {function|undefined} callback Function to call when the dependencies
|
||||
* are resolved
|
||||
* @return {undefined|object} The module required or undefined for
|
||||
* array/callback method
|
||||
*/
|
||||
Domain.prototype.require = function(deps, callback) {
|
||||
if (Array.isArray(deps)) {
|
||||
var params = deps.map(function(dep) {
|
||||
return this.lookup(dep);
|
||||
}, this);
|
||||
if (callback) {
|
||||
callback.apply(null, params);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
else {
|
||||
return this.lookup(deps);
|
||||
}
|
||||
};
|
||||
|
||||
function normalize(path) {
|
||||
var bits = path.split('/');
|
||||
var i = 1;
|
||||
while (i < bits.length) {
|
||||
if (bits[i] === '..') {
|
||||
bits.splice(i-1, 1);
|
||||
} else if (bits[i] === '.') {
|
||||
bits.splice(i, 1);
|
||||
} else {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
return bits.join('/');
|
||||
}
|
||||
|
||||
function join(a, b) {
|
||||
a = a.trim();
|
||||
b = b.trim();
|
||||
if (/^\//.test(b)) {
|
||||
return b;
|
||||
} else {
|
||||
return a.replace(/\/*$/, '/') + b;
|
||||
}
|
||||
}
|
||||
|
||||
function dirname(path) {
|
||||
var bits = path.split('/');
|
||||
bits.pop();
|
||||
return bits.join('/');
|
||||
}
|
||||
|
||||
/**
|
||||
* Lookup module names and resolve them by calling the definition function if
|
||||
* needed.
|
||||
* @param {string} moduleName A name for the payload to lookup
|
||||
* @return {object} The module specified by aModuleName or null if not found.
|
||||
*/
|
||||
Domain.prototype.lookup = function(moduleName) {
|
||||
if (/^\./.test(moduleName)) {
|
||||
moduleName = normalize(join(dirname(this._currentModule), moduleName));
|
||||
}
|
||||
|
||||
if (moduleName in this.modules) {
|
||||
var module = this.modules[moduleName];
|
||||
return module;
|
||||
}
|
||||
|
||||
if (!(moduleName in define.modules)) {
|
||||
throw new Error("Module not defined: " + moduleName);
|
||||
}
|
||||
|
||||
var module = define.modules[moduleName];
|
||||
|
||||
if (typeof module == "function") {
|
||||
var exports = {};
|
||||
var previousModule = this._currentModule;
|
||||
this._currentModule = moduleName;
|
||||
module(this.require.bind(this), exports, { id: moduleName, uri: "" });
|
||||
this._currentModule = previousModule;
|
||||
module = exports;
|
||||
}
|
||||
|
||||
// cache the resulting module object for next time
|
||||
this.modules[moduleName] = module;
|
||||
|
||||
return module;
|
||||
};
|
||||
|
||||
}());
|
||||
|
||||
define.Domain = Domain;
|
||||
define.globalDomain = new Domain();
|
||||
var require = define.globalDomain.require.bind(define.globalDomain);
|
20
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/build/prefix-source-map.jsm
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
this.EXPORTED_SYMBOLS = [ "SourceMapConsumer", "SourceMapGenerator", "SourceNode" ];
|
||||
|
||||
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
|
18
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/build/prefix-utils.jsm
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
|
||||
Components.utils.import('resource://gre/modules/devtools/SourceMap.jsm');
|
||||
|
||||
this.EXPORTED_SYMBOLS = [ "define", "runSourceMapTests" ];
|
8
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/build/suffix-browser.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
this.sourceMap = {
|
||||
SourceMapConsumer: require('source-map/source-map-consumer').SourceMapConsumer,
|
||||
SourceMapGenerator: require('source-map/source-map-generator').SourceMapGenerator,
|
||||
SourceNode: require('source-map/source-node').SourceNode
|
||||
};
|
6
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/build/suffix-source-map.jsm
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
this.SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
|
||||
this.SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;
|
||||
this.SourceNode = require('source-map/source-node').SourceNode;
|
21
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/build/suffix-utils.jsm
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
function runSourceMapTests(modName, do_throw) {
|
||||
let mod = require(modName);
|
||||
let assert = require('test/source-map/assert');
|
||||
let util = require('test/source-map/util');
|
||||
|
||||
assert.init(do_throw);
|
||||
|
||||
for (let k in mod) {
|
||||
if (/^test/.test(k)) {
|
||||
mod[k](assert, util);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
this.runSourceMapTests = runSourceMapTests;
|
8
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/build/test-prefix.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
Components.utils.import('resource://test/Utils.jsm');
|
3
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/build/test-suffix.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
function run_test() {
|
||||
runSourceMapTests('{THIS_MODULE}', do_throw);
|
||||
}
|
8
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
* Copyright 2009-2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE.txt or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
exports.SourceMapGenerator = require('./source-map/source-map-generator').SourceMapGenerator;
|
||||
exports.SourceMapConsumer = require('./source-map/source-map-consumer').SourceMapConsumer;
|
||||
exports.SourceNode = require('./source-map/source-node').SourceNode;
|
97
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map/array-set.js
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
|
||||
/**
|
||||
* A data structure which is a combination of an array and a set. Adding a new
|
||||
* member is O(1), testing for membership is O(1), and finding the index of an
|
||||
* element is O(1). Removing elements from the set is not supported. Only
|
||||
* strings are supported for membership.
|
||||
*/
|
||||
function ArraySet() {
|
||||
this._array = [];
|
||||
this._set = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Static method for creating ArraySet instances from an existing array.
|
||||
*/
|
||||
ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) {
|
||||
var set = new ArraySet();
|
||||
for (var i = 0, len = aArray.length; i < len; i++) {
|
||||
set.add(aArray[i], aAllowDuplicates);
|
||||
}
|
||||
return set;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add the given string to this set.
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) {
|
||||
var isDuplicate = this.has(aStr);
|
||||
var idx = this._array.length;
|
||||
if (!isDuplicate || aAllowDuplicates) {
|
||||
this._array.push(aStr);
|
||||
}
|
||||
if (!isDuplicate) {
|
||||
this._set[util.toSetString(aStr)] = idx;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Is the given string a member of this set?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.has = function ArraySet_has(aStr) {
|
||||
return Object.prototype.hasOwnProperty.call(this._set,
|
||||
util.toSetString(aStr));
|
||||
};
|
||||
|
||||
/**
|
||||
* What is the index of the given string in the array?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {
|
||||
if (this.has(aStr)) {
|
||||
return this._set[util.toSetString(aStr)];
|
||||
}
|
||||
throw new Error('"' + aStr + '" is not in the set.');
|
||||
};
|
||||
|
||||
/**
|
||||
* What is the element at the given index?
|
||||
*
|
||||
* @param Number aIdx
|
||||
*/
|
||||
ArraySet.prototype.at = function ArraySet_at(aIdx) {
|
||||
if (aIdx >= 0 && aIdx < this._array.length) {
|
||||
return this._array[aIdx];
|
||||
}
|
||||
throw new Error('No element indexed by ' + aIdx);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the array representation of this set (which has the proper indices
|
||||
* indicated by indexOf). Note that this is a copy of the internal array used
|
||||
* for storing the members so that no one can mess with internal state.
|
||||
*/
|
||||
ArraySet.prototype.toArray = function ArraySet_toArray() {
|
||||
return this._array.slice();
|
||||
};
|
||||
|
||||
exports.ArraySet = ArraySet;
|
||||
|
||||
});
|
142
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map/base64-vlq.js
generated
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*
|
||||
* Based on the Base 64 VLQ implementation in Closure Compiler:
|
||||
* https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
|
||||
*
|
||||
* Copyright 2011 The Closure Compiler Authors. All rights reserved.
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following
|
||||
* disclaimer in the documentation and/or other materials provided
|
||||
* with the distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64 = require('./base64');
|
||||
|
||||
// A single base 64 digit can contain 6 bits of data. For the base 64 variable
|
||||
// length quantities we use in the source map spec, the first bit is the sign,
|
||||
// the next four bits are the actual value, and the 6th bit is the
|
||||
// continuation bit. The continuation bit tells us whether there are more
|
||||
// digits in this value following this digit.
|
||||
//
|
||||
// Continuation
|
||||
// | Sign
|
||||
// | |
|
||||
// V V
|
||||
// 101011
|
||||
|
||||
var VLQ_BASE_SHIFT = 5;
|
||||
|
||||
// binary: 100000
|
||||
var VLQ_BASE = 1 << VLQ_BASE_SHIFT;
|
||||
|
||||
// binary: 011111
|
||||
var VLQ_BASE_MASK = VLQ_BASE - 1;
|
||||
|
||||
// binary: 100000
|
||||
var VLQ_CONTINUATION_BIT = VLQ_BASE;
|
||||
|
||||
/**
|
||||
* Converts from a two's-complement value to a value where the sign bit is
|
||||
* placed in the least significant bit. For example, as decimals:
|
||||
* 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
|
||||
* 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
|
||||
*/
|
||||
function toVLQSigned(aValue) {
|
||||
return aValue < 0
|
||||
? ((-aValue) << 1) + 1
|
||||
: (aValue << 1) + 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts to a two's-complement value from a value where the sign bit is
|
||||
* placed in the least significant bit. For example, as decimals:
|
||||
* 2 (10 binary) becomes 1, 3 (11 binary) becomes -1
|
||||
* 4 (100 binary) becomes 2, 5 (101 binary) becomes -2
|
||||
*/
|
||||
function fromVLQSigned(aValue) {
|
||||
var isNegative = (aValue & 1) === 1;
|
||||
var shifted = aValue >> 1;
|
||||
return isNegative
|
||||
? -shifted
|
||||
: shifted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the base 64 VLQ encoded value.
|
||||
*/
|
||||
exports.encode = function base64VLQ_encode(aValue) {
|
||||
var encoded = "";
|
||||
var digit;
|
||||
|
||||
var vlq = toVLQSigned(aValue);
|
||||
|
||||
do {
|
||||
digit = vlq & VLQ_BASE_MASK;
|
||||
vlq >>>= VLQ_BASE_SHIFT;
|
||||
if (vlq > 0) {
|
||||
// There are still more digits in this value, so we must make sure the
|
||||
// continuation bit is marked.
|
||||
digit |= VLQ_CONTINUATION_BIT;
|
||||
}
|
||||
encoded += base64.encode(digit);
|
||||
} while (vlq > 0);
|
||||
|
||||
return encoded;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes the next base 64 VLQ value from the given string and returns the
|
||||
* value and the rest of the string via the out parameter.
|
||||
*/
|
||||
exports.decode = function base64VLQ_decode(aStr, aOutParam) {
|
||||
var i = 0;
|
||||
var strLen = aStr.length;
|
||||
var result = 0;
|
||||
var shift = 0;
|
||||
var continuation, digit;
|
||||
|
||||
do {
|
||||
if (i >= strLen) {
|
||||
throw new Error("Expected more digits in base 64 VLQ value.");
|
||||
}
|
||||
digit = base64.decode(aStr.charAt(i++));
|
||||
continuation = !!(digit & VLQ_CONTINUATION_BIT);
|
||||
digit &= VLQ_BASE_MASK;
|
||||
result = result + (digit << shift);
|
||||
shift += VLQ_BASE_SHIFT;
|
||||
} while (continuation);
|
||||
|
||||
aOutParam.value = fromVLQSigned(result);
|
||||
aOutParam.rest = aStr.slice(i);
|
||||
};
|
||||
|
||||
});
|
42
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map/base64.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var charToIntMap = {};
  var intToCharMap = {};

  'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
    .split('')
    .forEach(function (ch, index) {
      charToIntMap[ch] = index;
      intToCharMap[index] = ch;
    });

  /**
   * Encode an integer in the range of 0 to 63 to a single base 64 digit.
   */
  exports.encode = function base64_encode(aNumber) {
    if (aNumber in intToCharMap) {
      return intToCharMap[aNumber];
    }
    throw new TypeError("Must be between 0 and 63: " + aNumber);
  };

  /**
   * Decode a single base 64 digit to an integer.
   */
  exports.decode = function base64_decode(aChar) {
    if (aChar in charToIntMap) {
      return charToIntMap[aChar];
    }
    throw new TypeError("Not a valid base 64 digit: " + aChar);
  };

});
|
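// Illustrative usage sketch (not part of the vendored file): the single-digit
// encode/decode pair defined above is its own inverse over 0..63. The require
// path is an assumption based on the usual source-map package layout.
var base64 = require('source-map/lib/source-map/base64');
console.log(base64.encode(0), base64.encode(63));   // A /
console.log(base64.decode(base64.encode(42)));      // 42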
@ -0,0 +1,420 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
var binarySearch = require('./binary-search');
|
||||
var ArraySet = require('./array-set').ArraySet;
|
||||
var base64VLQ = require('./base64-vlq');
|
||||
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;
|
||||
|
||||
/**
|
||||
* A BasicSourceMapConsumer instance represents a parsed source map which we can
|
||||
* query for information about the original file positions by giving it a file
|
||||
* position in the generated source.
|
||||
*
|
||||
* The only parameter is the raw source map (either as a JSON string, or
|
||||
* already parsed to an object). According to the spec, source maps have the
|
||||
* following attributes:
|
||||
*
|
||||
* - version: Which version of the source map spec this map is following.
|
||||
* - sources: An array of URLs to the original source files.
|
||||
* - names: An array of identifiers which can be referenced by individual mappings.
|
||||
* - sourceRoot: Optional. The URL root from which all sources are relative.
|
||||
* - sourcesContent: Optional. An array of contents of the original source files.
|
||||
* - mappings: A string of base64 VLQs which contain the actual mappings.
|
||||
* - file: Optional. The generated file this source map is associated with.
|
||||
*
|
||||
* Here is an example source map, taken from the source map spec[0]:
|
||||
*
|
||||
* {
|
||||
* version : 3,
|
||||
* file: "out.js",
|
||||
* sourceRoot : "",
|
||||
* sources: ["foo.js", "bar.js"],
|
||||
* names: ["src", "maps", "are", "fun"],
|
||||
* mappings: "AA,AB;;ABCDE;"
|
||||
* }
|
||||
*
|
||||
* [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#
|
||||
*/
|
||||
function BasicSourceMapConsumer(aSourceMap) {
|
||||
var sourceMap = aSourceMap;
|
||||
if (typeof aSourceMap === 'string') {
|
||||
sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
|
||||
}
|
||||
|
||||
var version = util.getArg(sourceMap, 'version');
|
||||
var sources = util.getArg(sourceMap, 'sources');
|
||||
// Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which
|
||||
// requires the array) to play nice here.
|
||||
var names = util.getArg(sourceMap, 'names', []);
|
||||
var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);
|
||||
var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);
|
||||
var mappings = util.getArg(sourceMap, 'mappings');
|
||||
var file = util.getArg(sourceMap, 'file', null);
|
||||
|
||||
// Once again, Sass deviates from the spec and supplies the version as a
|
||||
// string rather than a number, so we use loose equality checking here.
|
||||
if (version != this._version) {
|
||||
throw new Error('Unsupported version: ' + version);
|
||||
}
|
||||
|
||||
// Some source maps produce relative source paths like "./foo.js" instead of
|
||||
// "foo.js". Normalize these first so that future comparisons will succeed.
|
||||
// See bugzil.la/1090768.
|
||||
sources = sources.map(util.normalize);
|
||||
|
||||
// Pass `true` below to allow duplicate names and sources. While source maps
|
||||
// are intended to be compressed and deduplicated, the TypeScript compiler
|
||||
// sometimes generates source maps with duplicates in them. See Github issue
|
||||
// #72 and bugzil.la/889492.
|
||||
this._names = ArraySet.fromArray(names, true);
|
||||
this._sources = ArraySet.fromArray(sources, true);
|
||||
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sourcesContent = sourcesContent;
|
||||
this._mappings = mappings;
|
||||
this.file = file;
|
||||
}
|
||||
|
||||
BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);
|
||||
BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer;
|
||||
|
||||
/**
|
||||
* Create a BasicSourceMapConsumer from a SourceMapGenerator.
|
||||
*
|
||||
* @param SourceMapGenerator aSourceMap
|
||||
* The source map that will be consumed.
|
||||
* @returns BasicSourceMapConsumer
|
||||
*/
|
||||
BasicSourceMapConsumer.fromSourceMap =
|
||||
function SourceMapConsumer_fromSourceMap(aSourceMap) {
|
||||
var smc = Object.create(BasicSourceMapConsumer.prototype);
|
||||
|
||||
smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true);
|
||||
smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true);
|
||||
smc.sourceRoot = aSourceMap._sourceRoot;
|
||||
smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(),
|
||||
smc.sourceRoot);
|
||||
smc.file = aSourceMap._file;
|
||||
|
||||
smc.__generatedMappings = aSourceMap._mappings.toArray().slice();
|
||||
smc.__originalMappings = aSourceMap._mappings.toArray().slice()
|
||||
.sort(util.compareByOriginalPositions);
|
||||
|
||||
return smc;
|
||||
};
|
||||
|
||||
/**
|
||||
* The version of the source mapping spec that we are consuming.
|
||||
*/
|
||||
BasicSourceMapConsumer.prototype._version = 3;
|
||||
|
||||
/**
|
||||
* The list of original sources.
|
||||
*/
|
||||
Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', {
|
||||
get: function () {
|
||||
return this._sources.toArray().map(function (s) {
|
||||
return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s;
|
||||
}, this);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Parse the mappings in a string in to a data structure which we can easily
|
||||
* query (the ordered arrays in the `this.__generatedMappings` and
|
||||
* `this.__originalMappings` properties).
|
||||
*/
|
||||
BasicSourceMapConsumer.prototype._parseMappings =
|
||||
function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
|
||||
var generatedLine = 1;
|
||||
var previousGeneratedColumn = 0;
|
||||
var previousOriginalLine = 0;
|
||||
var previousOriginalColumn = 0;
|
||||
var previousSource = 0;
|
||||
var previousName = 0;
|
||||
var str = aStr;
|
||||
var temp = {};
|
||||
var mapping;
|
||||
|
||||
while (str.length > 0) {
|
||||
if (str.charAt(0) === ';') {
|
||||
generatedLine++;
|
||||
str = str.slice(1);
|
||||
previousGeneratedColumn = 0;
|
||||
}
|
||||
else if (str.charAt(0) === ',') {
|
||||
str = str.slice(1);
|
||||
}
|
||||
else {
|
||||
mapping = {};
|
||||
mapping.generatedLine = generatedLine;
|
||||
|
||||
// Generated column.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.generatedColumn = previousGeneratedColumn + temp.value;
|
||||
previousGeneratedColumn = mapping.generatedColumn;
|
||||
str = temp.rest;
|
||||
|
||||
if (str.length > 0 && !this._nextCharIsMappingSeparator(str)) {
|
||||
// Original source.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.source = this._sources.at(previousSource + temp.value);
|
||||
previousSource += temp.value;
|
||||
str = temp.rest;
|
||||
if (str.length === 0 || this._nextCharIsMappingSeparator(str)) {
|
||||
throw new Error('Found a source, but no line and column');
|
||||
}
|
||||
|
||||
// Original line.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.originalLine = previousOriginalLine + temp.value;
|
||||
previousOriginalLine = mapping.originalLine;
|
||||
// Lines are stored 0-based
|
||||
mapping.originalLine += 1;
|
||||
str = temp.rest;
|
||||
if (str.length === 0 || this._nextCharIsMappingSeparator(str)) {
|
||||
throw new Error('Found a source and line, but no column');
|
||||
}
|
||||
|
||||
// Original column.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.originalColumn = previousOriginalColumn + temp.value;
|
||||
previousOriginalColumn = mapping.originalColumn;
|
||||
str = temp.rest;
|
||||
|
||||
if (str.length > 0 && !this._nextCharIsMappingSeparator(str)) {
|
||||
// Original name.
|
||||
base64VLQ.decode(str, temp);
|
||||
mapping.name = this._names.at(previousName + temp.value);
|
||||
previousName += temp.value;
|
||||
str = temp.rest;
|
||||
}
|
||||
}
|
||||
|
||||
this.__generatedMappings.push(mapping);
|
||||
if (typeof mapping.originalLine === 'number') {
|
||||
this.__originalMappings.push(mapping);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.__generatedMappings.sort(util.compareByGeneratedPositions);
|
||||
this.__originalMappings.sort(util.compareByOriginalPositions);
|
||||
};
|
||||
|
||||
/**
|
||||
* Find the mapping that best matches the hypothetical "needle" mapping that
|
||||
* we are searching for in the given "haystack" of mappings.
|
||||
*/
|
||||
BasicSourceMapConsumer.prototype._findMapping =
|
||||
function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,
|
||||
aColumnName, aComparator) {
|
||||
// To return the position we are searching for, we must first find the
|
||||
// mapping for the given position and then return the opposite position it
|
||||
// points to. Because the mappings are sorted, we can use binary search to
|
||||
// find the best mapping.
|
||||
|
||||
if (aNeedle[aLineName] <= 0) {
|
||||
throw new TypeError('Line must be greater than or equal to 1, got '
|
||||
+ aNeedle[aLineName]);
|
||||
}
|
||||
if (aNeedle[aColumnName] < 0) {
|
||||
throw new TypeError('Column must be greater than or equal to 0, got '
|
||||
+ aNeedle[aColumnName]);
|
||||
}
|
||||
|
||||
return binarySearch.search(aNeedle, aMappings, aComparator);
|
||||
};
|
||||
|
||||
/**
|
||||
* Compute the last column for each generated mapping. The last column is
|
||||
* inclusive.
|
||||
*/
|
||||
BasicSourceMapConsumer.prototype.computeColumnSpans =
|
||||
function SourceMapConsumer_computeColumnSpans() {
|
||||
for (var index = 0; index < this._generatedMappings.length; ++index) {
|
||||
var mapping = this._generatedMappings[index];
|
||||
|
||||
// Mappings do not contain a field for the last generated column. We
|
||||
// can come up with an optimistic estimate, however, by assuming that
|
||||
// mappings are contiguous (i.e. given two consecutive mappings, the
|
||||
// first mapping ends where the second one starts).
|
||||
if (index + 1 < this._generatedMappings.length) {
|
||||
var nextMapping = this._generatedMappings[index + 1];
|
||||
|
||||
if (mapping.generatedLine === nextMapping.generatedLine) {
|
||||
mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// The last mapping for each line spans the entire line.
|
||||
mapping.lastGeneratedColumn = Infinity;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the original source, line, and column information for the generated
|
||||
* source's line and column positions provided. The only argument is an object
|
||||
* with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source.
|
||||
* - column: The column number in the generated source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - source: The original source file, or null.
|
||||
* - line: The line number in the original source, or null.
|
||||
* - column: The column number in the original source, or null.
|
||||
* - name: The original identifier, or null.
|
||||
*/
|
||||
BasicSourceMapConsumer.prototype.originalPositionFor =
|
||||
function SourceMapConsumer_originalPositionFor(aArgs) {
|
||||
var needle = {
|
||||
generatedLine: util.getArg(aArgs, 'line'),
|
||||
generatedColumn: util.getArg(aArgs, 'column')
|
||||
};
|
||||
|
||||
var index = this._findMapping(needle,
|
||||
this._generatedMappings,
|
||||
"generatedLine",
|
||||
"generatedColumn",
|
||||
util.compareByGeneratedPositions);
|
||||
|
||||
if (index >= 0) {
|
||||
var mapping = this._generatedMappings[index];
|
||||
|
||||
if (mapping.generatedLine === needle.generatedLine) {
|
||||
var source = util.getArg(mapping, 'source', null);
|
||||
if (source != null && this.sourceRoot != null) {
|
||||
source = util.join(this.sourceRoot, source);
|
||||
}
|
||||
return {
|
||||
source: source,
|
||||
line: util.getArg(mapping, 'originalLine', null),
|
||||
column: util.getArg(mapping, 'originalColumn', null),
|
||||
name: util.getArg(mapping, 'name', null)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
source: null,
|
||||
line: null,
|
||||
column: null,
|
||||
name: null
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the original source content. The only argument is the url of the
|
||||
* original source file. Returns null if no original source content is
|
||||
* available.
|
||||
*/
|
||||
BasicSourceMapConsumer.prototype.sourceContentFor =
|
||||
function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {
|
||||
if (!this.sourcesContent) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (this.sourceRoot != null) {
|
||||
aSource = util.relative(this.sourceRoot, aSource);
|
||||
}
|
||||
|
||||
if (this._sources.has(aSource)) {
|
||||
return this.sourcesContent[this._sources.indexOf(aSource)];
|
||||
}
|
||||
|
||||
var url;
|
||||
if (this.sourceRoot != null
|
||||
&& (url = util.urlParse(this.sourceRoot))) {
|
||||
// XXX: file:// URIs and absolute paths lead to unexpected behavior for
|
||||
// many users. We can help them out when they expect file:// URIs to
|
||||
// behave like it would if they were running a local HTTP server. See
|
||||
// https://bugzilla.mozilla.org/show_bug.cgi?id=885597.
|
||||
var fileUriAbsPath = aSource.replace(/^file:\/\//, "");
|
||||
if (url.scheme == "file"
|
||||
&& this._sources.has(fileUriAbsPath)) {
|
||||
return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)]
|
||||
}
|
||||
|
||||
if ((!url.path || url.path == "/")
|
||||
&& this._sources.has("/" + aSource)) {
|
||||
return this.sourcesContent[this._sources.indexOf("/" + aSource)];
|
||||
}
|
||||
}
|
||||
|
||||
// This function is used recursively from
|
||||
// IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we
|
||||
// don't want to throw if we can't find the source - we just want to
|
||||
// return null, so we provide a flag to exit gracefully.
|
||||
if (nullOnMissing) {
|
||||
return null;
|
||||
}
|
||||
else {
|
||||
throw new Error('"' + aSource + '" is not in the SourceMap.');
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the generated line and column information for the original source,
|
||||
* line, and column positions provided. The only argument is an object with
|
||||
* the following properties:
|
||||
*
|
||||
* - source: The filename of the original source.
|
||||
* - line: The line number in the original source.
|
||||
* - column: The column number in the original source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source, or null.
|
||||
* - column: The column number in the generated source, or null.
|
||||
*/
|
||||
BasicSourceMapConsumer.prototype.generatedPositionFor =
|
||||
function SourceMapConsumer_generatedPositionFor(aArgs) {
|
||||
var needle = {
|
||||
source: util.getArg(aArgs, 'source'),
|
||||
originalLine: util.getArg(aArgs, 'line'),
|
||||
originalColumn: util.getArg(aArgs, 'column')
|
||||
};
|
||||
|
||||
if (this.sourceRoot != null) {
|
||||
needle.source = util.relative(this.sourceRoot, needle.source);
|
||||
}
|
||||
|
||||
var index = this._findMapping(needle,
|
||||
this._originalMappings,
|
||||
"originalLine",
|
||||
"originalColumn",
|
||||
util.compareByOriginalPositions);
|
||||
|
||||
if (index >= 0) {
|
||||
var mapping = this._originalMappings[index];
|
||||
|
||||
return {
|
||||
line: util.getArg(mapping, 'generatedLine', null),
|
||||
column: util.getArg(mapping, 'generatedColumn', null),
|
||||
lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
line: null,
|
||||
column: null,
|
||||
lastColumn: null
|
||||
};
|
||||
};
|
||||
|
||||
exports.BasicSourceMapConsumer = BasicSourceMapConsumer;
|
||||
|
||||
});
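// Illustrative usage sketch (not part of the vendored file): querying a tiny,
// made-up map through the public SourceMapConsumer entry point, which returns
// a BasicSourceMapConsumer for maps without a "sections" field.
var SourceMapConsumer = require('source-map').SourceMapConsumer;
var consumer = new SourceMapConsumer({
  version: 3,
  file: 'out.js',
  sources: ['foo.js'],
  names: [],
  mappings: 'AAAA'   // generated 1:0 -> foo.js 1:0
});
console.log(consumer.originalPositionFor({ line: 1, column: 0 }));
// { source: 'foo.js', line: 1, column: 0, name: null }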
|
100
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map/binary-search.js
generated
vendored
Normal file
@ -0,0 +1,100 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
/**
|
||||
* Recursive implementation of binary search.
|
||||
*
|
||||
* @param aLow Indices here and lower do not contain the needle.
|
||||
* @param aHigh Indices here and higher do not contain the needle.
|
||||
* @param aNeedle The element being searched for.
|
||||
* @param aHaystack The non-empty array being searched.
|
||||
* @param aCompare Function which takes two elements and returns -1, 0, or 1.
|
||||
* @param aBias Either 'binarySearch.LEAST_UPPER_BOUND' or
|
||||
* 'binarySearch.GREATEST_LOWER_BOUND'. Specifies whether to return the
|
||||
* closest element that is smaller than or greater than the element we are
|
||||
* searching for if the exact element cannot be found.
|
||||
*/
|
||||
function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {
|
||||
// This function terminates when one of the following is true:
|
||||
//
|
||||
// 1. We find the exact element we are looking for.
|
||||
//
|
||||
// 2. We did not find the exact element, but we can return the index of
|
||||
// the next closest element.
|
||||
//
|
||||
// 3. We did not find the exact element, and there is no next-closest
|
||||
// element than the one we are searching for, so we return -1.
|
||||
var mid = Math.floor((aHigh - aLow) / 2) + aLow;
|
||||
var cmp = aCompare(aNeedle, aHaystack[mid], true);
|
||||
if (cmp === 0) {
|
||||
// Found the element we are looking for.
|
||||
return mid;
|
||||
}
|
||||
else if (cmp > 0) {
|
||||
// Our needle is greater than aHaystack[mid].
|
||||
if (aHigh - mid > 1) {
|
||||
// The element is in the upper half.
|
||||
return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);
|
||||
}
|
||||
// The exact needle element was not found in this haystack. Determine if
|
||||
// we are in termination case (3) or (2) and return the appropriate thing.
|
||||
if (aBias == exports.LEAST_UPPER_BOUND) {
|
||||
return aHigh < aHaystack.length ? aHigh : -1;
|
||||
} else {
|
||||
return mid;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Our needle is less than aHaystack[mid].
|
||||
if (mid - aLow > 1) {
|
||||
// The element is in the lower half.
|
||||
return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);
|
||||
}
|
||||
// The exact needle element was not found in this haystack. Determine if
|
||||
// we are in termination case (3) or (2) and return the appropriate thing.
|
||||
if (aBias == exports.LEAST_UPPER_BOUND) {
|
||||
return mid;
|
||||
} else {
|
||||
return aLow < 0 ? -1 : aLow;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
exports.LEAST_UPPER_BOUND = 1;
|
||||
exports.GREATEST_LOWER_BOUND = 2;
|
||||
|
||||
/**
|
||||
* This is an implementation of binary search which will always try to return
* the index of the next highest value checked if there is no exact hit. This is
|
||||
* because mappings between original and generated line/col pairs are single
|
||||
* points, and there is an implicit region between each of them, so a miss
|
||||
* just means that you aren't on the very start of a region.
|
||||
*
|
||||
* @param aNeedle The element you are looking for.
|
||||
* @param aHaystack The array that is being searched.
|
||||
* @param aCompare A function which takes the needle and an element in the
|
||||
* array and returns -1, 0, or 1 depending on whether the needle is less
|
||||
* than, equal to, or greater than the element, respectively.
|
||||
* @param aBias Either 'exports.LEAST_UPPER_BOUND' or
|
||||
* 'exports.GREATEST_LOWER_BOUND'. Specifies whether to return the
|
||||
* closest element that is smaller than or greater than the element we are
|
||||
* searching for if the exact element cannot be found. Defaults to
|
||||
* 'exports.LEAST_UPPER_BOUND'.
|
||||
*/
|
||||
exports.search = function search(aNeedle, aHaystack, aCompare, aBias) {
|
||||
var aBias = aBias || exports.LEAST_UPPER_BOUND;
|
||||
|
||||
if (aHaystack.length === 0) {
|
||||
return -1;
|
||||
}
|
||||
return recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, aCompare, aBias)
|
||||
};
|
||||
|
||||
});
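// Illustrative usage sketch (not part of the vendored file): using the search
// above with a plain numeric comparator. With no exact hit it falls back to
// the default LEAST_UPPER_BOUND bias and returns the next highest index.
var binarySearch = require('source-map/lib/source-map/binary-search');
var haystack = [2, 4, 6, 8];
function compare(needle, element) { return needle - element; }
console.log(binarySearch.search(6, haystack, compare)); // 2 (exact hit)
console.log(binarySearch.search(5, haystack, compare)); // 2 (next highest is 6)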
|
@ -0,0 +1,303 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
var binarySearch = require('./binary-search');
|
||||
var SourceMapConsumer = require('./source-map-consumer').SourceMapConsumer;
|
||||
var BasicSourceMapConsumer = require('./basic-source-map-consumer').BasicSourceMapConsumer;
|
||||
|
||||
/**
|
||||
* An IndexedSourceMapConsumer instance represents a parsed source map which
|
||||
* we can query for information. It differs from BasicSourceMapConsumer in
|
||||
* that it takes "indexed" source maps (i.e. ones with a "sections" field) as
|
||||
* input.
|
||||
*
|
||||
* The only parameter is a raw source map (either as a JSON string, or already
|
||||
* parsed to an object). According to the spec for indexed source maps, they
|
||||
* have the following attributes:
|
||||
*
|
||||
* - version: Which version of the source map spec this map is following.
|
||||
* - file: Optional. The generated file this source map is associated with.
|
||||
* - sections: A list of section definitions.
|
||||
*
|
||||
* Each value under the "sections" field has two fields:
|
||||
* - offset: The offset into the generated code at which this section
|
||||
* begins to apply, defined as an object with a "line" and "column"
|
||||
* field.
|
||||
* - map: A source map definition. This source map could also be indexed,
|
||||
* but doesn't have to be.
|
||||
*
|
||||
* Instead of the "map" field, it's also possible to have a "url" field
|
||||
* specifying a URL to retrieve a source map from, but that's currently
|
||||
* unsupported.
|
||||
*
|
||||
* Here's an example source map, taken from the source map spec[0], but
|
||||
* modified to omit a section which uses the "url" field.
|
||||
*
|
||||
* {
|
||||
* version : 3,
|
||||
* file: "app.js",
|
||||
* sections: [{
|
||||
* offset: {line:100, column:10},
|
||||
* map: {
|
||||
* version : 3,
|
||||
* file: "section.js",
|
||||
* sources: ["foo.js", "bar.js"],
|
||||
* names: ["src", "maps", "are", "fun"],
|
||||
* mappings: "AAAA,E;;ABCDE;"
|
||||
* }
|
||||
* }],
|
||||
* }
|
||||
*
|
||||
* [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt
|
||||
*/
|
||||
function IndexedSourceMapConsumer(aSourceMap) {
|
||||
var sourceMap = aSourceMap;
|
||||
if (typeof aSourceMap === 'string') {
|
||||
sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
|
||||
}
|
||||
|
||||
var version = util.getArg(sourceMap, 'version');
|
||||
var sections = util.getArg(sourceMap, 'sections');
|
||||
|
||||
if (version != this._version) {
|
||||
throw new Error('Unsupported version: ' + version);
|
||||
}
|
||||
|
||||
var lastOffset = {
|
||||
line: -1,
|
||||
column: 0
|
||||
};
|
||||
this._sections = sections.map(function (s) {
|
||||
if (s.url) {
|
||||
// The url field will require support for asynchronicity.
|
||||
// See https://github.com/mozilla/source-map/issues/16
|
||||
throw new Error('Support for url field in sections not implemented.');
|
||||
}
|
||||
var offset = util.getArg(s, 'offset');
|
||||
var offsetLine = util.getArg(offset, 'line');
|
||||
var offsetColumn = util.getArg(offset, 'column');
|
||||
|
||||
if (offsetLine < lastOffset.line ||
|
||||
(offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) {
|
||||
throw new Error('Section offsets must be ordered and non-overlapping.');
|
||||
}
|
||||
lastOffset = offset;
|
||||
|
||||
return {
|
||||
generatedOffset: {
|
||||
// The offset fields are 0-based, but we use 1-based indices when
|
||||
// encoding/decoding from VLQ.
|
||||
generatedLine: offsetLine + 1,
|
||||
generatedColumn: offsetColumn + 1
|
||||
},
|
||||
consumer: new SourceMapConsumer(util.getArg(s, 'map'))
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype);
|
||||
IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer;
|
||||
|
||||
/**
|
||||
* The version of the source mapping spec that we are consuming.
|
||||
*/
|
||||
IndexedSourceMapConsumer.prototype._version = 3;
|
||||
|
||||
/**
|
||||
* The list of original sources.
|
||||
*/
|
||||
Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', {
|
||||
get: function () {
|
||||
var sources = [];
|
||||
for (var i = 0; i < this._sections.length; i++) {
|
||||
for (var j = 0; j < this._sections[i].consumer.sources.length; j++) {
|
||||
sources.push(this._sections[i].consumer.sources[j]);
|
||||
}
|
||||
};
|
||||
return sources;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Returns the original source, line, and column information for the generated
|
||||
* source's line and column positions provided. The only argument is an object
|
||||
* with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source.
|
||||
* - column: The column number in the generated source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - source: The original source file, or null.
|
||||
* - line: The line number in the original source, or null.
|
||||
* - column: The column number in the original source, or null.
|
||||
* - name: The original identifier, or null.
|
||||
*/
|
||||
IndexedSourceMapConsumer.prototype.originalPositionFor =
|
||||
function IndexedSourceMapConsumer_originalPositionFor(aArgs) {
|
||||
var needle = {
|
||||
generatedLine: util.getArg(aArgs, 'line'),
|
||||
generatedColumn: util.getArg(aArgs, 'column')
|
||||
};
|
||||
|
||||
// Find the section containing the generated position we're trying to map
|
||||
// to an original position.
|
||||
var sectionIndex = binarySearch.search(needle, this._sections,
|
||||
function(needle, section) {
|
||||
var cmp = needle.generatedLine - section.generatedOffset.generatedLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return (needle.generatedColumn -
|
||||
section.generatedOffset.generatedColumn);
|
||||
}, binarySearch.GREATEST_LOWER_BOUND);
|
||||
var section = this._sections[sectionIndex];
|
||||
|
||||
if (!section) {
|
||||
return {
|
||||
source: null,
|
||||
line: null,
|
||||
column: null,
|
||||
name: null
|
||||
};
|
||||
}
|
||||
|
||||
return section.consumer.originalPositionFor({
|
||||
line: needle.generatedLine -
|
||||
(section.generatedOffset.generatedLine - 1),
|
||||
column: needle.generatedColumn -
|
||||
(section.generatedOffset.generatedLine === needle.generatedLine
|
||||
? section.generatedOffset.generatedColumn - 1
|
||||
: 0)
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the original source content. The only argument is the url of the
|
||||
* original source file. Returns null if no original source content is
|
||||
* available.
|
||||
*/
|
||||
IndexedSourceMapConsumer.prototype.sourceContentFor =
|
||||
function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) {
|
||||
for (var i = 0; i < this._sections.length; i++) {
|
||||
var section = this._sections[i];
|
||||
|
||||
var content = section.consumer.sourceContentFor(aSource, true);
|
||||
if (content) {
|
||||
return content;
|
||||
}
|
||||
}
|
||||
if (nullOnMissing) {
|
||||
return null;
|
||||
}
|
||||
else {
|
||||
throw new Error('"' + aSource + '" is not in the SourceMap.');
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the generated line and column information for the original source,
|
||||
* line, and column positions provided. The only argument is an object with
|
||||
* the following properties:
|
||||
*
|
||||
* - source: The filename of the original source.
|
||||
* - line: The line number in the original source.
|
||||
* - column: The column number in the original source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source, or null.
|
||||
* - column: The column number in the generated source, or null.
|
||||
*/
|
||||
IndexedSourceMapConsumer.prototype.generatedPositionFor =
|
||||
function IndexedSourceMapConsumer_generatedPositionFor(aArgs) {
|
||||
for (var i = 0; i < this._sections.length; i++) {
|
||||
var section = this._sections[i];
|
||||
|
||||
// Only consider this section if the requested source is in the list of
|
||||
// sources of the consumer.
|
||||
if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) {
|
||||
continue;
|
||||
}
|
||||
var generatedPosition = section.consumer.generatedPositionFor(aArgs);
|
||||
if (generatedPosition) {
|
||||
var ret = {
|
||||
line: generatedPosition.line +
|
||||
(section.generatedOffset.generatedLine - 1),
|
||||
column: generatedPosition.column +
|
||||
(section.generatedOffset.generatedLine === generatedPosition.line
|
||||
? section.generatedOffset.generatedColumn - 1
|
||||
: 0)
|
||||
};
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
line: null,
|
||||
column: null
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse the mappings in a string in to a data structure which we can easily
|
||||
* query (the ordered arrays in the `this.__generatedMappings` and
|
||||
* `this.__originalMappings` properties).
|
||||
*/
|
||||
IndexedSourceMapConsumer.prototype._parseMappings =
|
||||
function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) {
|
||||
this.__generatedMappings = [];
|
||||
this.__originalMappings = [];
|
||||
for (var i = 0; i < this._sections.length; i++) {
|
||||
var section = this._sections[i];
|
||||
var sectionMappings = section.consumer._generatedMappings;
|
||||
for (var j = 0; j < sectionMappings.length; j++) {
|
||||
var mapping = sectionMappings[j];
|
||||
|
||||
var source = mapping.source;
|
||||
var sourceRoot = section.consumer.sourceRoot;
|
||||
|
||||
if (source != null && sourceRoot != null) {
|
||||
source = util.join(sourceRoot, source);
|
||||
}
|
||||
|
||||
// The mappings coming from the consumer for the section have
|
||||
// generated positions relative to the start of the section, so we
|
||||
// need to offset them to be relative to the start of the concatenated
|
||||
// generated file.
|
||||
var adjustedMapping = {
|
||||
source: source,
|
||||
generatedLine: mapping.generatedLine +
|
||||
(section.generatedOffset.generatedLine - 1),
|
||||
generatedColumn: mapping.generatedColumn +
  (section.generatedOffset.generatedLine === mapping.generatedLine
    ? section.generatedOffset.generatedColumn - 1
    : 0),
|
||||
originalLine: mapping.originalLine,
|
||||
originalColumn: mapping.originalColumn,
|
||||
name: mapping.name
|
||||
};
|
||||
|
||||
this.__generatedMappings.push(adjustedMapping);
|
||||
if (typeof adjustedMapping.originalLine === 'number') {
|
||||
this.__originalMappings.push(adjustedMapping);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
this.__generatedMappings.sort(util.compareByGeneratedPositions);
|
||||
this.__originalMappings.sort(util.compareByOriginalPositions);
|
||||
};
|
||||
|
||||
exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer;
|
||||
});
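// Illustrative usage sketch (not part of the vendored file): an indexed map is
// consumed through the same SourceMapConsumer entry point, which picks this
// class when it sees a "sections" field. The map below is made up; "CAAA"
// maps column 1 of the only section to a.js line 1, column 0.
var SourceMapConsumer = require('source-map').SourceMapConsumer;
var indexed = new SourceMapConsumer({
  version: 3,
  file: 'bundle.js',
  sections: [{
    offset: { line: 0, column: 0 },
    map: { version: 3, sources: ['a.js'], names: [], mappings: 'CAAA' }
  }]
});
console.log(indexed.originalPositionFor({ line: 1, column: 1 }));
// { source: 'a.js', line: 1, column: 0, name: null }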
|
86
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map/mapping-list.js
generated
vendored
Normal file
@ -0,0 +1,86 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2014 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
|
||||
/**
|
||||
* Determine whether mappingB is after mappingA with respect to generated
|
||||
* position.
|
||||
*/
|
||||
function generatedPositionAfter(mappingA, mappingB) {
|
||||
// Optimized for most common case
|
||||
var lineA = mappingA.generatedLine;
|
||||
var lineB = mappingB.generatedLine;
|
||||
var columnA = mappingA.generatedColumn;
|
||||
var columnB = mappingB.generatedColumn;
|
||||
return lineB > lineA || lineB == lineA && columnB >= columnA ||
|
||||
util.compareByGeneratedPositions(mappingA, mappingB) <= 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* A data structure to provide a sorted view of accumulated mappings in a
|
||||
* performance-conscious manner. It trades a negligible overhead in the general
* case for a large speedup in the case of mappings being added in order.
|
||||
*/
|
||||
function MappingList() {
|
||||
this._array = [];
|
||||
this._sorted = true;
|
||||
// Serves as infimum
|
||||
this._last = {generatedLine: -1, generatedColumn: 0};
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate through internal items. This method takes the same arguments that
|
||||
* `Array.prototype.forEach` takes.
|
||||
*
|
||||
* NOTE: The order of the mappings is NOT guaranteed.
|
||||
*/
|
||||
MappingList.prototype.unsortedForEach =
|
||||
function MappingList_forEach(aCallback, aThisArg) {
|
||||
this._array.forEach(aCallback, aThisArg);
|
||||
};
|
||||
|
||||
/**
|
||||
* Add the given source mapping.
|
||||
*
|
||||
* @param Object aMapping
|
||||
*/
|
||||
MappingList.prototype.add = function MappingList_add(aMapping) {
|
||||
var mapping;
|
||||
if (generatedPositionAfter(this._last, aMapping)) {
|
||||
this._last = aMapping;
|
||||
this._array.push(aMapping);
|
||||
} else {
|
||||
this._sorted = false;
|
||||
this._array.push(aMapping);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the flat, sorted array of mappings. The mappings are sorted by
|
||||
* generated position.
|
||||
*
|
||||
* WARNING: This method returns internal data without copying, for
|
||||
* performance. The return value must NOT be mutated, and should be treated as
|
||||
* an immutable borrow. If you want to take ownership, you must make your own
|
||||
* copy.
|
||||
*/
|
||||
MappingList.prototype.toArray = function MappingList_toArray() {
|
||||
if (!this._sorted) {
|
||||
this._array.sort(util.compareByGeneratedPositions);
|
||||
this._sorted = true;
|
||||
}
|
||||
return this._array;
|
||||
};
|
||||
|
||||
exports.MappingList = MappingList;
|
||||
|
||||
});
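// Illustrative usage sketch (not part of the vendored file): the list stays
// cheap to append to and only sorts lazily in toArray(). The require path is
// an assumption based on the usual source-map package layout.
var MappingList = require('source-map/lib/source-map/mapping-list').MappingList;
var list = new MappingList();
list.add({ generatedLine: 2, generatedColumn: 0 });
list.add({ generatedLine: 1, generatedColumn: 5 });   // out of order: marks the list unsorted
console.log(list.toArray().map(function (m) { return m.generatedLine; }));
// [ 1, 2 ]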
|
224
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map/source-map-consumer.js
generated
vendored
Normal file
@ -0,0 +1,224 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
|
||||
function SourceMapConsumer(aSourceMap) {
|
||||
var sourceMap = aSourceMap;
|
||||
if (typeof aSourceMap === 'string') {
|
||||
sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
|
||||
}
|
||||
|
||||
// We do late requires because the subclasses require() this file.
|
||||
if (sourceMap.sections != null) {
|
||||
var indexedSourceMapConsumer = require('./indexed-source-map-consumer');
|
||||
return new indexedSourceMapConsumer.IndexedSourceMapConsumer(sourceMap);
|
||||
} else {
|
||||
var basicSourceMapConsumer = require('./basic-source-map-consumer');
|
||||
return new basicSourceMapConsumer.BasicSourceMapConsumer(sourceMap);
|
||||
}
|
||||
}
|
||||
|
||||
SourceMapConsumer.fromSourceMap = function(aSourceMap) {
|
||||
var basicSourceMapConsumer = require('./basic-source-map-consumer');
|
||||
return basicSourceMapConsumer.BasicSourceMapConsumer
|
||||
.fromSourceMap(aSourceMap);
|
||||
}
|
||||
|
||||
/**
|
||||
* The version of the source mapping spec that we are consuming.
|
||||
*/
|
||||
SourceMapConsumer.prototype._version = 3;
|
||||
|
||||
|
||||
// `__generatedMappings` and `__originalMappings` are arrays that hold the
|
||||
// parsed mapping coordinates from the source map's "mappings" attribute. They
|
||||
// are lazily instantiated, accessed via the `_generatedMappings` and
|
||||
// `_originalMappings` getters respectively, and we only parse the mappings
|
||||
// and create these arrays once queried for a source location. We jump through
|
||||
// these hoops because there can be many thousands of mappings, and parsing
|
||||
// them is expensive, so we only want to do it if we must.
|
||||
//
|
||||
// Each object in the arrays is of the form:
|
||||
//
|
||||
// {
|
||||
// generatedLine: The line number in the generated code,
|
||||
// generatedColumn: The column number in the generated code,
|
||||
// source: The path to the original source file that generated this
|
||||
// chunk of code,
|
||||
// originalLine: The line number in the original source that
|
||||
// corresponds to this chunk of generated code,
|
||||
// originalColumn: The column number in the original source that
|
||||
// corresponds to this chunk of generated code,
|
||||
// name: The name of the original symbol which generated this chunk of
|
||||
// code.
|
||||
// }
|
||||
//
|
||||
// All properties except for `generatedLine` and `generatedColumn` can be
|
||||
// `null`.
|
||||
//
|
||||
// `_generatedMappings` is ordered by the generated positions.
|
||||
//
|
||||
// `_originalMappings` is ordered by the original positions.
|
||||
|
||||
SourceMapConsumer.prototype.__generatedMappings = null;
|
||||
Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', {
|
||||
get: function () {
|
||||
if (!this.__generatedMappings) {
|
||||
this.__generatedMappings = [];
|
||||
this.__originalMappings = [];
|
||||
this._parseMappings(this._mappings, this.sourceRoot);
|
||||
}
|
||||
|
||||
return this.__generatedMappings;
|
||||
}
|
||||
});
|
||||
|
||||
SourceMapConsumer.prototype.__originalMappings = null;
|
||||
Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', {
|
||||
get: function () {
|
||||
if (!this.__originalMappings) {
|
||||
this.__generatedMappings = [];
|
||||
this.__originalMappings = [];
|
||||
this._parseMappings(this._mappings, this.sourceRoot);
|
||||
}
|
||||
|
||||
return this.__originalMappings;
|
||||
}
|
||||
});
|
||||
|
||||
SourceMapConsumer.prototype._nextCharIsMappingSeparator =
|
||||
function SourceMapConsumer_nextCharIsMappingSeparator(aStr) {
|
||||
var c = aStr.charAt(0);
|
||||
return c === ";" || c === ",";
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse the mappings in a string in to a data structure which we can easily
|
||||
* query (the ordered arrays in the `this.__generatedMappings` and
|
||||
* `this.__originalMappings` properties).
|
||||
*/
|
||||
SourceMapConsumer.prototype._parseMappings =
|
||||
function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
|
||||
throw new Error("Subclasses must implement _parseMappings");
|
||||
};
|
||||
|
||||
SourceMapConsumer.GENERATED_ORDER = 1;
|
||||
SourceMapConsumer.ORIGINAL_ORDER = 2;
|
||||
|
||||
SourceMapConsumer.LEAST_UPPER_BOUND = 1;
|
||||
SourceMapConsumer.GREATEST_LOWER_BOUND = 2;
|
||||
|
||||
/**
|
||||
* Iterate over each mapping between an original source/line/column and a
|
||||
* generated line/column in this source map.
|
||||
*
|
||||
* @param Function aCallback
|
||||
* The function that is called with each mapping.
|
||||
* @param Object aContext
|
||||
* Optional. If specified, this object will be the value of `this` every
|
||||
* time that `aCallback` is called.
|
||||
* @param aOrder
|
||||
* Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
* `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
|
||||
* iterate over the mappings sorted by the generated file's line/column
|
||||
* order or the original's source/line/column order, respectively. Defaults to
|
||||
* `SourceMapConsumer.GENERATED_ORDER`.
|
||||
*/
|
||||
SourceMapConsumer.prototype.eachMapping =
|
||||
function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {
|
||||
var context = aContext || null;
|
||||
var order = aOrder || SourceMapConsumer.GENERATED_ORDER;
|
||||
|
||||
var mappings;
|
||||
switch (order) {
|
||||
case SourceMapConsumer.GENERATED_ORDER:
|
||||
mappings = this._generatedMappings;
|
||||
break;
|
||||
case SourceMapConsumer.ORIGINAL_ORDER:
|
||||
mappings = this._originalMappings;
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unknown order of iteration.");
|
||||
}
|
||||
|
||||
var sourceRoot = this.sourceRoot;
|
||||
mappings.map(function (mapping) {
|
||||
var source = mapping.source;
|
||||
if (source != null && sourceRoot != null) {
|
||||
source = util.join(sourceRoot, source);
|
||||
}
|
||||
return {
|
||||
source: source,
|
||||
generatedLine: mapping.generatedLine,
|
||||
generatedColumn: mapping.generatedColumn,
|
||||
originalLine: mapping.originalLine,
|
||||
originalColumn: mapping.originalColumn,
|
||||
name: mapping.name
|
||||
};
|
||||
}).forEach(aCallback, context);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns all generated line and column information for the original source
|
||||
* and line provided. The only argument is an object with the following
|
||||
* properties:
|
||||
*
|
||||
* - source: The filename of the original source.
|
||||
* - line: The line number in the original source.
|
||||
*
|
||||
* and an array of objects is returned, each with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source, or null.
|
||||
* - column: The column number in the generated source, or null.
|
||||
*/
|
||||
SourceMapConsumer.prototype.allGeneratedPositionsFor =
|
||||
function SourceMapConsumer_allGeneratedPositionsFor(aArgs) {
|
||||
var needle = {
|
||||
source: util.getArg(aArgs, 'source'),
|
||||
originalLine: util.getArg(aArgs, 'line'),
|
||||
originalColumn: 0
|
||||
};
|
||||
|
||||
if (this.sourceRoot != null) {
|
||||
needle.source = util.relative(this.sourceRoot, needle.source);
|
||||
}
|
||||
|
||||
var mappings = [];
|
||||
|
||||
var index = this._findMapping(needle,
|
||||
this._originalMappings,
|
||||
"originalLine",
|
||||
"originalColumn",
|
||||
util.compareByOriginalPositions);
|
||||
if (index >= 0) {
|
||||
var mapping = this._originalMappings[index];
|
||||
|
||||
// Iterate until either we run out of mappings, or we run into
|
||||
// a mapping for a different line. Since mappings are sorted, this is
|
||||
// guaranteed to find all mappings for the line we are interested in.
|
||||
while (mapping && mapping.originalLine === needle.originalLine) {
|
||||
mappings.push({
|
||||
line: util.getArg(mapping, 'generatedLine', null),
|
||||
column: util.getArg(mapping, 'generatedColumn', null),
|
||||
lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null)
|
||||
});
|
||||
|
||||
mapping = this._originalMappings[++index];
|
||||
}
|
||||
}
|
||||
|
||||
return mappings;
|
||||
};
|
||||
|
||||
exports.SourceMapConsumer = SourceMapConsumer;
|
||||
|
||||
});
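// Illustrative usage sketch (not part of the vendored file): iterating the
// parsed mappings of a small, made-up map in the default generated order.
var SourceMapConsumer = require('source-map').SourceMapConsumer;
var consumer = new SourceMapConsumer({
  version: 3,
  sources: ['foo.js'],
  names: [],
  mappings: 'AAAA;AACA'   // one mapping on line 1 and one on line 2
});
consumer.eachMapping(function (m) {
  console.log(m.generatedLine + ':' + m.generatedColumn,
              '->', m.source, m.originalLine + ':' + m.originalColumn);
});
// 1:0 -> foo.js 1:0
// 2:0 -> foo.js 2:0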
|
400
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map/source-map-generator.js
generated
vendored
Normal file
@ -0,0 +1,400 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64VLQ = require('./base64-vlq');
|
||||
var util = require('./util');
|
||||
var ArraySet = require('./array-set').ArraySet;
|
||||
var MappingList = require('./mapping-list').MappingList;
|
||||
|
||||
/**
|
||||
* An instance of the SourceMapGenerator represents a source map which is
|
||||
* being built incrementally. You may pass an object with the following
|
||||
* properties:
|
||||
*
|
||||
* - file: The filename of the generated source.
|
||||
* - sourceRoot: A root for all relative URLs in this source map.
|
||||
*/
|
||||
function SourceMapGenerator(aArgs) {
|
||||
if (!aArgs) {
|
||||
aArgs = {};
|
||||
}
|
||||
this._file = util.getArg(aArgs, 'file', null);
|
||||
this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);
|
||||
this._skipValidation = util.getArg(aArgs, 'skipValidation', false);
|
||||
this._sources = new ArraySet();
|
||||
this._names = new ArraySet();
|
||||
this._mappings = new MappingList();
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
|
||||
SourceMapGenerator.prototype._version = 3;
|
||||
|
||||
/**
|
||||
* Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||
*
|
||||
* @param aSourceMapConsumer The SourceMap.
|
||||
*/
|
||||
SourceMapGenerator.fromSourceMap =
|
||||
function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {
|
||||
var sourceRoot = aSourceMapConsumer.sourceRoot;
|
||||
var generator = new SourceMapGenerator({
|
||||
file: aSourceMapConsumer.file,
|
||||
sourceRoot: sourceRoot
|
||||
});
|
||||
aSourceMapConsumer.eachMapping(function (mapping) {
|
||||
var newMapping = {
|
||||
generated: {
|
||||
line: mapping.generatedLine,
|
||||
column: mapping.generatedColumn
|
||||
}
|
||||
};
|
||||
|
||||
if (mapping.source != null) {
|
||||
newMapping.source = mapping.source;
|
||||
if (sourceRoot != null) {
|
||||
newMapping.source = util.relative(sourceRoot, newMapping.source);
|
||||
}
|
||||
|
||||
newMapping.original = {
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
};
|
||||
|
||||
if (mapping.name != null) {
|
||||
newMapping.name = mapping.name;
|
||||
}
|
||||
}
|
||||
|
||||
generator.addMapping(newMapping);
|
||||
});
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content != null) {
|
||||
generator.setSourceContent(sourceFile, content);
|
||||
}
|
||||
});
|
||||
return generator;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a single mapping from original source line and column to the generated
|
||||
* source's line and column for this source map being created. The mapping
|
||||
* object should have the following properties:
|
||||
*
|
||||
* - generated: An object with the generated line and column positions.
|
||||
* - original: An object with the original line and column positions.
|
||||
* - source: The original source file (relative to the sourceRoot).
|
||||
* - name: An optional original token name for this mapping.
|
||||
*/
|
||||
SourceMapGenerator.prototype.addMapping =
|
||||
function SourceMapGenerator_addMapping(aArgs) {
|
||||
var generated = util.getArg(aArgs, 'generated');
|
||||
var original = util.getArg(aArgs, 'original', null);
|
||||
var source = util.getArg(aArgs, 'source', null);
|
||||
var name = util.getArg(aArgs, 'name', null);
|
||||
|
||||
if (!this._skipValidation) {
|
||||
this._validateMapping(generated, original, source, name);
|
||||
}
|
||||
|
||||
if (source != null && !this._sources.has(source)) {
|
||||
this._sources.add(source);
|
||||
}
|
||||
|
||||
if (name != null && !this._names.has(name)) {
|
||||
this._names.add(name);
|
||||
}
|
||||
|
||||
this._mappings.add({
|
||||
generatedLine: generated.line,
|
||||
generatedColumn: generated.column,
|
||||
originalLine: original != null && original.line,
|
||||
originalColumn: original != null && original.column,
|
||||
source: source,
|
||||
name: name
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the source content for a source file.
|
||||
*/
|
||||
SourceMapGenerator.prototype.setSourceContent =
|
||||
function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {
|
||||
var source = aSourceFile;
|
||||
if (this._sourceRoot != null) {
|
||||
source = util.relative(this._sourceRoot, source);
|
||||
}
|
||||
|
||||
if (aSourceContent != null) {
|
||||
// Add the source content to the _sourcesContents map.
|
||||
// Create a new _sourcesContents map if the property is null.
|
||||
if (!this._sourcesContents) {
|
||||
this._sourcesContents = {};
|
||||
}
|
||||
this._sourcesContents[util.toSetString(source)] = aSourceContent;
|
||||
} else if (this._sourcesContents) {
|
||||
// Remove the source file from the _sourcesContents map.
|
||||
// If the _sourcesContents map is empty, set the property to null.
|
||||
delete this._sourcesContents[util.toSetString(source)];
|
||||
if (Object.keys(this._sourcesContents).length === 0) {
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Applies the mappings of a sub-source-map for a specific source file to the
|
||||
* source map being generated. Each mapping to the supplied source file is
|
||||
* rewritten using the supplied source map. Note: The resolution for the
|
||||
* resulting mappings is the minimum of this map and the supplied map.
|
||||
*
|
||||
* @param aSourceMapConsumer The source map to be applied.
|
||||
* @param aSourceFile Optional. The filename of the source file.
|
||||
* If omitted, SourceMapConsumer's file property will be used.
|
||||
* @param aSourceMapPath Optional. The dirname of the path to the source map
|
||||
* to be applied. If relative, it is relative to the SourceMapConsumer.
|
||||
* This parameter is needed when the two source maps aren't in the same
|
||||
* directory, and the source map to be applied contains relative source
|
||||
* paths. If so, those relative source paths need to be rewritten
|
||||
* relative to the SourceMapGenerator.
|
||||
*/
|
||||
SourceMapGenerator.prototype.applySourceMap =
|
||||
function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {
|
||||
var sourceFile = aSourceFile;
|
||||
// If aSourceFile is omitted, we will use the file property of the SourceMap
|
||||
if (aSourceFile == null) {
|
||||
if (aSourceMapConsumer.file == null) {
|
||||
throw new Error(
|
||||
'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' +
|
||||
'or the source map\'s "file" property. Both were omitted.'
|
||||
);
|
||||
}
|
||||
sourceFile = aSourceMapConsumer.file;
|
||||
}
|
||||
var sourceRoot = this._sourceRoot;
|
||||
// Make "sourceFile" relative if an absolute Url is passed.
|
||||
if (sourceRoot != null) {
|
||||
sourceFile = util.relative(sourceRoot, sourceFile);
|
||||
}
|
||||
// Applying the SourceMap can add and remove items from the sources and
|
||||
// the names array.
|
||||
var newSources = new ArraySet();
|
||||
var newNames = new ArraySet();
|
||||
|
||||
// Find mappings for the "sourceFile"
|
||||
this._mappings.unsortedForEach(function (mapping) {
|
||||
if (mapping.source === sourceFile && mapping.originalLine != null) {
|
||||
// Check if it can be mapped by the source map, then update the mapping.
|
||||
var original = aSourceMapConsumer.originalPositionFor({
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
});
|
||||
if (original.source != null) {
|
||||
// Copy mapping
|
||||
mapping.source = original.source;
|
||||
if (aSourceMapPath != null) {
|
||||
mapping.source = util.join(aSourceMapPath, mapping.source)
|
||||
}
|
||||
if (sourceRoot != null) {
|
||||
mapping.source = util.relative(sourceRoot, mapping.source);
|
||||
}
|
||||
mapping.originalLine = original.line;
|
||||
mapping.originalColumn = original.column;
|
||||
if (original.name != null) {
|
||||
mapping.name = original.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var source = mapping.source;
|
||||
if (source != null && !newSources.has(source)) {
|
||||
newSources.add(source);
|
||||
}
|
||||
|
||||
var name = mapping.name;
|
||||
if (name != null && !newNames.has(name)) {
|
||||
newNames.add(name);
|
||||
}
|
||||
|
||||
}, this);
|
||||
this._sources = newSources;
|
||||
this._names = newNames;
|
||||
|
||||
// Copy sourcesContents of applied map.
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content != null) {
|
||||
if (aSourceMapPath != null) {
|
||||
sourceFile = util.join(aSourceMapPath, sourceFile);
|
||||
}
|
||||
if (sourceRoot != null) {
|
||||
sourceFile = util.relative(sourceRoot, sourceFile);
|
||||
}
|
||||
this.setSourceContent(sourceFile, content);
|
||||
}
|
||||
}, this);
|
||||
};
|
||||
|
||||
/**
|
||||
* A mapping can have one of the three levels of data:
|
||||
*
|
||||
* 1. Just the generated position.
|
||||
* 2. The Generated position, original position, and original source.
|
||||
* 3. Generated and original position, original source, as well as a name
|
||||
* token.
|
||||
*
|
||||
* To maintain consistency, we validate that any new mapping being added falls
|
||||
* in to one of these categories.
|
||||
*/
|
||||
SourceMapGenerator.prototype._validateMapping =
|
||||
function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,
|
||||
aName) {
|
||||
if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& !aOriginal && !aSource && !aName) {
|
||||
// Case 1.
|
||||
return;
|
||||
}
|
||||
else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
||||
&& aOriginal && 'line' in aOriginal && 'column' in aOriginal
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& aOriginal.line > 0 && aOriginal.column >= 0
|
||||
&& aSource) {
|
||||
// Cases 2 and 3.
|
||||
return;
|
||||
}
|
||||
else {
|
||||
throw new Error('Invalid mapping: ' + JSON.stringify({
|
||||
generated: aGenerated,
|
||||
source: aSource,
|
||||
original: aOriginal,
|
||||
name: aName
|
||||
}));
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Serialize the accumulated mappings in to the stream of base 64 VLQs
|
||||
* specified by the source map format.
|
||||
*/
|
||||
SourceMapGenerator.prototype._serializeMappings =
|
||||
function SourceMapGenerator_serializeMappings() {
|
||||
var previousGeneratedColumn = 0;
|
||||
var previousGeneratedLine = 1;
|
||||
var previousOriginalColumn = 0;
|
||||
var previousOriginalLine = 0;
|
||||
var previousName = 0;
|
||||
var previousSource = 0;
|
||||
var result = '';
|
||||
var mapping;
|
||||
|
||||
var mappings = this._mappings.toArray();
|
||||
|
||||
for (var i = 0, len = mappings.length; i < len; i++) {
|
||||
mapping = mappings[i];
|
||||
|
||||
if (mapping.generatedLine !== previousGeneratedLine) {
|
||||
previousGeneratedColumn = 0;
|
||||
while (mapping.generatedLine !== previousGeneratedLine) {
|
||||
result += ';';
|
||||
previousGeneratedLine++;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (i > 0) {
|
||||
if (!util.compareByGeneratedPositions(mapping, mappings[i - 1])) {
|
||||
continue;
|
||||
}
|
||||
result += ',';
|
||||
}
|
||||
}
|
||||
|
||||
result += base64VLQ.encode(mapping.generatedColumn
|
||||
- previousGeneratedColumn);
|
||||
previousGeneratedColumn = mapping.generatedColumn;
|
||||
|
||||
if (mapping.source != null) {
|
||||
result += base64VLQ.encode(this._sources.indexOf(mapping.source)
|
||||
- previousSource);
|
||||
previousSource = this._sources.indexOf(mapping.source);
|
||||
|
||||
// lines are stored 0-based in SourceMap spec version 3
|
||||
result += base64VLQ.encode(mapping.originalLine - 1
|
||||
- previousOriginalLine);
|
||||
previousOriginalLine = mapping.originalLine - 1;
|
||||
|
||||
result += base64VLQ.encode(mapping.originalColumn
|
||||
- previousOriginalColumn);
|
||||
previousOriginalColumn = mapping.originalColumn;
|
||||
|
||||
if (mapping.name != null) {
|
||||
result += base64VLQ.encode(this._names.indexOf(mapping.name)
|
||||
- previousName);
|
||||
previousName = this._names.indexOf(mapping.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
SourceMapGenerator.prototype._generateSourcesContent =
|
||||
function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) {
|
||||
return aSources.map(function (source) {
|
||||
if (!this._sourcesContents) {
|
||||
return null;
|
||||
}
|
||||
if (aSourceRoot != null) {
|
||||
source = util.relative(aSourceRoot, source);
|
||||
}
|
||||
var key = util.toSetString(source);
|
||||
return Object.prototype.hasOwnProperty.call(this._sourcesContents,
|
||||
key)
|
||||
? this._sourcesContents[key]
|
||||
: null;
|
||||
}, this);
|
||||
};
|
||||
|
||||
/**
|
||||
* Externalize the source map.
|
||||
*/
|
||||
SourceMapGenerator.prototype.toJSON =
|
||||
function SourceMapGenerator_toJSON() {
|
||||
var map = {
|
||||
version: this._version,
|
||||
sources: this._sources.toArray(),
|
||||
names: this._names.toArray(),
|
||||
mappings: this._serializeMappings()
|
||||
};
|
||||
if (this._file != null) {
|
||||
map.file = this._file;
|
||||
}
|
||||
if (this._sourceRoot != null) {
|
||||
map.sourceRoot = this._sourceRoot;
|
||||
}
|
||||
if (this._sourcesContents) {
|
||||
map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);
|
||||
}
|
||||
|
||||
return map;
|
||||
};
|
||||
|
||||
/**
|
||||
* Render the source map being generated to a string.
|
||||
*/
|
||||
SourceMapGenerator.prototype.toString =
|
||||
function SourceMapGenerator_toString() {
|
||||
return JSON.stringify(this.toJSON());
|
||||
};
|
||||
|
||||
exports.SourceMapGenerator = SourceMapGenerator;
|
||||
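// Illustrative usage sketch (file names are made up): build a map and
// serialize it with the API defined above.
//
//   var generator = new SourceMapGenerator({ file: 'bundle.js' });
//   generator.addMapping({
//     generated: { line: 1, column: 0 },
//     original:  { line: 1, column: 0 },
//     source: 'a.js'
//   });
//   generator.toString(); // => '{"version":3,"sources":["a.js"], ...}'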
|
||||
});
|
414
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map/source-node.js
generated
vendored
Normal file
@ -0,0 +1,414 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;
|
||||
var util = require('./util');
|
||||
|
||||
// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other
|
||||
// operating systems these days (capturing the result).
|
||||
var REGEX_NEWLINE = /(\r?\n)/;
|
||||
|
||||
// Newline character code for charCodeAt() comparisons
|
||||
var NEWLINE_CODE = 10;
|
||||
|
||||
// Private symbol for identifying `SourceNode`s when multiple versions of
|
||||
// the source-map library are loaded. This MUST NOT CHANGE across
|
||||
// versions!
|
||||
var isSourceNode = "$$$isSourceNode$$$";
|
||||
|
||||
/**
|
||||
* SourceNodes provide a way to abstract over interpolating/concatenating
|
||||
* snippets of generated JavaScript source code while maintaining the line and
|
||||
* column information associated with the original source code.
|
||||
*
|
||||
* @param aLine The original line number.
|
||||
* @param aColumn The original column number.
|
||||
* @param aSource The original source's filename.
|
||||
* @param aChunks Optional. An array of strings which are snippets of
|
||||
* generated JS, or other SourceNodes.
|
||||
* @param aName The original identifier.
|
||||
*/
|
||||
function SourceNode(aLine, aColumn, aSource, aChunks, aName) {
|
||||
this.children = [];
|
||||
this.sourceContents = {};
|
||||
this.line = aLine == null ? null : aLine;
|
||||
this.column = aColumn == null ? null : aColumn;
|
||||
this.source = aSource == null ? null : aSource;
|
||||
this.name = aName == null ? null : aName;
|
||||
this[isSourceNode] = true;
|
||||
if (aChunks != null) this.add(aChunks);
|
||||
}
|
||||
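// Illustrative usage sketch (file names are made up):
//
//   var node = new SourceNode(1, 0, 'a.js', 'var a = 1;\n');
//   node.add(new SourceNode(1, 0, 'b.js', 'var b = 2;\n'));
//   var out = node.toStringWithSourceMap({ file: 'out.js' });
//   // out.code === 'var a = 1;\nvar b = 2;\n', out.map is a SourceMapGenerator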
|
||||
/**
|
||||
* Creates a SourceNode from generated code and a SourceMapConsumer.
|
||||
*
|
||||
* @param aGeneratedCode The generated code
|
||||
* @param aSourceMapConsumer The SourceMap for the generated code
|
||||
* @param aRelativePath Optional. The path that relative sources in the
|
||||
* SourceMapConsumer should be relative to.
|
||||
*/
|
||||
SourceNode.fromStringWithSourceMap =
|
||||
function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {
|
||||
// The SourceNode we want to fill with the generated code
|
||||
// and the SourceMap
|
||||
var node = new SourceNode();
|
||||
|
||||
// All even indices of this array are one line of the generated code,
|
||||
// while all odd indices are the newlines between two adjacent lines
|
||||
// (since `REGEX_NEWLINE` captures its match).
|
||||
// Processed fragments are removed from this array, by calling `shiftNextLine`.
|
||||
var remainingLines = aGeneratedCode.split(REGEX_NEWLINE);
|
||||
var shiftNextLine = function() {
|
||||
var lineContents = remainingLines.shift();
|
||||
// The last line of a file might not have a newline.
|
||||
var newLine = remainingLines.shift() || "";
|
||||
return lineContents + newLine;
|
||||
};
|
||||
|
||||
// We need to remember the position of "remainingLines"
|
||||
var lastGeneratedLine = 1, lastGeneratedColumn = 0;
|
||||
|
||||
// To generate SourceNodes we need a code range.
|
||||
// To extract it, the current and the last mapping are used.
|
||||
// Here we store the last mapping.
|
||||
var lastMapping = null;
|
||||
|
||||
aSourceMapConsumer.eachMapping(function (mapping) {
|
||||
if (lastMapping !== null) {
|
||||
// We add the code from "lastMapping" to "mapping":
|
||||
// First check if there is a new line in between.
|
||||
if (lastGeneratedLine < mapping.generatedLine) {
|
||||
var code = "";
|
||||
// Associate first line with "lastMapping"
|
||||
addMappingWithCode(lastMapping, shiftNextLine());
|
||||
lastGeneratedLine++;
|
||||
lastGeneratedColumn = 0;
|
||||
// The remaining code is added without mapping
|
||||
} else {
|
||||
// There is no new line in between.
|
||||
// Associate the code between "lastGeneratedColumn" and
|
||||
// "mapping.generatedColumn" with "lastMapping"
|
||||
var nextLine = remainingLines[0];
|
||||
var code = nextLine.substr(0, mapping.generatedColumn -
|
||||
lastGeneratedColumn);
|
||||
remainingLines[0] = nextLine.substr(mapping.generatedColumn -
|
||||
lastGeneratedColumn);
|
||||
lastGeneratedColumn = mapping.generatedColumn;
|
||||
addMappingWithCode(lastMapping, code);
|
||||
// No more remaining code, continue
|
||||
lastMapping = mapping;
|
||||
return;
|
||||
}
|
||||
}
|
||||
// We add the generated code until the first mapping
|
||||
// to the SourceNode without any mapping.
|
||||
// Each line is added as separate string.
|
||||
while (lastGeneratedLine < mapping.generatedLine) {
|
||||
node.add(shiftNextLine());
|
||||
lastGeneratedLine++;
|
||||
}
|
||||
if (lastGeneratedColumn < mapping.generatedColumn) {
|
||||
var nextLine = remainingLines[0];
|
||||
node.add(nextLine.substr(0, mapping.generatedColumn));
|
||||
remainingLines[0] = nextLine.substr(mapping.generatedColumn);
|
||||
lastGeneratedColumn = mapping.generatedColumn;
|
||||
}
|
||||
lastMapping = mapping;
|
||||
}, this);
|
||||
// We have processed all mappings.
|
||||
if (remainingLines.length > 0) {
|
||||
if (lastMapping) {
|
||||
// Associate the remaining code in the current line with "lastMapping"
|
||||
addMappingWithCode(lastMapping, shiftNextLine());
|
||||
}
|
||||
// and add the remaining lines without any mapping
|
||||
node.add(remainingLines.join(""));
|
||||
}
|
||||
|
||||
// Copy sourcesContent into SourceNode
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content != null) {
|
||||
if (aRelativePath != null) {
|
||||
sourceFile = util.join(aRelativePath, sourceFile);
|
||||
}
|
||||
node.setSourceContent(sourceFile, content);
|
||||
}
|
||||
});
|
||||
|
||||
return node;
|
||||
|
||||
function addMappingWithCode(mapping, code) {
|
||||
if (mapping === null || mapping.source === undefined) {
|
||||
node.add(code);
|
||||
} else {
|
||||
var source = aRelativePath
|
||||
? util.join(aRelativePath, mapping.source)
|
||||
: mapping.source;
|
||||
node.add(new SourceNode(mapping.originalLine,
|
||||
mapping.originalColumn,
|
||||
source,
|
||||
code,
|
||||
mapping.name));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
SourceNode.prototype.add = function SourceNode_add(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
aChunk.forEach(function (chunk) {
|
||||
this.add(chunk);
|
||||
}, this);
|
||||
}
|
||||
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
||||
if (aChunk) {
|
||||
this.children.push(aChunk);
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to the beginning of this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
for (var i = aChunk.length-1; i >= 0; i--) {
|
||||
this.prepend(aChunk[i]);
|
||||
}
|
||||
}
|
||||
else if (aChunk[isSourceNode] || typeof aChunk === "string") {
|
||||
this.children.unshift(aChunk);
|
||||
}
|
||||
else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Walk over the tree of JS snippets in this node and its children. The
|
||||
* walking function is called once for each snippet of JS and is passed that
|
||||
* snippet and its original associated source's line/column location.
|
||||
*
|
||||
* @param aFn The traversal function.
|
||||
*/
|
||||
SourceNode.prototype.walk = function SourceNode_walk(aFn) {
|
||||
var chunk;
|
||||
for (var i = 0, len = this.children.length; i < len; i++) {
|
||||
chunk = this.children[i];
|
||||
if (chunk[isSourceNode]) {
|
||||
chunk.walk(aFn);
|
||||
}
|
||||
else {
|
||||
if (chunk !== '') {
|
||||
aFn(chunk, { source: this.source,
|
||||
line: this.line,
|
||||
column: this.column,
|
||||
name: this.name });
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between
|
||||
* each of `this.children`.
|
||||
*
|
||||
* @param aSep The separator.
|
||||
*/
|
||||
SourceNode.prototype.join = function SourceNode_join(aSep) {
|
||||
var newChildren;
|
||||
var i;
|
||||
var len = this.children.length;
|
||||
if (len > 0) {
|
||||
newChildren = [];
|
||||
for (i = 0; i < len-1; i++) {
|
||||
newChildren.push(this.children[i]);
|
||||
newChildren.push(aSep);
|
||||
}
|
||||
newChildren.push(this.children[i]);
|
||||
this.children = newChildren;
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Call String.prototype.replace on the very right-most source snippet. Useful
|
||||
* for trimming whitespace from the end of a source node, etc.
|
||||
*
|
||||
* @param aPattern The pattern to replace.
|
||||
* @param aReplacement The thing to replace the pattern with.
|
||||
*/
|
||||
SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {
|
||||
var lastChild = this.children[this.children.length - 1];
|
||||
if (lastChild[isSourceNode]) {
|
||||
lastChild.replaceRight(aPattern, aReplacement);
|
||||
}
|
||||
else if (typeof lastChild === 'string') {
|
||||
this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
|
||||
}
|
||||
else {
|
||||
this.children.push(''.replace(aPattern, aReplacement));
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the source content for a source file. This will be added to the SourceMapGenerator
|
||||
* in the sourcesContent field.
|
||||
*
|
||||
* @param aSourceFile The filename of the source file
|
||||
* @param aSourceContent The content of the source file
|
||||
*/
|
||||
SourceNode.prototype.setSourceContent =
|
||||
function SourceNode_setSourceContent(aSourceFile, aSourceContent) {
|
||||
this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;
|
||||
};
|
||||
|
||||
/**
|
||||
* Walk over the tree of SourceNodes. The walking function is called for each
|
||||
* source file content and is passed the filename and source content.
|
||||
*
|
||||
* @param aFn The traversal function.
|
||||
*/
|
||||
SourceNode.prototype.walkSourceContents =
|
||||
function SourceNode_walkSourceContents(aFn) {
|
||||
for (var i = 0, len = this.children.length; i < len; i++) {
|
||||
if (this.children[i][isSourceNode]) {
|
||||
this.children[i].walkSourceContents(aFn);
|
||||
}
|
||||
}
|
||||
|
||||
var sources = Object.keys(this.sourceContents);
|
||||
for (var i = 0, len = sources.length; i < len; i++) {
|
||||
aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the string representation of this source node. Walks over the tree
|
||||
* and concatenates all the various snippets together to one string.
|
||||
*/
|
||||
SourceNode.prototype.toString = function SourceNode_toString() {
|
||||
var str = "";
|
||||
this.walk(function (chunk) {
|
||||
str += chunk;
|
||||
});
|
||||
return str;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the string representation of this source node along with a source
|
||||
* map.
|
||||
*/
|
||||
SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {
|
||||
var generated = {
|
||||
code: "",
|
||||
line: 1,
|
||||
column: 0
|
||||
};
|
||||
var map = new SourceMapGenerator(aArgs);
|
||||
var sourceMappingActive = false;
|
||||
var lastOriginalSource = null;
|
||||
var lastOriginalLine = null;
|
||||
var lastOriginalColumn = null;
|
||||
var lastOriginalName = null;
|
||||
this.walk(function (chunk, original) {
|
||||
generated.code += chunk;
|
||||
if (original.source !== null
|
||||
&& original.line !== null
|
||||
&& original.column !== null) {
|
||||
if(lastOriginalSource !== original.source
|
||||
|| lastOriginalLine !== original.line
|
||||
|| lastOriginalColumn !== original.column
|
||||
|| lastOriginalName !== original.name) {
|
||||
map.addMapping({
|
||||
source: original.source,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.column
|
||||
},
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
}
|
||||
lastOriginalSource = original.source;
|
||||
lastOriginalLine = original.line;
|
||||
lastOriginalColumn = original.column;
|
||||
lastOriginalName = original.name;
|
||||
sourceMappingActive = true;
|
||||
} else if (sourceMappingActive) {
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
}
|
||||
});
|
||||
lastOriginalSource = null;
|
||||
sourceMappingActive = false;
|
||||
}
|
||||
for (var idx = 0, length = chunk.length; idx < length; idx++) {
|
||||
if (chunk.charCodeAt(idx) === NEWLINE_CODE) {
|
||||
generated.line++;
|
||||
generated.column = 0;
|
||||
// Mappings end at eol
|
||||
if (idx + 1 === length) {
|
||||
lastOriginalSource = null;
|
||||
sourceMappingActive = false;
|
||||
} else if (sourceMappingActive) {
|
||||
map.addMapping({
|
||||
source: original.source,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.column
|
||||
},
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
}
|
||||
} else {
|
||||
generated.column++;
|
||||
}
|
||||
}
|
||||
});
|
||||
this.walkSourceContents(function (sourceFile, sourceContent) {
|
||||
map.setSourceContent(sourceFile, sourceContent);
|
||||
});
|
||||
|
||||
return { code: generated.code, map: map };
|
||||
};
|
||||
|
||||
exports.SourceNode = SourceNode;
|
||||
|
||||
});
|
319
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/lib/source-map/util.js
generated
vendored
Normal file
@ -0,0 +1,319 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
/**
|
||||
* This is a helper function for getting values from parameter/options
|
||||
* objects.
|
||||
*
|
||||
* @param args The object we are extracting values from
|
||||
* @param name The name of the property we are getting.
|
||||
* @param defaultValue An optional value to return if the property is missing
|
||||
* from the object. If this is not specified and the property is missing, an
|
||||
* error will be thrown.
|
||||
*/
|
||||
function getArg(aArgs, aName, aDefaultValue) {
|
||||
if (aName in aArgs) {
|
||||
return aArgs[aName];
|
||||
} else if (arguments.length === 3) {
|
||||
return aDefaultValue;
|
||||
} else {
|
||||
throw new Error('"' + aName + '" is a required argument.');
|
||||
}
|
||||
}
|
||||
exports.getArg = getArg;
|
||||
|
||||
var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/;
|
||||
var dataUrlRegexp = /^data:.+\,.+$/;
|
||||
|
||||
function urlParse(aUrl) {
|
||||
var match = aUrl.match(urlRegexp);
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
scheme: match[1],
|
||||
auth: match[2],
|
||||
host: match[3],
|
||||
port: match[4],
|
||||
path: match[5]
|
||||
};
|
||||
}
|
||||
exports.urlParse = urlParse;
|
||||
|
||||
function urlGenerate(aParsedUrl) {
|
||||
var url = '';
|
||||
if (aParsedUrl.scheme) {
|
||||
url += aParsedUrl.scheme + ':';
|
||||
}
|
||||
url += '//';
|
||||
if (aParsedUrl.auth) {
|
||||
url += aParsedUrl.auth + '@';
|
||||
}
|
||||
if (aParsedUrl.host) {
|
||||
url += aParsedUrl.host;
|
||||
}
|
||||
if (aParsedUrl.port) {
|
||||
url += ":" + aParsedUrl.port
|
||||
}
|
||||
if (aParsedUrl.path) {
|
||||
url += aParsedUrl.path;
|
||||
}
|
||||
return url;
|
||||
}
|
||||
exports.urlGenerate = urlGenerate;
|
||||
|
||||
/**
|
||||
* Normalizes a path, or the path portion of a URL:
|
||||
*
|
||||
* - Replaces consecutive slashes with one slash.
|
||||
* - Removes unnecessary '.' parts.
|
||||
* - Removes unnecessary '<dir>/..' parts.
|
||||
*
|
||||
* Based on code in the Node.js 'path' core module.
|
||||
*
|
||||
* @param aPath The path or url to normalize.
|
||||
*/
|
||||
function normalize(aPath) {
|
||||
var path = aPath;
|
||||
var url = urlParse(aPath);
|
||||
if (url) {
|
||||
if (!url.path) {
|
||||
return aPath;
|
||||
}
|
||||
path = url.path;
|
||||
}
|
||||
var isAbsolute = (path.charAt(0) === '/');
|
||||
|
||||
var parts = path.split(/\/+/);
|
||||
for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {
|
||||
part = parts[i];
|
||||
if (part === '.') {
|
||||
parts.splice(i, 1);
|
||||
} else if (part === '..') {
|
||||
up++;
|
||||
} else if (up > 0) {
|
||||
if (part === '') {
|
||||
// The first part is blank if the path is absolute. Trying to go
|
||||
// above the root is a no-op. Therefore we can remove all '..' parts
|
||||
// directly after the root.
|
||||
parts.splice(i + 1, up);
|
||||
up = 0;
|
||||
} else {
|
||||
parts.splice(i, 2);
|
||||
up--;
|
||||
}
|
||||
}
|
||||
}
|
||||
path = parts.join('/');
|
||||
|
||||
if (path === '') {
|
||||
path = isAbsolute ? '/' : '.';
|
||||
}
|
||||
|
||||
if (url) {
|
||||
url.path = path;
|
||||
return urlGenerate(url);
|
||||
}
|
||||
return path;
|
||||
}
|
||||
exports.normalize = normalize;
|
||||
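// A few illustrative results of the rules above:
//   normalize('a//b/./c/../d')             -> 'a/b/d'
//   normalize('/foo/../bar')               -> '/bar'
//   normalize('http://example.com//a/./b') -> 'http://example.com/a/b'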
|
||||
/**
|
||||
* Joins two paths/URLs.
|
||||
*
|
||||
* @param aRoot The root path or URL.
|
||||
* @param aPath The path or URL to be joined with the root.
|
||||
*
|
||||
* - If aPath is a URL or a data URI, aPath is returned, unless aPath is a
|
||||
* scheme-relative URL: Then the scheme of aRoot, if any, is prepended
|
||||
* first.
|
||||
* - Otherwise aPath is a path. If aRoot is a URL, then its path portion
|
||||
* is updated with the result and aRoot is returned. Otherwise the result
|
||||
* is returned.
|
||||
* - If aPath is absolute, the result is aPath.
|
||||
* - Otherwise the two paths are joined with a slash.
|
||||
* - Joining for example 'http://' and 'www.example.com' is also supported.
|
||||
*/
|
||||
function join(aRoot, aPath) {
|
||||
if (aRoot === "") {
|
||||
aRoot = ".";
|
||||
}
|
||||
if (aPath === "") {
|
||||
aPath = ".";
|
||||
}
|
||||
var aPathUrl = urlParse(aPath);
|
||||
var aRootUrl = urlParse(aRoot);
|
||||
if (aRootUrl) {
|
||||
aRoot = aRootUrl.path || '/';
|
||||
}
|
||||
|
||||
// `join(foo, '//www.example.org')`
|
||||
if (aPathUrl && !aPathUrl.scheme) {
|
||||
if (aRootUrl) {
|
||||
aPathUrl.scheme = aRootUrl.scheme;
|
||||
}
|
||||
return urlGenerate(aPathUrl);
|
||||
}
|
||||
|
||||
if (aPathUrl || aPath.match(dataUrlRegexp)) {
|
||||
return aPath;
|
||||
}
|
||||
|
||||
// `join('http://', 'www.example.com')`
|
||||
if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {
|
||||
aRootUrl.host = aPath;
|
||||
return urlGenerate(aRootUrl);
|
||||
}
|
||||
|
||||
var joined = aPath.charAt(0) === '/'
|
||||
? aPath
|
||||
: normalize(aRoot.replace(/\/+$/, '') + '/' + aPath);
|
||||
|
||||
if (aRootUrl) {
|
||||
aRootUrl.path = joined;
|
||||
return urlGenerate(aRootUrl);
|
||||
}
|
||||
return joined;
|
||||
}
|
||||
exports.join = join;
|
||||
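// A few illustrative results of the rules above:
//   join('a/b', 'c')                         -> 'a/b/c'
//   join('a/b', '/c')                        -> '/c'
//   join('http://example.com', '/map.json')  -> 'http://example.com/map.json'
//   join('a/b', 'http://example.com/x')      -> 'http://example.com/x'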
|
||||
/**
|
||||
* Make a path relative to a URL or another path.
|
||||
*
|
||||
* @param aRoot The root path or URL.
|
||||
* @param aPath The path or URL to be made relative to aRoot.
|
||||
*/
|
||||
function relative(aRoot, aPath) {
|
||||
if (aRoot === "") {
|
||||
aRoot = ".";
|
||||
}
|
||||
|
||||
aRoot = aRoot.replace(/\/$/, '');
|
||||
|
||||
// XXX: It is possible to remove this block, and the tests still pass!
|
||||
var url = urlParse(aRoot);
|
||||
if (aPath.charAt(0) == "/" && url && url.path == "/") {
|
||||
return aPath.slice(1);
|
||||
}
|
||||
|
||||
return aPath.indexOf(aRoot + '/') === 0
|
||||
? aPath.substr(aRoot.length + 1)
|
||||
: aPath;
|
||||
}
|
||||
exports.relative = relative;
|
||||
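// Illustrative results:
//   relative('/the/root', '/the/root/one.js') -> 'one.js'
//   relative('/the/root', '/another/file.js') -> '/another/file.js'  (unchanged)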
|
||||
/**
|
||||
* Because behavior goes wacky when you set `__proto__` on objects, we
|
||||
* have to prefix all the strings in our set with an arbitrary character.
|
||||
*
|
||||
* See https://github.com/mozilla/source-map/pull/31 and
|
||||
* https://github.com/mozilla/source-map/issues/30
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
function toSetString(aStr) {
|
||||
return '$' + aStr;
|
||||
}
|
||||
exports.toSetString = toSetString;
|
||||
|
||||
function fromSetString(aStr) {
|
||||
return aStr.substr(1);
|
||||
}
|
||||
exports.fromSetString = fromSetString;
|
||||
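// e.g. toSetString('__proto__')    -> '$__proto__'
//      fromSetString('$__proto__') -> '__proto__'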
|
||||
function strcmp(aStr1, aStr2) {
|
||||
var s1 = aStr1 || "";
|
||||
var s2 = aStr2 || "";
|
||||
return (s1 > s2) - (s1 < s2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Comparator between two mappings where the original positions are compared.
|
||||
*
|
||||
* Optionally pass in `true` as `onlyCompareOriginal` to consider two
|
||||
* mappings with the same original source/line/column, but different generated
|
||||
* line and column the same. Useful when searching for a mapping with a
|
||||
* stubbed out mapping.
|
||||
*/
|
||||
function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {
|
||||
var cmp;
|
||||
|
||||
cmp = strcmp(mappingA.source, mappingB.source);
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||
if (cmp || onlyCompareOriginal) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = strcmp(mappingA.name, mappingB.name);
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
};
|
||||
exports.compareByOriginalPositions = compareByOriginalPositions;
|
||||
|
||||
/**
|
||||
* Comparator between two mappings where the generated positions are
|
||||
* compared.
|
||||
*
|
||||
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
||||
* mappings with the same generated line and column, but different
|
||||
* source/name/original line and column the same. Useful when searching for a
|
||||
* mapping with a stubbed out mapping.
|
||||
*/
|
||||
function compareByGeneratedPositions(mappingA, mappingB, onlyCompareGenerated) {
|
||||
var cmp;
|
||||
|
||||
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
||||
if (cmp || onlyCompareGenerated) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = strcmp(mappingA.source, mappingB.source);
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
||||
if (cmp) {
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return strcmp(mappingA.name, mappingB.name);
|
||||
};
|
||||
exports.compareByGeneratedPositions = compareByGeneratedPositions;
|
||||
|
||||
});
|
58
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/node_modules/amdefine/LICENSE
generated
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
amdefine is released under two licenses: new BSD, and MIT. You may pick the
|
||||
license that best suits your development needs. The text of both licenses are
|
||||
provided below.
|
||||
|
||||
|
||||
The "New" BSD License:
|
||||
----------------------
|
||||
|
||||
Copyright (c) 2011, The Dojo Foundation
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
* Neither the name of the Dojo Foundation nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
|
||||
MIT License
|
||||
-----------
|
||||
|
||||
Copyright (c) 2011, The Dojo Foundation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
171
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/node_modules/amdefine/README.md
generated
vendored
Normal file
@ -0,0 +1,171 @@
|
||||
# amdefine
|
||||
|
||||
A module that can be used to implement AMD's define() in Node. This allows you
|
||||
to code to the AMD API and have the module work in node programs without
|
||||
requiring those other programs to use AMD.
|
||||
|
||||
## Usage
|
||||
|
||||
**1)** Update your package.json to indicate amdefine as a dependency:
|
||||
|
||||
```javascript
|
||||
"dependencies": {
|
||||
"amdefine": ">=0.1.0"
|
||||
}
|
||||
```
|
||||
|
||||
Then run `npm install` to get amdefine into your project.
|
||||
|
||||
**2)** At the top of each module that uses define(), place this code:
|
||||
|
||||
```javascript
|
||||
if (typeof define !== 'function') { var define = require('amdefine')(module) }
|
||||
```
|
||||
|
||||
**Only use these snippets** when loading amdefine. If you preserve the basic structure,
|
||||
with the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).
|
||||
|
||||
You can add spaces, line breaks and even require amdefine with a local path, but
|
||||
keep the rest of the structure to get the stripping behavior.
|
||||
|
||||
As you may know, because `if` statements in JavaScript don't have their own scope, the var
|
||||
declaration in the above snippet is made whether the `if` expression is truthy or not. If
|
||||
RequireJS is loaded then the declaration is superfluous because `define` is already
|
||||
declared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`
|
||||
declarations of the same variable in the same scope gracefully.
|
||||
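A minimal, stand-alone illustration of that scoping rule (nothing here is amdefine-specific):

```javascript
function example() {
  if (false) { var define = function () {}; } // this branch never runs...
  var define;                                 // ...but both `var`s declare the same
  return typeof define;                       // function-scoped binding, so no error
}
console.log(example()); // 'undefined'
```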
|
||||
If you want to deliver amdefine.js with your code rather than specifying it as a dependency
|
||||
with npm, then just download the latest release and refer to it using a relative path:
|
||||
|
||||
[Latest Version](https://github.com/jrburke/amdefine/raw/latest/amdefine.js)
|
||||
|
||||
### amdefine/intercept
|
||||
|
||||
Consider this very experimental.
|
||||
|
||||
Instead of pasting the piece of text for the amdefine setup of a `define`
|
||||
variable in each module you create or consume, you can use `amdefine/intercept`
|
||||
instead. It will automatically insert the above snippet in each .js file loaded
|
||||
by Node.
|
||||
|
||||
**Warning**: you should only use this if you are creating an application that
|
||||
is consuming AMD style define()'d modules that are distributed via npm and want
|
||||
to run that code in Node.
|
||||
|
||||
For library code where you are not sure if it will be used by others in Node or
|
||||
in the browser, then explicitly depending on amdefine and placing the code
|
||||
snippet above is the suggested path, instead of using `amdefine/intercept`. The
|
||||
intercept module affects all .js files loaded in the Node app, and it is
|
||||
inconsiderate to modify global state like that unless you are also controlling
|
||||
the top level app.
|
||||
|
||||
#### Why distribute AMD-style modules via npm?
|
||||
|
||||
npm has a lot of weaknesses for front-end use (installed layout is not great,
|
||||
should have better support for the `baseUrl + moduleID + '.js'` style of loading,
|
||||
single file JS installs), but some people want a JS package manager and are
|
||||
willing to live with those constraints. If that is you, but still want to author
|
||||
in AMD style modules to get dynamic require([]), better direct source usage and
|
||||
powerful loader plugin support in the browser, then this tool can help.
|
||||
|
||||
#### amdefine/intercept usage
|
||||
|
||||
Just require it in your top level app module (for example index.js, server.js):
|
||||
|
||||
```javascript
|
||||
require('amdefine/intercept');
|
||||
```
|
||||
|
||||
The module does not return a value, so no need to assign the result to a local
|
||||
variable.
|
||||
|
||||
Then just require() code as you normally would with Node's require(). Any .js
|
||||
loaded after the intercept require will have the amdefine check injected in
|
||||
the .js source as it is loaded. It does not modify the source on disk, just
|
||||
prepends some content to the text of the module as it is loaded by Node.
|
||||
|
||||
#### How amdefine/intercept works
|
||||
|
||||
It overrides the `Module._extensions['.js']` in Node to automatically prepend
|
||||
the amdefine snippet above. So, it will affect any .js file loaded by your
|
||||
app.
|
||||
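In sketch form (the real implementation is the `intercept.js` shown further down in this diff; it additionally strips a BOM and skips amdefine's own file):

```javascript
var Module = require('module');
var fs = require('fs');
var snippet = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";

Module._extensions['.js'] = function (module, filename) {
  // Prepend the amdefine guard to each .js file as Node loads it.
  var content = fs.readFileSync(filename, 'utf8');
  module._compile(snippet + content, filename);
};
```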
|
||||
## define() usage
|
||||
|
||||
It is best if you use the anonymous forms of define() in your module:
|
||||
|
||||
```javascript
|
||||
define(function (require) {
|
||||
var dependency = require('dependency');
|
||||
});
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```javascript
|
||||
define(['dependency'], function (dependency) {
|
||||
|
||||
});
|
||||
```
|
||||
|
||||
## RequireJS optimizer integration. <a name="optimizer"></a>
|
||||
|
||||
Version 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)
|
||||
will have support for stripping the `if (typeof define !== 'function')` check
|
||||
mentioned above, so you can include this snippet for code that runs in the
|
||||
browser, but avoid taking the cost of the if() statement once the code is
|
||||
optimized for deployment.
|
||||
|
||||
## Node 0.4 Support
|
||||
|
||||
If you want to support Node 0.4, then add `require` as the second parameter to amdefine:
|
||||
|
||||
```javascript
|
||||
//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.
|
||||
if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
|
||||
```
|
||||
|
||||
## Limitations
|
||||
|
||||
### Synchronous vs Asynchronous
|
||||
|
||||
amdefine creates a define() function that is callable by your code. It will
|
||||
execute and trace dependencies and call the factory function *synchronously*,
|
||||
to keep the behavior in line with Node's synchronous dependency tracing.
|
||||
|
||||
The exception: calling AMD's callback-style require() from inside a factory
|
||||
function. The require callback is called on process.nextTick():
|
||||
|
||||
```javascript
|
||||
define(function (require) {
|
||||
require(['a'], function(a) {
|
||||
//'a' is loaded synchronously, but
|
||||
//this callback is called on process.nextTick().
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Loader Plugins
|
||||
|
||||
Loader plugins are supported as long as they call their load() callbacks
|
||||
synchronously. So ones that do network requests will not work. However plugins
|
||||
like [text](http://requirejs.org/docs/api.html#text) can load text files locally.
|
||||
|
||||
The plugin API's `load.fromText()` is **not supported** in amdefine, so this means
|
||||
transpiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)
|
||||
will not work. This may be fixable, but it is a bit complex, and I do not have
|
||||
enough node-fu to figure it out yet. See the source for amdefine.js if you want
|
||||
to get an idea of the issues involved.
|
||||
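To make the constraint concrete, here is a hypothetical plugin that amdefine can drive: it does its I/O synchronously and fires its `load()` callback before returning (the file name and behaviour are invented for illustration):

```javascript
// sync-json.js -- hypothetical loader plugin that resolves synchronously.
if (typeof define !== 'function') { var define = require('amdefine')(module) }

define(function (require) {
  var fs = require('fs');
  return {
    load: function (name, parentRequire, onload, config) {
      // No network, no async I/O: read the resource and call back immediately.
      // (A real plugin would resolve `name` via parentRequire.toUrl().)
      var text = fs.readFileSync(name + '.json', 'utf8');
      onload(JSON.parse(text));
    }
  };
});
```

Because `onload` fires before `load()` returns, amdefine can hand the value straight back from its synchronous `require('plugin!resource')` path.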
|
||||
## Tests
|
||||
|
||||
To run the tests, cd to **tests** and run:
|
||||
|
||||
```
|
||||
node all.js
|
||||
node all-intercept.js
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
New BSD and MIT. Check the LICENSE file for all the details.
|
299
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/node_modules/amdefine/amdefine.js
generated
vendored
Normal file
@ -0,0 +1,299 @@
|
||||
/** vim: et:ts=4:sw=4:sts=4
|
||||
* @license amdefine 0.1.0 Copyright (c) 2011, The Dojo Foundation All Rights Reserved.
|
||||
* Available via the MIT or new BSD license.
|
||||
* see: http://github.com/jrburke/amdefine for details
|
||||
*/
|
||||
|
||||
/*jslint node: true */
|
||||
/*global module, process */
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Creates a define for node.
|
||||
* @param {Object} module the "module" object that is defined by Node for the
|
||||
* current module.
|
||||
* @param {Function} [requireFn]. Node's require function for the current module.
|
||||
* It only needs to be passed in Node versions before 0.5, when module.require
|
||||
* did not exist.
|
||||
* @returns {Function} a define function that is usable for the current node
|
||||
* module.
|
||||
*/
|
||||
function amdefine(module, requireFn) {
|
||||
'use strict';
|
||||
var defineCache = {},
|
||||
loaderCache = {},
|
||||
alreadyCalled = false,
|
||||
path = require('path'),
|
||||
makeRequire, stringRequire;
|
||||
|
||||
/**
|
||||
* Trims the . and .. from an array of path segments.
|
||||
* It will keep a leading path segment if a .. will become
|
||||
* the first path segment, to help with module name lookups,
|
||||
* which act like paths, but can be remapped. But the end result,
|
||||
* all paths that use this function should look normalized.
|
||||
* NOTE: this method MODIFIES the input array.
|
||||
* @param {Array} ary the array of path segments.
|
||||
*/
|
||||
function trimDots(ary) {
|
||||
var i, part;
|
||||
for (i = 0; ary[i]; i+= 1) {
|
||||
part = ary[i];
|
||||
if (part === '.') {
|
||||
ary.splice(i, 1);
|
||||
i -= 1;
|
||||
} else if (part === '..') {
|
||||
if (i === 1 && (ary[2] === '..' || ary[0] === '..')) {
|
||||
//End of the line. Keep at least one non-dot
|
||||
//path segment at the front so it can be mapped
|
||||
//correctly to disk. Otherwise, there is likely
|
||||
//no path mapping for a path starting with '..'.
|
||||
//This can still fail, but catches the most reasonable
|
||||
//uses of ..
|
||||
break;
|
||||
} else if (i > 0) {
|
||||
ary.splice(i - 1, 2);
|
||||
i -= 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function normalize(name, baseName) {
|
||||
var baseParts;
|
||||
|
||||
//Adjust any relative paths.
|
||||
if (name && name.charAt(0) === '.') {
|
||||
//If have a base name, try to normalize against it,
|
||||
//otherwise, assume it is a top-level require that will
|
||||
//be relative to baseUrl in the end.
|
||||
if (baseName) {
|
||||
baseParts = baseName.split('/');
|
||||
baseParts = baseParts.slice(0, baseParts.length - 1);
|
||||
baseParts = baseParts.concat(name.split('/'));
|
||||
trimDots(baseParts);
|
||||
name = baseParts.join('/');
|
||||
}
|
||||
}
|
||||
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the normalize() function passed to a loader plugin's
|
||||
* normalize method.
|
||||
*/
|
||||
function makeNormalize(relName) {
|
||||
return function (name) {
|
||||
return normalize(name, relName);
|
||||
};
|
||||
}
|
||||
|
||||
function makeLoad(id) {
|
||||
function load(value) {
|
||||
loaderCache[id] = value;
|
||||
}
|
||||
|
||||
load.fromText = function (id, text) {
|
||||
//This one is difficult because the text can (and probably does) use
|
||||
//define, and any relative paths and requires should be relative
|
||||
//to where that id would be found on disk. But this would require
|
||||
//bootstrapping a module/require fairly deeply from node core.
|
||||
//Not sure how best to go about that yet.
|
||||
throw new Error('amdefine does not implement load.fromText');
|
||||
};
|
||||
|
||||
return load;
|
||||
}
|
||||
|
||||
makeRequire = function (systemRequire, exports, module, relId) {
|
||||
function amdRequire(deps, callback) {
|
||||
if (typeof deps === 'string') {
|
||||
//Synchronous, single module require('')
|
||||
return stringRequire(systemRequire, exports, module, deps, relId);
|
||||
} else {
|
||||
//Array of dependencies with a callback.
|
||||
|
||||
//Convert the dependencies to modules.
|
||||
deps = deps.map(function (depName) {
|
||||
return stringRequire(systemRequire, exports, module, depName, relId);
|
||||
});
|
||||
|
||||
//Wait for next tick to call back the require call.
|
||||
process.nextTick(function () {
|
||||
callback.apply(null, deps);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
amdRequire.toUrl = function (filePath) {
|
||||
if (filePath.indexOf('.') === 0) {
|
||||
return normalize(filePath, path.dirname(module.filename));
|
||||
} else {
|
||||
return filePath;
|
||||
}
|
||||
};
|
||||
|
||||
return amdRequire;
|
||||
};
|
||||
|
||||
//Favor explicit value, passed in if the module wants to support Node 0.4.
|
||||
requireFn = requireFn || function req() {
|
||||
return module.require.apply(module, arguments);
|
||||
};
|
||||
|
||||
function runFactory(id, deps, factory) {
|
||||
var r, e, m, result;
|
||||
|
||||
if (id) {
|
||||
e = loaderCache[id] = {};
|
||||
m = {
|
||||
id: id,
|
||||
uri: __filename,
|
||||
exports: e
|
||||
};
|
||||
r = makeRequire(requireFn, e, m, id);
|
||||
} else {
|
||||
//Only support one define call per file
|
||||
if (alreadyCalled) {
|
||||
throw new Error('amdefine with no module ID cannot be called more than once per file.');
|
||||
}
|
||||
alreadyCalled = true;
|
||||
|
||||
//Use the real variables from node
|
||||
//Use module.exports for exports, since
|
||||
//the exports in here is amdefine exports.
|
||||
e = module.exports;
|
||||
m = module;
|
||||
r = makeRequire(requireFn, e, m, module.id);
|
||||
}
|
||||
|
||||
//If there are dependencies, they are strings, so need
|
||||
//to convert them to dependency values.
|
||||
if (deps) {
|
||||
deps = deps.map(function (depName) {
|
||||
return r(depName);
|
||||
});
|
||||
}
|
||||
|
||||
//Call the factory with the right dependencies.
|
||||
if (typeof factory === 'function') {
|
||||
result = factory.apply(m.exports, deps);
|
||||
} else {
|
||||
result = factory;
|
||||
}
|
||||
|
||||
if (result !== undefined) {
|
||||
m.exports = result;
|
||||
if (id) {
|
||||
loaderCache[id] = m.exports;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stringRequire = function (systemRequire, exports, module, id, relId) {
|
||||
//Split the ID by a ! so that a loader plugin prefix, if present, can be picked off.
|
||||
var index = id.indexOf('!'),
|
||||
originalId = id,
|
||||
prefix, plugin;
|
||||
|
||||
if (index === -1) {
|
||||
id = normalize(id, relId);
|
||||
|
||||
//Straight module lookup. If it is one of the special dependencies,
|
||||
//deal with it, otherwise, delegate to node.
|
||||
if (id === 'require') {
|
||||
return makeRequire(systemRequire, exports, module, relId);
|
||||
} else if (id === 'exports') {
|
||||
return exports;
|
||||
} else if (id === 'module') {
|
||||
return module;
|
||||
} else if (loaderCache.hasOwnProperty(id)) {
|
||||
return loaderCache[id];
|
||||
} else if (defineCache[id]) {
|
||||
runFactory.apply(null, defineCache[id]);
|
||||
return loaderCache[id];
|
||||
} else {
|
||||
if(systemRequire) {
|
||||
return systemRequire(originalId);
|
||||
} else {
|
||||
throw new Error('No module with ID: ' + id);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//There is a plugin in play.
|
||||
prefix = id.substring(0, index);
|
||||
id = id.substring(index + 1, id.length);
|
||||
|
||||
plugin = stringRequire(systemRequire, exports, module, prefix, relId);
|
||||
|
||||
if (plugin.normalize) {
|
||||
id = plugin.normalize(id, makeNormalize(relId));
|
||||
} else {
|
||||
//Normalize the ID normally.
|
||||
id = normalize(id, relId);
|
||||
}
|
||||
|
||||
if (loaderCache[id]) {
|
||||
return loaderCache[id];
|
||||
} else {
|
||||
plugin.load(id, makeRequire(systemRequire, exports, module, relId), makeLoad(id), {});
|
||||
|
||||
return loaderCache[id];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Create a define function specific to the module asking for amdefine.
|
||||
function define(id, deps, factory) {
|
||||
if (Array.isArray(id)) {
|
||||
factory = deps;
|
||||
deps = id;
|
||||
id = undefined;
|
||||
} else if (typeof id !== 'string') {
|
||||
factory = id;
|
||||
id = deps = undefined;
|
||||
}
|
||||
|
||||
if (deps && !Array.isArray(deps)) {
|
||||
factory = deps;
|
||||
deps = undefined;
|
||||
}
|
||||
|
||||
if (!deps) {
|
||||
deps = ['require', 'exports', 'module'];
|
||||
}
|
||||
|
||||
//Set up properties for this module. If an ID, then use
|
||||
//internal cache. If no ID, then use the external variables
|
||||
//for this node module.
|
||||
if (id) {
|
||||
//Put the module in deep freeze until there is a
|
||||
//require call for it.
|
||||
defineCache[id] = [id, deps, factory];
|
||||
} else {
|
||||
runFactory(id, deps, factory);
|
||||
}
|
||||
}
|
||||
|
||||
//define.require, which has access to all the values in the
|
||||
//cache. Useful for AMD modules that all have IDs in the file,
|
||||
//but need to finally export a value to node based on one of those
|
||||
//IDs.
|
||||
define.require = function (id) {
|
||||
if (loaderCache[id]) {
|
||||
return loaderCache[id];
|
||||
}
|
||||
|
||||
if (defineCache[id]) {
|
||||
runFactory.apply(null, defineCache[id]);
|
||||
return loaderCache[id];
|
||||
}
|
||||
};
|
||||
|
||||
define.amd = {};
|
||||
|
||||
return define;
|
||||
}
|
||||
|
||||
module.exports = amdefine;
|
36
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/node_modules/amdefine/intercept.js
generated
vendored
Normal file
@ -0,0 +1,36 @@
|
||||
/*jshint node: true */
|
||||
var inserted,
|
||||
Module = require('module'),
|
||||
fs = require('fs'),
|
||||
existingExtFn = Module._extensions['.js'],
|
||||
amdefineRegExp = /amdefine\.js/;
|
||||
|
||||
inserted = "if (typeof define !== 'function') {var define = require('amdefine')(module)}";
|
||||
|
||||
//From the node/lib/module.js source:
|
||||
function stripBOM(content) {
|
||||
// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
|
||||
// because the buffer-to-string conversion in `fs.readFileSync()`
|
||||
// translates it to FEFF, the UTF-16 BOM.
|
||||
if (content.charCodeAt(0) === 0xFEFF) {
|
||||
content = content.slice(1);
|
||||
}
|
||||
return content;
|
||||
}
|
||||
|
||||
//Also adapted from the node/lib/module.js source:
|
||||
function intercept(module, filename) {
|
||||
var content = stripBOM(fs.readFileSync(filename, 'utf8'));
|
||||
|
||||
if (!amdefineRegExp.test(module.id)) {
|
||||
content = inserted + content;
|
||||
}
|
||||
|
||||
module._compile(content, filename);
|
||||
}
|
||||
|
||||
intercept._id = 'amdefine/intercept';
|
||||
|
||||
if (!existingExtFn._id || existingExtFn._id !== intercept._id) {
|
||||
Module._extensions['.js'] = intercept;
|
||||
}
|
55
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/node_modules/amdefine/package.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
194
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/package.json
generated
vendored
Normal file
@ -0,0 +1,194 @@
|
||||
{
|
||||
"name": "source-map",
|
||||
"description": "Generates and consumes source maps",
|
||||
"version": "0.3.0",
|
||||
"homepage": "https://github.com/mozilla/source-map",
|
||||
"author": {
|
||||
"name": "Nick Fitzgerald",
|
||||
"email": "nfitzgerald@mozilla.com"
|
||||
},
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Tobias Koppers",
|
||||
"email": "tobias.koppers@googlemail.com"
|
||||
},
|
||||
{
|
||||
"name": "Duncan Beevers",
|
||||
"email": "duncan@dweebd.com"
|
||||
},
|
||||
{
|
||||
"name": "Stephen Crane",
|
||||
"email": "scrane@mozilla.com"
|
||||
},
|
||||
{
|
||||
"name": "Ryan Seddon",
|
||||
"email": "seddon.ryan@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Miles Elam",
|
||||
"email": "miles.elam@deem.com"
|
||||
},
|
||||
{
|
||||
"name": "Mihai Bazon",
|
||||
"email": "mihai.bazon@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Michael Ficarra",
|
||||
"email": "github.public.email@michael.ficarra.me"
|
||||
},
|
||||
{
|
||||
"name": "Todd Wolfson",
|
||||
"email": "todd@twolfson.com"
|
||||
},
|
||||
{
|
||||
"name": "Alexander Solovyov",
|
||||
"email": "alexander@solovyov.net"
|
||||
},
|
||||
{
|
||||
"name": "Felix Gnass",
|
||||
"email": "fgnass@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Conrad Irwin",
|
||||
"email": "conrad.irwin@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "usrbincc",
|
||||
"email": "usrbincc@yahoo.com"
|
||||
},
|
||||
{
|
||||
"name": "David Glasser",
|
||||
"email": "glasser@davidglasser.net"
|
||||
},
|
||||
{
|
||||
"name": "Chase Douglas",
|
||||
"email": "chase@newrelic.com"
|
||||
},
|
||||
{
|
||||
"name": "Evan Wallace",
|
||||
"email": "evan.exe@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Heather Arthur",
|
||||
"email": "fayearthur@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Hugh Kennedy",
|
||||
"email": "hughskennedy@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "David Glasser",
|
||||
"email": "glasser@davidglasser.net"
|
||||
},
|
||||
{
|
||||
"name": "Simon Lydell",
|
||||
"email": "simon.lydell@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Jmeas Smith",
|
||||
"email": "jellyes2@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Michael Z Goddard",
|
||||
"email": "mzgoddard@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "azu",
|
||||
"email": "azu@users.noreply.github.com"
|
||||
},
|
||||
{
|
||||
"name": "John Gozde",
|
||||
"email": "john@gozde.ca"
|
||||
},
|
||||
{
|
||||
"name": "Adam Kirkton",
|
||||
"email": "akirkton@truefitinnovation.com"
|
||||
},
|
||||
{
|
||||
"name": "Chris Montgomery",
|
||||
"email": "christopher.montgomery@dowjones.com"
|
||||
},
|
||||
{
|
||||
"name": "J. Ryan Stinnett",
|
||||
"email": "jryans@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Jack Herrington",
|
||||
"email": "jherrington@walmartlabs.com"
|
||||
},
|
||||
{
|
||||
"name": "Chris Truter",
|
||||
"email": "jeffpalentine@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Daniel Espeset",
|
||||
"email": "daniel@danielespeset.com"
|
||||
},
|
||||
{
|
||||
"name": "Jamie Wong",
|
||||
"email": "jamie.lf.wong@gmail.com"
|
||||
},
|
||||
{
|
||||
"name": "Eddy Bruël",
|
||||
"email": "ejpbruel@mozilla.com"
|
||||
}
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "http://github.com/mozilla/source-map.git"
|
||||
},
|
||||
"directories": {
|
||||
"lib": "./lib"
|
||||
},
|
||||
"main": "./lib/source-map.js",
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
},
|
||||
"licenses": [
|
||||
{
|
||||
"type": "BSD",
|
||||
"url": "http://opensource.org/licenses/BSD-3-Clause"
|
||||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"amdefine": ">=0.0.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"dryice": ">=0.4.8"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "node test/run-tests.js",
|
||||
"build": "node Makefile.dryice.js"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/mozilla/source-map/issues"
|
||||
},
|
||||
"_id": "source-map@0.3.0",
|
||||
"_shasum": "8586fb9a5a005e5b501e21cd18b6f21b457ad1f9",
|
||||
"_from": "source-map@^0.3.0",
|
||||
"_npmVersion": "1.4.9",
|
||||
"_npmUser": {
|
||||
"name": "nickfitzgerald",
|
||||
"email": "fitzgen@gmail.com"
|
||||
},
|
||||
"maintainers": [
|
||||
{
|
||||
"name": "mozilla-devtools",
|
||||
"email": "mozilla-developer-tools@googlegroups.com"
|
||||
},
|
||||
{
|
||||
"name": "mozilla",
|
||||
"email": "dherman@mozilla.com"
|
||||
},
|
||||
{
|
||||
"name": "nickfitzgerald",
|
||||
"email": "fitzgen@gmail.com"
|
||||
}
|
||||
],
|
||||
"dist": {
|
||||
"shasum": "8586fb9a5a005e5b501e21cd18b6f21b457ad1f9",
|
||||
"tarball": "http://registry.npmjs.org/source-map/-/source-map-0.3.0.tgz"
|
||||
},
|
||||
"_resolved": "https://registry.npmjs.org/source-map/-/source-map-0.3.0.tgz",
|
||||
"readme": "ERROR: No README data found!"
|
||||
}
|
62
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/run-tests.js
generated
vendored
Executable file
@ -0,0 +1,62 @@
#!/usr/bin/env node
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
var assert = require('assert');
var fs = require('fs');
var path = require('path');
var util = require('./source-map/util');

function run(tests) {
  var total = 0;
  var passed = 0;

  for (var i = 0; i < tests.length; i++) {
    for (var k in tests[i].testCase) {
      if (/^test/.test(k)) {
        total++;
        try {
          tests[i].testCase[k](assert, util);
          passed++;
        }
        catch (e) {
          console.log('FAILED ' + tests[i].name + ': ' + k + '!');
          console.log(e.stack);
        }
      }
    }
  }

  console.log('');
  console.log(passed + ' / ' + total + ' tests passed.');
  console.log('');

  return total - passed;
}

function isTestFile(f) {
  var testToRun = process.argv[2];
  return testToRun
    ? path.basename(testToRun) === f
    : /^test\-.*?\.js/.test(f);
}

function toModule(f) {
  return './source-map/' + f.replace(/\.js$/, '');
}

var requires = fs.readdirSync(path.join(__dirname, 'source-map'))
  .filter(isTestFile)
  .map(toModule);

var code = run(requires.map(require).map(function (mod, i) {
  return {
    name: requires[i],
    testCase: mod
  };
}));

process.exit(code);
26
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/source-map/test-api.js
generated
vendored
Normal file
@ -0,0 +1,26 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2012 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var sourceMap;
  try {
    sourceMap = require('../../lib/source-map');
  } catch (e) {
    sourceMap = {};
    Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
  }

  exports['test that the api is properly exposed in the top level'] = function (assert, util) {
    assert.equal(typeof sourceMap.SourceMapGenerator, "function");
    assert.equal(typeof sourceMap.SourceMapConsumer, "function");
    assert.equal(typeof sourceMap.SourceNode, "function");
  };

});
104
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/source-map/test-array-set.js
generated
vendored
Normal file
@ -0,0 +1,104 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var ArraySet = require('../../lib/source-map/array-set').ArraySet;

  function makeTestSet() {
    var set = new ArraySet();
    for (var i = 0; i < 100; i++) {
      set.add(String(i));
    }
    return set;
  }

  exports['test .has() membership'] = function (assert, util) {
    var set = makeTestSet();
    for (var i = 0; i < 100; i++) {
      assert.ok(set.has(String(i)));
    }
  };

  exports['test .indexOf() elements'] = function (assert, util) {
    var set = makeTestSet();
    for (var i = 0; i < 100; i++) {
      assert.strictEqual(set.indexOf(String(i)), i);
    }
  };

  exports['test .at() indexing'] = function (assert, util) {
    var set = makeTestSet();
    for (var i = 0; i < 100; i++) {
      assert.strictEqual(set.at(i), String(i));
    }
  };

  exports['test creating from an array'] = function (assert, util) {
    var set = ArraySet.fromArray(['foo', 'bar', 'baz', 'quux', 'hasOwnProperty']);

    assert.ok(set.has('foo'));
    assert.ok(set.has('bar'));
    assert.ok(set.has('baz'));
    assert.ok(set.has('quux'));
    assert.ok(set.has('hasOwnProperty'));

    assert.strictEqual(set.indexOf('foo'), 0);
    assert.strictEqual(set.indexOf('bar'), 1);
    assert.strictEqual(set.indexOf('baz'), 2);
    assert.strictEqual(set.indexOf('quux'), 3);

    assert.strictEqual(set.at(0), 'foo');
    assert.strictEqual(set.at(1), 'bar');
    assert.strictEqual(set.at(2), 'baz');
    assert.strictEqual(set.at(3), 'quux');
  };

  exports['test that you can add __proto__; see github issue #30'] = function (assert, util) {
    var set = new ArraySet();
    set.add('__proto__');
    assert.ok(set.has('__proto__'));
    assert.strictEqual(set.at(0), '__proto__');
    assert.strictEqual(set.indexOf('__proto__'), 0);
  };

  exports['test .fromArray() with duplicates'] = function (assert, util) {
    var set = ArraySet.fromArray(['foo', 'foo']);
    assert.ok(set.has('foo'));
    assert.strictEqual(set.at(0), 'foo');
    assert.strictEqual(set.indexOf('foo'), 0);
    assert.strictEqual(set.toArray().length, 1);

    set = ArraySet.fromArray(['foo', 'foo'], true);
    assert.ok(set.has('foo'));
    assert.strictEqual(set.at(0), 'foo');
    assert.strictEqual(set.at(1), 'foo');
    assert.strictEqual(set.indexOf('foo'), 0);
    assert.strictEqual(set.toArray().length, 2);
  };

  exports['test .add() with duplicates'] = function (assert, util) {
    var set = new ArraySet();
    set.add('foo');

    set.add('foo');
    assert.ok(set.has('foo'));
    assert.strictEqual(set.at(0), 'foo');
    assert.strictEqual(set.indexOf('foo'), 0);
    assert.strictEqual(set.toArray().length, 1);

    set.add('foo', true);
    assert.ok(set.has('foo'));
    assert.strictEqual(set.at(0), 'foo');
    assert.strictEqual(set.at(1), 'foo');
    assert.strictEqual(set.indexOf('foo'), 0);
    assert.strictEqual(set.toArray().length, 2);
  };

});
23
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/source-map/test-base64-vlq.js
generated
vendored
Normal file
@ -0,0 +1,23 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var base64VLQ = require('../../lib/source-map/base64-vlq');

  exports['test normal encoding and decoding'] = function (assert, util) {
    var result = {};
    for (var i = -255; i < 256; i++) {
      base64VLQ.decode(base64VLQ.encode(i), result);
      assert.equal(result.value, i);
      assert.equal(result.rest, "");
    }
  };

});
35
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/source-map/test-base64.js
generated
vendored
Normal file
@ -0,0 +1,35 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var base64 = require('../../lib/source-map/base64');

  exports['test out of range encoding'] = function (assert, util) {
    assert.throws(function () {
      base64.encode(-1);
    });
    assert.throws(function () {
      base64.encode(64);
    });
  };

  exports['test out of range decoding'] = function (assert, util) {
    assert.throws(function () {
      base64.decode('=');
    });
  };

  exports['test normal encoding and decoding'] = function (assert, util) {
    for (var i = 0; i < 64; i++) {
      assert.equal(base64.decode(base64.encode(i)), i);
    }
  };

});
94
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/source-map/test-binary-search.js
generated
vendored
Normal file
@ -0,0 +1,94 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var binarySearch = require('../../lib/source-map/binary-search');

  function numberCompare(a, b) {
    return a - b;
  }

  exports['test too high with lub bias'] = function (assert, util) {
    var needle = 30;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.doesNotThrow(function () {
      binarySearch.search(needle, haystack, numberCompare);
    });

    assert.equal(binarySearch.search(needle, haystack, numberCompare), -1);
  };

  exports['test too low with lub bias'] = function (assert, util) {
    var needle = 1;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.doesNotThrow(function () {
      binarySearch.search(needle, haystack, numberCompare, true);
    });

    assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 2);
  };

  exports['test exact search with lub bias'] = function (assert, util) {
    var needle = 4;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 4);
  };

  exports['test fuzzy search with lub bias'] = function (assert, util) {
    var needle = 19;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare)], 20);
  };

  exports['test too high with glb bias'] = function (assert, util) {
    var needle = 30;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.doesNotThrow(function () {
      binarySearch.search(needle, haystack, numberCompare);
    });

    assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare,
                                              binarySearch.GREATEST_LOWER_BOUND)], 20);
  };

  exports['test too low with glb bias'] = function (assert, util) {
    var needle = 1;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.doesNotThrow(function () {
      binarySearch.search(needle, haystack, numberCompare,
                          binarySearch.GREATEST_LOWER_BOUND);
    });

    assert.equal(binarySearch.search(needle, haystack, numberCompare,
                                     binarySearch.GREATEST_LOWER_BOUND), -1);
  };

  exports['test exact search with glb bias'] = function (assert, util) {
    var needle = 4;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare,
                                              binarySearch.GREATEST_LOWER_BOUND)], 4);
  };

  exports['test fuzzy search with glb bias'] = function (assert, util) {
    var needle = 19;
    var haystack = [2,4,6,8,10,12,14,16,18,20];

    assert.equal(haystack[binarySearch.search(needle, haystack, numberCompare,
                                              binarySearch.GREATEST_LOWER_BOUND)], 18);
  };
});
84
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/source-map/test-dog-fooding.js
generated
vendored
Normal file
@ -0,0 +1,84 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
  var define = require('amdefine')(module, require);
}
define(function (require, exports, module) {

  var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
  var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;

  exports['test eating our own dog food'] = function (assert, util) {
    var smg = new SourceMapGenerator({
      file: 'testing.js',
      sourceRoot: '/wu/tang'
    });

    smg.addMapping({
      source: 'gza.coffee',
      original: { line: 1, column: 0 },
      generated: { line: 2, column: 2 }
    });

    smg.addMapping({
      source: 'gza.coffee',
      original: { line: 2, column: 0 },
      generated: { line: 3, column: 2 }
    });

    smg.addMapping({
      source: 'gza.coffee',
      original: { line: 3, column: 0 },
      generated: { line: 4, column: 2 }
    });

    smg.addMapping({
      source: 'gza.coffee',
      original: { line: 4, column: 0 },
      generated: { line: 5, column: 2 }
    });

    smg.addMapping({
      source: 'gza.coffee',
      original: { line: 5, column: 10 },
      generated: { line: 6, column: 12 }
    });

    var smc = new SourceMapConsumer(smg.toString());

    // Exact
    util.assertMapping(2, 2, '/wu/tang/gza.coffee', 1, 0, null, smc, assert);
    util.assertMapping(3, 2, '/wu/tang/gza.coffee', 2, 0, null, smc, assert);
    util.assertMapping(4, 2, '/wu/tang/gza.coffee', 3, 0, null, smc, assert);
    util.assertMapping(5, 2, '/wu/tang/gza.coffee', 4, 0, null, smc, assert);
    util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 10, null, smc, assert);

    // Fuzzy

    // Generated to original
    util.assertMapping(2, 0, '/wu/tang/gza.coffee', 1, 0, null, smc, assert, true);
    util.assertMapping(2, 9, null, null, null, null, smc, assert, true);
    util.assertMapping(3, 0, '/wu/tang/gza.coffee', 2, 0, null, smc, assert, true);
    util.assertMapping(3, 9, null, null, null, null, smc, assert, true);
    util.assertMapping(4, 0, '/wu/tang/gza.coffee', 3, 0, null, smc, assert, true);
    util.assertMapping(4, 9, null, null, null, null, smc, assert, true);
    util.assertMapping(5, 0, '/wu/tang/gza.coffee', 4, 0, null, smc, assert, true);
    util.assertMapping(5, 9, null, null, null, null, smc, assert, true);
    util.assertMapping(6, 0, '/wu/tang/gza.coffee', 5, 10, null, smc, assert, true);
    util.assertMapping(6, 9, '/wu/tang/gza.coffee', 5, 10, null, smc, assert, true);
    util.assertMapping(6, 13, null, null, null, null, smc, assert, true);

    // Original to generated
    util.assertMapping(3, 2, '/wu/tang/gza.coffee', 1, 1, null, smc, assert, null, true);
    util.assertMapping(4, 2, '/wu/tang/gza.coffee', 2, 3, null, smc, assert, null, true);
    util.assertMapping(5, 2, '/wu/tang/gza.coffee', 3, 6, null, smc, assert, null, true);
    util.assertMapping(6, 12, '/wu/tang/gza.coffee', 4, 9, null, smc, assert, null, true);
    util.assertMapping(6, 12, '/wu/tang/gza.coffee', 5, 9, null, smc, assert, null, true);
    util.assertMapping(null, null, '/wu/tang/gza.coffee', 6, 19, null, smc, assert, null, true);
  };

});
@ -0,0 +1,874 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var IndexedSourceMapConsumer = require('../../lib/source-map/indexed-source-map-consumer').IndexedSourceMapConsumer;
|
||||
var BasicSourceMapConsumer = require('../../lib/source-map/basic-source-map-consumer').BasicSourceMapConsumer;
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
|
||||
exports['test that we can instantiate with a string or an object'] = function (assert, util) {
|
||||
assert.doesNotThrow(function () {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
});
|
||||
assert.doesNotThrow(function () {
|
||||
var map = new SourceMapConsumer(JSON.stringify(util.testMap));
|
||||
});
|
||||
};
|
||||
|
||||
exports['test that the object returned from new SourceMapConsumer inherits from SourceMapConsumer'] = function (assert, util) {
|
||||
assert.ok(new SourceMapConsumer(util.testMap) instanceof SourceMapConsumer);
|
||||
}
|
||||
|
||||
exports['test that a BasicSourceMapConsumer is returned for sourcemaps without sections'] = function(assert, util) {
|
||||
assert.ok(new SourceMapConsumer(util.testMap) instanceof BasicSourceMapConsumer);
|
||||
};
|
||||
|
||||
exports['test that an IndexedSourceMapConsumer is returned for sourcemaps with sections'] = function(assert, util) {
|
||||
assert.ok(new SourceMapConsumer(util.indexedTestMap) instanceof IndexedSourceMapConsumer);
|
||||
};
|
||||
|
||||
exports['test that the `sources` field has the original sources'] = function (assert, util) {
|
||||
var map;
|
||||
var sources;
|
||||
|
||||
map = new SourceMapConsumer(util.testMap);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], '/the/root/one.js');
|
||||
assert.equal(sources[1], '/the/root/two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
|
||||
map = new SourceMapConsumer(util.indexedTestMap);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], '/the/root/one.js');
|
||||
assert.equal(sources[1], '/the/root/two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
|
||||
map = new SourceMapConsumer(util.indexedTestMapDifferentSourceRoots);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], '/the/root/one.js');
|
||||
assert.equal(sources[1], '/different/root/two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
|
||||
map = new SourceMapConsumer(util.testMapNoSourceRoot);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], 'one.js');
|
||||
assert.equal(sources[1], 'two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
|
||||
map = new SourceMapConsumer(util.testMapEmptySourceRoot);
|
||||
sources = map.sources;
|
||||
assert.equal(sources[0], 'one.js');
|
||||
assert.equal(sources[1], 'two.js');
|
||||
assert.equal(sources.length, 2);
|
||||
};
|
||||
|
||||
exports['test that the source root is reflected in a mapping\'s source field'] = function (assert, util) {
|
||||
var map;
|
||||
var mapping;
|
||||
|
||||
map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, '/the/root/two.js');
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, '/the/root/one.js');
|
||||
|
||||
|
||||
map = new SourceMapConsumer(util.testMapNoSourceRoot);
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, 'two.js');
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, 'one.js');
|
||||
|
||||
|
||||
map = new SourceMapConsumer(util.testMapEmptySourceRoot);
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, 'two.js');
|
||||
|
||||
mapping = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 1
|
||||
});
|
||||
assert.equal(mapping.source, 'one.js');
|
||||
};
|
||||
|
||||
exports['test mapping tokens back exactly'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
|
||||
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
|
||||
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
|
||||
|
||||
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
|
||||
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
|
||||
};
|
||||
|
||||
exports['test mapping tokens back exactly in indexed source map'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
|
||||
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
|
||||
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
|
||||
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
|
||||
|
||||
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
|
||||
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
|
||||
};
|
||||
|
||||
|
||||
exports['test mapping tokens back exactly'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
util.assertMapping(1, 1, '/the/root/one.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(1, 5, '/the/root/one.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(1, 9, '/the/root/one.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 21, 'bar', map, assert);
|
||||
util.assertMapping(1, 21, '/the/root/one.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert);
|
||||
util.assertMapping(1, 32, '/the/root/one.js', 2, 14, 'bar', map, assert);
|
||||
|
||||
util.assertMapping(2, 1, '/the/root/two.js', 1, 1, null, map, assert);
|
||||
util.assertMapping(2, 5, '/the/root/two.js', 1, 5, null, map, assert);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 11, null, map, assert);
|
||||
util.assertMapping(2, 18, '/the/root/two.js', 1, 21, 'n', map, assert);
|
||||
util.assertMapping(2, 21, '/the/root/two.js', 2, 3, null, map, assert);
|
||||
util.assertMapping(2, 28, '/the/root/two.js', 2, 10, 'n', map, assert);
|
||||
};
|
||||
|
||||
exports['test mapping tokens fuzzy'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
|
||||
// Finding original positions
|
||||
util.assertMapping(1, 16, '/the/root/one.js', 1, 21, 'bar', map, assert, true);
|
||||
util.assertMapping(1, 26, '/the/root/one.js', 2, 10, 'baz', map, assert, true);
|
||||
util.assertMapping(2, 6, '/the/root/two.js', 1, 11, null, map, assert, true);
|
||||
|
||||
// Finding generated positions
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 20, 'bar', map, assert, null, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 7, 'baz', map, assert, null, true);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 6, null, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test mapping tokens fuzzy in indexed source map'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
|
||||
// Finding original positions
|
||||
util.assertMapping(1, 16, '/the/root/one.js', 1, 21, 'bar', map, assert, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 10, 'baz', map, assert, true);
|
||||
util.assertMapping(2, 6, '/the/root/two.js', 1, 11, null, map, assert, true);
|
||||
|
||||
// Finding generated positions
|
||||
util.assertMapping(1, 18, '/the/root/one.js', 1, 20, 'bar', map, assert, null, true);
|
||||
util.assertMapping(1, 28, '/the/root/one.js', 2, 7, 'baz', map, assert, null, true);
|
||||
util.assertMapping(2, 9, '/the/root/two.js', 1, 6, null, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test mappings and end of lines'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
|
||||
var map = SourceMapConsumer.fromSourceMap(smg);
|
||||
|
||||
// When finding original positions, mappings end at the end of the line.
|
||||
util.assertMapping(2, 3, null, null, null, null, map, assert, true)
|
||||
|
||||
// When finding generated positions, mappings do not end at the end of the line.
|
||||
util.assertMapping(2, 2, 'bar.js', 1, 2, null, map, assert, null, true);
|
||||
};
|
||||
|
||||
exports['test creating source map consumers with )]}\' prefix'] = function (assert, util) {
|
||||
assert.doesNotThrow(function () {
|
||||
var map = new SourceMapConsumer(")]}'" + JSON.stringify(util.testMap));
|
||||
});
|
||||
};
|
||||
|
||||
exports['test eachMapping'] = function (assert, util) {
|
||||
var map;
|
||||
|
||||
map = new SourceMapConsumer(util.testMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.generatedLine >= previousLine);
|
||||
|
||||
assert.ok(mapping.source === '/the/root/one.js' || mapping.source === '/the/root/two.js');
|
||||
|
||||
if (mapping.generatedLine === previousLine) {
|
||||
assert.ok(mapping.generatedColumn >= previousColumn);
|
||||
previousColumn = mapping.generatedColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.generatedLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
});
|
||||
|
||||
map = new SourceMapConsumer(util.testMapNoSourceRoot);
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.source === 'one.js' || mapping.source === 'two.js');
|
||||
});
|
||||
|
||||
map = new SourceMapConsumer(util.testMapEmptySourceRoot);
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.source === 'one.js' || mapping.source === 'two.js');
|
||||
});
|
||||
};
|
||||
|
||||
exports['test eachMapping for indexed source maps'] = function(assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.generatedLine >= previousLine);
|
||||
|
||||
if (mapping.source) {
|
||||
assert.equal(mapping.source.indexOf(util.testMap.sourceRoot), 0);
|
||||
}
|
||||
|
||||
if (mapping.generatedLine === previousLine) {
|
||||
assert.ok(mapping.generatedColumn >= previousColumn);
|
||||
previousColumn = mapping.generatedColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.generatedLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
exports['test iterating over mappings in a different order'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
var previousSource = "";
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.source >= previousSource);
|
||||
|
||||
if (mapping.source === previousSource) {
|
||||
assert.ok(mapping.originalLine >= previousLine);
|
||||
|
||||
if (mapping.originalLine === previousLine) {
|
||||
assert.ok(mapping.originalColumn >= previousColumn);
|
||||
previousColumn = mapping.originalColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.originalLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}
|
||||
else {
|
||||
previousSource = mapping.source;
|
||||
previousLine = -Infinity;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}, null, SourceMapConsumer.ORIGINAL_ORDER);
|
||||
};
|
||||
|
||||
exports['test iterating over mappings in a different order in indexed source maps'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
var previousLine = -Infinity;
|
||||
var previousColumn = -Infinity;
|
||||
var previousSource = "";
|
||||
map.eachMapping(function (mapping) {
|
||||
assert.ok(mapping.source >= previousSource);
|
||||
|
||||
if (mapping.source === previousSource) {
|
||||
assert.ok(mapping.originalLine >= previousLine);
|
||||
|
||||
if (mapping.originalLine === previousLine) {
|
||||
assert.ok(mapping.originalColumn >= previousColumn);
|
||||
previousColumn = mapping.originalColumn;
|
||||
}
|
||||
else {
|
||||
previousLine = mapping.originalLine;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}
|
||||
else {
|
||||
previousSource = mapping.source;
|
||||
previousLine = -Infinity;
|
||||
previousColumn = -Infinity;
|
||||
}
|
||||
}, null, SourceMapConsumer.ORIGINAL_ORDER);
|
||||
};
|
||||
|
||||
exports['test that we can set the context for `this` in eachMapping'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMap);
|
||||
var context = {};
|
||||
map.eachMapping(function () {
|
||||
assert.equal(this, context);
|
||||
}, context);
|
||||
};
|
||||
|
||||
exports['test that we can set the context for `this` in eachMapping in indexed source maps'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
var context = {};
|
||||
map.eachMapping(function () {
|
||||
assert.equal(this, context);
|
||||
}, context);
|
||||
};
|
||||
|
||||
exports['test that the `sourcesContent` field has the original sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
|
||||
var sourcesContent = map.sourcesContent;
|
||||
|
||||
assert.equal(sourcesContent[0], ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(sourcesContent[1], ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(sourcesContent.length, 2);
|
||||
};
|
||||
|
||||
exports['test that we can get the original sources for the sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMapWithSourcesContent);
|
||||
var sources = map.sources;
|
||||
|
||||
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("/the/root/three.js");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("three.js");
|
||||
}, Error);
|
||||
};
|
||||
|
||||
exports['test that we can get the original source content with relative source paths'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.testMapRelativeSources);
|
||||
var sources = map.sources;
|
||||
|
||||
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("/the/root/three.js");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("three.js");
|
||||
}, Error);
|
||||
};
|
||||
|
||||
exports['test that we can get the original source content for the sources on an indexed source map'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer(util.indexedTestMap);
|
||||
var sources = map.sources;
|
||||
|
||||
assert.equal(map.sourceContentFor(sources[0]), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor(sources[1]), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.equal(map.sourceContentFor("one.js"), ' ONE.foo = function (bar) {\n return baz(bar);\n };');
|
||||
assert.equal(map.sourceContentFor("two.js"), ' TWO.inc = function (n) {\n return n + 1;\n };');
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("/the/root/three.js");
|
||||
}, Error);
|
||||
assert.throws(function () {
|
||||
map.sourceContentFor("three.js");
|
||||
}, Error);
|
||||
};
|
||||
|
||||
|
||||
exports['test sourceRoot + generatedPositionFor'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'foo/bar',
|
||||
file: 'baz.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 5, column: 5 },
|
||||
generated: { line: 6, column: 6 },
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
// Should handle without sourceRoot.
|
||||
var pos = map.generatedPositionFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
|
||||
// Should handle with sourceRoot.
|
||||
var pos = map.generatedPositionFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'foo/bar/bang.coffee'
|
||||
});
|
||||
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 1 },
|
||||
generated: { line: 3, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 3, column: 3 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 3, column: 1 },
|
||||
generated: { line: 4, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 2,
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 2);
|
||||
assert.equal(mappings[0].line, 3);
|
||||
assert.equal(mappings[0].column, 2);
|
||||
assert.equal(mappings[1].line, 3);
|
||||
assert.equal(mappings[1].column, 3);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor for line with no mappings'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 3, column: 1 },
|
||||
generated: { line: 4, column: 2 },
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 2,
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 0);
|
||||
};
|
||||
|
||||
exports['test allGeneratedPositionsFor source map with no mappings'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 2,
|
||||
source: 'bar.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 0);
|
||||
};
|
||||
|
||||
exports['test computeColumnSpans'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 1 },
|
||||
generated: { line: 2, column: 1 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 2, column: 10 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 2, column: 3 },
|
||||
generated: { line: 2, column: 20 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 3, column: 1 },
|
||||
generated: { line: 3, column: 1 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 3, column: 2 },
|
||||
generated: { line: 3, column: 2 },
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
map.computeColumnSpans();
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 1,
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 1);
|
||||
// assert.equal(mappings[0].lastColumn, Infinity);
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 2,
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 3);
|
||||
assert.equal(mappings[0].lastColumn, 9);
|
||||
assert.equal(mappings[1].lastColumn, 19);
|
||||
assert.equal(mappings[2].lastColumn, Infinity);
|
||||
|
||||
var mappings = map.allGeneratedPositionsFor({
|
||||
line: 3,
|
||||
source: 'foo.coffee'
|
||||
});
|
||||
|
||||
assert.equal(mappings.length, 2);
|
||||
assert.equal(mappings[0].lastColumn, 1);
|
||||
assert.equal(mappings[1].lastColumn, Infinity);
|
||||
};
|
||||
|
||||
exports['test sourceRoot + originalPositionFor'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'foo/bar',
|
||||
file: 'baz.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bang.coffee'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2,
|
||||
});
|
||||
|
||||
// Should always have the prepended source root
|
||||
assert.equal(pos.source, 'foo/bar/bang.coffee');
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
};
|
||||
|
||||
exports['test github issue #56'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'http://',
|
||||
file: 'www.example.com/foo.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'www.example.com/original.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var sources = map.sources;
|
||||
assert.equal(sources.length, 1);
|
||||
assert.equal(sources[0], 'http://www.example.com/original.js');
|
||||
};
|
||||
|
||||
exports['test github issue #43'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'http://example.com',
|
||||
file: 'foo.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'http://cdn.example.com/original.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var sources = map.sources;
|
||||
assert.equal(sources.length, 1,
|
||||
'Should only be one source.');
|
||||
assert.equal(sources[0], 'http://cdn.example.com/original.js',
|
||||
'Should not be joined with the sourceRoot.');
|
||||
};
|
||||
|
||||
exports['test absolute path, but same host sources'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
sourceRoot: 'http://example.com/foo/bar',
|
||||
file: 'foo.js'
|
||||
});
|
||||
map.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: '/original.js'
|
||||
});
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var sources = map.sources;
|
||||
assert.equal(sources.length, 1,
|
||||
'Should only be one source.');
|
||||
assert.equal(sources[0], 'http://example.com/original.js',
|
||||
'Source should be relative the host of the source root.');
|
||||
};
|
||||
|
||||
exports['test indexed source map errors when sections are out of order by line'] = function(assert, util) {
|
||||
// Make a deep copy of the indexedTestMap
|
||||
var misorderedIndexedTestMap = JSON.parse(JSON.stringify(util.indexedTestMap));
|
||||
|
||||
misorderedIndexedTestMap.sections[0].offset = {
|
||||
line: 2,
|
||||
column: 0
|
||||
};
|
||||
|
||||
assert.throws(function() {
|
||||
new SourceMapConsumer(misorderedIndexedTestMap);
|
||||
}, Error);
|
||||
};
|
||||
|
||||
exports['test github issue #64'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sourceRoot": "http://example.com/",
|
||||
"sources": ["/a"],
|
||||
"names": [],
|
||||
"mappings": "AACA",
|
||||
"sourcesContent": ["foo"]
|
||||
});
|
||||
|
||||
assert.equal(map.sourceContentFor("a"), "foo");
|
||||
assert.equal(map.sourceContentFor("/a"), "foo");
|
||||
};
|
||||
|
||||
exports['test bug 885597'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sourceRoot": "file:///Users/AlGore/Invented/The/Internet/",
|
||||
"sources": ["/a"],
|
||||
"names": [],
|
||||
"mappings": "AACA",
|
||||
"sourcesContent": ["foo"]
|
||||
});
|
||||
|
||||
var s = map.sources[0];
|
||||
assert.equal(map.sourceContentFor(s), "foo");
|
||||
};
|
||||
|
||||
exports['test github issue #72, duplicate sources'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sources": ["source1.js", "source1.js", "source3.js"],
|
||||
"names": [],
|
||||
"mappings": ";EAAC;;IAEE;;MEEE",
|
||||
"sourceRoot": "http://example.com"
|
||||
});
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(pos.source, 'http://example.com/source1.js');
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 4,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(pos.source, 'http://example.com/source1.js');
|
||||
assert.equal(pos.line, 3);
|
||||
assert.equal(pos.column, 3);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 6,
|
||||
column: 6
|
||||
});
|
||||
assert.equal(pos.source, 'http://example.com/source3.js');
|
||||
assert.equal(pos.line, 5);
|
||||
assert.equal(pos.column, 5);
|
||||
};
|
||||
|
||||
exports['test github issue #72, duplicate names'] = function (assert, util) {
|
||||
var map = new SourceMapConsumer({
|
||||
"version": 3,
|
||||
"file": "foo.js",
|
||||
"sources": ["source.js"],
|
||||
"names": ["name1", "name1", "name3"],
|
||||
"mappings": ";EAACA;;IAEEA;;MAEEE",
|
||||
"sourceRoot": "http://example.com"
|
||||
});
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(pos.name, 'name1');
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 4,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(pos.name, 'name1');
|
||||
assert.equal(pos.line, 3);
|
||||
assert.equal(pos.column, 3);
|
||||
|
||||
var pos = map.originalPositionFor({
|
||||
line: 6,
|
||||
column: 6
|
||||
});
|
||||
assert.equal(pos.name, 'name3');
|
||||
assert.equal(pos.line, 5);
|
||||
assert.equal(pos.column, 5);
|
||||
};
|
||||
|
||||
exports['test SourceMapConsumer.fromSourceMap'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({
|
||||
sourceRoot: 'http://example.com/',
|
||||
file: 'foo.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 1, column: 1 },
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'bar.js'
|
||||
});
|
||||
smg.addMapping({
|
||||
original: { line: 2, column: 2 },
|
||||
generated: { line: 4, column: 4 },
|
||||
source: 'baz.js',
|
||||
name: 'dirtMcGirt'
|
||||
});
|
||||
smg.setSourceContent('baz.js', 'baz.js content');
|
||||
|
||||
var smc = SourceMapConsumer.fromSourceMap(smg);
|
||||
assert.equal(smc.file, 'foo.js');
|
||||
assert.equal(smc.sourceRoot, 'http://example.com/');
|
||||
assert.equal(smc.sources.length, 2);
|
||||
assert.equal(smc.sources[0], 'http://example.com/bar.js');
|
||||
assert.equal(smc.sources[1], 'http://example.com/baz.js');
|
||||
assert.equal(smc.sourceContentFor('baz.js'), 'baz.js content');
|
||||
|
||||
var pos = smc.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(pos.line, 1);
|
||||
assert.equal(pos.column, 1);
|
||||
assert.equal(pos.source, 'http://example.com/bar.js');
|
||||
assert.equal(pos.name, null);
|
||||
|
||||
pos = smc.generatedPositionFor({
|
||||
line: 1,
|
||||
column: 1,
|
||||
source: 'http://example.com/bar.js'
|
||||
});
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
|
||||
pos = smc.originalPositionFor({
|
||||
line: 4,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(pos.line, 2);
|
||||
assert.equal(pos.column, 2);
|
||||
assert.equal(pos.source, 'http://example.com/baz.js');
|
||||
assert.equal(pos.name, 'dirtMcGirt');
|
||||
|
||||
pos = smc.generatedPositionFor({
|
||||
line: 2,
|
||||
column: 2,
|
||||
source: 'http://example.com/baz.js'
|
||||
});
|
||||
assert.equal(pos.line, 4);
|
||||
assert.equal(pos.column, 4);
|
||||
};
|
||||
});
|
@ -0,0 +1,679 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var SourceNode = require('../../lib/source-map/source-node').SourceNode;
|
||||
var util = require('./util');
|
||||
|
||||
exports['test some simple stuff'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
assert.ok(true);
|
||||
|
||||
var map = new SourceMapGenerator().toJSON();
|
||||
assert.ok(!('file' in map));
|
||||
assert.ok(!('sourceRoot' in map));
|
||||
};
|
||||
|
||||
exports['test JSON serialization'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
assert.equal(map.toString(), JSON.stringify(map));
|
||||
};
|
||||
|
||||
exports['test adding mappings (case 1)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings (case 2)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js',
|
||||
original: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings (case 3)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js',
|
||||
original: { line: 1, column: 1 },
|
||||
name: 'someToken'
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings (invalid)'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
// Not enough info.
|
||||
assert.throws(function () {
|
||||
map.addMapping({});
|
||||
});
|
||||
|
||||
// Original file position, but no source.
|
||||
assert.throws(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test adding mappings with skipValidation'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.',
|
||||
skipValidation: true
|
||||
});
|
||||
|
||||
// Not enough info, caught by `util.getArgs`
|
||||
assert.throws(function () {
|
||||
map.addMapping({});
|
||||
});
|
||||
|
||||
// Original file position, but no source. Not checked.
|
||||
assert.doesNotThrow(function () {
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 }
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test that the correct mappings are being generated'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'min.js',
|
||||
sourceRoot: '/the/root'
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 5 },
|
||||
original: { line: 1, column: 5 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 9 },
|
||||
original: { line: 1, column: 11 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 18 },
|
||||
original: { line: 1, column: 21 },
|
||||
source: 'one.js',
|
||||
name: 'bar'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 21 },
|
||||
original: { line: 2, column: 3 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 28 },
|
||||
original: { line: 2, column: 10 },
|
||||
source: 'one.js',
|
||||
name: 'baz'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 32 },
|
||||
original: { line: 2, column: 14 },
|
||||
source: 'one.js',
|
||||
name: 'bar'
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 5 },
|
||||
original: { line: 1, column: 5 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 9 },
|
||||
original: { line: 1, column: 11 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 18 },
|
||||
original: { line: 1, column: 21 },
|
||||
source: 'two.js',
|
||||
name: 'n'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 21 },
|
||||
original: { line: 2, column: 3 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 28 },
|
||||
original: { line: 2, column: 10 },
|
||||
source: 'two.js',
|
||||
name: 'n'
|
||||
});
|
||||
|
||||
map = JSON.parse(map.toString());
|
||||
|
||||
util.assertEqualMaps(assert, map, util.testMap);
|
||||
};
|
||||
|
||||
exports['test that adding a mapping with an empty string name does not break generation'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'generated-foo.js',
|
||||
sourceRoot: '.'
|
||||
});
|
||||
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
source: 'bar.js',
|
||||
original: { line: 1, column: 1 },
|
||||
name: ''
|
||||
});
|
||||
|
||||
assert.doesNotThrow(function () {
|
||||
JSON.parse(map.toString());
|
||||
});
|
||||
};
|
||||
|
||||
exports['test that source content can be set'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'min.js',
|
||||
sourceRoot: '/the/root'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'one.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 1 },
|
||||
original: { line: 1, column: 1 },
|
||||
source: 'two.js'
|
||||
});
|
||||
map.setSourceContent('one.js', 'one file content');
|
||||
|
||||
map = JSON.parse(map.toString());
|
||||
assert.equal(map.sources[0], 'one.js');
|
||||
assert.equal(map.sources[1], 'two.js');
|
||||
assert.equal(map.sourcesContent[0], 'one file content');
|
||||
assert.equal(map.sourcesContent[1], null);
|
||||
};
|
||||
|
||||
exports['test .fromSourceMap'] = function (assert, util) {
|
||||
var map = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(util.testMap));
|
||||
util.assertEqualMaps(assert, map.toJSON(), util.testMap);
|
||||
};
|
||||
|
||||
exports['test .fromSourceMap with sourcesContent'] = function (assert, util) {
|
||||
var map = SourceMapGenerator.fromSourceMap(
|
||||
new SourceMapConsumer(util.testMapWithSourcesContent));
|
||||
util.assertEqualMaps(assert, map.toJSON(), util.testMapWithSourcesContent);
|
||||
};
|
||||
|
||||
exports['test applySourceMap'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null, [
|
||||
new SourceNode(2, 0, 'fileX', 'lineX2\n'),
|
||||
'genA1\n',
|
||||
new SourceNode(2, 0, 'fileY', 'lineY2\n'),
|
||||
'genA2\n',
|
||||
new SourceNode(1, 0, 'fileX', 'lineX1\n'),
|
||||
'genA3\n',
|
||||
new SourceNode(1, 0, 'fileY', 'lineY1\n')
|
||||
]);
|
||||
var mapStep1 = node.toStringWithSourceMap({
|
||||
file: 'fileA'
|
||||
}).map;
|
||||
mapStep1.setSourceContent('fileX', 'lineX1\nlineX2\n');
|
||||
mapStep1 = mapStep1.toJSON();
|
||||
|
||||
node = new SourceNode(null, null, null, [
|
||||
'gen1\n',
|
||||
new SourceNode(1, 0, 'fileA', 'lineA1\n'),
|
||||
new SourceNode(2, 0, 'fileA', 'lineA2\n'),
|
||||
new SourceNode(3, 0, 'fileA', 'lineA3\n'),
|
||||
new SourceNode(4, 0, 'fileA', 'lineA4\n'),
|
||||
new SourceNode(1, 0, 'fileB', 'lineB1\n'),
|
||||
new SourceNode(2, 0, 'fileB', 'lineB2\n'),
|
||||
'gen2\n'
|
||||
]);
|
||||
var mapStep2 = node.toStringWithSourceMap({
|
||||
file: 'fileGen'
|
||||
}).map;
|
||||
mapStep2.setSourceContent('fileB', 'lineB1\nlineB2\n');
|
||||
mapStep2 = mapStep2.toJSON();
|
||||
|
||||
node = new SourceNode(null, null, null, [
|
||||
'gen1\n',
|
||||
new SourceNode(2, 0, 'fileX', 'lineA1\n'),
|
||||
new SourceNode(2, 0, 'fileA', 'lineA2\n'),
|
||||
new SourceNode(2, 0, 'fileY', 'lineA3\n'),
|
||||
new SourceNode(4, 0, 'fileA', 'lineA4\n'),
|
||||
new SourceNode(1, 0, 'fileB', 'lineB1\n'),
|
||||
new SourceNode(2, 0, 'fileB', 'lineB2\n'),
|
||||
'gen2\n'
|
||||
]);
|
||||
var expectedMap = node.toStringWithSourceMap({
|
||||
file: 'fileGen'
|
||||
}).map;
|
||||
expectedMap.setSourceContent('fileX', 'lineX1\nlineX2\n');
|
||||
expectedMap.setSourceContent('fileB', 'lineB1\nlineB2\n');
|
||||
expectedMap = expectedMap.toJSON();
|
||||
|
||||
// apply source map "mapStep1" to "mapStep2"
|
||||
var generator = SourceMapGenerator.fromSourceMap(new SourceMapConsumer(mapStep2));
|
||||
generator.applySourceMap(new SourceMapConsumer(mapStep1));
|
||||
var actualMap = generator.toJSON();
|
||||
|
||||
util.assertEqualMaps(assert, actualMap, expectedMap);
|
||||
};
|
||||
|
||||
exports['test applySourceMap throws when file is missing'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
var map2 = new SourceMapGenerator();
|
||||
assert.throws(function() {
|
||||
map.applySourceMap(new SourceMapConsumer(map2.toJSON()));
|
||||
});
|
||||
};
|
||||
|
||||
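// applySourceMap(consumer, sourceFile, sourceMapPath) takes two optional extra
// arguments: the name the source had in this map and the directory the applied
// map was loaded from, so that relative source paths can be rewritten
// correctly. The assertions below exercise relative, root-relative and
// absolute-URL values for that path.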
exports['test the two additional parameters of applySourceMap'] = function (assert, util) {
|
||||
// Assume the following directory structure:
|
||||
//
|
||||
// http://foo.org/
|
||||
// bar.coffee
|
||||
// app/
|
||||
// coffee/
|
||||
// foo.coffee
|
||||
// temp/
|
||||
// bundle.js
|
||||
// temp_maps/
|
||||
// bundle.js.map
|
||||
// public/
|
||||
// bundle.min.js
|
||||
// bundle.min.js.map
|
||||
//
|
||||
// http://www.example.com/
|
||||
// baz.coffee
|
||||
|
||||
var bundleMap = new SourceMapGenerator({
|
||||
file: 'bundle.js'
|
||||
});
|
||||
bundleMap.addMapping({
|
||||
generated: { line: 3, column: 3 },
|
||||
original: { line: 2, column: 2 },
|
||||
source: '../../coffee/foo.coffee'
|
||||
});
|
||||
bundleMap.setSourceContent('../../coffee/foo.coffee', 'foo coffee');
|
||||
bundleMap.addMapping({
|
||||
generated: { line: 13, column: 13 },
|
||||
original: { line: 12, column: 12 },
|
||||
source: '/bar.coffee'
|
||||
});
|
||||
bundleMap.setSourceContent('/bar.coffee', 'bar coffee');
|
||||
bundleMap.addMapping({
|
||||
generated: { line: 23, column: 23 },
|
||||
original: { line: 22, column: 22 },
|
||||
source: 'http://www.example.com/baz.coffee'
|
||||
});
|
||||
bundleMap.setSourceContent(
|
||||
'http://www.example.com/baz.coffee',
|
||||
'baz coffee'
|
||||
);
|
||||
bundleMap = new SourceMapConsumer(bundleMap.toJSON());
|
||||
|
||||
var minifiedMap = new SourceMapGenerator({
|
||||
file: 'bundle.min.js',
|
||||
sourceRoot: '..'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 3, column: 3 },
|
||||
source: 'temp/bundle.js'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 11, column: 11 },
|
||||
original: { line: 13, column: 13 },
|
||||
source: 'temp/bundle.js'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 21, column: 21 },
|
||||
original: { line: 23, column: 23 },
|
||||
source: 'temp/bundle.js'
|
||||
});
|
||||
minifiedMap = new SourceMapConsumer(minifiedMap.toJSON());
|
||||
|
||||
var expectedMap = function (sources) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'bundle.min.js',
|
||||
sourceRoot: '..'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 2, column: 2 },
|
||||
source: sources[0]
|
||||
});
|
||||
map.setSourceContent(sources[0], 'foo coffee');
|
||||
map.addMapping({
|
||||
generated: { line: 11, column: 11 },
|
||||
original: { line: 12, column: 12 },
|
||||
source: sources[1]
|
||||
});
|
||||
map.setSourceContent(sources[1], 'bar coffee');
|
||||
map.addMapping({
|
||||
generated: { line: 21, column: 21 },
|
||||
original: { line: 22, column: 22 },
|
||||
source: sources[2]
|
||||
});
|
||||
map.setSourceContent(sources[2], 'baz coffee');
|
||||
return map.toJSON();
|
||||
}
|
||||
|
||||
var actualMap = function (aSourceMapPath) {
|
||||
var map = SourceMapGenerator.fromSourceMap(minifiedMap);
|
||||
// Note that relying on `bundleMap.file` (which is simply 'bundle.js')
|
||||
// instead of supplying the second parameter wouldn't work here.
|
||||
map.applySourceMap(bundleMap, '../temp/bundle.js', aSourceMapPath);
|
||||
return map.toJSON();
|
||||
}
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('../temp/temp_maps'), expectedMap([
|
||||
'coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('/app/temp/temp_maps'), expectedMap([
|
||||
'/app/coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('http://foo.org/app/temp/temp_maps'), expectedMap([
|
||||
'http://foo.org/app/coffee/foo.coffee',
|
||||
'http://foo.org/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
// If the third parameter is omitted or set to the current working
|
||||
// directory we get incorrect source paths:
|
||||
|
||||
util.assertEqualMaps(assert, actualMap(), expectedMap([
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap(''), expectedMap([
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('.'), expectedMap([
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
|
||||
util.assertEqualMaps(assert, actualMap('./'), expectedMap([
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee'
|
||||
]));
|
||||
};
|
||||
|
||||
exports['test applySourceMap name handling'] = function (assert, util) {
|
||||
// Imagine some CoffeeScript code being compiled into JavaScript and then
|
||||
// minified.
|
||||
|
||||
var assertName = function(coffeeName, jsName, expectedName) {
|
||||
var minifiedMap = new SourceMapGenerator({
|
||||
file: 'test.js.min'
|
||||
});
|
||||
minifiedMap.addMapping({
|
||||
generated: { line: 1, column: 4 },
|
||||
original: { line: 1, column: 4 },
|
||||
source: 'test.js',
|
||||
name: jsName
|
||||
});
|
||||
|
||||
var coffeeMap = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
coffeeMap.addMapping({
|
||||
generated: { line: 1, column: 4 },
|
||||
original: { line: 1, column: 0 },
|
||||
source: 'test.coffee',
|
||||
name: coffeeName
|
||||
});
|
||||
|
||||
minifiedMap.applySourceMap(new SourceMapConsumer(coffeeMap.toJSON()));
|
||||
|
||||
new SourceMapConsumer(minifiedMap.toJSON()).eachMapping(function(mapping) {
|
||||
assert.equal(mapping.name, expectedName);
|
||||
});
|
||||
};
|
||||
|
||||
// `foo = 1` -> `var foo = 1;` -> `var a=1`
|
||||
// CoffeeScript doesn’t rename variables, so there’s no need for it to
|
||||
// provide names in its source maps. Minifiers do rename variables and
|
||||
// therefore do provide names in their source maps. So that name should be
|
||||
// retained if the original map lacks names.
|
||||
assertName(null, 'foo', 'foo');
|
||||
|
||||
// `foo = 1` -> `var coffee$foo = 1;` -> `var a=1`
|
||||
// Imagine that CoffeeScript prefixed all variables with `coffee$`. Even
|
||||
// though the minifier then also provides a name, the original name is
|
||||
// what corresponds to the source.
|
||||
assertName('foo', 'coffee$foo', 'foo');
|
||||
|
||||
// `foo = 1` -> `var coffee$foo = 1;` -> `var coffee$foo=1`
|
||||
// Minifiers can turn off variable mangling. Then there’s no need to
|
||||
// provide names in the source map, but the names from the original map are
|
||||
// still needed.
|
||||
assertName('foo', null, 'foo');
|
||||
|
||||
// `foo = 1` -> `var foo = 1;` -> `var foo=1`
|
||||
// No renaming at all.
|
||||
assertName(null, null, null);
|
||||
};
|
||||
|
||||
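// Mappings added out of order must be emitted sorted by generated position,
// and the two identical { line: 2, column: 0 } entries collapse to the single
// unmapped segment 'A' in the expected mappings string below.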
exports['test sorting with duplicate generated mappings'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
original: { line: 2, column: 0 },
|
||||
source: 'a.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 0 }
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 2, column: 0 }
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 0 },
|
||||
original: { line: 1, column: 0 },
|
||||
source: 'a.js'
|
||||
});
|
||||
|
||||
util.assertEqualMaps(assert, map.toJSON(), {
|
||||
version: 3,
|
||||
file: 'test.js',
|
||||
sources: ['a.js'],
|
||||
names: [],
|
||||
mappings: 'AAAA;A;AACA'
|
||||
});
|
||||
};
|
||||
|
||||
exports['test ignore duplicate mappings.'] = function (assert, util) {
|
||||
var init = { file: 'min.js', sourceRoot: '/the/root' };
|
||||
var map1, map2;
|
||||
|
||||
// null original source location
|
||||
var nullMapping1 = {
|
||||
generated: { line: 1, column: 0 }
|
||||
};
|
||||
var nullMapping2 = {
|
||||
generated: { line: 2, column: 2 }
|
||||
};
|
||||
|
||||
map1 = new SourceMapGenerator(init);
|
||||
map2 = new SourceMapGenerator(init);
|
||||
|
||||
map1.addMapping(nullMapping1);
|
||||
map1.addMapping(nullMapping1);
|
||||
|
||||
map2.addMapping(nullMapping1);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
map1.addMapping(nullMapping2);
|
||||
map1.addMapping(nullMapping1);
|
||||
|
||||
map2.addMapping(nullMapping2);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
// original source location
|
||||
var srcMapping1 = {
|
||||
generated: { line: 1, column: 0 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'srcMapping1.js'
|
||||
};
|
||||
var srcMapping2 = {
|
||||
generated: { line: 2, column: 2 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'srcMapping2.js'
|
||||
};
|
||||
|
||||
map1 = new SourceMapGenerator(init);
|
||||
map2 = new SourceMapGenerator(init);
|
||||
|
||||
map1.addMapping(srcMapping1);
|
||||
map1.addMapping(srcMapping1);
|
||||
|
||||
map2.addMapping(srcMapping1);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
map1.addMapping(srcMapping2);
|
||||
map1.addMapping(srcMapping1);
|
||||
|
||||
map2.addMapping(srcMapping2);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
// full original source and name information
|
||||
var fullMapping1 = {
|
||||
generated: { line: 1, column: 0 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'fullMapping1.js',
|
||||
name: 'fullMapping1'
|
||||
};
|
||||
var fullMapping2 = {
|
||||
generated: { line: 2, column: 2 },
|
||||
original: { line: 11, column: 0 },
|
||||
source: 'fullMapping2.js',
|
||||
name: 'fullMapping2'
|
||||
};
|
||||
|
||||
map1 = new SourceMapGenerator(init);
|
||||
map2 = new SourceMapGenerator(init);
|
||||
|
||||
map1.addMapping(fullMapping1);
|
||||
map1.addMapping(fullMapping1);
|
||||
|
||||
map2.addMapping(fullMapping1);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
|
||||
map1.addMapping(fullMapping2);
|
||||
map1.addMapping(fullMapping1);
|
||||
|
||||
map2.addMapping(fullMapping2);
|
||||
|
||||
util.assertEqualMaps(assert, map1.toJSON(), map2.toJSON());
|
||||
};
|
||||
|
||||
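// Regression test: 'a.js' and 'foo' are referenced by two mappings but must
// appear only once in the resulting sources and names arrays.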
exports['test github issue #72, check for duplicate names or sources'] = function (assert, util) {
|
||||
var map = new SourceMapGenerator({
|
||||
file: 'test.js'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 1, column: 1 },
|
||||
original: { line: 2, column: 2 },
|
||||
source: 'a.js',
|
||||
name: 'foo'
|
||||
});
|
||||
map.addMapping({
|
||||
generated: { line: 3, column: 3 },
|
||||
original: { line: 4, column: 4 },
|
||||
source: 'a.js',
|
||||
name: 'foo'
|
||||
});
|
||||
util.assertEqualMaps(assert, map.toJSON(), {
|
||||
version: 3,
|
||||
file: 'test.js',
|
||||
sources: ['a.js'],
|
||||
names: ['foo'],
|
||||
mappings: 'CACEA;;GAEEA'
|
||||
});
|
||||
};
|
||||
|
||||
exports['test setting sourcesContent to null when already null'] = function (assert, util) {
|
||||
var smg = new SourceMapGenerator({ file: "foo.js" });
|
||||
assert.doesNotThrow(function() {
|
||||
smg.setSourceContent("bar.js", null);
|
||||
});
|
||||
};
|
||||
|
||||
});
|
612
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/source-map/test-source-node.js
generated
vendored
Normal file
@ -0,0 +1,612 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapGenerator = require('../../lib/source-map/source-map-generator').SourceMapGenerator;
|
||||
var SourceMapConsumer = require('../../lib/source-map/source-map-consumer').SourceMapConsumer;
|
||||
var SourceNode = require('../../lib/source-map/source-node').SourceNode;
|
||||
|
||||
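// Runs the wrapped test once per newline style ('\n' and '\r\n'), passing the
// separator as the extra `nl` argument so each test covers both line endings.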
function forEachNewline(fn) {
|
||||
return function (assert, util) {
|
||||
['\n', '\r\n'].forEach(fn.bind(null, assert, util));
|
||||
}
|
||||
}
|
||||
|
||||
exports['test .add()'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null);
|
||||
|
||||
// Adding a string works.
|
||||
node.add('function noop() {}');
|
||||
|
||||
// Adding another source node works.
|
||||
node.add(new SourceNode(null, null, null));
|
||||
|
||||
// Adding an array works.
|
||||
node.add(['function foo() {',
|
||||
new SourceNode(null, null, null,
|
||||
'return 10;'),
|
||||
'}']);
|
||||
|
||||
// Adding other stuff doesn't.
|
||||
assert.throws(function () {
|
||||
node.add({});
|
||||
});
|
||||
assert.throws(function () {
|
||||
node.add(function () {});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test .prepend()'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null);
|
||||
|
||||
// Prepending a string works.
|
||||
node.prepend('function noop() {}');
|
||||
assert.equal(node.children[0], 'function noop() {}');
|
||||
assert.equal(node.children.length, 1);
|
||||
|
||||
// Prepending another source node works.
|
||||
node.prepend(new SourceNode(null, null, null));
|
||||
assert.equal(node.children[0], '');
|
||||
assert.equal(node.children[1], 'function noop() {}');
|
||||
assert.equal(node.children.length, 2);
|
||||
|
||||
// Prepending an array works.
|
||||
node.prepend(['function foo() {',
|
||||
new SourceNode(null, null, null,
|
||||
'return 10;'),
|
||||
'}']);
|
||||
assert.equal(node.children[0], 'function foo() {');
|
||||
assert.equal(node.children[1], 'return 10;');
|
||||
assert.equal(node.children[2], '}');
|
||||
assert.equal(node.children[3], '');
|
||||
assert.equal(node.children[4], 'function noop() {}');
|
||||
assert.equal(node.children.length, 5);
|
||||
|
||||
// Prepending other stuff doesn't.
|
||||
assert.throws(function () {
|
||||
node.prepend({});
|
||||
});
|
||||
assert.throws(function () {
|
||||
node.prepend(function () {});
|
||||
});
|
||||
};
|
||||
|
||||
exports['test .toString()'] = function (assert, util) {
|
||||
assert.equal((new SourceNode(null, null, null,
|
||||
['function foo() {',
|
||||
new SourceNode(null, null, null, 'return 10;'),
|
||||
'}'])).toString(),
|
||||
'function foo() {return 10;}');
|
||||
};
|
||||
|
||||
exports['test .join()'] = function (assert, util) {
|
||||
assert.equal((new SourceNode(null, null, null,
|
||||
['a', 'b', 'c', 'd'])).join(', ').toString(),
|
||||
'a, b, c, d');
|
||||
};
|
||||
|
||||
exports['test .walk()'] = function (assert, util) {
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {\n',
|
||||
' ', new SourceNode(1, 0, 'a.js', ['someCall()']), ';\n',
|
||||
' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';\n',
|
||||
'}());']);
|
||||
var expected = [
|
||||
{ str: '(function () {\n', source: null, line: null, column: null },
|
||||
{ str: ' ', source: null, line: null, column: null },
|
||||
{ str: 'someCall()', source: 'a.js', line: 1, column: 0 },
|
||||
{ str: ';\n', source: null, line: null, column: null },
|
||||
{ str: ' ', source: null, line: null, column: null },
|
||||
{ str: 'if (foo) bar()', source: 'b.js', line: 2, column: 0 },
|
||||
{ str: ';\n', source: null, line: null, column: null },
|
||||
{ str: '}());', source: null, line: null, column: null },
|
||||
];
|
||||
var i = 0;
|
||||
node.walk(function (chunk, loc) {
|
||||
assert.equal(expected[i].str, chunk);
|
||||
assert.equal(expected[i].source, loc.source);
|
||||
assert.equal(expected[i].line, loc.line);
|
||||
assert.equal(expected[i].column, loc.column);
|
||||
i++;
|
||||
});
|
||||
};
|
||||
|
||||
exports['test .replaceRight'] = function (assert, util) {
|
||||
var node;
|
||||
|
||||
// Not nested
|
||||
node = new SourceNode(null, null, null, 'hello world');
|
||||
node.replaceRight(/world/, 'universe');
|
||||
assert.equal(node.toString(), 'hello universe');
|
||||
|
||||
// Nested
|
||||
node = new SourceNode(null, null, null,
|
||||
[new SourceNode(null, null, null, 'hey sexy mama, '),
|
||||
new SourceNode(null, null, null, 'want to kill all humans?')]);
|
||||
node.replaceRight(/kill all humans/, 'watch Futurama');
|
||||
assert.equal(node.toString(), 'hey sexy mama, want to watch Futurama?');
|
||||
};
|
||||
|
||||
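// toStringWithSourceMap() returns { code, map } where map is a
// SourceMapGenerator; the test round-trips it through a SourceMapConsumer to
// query original positions for several generated locations.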
exports['test .toStringWithSourceMap()'] = forEachNewline(function (assert, util, nl) {
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {' + nl,
|
||||
' ',
|
||||
new SourceNode(1, 0, 'a.js', 'someCall', 'originalCall'),
|
||||
new SourceNode(1, 8, 'a.js', '()'),
|
||||
';' + nl,
|
||||
' ', new SourceNode(2, 0, 'b.js', ['if (foo) bar()']), ';' + nl,
|
||||
'}());']);
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(result.code, [
|
||||
'(function () {',
|
||||
' someCall();',
|
||||
' if (foo) bar();',
|
||||
'}());'
|
||||
].join(nl));
|
||||
|
||||
var map = result.map;
|
||||
var mapWithoutOptions = node.toStringWithSourceMap().map;
|
||||
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
assert.ok(mapWithoutOptions instanceof SourceMapGenerator, 'mapWithoutOptions instanceof SourceMapGenerator');
|
||||
assert.ok(!('file' in mapWithoutOptions));
|
||||
mapWithoutOptions._file = 'foo.js';
|
||||
util.assertEqualMaps(assert, map.toJSON(), mapWithoutOptions.toJSON());
|
||||
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
var actual;
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 1,
|
||||
column: 4
|
||||
});
|
||||
assert.equal(actual.source, null);
|
||||
assert.equal(actual.line, null);
|
||||
assert.equal(actual.column, null);
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 2,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(actual.source, 'a.js');
|
||||
assert.equal(actual.line, 1);
|
||||
assert.equal(actual.column, 0);
|
||||
assert.equal(actual.name, 'originalCall');
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 3,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(actual.source, 'b.js');
|
||||
assert.equal(actual.line, 2);
|
||||
assert.equal(actual.column, 0);
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 3,
|
||||
column: 16
|
||||
});
|
||||
assert.equal(actual.source, null);
|
||||
assert.equal(actual.line, null);
|
||||
assert.equal(actual.column, null);
|
||||
|
||||
actual = map.originalPositionFor({
|
||||
line: 4,
|
||||
column: 2
|
||||
});
|
||||
assert.equal(actual.source, null);
|
||||
assert.equal(actual.line, null);
|
||||
assert.equal(actual.column, null);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap()'] = forEachNewline(function (assert, util, nl) {
|
||||
var testCode = util.testGeneratedCode.replace(/\n/g, nl);
|
||||
var node = SourceNode.fromStringWithSourceMap(
|
||||
testCode,
|
||||
new SourceMapConsumer(util.testMap));
|
||||
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'min.js'
|
||||
});
|
||||
var map = result.map;
|
||||
var code = result.code;
|
||||
|
||||
assert.equal(code, testCode);
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = map.toJSON();
|
||||
assert.equal(map.version, util.testMap.version);
|
||||
assert.equal(map.file, util.testMap.file);
|
||||
assert.equal(map.mappings, util.testMap.mappings);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap() empty map'] = forEachNewline(function (assert, util, nl) {
|
||||
var node = SourceNode.fromStringWithSourceMap(
|
||||
util.testGeneratedCode.replace(/\n/g, nl),
|
||||
new SourceMapConsumer(util.emptyMap));
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'min.js'
|
||||
});
|
||||
var map = result.map;
|
||||
var code = result.code;
|
||||
|
||||
assert.equal(code, util.testGeneratedCode.replace(/\n/g, nl));
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = map.toJSON();
|
||||
assert.equal(map.version, util.emptyMap.version);
|
||||
assert.equal(map.file, util.emptyMap.file);
|
||||
assert.equal(map.mappings.length, util.emptyMap.mappings.length);
|
||||
assert.equal(map.mappings, util.emptyMap.mappings);
|
||||
});
|
||||
|
||||
exports['test .fromStringWithSourceMap() complex version'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
"(function() {" + nl,
|
||||
" var Test = {};" + nl,
|
||||
" ", new SourceNode(1, 0, "a.js", "Test.A = { value: 1234 };" + nl),
|
||||
" ", new SourceNode(2, 0, "a.js", "Test.A.x = 'xyz';"), nl,
|
||||
"}());" + nl,
|
||||
"/* Generated Source */"]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
var node = SourceNode.fromStringWithSourceMap(
|
||||
input.code,
|
||||
new SourceMapConsumer(input.map.toString()));
|
||||
|
||||
var result = node.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
var map = result.map;
|
||||
var code = result.code;
|
||||
|
||||
assert.equal(code, input.code);
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = map.toJSON();
|
||||
var inputMap = input.map.toJSON();
|
||||
util.assertEqualMaps(assert, map, inputMap);
|
||||
});
|
||||
|
||||
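// The optional third argument of fromStringWithSourceMap() gives the path that
// relative sources in the consumed map should be resolved against; the
// directory layout sketched below shows why omitting it yields wrong source
// paths.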
exports['test .fromStringWithSourceMap() third argument'] = function (assert, util) {
|
||||
// Assume the following directory structure:
|
||||
//
|
||||
// http://foo.org/
|
||||
// bar.coffee
|
||||
// app/
|
||||
// coffee/
|
||||
// foo.coffee
|
||||
// coffeeBundle.js # Made from {foo,bar,baz}.coffee
|
||||
// maps/
|
||||
// coffeeBundle.js.map
|
||||
// js/
|
||||
// foo.js
|
||||
// public/
|
||||
// app.js # Made from {foo,coffeeBundle}.js
|
||||
// app.js.map
|
||||
//
|
||||
// http://www.example.com/
|
||||
// baz.coffee
|
||||
|
||||
var coffeeBundle = new SourceNode(1, 0, 'foo.coffee', 'foo(coffee);\n');
|
||||
coffeeBundle.setSourceContent('foo.coffee', 'foo coffee');
|
||||
coffeeBundle.add(new SourceNode(2, 0, '/bar.coffee', 'bar(coffee);\n'));
|
||||
coffeeBundle.add(new SourceNode(3, 0, 'http://www.example.com/baz.coffee', 'baz(coffee);'));
|
||||
coffeeBundle = coffeeBundle.toStringWithSourceMap({
|
||||
file: 'foo.js',
|
||||
sourceRoot: '..'
|
||||
});
|
||||
|
||||
var foo = new SourceNode(1, 0, 'foo.js', 'foo(js);');
|
||||
|
||||
var test = function(relativePath, expectedSources) {
|
||||
var app = new SourceNode();
|
||||
app.add(SourceNode.fromStringWithSourceMap(
|
||||
coffeeBundle.code,
|
||||
new SourceMapConsumer(coffeeBundle.map.toString()),
|
||||
relativePath));
|
||||
app.add(foo);
|
||||
var i = 0;
|
||||
app.walk(function (chunk, loc) {
|
||||
assert.equal(loc.source, expectedSources[i]);
|
||||
i++;
|
||||
});
|
||||
app.walkSourceContents(function (sourceFile, sourceContent) {
|
||||
assert.equal(sourceFile, expectedSources[0]);
|
||||
assert.equal(sourceContent, 'foo coffee');
|
||||
})
|
||||
};
|
||||
|
||||
test('../coffee/maps', [
|
||||
'../coffee/foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
|
||||
// If the third parameter is omitted or set to the current working
|
||||
// directory we get incorrect source paths:
|
||||
|
||||
test(undefined, [
|
||||
'../foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
|
||||
test('', [
|
||||
'../foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
|
||||
test('.', [
|
||||
'../foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
|
||||
test('./', [
|
||||
'../foo.coffee',
|
||||
'/bar.coffee',
|
||||
'http://www.example.com/baz.coffee',
|
||||
'foo.js'
|
||||
]);
|
||||
};
|
||||
|
||||
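// Consecutive chunks that map to the same original position should be merged
// into one mapping; correctMap below is the hand-built map the merged output
// must equal.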
exports['test .toStringWithSourceMap() merging duplicate mappings'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
new SourceNode(1, 0, "a.js", "(function"),
|
||||
new SourceNode(1, 0, "a.js", "() {" + nl),
|
||||
" ",
|
||||
new SourceNode(1, 0, "a.js", "var Test = "),
|
||||
new SourceNode(1, 0, "b.js", "{};" + nl),
|
||||
new SourceNode(2, 0, "b.js", "Test"),
|
||||
new SourceNode(2, 0, "b.js", ".A", "A"),
|
||||
new SourceNode(2, 20, "b.js", " = { value: ", "A"),
|
||||
"1234",
|
||||
new SourceNode(2, 40, "b.js", " };" + nl, "A"),
|
||||
"}());" + nl,
|
||||
"/* Generated Source */"
|
||||
]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(input.code, [
|
||||
"(function() {",
|
||||
" var Test = {};",
|
||||
"Test.A = { value: 1234 };",
|
||||
"}());",
|
||||
"/* Generated Source */"
|
||||
].join(nl))
|
||||
|
||||
var correctMap = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 1, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
// There is no need for an empty mapping here,
// because the mappings end at the end of the line.
|
||||
correctMap.addMapping({
|
||||
generated: { line: 2, column: 2 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 2, column: 13 },
|
||||
source: 'b.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 4 },
|
||||
source: 'b.js',
|
||||
name: 'A',
|
||||
original: { line: 2, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 6 },
|
||||
source: 'b.js',
|
||||
name: 'A',
|
||||
original: { line: 2, column: 20 }
|
||||
});
|
||||
// This empty mapping is required,
|
||||
// because there is a hole in the middle of the line
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 18 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 22 },
|
||||
source: 'b.js',
|
||||
name: 'A',
|
||||
original: { line: 2, column: 40 }
|
||||
});
|
||||
// There is no need for an empty mapping here,
// because the mappings end at the end of the line.
|
||||
|
||||
var inputMap = input.map.toJSON();
|
||||
correctMap = correctMap.toJSON();
|
||||
util.assertEqualMaps(assert, inputMap, correctMap);
|
||||
});
|
||||
|
||||
exports['test .toStringWithSourceMap() multi-line SourceNodes'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
new SourceNode(1, 0, "a.js", "(function() {" + nl + "var nextLine = 1;" + nl + "anotherLine();" + nl),
|
||||
new SourceNode(2, 2, "b.js", "Test.call(this, 123);" + nl),
|
||||
new SourceNode(2, 2, "b.js", "this['stuff'] = 'v';" + nl),
|
||||
new SourceNode(2, 2, "b.js", "anotherLine();" + nl),
|
||||
"/*" + nl + "Generated" + nl + "Source" + nl + "*/" + nl,
|
||||
new SourceNode(3, 4, "c.js", "anotherLine();" + nl),
|
||||
"/*" + nl + "Generated" + nl + "Source" + nl + "*/"
|
||||
]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(input.code, [
|
||||
"(function() {",
|
||||
"var nextLine = 1;",
|
||||
"anotherLine();",
|
||||
"Test.call(this, 123);",
|
||||
"this['stuff'] = 'v';",
|
||||
"anotherLine();",
|
||||
"/*",
|
||||
"Generated",
|
||||
"Source",
|
||||
"*/",
|
||||
"anotherLine();",
|
||||
"/*",
|
||||
"Generated",
|
||||
"Source",
|
||||
"*/"
|
||||
].join(nl));
|
||||
|
||||
var correctMap = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 1, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 2, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 4, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 2 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 5, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 2 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 6, column: 0 },
|
||||
source: 'b.js',
|
||||
original: { line: 2, column: 2 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 11, column: 0 },
|
||||
source: 'c.js',
|
||||
original: { line: 3, column: 4 }
|
||||
});
|
||||
|
||||
var inputMap = input.map.toJSON();
|
||||
correctMap = correctMap.toJSON();
|
||||
util.assertEqualMaps(assert, inputMap, correctMap);
|
||||
});
|
||||
|
||||
exports['test .toStringWithSourceMap() with empty string'] = function (assert, util) {
|
||||
var node = new SourceNode(1, 0, 'empty.js', '');
|
||||
var result = node.toStringWithSourceMap();
|
||||
assert.equal(result.code, '');
|
||||
};
|
||||
|
||||
exports['test .toStringWithSourceMap() with consecutive newlines'] = forEachNewline(function (assert, util, nl) {
|
||||
var input = new SourceNode(null, null, null, [
|
||||
"/***/" + nl + nl,
|
||||
new SourceNode(1, 0, "a.js", "'use strict';" + nl),
|
||||
new SourceNode(2, 0, "a.js", "a();"),
|
||||
]);
|
||||
input = input.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
});
|
||||
|
||||
assert.equal(input.code, [
|
||||
"/***/",
|
||||
"",
|
||||
"'use strict';",
|
||||
"a();",
|
||||
].join(nl));
|
||||
|
||||
var correctMap = new SourceMapGenerator({
|
||||
file: 'foo.js'
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 3, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 1, column: 0 }
|
||||
});
|
||||
correctMap.addMapping({
|
||||
generated: { line: 4, column: 0 },
|
||||
source: 'a.js',
|
||||
original: { line: 2, column: 0 }
|
||||
});
|
||||
|
||||
var inputMap = input.map.toJSON();
|
||||
correctMap = correctMap.toJSON();
|
||||
util.assertEqualMaps(assert, inputMap, correctMap);
|
||||
});
|
||||
|
||||
exports['test setSourceContent with toStringWithSourceMap'] = function (assert, util) {
|
||||
var aNode = new SourceNode(1, 1, 'a.js', 'a');
|
||||
aNode.setSourceContent('a.js', 'someContent');
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {\n',
|
||||
' ', aNode,
|
||||
' ', new SourceNode(1, 1, 'b.js', 'b'),
|
||||
'}());']);
|
||||
node.setSourceContent('b.js', 'otherContent');
|
||||
var map = node.toStringWithSourceMap({
|
||||
file: 'foo.js'
|
||||
}).map;
|
||||
|
||||
assert.ok(map instanceof SourceMapGenerator, 'map instanceof SourceMapGenerator');
|
||||
map = new SourceMapConsumer(map.toString());
|
||||
|
||||
assert.equal(map.sources.length, 2);
|
||||
assert.equal(map.sources[0], 'a.js');
|
||||
assert.equal(map.sources[1], 'b.js');
|
||||
assert.equal(map.sourcesContent.length, 2);
|
||||
assert.equal(map.sourcesContent[0], 'someContent');
|
||||
assert.equal(map.sourcesContent[1], 'otherContent');
|
||||
};
|
||||
|
||||
exports['test walkSourceContents'] = function (assert, util) {
|
||||
var aNode = new SourceNode(1, 1, 'a.js', 'a');
|
||||
aNode.setSourceContent('a.js', 'someContent');
|
||||
var node = new SourceNode(null, null, null,
|
||||
['(function () {\n',
|
||||
' ', aNode,
|
||||
' ', new SourceNode(1, 1, 'b.js', 'b'),
|
||||
'}());']);
|
||||
node.setSourceContent('b.js', 'otherContent');
|
||||
var results = [];
|
||||
node.walkSourceContents(function (sourceFile, sourceContent) {
|
||||
results.push([sourceFile, sourceContent]);
|
||||
});
|
||||
assert.equal(results.length, 2);
|
||||
assert.equal(results[0][0], 'a.js');
|
||||
assert.equal(results[0][1], 'someContent');
|
||||
assert.equal(results[1][0], 'b.js');
|
||||
assert.equal(results[1][1], 'otherContent');
|
||||
};
|
||||
});
|
216
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/source-map/test-util.js
generated
vendored
Normal file
@ -0,0 +1,216 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2014 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var libUtil = require('../../lib/source-map/util');
|
||||
|
||||
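// urlParse()/urlGenerate() must round-trip anything urlParse recognizes as a
// URL, and urlParse must return null for plain paths and data: URIs.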
exports['test urls'] = function (assert, util) {
|
||||
var assertUrl = function (url) {
|
||||
assert.equal(url, libUtil.urlGenerate(libUtil.urlParse(url)));
|
||||
};
|
||||
assertUrl('http://');
|
||||
assertUrl('http://www.example.com');
|
||||
assertUrl('http://user:pass@www.example.com');
|
||||
assertUrl('http://www.example.com:80');
|
||||
assertUrl('http://www.example.com/');
|
||||
assertUrl('http://www.example.com/foo/bar');
|
||||
assertUrl('http://www.example.com/foo/bar/');
|
||||
assertUrl('http://user:pass@www.example.com:80/foo/bar/');
|
||||
|
||||
assertUrl('//');
|
||||
assertUrl('//www.example.com');
|
||||
assertUrl('file:///www.example.com');
|
||||
|
||||
assert.equal(libUtil.urlParse(''), null);
|
||||
assert.equal(libUtil.urlParse('.'), null);
|
||||
assert.equal(libUtil.urlParse('..'), null);
|
||||
assert.equal(libUtil.urlParse('a'), null);
|
||||
assert.equal(libUtil.urlParse('a/b'), null);
|
||||
assert.equal(libUtil.urlParse('a//b'), null);
|
||||
assert.equal(libUtil.urlParse('/a'), null);
|
||||
assert.equal(libUtil.urlParse('data:foo,bar'), null);
|
||||
};
|
||||
|
||||
exports['test normalize()'] = function (assert, util) {
|
||||
assert.equal(libUtil.normalize('/..'), '/');
|
||||
assert.equal(libUtil.normalize('/../'), '/');
|
||||
assert.equal(libUtil.normalize('/../../../..'), '/');
|
||||
assert.equal(libUtil.normalize('/../../../../a/b/c'), '/a/b/c');
|
||||
assert.equal(libUtil.normalize('/a/b/c/../../../d/../../e'), '/e');
|
||||
|
||||
assert.equal(libUtil.normalize('..'), '..');
|
||||
assert.equal(libUtil.normalize('../'), '../');
|
||||
assert.equal(libUtil.normalize('../../a/'), '../../a/');
|
||||
assert.equal(libUtil.normalize('a/..'), '.');
|
||||
assert.equal(libUtil.normalize('a/../../..'), '../..');
|
||||
|
||||
assert.equal(libUtil.normalize('/.'), '/');
|
||||
assert.equal(libUtil.normalize('/./'), '/');
|
||||
assert.equal(libUtil.normalize('/./././.'), '/');
|
||||
assert.equal(libUtil.normalize('/././././a/b/c'), '/a/b/c');
|
||||
assert.equal(libUtil.normalize('/a/b/c/./././d/././e'), '/a/b/c/d/e');
|
||||
|
||||
assert.equal(libUtil.normalize(''), '.');
|
||||
assert.equal(libUtil.normalize('.'), '.');
|
||||
assert.equal(libUtil.normalize('./'), '.');
|
||||
assert.equal(libUtil.normalize('././a'), 'a');
|
||||
assert.equal(libUtil.normalize('a/./'), 'a/');
|
||||
assert.equal(libUtil.normalize('a/././.'), 'a');
|
||||
|
||||
assert.equal(libUtil.normalize('/a/b//c////d/////'), '/a/b/c/d/');
|
||||
assert.equal(libUtil.normalize('///a/b//c////d/////'), '///a/b/c/d/');
|
||||
assert.equal(libUtil.normalize('a/b//c////d'), 'a/b/c/d');
|
||||
|
||||
assert.equal(libUtil.normalize('.///.././../a/b//./..'), '../../a')
|
||||
|
||||
assert.equal(libUtil.normalize('http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.normalize('http://www.example.com/'), 'http://www.example.com/');
|
||||
assert.equal(libUtil.normalize('http://www.example.com/./..//a/b/c/.././d//'), 'http://www.example.com/a/b/d/');
|
||||
};
|
||||
|
||||
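// join(root, path) resolves like a URL join: an absolute path or full URL in
// `path` wins, '' and '.' keep the (normalized) root, and relative segments
// are appended and then normalized.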
exports['test join()'] = function (assert, util) {
|
||||
assert.equal(libUtil.join('a', 'b'), 'a/b');
|
||||
assert.equal(libUtil.join('a/', 'b'), 'a/b');
|
||||
assert.equal(libUtil.join('a//', 'b'), 'a/b');
|
||||
assert.equal(libUtil.join('a', 'b/'), 'a/b/');
|
||||
assert.equal(libUtil.join('a', 'b//'), 'a/b/');
|
||||
assert.equal(libUtil.join('a/', '/b'), '/b');
|
||||
assert.equal(libUtil.join('a//', '//b'), '//b');
|
||||
|
||||
assert.equal(libUtil.join('a', '..'), '.');
|
||||
assert.equal(libUtil.join('a', '../b'), 'b');
|
||||
assert.equal(libUtil.join('a/b', '../c'), 'a/c');
|
||||
|
||||
assert.equal(libUtil.join('a', '.'), 'a');
|
||||
assert.equal(libUtil.join('a', './b'), 'a/b');
|
||||
assert.equal(libUtil.join('a/b', './c'), 'a/b/c');
|
||||
|
||||
assert.equal(libUtil.join('a', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('a', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('', 'b'), 'b');
|
||||
assert.equal(libUtil.join('.', 'b'), 'b');
|
||||
assert.equal(libUtil.join('', 'b/'), 'b/');
|
||||
assert.equal(libUtil.join('.', 'b/'), 'b/');
|
||||
assert.equal(libUtil.join('', 'b//'), 'b/');
|
||||
assert.equal(libUtil.join('.', 'b//'), 'b/');
|
||||
|
||||
assert.equal(libUtil.join('', '..'), '..');
|
||||
assert.equal(libUtil.join('.', '..'), '..');
|
||||
assert.equal(libUtil.join('', '../b'), '../b');
|
||||
assert.equal(libUtil.join('.', '../b'), '../b');
|
||||
|
||||
assert.equal(libUtil.join('', '.'), '.');
|
||||
assert.equal(libUtil.join('.', '.'), '.');
|
||||
assert.equal(libUtil.join('', './b'), 'b');
|
||||
assert.equal(libUtil.join('.', './b'), 'b');
|
||||
|
||||
assert.equal(libUtil.join('', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('.', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('', 'data:foo,bar'), 'data:foo,bar');
|
||||
assert.equal(libUtil.join('.', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('..', 'b'), '../b');
|
||||
assert.equal(libUtil.join('..', 'b/'), '../b/');
|
||||
assert.equal(libUtil.join('..', 'b//'), '../b/');
|
||||
|
||||
assert.equal(libUtil.join('..', '..'), '../..');
|
||||
assert.equal(libUtil.join('..', '../b'), '../../b');
|
||||
|
||||
assert.equal(libUtil.join('..', '.'), '..');
|
||||
assert.equal(libUtil.join('..', './b'), '../b');
|
||||
|
||||
assert.equal(libUtil.join('..', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('..', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('a', ''), 'a');
|
||||
assert.equal(libUtil.join('a', '.'), 'a');
|
||||
assert.equal(libUtil.join('a/', ''), 'a');
|
||||
assert.equal(libUtil.join('a/', '.'), 'a');
|
||||
assert.equal(libUtil.join('a//', ''), 'a');
|
||||
assert.equal(libUtil.join('a//', '.'), 'a');
|
||||
assert.equal(libUtil.join('/a', ''), '/a');
|
||||
assert.equal(libUtil.join('/a', '.'), '/a');
|
||||
assert.equal(libUtil.join('', ''), '.');
|
||||
assert.equal(libUtil.join('.', ''), '.');
|
||||
assert.equal(libUtil.join('.', ''), '.');
|
||||
assert.equal(libUtil.join('.', '.'), '.');
|
||||
assert.equal(libUtil.join('..', ''), '..');
|
||||
assert.equal(libUtil.join('..', '.'), '..');
|
||||
assert.equal(libUtil.join('http://foo.org/a', ''), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a', '.'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', ''), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', '.'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', ''), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', '.'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org', ''), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org', '.'), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org/', ''), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org/', '.'), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org//', ''), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org//', '.'), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('//www.example.com', ''), '//www.example.com/');
|
||||
assert.equal(libUtil.join('//www.example.com', '.'), '//www.example.com/');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', 'b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', 'b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'b/'), 'http://foo.org/a/b/');
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'b//'), 'http://foo.org/a/b/');
|
||||
assert.equal(libUtil.join('http://foo.org/a/', '/b'), 'http://foo.org/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a//', '//b'), 'http://b');
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', '..'), 'http://foo.org/');
|
||||
assert.equal(libUtil.join('http://foo.org/a', '../b'), 'http://foo.org/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a/b', '../c'), 'http://foo.org/a/c');
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', '.'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/a', './b'), 'http://foo.org/a/b');
|
||||
assert.equal(libUtil.join('http://foo.org/a/b', './c'), 'http://foo.org/a/b/c');
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'http://www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('http://foo.org/a', 'data:foo,bar'), 'data:foo,bar');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('http://foo.org', 'a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/', 'a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org//', 'a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org', '/a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org/', '/a'), 'http://foo.org/a');
|
||||
assert.equal(libUtil.join('http://foo.org//', '/a'), 'http://foo.org/a');
|
||||
|
||||
|
||||
assert.equal(libUtil.join('http://', 'www.example.com'), 'http://www.example.com');
|
||||
assert.equal(libUtil.join('file:///', 'www.example.com'), 'file:///www.example.com');
|
||||
assert.equal(libUtil.join('http://', 'ftp://example.com'), 'ftp://example.com');
|
||||
|
||||
assert.equal(libUtil.join('http://www.example.com', '//foo.org/bar'), 'http://foo.org/bar');
|
||||
assert.equal(libUtil.join('//www.example.com', '//foo.org/bar'), '//foo.org/bar');
|
||||
};
|
||||
|
||||
// TODO Issue #128: Define and test this function properly.
|
||||
exports['test relative()'] = function (assert, util) {
|
||||
assert.equal(libUtil.relative('/the/root', '/the/root/one.js'), 'one.js');
|
||||
assert.equal(libUtil.relative('/the/root', '/the/rootone.js'), '/the/rootone.js');
|
||||
|
||||
assert.equal(libUtil.relative('', '/the/root/one.js'), '/the/root/one.js');
|
||||
assert.equal(libUtil.relative('.', '/the/root/one.js'), '/the/root/one.js');
|
||||
assert.equal(libUtil.relative('', 'the/root/one.js'), 'the/root/one.js');
|
||||
assert.equal(libUtil.relative('.', 'the/root/one.js'), 'the/root/one.js');
|
||||
|
||||
assert.equal(libUtil.relative('/', '/the/root/one.js'), 'the/root/one.js');
|
||||
assert.equal(libUtil.relative('/', 'the/root/one.js'), 'the/root/one.js');
|
||||
};
|
||||
|
||||
});
|
299
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/node_modules/source-map/test/source-map/util.js
generated
vendored
Normal file
@ -0,0 +1,299 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module, require);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('../../lib/source-map/util');
|
||||
|
||||
// This is a test mapping which maps functions from two different files
|
||||
// (one.js and two.js) to a minified generated source.
|
||||
//
|
||||
// Here is one.js:
|
||||
//
|
||||
// ONE.foo = function (bar) {
|
||||
// return baz(bar);
|
||||
// };
|
||||
//
|
||||
// Here is two.js:
|
||||
//
|
||||
// TWO.inc = function (n) {
|
||||
// return n + 1;
|
||||
// };
|
||||
//
|
||||
// And here is the generated code (min.js):
|
||||
//
|
||||
// ONE.foo=function(a){return baz(a);};
|
||||
// TWO.inc=function(a){return a+1;};
|
||||
exports.testGeneratedCode = " ONE.foo=function(a){return baz(a);};\n"+
|
||||
" TWO.inc=function(a){return a+1;};";
|
||||
exports.testMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourceRoot: '/the/root',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.testMapNoSourceRoot = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.testMapEmptySourceRoot = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourceRoot: '',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
// This mapping is identical to above, but uses the indexed format instead.
|
||||
exports.indexedTestMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
sections: [
|
||||
{
|
||||
offset: {
|
||||
line: 0,
|
||||
column: 0
|
||||
},
|
||||
map: {
|
||||
version: 3,
|
||||
sources: [
|
||||
"one.js"
|
||||
],
|
||||
sourcesContent: [
|
||||
' ONE.foo = function (bar) {\n' +
|
||||
' return baz(bar);\n' +
|
||||
' };',
|
||||
],
|
||||
names: [
|
||||
"bar",
|
||||
"baz"
|
||||
],
|
||||
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID",
|
||||
file: "min.js",
|
||||
sourceRoot: "/the/root"
|
||||
}
|
||||
},
|
||||
{
|
||||
offset: {
|
||||
line: 1,
|
||||
column: 0
|
||||
},
|
||||
map: {
|
||||
version: 3,
|
||||
sources: [
|
||||
"two.js"
|
||||
],
|
||||
sourcesContent: [
|
||||
' TWO.inc = function (n) {\n' +
|
||||
' return n + 1;\n' +
|
||||
' };'
|
||||
],
|
||||
names: [
|
||||
"n"
|
||||
],
|
||||
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOA",
|
||||
file: "min.js",
|
||||
sourceRoot: "/the/root"
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
exports.indexedTestMapDifferentSourceRoots = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
sections: [
|
||||
{
|
||||
offset: {
|
||||
line: 0,
|
||||
column: 0
|
||||
},
|
||||
map: {
|
||||
version: 3,
|
||||
sources: [
|
||||
"one.js"
|
||||
],
|
||||
sourcesContent: [
|
||||
' ONE.foo = function (bar) {\n' +
|
||||
' return baz(bar);\n' +
|
||||
' };',
|
||||
],
|
||||
names: [
|
||||
"bar",
|
||||
"baz"
|
||||
],
|
||||
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID",
|
||||
file: "min.js",
|
||||
sourceRoot: "/the/root"
|
||||
}
|
||||
},
|
||||
{
|
||||
offset: {
|
||||
line: 1,
|
||||
column: 0
|
||||
},
|
||||
map: {
|
||||
version: 3,
|
||||
sources: [
|
||||
"two.js"
|
||||
],
|
||||
sourcesContent: [
|
||||
' TWO.inc = function (n) {\n' +
|
||||
' return n + 1;\n' +
|
||||
' };'
|
||||
],
|
||||
names: [
|
||||
"n"
|
||||
],
|
||||
mappings: "CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOA",
|
||||
file: "min.js",
|
||||
sourceRoot: "/different/root"
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
exports.testMapWithSourcesContent = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['one.js', 'two.js'],
|
||||
sourcesContent: [
|
||||
' ONE.foo = function (bar) {\n' +
|
||||
' return baz(bar);\n' +
|
||||
' };',
|
||||
' TWO.inc = function (n) {\n' +
|
||||
' return n + 1;\n' +
|
||||
' };'
|
||||
],
|
||||
sourceRoot: '/the/root',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.testMapRelativeSources = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: ['bar', 'baz', 'n'],
|
||||
sources: ['./one.js', './two.js'],
|
||||
sourcesContent: [
|
||||
' ONE.foo = function (bar) {\n' +
|
||||
' return baz(bar);\n' +
|
||||
' };',
|
||||
' TWO.inc = function (n) {\n' +
|
||||
' return n + 1;\n' +
|
||||
' };'
|
||||
],
|
||||
sourceRoot: '/the/root',
|
||||
mappings: 'CAAC,IAAI,IAAM,SAAUA,GAClB,OAAOC,IAAID;CCDb,IAAI,IAAM,SAAUE,GAClB,OAAOA'
|
||||
};
|
||||
exports.emptyMap = {
|
||||
version: 3,
|
||||
file: 'min.js',
|
||||
names: [],
|
||||
sources: [],
|
||||
mappings: ''
|
||||
};
|
||||
|
||||
|
||||
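// Checks a mapping in both directions: originalPositionFor() at the generated
// position and generatedPositionFor() at the original position, unless one of
// the dontTest* flags disables that direction.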
function assertMapping(generatedLine, generatedColumn, originalSource,
|
||||
originalLine, originalColumn, name, map, assert,
|
||||
dontTestGenerated, dontTestOriginal) {
|
||||
if (!dontTestOriginal) {
|
||||
var origMapping = map.originalPositionFor({
|
||||
line: generatedLine,
|
||||
column: generatedColumn
|
||||
});
|
||||
assert.equal(origMapping.name, name,
|
||||
'Incorrect name, expected ' + JSON.stringify(name)
|
||||
+ ', got ' + JSON.stringify(origMapping.name));
|
||||
assert.equal(origMapping.line, originalLine,
|
||||
'Incorrect line, expected ' + JSON.stringify(originalLine)
|
||||
+ ', got ' + JSON.stringify(origMapping.line));
|
||||
assert.equal(origMapping.column, originalColumn,
|
||||
'Incorrect column, expected ' + JSON.stringify(originalColumn)
|
||||
+ ', got ' + JSON.stringify(origMapping.column));
|
||||
|
||||
var expectedSource;
|
||||
|
||||
if (originalSource && map.sourceRoot && originalSource.indexOf(map.sourceRoot) === 0) {
|
||||
expectedSource = originalSource;
|
||||
} else if (originalSource) {
|
||||
expectedSource = map.sourceRoot
|
||||
? util.join(map.sourceRoot, originalSource)
|
||||
: originalSource;
|
||||
} else {
|
||||
expectedSource = null;
|
||||
}
|
||||
|
||||
assert.equal(origMapping.source, expectedSource,
|
||||
'Incorrect source, expected ' + JSON.stringify(expectedSource)
|
||||
+ ', got ' + JSON.stringify(origMapping.source));
|
||||
}
|
||||
|
||||
if (!dontTestGenerated) {
|
||||
var genMapping = map.generatedPositionFor({
|
||||
source: originalSource,
|
||||
line: originalLine,
|
||||
column: originalColumn
|
||||
});
|
||||
assert.equal(genMapping.line, generatedLine,
|
||||
'Incorrect line, expected ' + JSON.stringify(generatedLine)
|
||||
+ ', got ' + JSON.stringify(genMapping.line));
|
||||
assert.equal(genMapping.column, generatedColumn,
|
||||
'Incorrect column, expected ' + JSON.stringify(generatedColumn)
|
||||
+ ', got ' + JSON.stringify(genMapping.column));
|
||||
}
|
||||
}
|
||||
exports.assertMapping = assertMapping;
|
||||
|
||||
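// Field-by-field comparison of two raw source map objects (version, file,
// names, sources, sourceRoot, mappings, sourcesContent) with descriptive
// failure messages.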
function assertEqualMaps(assert, actualMap, expectedMap) {
|
||||
assert.equal(actualMap.version, expectedMap.version, "version mismatch");
|
||||
assert.equal(actualMap.file, expectedMap.file, "file mismatch");
|
||||
assert.equal(actualMap.names.length,
|
||||
expectedMap.names.length,
|
||||
"names length mismatch: " +
|
||||
actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
|
||||
for (var i = 0; i < actualMap.names.length; i++) {
|
||||
assert.equal(actualMap.names[i],
|
||||
expectedMap.names[i],
|
||||
"names[" + i + "] mismatch: " +
|
||||
actualMap.names.join(", ") + " != " + expectedMap.names.join(", "));
|
||||
}
|
||||
assert.equal(actualMap.sources.length,
|
||||
expectedMap.sources.length,
|
||||
"sources length mismatch: " +
|
||||
actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
|
||||
for (var i = 0; i < actualMap.sources.length; i++) {
|
||||
assert.equal(actualMap.sources[i],
|
||||
expectedMap.sources[i],
|
||||
"sources[" + i + "] length mismatch: " +
|
||||
actualMap.sources.join(", ") + " != " + expectedMap.sources.join(", "));
|
||||
}
|
||||
assert.equal(actualMap.sourceRoot,
|
||||
expectedMap.sourceRoot,
|
||||
"sourceRoot mismatch: " +
|
||||
actualMap.sourceRoot + " != " + expectedMap.sourceRoot);
|
||||
assert.equal(actualMap.mappings, expectedMap.mappings,
|
||||
"mappings mismatch:\nActual: " + actualMap.mappings + "\nExpected: " + expectedMap.mappings);
|
||||
if (actualMap.sourcesContent) {
|
||||
assert.equal(actualMap.sourcesContent.length,
|
||||
expectedMap.sourcesContent.length,
|
||||
"sourcesContent length mismatch");
|
||||
for (var i = 0; i < actualMap.sourcesContent.length; i++) {
|
||||
assert.equal(actualMap.sourcesContent[i],
|
||||
expectedMap.sourcesContent[i],
|
||||
"sourcesContent[" + i + "] mismatch");
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.assertEqualMaps = assertEqualMaps;
|
||||
|
||||
});
|
154
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/package.json
generated
vendored
Normal file
@ -0,0 +1,154 @@
{
  "name": "grunt-contrib-concat",
  "description": "Concatenate files.",
  "version": "0.5.1",
  "author": {
    "name": "Grunt Team",
    "url": "http://gruntjs.com/"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/gruntjs/grunt-contrib-concat"
  },
  "licenses": [
    {
      "type": "MIT",
      "url": "https://github.com/gruntjs/grunt-contrib-concat/blob/master/LICENSE-MIT"
    }
  ],
  "engines": {
    "node": ">=0.10.0"
  },
  "scripts": {
    "test": "grunt test"
  },
  "dependencies": {
    "chalk": "^0.5.1",
    "source-map": "^0.3.0"
  },
  "devDependencies": {
    "grunt": "^0.4.5",
    "grunt-cli": "^0.1.13",
    "grunt-contrib-clean": "^0.6.0",
    "grunt-contrib-internal": "^0.4.2",
    "grunt-contrib-jshint": "^0.11.0",
    "grunt-contrib-nodeunit": "^0.4.0"
  },
  "peerDependencies": {
    "grunt": ">=0.4.0"
  },
  "keywords": [
    "gruntplugin"
  ],
  "files": [
    "tasks"
  ],
  "appveyor_id": "l42173901ms416km",
  "contributors": [
    {
      "name": "\"Cowboy\" Ben Alman",
      "url": "http://benalman.com/"
    },
    {
      "name": "Tyler Kellen",
      "url": "http://goingslowly.com/"
    },
    {
      "name": "Dan Wolff",
      "url": "http://danwolff.se/"
    },
    {
      "name": "Kyle Robinson Young"
    },
    {
      "name": "Vlad Filippov"
    },
    {
      "name": "XhmikosR"
    },
    {
      "name": "Steven Benner"
    },
    {
      "name": "Sindre Sorhus"
    },
    {
      "name": "Michael \"Z\" Goddard"
    },
    {
      "name": "GilbertSun"
    },
    {
      "name": "cbotsikas"
    },
    {
      "name": "Timo Tijhof"
    },
    {
      "name": "Piotr Yordanov"
    },
    {
      "name": "Nick Schonning"
    },
    {
      "name": "MarcelloDiSimone"
    },
    {
      "name": "Manuel Razzari"
    },
    {
      "name": "Joshua Appelman"
    },
    {
      "name": "Jacob Gable"
    },
    {
      "name": "Brady Wetherington"
    }
  ],
  "gitHead": "d4d97a6a65404351155c04fff9303f3c20d5c02c",
  "bugs": {
    "url": "https://github.com/gruntjs/grunt-contrib-concat/issues"
  },
  "homepage": "https://github.com/gruntjs/grunt-contrib-concat",
  "_id": "grunt-contrib-concat@0.5.1",
  "_shasum": "953c6efdfdfd2c107ab9c85077f2d4b24d31cd49",
  "_from": "grunt-contrib-concat@",
  "_npmVersion": "1.4.28",
  "_npmUser": {
    "name": "sindresorhus",
    "email": "sindresorhus@gmail.com"
  },
  "maintainers": [
    {
      "name": "cowboy",
      "email": "cowboy@rj3.net"
    },
    {
      "name": "tkellen",
      "email": "tyler@sleekcode.net"
    },
    {
      "name": "shama",
      "email": "kyle@dontkry.com"
    },
    {
      "name": "vladikoff",
      "email": "vlad@vladikoff.com"
    },
    {
      "name": "sindresorhus",
      "email": "sindresorhus@gmail.com"
    },
    {
      "name": "jmeas",
      "email": "jellyes2@gmail.com"
    }
  ],
  "dist": {
    "shasum": "953c6efdfdfd2c107ab9c85077f2d4b24d31cd49",
    "tarball": "http://registry.npmjs.org/grunt-contrib-concat/-/grunt-contrib-concat-0.5.1.tgz"
  },
  "directories": {},
  "_resolved": "https://registry.npmjs.org/grunt-contrib-concat/-/grunt-contrib-concat-0.5.1.tgz"
}
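Although the `files` whitelist limits the published package to `tasks/`, the manifest itself is always shipped, so the metadata above can be read at runtime through Node's JSON `require`. A small, hedged sketch; the printed values are taken from the manifest above.

```js
// Reads the vendored manifest; output values match the JSON above.
var pkg = require('grunt-contrib-concat/package.json');
console.log(pkg.name + ' v' + pkg.version);    // grunt-contrib-concat v0.5.1
console.log(pkg.peerDependencies.grunt);       // >=0.4.0
console.log(pkg.dependencies['source-map']);   // ^0.3.0
```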
118
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/tasks/concat.js
generated
vendored
Normal file
@ -0,0 +1,118 @@
/*
 * grunt-contrib-concat
 * http://gruntjs.com/
 *
 * Copyright (c) 2015 "Cowboy" Ben Alman, contributors
 * Licensed under the MIT license.
 */

'use strict';

module.exports = function(grunt) {

  // Internal lib.
  var comment = require('./lib/comment').init(grunt);
  var chalk = require('chalk');
  var sourcemap = require('./lib/sourcemap').init(grunt);

  grunt.registerMultiTask('concat', 'Concatenate files.', function() {
    // Merge task-specific and/or target-specific options with these defaults.
    var options = this.options({
      separator: grunt.util.linefeed,
      banner: '',
      footer: '',
      stripBanners: false,
      process: false,
      sourceMap: false,
      sourceMapName: undefined,
      sourceMapStyle: 'embed'
    });

    // Normalize boolean options that accept options objects.
    if (options.stripBanners === true) { options.stripBanners = {}; }
    if (options.process === true) { options.process = {}; }

    // Process banner and footer.
    var banner = grunt.template.process(options.banner);
    var footer = grunt.template.process(options.footer);

    // Set a local variable for whether to build source maps or not.
    var sourceMap = options.sourceMap;

    // If content is not embedded and it will be modified, either exit or do
    // not make the source map.
    if (
      sourceMap && options.sourceMapStyle === 'link' &&
      (options.stripBanners || options.process)
    ) {
      // Warn and exit if --force isn't set.
      grunt.warn(
        'stripBanners or process option is enabled. ' +
        'Set sourceMapStyle option to \'embed\' or \'inline\'.'
      );
      // --force is set, continue on without the source map.
      grunt.log.warn('Skipping creation of source maps.');
      // Set sourceMap to false to keep maps from being constructed.
      sourceMap = false;
    }

    // Iterate over all src-dest file pairs.
    this.files.forEach(function(f) {
      // Initialize source map objects.
      var sourceMapHelper;
      if (sourceMap) {
        sourceMapHelper = sourcemap.helper(f, options);
        sourceMapHelper.add(banner);
      }

      // Concat banner + specified files + footer.
      var src = banner + f.src.filter(function(filepath) {
        // Warn on and remove invalid source files (if nonull was set).
        if (!grunt.file.exists(filepath)) {
          grunt.log.warn('Source file "' + filepath + '" not found.');
          return false;
        } else {
          return true;
        }
      }).map(function(filepath, i) {
        if (grunt.file.isDir(filepath)) {
          return;
        }
        // Read file source.
        var src = grunt.file.read(filepath);
        // Process files as templates if requested.
        if (typeof options.process === 'function') {
          src = options.process(src, filepath);
        } else if (options.process) {
          src = grunt.template.process(src, options.process);
        }
        // Strip banners if requested.
        if (options.stripBanners) {
          src = comment.stripBanner(src, options.stripBanners);
        }
        // Add the lines of this file to our map.
        if (sourceMapHelper) {
          src = sourceMapHelper.addlines(src, filepath);
          if (i < f.src.length - 1) {
            sourceMapHelper.add(options.separator);
          }
        }
        return src;
      }).join(options.separator) + footer;

      if (sourceMapHelper) {
        sourceMapHelper.add(footer);
        sourceMapHelper.write();
        // Add sourceMappingURL to the end.
        src += sourceMapHelper.url();
      }

      // Write the destination file.
      grunt.file.write(f.dest, src);

      // Print a success message.
      grunt.log.writeln('File ' + chalk.cyan(f.dest) + ' created.');
    });
  });

};
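The task above only reads the options it declares as defaults (`separator`, `banner`, `footer`, `stripBanners`, `process`, `sourceMap`, `sourceMapName`, `sourceMapStyle`). A minimal Gruntfile sketch that exercises them is shown below; the source paths, destination file and banner text are placeholders, not part of the plugin.

```js
// Minimal Gruntfile sketch; file names and banner text are illustrative only.
module.exports = function(grunt) {
  grunt.initConfig({
    concat: {
      options: {
        separator: ';\n',                 // joined between files
        banner: '/*! demo build */\n',    // prepended once
        sourceMap: true,                  // triggers the sourcemap helper
        sourceMapStyle: 'embed'           // default style, kept explicit here
      },
      dist: {
        src: ['src/a.js', 'src/b.js'],
        dest: 'dist/built.js'
      }
    }
  });

  grunt.loadNpmTasks('grunt-contrib-concat');
  grunt.registerTask('default', ['concat']);
};
```

Running `grunt concat:dist` with this configuration would write `dist/built.js` plus, because `sourceMap` is enabled, a `dist/built.js.map` file and a trailing `sourceMappingURL` comment.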
34
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/tasks/lib/comment.js
generated
vendored
Normal file
@ -0,0 +1,34 @@
/*
 * grunt-contrib-concat
 * http://gruntjs.com/
 *
 * Copyright (c) 2013 "Cowboy" Ben Alman, contributors
 * Licensed under the MIT license.
 */

'use strict';

exports.init = function(/*grunt*/) {
  var exports = {};

  // Return the given source code with any leading banner comment stripped.
  exports.stripBanner = function(src, options) {
    if (!options) { options = {}; }
    var m = [];
    if (options.line) {
      // Strip // ... leading banners.
      m.push('(?:.*\\/\\/.*\\r?\\n)+\\s*');
    }
    if (options.block) {
      // Strips all /* ... */ block comment banners.
      m.push('\\/\\*[\\s\\S]*?\\*\\/');
    } else {
      // Strips only /* ... */ block comment banners, excluding /*! ... */.
      m.push('\\/\\*[^!][\\s\\S]*?\\*\\/');
    }
    var re = new RegExp('^\\s*(?:' + m.join('|') + ')\\s*', '');
    return src.replace(re, '');
  };

  return exports;
};
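`stripBanner` is driven purely by the `line` and `block` flags handled above. A short sketch of its default behaviour follows; the sample strings are invented and the require path assumes the vendored layout shown in this commit.

```js
var comment = require('grunt-contrib-concat/tasks/lib/comment').init();

// Default options: plain /* ... */ banners are stripped...
console.log(comment.stripBanner('/* build banner */\nvar x = 1;\n'));
// -> "var x = 1;\n"

// ...but /*! ... */ banners are preserved.
console.log(comment.stripBanner('/*! keep me */\nvar y = 2;\n'));
// -> "/*! keep me */\nvar y = 2;\n"

// With { line: true }, contiguous leading // comments are removed as well.
console.log(comment.stripBanner('// note\n// more\nvar z = 3;\n', { line: true }));
// -> "var z = 3;\n"
```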
214
javascript/base/exercices/pipeline/grunt/node_modules/grunt-contrib-concat/tasks/lib/sourcemap.js
generated
vendored
Normal file
@ -0,0 +1,214 @@
/*
 * grunt-contrib-concat
 * http://gruntjs.com/
 *
 * Copyright (c) 2015 "Cowboy" Ben Alman, contributors
 * Licensed under the MIT license.
 */

'use strict';

exports.init = function(grunt) {
  var exports = {};

  // Node first party libs
  var path = require('path');

  // Third party libs
  var chalk = require('chalk');
  var SourceMapConsumer = require('source-map').SourceMapConsumer;
  var SourceMapGenerator = require('source-map').SourceMapGenerator;
  var SourceNode = require('source-map').SourceNode;

  // Return an object that is used to track sourcemap data between calls.
  exports.helper = function(files, options) {
    // Figure out the source map destination.
    var dest = files.dest;
    if (options.sourceMapStyle === 'inline') {
      // Leave dest as is. It will be used to compute relative sources.
    } else if (typeof options.sourceMapName === 'string') {
      dest = options.sourceMapName;
    } else if (typeof options.sourceMapName === 'function') {
      dest = options.sourceMapName(dest);
    } else {
      dest = dest + '.map';
    }

    // The inline style and sourceMapName cannot be used together.
    if (options.sourceMapStyle === 'inline' && options.sourceMapName) {
      grunt.log.warn(
        'Source map will be inlined, sourceMapName option ignored.'
      );
    }

    return new SourceMapConcatHelper({
      files: files,
      dest: dest,
      options: options
    });
  };

  function SourceMapConcatHelper(options) {
    this.files = options.files;
    this.dest = options.dest;
    this.options = options.options;

    // Create the source map node we'll add concat files into.
    this.node = new SourceNode();

    // Create an array to store source maps that are referenced from files
    // being concatenated.
    this.maps = [];
  }

  // Construct a node split by a zero-length regex.
  SourceMapConcatHelper.prototype._dummyNode = function(src, name) {
    var node = new SourceNode();
    var lineIndex = 1;
    var charIndex = 0;
    // Tokenize on words, new lines, and white space.
    var tokens = src.split(/(\n|[^\S\n]+|\b)/g);
    // Filter out empty strings.
    tokens = tokens.filter(function(t) { return !!t; });

    tokens.forEach(function(token) {
      node.add(new SourceNode(lineIndex, charIndex, name, token));
      if (token === '\n') {
        lineIndex++;
        charIndex = 0;
      } else {
        charIndex += token.length;
      }
    });

    return node;
  };

  // Add some arbitrary text to the sourcemap.
  SourceMapConcatHelper.prototype.add = function(src) {
    // Use the dummy node to track new lines and character offset in the unnamed
    // concat pieces (banner, footer, separator).
    this.node.add(this._dummyNode(src));
  };

  // Add the lines of a given file to the sourcemap. If in the file, store a
  // prior sourcemap and return src with sourceMappingURL removed.
  SourceMapConcatHelper.prototype.addlines = function(src, filename) {
    var relativeFilename = path.relative(path.dirname(this.dest), filename);
    // sourceMap path references are URLs, so ensure forward slashes are used for paths passed to sourcemap library
    relativeFilename = relativeFilename.replace(/\\/g, '/');
    var node;
    if (
      /\/\/[@#]\s+sourceMappingURL=(.+)/.test(src) ||
      /\/\*#\s+sourceMappingURL=(\S+)\s+\*\//.test(src)
    ) {
      var sourceMapFile = RegExp.$1;
      var sourceMapPath;

      var sourceContent;
      // Browserify, as an example, stores a datauri at sourceMappingURL.
      if (/data:application\/json;base64,([^\s]+)/.test(sourceMapFile)) {
        // Set sourceMapPath to the file that the map is inlined.
        sourceMapPath = filename;
        sourceContent = new Buffer(RegExp.$1, 'base64').toString();
      } else {
        // If sourceMapPath is relative, expand relative to the file
        // referring to it.
        sourceMapPath = path.resolve(path.dirname(filename), sourceMapFile);
        sourceContent = grunt.file.read(sourceMapPath);
      }
      var sourceMap = JSON.parse(sourceContent);
      var sourceMapConsumer = new SourceMapConsumer(sourceMap);
      // Consider the relative path from source files to new sourcemap.
      var sourcePathToSourceMapPath =
        path.relative(path.dirname(this.dest), path.dirname(sourceMapPath));
      // sourceMap path references are URLs, so ensure forward slashes are used for paths passed to sourcemap library
      sourcePathToSourceMapPath = sourcePathToSourceMapPath.replace(/\\/g, '/');
      // Store the sourceMap so that it may later be consumed.
      this.maps.push([
        sourceMapConsumer, relativeFilename, sourcePathToSourceMapPath
      ]);
      // Remove the old sourceMappingURL.
      src = src.replace(/[@#]\s+sourceMappingURL=[^\s]+/, '');
      // Create a node from the source map for the file.
      node = SourceNode.fromStringWithSourceMap(
        src, sourceMapConsumer, sourcePathToSourceMapPath
      );
    } else {
      // Use a dummy node. Performs a rudimentary tokenization of the source.
      node = this._dummyNode(src, relativeFilename);
    }

    this.node.add(node);

    if (this.options.sourceMapStyle !== 'link') {
      this.node.setSourceContent(relativeFilename, src);
    }

    return src;
  };

  // Return the comment sourceMappingURL that must be appended to the
  // concatenated file.
  SourceMapConcatHelper.prototype.url = function() {
    // Create the map filepath. Either datauri or destination path.
    var mapfilepath;
    if (this.options.sourceMapStyle === 'inline') {
      var inlineMap = new Buffer(this._write()).toString('base64');
      mapfilepath = 'data:application/json;base64,' + inlineMap;
    } else {
      // Compute relative path to source map destination.
      mapfilepath = path.relative(path.dirname(this.files.dest), this.dest);
    }
    // Create the sourceMappingURL.
    var url;
    if (/\.css$/.test(this.files.dest)) {
      url = '\n/*# sourceMappingURL=' + mapfilepath + ' */';
    } else {
      url = '\n//# sourceMappingURL=' + mapfilepath;
    }

    return url;
  };

  // Return a string for inline use or write the source map to disk.
  SourceMapConcatHelper.prototype._write = function() {
    // Ensure we're using forward slashes, because these are URLs.
    var file = path.relative(path.dirname(this.dest), this.files.dest);
    file = file.replace(/\\/g, '/');
    var code_map = this.node.toStringWithSourceMap({
      file: file
    });
    // Consume the new sourcemap.
    var generator = SourceMapGenerator.fromSourceMap(
      new SourceMapConsumer(code_map.map.toJSON())
    );
    // Consume sourcemaps for source files.
    this.maps.forEach(Function.apply.bind(generator.applySourceMap, generator));
    // New sourcemap.
    var newSourceMap = generator.toJSON();
    // Return a string for inline use or write the map.
    if (this.options.sourceMapStyle === 'inline') {
      grunt.log.writeln(
        'Source map for ' + chalk.cyan(this.files.dest) + ' inlined.'
      );
      return JSON.stringify(newSourceMap, null, '');
    } else {
      grunt.file.write(
        this.dest,
        JSON.stringify(newSourceMap, null, '')
      );
      grunt.log.writeln('Source map ' + chalk.cyan(this.dest) + ' created.');
    }
  };

  // Non-private function to write the sourcemap. Shortcuts if writing an inline
  // style map.
  SourceMapConcatHelper.prototype.write = function() {
    if (this.options.sourceMapStyle !== 'inline') {
      this._write();
    }
  };

  return exports;
};
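For reference, here is a rough sketch of how `concat.js` drives this helper for a single src-dest pair. The paths, file contents and options are invented, and the script assumes `grunt` can be required directly so that `grunt.file` and `grunt.log` are available to the helper.

```js
// Standalone sketch of the helper flow used by the concat task above.
var grunt = require('grunt');
var sourcemap = require('grunt-contrib-concat/tasks/lib/sourcemap').init(grunt);

// With sourceMapStyle 'embed' and no sourceMapName, dest becomes 'dist/built.js.map'.
var helper = sourcemap.helper(
  { src: ['src/a.js'], dest: 'dist/built.js' },
  { sourceMap: true, sourceMapStyle: 'embed', separator: '\n' }
);

helper.add('/* banner */\n');                       // unnamed text: banner
var cleaned = helper.addlines('var a = 1;\n', 'src/a.js'); // file content, mapped to src/a.js
helper.write();                                     // writes dist/built.js.map
console.log(helper.url());                          // "\n//# sourceMappingURL=built.js.map"
```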