all da files
This commit is contained in:
1
node_modules/browserify/node_modules/.bin/browser-pack
generated
vendored
Symbolic link
1
node_modules/browserify/node_modules/.bin/browser-pack
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../browser-pack/cmd.js
|
1
node_modules/browserify/node_modules/.bin/insert-module-globals
generated
vendored
Symbolic link
1
node_modules/browserify/node_modules/.bin/insert-module-globals
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../insert-module-globals/bin/cmd.js
|
1
node_modules/browserify/node_modules/.bin/module-deps
generated
vendored
Symbolic link
1
node_modules/browserify/node_modules/.bin/module-deps
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../module-deps/cmd.js
|
2
node_modules/browserify/node_modules/JSONStream/.npmignore
generated
vendored
Normal file
2
node_modules/browserify/node_modules/JSONStream/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
node_modules/*
|
||||
node_modules
|
4
node_modules/browserify/node_modules/JSONStream/.travis.yml
generated
vendored
Normal file
4
node_modules/browserify/node_modules/JSONStream/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- 0.8
|
||||
|
15
node_modules/browserify/node_modules/JSONStream/LICENSE.APACHE2
generated
vendored
Normal file
15
node_modules/browserify/node_modules/JSONStream/LICENSE.APACHE2
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
Apache License, Version 2.0
|
||||
|
||||
Copyright (c) 2011 Dominic Tarr
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
24
node_modules/browserify/node_modules/JSONStream/LICENSE.MIT
generated
vendored
Normal file
24
node_modules/browserify/node_modules/JSONStream/LICENSE.MIT
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2011 Dominic Tarr
|
||||
|
||||
Permission is hereby granted, free of charge,
|
||||
to any person obtaining a copy of this software and
|
||||
associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify,
|
||||
merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom
|
||||
the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
|
||||
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
13
node_modules/browserify/node_modules/JSONStream/examples/all_docs.js
generated
vendored
Normal file
13
node_modules/browserify/node_modules/JSONStream/examples/all_docs.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
var request = require('request')
|
||||
, JSONStream = require('JSONStream')
|
||||
, es = require('event-stream')
|
||||
|
||||
var parser = JSONStream.parse(['rows', true]) //emit parts that match this path (any element of the rows array)
|
||||
, req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
|
||||
, logger = es.mapSync(function (data) { //create a stream that logs to stderr,
|
||||
console.error(data)
|
||||
return data
|
||||
})
|
||||
|
||||
req.pipe(parser)
|
||||
parser.pipe(logger)
|
191
node_modules/browserify/node_modules/JSONStream/index.js
generated
vendored
Normal file
191
node_modules/browserify/node_modules/JSONStream/index.js
generated
vendored
Normal file
@@ -0,0 +1,191 @@
|
||||
var Parser = require('jsonparse')
|
||||
, Stream = require('stream').Stream
|
||||
|
||||
/*
|
||||
|
||||
the value of this.stack that creationix's jsonparse has is weird.
|
||||
|
||||
it makes this code ugly, but his problem is way harder that mine,
|
||||
so i'll forgive him.
|
||||
|
||||
*/
|
||||
|
||||
exports.parse = function (path) {
|
||||
|
||||
var stream = new Stream()
|
||||
var parser = new Parser()
|
||||
var count = 0
|
||||
if(!path || !path.length)
|
||||
path = null
|
||||
parser.onValue = function () {
|
||||
if(!this.root && this.stack.length == 1){
|
||||
stream.root = this.value
|
||||
}
|
||||
if(!path || this.stack.length !== path.length)
|
||||
return
|
||||
var _path = []
|
||||
for( var i = 0; i < (path.length - 1); i++) {
|
||||
var key = path[i]
|
||||
var c = this.stack[1 + (+i)]
|
||||
|
||||
if(!c) {
|
||||
return
|
||||
}
|
||||
var m = check(key, c.key)
|
||||
_path.push(c.key)
|
||||
|
||||
if(!m)
|
||||
return
|
||||
|
||||
}
|
||||
var c = this
|
||||
|
||||
var key = path[path.length - 1]
|
||||
var m = check(key, c.key)
|
||||
if(!m)
|
||||
return
|
||||
_path.push(c.key)
|
||||
|
||||
count ++
|
||||
stream.emit('data', this.value[this.key])
|
||||
for(var i in this.stack)
|
||||
this.stack[i].value = {}
|
||||
}
|
||||
|
||||
parser._onToken = parser.onToken;
|
||||
|
||||
parser.onToken = function (token, value) {
|
||||
parser._onToken(token, value);
|
||||
if (this.stack.length === 0) {
|
||||
if (stream.root) {
|
||||
if(!path)
|
||||
stream.emit('data', stream.root)
|
||||
stream.emit('root', stream.root, count)
|
||||
count = 0;
|
||||
stream.root = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
parser.onError = function (err) {
|
||||
stream.emit('error', err)
|
||||
}
|
||||
stream.readable = true
|
||||
stream.writable = true
|
||||
stream.write = function (chunk) {
|
||||
if('string' === typeof chunk) {
|
||||
if ('undefined' === typeof Buffer) {
|
||||
var buf = new Array(chunk.length)
|
||||
for (var i = 0; i < chunk.length; i++) buf[i] = chunk.charCodeAt(i)
|
||||
chunk = new Int32Array(buf)
|
||||
} else {
|
||||
chunk = new Buffer(chunk)
|
||||
}
|
||||
}
|
||||
parser.write(chunk)
|
||||
}
|
||||
stream.end = function (data) {
|
||||
if(data)
|
||||
stream.write(data)
|
||||
stream.emit('end')
|
||||
}
|
||||
|
||||
stream.destroy = function () {
|
||||
stream.emit('close');
|
||||
}
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
function check (x, y) {
|
||||
if ('string' === typeof x)
|
||||
return y == x
|
||||
else if (x && 'function' === typeof x.exec)
|
||||
return x.exec(y)
|
||||
else if ('boolean' === typeof x)
|
||||
return x
|
||||
else if ('function' === typeof x)
|
||||
return x(y)
|
||||
return false
|
||||
}
|
||||
|
||||
exports.stringify = function (op, sep, cl) {
|
||||
if (op === false){
|
||||
op = ''
|
||||
sep = '\n'
|
||||
cl = ''
|
||||
} else if (op == null) {
|
||||
|
||||
op = '[\n'
|
||||
sep = '\n,\n'
|
||||
cl = '\n]\n'
|
||||
|
||||
}
|
||||
|
||||
//else, what ever you like
|
||||
|
||||
var stream = new Stream ()
|
||||
, first = true
|
||||
, ended = false
|
||||
, anyData = false
|
||||
stream.write = function (data) {
|
||||
anyData = true
|
||||
var json = JSON.stringify(data)
|
||||
if(first) { first = false ; stream.emit('data', op + json)}
|
||||
else stream.emit('data', sep + json)
|
||||
}
|
||||
stream.end = function (data) {
|
||||
if(ended)
|
||||
return
|
||||
ended = true
|
||||
if(data) stream.write(data)
|
||||
if(!anyData) stream.emit('data', op)
|
||||
stream.emit('data', cl)
|
||||
|
||||
stream.emit('end')
|
||||
}
|
||||
stream.writable = true
|
||||
stream.readable = true
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
exports.stringifyObject = function (op, sep, cl) {
|
||||
if (op === false){
|
||||
op = ''
|
||||
sep = '\n'
|
||||
cl = ''
|
||||
} else if (op == null) {
|
||||
|
||||
op = '{\n'
|
||||
sep = '\n,\n'
|
||||
cl = '\n}\n'
|
||||
|
||||
}
|
||||
|
||||
//else, what ever you like
|
||||
|
||||
var stream = new Stream ()
|
||||
, first = true
|
||||
, ended = false
|
||||
, anyData = false
|
||||
stream.write = function (data) {
|
||||
anyData = true
|
||||
var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1])
|
||||
if(first) { first = false ; stream.emit('data', op + json)}
|
||||
else stream.emit('data', sep + json)
|
||||
}
|
||||
stream.end = function (data) {
|
||||
if(ended) return
|
||||
ended = true
|
||||
if(data) stream.write(data)
|
||||
if(!anyData) stream.emit('data', op)
|
||||
stream.emit('data', cl)
|
||||
|
||||
stream.emit('end')
|
||||
}
|
||||
stream.writable = true
|
||||
stream.readable = true
|
||||
|
||||
return stream
|
||||
}
|
24
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/LICENSE
generated
vendored
Normal file
24
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2012 Tim Caswell
|
||||
|
||||
Permission is hereby granted, free of charge,
|
||||
to any person obtaining a copy of this software and
|
||||
associated documentation files (the "Software"), to
|
||||
deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify,
|
||||
merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom
|
||||
the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
|
||||
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
11
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/README.markdown
generated
vendored
Normal file
11
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/README.markdown
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
This is a streaming JSON parser. For a simpler, sax-based version see this gist: https://gist.github.com/1821394
|
||||
|
||||
The MIT License (MIT)
|
||||
Copyright (c) 2011-2012 Tim Caswell
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
26
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/bench.js
generated
vendored
Normal file
26
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/bench.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
var fs = require('fs'),
|
||||
Parser = require('./jsonparse');
|
||||
|
||||
|
||||
var json = fs.readFileSync("samplejson/basic.json");
|
||||
|
||||
|
||||
while (true) {
|
||||
var start = Date.now();
|
||||
for (var i = 0; i < 1000; i++) {
|
||||
JSON.parse(json);
|
||||
}
|
||||
var first = Date.now() - start;
|
||||
|
||||
start = Date.now();
|
||||
var p = new Parser();
|
||||
for (var i = 0; i < 1000; i++) {
|
||||
p.write(json);
|
||||
}
|
||||
var second = Date.now() - start;
|
||||
|
||||
|
||||
console.log("JSON.parse took %s", first);
|
||||
console.log("streaming parser took %s", second);
|
||||
console.log("streaming is %s times slower", second / first);
|
||||
}
|
30
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/examples/twitterfeed.js
generated
vendored
Normal file
30
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/examples/twitterfeed.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
var Parser = require('../jsonparse');
|
||||
var Http = require('http');
|
||||
require('./colors');
|
||||
var p = new Parser();
|
||||
var cred = require('./credentials');
|
||||
var client = Http.createClient(80, "stream.twitter.com");
|
||||
var request = client.request("GET", "/1/statuses/sample.json", {
|
||||
"Host": "stream.twitter.com",
|
||||
"Authorization": (new Buffer(cred.username + ":" + cred.password)).toString("base64")
|
||||
});
|
||||
request.on('response', function (response) {
|
||||
console.log(response.statusCode);
|
||||
console.dir(response.headers);
|
||||
response.on('data', function (chunk) {
|
||||
p.write(chunk);
|
||||
});
|
||||
response.on('end', function () {
|
||||
console.log("END");
|
||||
});
|
||||
});
|
||||
request.end();
|
||||
var text = "", name = "";
|
||||
p.onValue = function (value) {
|
||||
if (this.stack.length === 1 && this.key === 'text') { text = value; }
|
||||
if (this.stack.length === 2 && this.key === 'name' && this.stack[1].key === 'user') { name = value; }
|
||||
if (this.stack.length === 0) {
|
||||
console.log(text.blue + " - " + name.yellow);
|
||||
text = name = "";
|
||||
}
|
||||
};
|
401
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/jsonparse.js
generated
vendored
Normal file
401
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/jsonparse.js
generated
vendored
Normal file
@@ -0,0 +1,401 @@
|
||||
/*global Buffer*/
|
||||
// Named constants with unique integer values
|
||||
var C = {};
|
||||
// Tokens
|
||||
var LEFT_BRACE = C.LEFT_BRACE = 0x1;
|
||||
var RIGHT_BRACE = C.RIGHT_BRACE = 0x2;
|
||||
var LEFT_BRACKET = C.LEFT_BRACKET = 0x3;
|
||||
var RIGHT_BRACKET = C.RIGHT_BRACKET = 0x4;
|
||||
var COLON = C.COLON = 0x5;
|
||||
var COMMA = C.COMMA = 0x6;
|
||||
var TRUE = C.TRUE = 0x7;
|
||||
var FALSE = C.FALSE = 0x8;
|
||||
var NULL = C.NULL = 0x9;
|
||||
var STRING = C.STRING = 0xa;
|
||||
var NUMBER = C.NUMBER = 0xb;
|
||||
// Tokenizer States
|
||||
var START = C.START = 0x11;
|
||||
var TRUE1 = C.TRUE1 = 0x21;
|
||||
var TRUE2 = C.TRUE2 = 0x22;
|
||||
var TRUE3 = C.TRUE3 = 0x23;
|
||||
var FALSE1 = C.FALSE1 = 0x31;
|
||||
var FALSE2 = C.FALSE2 = 0x32;
|
||||
var FALSE3 = C.FALSE3 = 0x33;
|
||||
var FALSE4 = C.FALSE4 = 0x34;
|
||||
var NULL1 = C.NULL1 = 0x41;
|
||||
var NULL2 = C.NULL3 = 0x42;
|
||||
var NULL3 = C.NULL2 = 0x43;
|
||||
var NUMBER1 = C.NUMBER1 = 0x51;
|
||||
var NUMBER2 = C.NUMBER2 = 0x52;
|
||||
var NUMBER3 = C.NUMBER3 = 0x53;
|
||||
var NUMBER4 = C.NUMBER4 = 0x54;
|
||||
var NUMBER5 = C.NUMBER5 = 0x55;
|
||||
var NUMBER6 = C.NUMBER6 = 0x56;
|
||||
var NUMBER7 = C.NUMBER7 = 0x57;
|
||||
var NUMBER8 = C.NUMBER8 = 0x58;
|
||||
var STRING1 = C.STRING1 = 0x61;
|
||||
var STRING2 = C.STRING2 = 0x62;
|
||||
var STRING3 = C.STRING3 = 0x63;
|
||||
var STRING4 = C.STRING4 = 0x64;
|
||||
var STRING5 = C.STRING5 = 0x65;
|
||||
var STRING6 = C.STRING6 = 0x66;
|
||||
// Parser States
|
||||
var VALUE = C.VALUE = 0x71;
|
||||
var KEY = C.KEY = 0x72;
|
||||
// Parser Modes
|
||||
var OBJECT = C.OBJECT = 0x81;
|
||||
var ARRAY = C.ARRAY = 0x82;
|
||||
|
||||
// Slow code to string converter (only used when throwing syntax errors)
|
||||
function toknam(code) {
|
||||
var keys = Object.keys(C);
|
||||
for (var i = 0, l = keys.length; i < l; i++) {
|
||||
var key = keys[i];
|
||||
if (C[key] === code) { return key; }
|
||||
}
|
||||
return code && ("0x" + code.toString(16));
|
||||
}
|
||||
|
||||
|
||||
function Parser() {
|
||||
this.tState = START;
|
||||
this.value = undefined;
|
||||
|
||||
this.string = undefined; // string data
|
||||
this.unicode = undefined; // unicode escapes
|
||||
|
||||
// For number parsing
|
||||
this.negative = undefined;
|
||||
this.magnatude = undefined;
|
||||
this.position = undefined;
|
||||
this.exponent = undefined;
|
||||
this.negativeExponent = undefined;
|
||||
|
||||
this.key = undefined;
|
||||
this.mode = undefined;
|
||||
this.stack = [];
|
||||
this.state = VALUE;
|
||||
this.bytes_remaining = 0; // number of bytes remaining in multi byte utf8 char to read after split boundary
|
||||
this.bytes_in_sequence = 0; // bytes in multi byte utf8 char to read
|
||||
this.temp_buffs = { "2": new Buffer(2), "3": new Buffer(3), "4": new Buffer(4) }; // for rebuilding chars split before boundary is reached
|
||||
}
|
||||
var proto = Parser.prototype;
|
||||
proto.charError = function (buffer, i) {
|
||||
this.onError(new Error("Unexpected " + JSON.stringify(String.fromCharCode(buffer[i])) + " at position " + i + " in state " + toknam(this.tState)));
|
||||
};
|
||||
proto.onError = function (err) { throw err; };
|
||||
proto.write = function (buffer) {
|
||||
if (typeof buffer === "string") buffer = new Buffer(buffer);
|
||||
//process.stdout.write("Input: ");
|
||||
//console.dir(buffer.toString());
|
||||
var n;
|
||||
for (var i = 0, l = buffer.length; i < l; i++) {
|
||||
if (this.tState === START){
|
||||
n = buffer[i];
|
||||
if(n === 0x7b){ this.onToken(LEFT_BRACE, "{"); // {
|
||||
}else if(n === 0x7d){ this.onToken(RIGHT_BRACE, "}"); // }
|
||||
}else if(n === 0x5b){ this.onToken(LEFT_BRACKET, "["); // [
|
||||
}else if(n === 0x5d){ this.onToken(RIGHT_BRACKET, "]"); // ]
|
||||
}else if(n === 0x3a){ this.onToken(COLON, ":"); // :
|
||||
}else if(n === 0x2c){ this.onToken(COMMA, ","); // ,
|
||||
}else if(n === 0x74){ this.tState = TRUE1; // t
|
||||
}else if(n === 0x66){ this.tState = FALSE1; // f
|
||||
}else if(n === 0x6e){ this.tState = NULL1; // n
|
||||
}else if(n === 0x22){ this.string = ""; this.tState = STRING1; // "
|
||||
}else if(n === 0x2d){ this.negative = true; this.tState = NUMBER1; // -
|
||||
}else if(n === 0x30){ this.magnatude = 0; this.tState = NUMBER2; // 0
|
||||
}else{
|
||||
if (n > 0x30 && n < 0x40) { // 1-9
|
||||
this.magnatude = n - 0x30; this.tState = NUMBER3;
|
||||
} else if (n === 0x20 || n === 0x09 || n === 0x0a || n === 0x0d) {
|
||||
// whitespace
|
||||
} else { this.charError(buffer, i); }
|
||||
}
|
||||
}else if (this.tState === STRING1){ // After open quote
|
||||
n = buffer[i]; // get current byte from buffer
|
||||
// check for carry over of a multi byte char split between data chunks
|
||||
// & fill temp buffer it with start of this data chunk up to the boundary limit set in the last iteration
|
||||
if (this.bytes_remaining > 0) {
|
||||
for (var j = 0; j < this.bytes_remaining; j++) {
|
||||
this.temp_buffs[this.bytes_in_sequence][this.bytes_in_sequence - this.bytes_remaining + j] = buffer[j];
|
||||
}
|
||||
this.string += this.temp_buffs[this.bytes_in_sequence].toString();
|
||||
this.bytes_in_sequence = this.bytes_remaining = 0;
|
||||
i = i + j - 1;
|
||||
} else if (this.bytes_remaining === 0 && n >= 128) { // else if no remainder bytes carried over, parse multi byte (>=128) chars one at a time
|
||||
if ((n >= 194) && (n <= 223)) this.bytes_in_sequence = 2;
|
||||
if ((n >= 224) && (n <= 239)) this.bytes_in_sequence = 3;
|
||||
if ((n >= 240) && (n <= 244)) this.bytes_in_sequence = 4;
|
||||
if ((this.bytes_in_sequence + i) > buffer.length) { // if bytes needed to complete char fall outside buffer length, we have a boundary split
|
||||
for (var k = 0; k <= (buffer.length - 1 - i); k++) {
|
||||
this.temp_buffs[this.bytes_in_sequence][k] = buffer[i + k]; // fill temp buffer of correct size with bytes available in this chunk
|
||||
}
|
||||
this.bytes_remaining = (i + this.bytes_in_sequence) - buffer.length;
|
||||
i = buffer.length - 1;
|
||||
} else {
|
||||
this.string += buffer.slice(i, (i + this.bytes_in_sequence)).toString();
|
||||
i = i + this.bytes_in_sequence - 1;
|
||||
}
|
||||
} else if (n === 0x22) { this.tState = START; this.onToken(STRING, this.string); this.string = undefined; }
|
||||
else if (n === 0x5c) { this.tState = STRING2; }
|
||||
else if (n >= 0x20) { this.string += String.fromCharCode(n); }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === STRING2){ // After backslash
|
||||
n = buffer[i];
|
||||
if(n === 0x22){ this.string += "\""; this.tState = STRING1;
|
||||
}else if(n === 0x5c){ this.string += "\\"; this.tState = STRING1;
|
||||
}else if(n === 0x2f){ this.string += "\/"; this.tState = STRING1;
|
||||
}else if(n === 0x62){ this.string += "\b"; this.tState = STRING1;
|
||||
}else if(n === 0x66){ this.string += "\f"; this.tState = STRING1;
|
||||
}else if(n === 0x6e){ this.string += "\n"; this.tState = STRING1;
|
||||
}else if(n === 0x72){ this.string += "\r"; this.tState = STRING1;
|
||||
}else if(n === 0x74){ this.string += "\t"; this.tState = STRING1;
|
||||
}else if(n === 0x75){ this.unicode = ""; this.tState = STRING3;
|
||||
}else{
|
||||
this.charError(buffer, i);
|
||||
}
|
||||
}else if (this.tState === STRING3 || this.tState === STRING4 || this.tState === STRING5 || this.tState === STRING6){ // unicode hex codes
|
||||
n = buffer[i];
|
||||
// 0-9 A-F a-f
|
||||
if ((n >= 0x30 && n < 0x40) || (n > 0x40 && n <= 0x46) || (n > 0x60 && n <= 0x66)) {
|
||||
this.unicode += String.fromCharCode(n);
|
||||
if (this.tState++ === STRING6) {
|
||||
this.string += String.fromCharCode(parseInt(this.unicode, 16));
|
||||
this.unicode = undefined;
|
||||
this.tState = STRING1;
|
||||
}
|
||||
} else {
|
||||
this.charError(buffer, i);
|
||||
}
|
||||
}else if (this.tState === NUMBER1){ // after minus
|
||||
n = buffer[i];
|
||||
if (n === 0x30) { this.magnatude = 0; this.tState = NUMBER2; }
|
||||
else if (n > 0x30 && n < 0x40) { this.magnatude = n - 0x30; this.tState = NUMBER3; }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === NUMBER2){ // * After initial zero
|
||||
n = buffer[i];
|
||||
if(n === 0x2e){ // .
|
||||
this.position = 0.1; this.tState = NUMBER4;
|
||||
}else if(n === 0x65 || n === 0x45){ // e/E
|
||||
this.exponent = 0; this.tState = NUMBER6;
|
||||
}else{
|
||||
this.tState = START;
|
||||
this.onToken(NUMBER, 0);
|
||||
this.magnatude = undefined;
|
||||
this.negative = undefined;
|
||||
i--;
|
||||
}
|
||||
}else if (this.tState === NUMBER3){ // * After digit (before period)
|
||||
n = buffer[i];
|
||||
if(n === 0x2e){ // .
|
||||
this.position = 0.1; this.tState = NUMBER4;
|
||||
}else if(n === 0x65 || n === 0x45){ // e/E
|
||||
this.exponent = 0; this.tState = NUMBER6;
|
||||
}else{
|
||||
if (n >= 0x30 && n < 0x40) { this.magnatude = this.magnatude * 10 + n - 0x30; }
|
||||
else {
|
||||
this.tState = START;
|
||||
if (this.negative) {
|
||||
this.magnatude = -this.magnatude;
|
||||
this.negative = undefined;
|
||||
}
|
||||
this.onToken(NUMBER, this.magnatude);
|
||||
this.magnatude = undefined;
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}else if (this.tState === NUMBER4){ // After period
|
||||
n = buffer[i];
|
||||
if (n >= 0x30 && n < 0x40) { // 0-9
|
||||
this.magnatude += this.position * (n - 0x30);
|
||||
this.position /= 10;
|
||||
this.tState = NUMBER5;
|
||||
} else { this.charError(buffer, i); }
|
||||
}else if (this.tState === NUMBER5){ // * After digit (after period)
|
||||
n = buffer[i];
|
||||
if (n >= 0x30 && n < 0x40) { // 0-9
|
||||
this.magnatude += this.position * (n - 0x30);
|
||||
this.position /= 10;
|
||||
}
|
||||
else if (n === 0x65 || n === 0x45) { this.exponent = 0; this.tState = NUMBER6; } // E/e
|
||||
else {
|
||||
this.tState = START;
|
||||
if (this.negative) {
|
||||
this.magnatude = -this.magnatude;
|
||||
this.negative = undefined;
|
||||
}
|
||||
this.onToken(NUMBER, this.negative ? -this.magnatude : this.magnatude);
|
||||
this.magnatude = undefined;
|
||||
this.position = undefined;
|
||||
i--;
|
||||
}
|
||||
}else if (this.tState === NUMBER6){ // After E
|
||||
n = buffer[i];
|
||||
if (n === 0x2b || n === 0x2d) { // +/-
|
||||
if (n === 0x2d) { this.negativeExponent = true; }
|
||||
this.tState = NUMBER7;
|
||||
}
|
||||
else if (n >= 0x30 && n < 0x40) {
|
||||
this.exponent = this.exponent * 10 + (n - 0x30);
|
||||
this.tState = NUMBER8;
|
||||
}
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === NUMBER7){ // After +/-
|
||||
n = buffer[i];
|
||||
if (n >= 0x30 && n < 0x40) { // 0-9
|
||||
this.exponent = this.exponent * 10 + (n - 0x30);
|
||||
this.tState = NUMBER8;
|
||||
}
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === NUMBER8){ // * After digit (after +/-)
|
||||
n = buffer[i];
|
||||
if (n >= 0x30 && n < 0x40) { // 0-9
|
||||
this.exponent = this.exponent * 10 + (n - 0x30);
|
||||
}
|
||||
else {
|
||||
if (this.negativeExponent) {
|
||||
this.exponent = -this.exponent;
|
||||
this.negativeExponent = undefined;
|
||||
}
|
||||
this.magnatude *= Math.pow(10, this.exponent);
|
||||
this.exponent = undefined;
|
||||
if (this.negative) {
|
||||
this.magnatude = -this.magnatude;
|
||||
this.negative = undefined;
|
||||
}
|
||||
this.tState = START;
|
||||
this.onToken(NUMBER, this.magnatude);
|
||||
this.magnatude = undefined;
|
||||
i--;
|
||||
}
|
||||
}else if (this.tState === TRUE1){ // r
|
||||
if (buffer[i] === 0x72) { this.tState = TRUE2; }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === TRUE2){ // u
|
||||
if (buffer[i] === 0x75) { this.tState = TRUE3; }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === TRUE3){ // e
|
||||
if (buffer[i] === 0x65) { this.tState = START; this.onToken(TRUE, true); }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === FALSE1){ // a
|
||||
if (buffer[i] === 0x61) { this.tState = FALSE2; }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === FALSE2){ // l
|
||||
if (buffer[i] === 0x6c) { this.tState = FALSE3; }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === FALSE3){ // s
|
||||
if (buffer[i] === 0x73) { this.tState = FALSE4; }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === FALSE4){ // e
|
||||
if (buffer[i] === 0x65) { this.tState = START; this.onToken(FALSE, false); }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === NULL1){ // u
|
||||
if (buffer[i] === 0x75) { this.tState = NULL2; }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === NULL2){ // l
|
||||
if (buffer[i] === 0x6c) { this.tState = NULL3; }
|
||||
else { this.charError(buffer, i); }
|
||||
}else if (this.tState === NULL3){ // l
|
||||
if (buffer[i] === 0x6c) { this.tState = START; this.onToken(NULL, null); }
|
||||
else { this.charError(buffer, i); }
|
||||
}
|
||||
}
|
||||
};
|
||||
proto.onToken = function (token, value) {
|
||||
// Override this to get events
|
||||
};
|
||||
|
||||
proto.parseError = function (token, value) {
|
||||
this.onError(new Error("Unexpected " + toknam(token) + (value ? ("(" + JSON.stringify(value) + ")") : "") + " in state " + toknam(this.state)));
|
||||
};
|
||||
proto.onError = function (err) { throw err; };
|
||||
proto.push = function () {
|
||||
this.stack.push({value: this.value, key: this.key, mode: this.mode});
|
||||
};
|
||||
proto.pop = function () {
|
||||
var value = this.value;
|
||||
var parent = this.stack.pop();
|
||||
this.value = parent.value;
|
||||
this.key = parent.key;
|
||||
this.mode = parent.mode;
|
||||
this.emit(value);
|
||||
if (!this.mode) { this.state = VALUE; }
|
||||
};
|
||||
proto.emit = function (value) {
|
||||
if (this.mode) { this.state = COMMA; }
|
||||
this.onValue(value);
|
||||
};
|
||||
proto.onValue = function (value) {
|
||||
// Override me
|
||||
};
|
||||
proto.onToken = function (token, value) {
|
||||
//console.log("OnToken: state=%s token=%s %s", toknam(this.state), toknam(token), value?JSON.stringify(value):"");
|
||||
if(this.state === VALUE){
|
||||
if(token === STRING || token === NUMBER || token === TRUE || token === FALSE || token === NULL){
|
||||
if (this.value) {
|
||||
this.value[this.key] = value;
|
||||
}
|
||||
this.emit(value);
|
||||
}else if(token === LEFT_BRACE){
|
||||
this.push();
|
||||
if (this.value) {
|
||||
this.value = this.value[this.key] = {};
|
||||
} else {
|
||||
this.value = {};
|
||||
}
|
||||
this.key = undefined;
|
||||
this.state = KEY;
|
||||
this.mode = OBJECT;
|
||||
}else if(token === LEFT_BRACKET){
|
||||
this.push();
|
||||
if (this.value) {
|
||||
this.value = this.value[this.key] = [];
|
||||
} else {
|
||||
this.value = [];
|
||||
}
|
||||
this.key = 0;
|
||||
this.mode = ARRAY;
|
||||
this.state = VALUE;
|
||||
}else if(token === RIGHT_BRACE){
|
||||
if (this.mode === OBJECT) {
|
||||
this.pop();
|
||||
} else {
|
||||
this.parseError(token, value);
|
||||
}
|
||||
}else if(token === RIGHT_BRACKET){
|
||||
if (this.mode === ARRAY) {
|
||||
this.pop();
|
||||
} else {
|
||||
this.parseError(token, value);
|
||||
}
|
||||
}else{
|
||||
this.parseError(token, value);
|
||||
}
|
||||
}else if(this.state === KEY){
|
||||
if (token === STRING) {
|
||||
this.key = value;
|
||||
this.state = COLON;
|
||||
} else if (token === RIGHT_BRACE) {
|
||||
this.pop();
|
||||
} else {
|
||||
this.parseError(token, value);
|
||||
}
|
||||
}else if(this.state === COLON){
|
||||
if (token === COLON) { this.state = VALUE; }
|
||||
else { this.parseError(token, value); }
|
||||
}else if(this.state === COMMA){
|
||||
if (token === COMMA) {
|
||||
if (this.mode === ARRAY) { this.key++; this.state = VALUE; }
|
||||
else if (this.mode === OBJECT) { this.state = KEY; }
|
||||
|
||||
} else if (token === RIGHT_BRACKET && this.mode === ARRAY || token === RIGHT_BRACE && this.mode === OBJECT) {
|
||||
this.pop();
|
||||
} else {
|
||||
this.parseError(token, value);
|
||||
}
|
||||
}else{
|
||||
this.parseError(token, value);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = Parser;
|
36
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/package.json
generated
vendored
Normal file
36
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/package.json
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"name": "jsonparse",
|
||||
"description": "This is a pure-js JSON streaming parser for node.js",
|
||||
"tags": [
|
||||
"json",
|
||||
"stream"
|
||||
],
|
||||
"version": "0.0.5",
|
||||
"author": {
|
||||
"name": "Tim Caswell",
|
||||
"email": "tim@creationix.com"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "http://github.com/creationix/jsonparse.git"
|
||||
},
|
||||
"devDependencies": {
|
||||
"tape": "~0.1.1",
|
||||
"tap": "~0.3.3"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap test/*.js"
|
||||
},
|
||||
"bugs": "http://github.com/creationix/jsonparse/issues",
|
||||
"engines": [
|
||||
"node >= 0.2.0"
|
||||
],
|
||||
"license": "MIT",
|
||||
"main": "jsonparse.js",
|
||||
"readme": "This is a streaming JSON parser. For a simpler, sax-based version see this gist: https://gist.github.com/1821394\n\nThe MIT License (MIT)\nCopyright (c) 2011-2012 Tim Caswell\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n",
|
||||
"_id": "jsonparse@0.0.5",
|
||||
"dist": {
|
||||
"shasum": "f19c1fbd65775bf20a6379b86a64b7745820c137"
|
||||
},
|
||||
"_from": "jsonparse@0.0.5"
|
||||
}
|
167
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic.json
generated
vendored
Normal file
167
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic.json
generated
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
[
|
||||
{
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "rect", "fill": "#333", "stroke": "#999", "x": 0.5e+1, "y": 0.5, "z": 0.8e-0, "w": 0.5e5, "u": 2E10, "foo": 2E+1, "bar": 2E-0, "width": 47, "height": 47}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": true,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,47.5],[47.5,47.5],[47.5,0.5]]}
|
||||
],
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [1],
|
||||
"3": [2],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": false,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,0.5],[47.5,47.5],[0.5,47.5]]}
|
||||
],
|
||||
"solid": {
|
||||
"1": [2],
|
||||
"2": [3],
|
||||
"3": [2,6],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": true,"9": false}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,47.5],[47.5,0.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [3],
|
||||
"7": [4,8],
|
||||
"8": [7],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": false,"3": true,"7": true,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[0.5,47.5],[47.5,0.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [1],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [9],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": false,"7": true,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,47.5],[0.5,23.5],[24.5,23.5],[24.5,0.5],[47.5,0.5],[47.5,47.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [6,2],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [9],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": false,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,0.5],[23.5,0.5],[23.5,24.5],[47.5,24.5],[47.5,47.5],[0.5,47.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [4,2],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [7],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": true,"9": false}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "circle", "fill": "#ff0", "stroke": "#ff8", "cx": 24, "cy": 24, "r": 18}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#842", "stroke": "#f84", "points": [[4.5,0.5],[14.5,0.5],[14.5,17.5],[34,17.5],[33.5,0.5],[43.5,0.5],[43.5,47.5],[33.5,47.5],[33.5,30.5],[14.5,30.5],[14.5,47.5],[4.5,47.5]]}
|
||||
],
|
||||
"jumpable": 3
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,0.5],[24,47.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [1],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [3],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": false,"3": false,"7": true,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "rect", "fill": "#114acb", "x": 0.5, "y": 0.5, "width": 47, "height": 47},
|
||||
{"shape": "polygon", "fill": "rgba(255,255,255,0.30)", "points": [[0.5,0.5],[47.5,0.5],[40,8],[8,8],[8,40],[0.5,47.5]]},
|
||||
{"shape": "polygon", "fill": "rgba(0,0,0,0.30)", "points": [[47.5,0.5],[48,48],[0.5,47.5],[8,40],[40,40],[40,8]]},
|
||||
{"shape": "polygon", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "points": [[24,9],[35,20],[26,29],[26,33],[22,33],[22,27],[29,20],[24,15],[16,23],[13,20]]},
|
||||
{"shape": "rect", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "x": 22, "y":35, "width": 4, "height": 4}
|
||||
]
|
||||
}
|
||||
]
|
180
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic2.json
generated
vendored
Normal file
180
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/samplejson/basic2.json
generated
vendored
Normal file
@@ -0,0 +1,180 @@
|
||||
[
|
||||
{
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "rect", "fill": "#333", "stroke": "#999", "x": 0.5, "y": 0.5, "width": 47, "height": 47}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": true,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,47.5],[47.5,47.5],[47.5,0.5]]}
|
||||
],
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [1],
|
||||
"3": [2],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": false,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,0.5],[47.5,47.5],[0.5,47.5]]}
|
||||
],
|
||||
"solid": {
|
||||
"1": [2],
|
||||
"2": [3],
|
||||
"3": [2,6],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": true,"9": false}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,47.5],[47.5,0.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [3],
|
||||
"7": [4,8],
|
||||
"8": [7],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": false,"3": true,"7": true,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[0.5,47.5],[47.5,0.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [1],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [9],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": false,"7": true,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,47.5],[0.5,23.5],[24.5,23.5],[24.5,0.5],[47.5,0.5],[47.5,47.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [6,2],
|
||||
"4": [],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [9],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": false,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,0.5],[23.5,0.5],[23.5,24.5],[47.5,24.5],[47.5,47.5],[0.5,47.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [4,2],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [7],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": true,"3": true,"7": true,"9": false}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "circle", "fill": "#ff0", "stroke": "#ff8", "cx": 24, "cy": 24, "r": 18}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#842", "stroke": "#f84", "points": [[4.5,0.5],[14.5,0.5],[14.5,17.5],[34,17.5],[33.5,0.5],[43.5,0.5],[43.5,47.5],[33.5,47.5],[33.5,30.5],[14.5,30.5],[14.5,47.5],[4.5,47.5]]}
|
||||
],
|
||||
"jumpable": 3
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,0.5],[24,47.5]]}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2,4],
|
||||
"2": [],
|
||||
"3": [2,6],
|
||||
"4": [1],
|
||||
"5": [2,8,1,3,7,9,4,6],
|
||||
"6": [3],
|
||||
"7": [4,8],
|
||||
"8": [],
|
||||
"9": [6,8]
|
||||
},
|
||||
"corners": {"1": false,"3": false,"7": true,"9": true}
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "rect", "fill": "#114acb", "x": 0.5, "y": 0.5, "width": 47, "height": 47},
|
||||
{"shape": "polygon", "fill": "rgba(255,255,255,0.30)", "points": [[0.5,0.5],[47.5,0.5],[40,8],[8,8],[8,40],[0.5,47.5]]},
|
||||
{"shape": "polygon", "fill": "rgba(0,0,0,0.30)", "points": [[47.5,0.5],[48,48],[0.5,47.5],[8,40],[40,40],[40,8]]},
|
||||
{"shape": "polygon", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "points": [[24,9],[35,20],[26,29],[26,33],[22,33],[22,27],[29,20],[24,15],[16,23],[13,20]]},
|
||||
{"shape": "rect", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "x": 22, "y":35, "width": 4, "height": 4}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "circle", "fill": "#80f", "stroke": "#88f", "cx": 24, "cy": 24, "r": 18}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{"shape": "circle", "fill": "#4f4", "stroke": "#8f8", "cx": 24, "cy": 24, "r": 18}
|
||||
],
|
||||
"item": true
|
||||
}
|
||||
]
|
110
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/test/boundary.js
generated
vendored
Normal file
110
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/test/boundary.js
generated
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
var test = require('tape');
|
||||
var Parser = require('../');
|
||||
|
||||
test('2 byte utf8 \'De\' character: д', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var p = new Parser();
|
||||
p.onValue = function (value) {
|
||||
t.equal(value, 'д');
|
||||
};
|
||||
|
||||
var de_buffer = new Buffer([0xd0, 0xb4]);
|
||||
|
||||
p.write('"');
|
||||
p.write(de_buffer);
|
||||
p.write('"');
|
||||
|
||||
});
|
||||
|
||||
test('3 byte utf8 \'Han\' character: 我', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var p = new Parser();
|
||||
p.onValue = function (value) {
|
||||
t.equal(value, '我');
|
||||
};
|
||||
|
||||
var han_buffer = new Buffer([0xe6, 0x88, 0x91]);
|
||||
p.write('"');
|
||||
p.write(han_buffer);
|
||||
p.write('"');
|
||||
});
|
||||
|
||||
test('4 byte utf8 character (unicode scalar U+2070E): 𠜎', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var p = new Parser();
|
||||
p.onValue = function (value) {
|
||||
t.equal(value, '𠜎');
|
||||
};
|
||||
|
||||
var Ux2070E_buffer = new Buffer([0xf0, 0xa0, 0x9c, 0x8e]);
|
||||
p.write('"');
|
||||
p.write(Ux2070E_buffer);
|
||||
p.write('"');
|
||||
});
|
||||
|
||||
test('3 byte utf8 \'Han\' character chunked inbetween 2nd and 3rd byte: 我', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var p = new Parser();
|
||||
p.onValue = function (value) {
|
||||
t.equal(value, '我');
|
||||
};
|
||||
|
||||
var han_buffer_first = new Buffer([0xe6, 0x88]);
|
||||
var han_buffer_second = new Buffer([0x91]);
|
||||
p.write('"');
|
||||
p.write(han_buffer_first);
|
||||
p.write(han_buffer_second);
|
||||
p.write('"');
|
||||
});
|
||||
|
||||
test('4 byte utf8 character (unicode scalar U+2070E) chunked inbetween 2nd and 3rd byte: 𠜎', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var p = new Parser();
|
||||
p.onValue = function (value) {
|
||||
t.equal(value, '𠜎');
|
||||
};
|
||||
|
||||
var Ux2070E_buffer_first = new Buffer([0xf0, 0xa0]);
|
||||
var Ux2070E_buffer_second = new Buffer([0x9c, 0x8e]);
|
||||
p.write('"');
|
||||
p.write(Ux2070E_buffer_first);
|
||||
p.write(Ux2070E_buffer_second);
|
||||
p.write('"');
|
||||
});
|
||||
|
||||
test('1-4 byte utf8 character string chunked inbetween random bytes: Aж文𠜱B', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var p = new Parser();
|
||||
p.onValue = function (value) {
|
||||
t.equal(value, 'Aж文𠜱B');
|
||||
};
|
||||
|
||||
var eclectic_buffer = new Buffer([0x41, // A
|
||||
0xd0, 0xb6, // ж
|
||||
0xe6, 0x96, 0x87, // 文
|
||||
0xf0, 0xa0, 0x9c, 0xb1, // 𠜱
|
||||
0x42]); // B
|
||||
|
||||
var rand_chunk = Math.floor(Math.random() * (eclectic_buffer.length));
|
||||
var first_buffer = eclectic_buffer.slice(0, rand_chunk);
|
||||
var second_buffer = eclectic_buffer.slice(rand_chunk);
|
||||
|
||||
//console.log('eclectic_buffer: ' + eclectic_buffer)
|
||||
//console.log('sliced from 0 to ' + rand_chunk);
|
||||
//console.log(first_buffer);
|
||||
//console.log('then sliced from ' + rand_chunk + ' to the end');
|
||||
//console.log(second_buffer);
|
||||
|
||||
console.log('chunked after offset ' + rand_chunk);
|
||||
p.write('"');
|
||||
p.write(first_buffer);
|
||||
p.write(second_buffer);
|
||||
p.write('"');
|
||||
|
||||
});
|
54
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/test/primitives.js
generated
vendored
Normal file
54
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/test/primitives.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
var test = require('tape');
|
||||
var Parser = require('../');
|
||||
|
||||
var expected = [
|
||||
[ [], '' ],
|
||||
[ [], 'Hello' ],
|
||||
[ [], 'This"is' ],
|
||||
[ [], '\r\n\f\t\\/"' ],
|
||||
[ [], 'Λάμβδα' ],
|
||||
[ [], '\\' ],
|
||||
[ [], '/' ],
|
||||
[ [], '"' ],
|
||||
[ [ 0 ], 0 ],
|
||||
[ [ 1 ], 1 ],
|
||||
[ [ 2 ], -1 ],
|
||||
[ [], [ 0, 1, -1 ] ],
|
||||
[ [ 0 ], 1 ],
|
||||
[ [ 1 ], 1.1 ],
|
||||
[ [ 2 ], -1.1 ],
|
||||
[ [ 3 ], -1 ],
|
||||
[ [], [ 1, 1.1, -1.1, -1 ] ],
|
||||
[ [ 0 ], -1 ],
|
||||
[ [], [ -1 ] ],
|
||||
[ [ 0 ], -0.1 ],
|
||||
[ [], [ -0.1 ] ],
|
||||
[ [ 0 ], 6.019999999999999e+23 ],
|
||||
[ [], [ 6.019999999999999e+23 ] ]
|
||||
];
|
||||
|
||||
test('primitives', function (t) {
|
||||
t.plan(23);
|
||||
|
||||
var p = new Parser();
|
||||
p.onValue = function (value) {
|
||||
var keys = this.stack
|
||||
.slice(1)
|
||||
.map(function (item) { return item.key })
|
||||
.concat(this.key !== undefined ? this.key : [])
|
||||
;
|
||||
t.deepEqual(
|
||||
[ keys, value ],
|
||||
expected.shift()
|
||||
);
|
||||
};
|
||||
|
||||
p.write('"""Hello""This\\"is""\\r\\n\\f\\t\\\\\\/\\""');
|
||||
p.write('"\\u039b\\u03ac\\u03bc\\u03b2\\u03b4\\u03b1"');
|
||||
p.write('"\\\\"');
|
||||
p.write('"\\/"');
|
||||
p.write('"\\""');
|
||||
p.write('[0,1,-1]');
|
||||
p.write('[1.0,1.1,-1.1,-1.0][-1][-0.1]');
|
||||
p.write('[6.02e23]');
|
||||
});
|
38
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/test/utf8.js
generated
vendored
Normal file
38
node_modules/browserify/node_modules/JSONStream/node_modules/jsonparse/test/utf8.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
var test = require('tape');
|
||||
var Parser = require('../');
|
||||
|
||||
test('3 bytes of utf8', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var p = new Parser();
|
||||
p.onValue = function (value) {
|
||||
t.equal(value, '├──');
|
||||
};
|
||||
|
||||
p.write('"├──"');
|
||||
});
|
||||
|
||||
test('utf8 snowman', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var p = new Parser();
|
||||
p.onValue = function (value) {
|
||||
t.equal(value, '☃');
|
||||
};
|
||||
|
||||
p.write('"☃"');
|
||||
});
|
||||
|
||||
test('utf8 with regular ascii', function (t) {
|
||||
t.plan(4);
|
||||
|
||||
var p = new Parser();
|
||||
var expected = [ "snow: ☃!", "xyz", "¡que!" ];
|
||||
expected.push(expected.slice());
|
||||
|
||||
p.onValue = function (value) {
|
||||
t.deepEqual(value, expected.shift());
|
||||
};
|
||||
|
||||
p.write('["snow: ☃!","xyz","¡que!"]');
|
||||
});
|
38
node_modules/browserify/node_modules/JSONStream/package.json
generated
vendored
Normal file
38
node_modules/browserify/node_modules/JSONStream/package.json
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"name": "JSONStream",
|
||||
"version": "0.4.4",
|
||||
"description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
|
||||
"homepage": "http://github.com/dominictarr/JSONStream",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/dominictarr/JSONStream.git"
|
||||
},
|
||||
"dependencies": {
|
||||
"jsonparse": "0.0.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"it-is": "~1",
|
||||
"assertions": "~2.2.2",
|
||||
"render": "~0.1.1",
|
||||
"trees": "~0.0.3",
|
||||
"event-stream": "~0.7.0"
|
||||
},
|
||||
"author": {
|
||||
"name": "Dominic Tarr",
|
||||
"email": "dominic.tarr@gmail.com",
|
||||
"url": "http://bit.ly/dominictarr"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "set -e; for t in test/*.js; do echo '***' $t '***'; node $t; done"
|
||||
},
|
||||
"optionalDependencies": {},
|
||||
"engines": {
|
||||
"node": "*"
|
||||
},
|
||||
"readme": "# JSONStream\n\nstreaming JSON.parse and stringify\n\n<img src=https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master>\n\n## example\n\n```javascript\n\nvar request = require('request')\n , JSONStream = require('JSONStream')\n , es = require('event-stream')\n\nvar parser = JSONStream.parse(['rows', true])\n , req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})\n , logger = es.mapSync(function (data) {\n console.error(data)\n return data\n })\n```\n\nin node 0.4.x\n\n``` javascript\n\nreq.pipe(parser)\nparser.pipe(logger)\n\n```\n\nin node v0.5.x\n\n``` javascript\nreq.pipe(parser).pipe(logger)\n\n```\n\n## JSONStream.parse(path)\n\nusally, a json API will return a list of objects.\n\n`path` should be an array of property names, `RegExp`s, booleans, and/or functions.\nany object that matches the path will be emitted as 'data' (and `pipe`d down stream)\n\na 'root' event is emitted when all data has been received. The 'root' event passes the root object & the count of matched objects.\n\nif `path` is empty or null, no 'data' events are emitted.\n\n### example\n\nquery a couchdb view:\n\n``` bash\ncurl -sS localhost:5984/tests/_all_docs&include_docs=true\n```\nyou will get something like this:\n\n``` js\n{\"total_rows\":129,\"offset\":0,\"rows\":[\n { \"id\":\"change1_0.6995461115147918\"\n , \"key\":\"change1_0.6995461115147918\"\n , \"value\":{\"rev\":\"1-e240bae28c7bb3667f02760f6398d508\"}\n , \"doc\":{\n \"_id\": \"change1_0.6995461115147918\"\n , \"_rev\": \"1-e240bae28c7bb3667f02760f6398d508\",\"hello\":1}\n },\n { \"id\":\"change2_0.6995461115147918\"\n , \"key\":\"change2_0.6995461115147918\"\n , \"value\":{\"rev\":\"1-13677d36b98c0c075145bb8975105153\"}\n , \"doc\":{\n \"_id\":\"change2_0.6995461115147918\"\n , \"_rev\":\"1-13677d36b98c0c075145bb8975105153\"\n , \"hello\":2\n }\n },\n]}\n\n```\n\nwe are probably most interested in the `rows.*.docs`\n\ncreate a `Stream` that parses the documents from the feed like this:\n\n``` js\nvar stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc\n\nstream.on('data', function(data) {\n console.log('received:', data);\n});\n\nstream.on('root', function(root, count) {\n if (!count) {\n console.log('no matches found:', root);\n }\n});\n```\nawesome!\n\n## JSONStream.stringify(open, sep, close)\n\nCreate a writable stream.\n\nyou may pass in custom `open`, `close`, and `seperator` strings.\nBut, by default, `JSONStream.stringify()` will create an array,\n(with default options `open='[\\n', sep='\\n,\\n', close='\\n]\\n'`)\n\nIf you call `JSONStream.stringify(false)`\nthe elements will only be seperated by a newline.\n\nIf you only write one item this will be valid JSON.\n\nIf you write many items,\nyou can use a `RegExp` to split it into valid chunks.\n\n## JSONStream.stringifyObject(open, sep, close)\n\nVery much like `JSONStream.stringify`,\nbut creates a writable stream for objects instead of arrays.\n\nAccordingly, `open='{\\n', sep='\\n,\\n', close='\\n}\\n'`.\n\nWhen you `.write()` to the stream you must supply an array with `[ key, data ]`\nas the first argument.\n\n## numbers\n\nThere are occasional problems parsing and unparsing very precise numbers.\n\nI have opened an issue here:\n\nhttps://github.com/creationix/jsonparse/issues/2\n\n+1\n\n## Acknowlegements\n\nthis module depends on https://github.com/creationix/jsonparse\nby Tim Caswell\nand also thanks to Florent Jaby for teaching me about parsing with:\nhttps://github.com/Floby/node-json-streams\n\n## license\n\nMIT / 
APACHE2\n",
|
||||
"_id": "JSONStream@0.4.4",
|
||||
"dist": {
|
||||
"shasum": "cc2cf119286c45be150423cbc128d480e9b54ae2"
|
||||
},
|
||||
"_from": "JSONStream@~0.4.3"
|
||||
}
|
145
node_modules/browserify/node_modules/JSONStream/readme.markdown
generated
vendored
Normal file
145
node_modules/browserify/node_modules/JSONStream/readme.markdown
generated
vendored
Normal file
@@ -0,0 +1,145 @@
|
||||
# JSONStream
|
||||
|
||||
streaming JSON.parse and stringify
|
||||
|
||||
<img src=https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master>
|
||||
|
||||
## example
|
||||
|
||||
```javascript
|
||||
|
||||
var request = require('request')
|
||||
, JSONStream = require('JSONStream')
|
||||
, es = require('event-stream')
|
||||
|
||||
var parser = JSONStream.parse(['rows', true])
|
||||
, req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
|
||||
, logger = es.mapSync(function (data) {
|
||||
console.error(data)
|
||||
return data
|
||||
})
|
||||
```
|
||||
|
||||
in node 0.4.x
|
||||
|
||||
``` javascript
|
||||
|
||||
req.pipe(parser)
|
||||
parser.pipe(logger)
|
||||
|
||||
```
|
||||
|
||||
in node v0.5.x
|
||||
|
||||
``` javascript
|
||||
req.pipe(parser).pipe(logger)
|
||||
|
||||
```
|
||||
|
||||
## JSONStream.parse(path)
|
||||
|
||||
usally, a json API will return a list of objects.
|
||||
|
||||
`path` should be an array of property names, `RegExp`s, booleans, and/or functions.
|
||||
any object that matches the path will be emitted as 'data' (and `pipe`d down stream)
|
||||
|
||||
a 'root' event is emitted when all data has been received. The 'root' event passes the root object & the count of matched objects.
|
||||
|
||||
if `path` is empty or null, no 'data' events are emitted.
|
||||
|
||||
### example
|
||||
|
||||
query a couchdb view:
|
||||
|
||||
``` bash
|
||||
curl -sS localhost:5984/tests/_all_docs&include_docs=true
|
||||
```
|
||||
you will get something like this:
|
||||
|
||||
``` js
|
||||
{"total_rows":129,"offset":0,"rows":[
|
||||
{ "id":"change1_0.6995461115147918"
|
||||
, "key":"change1_0.6995461115147918"
|
||||
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
|
||||
, "doc":{
|
||||
"_id": "change1_0.6995461115147918"
|
||||
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
|
||||
},
|
||||
{ "id":"change2_0.6995461115147918"
|
||||
, "key":"change2_0.6995461115147918"
|
||||
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
|
||||
, "doc":{
|
||||
"_id":"change2_0.6995461115147918"
|
||||
, "_rev":"1-13677d36b98c0c075145bb8975105153"
|
||||
, "hello":2
|
||||
}
|
||||
},
|
||||
]}
|
||||
|
||||
```
|
||||
|
||||
we are probably most interested in the `rows.*.doc`
|
||||
|
||||
create a `Stream` that parses the documents from the feed like this:
|
||||
|
||||
``` js
|
||||
var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
|
||||
|
||||
stream.on('data', function(data) {
|
||||
console.log('received:', data);
|
||||
});
|
||||
|
||||
stream.on('root', function(root, count) {
|
||||
if (!count) {
|
||||
console.log('no matches found:', root);
|
||||
}
|
||||
});
|
||||
```
|
||||
awesome!
|
||||
|
||||
## JSONStream.stringify(open, sep, close)
|
||||
|
||||
Create a writable stream.
|
||||
|
||||
you may pass in custom `open`, `close`, and `separator` strings.
|
||||
But, by default, `JSONStream.stringify()` will create an array,
|
||||
(with default options `open='[\n', sep='\n,\n', close='\n]\n'`)
|
||||
|
||||
If you call `JSONStream.stringify(false)`
|
||||
the elements will only be separated by a newline.
|
||||
|
||||
If you only write one item this will be valid JSON.
|
||||
|
||||
If you write many items,
|
||||
you can use a `RegExp` to split it into valid chunks.
|
||||
|
||||
## JSONStream.stringifyObject(open, sep, close)
|
||||
|
||||
Very much like `JSONStream.stringify`,
|
||||
but creates a writable stream for objects instead of arrays.
|
||||
|
||||
Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
|
||||
|
||||
When you `.write()` to the stream you must supply an array with `[ key, data ]`
|
||||
as the first argument.
|
||||
|
||||
## numbers
|
||||
|
||||
There are occasional problems parsing and unparsing very precise numbers.
|
||||
|
||||
I have opened an issue here:
|
||||
|
||||
https://github.com/creationix/jsonparse/issues/2
|
||||
|
||||
+1
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
this module depends on https://github.com/creationix/jsonparse
|
||||
by Tim Caswell
|
||||
and also thanks to Florent Jaby for teaching me about parsing with:
|
||||
https://github.com/Floby/node-json-streams
|
||||
|
||||
## license
|
||||
|
||||
MIT / APACHE2
|
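The `JSONStream.stringify` / `JSONStream.stringifyObject` sections of the readme above are prose-only. Here is a minimal usage sketch, assuming nothing beyond the API documented above; the keys and values written are arbitrary illustrations.

```js
// Minimal sketch of the two writable streams described in the readme above.
var JSONStream = require('JSONStream')

// stringify(): items are wrapped with open='[\n', sep='\n,\n', close='\n]\n'
var arr = JSONStream.stringify()
arr.pipe(process.stdout)
arr.write({ a: 1 })
arr.write({ b: 2 })
arr.end()        // stdout now holds a valid JSON array of both items

// stringifyObject(): each .write() takes a [ key, data ] pair
var obj = JSONStream.stringifyObject()
obj.pipe(process.stdout)
obj.write([ 'hello', { world: true } ])
obj.end()        // stdout now holds a valid JSON object
```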
41
node_modules/browserify/node_modules/JSONStream/test/bool.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
|
||||
var fs = require ('fs')
|
||||
, join = require('path').join
|
||||
, file = join(__dirname, 'fixtures','all_npm.json')
|
||||
, JSONStream = require('../')
|
||||
, it = require('it-is').style('colour')
|
||||
|
||||
function randomObj () {
|
||||
return (
|
||||
Math.random () < 0.4
|
||||
? {hello: 'eonuhckmqjk',
|
||||
whatever: 236515,
|
||||
lies: true,
|
||||
nothing: [null],
|
||||
// stuff: [Math.random(),Math.random(),Math.random()]
|
||||
}
|
||||
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
|
||||
)
|
||||
}
|
||||
|
||||
var expected = []
|
||||
, stringify = JSONStream.stringify()
|
||||
, es = require('event-stream')
|
||||
, stringified = ''
|
||||
, called = 0
|
||||
, count = 10
|
||||
, ended = false
|
||||
|
||||
while (count --)
|
||||
expected.push(randomObj())
|
||||
|
||||
es.connect(
|
||||
es.readArray(expected),
|
||||
stringify,
|
||||
JSONStream.parse([true]),
|
||||
es.writeArray(function (err, lines) {
|
||||
|
||||
it(lines).has(expected)
|
||||
console.error('PASSED')
|
||||
})
|
||||
)
|
22
node_modules/browserify/node_modules/JSONStream/test/destroy_missing.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
var fs = require ('fs');
|
||||
var net = require('net');
|
||||
var join = require('path').join;
|
||||
var file = join(__dirname, 'fixtures','all_npm.json');
|
||||
var JSONStream = require('../');
|
||||
|
||||
|
||||
var server = net.createServer(function(client) {
|
||||
var parser = JSONStream.parse([]);
|
||||
parser.on('close', function() {
|
||||
console.error('PASSED');
|
||||
server.close();
|
||||
});
|
||||
client.pipe(parser);
|
||||
client.destroy();
|
||||
});
|
||||
server.listen(9999);
|
||||
|
||||
|
||||
var client = net.connect({ port : 9999 }, function() {
|
||||
fs.createReadStream(file).pipe(client);
|
||||
});
|
44
node_modules/browserify/node_modules/JSONStream/test/empty.js
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
var JSONStream = require('../')
|
||||
, stream = require('stream')
|
||||
, it = require('it-is')
|
||||
|
||||
var output = [ [], [] ]
|
||||
|
||||
var parser1 = JSONStream.parse(['docs', /./])
|
||||
parser1.on('data', function(data) {
|
||||
output[0].push(data)
|
||||
})
|
||||
|
||||
var parser2 = JSONStream.parse(['docs', /./])
|
||||
parser2.on('data', function(data) {
|
||||
output[1].push(data)
|
||||
})
|
||||
|
||||
var pending = 2
|
||||
function onend () {
|
||||
if (--pending > 0) return
|
||||
it(output).deepEqual([
|
||||
[], [{hello: 'world'}]
|
||||
])
|
||||
console.error('PASSED')
|
||||
}
|
||||
parser1.on('end', onend)
|
||||
parser2.on('end', onend)
|
||||
|
||||
function makeReadableStream() {
|
||||
var readStream = new stream.Stream()
|
||||
readStream.readable = true
|
||||
readStream.write = function (data) { this.emit('data', data) }
|
||||
readStream.end = function (data) { this.emit('end') }
|
||||
return readStream
|
||||
}
|
||||
|
||||
var emptyArray = makeReadableStream()
|
||||
emptyArray.pipe(parser1)
|
||||
emptyArray.write('{"docs":[]}')
|
||||
emptyArray.end()
|
||||
|
||||
var objectArray = makeReadableStream()
|
||||
objectArray.pipe(parser2)
|
||||
objectArray.write('{"docs":[{"hello":"world"}]}')
|
||||
objectArray.end()
|
4030
node_modules/browserify/node_modules/JSONStream/test/fixtures/all_npm.json
generated
vendored
Normal file
File diff suppressed because it is too large
39
node_modules/browserify/node_modules/JSONStream/test/fn.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
|
||||
|
||||
var fs = require ('fs')
|
||||
, join = require('path').join
|
||||
, file = join(__dirname, 'fixtures','all_npm.json')
|
||||
, JSONStream = require('../')
|
||||
, it = require('it-is')
|
||||
|
||||
function fn (s) {
|
||||
return !isNaN(parseInt(s, 10))
|
||||
}
|
||||
|
||||
var expected = JSON.parse(fs.readFileSync(file))
|
||||
, parser = JSONStream.parse(['rows', fn])
|
||||
, called = 0
|
||||
, ended = false
|
||||
, parsed = []
|
||||
|
||||
fs.createReadStream(file).pipe(parser)
|
||||
|
||||
parser.on('data', function (data) {
|
||||
called ++
|
||||
it.has({
|
||||
id: it.typeof('string'),
|
||||
value: {rev: it.typeof('string')},
|
||||
key:it.typeof('string')
|
||||
})
|
||||
parsed.push(data)
|
||||
})
|
||||
|
||||
parser.on('end', function () {
|
||||
ended = true
|
||||
})
|
||||
|
||||
process.on('exit', function () {
|
||||
it(called).equal(expected.rows.length)
|
||||
it(parsed).deepEqual(expected.rows)
|
||||
console.error('PASSED')
|
||||
})
|
42
node_modules/browserify/node_modules/JSONStream/test/multiple_objects.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
var fs = require ('fs');
|
||||
var net = require('net');
|
||||
var join = require('path').join;
|
||||
var file = join(__dirname, 'fixtures','all_npm.json');
|
||||
var it = require('it-is');
|
||||
var JSONStream = require('../');
|
||||
|
||||
var str = fs.readFileSync(file);
|
||||
|
||||
var datas = {}
|
||||
|
||||
var server = net.createServer(function(client) {
|
||||
var root_calls = 0;
|
||||
var data_calls = 0;
|
||||
var parser = JSONStream.parse(['rows', true, 'key']);
|
||||
parser.on('root', function(root, count) {
|
||||
++ root_calls;
|
||||
});
|
||||
|
||||
parser.on('data', function(data) {
|
||||
++ data_calls;
|
||||
datas[data] = (datas[data] || 0) + 1
|
||||
it(data).typeof('string')
|
||||
});
|
||||
|
||||
parser.on('end', function() {
|
||||
console.log('END')
|
||||
var min = Infinity
|
||||
for (var d in datas)
|
||||
min = min > datas[d] ? datas[d] : min
|
||||
it(root_calls).equal(3);
|
||||
it(min).equal(3);
|
||||
server.close();
|
||||
});
|
||||
client.pipe(parser);
|
||||
});
|
||||
server.listen(9999);
|
||||
|
||||
var client = net.connect({ port : 9999 }, function() {
|
||||
var msgs = str + ' ' + str + '\n\n' + str
|
||||
client.end(msgs);
|
||||
});
|
35
node_modules/browserify/node_modules/JSONStream/test/multiple_objects_error.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
var fs = require ('fs');
|
||||
var net = require('net');
|
||||
var join = require('path').join;
|
||||
var file = join(__dirname, 'fixtures','all_npm.json');
|
||||
var it = require('it-is');
|
||||
var JSONStream = require('../');
|
||||
|
||||
var str = fs.readFileSync(file);
|
||||
|
||||
var server = net.createServer(function(client) {
|
||||
var root_calls = 0;
|
||||
var data_calls = 0;
|
||||
var parser = JSONStream.parse();
|
||||
parser.on('root', function(root, count) {
|
||||
++ root_calls;
|
||||
it(root_calls).notEqual(2);
|
||||
});
|
||||
|
||||
parser.on('error', function(err) {
|
||||
console.log(err);
|
||||
server.close();
|
||||
});
|
||||
|
||||
parser.on('end', function() {
|
||||
console.log('END');
|
||||
server.close();
|
||||
});
|
||||
client.pipe(parser);
|
||||
});
|
||||
server.listen(9999);
|
||||
|
||||
var client = net.connect({ port : 9999 }, function() {
|
||||
var msgs = str + '}';
|
||||
client.end(msgs);
|
||||
});
|
28
node_modules/browserify/node_modules/JSONStream/test/parsejson.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
|
||||
|
||||
/*
|
||||
sometimes jsonparse changes numbers slightly.
|
||||
*/
|
||||
|
||||
var r = Math.random()
|
||||
, Parser = require('jsonparse')
|
||||
, p = new Parser()
|
||||
, assert = require('assert')
|
||||
, times = 20
|
||||
while (times --) {
|
||||
|
||||
assert.equal(JSON.parse(JSON.stringify(r)), r, 'core JSON')
|
||||
|
||||
p.onValue = function (v) {
|
||||
console.error('parsed', v)
|
||||
assert.equal(
|
||||
String(v).slice(0,12),
|
||||
String(r).slice(0,12)
|
||||
)
|
||||
}
|
||||
console.error('correct', r)
|
||||
p.write (new Buffer(JSON.stringify([r])))
|
||||
|
||||
|
||||
|
||||
}
|
41
node_modules/browserify/node_modules/JSONStream/test/stringify.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
|
||||
var fs = require ('fs')
|
||||
, join = require('path').join
|
||||
, file = join(__dirname, 'fixtures','all_npm.json')
|
||||
, JSONStream = require('../')
|
||||
, it = require('it-is').style('colour')
|
||||
|
||||
function randomObj () {
|
||||
return (
|
||||
Math.random () < 0.4
|
||||
? {hello: 'eonuhckmqjk',
|
||||
whatever: 236515,
|
||||
lies: true,
|
||||
nothing: [null],
|
||||
stuff: [Math.random(),Math.random(),Math.random()]
|
||||
}
|
||||
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
|
||||
)
|
||||
}
|
||||
|
||||
var expected = []
|
||||
, stringify = JSONStream.stringify()
|
||||
, es = require('event-stream')
|
||||
, stringified = ''
|
||||
, called = 0
|
||||
, count = 10
|
||||
, ended = false
|
||||
|
||||
while (count --)
|
||||
expected.push(randomObj())
|
||||
|
||||
es.connect(
|
||||
es.readArray(expected),
|
||||
stringify,
|
||||
//JSONStream.parse([/./]),
|
||||
es.writeArray(function (err, lines) {
|
||||
|
||||
it(JSON.parse(lines.join(''))).deepEqual(expected)
|
||||
console.error('PASSED')
|
||||
})
|
||||
)
|
47
node_modules/browserify/node_modules/JSONStream/test/stringify_object.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
|
||||
var fs = require ('fs')
|
||||
, join = require('path').join
|
||||
, file = join(__dirname, 'fixtures','all_npm.json')
|
||||
, JSONStream = require('../')
|
||||
, it = require('it-is').style('colour')
|
||||
, es = require('event-stream')
|
||||
, pending = 10
|
||||
, passed = true
|
||||
|
||||
function randomObj () {
|
||||
return (
|
||||
Math.random () < 0.4
|
||||
? {hello: 'eonuhckmqjk',
|
||||
whatever: 236515,
|
||||
lies: true,
|
||||
nothing: [null],
|
||||
stuff: [Math.random(),Math.random(),Math.random()]
|
||||
}
|
||||
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
|
||||
)
|
||||
}
|
||||
|
||||
for (var ix = 0; ix < pending; ix++) (function (count) {
|
||||
var expected = {}
|
||||
, stringify = JSONStream.stringifyObject()
|
||||
|
||||
es.connect(
|
||||
stringify,
|
||||
es.writeArray(function (err, lines) {
|
||||
it(JSON.parse(lines.join(''))).deepEqual(expected)
|
||||
if (--pending === 0) {
|
||||
console.error('PASSED')
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
while (count --) {
|
||||
var key = Math.random().toString(16).slice(2)
|
||||
expected[key] = randomObj()
|
||||
stringify.write([ key, expected[key] ])
|
||||
}
|
||||
|
||||
process.nextTick(function () {
|
||||
stringify.end()
|
||||
})
|
||||
})(ix)
|
35
node_modules/browserify/node_modules/JSONStream/test/test.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
|
||||
|
||||
var fs = require ('fs')
|
||||
, join = require('path').join
|
||||
, file = join(__dirname, 'fixtures','all_npm.json')
|
||||
, JSONStream = require('../')
|
||||
, it = require('it-is')
|
||||
|
||||
var expected = JSON.parse(fs.readFileSync(file))
|
||||
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
|
||||
, called = 0
|
||||
, ended = false
|
||||
, parsed = []
|
||||
|
||||
fs.createReadStream(file).pipe(parser)
|
||||
|
||||
parser.on('data', function (data) {
|
||||
called ++
|
||||
it.has({
|
||||
id: it.typeof('string'),
|
||||
value: {rev: it.typeof('string')},
|
||||
key:it.typeof('string')
|
||||
})
|
||||
parsed.push(data)
|
||||
})
|
||||
|
||||
parser.on('end', function () {
|
||||
ended = true
|
||||
})
|
||||
|
||||
process.on('exit', function () {
|
||||
it(called).equal(expected.rows.length)
|
||||
it(parsed).deepEqual(expected.rows)
|
||||
console.error('PASSED')
|
||||
})
|
29
node_modules/browserify/node_modules/JSONStream/test/test2.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
|
||||
|
||||
var fs = require ('fs')
|
||||
, join = require('path').join
|
||||
, file = join(__dirname, '..','package.json')
|
||||
, JSONStream = require('../')
|
||||
, it = require('it-is')
|
||||
|
||||
var expected = JSON.parse(fs.readFileSync(file))
|
||||
, parser = JSONStream.parse([])
|
||||
, called = 0
|
||||
, ended = false
|
||||
, parsed = []
|
||||
|
||||
fs.createReadStream(file).pipe(parser)
|
||||
|
||||
parser.on('data', function (data) {
|
||||
called ++
|
||||
it(data).deepEqual(expected)
|
||||
})
|
||||
|
||||
parser.on('end', function () {
|
||||
ended = true
|
||||
})
|
||||
|
||||
process.on('exit', function () {
|
||||
it(called).equal(1)
|
||||
console.error('PASSED')
|
||||
})
|
41
node_modules/browserify/node_modules/JSONStream/test/two-ways.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
|
||||
var fs = require ('fs')
|
||||
, join = require('path').join
|
||||
, file = join(__dirname, 'fixtures','all_npm.json')
|
||||
, JSONStream = require('../')
|
||||
, it = require('it-is').style('colour')
|
||||
|
||||
function randomObj () {
|
||||
return (
|
||||
Math.random () < 0.4
|
||||
? {hello: 'eonuhckmqjk',
|
||||
whatever: 236515,
|
||||
lies: true,
|
||||
nothing: [null],
|
||||
// stuff: [Math.random(),Math.random(),Math.random()]
|
||||
}
|
||||
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
|
||||
)
|
||||
}
|
||||
|
||||
var expected = []
|
||||
, stringify = JSONStream.stringify()
|
||||
, es = require('event-stream')
|
||||
, stringified = ''
|
||||
, called = 0
|
||||
, count = 10
|
||||
, ended = false
|
||||
|
||||
while (count --)
|
||||
expected.push(randomObj())
|
||||
|
||||
es.connect(
|
||||
es.readArray(expected),
|
||||
stringify,
|
||||
JSONStream.parse([/./]),
|
||||
es.writeArray(function (err, lines) {
|
||||
|
||||
it(lines).has(expected)
|
||||
console.error('PASSED')
|
||||
})
|
||||
)
|
1
node_modules/browserify/node_modules/browser-pack/.npmignore
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
node_modules
|
3
node_modules/browserify/node_modules/browser-pack/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- 0.8
|
18
node_modules/browserify/node_modules/browser-pack/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
This software is released under the MIT license:
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
6
node_modules/browserify/node_modules/browser-pack/cmd.js
generated
vendored
Executable file
@@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
var pack = require('./')();
|
||||
|
||||
process.stdin.pipe(pack).pipe(process.stdout);
|
||||
process.stdin.resume();
|
13
node_modules/browserify/node_modules/browser-pack/example/input.json
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
[
|
||||
{
|
||||
"id": "a1b5af78",
|
||||
"source": "console.log(require('./foo')(5))",
|
||||
"deps": { "./foo": "b8f69fa5" },
|
||||
"entry": true
|
||||
},
|
||||
{
|
||||
"id": "b8f69fa5",
|
||||
"source": "module.exports = function (n) { return n * 111 }",
|
||||
"deps": {}
|
||||
}
|
||||
]
|
1
node_modules/browserify/node_modules/browser-pack/example/output.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
(function(p,c,e){function r(n){if(!c[n]){c[n]={exports:{}};p[n][0](function(x){return r(p[n][1][x])},c[n],c[n].exports);}return c[n].exports}for(var i=0;i<e.length;i++)r(e[i]);return r})({"a1b5af78":[function(require,module,exports){console.log(require('./foo')(5))},{"./foo":"b8f69fa5"}],"b8f69fa5":[function(require,module,exports){module.exports = function (n) { return n * 111 }},{}]},{},["a1b5af78","b8f69fa5"])
|
15
node_modules/browserify/node_modules/browser-pack/example/sourcemap/input.json
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
[
|
||||
{
|
||||
"id": "a1b5af78",
|
||||
"source": "console.log(require('./foo')(5))",
|
||||
"deps": { "./foo": "b8f69fa5" },
|
||||
"entry": true,
|
||||
"sourceFile": "wunder/bar.js"
|
||||
},
|
||||
{
|
||||
"id": "b8f69fa5",
|
||||
"source": "module.exports = function (n) { return n * 111 }",
|
||||
"deps": {},
|
||||
"sourceFile": "foo.js"
|
||||
}
|
||||
]
|
6
node_modules/browserify/node_modules/browser-pack/example/sourcemap/output.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
(function(e,t,n,r){function i(r){if(!n[r]){if(!t[r]){if(e)return e(r);throw new Error("Cannot find module '"+r+"'")}var s=n[r]={exports:{}};t[r][0](function(e){var n=t[r][1][e];return i(n?n:e)},s,s.exports)}return n[r].exports}for(var s=0;s<r.length;s++)i(r[s]);return i})(typeof require!=="undefined"&&require,{"a1b5af78":[function(require,module,exports){
|
||||
console.log(require('./foo')(5))
|
||||
},{"./foo":"b8f69fa5"}],"b8f69fa5":[function(require,module,exports){
|
||||
module.exports = function (n) { return n * 111 }
|
||||
},{}]},{},["a1b5af78"])
|
||||
//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJ3dW5kZXIvYmFyLmpzIiwiZm9vLmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7QUFBQTs7QUNBQSJ9
|
78
node_modules/browserify/node_modules/browser-pack/index.js
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
var JSONStream = require('JSONStream');
|
||||
var duplexer = require('duplexer');
|
||||
var through = require('through');
|
||||
var uglify = require('uglify-js');
|
||||
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
|
||||
var combineSourceMap = require('combine-source-map');
|
||||
|
||||
var prelude = (function () {
|
||||
var src = fs.readFileSync(path.join(__dirname, 'prelude.js'), 'utf8');
|
||||
return uglify(src) + '(typeof require!=="undefined"&&require,{';
|
||||
})();
|
||||
|
||||
function newlinesIn(src) {
|
||||
if (!src) return 0;
|
||||
var newlines = src.match(/\n/g);
|
||||
|
||||
return newlines ? newlines.length : 0;
|
||||
}
|
||||
|
||||
module.exports = function (opts) {
|
||||
if (!opts) opts = {};
|
||||
var parser = opts.raw ? through() : JSONStream.parse([ true ]);
|
||||
var output = through(write, end);
|
||||
parser.pipe(output);
|
||||
|
||||
var first = true;
|
||||
var entries = [];
|
||||
var order = [];
|
||||
|
||||
var lineno = 1 + newlinesIn(prelude);
|
||||
var sourcemap;
|
||||
|
||||
return duplexer(parser, output);
|
||||
|
||||
function write (row) {
|
||||
if (first) this.queue(prelude);
|
||||
|
||||
if (row.sourceFile) {
|
||||
sourcemap = sourcemap || combineSourceMap.create();
|
||||
sourcemap.addFile(
|
||||
{ sourceFile: row.sourceFile, source: row.source },
|
||||
{ line: lineno }
|
||||
);
|
||||
}
|
||||
|
||||
var wrappedSource = [
|
||||
(first ? '' : ','),
|
||||
JSON.stringify(row.id),
|
||||
':[',
|
||||
'function(require,module,exports){\n',
|
||||
combineSourceMap.removeComments(row.source),
|
||||
'\n},',
|
||||
JSON.stringify(row.deps || {}),
|
||||
']'
|
||||
].join('');
|
||||
|
||||
this.queue(wrappedSource);
|
||||
lineno += newlinesIn(wrappedSource);
|
||||
|
||||
first = false;
|
||||
if (row.entry && row.order !== undefined) {
|
||||
entries.splice(row.order, 0, row.id);
|
||||
}
|
||||
else if (row.entry) entries.push(row.id);
|
||||
}
|
||||
|
||||
function end () {
|
||||
if (first) this.queue(prelude);
|
||||
|
||||
this.queue('},{},' + JSON.stringify(entries) + ')');
|
||||
if (sourcemap) this.queue('\n' + sourcemap.comment());
|
||||
|
||||
this.queue(null);
|
||||
}
|
||||
};
|
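`index.js` above accepts a stream of module rows (a JSON array unless `opts.raw` is set), wraps each row's `source` in a `function(require,module,exports)` shim, and emits the prelude plus the packed module map. A small programmatic sketch, reusing the two rows from `example/input.json` earlier in this diff:

```js
// Sketch: feeding browser-pack the same rows as example/input.json.
var pack = require('browser-pack')();

pack.pipe(process.stdout);          // emits a packed bundle like example/output.js

// Without opts.raw the writable side is JSONStream.parse([true]),
// so the rows go in as one JSON array string.
pack.write(JSON.stringify([
    {
        id: 'a1b5af78',
        source: "console.log(require('./foo')(5))",
        deps: { './foo': 'b8f69fa5' },
        entry: true
    },
    {
        id: 'b8f69fa5',
        source: 'module.exports = function (n) { return n * 111 }',
        deps: {}
    }
]));
pack.end();
```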
1
node_modules/browserify/node_modules/browser-pack/node_modules/.bin/uglifyjs
generated
vendored
Symbolic link
@@ -0,0 +1 @@
|
||||
../uglify-js/bin/uglifyjs
|
16
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
lib-cov
|
||||
*.seed
|
||||
*.log
|
||||
*.csv
|
||||
*.dat
|
||||
*.out
|
||||
*.pid
|
||||
*.gz
|
||||
|
||||
pids
|
||||
logs
|
||||
results
|
||||
|
||||
node_modules
|
||||
npm-debug.log
|
||||
tmp
|
4
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- 0.6
|
||||
- 0.8
|
16
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
lib-cov
|
||||
*.seed
|
||||
*.log
|
||||
*.csv
|
||||
*.dat
|
||||
*.out
|
||||
*.pid
|
||||
*.gz
|
||||
|
||||
pids
|
||||
logs
|
||||
results
|
||||
|
||||
node_modules
|
||||
npm-debug.log
|
||||
tmp
|
111
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/README.md
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
# combine-source-map [](http://travis-ci.org/thlorenz/combine-source-map)
|
||||
|
||||
Add source maps of multiple files, offset them and then combine them into one source map.
|
||||
|
||||
```js
|
||||
var convert = require('convert-source-map');
|
||||
var combine = require('combine-source-map');
|
||||
|
||||
var fooComment = '//@ sourceMappingURL=data:application/json;base64,eyJ2Z [..] pzJylcbiJdfQ==';
|
||||
var barComment = '//@ sourceMappingURL=data:application/json;base64,eyJ2Z [..] VjaycpXG4iXX0=';
|
||||
|
||||
var fooFile = {
|
||||
source: '(function() {\n\n console.log(require(\'./bar.js\'));\n\n}).call(this);\n' + '\n' + fooComment
|
||||
, sourceFile: 'foo.js'
|
||||
};
|
||||
var barFile = {
|
||||
source: '(function() {\n\n console.log(alert(\'alerts suck\'));\n\n}).call(this);\n' + '\n' + barComment
|
||||
, sourceFile: 'bar.js'
|
||||
};
|
||||
|
||||
var offset = { line: 2 };
|
||||
var base64 = combine
|
||||
.create('bundle.js')
|
||||
.addFile(fooFile, offset)
|
||||
.addFile(barFile, { line: offset.line + 8 })
|
||||
.base64();
|
||||
|
||||
var sm = convert.fromBase64(base64).toObject();
|
||||
console.log(sm);
|
||||
```
|
||||
|
||||
```
|
||||
{ version: 3,
|
||||
file: 'bundle.js',
|
||||
sources: [ 'foo.coffee', 'bar.coffee' ],
|
||||
names: [],
|
||||
mappings: ';;;AAAA;CAAA;CAAA,CAAA,CAAA,IAAO,GAAK;CAAZ;;;;;ACAA;CAAA;CAAA,CAAA,CAAA,IAAO,GAAK;CAAZ',
|
||||
sourcesContent:
|
||||
[ 'console.log(require \'./bar.js\')\n',
|
||||
'console.log(alert \'alerts suck\')\n' ] }
|
||||
```
|
||||
|
||||
## Installation
|
||||
|
||||
npm install combine-source-map
|
||||
|
||||
## API
|
||||
|
||||
### create()
|
||||
|
||||
```
|
||||
/**
|
||||
* @name create
|
||||
* @function
|
||||
* @param file {String} optional name of the generated file
|
||||
* @param sourceRoot {String} optional sourceRoot of the map to be generated
|
||||
* @return {Object} Combiner instance to which source maps can be added and later combined
|
||||
*/
|
||||
```
|
||||
|
||||
### Combiner.prototype.addFile(opts, offset)
|
||||
|
||||
```
|
||||
/**
|
||||
* Adds map to underlying source map.
|
||||
* If source contains a source map comment that has the source of the original file inlined it will offset these
|
||||
* mappings and include them.
|
||||
* If no source map comment is found or it has no source inlined, mappings for the file will be generated and included
|
||||
*
|
||||
* @name addMap
|
||||
* @function
|
||||
* @param opts {Object} { sourceFile: {String}, source: {String} }
|
||||
* @param offset {Object} { line: {Number}, column: {Number} }
|
||||
*/
|
||||
```
|
||||
|
||||
### Combiner.prototype.base64()
|
||||
|
||||
```
|
||||
/**
|
||||
* @name base64
|
||||
* @function
|
||||
* @return {String} base64 encoded combined source map
|
||||
*/
|
||||
```
|
||||
|
||||
### Combiner.prototype.comment()
|
||||
|
||||
```
|
||||
/**
|
||||
* @name comment
|
||||
* @function
|
||||
* @return {String} base64 encoded sourceMappingUrl comment of the combined source map
|
||||
*/
|
||||
```
|
||||
|
||||
### removeComments(src)
|
||||
|
||||
```
|
||||
/**
|
||||
* @name removeComments
|
||||
* @function
|
||||
* @param src
|
||||
* @return {String} src with all sourceMappingUrl comments removed
|
||||
*/
|
||||
```
|
||||
|
||||
## Example
|
||||
|
||||
Read and run the [more elaborate example](https://github.com/thlorenz/combine-source-map/blob/master/example/two-files.js)
|
||||
in order to get a better idea how things work.
|
26
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/example/two-files-short.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
'use strict';
|
||||
|
||||
var convert = require('convert-source-map');
|
||||
var combine = require('..');
|
||||
|
||||
var fooComment = '//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiZm9vLmNvZmZlZSJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiO0FBQUE7Q0FBQTtDQUFBLENBQUEsQ0FBQSxJQUFPLEdBQUs7Q0FBWiIsInNvdXJjZXNDb250ZW50IjpbImNvbnNvbGUubG9nKHJlcXVpcmUgJy4vYmFyLmpzJylcbiJdfQ==';
|
||||
var barComment = '//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYmFyLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiYmFyLmNvZmZlZSJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiO0FBQUE7Q0FBQTtDQUFBLENBQUEsQ0FBQSxJQUFPLEdBQUs7Q0FBWiIsInNvdXJjZXNDb250ZW50IjpbImNvbnNvbGUubG9nKGFsZXJ0ICdhbGVydHMgc3VjaycpXG4iXX0=';
|
||||
|
||||
var fooFile = {
|
||||
source: '(function() {\n\n console.log(require(\'./bar.js\'));\n\n}).call(this);\n' + '\n' + fooComment
|
||||
, sourceFile: 'foo.js'
|
||||
};
|
||||
var barFile = {
|
||||
source: '(function() {\n\n console.log(alert(\'alerts suck\'));\n\n}).call(this);\n' + '\n' + barComment
|
||||
, sourceFile: 'bar.js'
|
||||
};
|
||||
|
||||
var offset = { line: 2 };
|
||||
var base64 = combine
|
||||
.create('bundle.js')
|
||||
.addFile(fooFile, offset)
|
||||
.addFile(barFile, { line: offset.line + 8 })
|
||||
.base64();
|
||||
|
||||
var sm = convert.fromBase64(base64).toObject();
|
||||
console.log(sm);
|
47
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/example/two-files.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
'use strict';
|
||||
|
||||
var convert = require('convert-source-map');
|
||||
var parse = require('parse-base64vlq-mappings');
|
||||
var combine = require('..');
|
||||
|
||||
var foo = {
|
||||
version : 3,
|
||||
file : 'foo.js',
|
||||
sourceRoot : '',
|
||||
sources : [ 'foo.coffee' ],
|
||||
names : [],
|
||||
mappings : ';AAAA;CAAA;CAAA,CAAA,CAAA,IAAO,GAAK;CAAZ',
|
||||
sourcesContent : [ 'console.log(require \'./bar.js\')\n' ] };
|
||||
|
||||
var bar = {
|
||||
version : 3,
|
||||
file : 'bar.js',
|
||||
sourceRoot : '',
|
||||
sources : [ 'bar.coffee' ],
|
||||
names : [],
|
||||
mappings : ';AAAA;CAAA;CAAA,CAAA,CAAA,IAAO,GAAK;CAAZ',
|
||||
sourcesContent : [ 'console.log(alert \'alerts suck\')\n' ] };
|
||||
|
||||
|
||||
var fooComment = convert.fromObject(foo).toComment();
|
||||
var barComment = convert.fromObject(bar).toComment();
|
||||
|
||||
var fooFile = {
|
||||
source: '(function() {\n\n console.log(require(\'./bar.js\'));\n\n}).call(this);\n' + '\n' + fooComment
|
||||
, sourceFile: 'foo.js'
|
||||
};
|
||||
var barFile = {
|
||||
source: '(function() {\n\n console.log(alert(\'alerts suck\'));\n\n}).call(this);\n' + '\n' + barComment
|
||||
, sourceFile: 'bar.js'
|
||||
};
|
||||
|
||||
var offset = { line: 2 };
|
||||
var base64 = combine
|
||||
.create('bundle.js')
|
||||
.addFile(fooFile, offset)
|
||||
.addFile(barFile, { line: offset.line + 8 })
|
||||
.base64();
|
||||
|
||||
var sm = convert.fromBase64(base64).toObject();
|
||||
console.log('Combined source maps:\n', sm);
|
||||
console.log('\nMappings:\n', parse(sm.mappings));
|
97
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/index.js
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
'use strict';
|
||||
|
||||
var parse = require('parse-base64vlq-mappings');
|
||||
var convert = require('convert-source-map');
|
||||
var createGenerator = require('inline-source-map');
|
||||
|
||||
function resolveMap(source) {
|
||||
var gen = convert.fromSource(source);
|
||||
return gen ? gen.toObject() : null;
|
||||
}
|
||||
|
||||
function hasInlinedSource(existingMap) {
|
||||
return existingMap.sourcesContent && !!existingMap.sourcesContent[0];
|
||||
}
|
||||
|
||||
function Combiner(file, sourceRoot) {
|
||||
// since we include the original code in the map, sourceRoot is actually not needed
|
||||
this.generator = createGenerator({ file: file || 'generated.js', sourceRoot: sourceRoot });
|
||||
}
|
||||
|
||||
Combiner.prototype._addGeneratedMap = function (sourceFile, source, offset) {
|
||||
this.generator.addGeneratedMappings(sourceFile, source, offset);
|
||||
this.generator.addSourceContent(sourceFile, source);
|
||||
return this;
|
||||
};
|
||||
|
||||
Combiner.prototype._addExistingMap = function (sourceFile, source, existingMap, offset) {
|
||||
var mappings = parse(existingMap.mappings);
|
||||
var originalSource = existingMap.sourcesContent[0]
|
||||
, originalSourceFile = existingMap.sources[0];
|
||||
|
||||
this.generator.addMappings(originalSourceFile || sourceFile, mappings, offset);
|
||||
this.generator.addSourceContent(originalSourceFile || sourceFile, originalSource);
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Adds map to underlying source map.
|
||||
* If source contains a source map comment that has the source of the original file inlined it will offset these
|
||||
* mappings and include them.
|
||||
* If no source map comment is found or it has no source inlined, mappings for the file will be generated and included
|
||||
*
|
||||
* @name addMap
|
||||
* @function
|
||||
* @param opts {Object} { sourceFile: {String}, source: {String} }
|
||||
* @param offset {Object} { line: {Number}, column: {Number} }
|
||||
*/
|
||||
Combiner.prototype.addFile = function (opts, offset) {
|
||||
|
||||
offset = offset || {};
|
||||
if (!offset.hasOwnProperty('line')) offset.line = 0;
|
||||
if (!offset.hasOwnProperty('column')) offset.column = 0;
|
||||
|
||||
var existingMap = resolveMap(opts.source);
|
||||
|
||||
return existingMap && hasInlinedSource(existingMap)
|
||||
? this._addExistingMap(opts.sourceFile, opts.source, existingMap, offset)
|
||||
: this._addGeneratedMap(opts.sourceFile, opts.source, offset);
|
||||
};
|
||||
|
||||
/**
|
||||
* @name base64
|
||||
* @function
|
||||
* @return {String} base64 encoded combined source map
|
||||
*/
|
||||
Combiner.prototype.base64 = function () {
|
||||
return this.generator.base64Encode();
|
||||
};
|
||||
|
||||
/**
|
||||
* @name comment
|
||||
* @function
|
||||
* @return {String} base64 encoded sourceMappingUrl comment of the combined source map
|
||||
*/
|
||||
Combiner.prototype.comment = function () {
|
||||
return this.generator.inlineMappingUrl();
|
||||
};
|
||||
|
||||
/**
|
||||
* @name create
|
||||
* @function
|
||||
* @param file {String} optional name of the generated file
|
||||
* @param sourceRoot {String} optional sourceRoot of the map to be generated
|
||||
* @return {Object} Combiner instance to which source maps can be added and later combined
|
||||
*/
|
||||
exports.create = function (file, sourceRoot) { return new Combiner(file, sourceRoot); };
|
||||
|
||||
/**
|
||||
* @name removeComments
|
||||
* @function
|
||||
* @param src
|
||||
* @return {String} src with all sourceMappingUrl comments removed
|
||||
*/
|
||||
exports.removeComments = function (src) {
|
||||
if (!src.replace) return src;
|
||||
return src.replace(convert.commentRegex, '');
|
||||
};
|
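`addFile` above takes one of two paths: if the incoming source carries a `//@ sourceMappingURL` comment with the original source inlined, the existing mappings are offset and merged; otherwise mappings are generated for the file. A short sketch of both paths, assuming only the `combine-source-map` and `convert-source-map` APIs shown in this diff (file names and sources are arbitrary):

```js
// Sketch: the two addFile() paths implemented above.
var convert = require('convert-source-map');
var combine = require('combine-source-map');

var combiner = combine.create('bundle.js');

// Path 1: no inline source map comment -> mappings are generated for the file.
combiner.addFile(
  { sourceFile: 'plain.js', source: 'console.log("no inline map");\n' },
  { line: 0 }
);

// Path 2: the source ends with an inline map that has sourcesContent
// -> the existing mappings are offset and included.
var inlineComment = convert.fromObject({
  version: 3,
  file: 'foo.js',
  sources: [ 'foo.coffee' ],
  names: [],
  mappings: 'AAAA',
  sourcesContent: [ 'console.log "hi"\n' ]
}).toComment();

combiner.addFile(
  { sourceFile: 'foo.js', source: 'console.log("hi");\n' + inlineComment },
  { line: 2 }
);

console.log(combiner.comment()); // //@ sourceMappingURL=data:application/json;base64,...
```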
16
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/convert-source-map/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
lib-cov
|
||||
*.seed
|
||||
*.log
|
||||
*.csv
|
||||
*.dat
|
||||
*.out
|
||||
*.pid
|
||||
*.gz
|
||||
|
||||
pids
|
||||
logs
|
||||
results
|
||||
|
||||
node_modules
|
||||
npm-debug.log
|
||||
tmp
|
4
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/convert-source-map/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- 0.6
|
||||
- 0.8
|
23
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/convert-source-map/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
Copyright 2013 Thorsten Lorenz.
|
||||
All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
85
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/convert-source-map/README.md
generated
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
# convert-source-map [](http://travis-ci.org/thlorenz/convert-source-map)
|
||||
|
||||
Converts a source-map from/to different formats and allows adding/changing properties.
|
||||
|
||||
```js
|
||||
var convert = require('convert-source-map');
|
||||
|
||||
var json = convert
|
||||
.fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')
|
||||
.toJSON();
|
||||
|
||||
var modified = convert
|
||||
.fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')
|
||||
.setProperty('sources', [ 'CONSOLE.LOG("HI");' ])
|
||||
.toJSON();
|
||||
|
||||
console.log(json);
|
||||
console.log(modified);
|
||||
```
|
||||
|
||||
```json
|
||||
{"version":3,"file":"foo.js","sources":["console.log(\"hi\");"],"names":[],"mappings":"AAAA","sourceRoot":"/"}
|
||||
{"version":3,"file":"foo.js","sources":["CONSOLE.LOG(\"HI\");"],"names":[],"mappings":"AAAA","sourceRoot":"/"}
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### fromObject(obj)
|
||||
|
||||
Returns source map converter from given object.
|
||||
|
||||
### fromJSON(json)
|
||||
|
||||
Returns source map converter from given json string.
|
||||
|
||||
### fromBase64(base64)
|
||||
|
||||
Returns source map converter from given base64 encoded json string.
|
||||
|
||||
### fromComment()
|
||||
|
||||
Returns source map converter from given base64 encoded json string prefixed with `//@ sourceMappingURL=...`.
|
||||
|
||||
### fromSource()
|
||||
|
||||
Finds last sourcemap comment in file and returns source map converter or returns null if no source map comment was
|
||||
found.
|
||||
|
||||
### toObject()
|
||||
|
||||
Returns a copy of the underlying source map.
|
||||
|
||||
### toJSON([space])
|
||||
|
||||
Converts source map to json string. If `space` is given (optional), this will be passed to
|
||||
[JSON.stringify](https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Global_Objects/JSON/stringify) when the
|
||||
JSON string is generated.
|
||||
|
||||
### toBase64()
|
||||
|
||||
Converts source map to base64 encoded json string.
|
||||
|
||||
### toComment()
|
||||
|
||||
Converts source map to base64 encoded json string prefixed with `//@ sourceMappingURL=...`.
|
||||
|
||||
### addProperty(key, value)
|
||||
|
||||
Adds given property to the source map. Throws an error if property already exists.
|
||||
|
||||
### setProperty(key, value)
|
||||
|
||||
Sets given property to the source map. If property doesn't exist it is added, otherwise its value is updated.
|
||||
|
||||
### getProperty(key)
|
||||
|
||||
Gets given property of the source map.
|
||||
|
||||
### removeComments(src)
|
||||
|
||||
Returns `src` with all source map comments removed
|
||||
|
||||
### commentRegex
|
||||
|
||||
Returns the regex used to find source map comments.
|
@@ -0,0 +1,15 @@
|
||||
'use strict';
|
||||
|
||||
var convert = require('..');
|
||||
|
||||
var json = convert
|
||||
.fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')
|
||||
.toJSON();
|
||||
|
||||
var modified = convert
|
||||
.fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')
|
||||
.setProperty('sources', [ 'CONSOLE.LOG("HI");' ])
|
||||
.toJSON();
|
||||
|
||||
console.log(json);
|
||||
console.log(modified);
|
89
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/convert-source-map/index.js
generated
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
'use strict';
|
||||
var commentRx = /^[ \t]*\/\/@[ \t]+sourceMappingURL=data:(?:application|text)\/json;base64,(.+)/mg;
|
||||
|
||||
function decodeBase64(base64) {
|
||||
return new Buffer(base64, 'base64').toString();
|
||||
}
|
||||
|
||||
function stripComment(sm) {
|
||||
return sm.split(',').pop();
|
||||
}
|
||||
|
||||
function Converter (sourcemap, isEncoded, isJSON, hasComment) {
|
||||
var sm = sourcemap;
|
||||
try {
|
||||
if (hasComment) sm = stripComment(sm);
|
||||
if (isEncoded) sm = decodeBase64(sm);
|
||||
if (isJSON || isEncoded) sm = JSON.parse(sm);
|
||||
|
||||
this.sourcemap = sm;
|
||||
} catch(e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
Converter.prototype.toJSON = function (space) {
|
||||
return JSON.stringify(this.sourcemap, null, space);
|
||||
};
|
||||
|
||||
Converter.prototype.toBase64 = function () {
|
||||
var json = this.toJSON();
|
||||
return new Buffer(json).toString('base64');
|
||||
};
|
||||
|
||||
Converter.prototype.toComment = function () {
|
||||
var base64 = this.toBase64();
|
||||
return '//@ sourceMappingURL=data:application/json;base64,' + base64;
|
||||
};
|
||||
|
||||
// returns copy instead of original
|
||||
Converter.prototype.toObject = function () {
|
||||
return JSON.parse(this.toJSON());
|
||||
};
|
||||
|
||||
Converter.prototype.addProperty = function (key, value) {
|
||||
if (this.sourcemap.hasOwnProperty(key)) throw new Error('property %s already exists on the sourcemap, use set property instead');
|
||||
return this.setProperty(key, value);
|
||||
};
|
||||
|
||||
Converter.prototype.setProperty = function (key, value) {
|
||||
this.sourcemap[key] = value;
|
||||
return this;
|
||||
};
|
||||
|
||||
Converter.prototype.getProperty = function (key) {
|
||||
return this.sourcemap[key];
|
||||
};
|
||||
|
||||
exports.fromObject = function (obj) {
|
||||
return new Converter(obj, false, false, false);
|
||||
};
|
||||
|
||||
exports.fromJSON = function (json) {
|
||||
return new Converter(json, false, true, false);
|
||||
};
|
||||
|
||||
exports.fromBase64 = function (base64) {
|
||||
return new Converter(base64, true, false, false);
|
||||
};
|
||||
|
||||
exports.fromComment = function (comment) {
|
||||
return new Converter(comment, true, false, true);
|
||||
};
|
||||
|
||||
// Finds last sourcemap comment in file or returns null if none was found
|
||||
exports.fromSource = function (content) {
|
||||
var m = content.match(commentRx);
|
||||
commentRx.lastIndex = 0;
|
||||
return m ? exports.fromComment(m.pop()) : null;
|
||||
};
|
||||
|
||||
exports.removeComments = function (src) {
|
||||
commentRx.lastIndex = 0;
|
||||
return src.replace(commentRx, '');
|
||||
};
|
||||
|
||||
exports.__defineGetter__('commentRegex', function () {
|
||||
commentRx.lastIndex = 0;
|
||||
return commentRx;
|
||||
});
|
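The converter above round-trips a map between object, JSON, base64 and comment form, and `fromSource`/`removeComments` operate on a whole source string. A small sketch using only the exports defined above (the map and source are arbitrary):

```js
// Sketch: round-tripping a map with the converters defined above.
var convert = require('convert-source-map');

var comment = convert
  .fromObject({ version: 3, file: 'foo.js', sources: [ 'foo.js' ], names: [], mappings: 'AAAA' })
  .toComment();            // '//@ sourceMappingURL=data:application/json;base64,...'

var src = 'console.log("hi");\n' + comment;

console.log(convert.fromSource(src).getProperty('file')); // 'foo.js'
console.log(convert.removeComments(src));                 // 'console.log("hi");\n'
```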
35
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/convert-source-map/package.json
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
{
|
||||
"name": "convert-source-map",
|
||||
"version": "0.2.5",
|
||||
"description": "Converts a source-map from/to different formats and allows adding/changing properties.",
|
||||
"main": "convert-source-map.js",
|
||||
"scripts": {
|
||||
"test": "node-trap test/*.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/thlorenz/convert-source-map.git"
|
||||
},
|
||||
"homepage": "https://github.com/thlorenz/convert-source-map",
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"trap": "~0.4.2",
|
||||
"inline-source-map": "~0.2.1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": {
|
||||
"name": "Thorsten Lorenz",
|
||||
"email": "thlorenz@gmx.de",
|
||||
"url": "http://thlorenz.com"
|
||||
},
|
||||
"license": "MIT",
|
||||
"engine": {
|
||||
"node": ">=0.6"
|
||||
},
|
||||
"readme": "# convert-source-map [](http://travis-ci.org/thlorenz/convert-source-map)\n\nConverts a source-map from/to different formats and allows adding/changing properties.\n\n```js\nvar convert = require('convert-source-map');\n\nvar json = convert\n .fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')\n .toJSON();\n\nvar modified = convert\n .fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')\n .setProperty('sources', [ 'CONSOLE.LOG(\"HI\");' ])\n .toJSON();\n\nconsole.log(json);\nconsole.log(modified);\n```\n\n```json\n{\"version\":3,\"file\":\"foo.js\",\"sources\":[\"console.log(\\\"hi\\\");\"],\"names\":[],\"mappings\":\"AAAA\",\"sourceRoot\":\"/\"}\n{\"version\":3,\"file\":\"foo.js\",\"sources\":[\"CONSOLE.LOG(\\\"HI\\\");\"],\"names\":[],\"mappings\":\"AAAA\",\"sourceRoot\":\"/\"}\n```\n\n## API\n\n### fromObject(obj)\n\nReturns source map converter from given object.\n\n### fromJSON(json)\n\nReturns source map converter from given json string.\n\n### fromBase64(base64)\n\nReturns source map converter from given base64 encoded json string.\n\n### fromComment()\n\nReturns source map converter from given base64 encoded json string prefixed with `//@ sourceMappintURL=...`.\n\n### fromSource()\n\nFinds last sourcemap comment in file and returns source map converter or returns null if no source map comment was\nfound.\n\n### toObject()\n\nReturns a copy of the underlying source map.\n\n### toJSON([space])\n\nConverts source map to json string. If `space` is given (optional), this will be passed to\n[JSON.stringify](https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Global_Objects/JSON/stringify) when the\nJSON string is generated.\n\n### toBase64()\n\nConverts source map to base64 encoded json string.\n\n### toComment()\n\nConverts source map to base64 encoded json string prefixed with `//@ sourceMappingURL=...`.\n\n### addProperty(key, value)\n\nAdds given property to the source map. Throws an error if property already exists.\n\n### setProperty(key, value)\n\nSets given property to the source map. If property doesn't exist it is added, otherwise its value is updated.\n\n### getProperty(key)\n\nGets given property of the source map.\n\n### removeComments(src)\n\nReturns `src` with all source map comments removed\n\n### commentRegex\n\nReturns the regex used to find source map comments.\n",
|
||||
"_id": "convert-source-map@0.2.5",
|
||||
"dist": {
|
||||
"shasum": "bcbcb50f1fba6c9acd0e7c2b5e1bf168b1e41bd4"
|
||||
},
|
||||
"_from": "convert-source-map@~0.2.3"
|
||||
}
|
25
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/convert-source-map/test/comment-regex.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
'use strict';
|
||||
/*jshint asi: true */
|
||||
|
||||
var test = require('trap').test
|
||||
, generator = require('inline-source-map')
|
||||
, rx = require('..').commentRegex
|
||||
|
||||
function comment(s) {
|
||||
rx.lastIndex = 0;
|
||||
return rx.test(s + 'sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9')
|
||||
}
|
||||
|
||||
test('comment regex', function (t) {
|
||||
[ '//@ '
|
||||
, ' //@ '
|
||||
, '\t//@ '
|
||||
].forEach(function (x) { t.ok(comment(x), 'matches ' + x) })
|
||||
|
||||
// if I don't assign this, I get: TypeError: Cannot read property ' @// @' of undefined
|
||||
var a =
|
||||
[ '///@ '
|
||||
, '}}//@ '
|
||||
, ' @// @'
|
||||
].forEach(function (x) { t.ok(!comment(x), 'does not match ' + x) })
|
||||
})
|
129
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/convert-source-map/test/convert-source-map.js
generated
vendored
Normal file
@@ -0,0 +1,129 @@
|
||||
'use strict';
|
||||
/*jshint asi: true */
|
||||
|
||||
var test = require('trap').test
|
||||
, generator = require('inline-source-map')
|
||||
, convert = require('..')
|
||||
|
||||
|
||||
var gen = generator()
|
||||
.addMappings('foo.js', [{ original: { line: 2, column: 3 } , generated: { line: 5, column: 10 } }], { line: 5 })
|
||||
.addGeneratedMappings('bar.js', 'var a = 2;\nconsole.log(a)', { line: 23, column: 22 })
|
||||
, base64 = gen.base64Encode()
|
||||
, comment = gen.inlineMappingUrl()
|
||||
, json = '{"version":3,"file":"","sources":["foo.js","bar.js"],"names":[],"mappings":";;;;;;;;;UACG;;;;;;;;;;;;;;sBCDH;sBACA"}'
|
||||
, obj = JSON.parse(json)
|
||||
|
||||
test('different formats', function (t) {
|
||||
|
||||
t.equal(convert.fromComment(comment).toComment(), comment, 'comment -> comment')
|
||||
t.equal(convert.fromComment(comment).toBase64(), base64, 'comment -> base64')
|
||||
t.equal(convert.fromComment(comment).toJSON(), json, 'comment -> json')
|
||||
t.deepEqual(convert.fromComment(comment).toObject(), obj, 'comment -> object')
|
||||
|
||||
t.equal(convert.fromBase64(base64).toBase64(), base64, 'base64 -> base64')
|
||||
t.equal(convert.fromBase64(base64).toComment(), comment, 'base64 -> comment')
|
||||
t.equal(convert.fromBase64(base64).toJSON(), json, 'base64 -> json')
|
||||
t.deepEqual(convert.fromBase64(base64).toObject(), obj, 'base64 -> object')
|
||||
|
||||
t.equal(convert.fromJSON(json).toJSON(), json, 'json -> json')
|
||||
t.equal(convert.fromJSON(json).toBase64(), base64, 'json -> base64')
|
||||
t.equal(convert.fromJSON(json).toComment(), comment, 'json -> comment')
|
||||
t.deepEqual(convert.fromJSON(json).toObject(), obj, 'json -> object')
|
||||
|
||||
})
|
||||
|
||||
test('to object returns a copy', function (t) {
|
||||
var c = convert.fromJSON(json)
|
||||
var o = c.toObject()
|
||||
o.version = '99';
|
||||
t.equal(c.toObject().version, '3', 'setting property on returned object does not affect original')
|
||||
})
|
||||
|
||||
test('from source', function (t) {
|
||||
var foo = [
|
||||
'function foo() {'
|
||||
, ' console.log("hello I am foo");'
|
||||
, ' console.log("who are you");'
|
||||
, '}'
|
||||
, ''
|
||||
, 'foo();'
|
||||
, ''
|
||||
].join('\n')
|
||||
, map = '//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9'
|
||||
, otherMap = '//@ sourceMappingURL=data:application/json;base64,otherZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9'
|
||||
|
||||
function getComment(src) {
|
||||
var map = convert.fromSource(src);
|
||||
return map ? map.toComment() : null;
|
||||
}
|
||||
|
||||
t.equal(getComment(foo), null, 'no comment returns null')
|
||||
t.equal(getComment(foo + map), map, 'beginning of last line')
|
||||
t.equal(getComment(foo + ' ' + map), map, 'indented of last line')
|
||||
t.equal(getComment(foo + ' ' + map + '\n\n'), map, 'indented on last non empty line')
|
||||
t.equal(getComment(foo + map + '\nconsole.log("more code");\nfoo()\n'), map, 'in the middle of code')
|
||||
t.equal(getComment(foo + otherMap + '\n' + map), map, 'finds last map in source')
|
||||
})
|
||||
|
||||
test('remove comments', function (t) {
|
||||
var foo = [
|
||||
'function foo() {'
|
||||
, ' console.log("hello I am foo");'
|
||||
, ' console.log("who are you");'
|
||||
, '}'
|
||||
, ''
|
||||
, 'foo();'
|
||||
, ''
|
||||
].join('\n')
|
||||
, map = '//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9'
|
||||
, otherMap = '//@ sourceMappingURL=data:application/json;base64,otherZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9'
|
||||
, extraCode = '\nconsole.log("more code");\nfoo()\n'
|
||||
|
||||
t.equal(convert.removeComments(foo + map), foo, 'from last line')
|
||||
t.equal(convert.removeComments(foo + map + extraCode), foo + extraCode, 'from the middle of code')
|
||||
t.equal(convert.removeComments(foo + otherMap + extraCode + map + map), foo + extraCode, 'multiple comments from the middle of code')
|
||||
})
|
||||
|
||||
test('pretty json', function (t) {
|
||||
var mod = convert.fromJSON(json).toJSON(2);
|
||||
t.equal(
|
||||
mod
|
||||
, '{\n "version": 3,\n "file": "",\n "sources": [\n "foo.js",\n "bar.js"\n ],\n "names": [],\n "mappings": ";;;;;;;;;UACG;;;;;;;;;;;;;;sBCDH;sBACA"\n}'
|
||||
, 'pretty prints json when space is given')
|
||||
})
|
||||
|
||||
test('adding properties', function (t) {
|
||||
var mod = convert
|
||||
.fromJSON(json)
|
||||
.addProperty('foo', 'bar')
|
||||
.toJSON()
|
||||
|
||||
t.equal(
|
||||
mod
|
||||
, '{"version":3,"file":"","sources":["foo.js","bar.js"],"names":[],"mappings":";;;;;;;;;UACG;;;;;;;;;;;;;;sBCDH;sBACA","foo":"bar"}'
|
||||
, 'includes added property'
|
||||
)
|
||||
})
|
||||
|
||||
test('setting properties', function (t) {
|
||||
var mod = convert
|
||||
.fromJSON(json)
|
||||
.setProperty('version', '2')
|
||||
.setProperty('mappings', ';;;UACG')
|
||||
.setProperty('should add', 'this')
|
||||
.toJSON()
|
||||
|
||||
t.equal(
|
||||
mod
|
||||
, '{"version":"2","file":"","sources":["foo.js","bar.js"],"names":[],"mappings":";;;UACG","should add":"this"}'
|
||||
, 'includes new property and changes existing properties'
|
||||
)
|
||||
})
|
||||
|
||||
test('getting properties', function (t) {
|
||||
var sm = convert.fromJSON(json)
|
||||
|
||||
t.equal(sm.getProperty('version'), 3, 'gets version')
|
||||
t.deepEqual(sm.getProperty('sources'), ['foo.js', 'bar.js'], 'gets sources')
|
||||
})
|
16
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/inline-source-map/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,16 @@
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz

pids
logs
results

node_modules
npm-debug.log
tmp
4
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/inline-source-map/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,4 @@
language: node_js
node_js:
- 0.6
- 0.8
23
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/inline-source-map/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
Copyright 2013 Thorsten Lorenz.
|
||||
All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
88
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/inline-source-map/README.md
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
# inline-source-map [](http://travis-ci.org/thlorenz/inline-source-map)
|
||||
|
||||
Adds source mappings and base64 encodes them, so they can be inlined in your generated file.
|
||||
|
||||
```js
|
||||
var generator = require('inline-source-map');
|
||||
|
||||
var gen = generator()
|
||||
.addMappings('foo.js', [{ original: { line: 2, column: 3 } , generated: { line: 5, column: 10 } }], { line: 5 })
|
||||
.addGeneratedMappings('bar.js', 'var a = 2;\nconsole.log(a)', { line: 23, column: 22 });
|
||||
|
||||
console.log('base64 mapping:', gen.base64Encode());
|
||||
console.log('inline mapping url:', gen.inlineMappingUrl());
|
||||
```
|
||||
|
||||
```
|
||||
base64 mapping: eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmb28uanMiLCJiYXIuanMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O1VBQ0c7Ozs7Ozs7Ozs7Ozs7O3NCQ0RIO3NCQUNBIn0=
|
||||
inline mapping url: //@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmb28uanMiLCJiYXIuanMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O1VBQ0c7Ozs7Ozs7Ozs7Ozs7O3NCQ0RIO3NCQUNBIn0=
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
### addMappings(sourceFile, mappings, offset)
|
||||
|
||||
```
|
||||
/**
|
||||
* Adds the given mappings to the generator and offsets them if offset is given
|
||||
*
|
||||
* @name addMappings
|
||||
* @function
|
||||
* @param sourceFile {String} name of the source file
|
||||
* @param mappings {Array.<Object>} each object has the form { original: { line: _, column: _ }, generated: { line: _, column: _ } }
|
||||
* @param offset {Object} offset to apply to each mapping. Has the form { line: _, column: _ }
|
||||
* @return {Object} the generator to allow chaining
|
||||
*/
|
||||
```
|
||||
|
||||
### addGeneratedMappings(sourceFile, source, offset)
|
||||
|
||||
```
|
||||
/**
|
||||
* Generates mappings for the given source and adds them, assuming that no translation from original to generated is necessary.
|
||||
*
|
||||
* @name addGeneratedMappings
|
||||
* @function
|
||||
* @param sourceFile {String} name of the source file
|
||||
* @param source {String} source of the file
|
||||
* @param offset {Object} offset to apply to each mapping. Has the form { line: _, column: _ }
|
||||
* @return {Object} the generator to allow chaining
|
||||
*/
|
||||
```
|
||||
|
||||
### addSourceContent(sourceFile, sourceContent)
|
||||
|
||||
```
|
||||
/**
|
||||
* Adds source content for the given source file.
|
||||
*
|
||||
* @name addSourceContent
|
||||
* @function
|
||||
* @param sourceFile {String} The source file for which a mapping is included
|
||||
* @param sourceContent {String} The content of the source file
|
||||
* @return {Object} The generator to allow chaining
|
||||
*/
|
||||
```
|
||||
|
||||
|
||||
### base64Encode()
|
||||
|
||||
```
|
||||
/**
|
||||
* @name base64Encode
|
||||
* @function
|
||||
* @return {String} base64 encoded representation of the added mappings
|
||||
*/
|
||||
```
|
||||
|
||||
If source contents were added, this will be included in the encoded mappings.
|
||||
|
||||
### inlineMappingUrl()
|
||||
|
||||
```
|
||||
/**
|
||||
* @name inlineMappingUrl
|
||||
* @function
|
||||
* @return {String} comment with base64 encoded representation of the added mappings. Can be inlined at the end of the generated file.
|
||||
*/
|
||||
```
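
As a rough end-to-end sketch based only on the calls documented above, the methods chain, so one generator can collect mappings, attach original source text, and emit the inline comment in a single pass (the file names and offsets here are just placeholders):

```js
var generator = require('inline-source-map');

var barSource = 'var a = 2;\nconsole.log(a)';

var gen = generator({ file: 'bundle.js' })
  // one explicit mapping for foo.js, offset five lines into the bundle
  .addMappings('foo.js', [{ original: { line: 2, column: 3 }, generated: { line: 5, column: 10 } }], { line: 5 })
  // line-for-line mappings for bar.js, which starts at line 23 of the bundle
  .addGeneratedMappings('bar.js', barSource, { line: 23, column: 22 })
  // embed bar.js itself so consumers can show sourcesContent
  .addSourceContent('bar.js', barSource);

// '//@ sourceMappingURL=data:application/json;base64,...' ready to append to the bundle
console.log(gen.inlineMappingUrl());
```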
|
8
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/inline-source-map/example/foo-bar.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
var generator = require('..');
|
||||
|
||||
var gen = generator()
|
||||
.addMappings('foo.js', [{ original: { line: 2, column: 3 } , generated: { line: 5, column: 10 } }], { line: 5 })
|
||||
.addGeneratedMappings('bar.js', 'var a = 2;\nconsole.log(a)', { line: 23, column: 22 });
|
||||
|
||||
console.log('base64 mapping', gen.base64Encode());
|
||||
console.log('inline mapping url', gen.inlineMappingUrl());
|
126
node_modules/browserify/node_modules/browser-pack/node_modules/combine-source-map/node_modules/inline-source-map/index.js
generated
vendored
Normal file
@@ -0,0 +1,126 @@
|
||||
'use strict';
|
||||
var SourceMapGenerator = require('source-map').SourceMapGenerator;
|
||||
|
||||
function offsetMapping(mapping, offset) {
|
||||
return { line: offset.line + mapping.line, column: offset.column + mapping.column };
|
||||
}
|
||||
|
||||
function newlinesIn(src) {
|
||||
if (!src) return 0;
|
||||
var newlines = src.match(/\n/g);
|
||||
|
||||
return newlines ? newlines.length : 0;
|
||||
}
|
||||
|
||||
function Generator(opts) {
|
||||
opts = opts || {};
|
||||
this.generator = new SourceMapGenerator({ file: opts.file || '', sourceRoot: opts.sourceRoot || '' });
|
||||
this.sourcesContent = undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the given mappings to the generator and offsets them if offset is given
|
||||
*
|
||||
* @name addMappings
|
||||
* @function
|
||||
* @param sourceFile {String} name of the source file
|
||||
* @param mappings {Array.<Object>} each object has the form { original: { line: _, column: _ }, generated: { line: _, column: _ } }
|
||||
* @param offset {Object} offset to apply to each mapping. Has the form { line: _, column: _ }
|
||||
* @return {Object} the generator to allow chaining
|
||||
*/
|
||||
Generator.prototype.addMappings = function (sourceFile, mappings, offset) {
|
||||
var generator = this.generator;
|
||||
|
||||
offset = offset || {};
|
||||
offset.line = offset.hasOwnProperty('line') ? offset.line : 0;
|
||||
offset.column = offset.hasOwnProperty('column') ? offset.column : 0;
|
||||
|
||||
mappings.forEach(function (m) {
|
||||
generator.addMapping({
|
||||
source : sourceFile
|
||||
, original : m.original
|
||||
, generated : offsetMapping(m.generated, offset)
|
||||
});
|
||||
});
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Generates mappings for the given source, assuming that no translation from original to generated is necessary.
|
||||
*
|
||||
* @name addGeneratedMappings
|
||||
* @function
|
||||
* @param sourceFile {String} name of the source file
|
||||
* @param source {String} source of the file
|
||||
* @param offset {Object} offset to apply to each mapping. Has the form { line: _, column: _ }
|
||||
* @return {Object} the generator to allow chaining
|
||||
*/
|
||||
Generator.prototype.addGeneratedMappings = function (sourceFile, source, offset) {
|
||||
var mappings = []
|
||||
, linesToGenerate = newlinesIn(source) + 1;
|
||||
|
||||
for (var line = 1; line <= linesToGenerate; line++) {
|
||||
var location = { line: line, column: 0 };
|
||||
mappings.push({ original: location, generated: location });
|
||||
}
|
||||
|
||||
return this.addMappings(sourceFile, mappings, offset);
|
||||
};
|
||||
|
||||
/**
|
||||
* Adds source content for the given source file.
|
||||
*
|
||||
* @name addSourceContent
|
||||
* @function
|
||||
* @param sourceFile {String} The source file for which a mapping is included
|
||||
* @param sourcesContent {String} The content of the source file
|
||||
* @return {Object} The generator to allow chaining
|
||||
*/
|
||||
Generator.prototype.addSourceContent = function (sourceFile, sourcesContent) {
|
||||
this.sourcesContent = this.sourcesContent || {};
|
||||
this.sourcesContent[sourceFile] = sourcesContent;
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* @name base64Encode
|
||||
* @function
|
||||
* @return {String} base64 encoded representation of the added mappings
|
||||
*/
|
||||
Generator.prototype.base64Encode = function () {
|
||||
var map = this.toString();
|
||||
return new Buffer(map).toString('base64');
|
||||
};
|
||||
|
||||
/**
|
||||
* @name inlineMappingUrl
|
||||
* @function
|
||||
* @return {String} comment with base64 encoded representation of the added mappings. Can be inlined at the end of the generated file.
|
||||
*/
|
||||
Generator.prototype.inlineMappingUrl = function () {
|
||||
return '//@ sourceMappingURL=data:application/json;base64,' + this.base64Encode();
|
||||
};
|
||||
|
||||
Generator.prototype.toJSON = function () {
|
||||
var map = this.generator.toJSON();
|
||||
if (!this.sourcesContent) return map;
|
||||
|
||||
var toSourcesContent = (function (s) { return this.sourcesContent[s] || null; }).bind(this);
|
||||
map.sourcesContent = map.sources.map(toSourcesContent);
|
||||
return map;
|
||||
};
|
||||
|
||||
Generator.prototype.toString = function () {
|
||||
return JSON.stringify(this);
|
||||
};
|
||||
|
||||
Generator.prototype._mappings = function () {
|
||||
return this.generator._mappings;
|
||||
};
|
||||
|
||||
Generator.prototype.gen = function () {
|
||||
return this.generator;
|
||||
};
|
||||
|
||||
module.exports = function (opts) { return new Generator(opts); };
|
||||
module.exports.Generator = Generator;
|
@@ -0,0 +1,2 @@
|
||||
dist/*
|
||||
node_modules/*
|
@@ -0,0 +1,28 @@
|
||||
|
||||
Copyright (c) 2009-2011, Mozilla Foundation and contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the names of the Mozilla Foundation nor the names of project
|
||||
contributors may be used to endorse or promote products derived from this
|
||||
software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
@@ -0,0 +1,166 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var copy = require('dryice').copy;
|
||||
|
||||
function removeAmdefine(src) {
|
||||
src = String(src).replace(
|
||||
/if\s*\(typeof\s*define\s*!==\s*'function'\)\s*{\s*var\s*define\s*=\s*require\('amdefine'\)\(module\);\s*}\s*/g,
|
||||
'');
|
||||
src = src.replace(
|
||||
/\b(define\(.*)('amdefine',?)/gm,
|
||||
'$1');
|
||||
return src;
|
||||
}
|
||||
removeAmdefine.onRead = true;
|
||||
|
||||
function makeNonRelative(src) {
|
||||
return src
|
||||
.replace(/require\('.\//g, 'require(\'source-map/')
|
||||
.replace(/\.\.\/\.\.\/lib\//g, '');
|
||||
}
|
||||
makeNonRelative.onRead = true;
|
||||
|
||||
function buildBrowser() {
|
||||
console.log('\nCreating dist/source-map.js');
|
||||
|
||||
var project = copy.createCommonJsProject({
|
||||
roots: [ path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/mini-require.js',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'source-map/source-map-generator',
|
||||
'source-map/source-map-consumer',
|
||||
'source-map/source-node']
|
||||
},
|
||||
'build/suffix-browser.js'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine
|
||||
],
|
||||
dest: 'dist/source-map.js'
|
||||
});
|
||||
}
|
||||
|
||||
function buildBrowserMin() {
|
||||
console.log('\nCreating dist/source-map.min.js');
|
||||
|
||||
copy({
|
||||
source: 'dist/source-map.js',
|
||||
filter: copy.filter.uglifyjs,
|
||||
dest: 'dist/source-map.min.js'
|
||||
});
|
||||
}
|
||||
|
||||
function buildFirefox() {
|
||||
console.log('\nCreating dist/SourceMap.jsm');
|
||||
|
||||
var project = copy.createCommonJsProject({
|
||||
roots: [ path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/prefix-source-map.jsm',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'source-map/source-map-consumer',
|
||||
'source-map/source-map-generator',
|
||||
'source-map/source-node' ]
|
||||
},
|
||||
'build/suffix-source-map.jsm'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine,
|
||||
makeNonRelative
|
||||
],
|
||||
dest: 'dist/SourceMap.jsm'
|
||||
});
|
||||
|
||||
// Create dist/test/Utils.jsm
|
||||
console.log('\nCreating dist/test/Utils.jsm');
|
||||
|
||||
project = copy.createCommonJsProject({
|
||||
roots: [ __dirname, path.join(__dirname, 'lib') ]
|
||||
});
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/prefix-utils.jsm',
|
||||
'build/assert-shim.js',
|
||||
{
|
||||
project: project,
|
||||
require: [ 'test/source-map/util' ]
|
||||
},
|
||||
'build/suffix-utils.jsm'
|
||||
],
|
||||
filter: [
|
||||
copy.filter.moduleDefines,
|
||||
removeAmdefine,
|
||||
makeNonRelative
|
||||
],
|
||||
dest: 'dist/test/Utils.jsm'
|
||||
});
|
||||
|
||||
function isTestFile(f) {
|
||||
return /^test\-.*?\.js/.test(f);
|
||||
}
|
||||
|
||||
var testFiles = fs.readdirSync(path.join(__dirname, 'test', 'source-map')).filter(isTestFile);
|
||||
|
||||
testFiles.forEach(function (testFile) {
|
||||
console.log('\nCreating', path.join('dist', 'test', testFile.replace(/\-/g, '_')));
|
||||
|
||||
copy({
|
||||
source: [
|
||||
'build/test-prefix.js',
|
||||
path.join('test', 'source-map', testFile),
|
||||
'build/test-suffix.js'
|
||||
],
|
||||
filter: [
|
||||
removeAmdefine,
|
||||
makeNonRelative,
|
||||
function (input, source) {
|
||||
return input.replace('define(',
|
||||
'define("'
|
||||
+ path.join('test', 'source-map', testFile.replace(/\.js$/, ''))
|
||||
+ '", ["require", "exports", "module"], ');
|
||||
},
|
||||
function (input, source) {
|
||||
return input.replace('{THIS_MODULE}', function () {
|
||||
return "test/source-map/" + testFile.replace(/\.js$/, '');
|
||||
});
|
||||
}
|
||||
],
|
||||
dest: path.join('dist', 'test', testFile.replace(/\-/g, '_'))
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function ensureDir(name) {
|
||||
var dirExists = false;
|
||||
try {
|
||||
dirExists = fs.statSync(name).isDirectory();
|
||||
} catch (err) {}
|
||||
|
||||
if (!dirExists) {
|
||||
fs.mkdirSync(name, 0777);
|
||||
}
|
||||
}
|
||||
|
||||
ensureDir("dist");
|
||||
ensureDir("dist/test");
|
||||
buildFirefox();
|
||||
buildBrowser();
|
||||
buildBrowserMin();
|
@@ -0,0 +1,321 @@
|
||||
# Source Map
|
||||
|
||||
This is a library to generate and consume the source map format
|
||||
[described here][format].
|
||||
|
||||
[Learn more here][feature].
|
||||
|
||||
This library was written in the Asynchronous Module Definition
|
||||
format. It should work in the following environments:
|
||||
|
||||
* Modern Browsers (either after the build, or with an AMD loader such as
|
||||
RequireJS)
|
||||
|
||||
* Inside Firefox (as a JSM file, after the build)
|
||||
|
||||
* With NodeJS versions 0.8.X and higher
|
||||
|
||||
## Installing with NPM (for use with NodeJS)
|
||||
|
||||
Simply
|
||||
|
||||
$ npm install source-map
|
||||
|
||||
Or, if you'd like to hack on this library and have it installed via npm so you
|
||||
can try out your changes:
|
||||
|
||||
$ git clone https://fitzgen@github.com/mozilla/source-map.git
|
||||
$ cd source-map
|
||||
$ npm link .
|
||||
|
||||
## Building from Source (for everywhere else)
|
||||
|
||||
Install Node and then run
|
||||
|
||||
$ git clone https://fitzgen@github.com/mozilla/source-map.git
|
||||
$ cd source-map
|
||||
$ npm link .
|
||||
|
||||
Next, run
|
||||
|
||||
$ node Makefile.dryice.js
|
||||
|
||||
This should create the following files:
|
||||
|
||||
* `dist/source-map.js` - The unminified browser version.
|
||||
|
||||
* `dist/source-map.min.js` - The minified browser version.
|
||||
|
||||
* `dist/SourceMap.jsm` - The JavaScript Module for inclusion in Firefox
|
||||
source.
|
||||
|
||||
## API
|
||||
|
||||
Get a reference to the module:
|
||||
|
||||
// NodeJS
|
||||
var sourceMap = require('source-map');
|
||||
|
||||
// Browser builds
|
||||
var sourceMap = window.sourceMap;
|
||||
|
||||
// Inside Firefox
|
||||
let sourceMap = {};
|
||||
Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
|
||||
|
||||
### SourceMapConsumer
|
||||
|
||||
A SourceMapConsumer instance represents a parsed source map which we can query
|
||||
for information about the original file positions by giving it a file position
|
||||
in the generated source.
|
||||
|
||||
#### new SourceMapConsumer(rawSourceMap)
|
||||
|
||||
The only parameter is the raw source map (either as a string which can be
|
||||
`JSON.parse`'d, or an object). According to the spec, source maps have the
|
||||
following attributes:
|
||||
|
||||
* `version`: Which version of the source map spec this map is following.
|
||||
|
||||
* `sources`: An array of URLs to the original source files.
|
||||
|
||||
* `names`: An array of identifiers which can be referenced by individual
|
||||
mappings.
|
||||
|
||||
* `sourceRoot`: Optional. The URL root from which all sources are relative.
|
||||
|
||||
* `sourcesContent`: Optional. An array of contents of the original source files.
|
||||
|
||||
* `mappings`: A string of base64 VLQs which contain the actual mappings.
|
||||
|
||||
* `file`: The generated filename this source map is associated with.
|
||||
|
||||
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
|
||||
|
||||
Returns the original source, line, and column information for the generated
|
||||
source's line and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `line`: The line number in the generated source.
|
||||
|
||||
* `column`: The column number in the generated source.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `source`: The original source file, or null if this information is not
|
||||
available.
|
||||
|
||||
* `line`: The line number in the original source, or null if this information is
|
||||
not available.
|
||||
|
||||
* `column`: The column number in the original source, or null if this
|
||||
information is not available.
|
||||
|
||||
* `name`: The original identifier, or null if this information is not available.
|
||||
|
||||
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
|
||||
|
||||
Returns the generated line and column information for the original source,
|
||||
line, and column positions provided. The only argument is an object with
|
||||
the following properties:
|
||||
|
||||
* `source`: The filename of the original source.
|
||||
|
||||
* `line`: The line number in the original source.
|
||||
|
||||
* `column`: The column number in the original source.
|
||||
|
||||
and an object is returned with the following properties:
|
||||
|
||||
* `line`: The line number in the generated source, or null.
|
||||
|
||||
* `column`: The column number in the generated source, or null.
|
||||
|
||||
#### SourceMapConsumer.prototype.sourceContentFor(source)
|
||||
|
||||
Returns the original source content for the source provided. The only
|
||||
argument is the URL of the original source file.
|
||||
|
||||
#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
|
||||
|
||||
Iterate over each mapping between an original source/line/column and a
|
||||
generated line/column in this source map.
|
||||
|
||||
* `callback`: The function that is called with each mapping.
|
||||
|
||||
* `context`: Optional. If specified, this object will be the value of `this`
|
||||
every time that `callback` is called.
|
||||
|
||||
* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
`SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
|
||||
the mappings sorted by the generated file's line/column order or the
|
||||
original's source/line/column order, respectively. Defaults to
|
||||
`SourceMapConsumer.GENERATED_ORDER`.
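
Putting the consumer calls above together, here is a minimal sketch; the tiny raw map is hand-written for illustration ("AAAA" maps line 1, column 0 of the generated file back to line 1, column 0 of foo.js):

    var sourceMap = require('source-map');

    var consumer = new sourceMap.SourceMapConsumer({
      version: 3,
      file: 'out.js',
      sourceRoot: '',
      sources: ['foo.js'],
      names: [],
      mappings: 'AAAA'
    });

    console.log(consumer.originalPositionFor({ line: 1, column: 0 }));
    // expected: { source: 'foo.js', line: 1, column: 0, name: null }

    consumer.eachMapping(function (mapping) {
      console.log(mapping.generatedLine, mapping.generatedColumn,
                  '->', mapping.source, mapping.originalLine, mapping.originalColumn);
    });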
|
||||
|
||||
### SourceMapGenerator
|
||||
|
||||
An instance of the SourceMapGenerator represents a source map which is being
|
||||
built incrementally.
|
||||
|
||||
#### new SourceMapGenerator(startOfSourceMap)
|
||||
|
||||
To create a new one, you must pass an object with the following properties:
|
||||
|
||||
* `file`: The filename of the generated source that this source map is
|
||||
associated with.
|
||||
|
||||
* `sourceRoot`: An optional root for all relative URLs in this source map.
|
||||
|
||||
#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)
|
||||
|
||||
Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||
|
||||
* `sourceMapConsumer` The SourceMap.
|
||||
|
||||
#### SourceMapGenerator.prototype.addMapping(mapping)
|
||||
|
||||
Add a single mapping from original source line and column to the generated
|
||||
source's line and column for this source map being created. The mapping object
|
||||
should have the following properties:
|
||||
|
||||
* `generated`: An object with the generated line and column positions.
|
||||
|
||||
* `original`: An object with the original line and column positions.
|
||||
|
||||
* `source`: The original source file (relative to the sourceRoot).
|
||||
|
||||
* `name`: An optional original token name for this mapping.
|
||||
|
||||
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
|
||||
|
||||
Set the source content for an original source file.
|
||||
|
||||
* `sourceFile` the URL of the original source file.
|
||||
|
||||
* `sourceContent` the content of the source file.
|
||||
|
||||
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile])
|
||||
|
||||
Applies a SourceMap for a source file to the SourceMap.
|
||||
Each mapping to the supplied source file is rewritten using the
|
||||
supplied SourceMap. Note: The resolution for the resulting mappings
|
||||
is the minimum of this map and the supplied map.
|
||||
|
||||
* `sourceMapConsumer`: The SourceMap to be applied.
|
||||
|
||||
* `sourceFile`: Optional. The filename of the source file.
|
||||
If omitted, sourceMapConsumer.file will be used.
|
||||
|
||||
#### SourceMapGenerator.prototype.toString()
|
||||
|
||||
Renders the source map being generated to a string.
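
As a compact sketch tying the generator methods above together (the file names are placeholders):

    var sourceMap = require('source-map');

    var generator = new sourceMap.SourceMapGenerator({
      file: 'min.js',
      sourceRoot: ''
    });

    generator.addMapping({
      generated: { line: 1, column: 0 },
      original: { line: 10, column: 4 },
      source: 'one.js',
      name: 'doStuff'
    });

    // optionally embed the original text for consumers
    generator.setSourceContent('one.js', 'function doStuff() {\n  // ...\n}\n');

    // serialize the map; the result can be written alongside min.js
    console.log(generator.toString());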
|
||||
|
||||
### SourceNode
|
||||
|
||||
SourceNodes provide a way to abstract over interpolating and/or concatenating
|
||||
snippets of generated JavaScript source code, while maintaining the line and
|
||||
column information associated between those snippets and the original source
|
||||
code. This is useful as the final intermediate representation a compiler might
|
||||
use before outputting the generated JS and source map.
|
||||
|
||||
#### new SourceNode(line, column, source[, chunk[, name]])
|
||||
|
||||
* `line`: The original line number associated with this source node, or null if
|
||||
it isn't associated with an original line.
|
||||
|
||||
* `column`: The original column number associated with this source node, or null
|
||||
if it isn't associated with an original column.
|
||||
|
||||
* `source`: The original source's filename.
|
||||
|
||||
* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
|
||||
below.
|
||||
|
||||
* `name`: Optional. The original identifier.
|
||||
|
||||
#### SourceNode.prototype.add(chunk)
|
||||
|
||||
Add a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
#### SourceNode.prototype.prepend(chunk)
|
||||
|
||||
Prepend a chunk of generated JS to this source node.
|
||||
|
||||
* `chunk`: A string snippet of generated JS code, another instance of
|
||||
`SourceNode`, or an array where each member is one of those things.
|
||||
|
||||
#### SourceNode.prototype.walk(fn)
|
||||
|
||||
Walk over the tree of JS snippets in this node and its children. The walking
|
||||
function is called once for each snippet of JS and is passed that snippet and
|
||||
its original associated source's line/column location.
|
||||
|
||||
* `fn`: The traversal function.
|
||||
|
||||
#### SourceNode.prototype.join(sep)
|
||||
|
||||
Like `Array.prototype.join` except for SourceNodes. Inserts the separator
|
||||
between each of this source node's children.
|
||||
|
||||
* `sep`: The separator.
|
||||
|
||||
#### SourceNode.prototype.replaceRight(pattern, replacement)
|
||||
|
||||
Call `String.prototype.replace` on the very right-most source snippet. Useful
|
||||
for trimming whitespace from the end of a source node, etc.
|
||||
|
||||
* `pattern`: The pattern to replace.
|
||||
|
||||
* `replacement`: The thing to replace the pattern with.
|
||||
|
||||
#### SourceNode.prototype.toString()
|
||||
|
||||
Return the string representation of this source node. Walks over the tree and
|
||||
concatenates all the various snippets together to one string.
|
||||
|
||||
### SourceNode.prototype.toStringWithSourceMap(startOfSourceMap)
|
||||
|
||||
Returns the string representation of this tree of source nodes, plus a
|
||||
SourceMapGenerator which contains all the mappings between the generated and
|
||||
original sources.
|
||||
|
||||
The arguments are the same as those to `new SourceMapGenerator`.
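
For example, a minimal sketch of building a small tree and emitting both code and map; it assumes the returned object exposes the generated code and the SourceMapGenerator as `code` and `map`:

    var SourceNode = require('source-map').SourceNode;

    var node = new SourceNode(1, 0, 'a.js', 'var a = 1;\n');
    node.add(new SourceNode(1, 0, 'b.js', 'var b = 2;\n'));
    node.prepend('// generated bundle\n');

    var result = node.toStringWithSourceMap({ file: 'bundle.js' });
    console.log(result.code);           // the concatenated source
    console.log(result.map.toString()); // the serialized source map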
|
||||
|
||||
## Tests
|
||||
|
||||
Install NodeJS version 0.8.0 or greater, then run `node test/run-tests.js`.
|
||||
|
||||
To add new tests, create a new file named `test/test-<your new test name>.js`
|
||||
and export your test functions with names that start with "test", for example
|
||||
|
||||
exports["test doing the foo bar"] = function (assert, util) {
|
||||
...
|
||||
};
|
||||
|
||||
The new test will be located automatically when you run the suite.
|
||||
|
||||
The `util` argument is the test utility module located at `test/source-map/util`.
|
||||
|
||||
The `assert` argument is a cut down version of node's assert module. You have
|
||||
access to the following assertion functions:
|
||||
|
||||
* `doesNotThrow`
|
||||
|
||||
* `equal`
|
||||
|
||||
* `ok`
|
||||
|
||||
* `strictEqual`
|
||||
|
||||
* `throws`
|
||||
|
||||
(The reason for the restricted set of test functions is that we need the
|
||||
tests to run inside Firefox's test suite as well and so the assert module is
|
||||
shimmed in that environment. See `build/assert-shim.js`.)
|
||||
|
||||
[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
|
||||
[feature]: https://wiki.mozilla.org/DevTools/Features/SourceMap
|
||||
[Dryice]: https://github.com/mozilla/dryice
|
@@ -0,0 +1,56 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
define('test/source-map/assert', ['exports'], function (exports) {
|
||||
|
||||
let do_throw = function (msg) {
|
||||
throw new Error(msg);
|
||||
};
|
||||
|
||||
exports.init = function (throw_fn) {
|
||||
do_throw = throw_fn;
|
||||
};
|
||||
|
||||
exports.doesNotThrow = function (fn) {
|
||||
try {
|
||||
fn();
|
||||
}
|
||||
catch (e) {
|
||||
do_throw(e.message);
|
||||
}
|
||||
};
|
||||
|
||||
exports.equal = function (actual, expected, msg) {
|
||||
msg = msg || String(actual) + ' != ' + String(expected);
|
||||
if (actual != expected) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.ok = function (val, msg) {
|
||||
msg = msg || String(val) + ' is falsey';
|
||||
if (!Boolean(val)) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.strictEqual = function (actual, expected, msg) {
|
||||
msg = msg || String(actual) + ' !== ' + String(expected);
|
||||
if (actual !== expected) {
|
||||
do_throw(msg);
|
||||
}
|
||||
};
|
||||
|
||||
exports.throws = function (fn) {
|
||||
try {
|
||||
fn();
|
||||
do_throw('Expected an error to be thrown, but it wasn\'t.');
|
||||
}
|
||||
catch (e) {
|
||||
}
|
||||
};
|
||||
|
||||
});
|
@@ -0,0 +1,152 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/**
|
||||
* Define a module along with a payload.
|
||||
* @param {string} moduleName Name for the payload
|
||||
* @param {ignored} deps Ignored. For compatibility with CommonJS AMD Spec
|
||||
* @param {function} payload Function with (require, exports, module) params
|
||||
*/
|
||||
function define(moduleName, deps, payload) {
|
||||
if (typeof moduleName != "string") {
|
||||
throw new TypeError('Expected string, got: ' + moduleName);
|
||||
}
|
||||
|
||||
if (arguments.length == 2) {
|
||||
payload = deps;
|
||||
}
|
||||
|
||||
if (moduleName in define.modules) {
|
||||
throw new Error("Module already defined: " + moduleName);
|
||||
}
|
||||
define.modules[moduleName] = payload;
|
||||
};
|
||||
|
||||
/**
|
||||
* The global store of un-instantiated modules
|
||||
*/
|
||||
define.modules = {};
|
||||
|
||||
|
||||
/**
|
||||
* We invoke require() in the context of a Domain so we can have multiple
|
||||
* sets of modules running separate from each other.
|
||||
* This contrasts with JSMs, which are singletons; Domains allow us to
|
||||
* optionally load a CommonJS module twice with separate data each time.
|
||||
* Perhaps you want 2 command lines with a different set of commands in each,
|
||||
* for example.
|
||||
*/
|
||||
function Domain() {
|
||||
this.modules = {};
|
||||
this._currentModule = null;
|
||||
}
|
||||
|
||||
(function () {
|
||||
|
||||
/**
|
||||
* Lookup module names and resolve them by calling the definition function if
|
||||
* needed.
|
||||
* There are 2 ways to call this, either with an array of dependencies and a
|
||||
* callback to call when the dependencies are found (which can happen
|
||||
* asynchronously in an in-page context) or with a single string and no callback
|
||||
* where the dependency is resolved synchronously and returned.
|
||||
* The API is designed to be compatible with the CommonJS AMD spec and
|
||||
* RequireJS.
|
||||
* @param {string[]|string} deps A name, or names for the payload
|
||||
* @param {function|undefined} callback Function to call when the dependencies
|
||||
* are resolved
|
||||
* @return {undefined|object} The module required or undefined for
|
||||
* array/callback method
|
||||
*/
|
||||
Domain.prototype.require = function(deps, callback) {
|
||||
if (Array.isArray(deps)) {
|
||||
var params = deps.map(function(dep) {
|
||||
return this.lookup(dep);
|
||||
}, this);
|
||||
if (callback) {
|
||||
callback.apply(null, params);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
else {
|
||||
return this.lookup(deps);
|
||||
}
|
||||
};
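// Illustrative usage inside a built bundle (the module name below is a
// placeholder and must already have been registered with define()):
//   var util = require('source-map/util');             // synchronous lookup
//   require(['source-map/util'], function (util) {});  // array form invokes the callback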
|
||||
|
||||
function normalize(path) {
|
||||
var bits = path.split('/');
|
||||
var i = 1;
|
||||
while (i < bits.length) {
|
||||
if (bits[i] === '..') {
|
||||
bits.splice(i-1, 1);
|
||||
} else if (bits[i] === '.') {
|
||||
bits.splice(i, 1);
|
||||
} else {
|
||||
i++;
|
||||
}
|
||||
}
|
||||
return bits.join('/');
|
||||
}
|
||||
|
||||
function join(a, b) {
|
||||
a = a.trim();
|
||||
b = b.trim();
|
||||
if (/^\//.test(b)) {
|
||||
return b;
|
||||
} else {
|
||||
return a.replace(/\/*$/, '/') + b;
|
||||
}
|
||||
}
|
||||
|
||||
function dirname(path) {
|
||||
var bits = path.split('/');
|
||||
bits.pop();
|
||||
return bits.join('/');
|
||||
}
|
||||
|
||||
/**
|
||||
* Lookup module names and resolve them by calling the definition function if
|
||||
* needed.
|
||||
* @param {string} moduleName A name for the payload to lookup
|
||||
* @return {object} The module specified by moduleName or null if not found.
|
||||
*/
|
||||
Domain.prototype.lookup = function(moduleName) {
|
||||
if (/^\./.test(moduleName)) {
|
||||
moduleName = normalize(join(dirname(this._currentModule), moduleName));
|
||||
}
|
||||
|
||||
if (moduleName in this.modules) {
|
||||
var module = this.modules[moduleName];
|
||||
return module;
|
||||
}
|
||||
|
||||
if (!(moduleName in define.modules)) {
|
||||
throw new Error("Module not defined: " + moduleName);
|
||||
}
|
||||
|
||||
var module = define.modules[moduleName];
|
||||
|
||||
if (typeof module == "function") {
|
||||
var exports = {};
|
||||
var previousModule = this._currentModule;
|
||||
this._currentModule = moduleName;
|
||||
module(this.require.bind(this), exports, { id: moduleName, uri: "" });
|
||||
this._currentModule = previousModule;
|
||||
module = exports;
|
||||
}
|
||||
|
||||
// cache the resulting module object for next time
|
||||
this.modules[moduleName] = module;
|
||||
|
||||
return module;
|
||||
};
|
||||
|
||||
}());
|
||||
|
||||
define.Domain = Domain;
|
||||
define.globalDomain = new Domain();
|
||||
var require = define.globalDomain.require.bind(define.globalDomain);
|
@@ -0,0 +1,20 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
var EXPORTED_SYMBOLS = [ "SourceMapConsumer", "SourceMapGenerator", "SourceNode" ];
|
||||
|
||||
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
|
@@ -0,0 +1,18 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
|
||||
Components.utils.import('resource://gre/modules/devtools/SourceMap.jsm');
|
||||
|
||||
let EXPORTED_SYMBOLS = [ "define", "runSourceMapTests" ];
|
@@ -0,0 +1,8 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
window.sourceMap = {
|
||||
SourceMapConsumer: require('source-map/source-map-consumer').SourceMapConsumer,
|
||||
SourceMapGenerator: require('source-map/source-map-generator').SourceMapGenerator,
|
||||
SourceNode: require('source-map/source-node').SourceNode
|
||||
};
|
@@ -0,0 +1,6 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
let SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
|
||||
let SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;
|
||||
let SourceNode = require('source-map/source-node').SourceNode;
|
@@ -0,0 +1,20 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
function runSourceMapTests(modName, do_throw) {
|
||||
let mod = require(modName);
|
||||
let assert = require('test/source-map/assert');
|
||||
let util = require('test/source-map/util');
|
||||
|
||||
assert.init(do_throw);
|
||||
|
||||
for (let k in mod) {
|
||||
if (/^test/.test(k)) {
|
||||
mod[k](assert, util);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
* WARNING!
|
||||
*
|
||||
* Do not edit this file directly, it is built from the sources at
|
||||
* https://github.com/mozilla/source-map/
|
||||
*/
|
||||
|
||||
Components.utils.import('resource://test/Utils.jsm');
|
@@ -0,0 +1,3 @@
|
||||
function run_test() {
|
||||
runSourceMapTests('{THIS_MODULE}', do_throw);
|
||||
}
|
@@ -0,0 +1,8 @@
|
||||
/*
|
||||
* Copyright 2009-2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE.txt or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
exports.SourceMapGenerator = require('./source-map/source-map-generator').SourceMapGenerator;
|
||||
exports.SourceMapConsumer = require('./source-map/source-map-consumer').SourceMapConsumer;
|
||||
exports.SourceNode = require('./source-map/source-node').SourceNode;
|
@@ -0,0 +1,96 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
|
||||
/**
|
||||
* A data structure which is a combination of an array and a set. Adding a new
|
||||
* member is O(1), testing for membership is O(1), and finding the index of an
|
||||
* element is O(1). Removing elements from the set is not supported. Only
|
||||
* strings are supported for membership.
|
||||
*/
|
||||
function ArraySet() {
|
||||
this._array = [];
|
||||
this._set = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Static method for creating ArraySet instances from an existing array.
|
||||
*/
|
||||
ArraySet.fromArray = function ArraySet_fromArray(aArray) {
|
||||
var set = new ArraySet();
|
||||
for (var i = 0, len = aArray.length; i < len; i++) {
|
||||
set.add(aArray[i]);
|
||||
}
|
||||
return set;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add the given string to this set.
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.add = function ArraySet_add(aStr) {
|
||||
if (this.has(aStr)) {
|
||||
// Already a member; nothing to do.
|
||||
return;
|
||||
}
|
||||
var idx = this._array.length;
|
||||
this._array.push(aStr);
|
||||
this._set[util.toSetString(aStr)] = idx;
|
||||
};
|
||||
|
||||
/**
|
||||
* Is the given string a member of this set?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.has = function ArraySet_has(aStr) {
|
||||
return Object.prototype.hasOwnProperty.call(this._set,
|
||||
util.toSetString(aStr));
|
||||
};
|
||||
|
||||
/**
|
||||
* What is the index of the given string in the array?
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) {
|
||||
if (this.has(aStr)) {
|
||||
return this._set[util.toSetString(aStr)];
|
||||
}
|
||||
throw new Error('"' + aStr + '" is not in the set.');
|
||||
};
|
||||
|
||||
/**
|
||||
* What is the element at the given index?
|
||||
*
|
||||
* @param Number aIdx
|
||||
*/
|
||||
ArraySet.prototype.at = function ArraySet_at(aIdx) {
|
||||
if (aIdx >= 0 && aIdx < this._array.length) {
|
||||
return this._array[aIdx];
|
||||
}
|
||||
throw new Error('No element indexed by ' + aIdx);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the array representation of this set (which has the proper indices
|
||||
* indicated by indexOf). Note that this is a copy of the internal array used
|
||||
* for storing the members so that no one can mess with internal state.
|
||||
*/
|
||||
ArraySet.prototype.toArray = function ArraySet_toArray() {
|
||||
return this._array.slice();
|
||||
};
|
||||
|
||||
exports.ArraySet = ArraySet;
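
// Illustrative usage of the set documented above:
//   var set = ArraySet.fromArray(['foo.js', 'bar.js']);
//   set.has('bar.js');     // true
//   set.indexOf('bar.js'); // 1
//   set.at(0);             // 'foo.js'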
|
||||
|
||||
});
|
@@ -0,0 +1,144 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*
|
||||
* Based on the Base 64 VLQ implementation in Closure Compiler:
|
||||
* https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
|
||||
*
|
||||
* Copyright 2011 The Closure Compiler Authors. All rights reserved.
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are
|
||||
* met:
|
||||
*
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above
|
||||
* copyright notice, this list of conditions and the following
|
||||
* disclaimer in the documentation and/or other materials provided
|
||||
* with the distribution.
|
||||
* * Neither the name of Google Inc. nor the names of its
|
||||
* contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64 = require('./base64');
|
||||
|
||||
// A single base 64 digit can contain 6 bits of data. For the base 64 variable
|
||||
// length quantities we use in the source map spec, the first bit is the sign,
|
||||
// the next four bits are the actual value, and the 6th bit is the
|
||||
// continuation bit. The continuation bit tells us whether there are more
|
||||
// digits in this value following this digit.
|
||||
//
|
||||
// Continuation
|
||||
// | Sign
|
||||
// | |
|
||||
// V V
|
||||
// 101011
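//
// Worked example (illustrative): for the value 16, toVLQSigned(16) is 32
// (binary 100000). The low five bits are 00000 and a bit remains, so the
// continuation bit is set: 100000 = 32 -> 'g'. The next five bits are 00001
// with nothing left over: 000001 = 1 -> 'B'. Hence encode(16) === 'gB', and
// decode('gB') returns { value: 16, rest: '' }.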
|
||||
|
||||
var VLQ_BASE_SHIFT = 5;
|
||||
|
||||
// binary: 100000
|
||||
var VLQ_BASE = 1 << VLQ_BASE_SHIFT;
|
||||
|
||||
// binary: 011111
|
||||
var VLQ_BASE_MASK = VLQ_BASE - 1;
|
||||
|
||||
// binary: 100000
|
||||
var VLQ_CONTINUATION_BIT = VLQ_BASE;
|
||||
|
||||
/**
|
||||
* Converts from a two's-complement value to a value where the sign bit is
|
||||
* placed in the least significant bit. For example, as decimals:
|
||||
* 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
|
||||
* 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
|
||||
*/
|
||||
function toVLQSigned(aValue) {
|
||||
return aValue < 0
|
||||
? ((-aValue) << 1) + 1
|
||||
: (aValue << 1) + 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts to a two's-complement value from a value where the sign bit is
|
||||
* placed in the least significant bit. For example, as decimals:
|
||||
* 2 (10 binary) becomes 1, 3 (11 binary) becomes -1
|
||||
* 4 (100 binary) becomes 2, 5 (101 binary) becomes -2
|
||||
*/
|
||||
function fromVLQSigned(aValue) {
|
||||
var isNegative = (aValue & 1) === 1;
|
||||
var shifted = aValue >> 1;
|
||||
return isNegative
|
||||
? -shifted
|
||||
: shifted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the base 64 VLQ encoded value.
|
||||
*/
|
||||
exports.encode = function base64VLQ_encode(aValue) {
|
||||
var encoded = "";
|
||||
var digit;
|
||||
|
||||
var vlq = toVLQSigned(aValue);
|
||||
|
||||
do {
|
||||
digit = vlq & VLQ_BASE_MASK;
|
||||
vlq >>>= VLQ_BASE_SHIFT;
|
||||
if (vlq > 0) {
|
||||
// There are still more digits in this value, so we must make sure the
|
||||
// continuation bit is marked.
|
||||
digit |= VLQ_CONTINUATION_BIT;
|
||||
}
|
||||
encoded += base64.encode(digit);
|
||||
} while (vlq > 0);
|
||||
|
||||
return encoded;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decodes the next base 64 VLQ value from the given string and returns the
|
||||
* value and the rest of the string.
|
||||
*/
|
||||
exports.decode = function base64VLQ_decode(aStr) {
|
||||
var i = 0;
|
||||
var strLen = aStr.length;
|
||||
var result = 0;
|
||||
var shift = 0;
|
||||
var continuation, digit;
|
||||
|
||||
do {
|
||||
if (i >= strLen) {
|
||||
throw new Error("Expected more digits in base 64 VLQ value.");
|
||||
}
|
||||
digit = base64.decode(aStr.charAt(i++));
|
||||
continuation = !!(digit & VLQ_CONTINUATION_BIT);
|
||||
digit &= VLQ_BASE_MASK;
|
||||
result = result + (digit << shift);
|
||||
shift += VLQ_BASE_SHIFT;
|
||||
} while (continuation);
|
||||
|
||||
return {
|
||||
value: fromVLQSigned(result),
|
||||
rest: aStr.slice(i)
|
||||
};
|
||||
};
|
||||
|
||||
});
|
@@ -0,0 +1,42 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var charToIntMap = {};
|
||||
var intToCharMap = {};
|
||||
|
||||
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
|
||||
.split('')
|
||||
.forEach(function (ch, index) {
|
||||
charToIntMap[ch] = index;
|
||||
intToCharMap[index] = ch;
|
||||
});
|
||||
|
||||
/**
|
||||
* Encode an integer in the range of 0 to 63 to a single base 64 digit.
|
||||
*/
|
||||
exports.encode = function base64_encode(aNumber) {
|
||||
if (aNumber in intToCharMap) {
|
||||
return intToCharMap[aNumber];
|
||||
}
|
||||
throw new TypeError("Must be between 0 and 63: " + aNumber);
|
||||
};
|
||||
|
||||
/**
|
||||
* Decode a single base 64 digit to an integer.
|
||||
*/
|
||||
exports.decode = function base64_decode(aChar) {
|
||||
if (aChar in charToIntMap) {
|
||||
return charToIntMap[aChar];
|
||||
}
|
||||
throw new TypeError("Not a valid base 64 digit: " + aChar);
|
||||
};
|
||||
|
||||
});
|
@@ -0,0 +1,81 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
/**
|
||||
* Recursive implementation of binary search.
|
||||
*
|
||||
* @param aLow Indices here and lower do not contain the needle.
|
||||
* @param aHigh Indices here and higher do not contain the needle.
|
||||
* @param aNeedle The element being searched for.
|
||||
* @param aHaystack The non-empty array being searched.
|
||||
* @param aCompare Function which takes two elements and returns -1, 0, or 1.
|
||||
*/
|
||||
function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare) {
|
||||
// This function terminates when one of the following is true:
|
||||
//
|
||||
// 1. We find the exact element we are looking for.
|
||||
//
|
||||
// 2. We did not find the exact element, but we can return the next
|
||||
// closest element that is less than that element.
|
||||
//
|
||||
// 3. We did not find the exact element, and there is no next-closest
|
||||
// element which is less than the one we are searching for, so we
|
||||
// return null.
|
||||
var mid = Math.floor((aHigh - aLow) / 2) + aLow;
|
||||
var cmp = aCompare(aNeedle, aHaystack[mid]);
|
||||
if (cmp === 0) {
|
||||
// Found the element we are looking for.
|
||||
return aHaystack[mid];
|
||||
}
|
||||
else if (cmp > 0) {
|
||||
// aHaystack[mid] is greater than our needle.
|
||||
if (aHigh - mid > 1) {
|
||||
// The element is in the upper half.
|
||||
return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare);
|
||||
}
|
||||
// We did not find an exact match, return the next closest one
|
||||
// (termination case 2).
|
||||
return aHaystack[mid];
|
||||
}
|
||||
else {
|
||||
// aHaystack[mid] is less than our needle.
|
||||
if (mid - aLow > 1) {
|
||||
// The element is in the lower half.
|
||||
return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare);
|
||||
}
|
||||
// The exact needle element was not found in this haystack. Determine if
|
||||
// we are in termination case (2) or (3) and return the appropriate thing.
|
||||
return aLow < 0
|
||||
? null
|
||||
: aHaystack[aLow];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This is an implementation of binary search which will always try and return
|
||||
* the next lowest value checked if there is no exact hit. This is because
|
||||
* mappings between original and generated line/col pairs are single points,
|
||||
* and there is an implicit region between each of them, so a miss just means
|
||||
* that you aren't on the very start of a region.
|
||||
*
|
||||
* @param aNeedle The element you are looking for.
|
||||
* @param aHaystack The array that is being searched.
|
||||
* @param aCompare A function which takes the needle and an element in the
|
||||
* array and returns -1, 0, or 1 depending on whether the needle is less
|
||||
* than, equal to, or greater than the element, respectively.
|
||||
*/
|
||||
exports.search = function search(aNeedle, aHaystack, aCompare) {
|
||||
return aHaystack.length > 0
|
||||
? recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, aCompare)
|
||||
: null;
|
||||
};
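
// Illustrative example with a numeric comparator:
//   search(6, [2, 4, 8], function (a, b) { return a - b; }); // -> 4
// 6 is not present, so the closest smaller element (4) is returned, while
//   search(1, [2, 4, 8], function (a, b) { return a - b; }); // -> null
// returns null because no element is less than or equal to 1.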
|
||||
|
||||
});
|
@@ -0,0 +1,426 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var util = require('./util');
|
||||
var binarySearch = require('./binary-search');
|
||||
var ArraySet = require('./array-set').ArraySet;
|
||||
var base64VLQ = require('./base64-vlq');
|
||||
|
||||
/**
|
||||
* A SourceMapConsumer instance represents a parsed source map which we can
|
||||
* query for information about the original file positions by giving it a file
|
||||
* position in the generated source.
|
||||
*
|
||||
* The only parameter is the raw source map (either as a JSON string, or
|
||||
* already parsed to an object). According to the spec, source maps have the
|
||||
* following attributes:
|
||||
*
|
||||
* - version: Which version of the source map spec this map is following.
|
||||
* - sources: An array of URLs to the original source files.
|
||||
 * - names: An array of identifiers which can be referenced by individual mappings.
|
||||
* - sourceRoot: Optional. The URL root from which all sources are relative.
|
||||
* - sourcesContent: Optional. An array of contents of the original source files.
|
||||
* - mappings: A string of base64 VLQs which contain the actual mappings.
|
||||
* - file: The generated file this source map is associated with.
|
||||
*
|
||||
* Here is an example source map, taken from the source map spec[0]:
|
||||
*
|
||||
* {
|
||||
* version : 3,
|
||||
* file: "out.js",
|
||||
* sourceRoot : "",
|
||||
* sources: ["foo.js", "bar.js"],
|
||||
* names: ["src", "maps", "are", "fun"],
|
||||
* mappings: "AA,AB;;ABCDE;"
|
||||
* }
|
||||
*
|
||||
* [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1#
|
||||
*/
|
||||
function SourceMapConsumer(aSourceMap) {
|
||||
var sourceMap = aSourceMap;
|
||||
if (typeof aSourceMap === 'string') {
|
||||
sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, ''));
|
||||
}
|
||||
|
||||
var version = util.getArg(sourceMap, 'version');
|
||||
var sources = util.getArg(sourceMap, 'sources');
|
||||
var names = util.getArg(sourceMap, 'names');
|
||||
var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null);
|
||||
var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null);
|
||||
var mappings = util.getArg(sourceMap, 'mappings');
|
||||
var file = util.getArg(sourceMap, 'file');
|
||||
|
||||
if (version !== this._version) {
|
||||
throw new Error('Unsupported version: ' + version);
|
||||
}
|
||||
|
||||
this._names = ArraySet.fromArray(names);
|
||||
this._sources = ArraySet.fromArray(sources);
|
||||
this.sourceRoot = sourceRoot;
|
||||
this.sourcesContent = sourcesContent;
|
||||
this.file = file;
|
||||
|
||||
// `this._generatedMappings` and `this._originalMappings` hold the parsed
|
||||
// mapping coordinates from the source map's "mappings" attribute. Each
|
||||
// object in the array is of the form
|
||||
//
|
||||
// {
|
||||
// generatedLine: The line number in the generated code,
|
||||
// generatedColumn: The column number in the generated code,
|
||||
// source: The path to the original source file that generated this
|
||||
// chunk of code,
|
||||
// originalLine: The line number in the original source that
|
||||
// corresponds to this chunk of generated code,
|
||||
// originalColumn: The column number in the original source that
|
||||
// corresponds to this chunk of generated code,
|
||||
// name: The name of the original symbol which generated this chunk of
|
||||
// code.
|
||||
// }
|
||||
//
|
||||
// All properties except for `generatedLine` and `generatedColumn` can be
|
||||
// `null`.
|
||||
//
|
||||
// `this._generatedMappings` is ordered by the generated positions.
|
||||
//
|
||||
// `this._originalMappings` is ordered by the original positions.
|
||||
this._generatedMappings = [];
|
||||
this._originalMappings = [];
|
||||
this._parseMappings(mappings, sourceRoot);
|
||||
}
|
||||
|
||||
/**
|
||||
* The version of the source mapping spec that we are consuming.
|
||||
*/
|
||||
SourceMapConsumer.prototype._version = 3;
|
||||
|
||||
/**
|
||||
* The list of original sources.
|
||||
*/
|
||||
Object.defineProperty(SourceMapConsumer.prototype, 'sources', {
|
||||
get: function () {
|
||||
return this._sources.toArray().map(function (s) {
|
||||
return this.sourceRoot ? util.join(this.sourceRoot, s) : s;
|
||||
}, this);
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
 * Parse the mappings in a string into a data structure which we can easily
|
||||
* query (an ordered list in this._generatedMappings).
|
||||
*/
|
||||
SourceMapConsumer.prototype._parseMappings =
|
||||
function SourceMapConsumer_parseMappings(aStr, aSourceRoot) {
|
||||
var generatedLine = 1;
|
||||
var previousGeneratedColumn = 0;
|
||||
var previousOriginalLine = 0;
|
||||
var previousOriginalColumn = 0;
|
||||
var previousSource = 0;
|
||||
var previousName = 0;
|
||||
var mappingSeparator = /^[,;]/;
|
||||
var str = aStr;
|
||||
var mapping;
|
||||
var temp;
|
||||
|
||||
while (str.length > 0) {
|
||||
if (str.charAt(0) === ';') {
|
||||
generatedLine++;
|
||||
str = str.slice(1);
|
||||
previousGeneratedColumn = 0;
|
||||
}
|
||||
else if (str.charAt(0) === ',') {
|
||||
str = str.slice(1);
|
||||
}
|
||||
else {
|
||||
mapping = {};
|
||||
mapping.generatedLine = generatedLine;
|
||||
|
||||
// Generated column.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.generatedColumn = previousGeneratedColumn + temp.value;
|
||||
previousGeneratedColumn = mapping.generatedColumn;
|
||||
str = temp.rest;
|
||||
|
||||
if (str.length > 0 && !mappingSeparator.test(str.charAt(0))) {
|
||||
// Original source.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.source = this._sources.at(previousSource + temp.value);
|
||||
previousSource += temp.value;
|
||||
str = temp.rest;
|
||||
if (str.length === 0 || mappingSeparator.test(str.charAt(0))) {
|
||||
throw new Error('Found a source, but no line and column');
|
||||
}
|
||||
|
||||
// Original line.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.originalLine = previousOriginalLine + temp.value;
|
||||
previousOriginalLine = mapping.originalLine;
|
||||
// Lines are stored 0-based
|
||||
mapping.originalLine += 1;
|
||||
str = temp.rest;
|
||||
if (str.length === 0 || mappingSeparator.test(str.charAt(0))) {
|
||||
throw new Error('Found a source and line, but no column');
|
||||
}
|
||||
|
||||
// Original column.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.originalColumn = previousOriginalColumn + temp.value;
|
||||
previousOriginalColumn = mapping.originalColumn;
|
||||
str = temp.rest;
|
||||
|
||||
if (str.length > 0 && !mappingSeparator.test(str.charAt(0))) {
|
||||
// Original name.
|
||||
temp = base64VLQ.decode(str);
|
||||
mapping.name = this._names.at(previousName + temp.value);
|
||||
previousName += temp.value;
|
||||
str = temp.rest;
|
||||
}
|
||||
}
|
||||
|
||||
this._generatedMappings.push(mapping);
|
||||
if (typeof mapping.originalLine === 'number') {
|
||||
this._originalMappings.push(mapping);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this._originalMappings.sort(this._compareOriginalPositions);
|
||||
};
|
||||
|
||||
/**
|
||||
* Comparator between two mappings where the original positions are compared.
|
||||
*/
|
||||
SourceMapConsumer.prototype._compareOriginalPositions =
|
||||
function SourceMapConsumer_compareOriginalPositions(mappingA, mappingB) {
|
||||
if (mappingA.source > mappingB.source) {
|
||||
return 1;
|
||||
}
|
||||
else if (mappingA.source < mappingB.source) {
|
||||
return -1;
|
||||
}
|
||||
else {
|
||||
var cmp = mappingA.originalLine - mappingB.originalLine;
|
||||
return cmp === 0
|
||||
? mappingA.originalColumn - mappingB.originalColumn
|
||||
: cmp;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Comparator between two mappings where the generated positions are compared.
|
||||
*/
|
||||
SourceMapConsumer.prototype._compareGeneratedPositions =
|
||||
function SourceMapConsumer_compareGeneratedPositions(mappingA, mappingB) {
|
||||
var cmp = mappingA.generatedLine - mappingB.generatedLine;
|
||||
return cmp === 0
|
||||
? mappingA.generatedColumn - mappingB.generatedColumn
|
||||
: cmp;
|
||||
};
|
||||
|
||||
/**
|
||||
* Find the mapping that best matches the hypothetical "needle" mapping that
|
||||
* we are searching for in the given "haystack" of mappings.
|
||||
*/
|
||||
SourceMapConsumer.prototype._findMapping =
|
||||
function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName,
|
||||
aColumnName, aComparator) {
|
||||
// To return the position we are searching for, we must first find the
|
||||
// mapping for the given position and then return the opposite position it
|
||||
// points to. Because the mappings are sorted, we can use binary search to
|
||||
// find the best mapping.
|
||||
|
||||
if (aNeedle[aLineName] <= 0) {
|
||||
throw new TypeError('Line must be greater than or equal to 1, got '
|
||||
+ aNeedle[aLineName]);
|
||||
}
|
||||
if (aNeedle[aColumnName] < 0) {
|
||||
throw new TypeError('Column must be greater than or equal to 0, got '
|
||||
+ aNeedle[aColumnName]);
|
||||
}
|
||||
|
||||
return binarySearch.search(aNeedle, aMappings, aComparator);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the original source, line, and column information for the generated
|
||||
* source's line and column positions provided. The only argument is an object
|
||||
* with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source.
|
||||
* - column: The column number in the generated source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - source: The original source file, or null.
|
||||
* - line: The line number in the original source, or null.
|
||||
* - column: The column number in the original source, or null.
|
||||
* - name: The original identifier, or null.
|
||||
*/
|
||||
SourceMapConsumer.prototype.originalPositionFor =
|
||||
function SourceMapConsumer_originalPositionFor(aArgs) {
|
||||
var needle = {
|
||||
generatedLine: util.getArg(aArgs, 'line'),
|
||||
generatedColumn: util.getArg(aArgs, 'column')
|
||||
};
|
||||
|
||||
var mapping = this._findMapping(needle,
|
||||
this._generatedMappings,
|
||||
"generatedLine",
|
||||
"generatedColumn",
|
||||
this._compareGeneratedPositions)
|
||||
|
||||
if (mapping) {
|
||||
var source = util.getArg(mapping, 'source', null);
|
||||
if (source && this.sourceRoot) {
|
||||
source = util.join(this.sourceRoot, source);
|
||||
}
|
||||
return {
|
||||
source: source,
|
||||
line: util.getArg(mapping, 'originalLine', null),
|
||||
column: util.getArg(mapping, 'originalColumn', null),
|
||||
name: util.getArg(mapping, 'name', null)
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
source: null,
|
||||
line: null,
|
||||
column: null,
|
||||
name: null
|
||||
};
|
||||
};
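  /**
   * Usage sketch (added for illustration; not part of the original file).
   * `rawSourceMap` stands in for a real source map, passed either as a JSON
   * string or as an already-parsed object:
   *
   *     var consumer = new SourceMapConsumer(rawSourceMap);
   *     var pos = consumer.originalPositionFor({ line: 2, column: 28 });
   *     // pos.source, pos.line, pos.column and pos.name are each null when
   *     // the generated position is not covered by any mapping.
   */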
|
||||
|
||||
/**
|
||||
* Returns the original source content. The only argument is
|
||||
* the url of the original source file. Returns null if no
|
||||
 * original source content is available.
|
||||
*/
|
||||
SourceMapConsumer.prototype.sourceContentFor =
|
||||
function SourceMapConsumer_sourceContentFor(aSource) {
|
||||
if (!this.sourcesContent) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (this.sourceRoot) {
|
||||
// Try to remove the sourceRoot
|
||||
var relativeUrl = util.relative(this.sourceRoot, aSource);
|
||||
if (this._sources.has(relativeUrl)) {
|
||||
return this.sourcesContent[this._sources.indexOf(relativeUrl)];
|
||||
}
|
||||
}
|
||||
|
||||
if (this._sources.has(aSource)) {
|
||||
return this.sourcesContent[this._sources.indexOf(aSource)];
|
||||
}
|
||||
|
||||
throw new Error('"' + aSource + '" is not in the SourceMap.');
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the generated line and column information for the original source,
|
||||
* line, and column positions provided. The only argument is an object with
|
||||
* the following properties:
|
||||
*
|
||||
* - source: The filename of the original source.
|
||||
* - line: The line number in the original source.
|
||||
* - column: The column number in the original source.
|
||||
*
|
||||
* and an object is returned with the following properties:
|
||||
*
|
||||
* - line: The line number in the generated source, or null.
|
||||
* - column: The column number in the generated source, or null.
|
||||
*/
|
||||
SourceMapConsumer.prototype.generatedPositionFor =
|
||||
function SourceMapConsumer_generatedPositionFor(aArgs) {
|
||||
var needle = {
|
||||
source: util.getArg(aArgs, 'source'),
|
||||
originalLine: util.getArg(aArgs, 'line'),
|
||||
originalColumn: util.getArg(aArgs, 'column')
|
||||
};
|
||||
|
||||
if (this.sourceRoot) {
|
||||
needle.source = util.relative(this.sourceRoot, needle.source);
|
||||
}
|
||||
|
||||
var mapping = this._findMapping(needle,
|
||||
this._originalMappings,
|
||||
"originalLine",
|
||||
"originalColumn",
|
||||
this._compareOriginalPositions)
|
||||
|
||||
if (mapping) {
|
||||
return {
|
||||
line: util.getArg(mapping, 'generatedLine', null),
|
||||
column: util.getArg(mapping, 'generatedColumn', null)
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
line: null,
|
||||
column: null
|
||||
};
|
||||
};
|
||||
|
||||
SourceMapConsumer.GENERATED_ORDER = 1;
|
||||
SourceMapConsumer.ORIGINAL_ORDER = 2;
|
||||
|
||||
/**
|
||||
* Iterate over each mapping between an original source/line/column and a
|
||||
* generated line/column in this source map.
|
||||
*
|
||||
* @param Function aCallback
|
||||
* The function that is called with each mapping.
|
||||
* @param Object aContext
|
||||
* Optional. If specified, this object will be the value of `this` every
|
||||
* time that `aCallback` is called.
|
||||
* @param aOrder
|
||||
* Either `SourceMapConsumer.GENERATED_ORDER` or
|
||||
* `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
|
||||
* iterate over the mappings sorted by the generated file's line/column
|
||||
* order or the original's source/line/column order, respectively. Defaults to
|
||||
* `SourceMapConsumer.GENERATED_ORDER`.
|
||||
*/
|
||||
SourceMapConsumer.prototype.eachMapping =
|
||||
function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) {
|
||||
var context = aContext || null;
|
||||
var order = aOrder || SourceMapConsumer.GENERATED_ORDER;
|
||||
|
||||
var mappings;
|
||||
switch (order) {
|
||||
case SourceMapConsumer.GENERATED_ORDER:
|
||||
mappings = this._generatedMappings;
|
||||
break;
|
||||
case SourceMapConsumer.ORIGINAL_ORDER:
|
||||
mappings = this._originalMappings;
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unknown order of iteration.");
|
||||
}
|
||||
|
||||
var sourceRoot = this.sourceRoot;
|
||||
mappings.map(function (mapping) {
|
||||
var source = mapping.source;
|
||||
if (source && sourceRoot) {
|
||||
source = util.join(sourceRoot, source);
|
||||
}
|
||||
return {
|
||||
source: source,
|
||||
generatedLine: mapping.generatedLine,
|
||||
generatedColumn: mapping.generatedColumn,
|
||||
originalLine: mapping.originalLine,
|
||||
originalColumn: mapping.originalColumn,
|
||||
name: mapping.name
|
||||
};
|
||||
}).forEach(aCallback, context);
|
||||
};
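  /**
   * Usage sketch (added for illustration; not part of the original file).
   * `consumer` is assumed to be an existing SourceMapConsumer instance:
   *
   *     consumer.eachMapping(function (m) {
   *       console.log(m.generatedLine + ':' + m.generatedColumn, '<-',
   *                   m.source, m.originalLine + ':' + m.originalColumn);
   *     }, null, SourceMapConsumer.GENERATED_ORDER);
   */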
|
||||
|
||||
exports.SourceMapConsumer = SourceMapConsumer;
|
||||
|
||||
});
|
@@ -0,0 +1,362 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var base64VLQ = require('./base64-vlq');
|
||||
var util = require('./util');
|
||||
var ArraySet = require('./array-set').ArraySet;
|
||||
|
||||
/**
|
||||
* An instance of the SourceMapGenerator represents a source map which is
|
||||
* being built incrementally. To create a new one, you must pass an object
|
||||
* with the following properties:
|
||||
*
|
||||
* - file: The filename of the generated source.
|
||||
* - sourceRoot: An optional root for all URLs in this source map.
|
||||
*/
|
||||
function SourceMapGenerator(aArgs) {
|
||||
this._file = util.getArg(aArgs, 'file');
|
||||
this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null);
|
||||
this._sources = new ArraySet();
|
||||
this._names = new ArraySet();
|
||||
this._mappings = [];
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
|
||||
SourceMapGenerator.prototype._version = 3;
|
||||
|
||||
/**
|
||||
* Creates a new SourceMapGenerator based on a SourceMapConsumer
|
||||
*
|
||||
* @param aSourceMapConsumer The SourceMap.
|
||||
*/
|
||||
SourceMapGenerator.fromSourceMap =
|
||||
function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) {
|
||||
var sourceRoot = aSourceMapConsumer.sourceRoot;
|
||||
var generator = new SourceMapGenerator({
|
||||
file: aSourceMapConsumer.file,
|
||||
sourceRoot: sourceRoot
|
||||
});
|
||||
aSourceMapConsumer.eachMapping(function (mapping) {
|
||||
var newMapping = {
|
||||
generated: {
|
||||
line: mapping.generatedLine,
|
||||
column: mapping.generatedColumn
|
||||
}
|
||||
};
|
||||
|
||||
if (mapping.source) {
|
||||
newMapping.source = mapping.source;
|
||||
if (sourceRoot) {
|
||||
newMapping.source = util.relative(sourceRoot, newMapping.source);
|
||||
}
|
||||
|
||||
newMapping.original = {
|
||||
line: mapping.originalLine,
|
||||
column: mapping.originalColumn
|
||||
};
|
||||
|
||||
if (mapping.name) {
|
||||
newMapping.name = mapping.name;
|
||||
}
|
||||
}
|
||||
|
||||
generator.addMapping(newMapping);
|
||||
});
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content) {
|
||||
generator.setSourceContent(sourceFile, content);
|
||||
}
|
||||
});
|
||||
return generator;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a single mapping from original source line and column to the generated
|
||||
* source's line and column for this source map being created. The mapping
|
||||
* object should have the following properties:
|
||||
*
|
||||
* - generated: An object with the generated line and column positions.
|
||||
* - original: An object with the original line and column positions.
|
||||
* - source: The original source file (relative to the sourceRoot).
|
||||
* - name: An optional original token name for this mapping.
|
||||
*/
|
||||
SourceMapGenerator.prototype.addMapping =
|
||||
function SourceMapGenerator_addMapping(aArgs) {
|
||||
var generated = util.getArg(aArgs, 'generated');
|
||||
var original = util.getArg(aArgs, 'original', null);
|
||||
var source = util.getArg(aArgs, 'source', null);
|
||||
var name = util.getArg(aArgs, 'name', null);
|
||||
|
||||
this._validateMapping(generated, original, source, name);
|
||||
|
||||
if (source && !this._sources.has(source)) {
|
||||
this._sources.add(source);
|
||||
}
|
||||
|
||||
if (name && !this._names.has(name)) {
|
||||
this._names.add(name);
|
||||
}
|
||||
|
||||
this._mappings.push({
|
||||
generated: generated,
|
||||
original: original,
|
||||
source: source,
|
||||
name: name
|
||||
});
|
||||
};
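  /**
   * Usage sketch (added for illustration; not part of the original file).
   * The file and source names are made up:
   *
   *     var generator = new SourceMapGenerator({ file: 'bundle.js' });
   *     generator.addMapping({
   *       generated: { line: 1, column: 0 },
   *       original: { line: 1, column: 0 },
   *       source: 'a.js'
   *     });
   *     JSON.parse(generator.toString()); // a version 3 source map object
   */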
|
||||
|
||||
/**
|
||||
* Set the source content for a source file.
|
||||
*/
|
||||
SourceMapGenerator.prototype.setSourceContent =
|
||||
function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) {
|
||||
var source = aSourceFile;
|
||||
if (this._sourceRoot) {
|
||||
source = util.relative(this._sourceRoot, source);
|
||||
}
|
||||
|
||||
if (aSourceContent !== null) {
|
||||
// Add the source content to the _sourcesContents map.
|
||||
// Create a new _sourcesContents map if the property is null.
|
||||
if (!this._sourcesContents) {
|
||||
this._sourcesContents = {};
|
||||
}
|
||||
this._sourcesContents[util.toSetString(source)] = aSourceContent;
|
||||
} else {
|
||||
// Remove the source file from the _sourcesContents map.
|
||||
// If the _sourcesContents map is empty, set the property to null.
|
||||
delete this._sourcesContents[util.toSetString(source)];
|
||||
if (Object.keys(this._sourcesContents).length === 0) {
|
||||
this._sourcesContents = null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Applies a SourceMap for a source file to the SourceMap.
|
||||
* Each mapping to the supplied source file is rewritten using the
|
||||
* supplied SourceMap. Note: The resolution for the resulting mappings
|
||||
 * is the minimum of this map and the supplied map.
|
||||
*
|
||||
* @param aSourceMapConsumer The SourceMap to be applied.
|
||||
* @param aSourceFile Optional. The filename of the source file.
|
||||
* If omitted, sourceMapConsumer.file will be used.
|
||||
*/
|
||||
SourceMapGenerator.prototype.applySourceMap =
|
||||
function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile) {
|
||||
// If aSourceFile is omitted, we will use the file property of the SourceMap
|
||||
if (!aSourceFile) {
|
||||
aSourceFile = aSourceMapConsumer.file;
|
||||
}
|
||||
var sourceRoot = this._sourceRoot;
|
||||
// Make "aSourceFile" relative if an absolute URL is passed.
|
||||
if (sourceRoot) {
|
||||
aSourceFile = util.relative(sourceRoot, aSourceFile);
|
||||
}
|
||||
// Applying the SourceMap can add and remove items from the sources and
|
||||
// the names array.
|
||||
var newSources = new ArraySet();
|
||||
var newNames = new ArraySet();
|
||||
|
||||
// Find mappings for the "aSourceFile"
|
||||
this._mappings.forEach(function (mapping) {
|
||||
if (mapping.source === aSourceFile && mapping.original) {
|
||||
// Check if it can be mapped by the SourceMap.
|
||||
// Then update the mapping.
|
||||
var original = aSourceMapConsumer.originalPositionFor({
|
||||
line: mapping.original.line,
|
||||
column: mapping.original.column
|
||||
});
|
||||
if (original && original.source !== null) {
|
||||
// Copy mapping
|
||||
if (sourceRoot) {
|
||||
mapping.source = util.relative(sourceRoot, original.source);
|
||||
} else {
|
||||
mapping.source = original.source;
|
||||
}
|
||||
mapping.original.line = original.line;
|
||||
mapping.original.column = original.column;
|
||||
mapping.name = mapping.name && original.name || mapping.name;
|
||||
}
|
||||
}
|
||||
|
||||
var source = mapping.source;
|
||||
if (source && !newSources.has(source)) {
|
||||
newSources.add(source);
|
||||
}
|
||||
|
||||
var name = mapping.name;
|
||||
if (name && !newNames.has(name)) {
|
||||
newNames.add(name);
|
||||
}
|
||||
|
||||
}, this);
|
||||
this._sources = newSources;
|
||||
this._names = newNames;
|
||||
|
||||
// Copy sourcesContents of applied map.
|
||||
aSourceMapConsumer.sources.forEach(function (sourceFile) {
|
||||
var content = aSourceMapConsumer.sourceContentFor(sourceFile);
|
||||
if (content) {
|
||||
if (sourceRoot) {
|
||||
sourceFile = util.relative(sourceRoot, sourceFile);
|
||||
}
|
||||
this.setSourceContent(sourceFile, content);
|
||||
}
|
||||
}, this);
|
||||
};
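  /**
   * Usage sketch (added for illustration; not part of the original file).
   * `minifiedToCompiled` and `compiledToOriginal` are hypothetical
   * SourceMapConsumer instances for a two-step build
   * (original sources -> bundle.js -> bundle.min.js):
   *
   *     var gen = SourceMapGenerator.fromSourceMap(minifiedToCompiled);
   *     gen.applySourceMap(compiledToOriginal, 'bundle.js');
   *     // gen now maps bundle.min.js positions back to the original sources.
   */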
|
||||
|
||||
/**
|
||||
* A mapping can have one of the three levels of data:
|
||||
*
|
||||
* 1. Just the generated position.
|
||||
 * 2. The generated position, original position, and original source.
|
||||
* 3. Generated and original position, original source, as well as a name
|
||||
* token.
|
||||
*
|
||||
* To maintain consistency, we validate that any new mapping being added falls
|
||||
* in to one of these categories.
|
||||
*/
|
||||
SourceMapGenerator.prototype._validateMapping =
|
||||
function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource,
|
||||
aName) {
|
||||
if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& !aOriginal && !aSource && !aName) {
|
||||
// Case 1.
|
||||
return;
|
||||
}
|
||||
else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated
|
||||
&& aOriginal && 'line' in aOriginal && 'column' in aOriginal
|
||||
&& aGenerated.line > 0 && aGenerated.column >= 0
|
||||
&& aOriginal.line > 0 && aOriginal.column >= 0
|
||||
&& aSource) {
|
||||
// Cases 2 and 3.
|
||||
return;
|
||||
}
|
||||
else {
|
||||
throw new Error('Invalid mapping.');
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Serialize the accumulated mappings in to the stream of base 64 VLQs
|
||||
* specified by the source map format.
|
||||
*/
|
||||
SourceMapGenerator.prototype._serializeMappings =
|
||||
function SourceMapGenerator_serializeMappings() {
|
||||
var previousGeneratedColumn = 0;
|
||||
var previousGeneratedLine = 1;
|
||||
var previousOriginalColumn = 0;
|
||||
var previousOriginalLine = 0;
|
||||
var previousName = 0;
|
||||
var previousSource = 0;
|
||||
var result = '';
|
||||
var mapping;
|
||||
|
||||
// The mappings must be guaranteed to be in sorted order before we start
|
||||
// serializing them or else the generated line numbers (which are defined
|
||||
// via the ';' separators) will be all messed up. Note: it might be more
|
||||
// performant to maintain the sorting as we insert them, rather than as we
|
||||
// serialize them, but the big O is the same either way.
|
||||
this._mappings.sort(function (mappingA, mappingB) {
|
||||
var cmp = mappingA.generated.line - mappingB.generated.line;
|
||||
return cmp === 0
|
||||
? mappingA.generated.column - mappingB.generated.column
|
||||
: cmp;
|
||||
});
|
||||
|
||||
for (var i = 0, len = this._mappings.length; i < len; i++) {
|
||||
mapping = this._mappings[i];
|
||||
|
||||
if (mapping.generated.line !== previousGeneratedLine) {
|
||||
previousGeneratedColumn = 0;
|
||||
while (mapping.generated.line !== previousGeneratedLine) {
|
||||
result += ';';
|
||||
previousGeneratedLine++;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (i > 0) {
|
||||
result += ',';
|
||||
}
|
||||
}
|
||||
|
||||
result += base64VLQ.encode(mapping.generated.column
|
||||
- previousGeneratedColumn);
|
||||
previousGeneratedColumn = mapping.generated.column;
|
||||
|
||||
if (mapping.source && mapping.original) {
|
||||
result += base64VLQ.encode(this._sources.indexOf(mapping.source)
|
||||
- previousSource);
|
||||
previousSource = this._sources.indexOf(mapping.source);
|
||||
|
||||
// lines are stored 0-based in SourceMap spec version 3
|
||||
result += base64VLQ.encode(mapping.original.line - 1
|
||||
- previousOriginalLine);
|
||||
previousOriginalLine = mapping.original.line - 1;
|
||||
|
||||
result += base64VLQ.encode(mapping.original.column
|
||||
- previousOriginalColumn);
|
||||
previousOriginalColumn = mapping.original.column;
|
||||
|
||||
if (mapping.name) {
|
||||
result += base64VLQ.encode(this._names.indexOf(mapping.name)
|
||||
- previousName);
|
||||
previousName = this._names.indexOf(mapping.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Externalize the source map.
|
||||
*/
|
||||
SourceMapGenerator.prototype.toJSON =
|
||||
function SourceMapGenerator_toJSON() {
|
||||
var map = {
|
||||
version: this._version,
|
||||
file: this._file,
|
||||
sources: this._sources.toArray(),
|
||||
names: this._names.toArray(),
|
||||
mappings: this._serializeMappings()
|
||||
};
|
||||
if (this._sourceRoot) {
|
||||
map.sourceRoot = this._sourceRoot;
|
||||
}
|
||||
if (this._sourcesContents) {
|
||||
map.sourcesContent = map.sources.map(function (source) {
|
||||
if (map.sourceRoot) {
|
||||
source = util.relative(map.sourceRoot, source);
|
||||
}
|
||||
return Object.prototype.hasOwnProperty.call(
|
||||
this._sourcesContents, util.toSetString(source))
|
||||
? this._sourcesContents[util.toSetString(source)]
|
||||
: null;
|
||||
}, this);
|
||||
}
|
||||
return map;
|
||||
};
|
||||
|
||||
/**
|
||||
* Render the source map being generated to a string.
|
||||
*/
|
||||
SourceMapGenerator.prototype.toString =
|
||||
function SourceMapGenerator_toString() {
|
||||
return JSON.stringify(this);
|
||||
};
|
||||
|
||||
exports.SourceMapGenerator = SourceMapGenerator;
|
||||
|
||||
});
|
@@ -0,0 +1,215 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator;
|
||||
|
||||
/**
|
||||
* SourceNodes provide a way to abstract over interpolating/concatenating
|
||||
* snippets of generated JavaScript source code while maintaining the line and
|
||||
* column information associated with the original source code.
|
||||
*
|
||||
* @param aLine The original line number.
|
||||
* @param aColumn The original column number.
|
||||
* @param aSource The original source's filename.
|
||||
* @param aChunks Optional. An array of strings which are snippets of
|
||||
* generated JS, or other SourceNodes.
|
||||
* @param aName The original identifier.
|
||||
*/
|
||||
function SourceNode(aLine, aColumn, aSource, aChunks, aName) {
|
||||
this.children = [];
|
||||
this.line = aLine === undefined ? null : aLine;
|
||||
this.column = aColumn === undefined ? null : aColumn;
|
||||
this.source = aSource === undefined ? null : aSource;
|
||||
this.name = aName === undefined ? null : aName;
|
||||
if (aChunks != null) this.add(aChunks);
|
||||
}
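  /**
   * Usage sketch (added for illustration; not part of the original file).
   * The line/column/file values are made up:
   *
   *     var node = new SourceNode(1, 0, 'a.js', 'var a = 1;\n');
   *     node.add(new SourceNode(1, 0, 'b.js', 'var b = 2;\n'));
   *     node.prepend('// header\n');
   */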
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
SourceNode.prototype.add = function SourceNode_add(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
aChunk.forEach(function (chunk) {
|
||||
this.add(chunk);
|
||||
}, this);
|
||||
}
|
||||
else if (aChunk instanceof SourceNode || typeof aChunk === "string") {
|
||||
if (aChunk) {
|
||||
this.children.push(aChunk);
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Add a chunk of generated JS to the beginning of this source node.
|
||||
*
|
||||
* @param aChunk A string snippet of generated JS code, another instance of
|
||||
* SourceNode, or an array where each member is one of those things.
|
||||
*/
|
||||
SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) {
|
||||
if (Array.isArray(aChunk)) {
|
||||
for (var i = aChunk.length-1; i >= 0; i--) {
|
||||
this.prepend(aChunk[i]);
|
||||
}
|
||||
}
|
||||
else if (aChunk instanceof SourceNode || typeof aChunk === "string") {
|
||||
this.children.unshift(aChunk);
|
||||
}
|
||||
else {
|
||||
throw new TypeError(
|
||||
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
|
||||
);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Walk over the tree of JS snippets in this node and its children. The
|
||||
* walking function is called once for each snippet of JS and is passed that
|
||||
 * snippet and its original associated source's line/column location.
|
||||
*
|
||||
* @param aFn The traversal function.
|
||||
*/
|
||||
SourceNode.prototype.walk = function SourceNode_walk(aFn) {
|
||||
this.children.forEach(function (chunk) {
|
||||
if (chunk instanceof SourceNode) {
|
||||
chunk.walk(aFn);
|
||||
}
|
||||
else {
|
||||
if (chunk !== '') {
|
||||
aFn(chunk, { source: this.source,
|
||||
line: this.line,
|
||||
column: this.column,
|
||||
name: this.name });
|
||||
}
|
||||
}
|
||||
}, this);
|
||||
};
|
||||
|
||||
/**
|
||||
* Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between
|
||||
* each of `this.children`.
|
||||
*
|
||||
* @param aSep The separator.
|
||||
*/
|
||||
SourceNode.prototype.join = function SourceNode_join(aSep) {
|
||||
var newChildren;
|
||||
var i;
|
||||
var len = this.children.length
|
||||
if (len > 0) {
|
||||
newChildren = [];
|
||||
for (i = 0; i < len-1; i++) {
|
||||
newChildren.push(this.children[i]);
|
||||
newChildren.push(aSep);
|
||||
}
|
||||
newChildren.push(this.children[i]);
|
||||
this.children = newChildren;
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Call String.prototype.replace on the very right-most source snippet. Useful
|
||||
* for trimming whitespace from the end of a source node, etc.
|
||||
*
|
||||
* @param aPattern The pattern to replace.
|
||||
* @param aReplacement The thing to replace the pattern with.
|
||||
*/
|
||||
SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) {
|
||||
var lastChild = this.children[this.children.length - 1];
|
||||
if (lastChild instanceof SourceNode) {
|
||||
lastChild.replaceRight(aPattern, aReplacement);
|
||||
}
|
||||
else if (typeof lastChild === 'string') {
|
||||
this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
|
||||
}
|
||||
else {
|
||||
this.children.push(''.replace(aPattern, aReplacement));
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the string representation of this source node. Walks over the tree
|
||||
* and concatenates all the various snippets together to one string.
|
||||
*/
|
||||
SourceNode.prototype.toString = function SourceNode_toString() {
|
||||
var str = "";
|
||||
this.walk(function (chunk) {
|
||||
str += chunk;
|
||||
});
|
||||
return str;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the string representation of this source node along with a source
|
||||
* map.
|
||||
*/
|
||||
SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) {
|
||||
var generated = {
|
||||
code: "",
|
||||
line: 1,
|
||||
column: 0
|
||||
};
|
||||
var map = new SourceMapGenerator(aArgs);
|
||||
var sourceMappingActive = false;
|
||||
this.walk(function (chunk, original) {
|
||||
generated.code += chunk;
|
||||
if (original.source !== null
|
||||
&& original.line !== null
|
||||
&& original.column !== null) {
|
||||
map.addMapping({
|
||||
source: original.source,
|
||||
original: {
|
||||
line: original.line,
|
||||
column: original.column
|
||||
},
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
},
|
||||
name: original.name
|
||||
});
|
||||
sourceMappingActive = true;
|
||||
} else if (sourceMappingActive) {
|
||||
map.addMapping({
|
||||
generated: {
|
||||
line: generated.line,
|
||||
column: generated.column
|
||||
}
|
||||
});
|
||||
sourceMappingActive = false;
|
||||
}
|
||||
chunk.split('').forEach(function (ch) {
|
||||
if (ch === '\n') {
|
||||
generated.line++;
|
||||
generated.column = 0;
|
||||
} else {
|
||||
generated.column++;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return { code: generated.code, map: map };
|
||||
};
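  /**
   * Usage sketch (added for illustration; not part of the original file).
   * `node` is a SourceNode built as in the constructor example above:
   *
   *     var result = node.toStringWithSourceMap({ file: 'bundle.js' });
   *     result.code;           // the concatenated generated source
   *     result.map.toString(); // the matching source map as a JSON string
   */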
|
||||
|
||||
exports.SourceNode = SourceNode;
|
||||
|
||||
});
|
@@ -0,0 +1,61 @@
|
||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
||||
/*
|
||||
* Copyright 2011 Mozilla Foundation and contributors
|
||||
* Licensed under the New BSD license. See LICENSE or:
|
||||
* http://opensource.org/licenses/BSD-3-Clause
|
||||
*/
|
||||
if (typeof define !== 'function') {
|
||||
var define = require('amdefine')(module);
|
||||
}
|
||||
define(function (require, exports, module) {
|
||||
|
||||
/**
|
||||
* This is a helper function for getting values from parameter/options
|
||||
* objects.
|
||||
*
|
||||
* @param args The object we are extracting values from
|
||||
* @param name The name of the property we are getting.
|
||||
* @param defaultValue An optional value to return if the property is missing
|
||||
* from the object. If this is not specified and the property is missing, an
|
||||
* error will be thrown.
|
||||
*/
|
||||
function getArg(aArgs, aName, aDefaultValue) {
|
||||
if (aName in aArgs) {
|
||||
return aArgs[aName];
|
||||
} else if (arguments.length === 3) {
|
||||
return aDefaultValue;
|
||||
} else {
|
||||
throw new Error('"' + aName + '" is a required argument.');
|
||||
}
|
||||
}
|
||||
exports.getArg = getArg;
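  /**
   * Behaviour sketch (added for illustration; not part of the original file):
   *
   *     getArg({ line: 1 }, 'line');        // 1
   *     getArg({ line: 1 }, 'name', null);  // null (the default is used)
   *     getArg({ line: 1 }, 'column');      // throws: '"column" is a required argument.'
   */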
|
||||
|
||||
function join(aRoot, aPath) {
|
||||
return aPath.charAt(0) === '/'
|
||||
? aPath
|
||||
: aRoot.replace(/\/*$/, '') + '/' + aPath;
|
||||
}
|
||||
exports.join = join;
|
||||
|
||||
/**
|
||||
* Because behavior goes wacky when you set `__proto__` on objects, we
|
||||
* have to prefix all the strings in our set with an arbitrary character.
|
||||
*
|
||||
* See https://github.com/mozilla/source-map/pull/31 and
|
||||
* https://github.com/mozilla/source-map/issues/30
|
||||
*
|
||||
* @param String aStr
|
||||
*/
|
||||
function toSetString(aStr) {
|
||||
return '$' + aStr
|
||||
}
|
||||
exports.toSetString = toSetString;
|
||||
|
||||
function relative(aRoot, aPath) {
|
||||
return aPath.indexOf(aRoot.replace(/\/*$/, '') + '/') === 0
|
||||
? aPath.substr(aRoot.length + 1)
|
||||
: aPath;
|
||||
}
|
||||
exports.relative = relative;
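  /**
   * Behaviour sketch (added for illustration; not part of the original file):
   *
   *     join('http://example.com/root', 'one.js');  // 'http://example.com/root/one.js'
   *     join('http://example.com/root', '/two.js'); // '/two.js' (absolute paths win)
   *     relative('http://example.com/root', 'http://example.com/root/one.js'); // 'one.js'
   *     relative('http://example.com/root', 'http://other.com/one.js');        // unchanged
   */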
|
||||
|
||||
});
|
@@ -0,0 +1 @@
|
||||
tests/
|
@@ -0,0 +1,58 @@
|
||||
amdefine is released under two licenses: new BSD, and MIT. You may pick the
|
||||
license that best suits your development needs. The text of both licenses is
|
||||
provided below.
|
||||
|
||||
|
||||
The "New" BSD License:
|
||||
----------------------
|
||||
|
||||
Copyright (c) 2011, The Dojo Foundation
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
* Neither the name of the Dojo Foundation nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
|
||||
MIT License
|
||||
-----------
|
||||
|
||||
Copyright (c) 2011, The Dojo Foundation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
@@ -0,0 +1,119 @@
|
||||
# amdefine
|
||||
|
||||
A module that can be used to implement AMD's define() in Node. This allows you
|
||||
to code to the AMD API and have the module work in node programs without
|
||||
requiring those other programs to use AMD.
|
||||
|
||||
## Usage
|
||||
|
||||
**1)** Update your package.json to indicate amdefine as a dependency:
|
||||
|
||||
```javascript
|
||||
"dependencies": {
|
||||
"amdefine": ">=0.0.2"
|
||||
}
|
||||
```
|
||||
|
||||
Then run `npm install` to get amdefine into your project.
|
||||
|
||||
**2)** At the top of each module that uses define(), place this code:
|
||||
|
||||
```javascript
|
||||
if (typeof define !== 'function') { var define = require('amdefine')(module) }
|
||||
```
|
||||
|
||||
**Only use these snippets** when loading amdefine. If you preserve the basic structure,
|
||||
with the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).
|
||||
|
||||
You can add spaces, line breaks and even require amdefine with a local path, but
|
||||
keep the rest of the structure to get the stripping behavior.
|
||||
|
||||
As you may know, because `if` statements in JavaScript don't have their own scope, the var
|
||||
declaration in the above snippet is made whether the `if` expression is truthy or not. If
|
||||
RequireJS is loaded, then the declaration is superfluous because `define` is already
|
||||
declared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`
|
||||
declarations of the same variable in the same scope gracefully.
|
||||
|
||||
If you want to deliver amdefine.js with your code rather than specifying it as a dependency
|
||||
with npm, then just download the latest release and refer to it using a relative path:
|
||||
|
||||
[Version 0.0.2](https://github.com/jrburke/amdefine/raw/0.0.2/amdefine.js)
|
||||
|
||||
## define() usage
|
||||
|
||||
It is best if you use the anonymous forms of define() in your module:
|
||||
|
||||
```javascript
|
||||
define(function (require) {
|
||||
var dependency = require('dependency');
|
||||
});
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```javascript
|
||||
define(['dependency'], function (dependency) {
|
||||
|
||||
});
|
||||
```
|
||||
|
||||
## RequireJS optimizer integration. <a name="optimizer"></a>
|
||||
|
||||
Version 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)
|
||||
will have support for stripping the `if (typeof define !== 'function')` check
|
||||
mentioned above, so you can include this snippet for code that runs in the
|
||||
browser, but avoid taking the cost of the if() statement once the code is
|
||||
optimized for deployment.
|
||||
|
||||
## Node 0.4 Support
|
||||
|
||||
If you want to support Node 0.4, then add `require` as the second parameter to amdefine:
|
||||
|
||||
```javascript
|
||||
//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.
|
||||
if (typeof define !== 'function') { var define = require('amdefine')(module, require) }
|
||||
```
|
||||
|
||||
## Limitations
|
||||
|
||||
### Synchronous vs Asynchronous
|
||||
|
||||
amdefine creates a define() function that is callable by your code. It will
|
||||
execute and trace dependencies and call the factory function *synchronously*,
|
||||
to keep the behavior in line with Node's synchronous dependency tracing.
|
||||
|
||||
The exception: calling AMD's callback-style require() from inside a factory
|
||||
function. The require callback is called on process.nextTick():
|
||||
|
||||
```javascript
|
||||
define(function (require) {
|
||||
require(['a'], function(a) {
|
||||
//'a' is loaded synchronously, but
|
||||
//this callback is called on process.nextTick().
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Loader Plugins
|
||||
|
||||
Loader plugins are supported as long as they call their load() callbacks
|
||||
synchronously. So ones that do network requests will not work. However plugins
|
||||
like [text](http://requirejs.org/docs/api.html#text) can load text files locally.
|
||||
|
||||
The plugin API's `load.fromText()` is **not supported** in amdefine, so this means
|
||||
transpiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)
|
||||
will not work. This may be fixable, but it is a bit complex, and I do not have
|
||||
enough node-fu to figure it out yet. See the source for amdefine.js if you want
|
||||
to get an idea of the issues involved.
|
||||
|
||||
## Tests
|
||||
|
||||
To run the tests, cd to **tests** and run:
|
||||
|
||||
```
|
||||
node all.js
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
New BSD and MIT. Check the LICENSE file for all the details.
|
@@ -0,0 +1,299 @@
|
||||
/** vim: et:ts=4:sw=4:sts=4
|
||||
* @license amdefine 0.0.4 Copyright (c) 2011, The Dojo Foundation All Rights Reserved.
|
||||
* Available via the MIT or new BSD license.
|
||||
* see: http://github.com/jrburke/amdefine for details
|
||||
*/
|
||||
|
||||
/*jslint node: true */
|
||||
/*global module, process */
|
||||
'use strict';
|
||||
|
||||
var path = require('path');
|
||||
|
||||
/**
|
||||
* Creates a define for node.
|
||||
* @param {Object} module the "module" object that is defined by Node for the
|
||||
* current module.
|
||||
* @param {Function} [require]. Node's require function for the current module.
|
||||
* It only needs to be passed in Node versions before 0.5, when module.require
|
||||
* did not exist.
|
||||
* @returns {Function} a define function that is usable for the current node
|
||||
* module.
|
||||
*/
|
||||
function amdefine(module, require) {
|
||||
var defineCache = {},
|
||||
loaderCache = {},
|
||||
alreadyCalled = false,
|
||||
makeRequire, stringRequire;
|
||||
|
||||
/**
|
||||
* Trims the . and .. from an array of path segments.
|
||||
* It will keep a leading path segment if a .. will become
|
||||
* the first path segment, to help with module name lookups,
|
||||
 * which act like paths, but can be remapped. But the end result is that
 * all paths produced by this function should look normalized.
|
||||
* NOTE: this method MODIFIES the input array.
|
||||
* @param {Array} ary the array of path segments.
|
||||
*/
|
||||
function trimDots(ary) {
|
||||
var i, part;
|
||||
for (i = 0; ary[i]; i+= 1) {
|
||||
part = ary[i];
|
||||
if (part === '.') {
|
||||
ary.splice(i, 1);
|
||||
i -= 1;
|
||||
} else if (part === '..') {
|
||||
if (i === 1 && (ary[2] === '..' || ary[0] === '..')) {
|
||||
//End of the line. Keep at least one non-dot
|
||||
//path segment at the front so it can be mapped
|
||||
//correctly to disk. Otherwise, there is likely
|
||||
//no path mapping for a path starting with '..'.
|
||||
//This can still fail, but catches the most reasonable
|
||||
//uses of ..
|
||||
break;
|
||||
} else if (i > 0) {
|
||||
ary.splice(i - 1, 2);
|
||||
i -= 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function normalize(name, baseName) {
|
||||
var baseParts;
|
||||
|
||||
//Adjust any relative paths.
|
||||
if (name && name.charAt(0) === '.') {
|
||||
//If have a base name, try to normalize against it,
|
||||
//otherwise, assume it is a top-level require that will
|
||||
//be relative to baseUrl in the end.
|
||||
if (baseName) {
|
||||
baseParts = baseName.split('/');
|
||||
baseParts = baseParts.slice(0, baseParts.length - 1);
|
||||
baseParts = baseParts.concat(name.split('/'));
|
||||
trimDots(baseParts);
|
||||
name = baseParts.join('/');
|
||||
}
|
||||
}
|
||||
|
||||
return name;
|
||||
}
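    /**
     * Behaviour sketch (added for illustration; not part of the original file):
     *
     *     normalize('./b', 'a/c');    // 'a/b'
     *     normalize('../d', 'a/b/c'); // 'a/d'
     *     normalize('x/y', 'a/b');    // 'x/y' (non-relative ids pass through)
     */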
|
||||
|
||||
/**
|
||||
* Create the normalize() function passed to a loader plugin's
|
||||
* normalize method.
|
||||
*/
|
||||
function makeNormalize(relName) {
|
||||
return function (name) {
|
||||
return normalize(name, relName);
|
||||
};
|
||||
}
|
||||
|
||||
function makeLoad(id) {
|
||||
function load(value) {
|
||||
loaderCache[id] = value;
|
||||
}
|
||||
|
||||
load.fromText = function (id, text) {
|
||||
//This one is difficult because the text can (and probably does) use
|
||||
//define, and any relative paths and requires should be relative
|
||||
//to that id as it would be found on disk. But this would require
|
||||
//bootstrapping a module/require fairly deeply from node core.
|
||||
//Not sure how best to go about that yet.
|
||||
throw new Error('amdefine does not implement load.fromText');
|
||||
};
|
||||
|
||||
return load;
|
||||
}
|
||||
|
||||
makeRequire = function (systemRequire, exports, module, relId) {
|
||||
function amdRequire(deps, callback) {
|
||||
if (typeof deps === 'string') {
|
||||
//Synchronous, single module require('')
|
||||
return stringRequire(systemRequire, exports, module, deps, relId);
|
||||
} else {
|
||||
//Array of dependencies with a callback.
|
||||
|
||||
//Convert the dependencies to modules.
|
||||
deps = deps.map(function (depName) {
|
||||
return stringRequire(systemRequire, exports, module, depName, relId);
|
||||
});
|
||||
|
||||
//Wait for next tick to call back the require call.
|
||||
process.nextTick(function () {
|
||||
callback.apply(null, deps);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
amdRequire.toUrl = function (filePath) {
|
||||
if (filePath.indexOf('.') === 0) {
|
||||
return normalize(filePath, path.dirname(module.filename));
|
||||
} else {
|
||||
return filePath;
|
||||
}
|
||||
};
|
||||
|
||||
return amdRequire;
|
||||
};
|
||||
|
||||
//Favor explicit value, passed in if the module wants to support Node 0.4.
|
||||
require = require || function req() {
|
||||
return module.require.apply(module, arguments);
|
||||
};
|
||||
|
||||
function runFactory(id, deps, factory) {
|
||||
var r, e, m, result;
|
||||
|
||||
if (id) {
|
||||
e = loaderCache[id] = {};
|
||||
m = {
|
||||
id: id,
|
||||
uri: __filename,
|
||||
exports: e
|
||||
};
|
||||
r = makeRequire(undefined, e, m, id);
|
||||
} else {
|
||||
//Only support one define call per file
|
||||
if (alreadyCalled) {
|
||||
throw new Error('amdefine with no module ID cannot be called more than once per file.');
|
||||
}
|
||||
alreadyCalled = true;
|
||||
|
||||
//Use the real variables from node
|
||||
//Use module.exports for exports, since
|
||||
//the exports in here is amdefine exports.
|
||||
e = module.exports;
|
||||
m = module;
|
||||
r = makeRequire(require, e, m, module.id);
|
||||
}
|
||||
|
||||
//If there are dependencies, they are strings, so need
|
||||
//to convert them to dependency values.
|
||||
if (deps) {
|
||||
deps = deps.map(function (depName) {
|
||||
return r(depName);
|
||||
});
|
||||
}
|
||||
|
||||
//Call the factory with the right dependencies.
|
||||
if (typeof factory === 'function') {
|
||||
result = factory.apply(module.exports, deps);
|
||||
} else {
|
||||
result = factory;
|
||||
}
|
||||
|
||||
if (result !== undefined) {
|
||||
m.exports = result;
|
||||
if (id) {
|
||||
loaderCache[id] = m.exports;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stringRequire = function (systemRequire, exports, module, id, relId) {
|
||||
//Split the ID by a ! to check for a loader plugin prefix.
|
||||
var index = id.indexOf('!'),
|
||||
originalId = id,
|
||||
prefix, plugin;
|
||||
|
||||
if (index === -1) {
|
||||
id = normalize(id, relId);
|
||||
|
||||
//Straight module lookup. If it is one of the special dependencies,
|
||||
//deal with it, otherwise, delegate to node.
|
||||
if (id === 'require') {
|
||||
return makeRequire(systemRequire, exports, module, relId);
|
||||
} else if (id === 'exports') {
|
||||
return exports;
|
||||
} else if (id === 'module') {
|
||||
return module;
|
||||
} else if (loaderCache.hasOwnProperty(id)) {
|
||||
return loaderCache[id];
|
||||
} else if (defineCache[id]) {
|
||||
runFactory.apply(null, defineCache[id]);
|
||||
return loaderCache[id];
|
||||
} else {
|
||||
if(systemRequire) {
|
||||
return systemRequire(originalId);
|
||||
} else {
|
||||
throw new Error('No module with ID: ' + id);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
//There is a plugin in play.
|
||||
prefix = id.substring(0, index);
|
||||
id = id.substring(index + 1, id.length);
|
||||
|
||||
plugin = stringRequire(systemRequire, exports, module, prefix, relId);
|
||||
|
||||
if (plugin.normalize) {
|
||||
id = plugin.normalize(id, makeNormalize(relId));
|
||||
} else {
|
||||
//Normalize the ID normally.
|
||||
id = normalize(id, relId);
|
||||
}
|
||||
|
||||
if (loaderCache[id]) {
|
||||
return loaderCache[id];
|
||||
} else {
|
||||
plugin.load(id, makeRequire(systemRequire, exports, module, relId), makeLoad(id), {});
|
||||
|
||||
return loaderCache[id];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Create a define function specific to the module asking for amdefine.
|
||||
function define(id, deps, factory) {
|
||||
if (Array.isArray(id)) {
|
||||
factory = deps;
|
||||
deps = id;
|
||||
id = undefined;
|
||||
} else if (typeof id !== 'string') {
|
||||
factory = id;
|
||||
id = deps = undefined;
|
||||
}
|
||||
|
||||
if (deps && !Array.isArray(deps)) {
|
||||
factory = deps;
|
||||
deps = undefined;
|
||||
}
|
||||
|
||||
if (!deps) {
|
||||
deps = ['require', 'exports', 'module'];
|
||||
}
|
||||
|
||||
//Set up properties for this module. If an ID, then use
|
||||
//internal cache. If no ID, then use the external variables
|
||||
//for this node module.
|
||||
if (id) {
|
||||
//Put the module in deep freeze until there is a
|
||||
//require call for it.
|
||||
defineCache[id] = [id, deps, factory];
|
||||
} else {
|
||||
runFactory(id, deps, factory);
|
||||
}
|
||||
}
|
||||
|
||||
//define.require, which has access to all the values in the
|
||||
//cache. Useful for AMD modules that all have IDs in the file,
|
||||
//but need to finally export a value to node based on one of those
|
||||
//IDs.
|
||||
define.require = function (id) {
|
||||
if (loaderCache[id]) {
|
||||
return loaderCache[id];
|
||||
}
|
||||
|
||||
if (defineCache[id]) {
|
||||
runFactory.apply(null, defineCache[id]);
|
||||
return loaderCache[id];
|
||||
}
|
||||
};
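    /**
     * Usage sketch (added for illustration; not part of the original file).
     * The module ids 'adder' and 'main' are made up; this shows a file that
     * declares several named define()s and exports one of them to Node:
     *
     *     if (typeof define !== 'function') { var define = require('amdefine')(module); }
     *     define('adder', function () { return function (a, b) { return a + b; }; });
     *     define('main', ['adder'], function (add) { return { sum: add(2, 3) }; });
     *     module.exports = define.require('main'); // { sum: 5 }
     */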
|
||||
|
||||
define.amd = {};
|
||||
|
||||
return define;
|
||||
}
|
||||
|
||||
module.exports = amdefine;
|
@@ -0,0 +1,31 @@
|
||||
{
|
||||
"name": "amdefine",
|
||||
"description": "Provide AMD's define() API for declaring modules in the AMD format",
|
||||
"version": "0.0.4",
|
||||
"homepage": "http://github.com/jrburke/amdefine.js",
|
||||
"author": {
|
||||
"name": "James Burke",
|
||||
"email": "jrburke@gmail.com",
|
||||
"url": "http://github.com/jrburke"
|
||||
},
|
||||
"licenses": [
|
||||
{
|
||||
"type": "BSD",
|
||||
"url": "https://github.com/jrburke/amdefine/blob/master/LICENSE"
|
||||
},
|
||||
{
|
||||
"type": "MIT",
|
||||
"url": "https://github.com/jrburke/amdefine/blob/master/LICENSE"
|
||||
}
|
||||
],
|
||||
"main": "./amdefine.js",
|
||||
"engines": {
|
||||
"node": ">=0.4.2"
|
||||
},
|
||||
"readme": "# amdefine\n\nA module that can be used to implement AMD's define() in Node. This allows you\nto code to the AMD API and have the module work in node programs without\nrequiring those other programs to use AMD.\n\n## Usage\n\n**1)** Update your package.json to indicate amdefine as a dependency:\n\n```javascript\n \"dependencies\": {\n \"amdefine\": \">=0.0.2\"\n }\n```\n\nThen run `npm install` to get amdefine into your project.\n\n**2)** At the top of each module that uses define(), place this code:\n\n```javascript\nif (typeof define !== 'function') { var define = require('amdefine')(module) }\n```\n\n**Only use these snippets** when loading amdefine. If you preserve the basic structure,\nwith the braces, it will be stripped out when using the [RequireJS optimizer](#optimizer).\n\nYou can add spaces, line breaks and even require amdefine with a local path, but\nkeep the rest of the structure to get the stripping behavior.\n\nAs you may know, because `if` statements in JavaScript don't have their own scope, the var\ndeclaration in the above snippet is made whether the `if` expression is truthy or not. If\nRequireJS is loaded then the declaration is superfluous because `define` is already already\ndeclared in the same scope in RequireJS. Fortunately JavaScript handles multiple `var`\ndeclarations of the same variable in the same scope gracefully.\n\nIf you want to deliver amdefine.js with your code rather than specifying it as a dependency\nwith npm, then just download the latest release and refer to it using a relative path:\n\n[Version 0.0.2](https://github.com/jrburke/amdefine/raw/0.0.2/amdefine.js)\n\n## define() usage\n\nIt is best if you use the anonymous forms of define() in your module:\n\n```javascript\ndefine(function (require) {\n var dependency = require('dependency');\n});\n```\n\nor\n\n```javascript\ndefine(['dependency'], function (dependency) {\n\n});\n```\n\n## RequireJS optimizer integration. <a name=\"optimizer\"></name>\n\nVersion 1.0.3 of the [RequireJS optimizer](http://requirejs.org/docs/optimization.html)\nwill have support for stripping the `if (typeof define !== 'function')` check\nmentioned above, so you can include this snippet for code that runs in the\nbrowser, but avoid taking the cost of the if() statement once the code is\noptimized for deployment.\n\n## Node 0.4 Support\n\nIf you want to support Node 0.4, then add `require` as the second parameter to amdefine:\n\n```javascript\n//Only if you want Node 0.4. If using 0.5 or later, use the above snippet.\nif (typeof define !== 'function') { var define = require('amdefine')(module, require) }\n```\n\n## Limitations\n\n### Synchronous vs Asynchronous\n\namdefine creates a define() function that is callable by your code. It will\nexecute and trace dependencies and call the factory function *synchronously*,\nto keep the behavior in line with Node's synchronous dependency tracing.\n\nThe exception: calling AMD's callback-style require() from inside a factory\nfunction. The require callback is called on process.nextTick():\n\n```javascript\ndefine(function (require) {\n require(['a'], function(a) {\n //'a' is loaded synchronously, but\n //this callback is called on process.nextTick().\n });\n});\n```\n\n### Loader Plugins\n\nLoader plugins are supported as long as they call their load() callbacks\nsynchronously. So ones that do network requests will not work. 
However plugins\nlike [text](http://requirejs.org/docs/api.html#text) can load text files locally.\n\nThe plugin API's `load.fromText()` is **not supported** in amdefine, so this means\ntranspiler plugins like the [CoffeeScript loader plugin](https://github.com/jrburke/require-cs)\nwill not work. This may be fixable, but it is a bit complex, and I do not have\nenough node-fu to figure it out yet. See the source for amdefine.js if you want\nto get an idea of the issues involved.\n\n## Tests\n\nTo run the tests, cd to **tests** and run:\n\n```\nnode all.js\n```\n\n## License\n\nNew BSD and MIT. Check the LICENSE file for all the details.\n",
|
||||
"_id": "amdefine@0.0.4",
|
||||
"dist": {
|
||||
"shasum": "f4148643494a8ba95fbc3af06b6af48346b9aedb"
|
||||
},
|
||||
"_from": "amdefine@>=0.0.4"
|
||||
}
|
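As a quick orientation for the package.json above: the readme's core pattern is a one-line define() shim at the top of each module. The sketch below is a hypothetical module (file and dependency names invented) that follows that pattern; it is an illustration only, not part of this package.

```javascript
// my-module.js — hypothetical example of the amdefine pattern described in the readme above
if (typeof define !== 'function') { var define = require('amdefine')(module) }

define(function (require) {
  // any dependency can be pulled in with the synchronous require() amdefine provides
  var path = require('path');

  // the value returned here becomes module.exports when run under Node
  return {
    normalize: function (p) {
      return path.normalize(p);
    }
  };
});
```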
File diff suppressed because one or more lines are too long
@@ -0,0 +1,72 @@
#!/usr/bin/env node
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
var assert = require('assert');
var fs = require('fs');
var path = require('path');
var util = require('./source-map/util');

function run(tests) {
  var failures = [];
  var total = 0;
  var passed = 0;

  for (var i = 0; i < tests.length; i++) {
    for (var k in tests[i].testCase) {
      if (/^test/.test(k)) {
        total++;
        try {
          tests[i].testCase[k](assert, util);
          passed++;
          process.stdout.write('.');
        }
        catch (e) {
          failures.push({
            name: tests[i].name + ': ' + k,
            error: e
          });
          process.stdout.write('E');
        }
      }
    }
  }

  process.stdout.write('\n');
  console.log(passed + ' / ' + total + ' tests passed.');

  failures.forEach(function (f) {
    console.log('================================================================================');
    console.log(f.name);
    console.log('--------------------------------------------------------------------------------');
    console.log(f.error.stack);
  });

  return failures.length;
}

var code;

process.stdout.on('close', function () {
  process.exit(code);
});

function isTestFile(f) {
  return /^test\-.*?\.js/.test(f);
}

function toModule(f) {
  return './source-map/' + f.replace(/\.js$/, '');
}

var requires = fs.readdirSync(path.join(__dirname, 'source-map')).filter(isTestFile).map(toModule);

code = run(requires.map(require).map(function (mod, i) {
  return {
    name: requires[i],
    testCase: mod
  };
}));
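The runner above scans tests/source-map for files matching test-*.js and calls every export whose name starts with "test", passing it (assert, util). A minimal sketch of a test module it would pick up might look like the following (the filename and assertion are illustrative only):

```javascript
// tests/source-map/test-example.js — hypothetical module the runner above would discover
if (typeof define !== 'function') {
    var define = require('amdefine')(module);
}
define(function (require, exports, module) {

  // any export whose key begins with "test" is invoked as fn(assert, util)
  exports['test something trivial'] = function (assert, util) {
    assert.strictEqual(1 + 1, 2);
  };

});
```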
@@ -0,0 +1,26 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2012 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
    var define = require('amdefine')(module);
}
define(function (require, exports, module) {

  var sourceMap;
  try {
    sourceMap = require('../../lib/source-map');
  } catch (e) {
    sourceMap = {};
    Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
  }

  exports['test that the api is properly exposed in the top level'] = function (assert, util) {
    assert.equal(typeof sourceMap.SourceMapGenerator, "function");
    assert.equal(typeof sourceMap.SourceMapConsumer, "function");
    assert.equal(typeof sourceMap.SourceNode, "function");
  };

});
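Those three constructors are the library's public surface. As a rough, non-authoritative sketch of how they fit together (file names and positions below are invented), a map can be generated and then queried like this:

```javascript
// Sketch only: assumes the standard source-map API of this era; values are made up.
var sourceMap = require('source-map');

var generator = new sourceMap.SourceMapGenerator({ file: 'min.js' });
generator.addMapping({
  generated: { line: 1, column: 0 },
  original: { line: 1, column: 0 },
  source: 'foo.js',
  name: 'foo'
});

var consumer = new sourceMap.SourceMapConsumer(generator.toString());
// look up where a generated position came from
console.log(consumer.originalPositionFor({ line: 1, column: 0 }));
```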
@@ -0,0 +1,71 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
    var define = require('amdefine')(module);
}
define(function (require, exports, module) {

  var ArraySet = require('../../lib/source-map/array-set').ArraySet;

  function makeTestSet() {
    var set = new ArraySet();
    for (var i = 0; i < 100; i++) {
      set.add(String(i));
    }
    return set;
  }

  exports['test .has() membership'] = function (assert, util) {
    var set = makeTestSet();
    for (var i = 0; i < 100; i++) {
      assert.ok(set.has(String(i)));
    }
  };

  exports['test .indexOf() elements'] = function (assert, util) {
    var set = makeTestSet();
    for (var i = 0; i < 100; i++) {
      assert.strictEqual(set.indexOf(String(i)), i);
    }
  };

  exports['test .at() indexing'] = function (assert, util) {
    var set = makeTestSet();
    for (var i = 0; i < 100; i++) {
      assert.strictEqual(set.at(i), String(i));
    }
  };

  exports['test creating from an array'] = function (assert, util) {
    var set = ArraySet.fromArray(['foo', 'bar', 'baz', 'quux', 'hasOwnProperty']);

    assert.ok(set.has('foo'));
    assert.ok(set.has('bar'));
    assert.ok(set.has('baz'));
    assert.ok(set.has('quux'));
    assert.ok(set.has('hasOwnProperty'));

    assert.strictEqual(set.indexOf('foo'), 0);
    assert.strictEqual(set.indexOf('bar'), 1);
    assert.strictEqual(set.indexOf('baz'), 2);
    assert.strictEqual(set.indexOf('quux'), 3);

    assert.strictEqual(set.at(0), 'foo');
    assert.strictEqual(set.at(1), 'bar');
    assert.strictEqual(set.at(2), 'baz');
    assert.strictEqual(set.at(3), 'quux');
  };

  exports['test that you can add __proto__; see github issue #30'] = function (assert, util) {
    var set = new ArraySet();
    set.add('__proto__');
    assert.ok(set.has('__proto__'));
    assert.strictEqual(set.at(0), '__proto__');
    assert.strictEqual(set.indexOf('__proto__'), 0);
  };

});
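The last test above exists because a set backed naively by a plain object would collide with inherited property names such as __proto__ and hasOwnProperty. The sketch below shows the usual way around that, prefixing each key before storing it; it is illustrative only, not a copy of the library's array-set.js.

```javascript
// Minimal collision-safe set sketch; mirrors the behaviour the tests above check.
function SafeSet() {
  this._array = [];   // preserves insertion order for at()/indexOf()
  this._map = {};     // lookup table keyed by prefixed strings
}

// prefixing avoids clashes with Object.prototype names like __proto__
SafeSet.prototype._key = function (str) {
  return '$' + str;
};

SafeSet.prototype.add = function (str) {
  if (!this.has(str)) {
    this._map[this._key(str)] = this._array.length;
    this._array.push(str);
  }
};

SafeSet.prototype.has = function (str) {
  return Object.prototype.hasOwnProperty.call(this._map, this._key(str));
};

SafeSet.prototype.indexOf = function (str) {
  return this.has(str) ? this._map[this._key(str)] : -1;
};

SafeSet.prototype.at = function (idx) {
  return this._array[idx];
};
```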
@@ -0,0 +1,24 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
    var define = require('amdefine')(module);
}
define(function (require, exports, module) {

  var base64VLQ = require('../../lib/source-map/base64-vlq');

  exports['test normal encoding and decoding'] = function (assert, util) {
    var result;
    for (var i = -255; i < 256; i++) {
      result = base64VLQ.decode(base64VLQ.encode(i));
      assert.ok(result);
      assert.equal(result.value, i);
      assert.equal(result.rest, "");
    }
  };

});
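The round trip above is the whole contract: encode() turns a signed integer into one or more base64 digits and decode() returns { value, rest }. The usual source-map VLQ scheme puts the sign in the lowest bit and then emits 5-bit groups with a continuation bit; the snippet below is a simplified re-implementation for illustration, not the library's code.

```javascript
// Illustrative base64 VLQ encoder: sign bit lowest, 5 value bits per digit,
// bit 6 (0x20) set when more digits follow.
var BASE64_CHARS =
  'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';

function vlqEncode(value) {
  var vlq = value < 0 ? ((-value) << 1) + 1 : (value << 1);
  var encoded = '';
  do {
    var digit = vlq & 0x1f;   // low 5 bits of what is left
    vlq >>>= 5;
    if (vlq > 0) {
      digit |= 0x20;          // continuation bit: another digit follows
    }
    encoded += BASE64_CHARS.charAt(digit);
  } while (vlq > 0);
  return encoded;
}

console.log(vlqEncode(1));   // prints 'C'
console.log(vlqEncode(-1));  // prints 'D'
console.log(vlqEncode(16));  // prints 'gB'
```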
@@ -0,0 +1,35 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
if (typeof define !== 'function') {
    var define = require('amdefine')(module);
}
define(function (require, exports, module) {

  var base64 = require('../../lib/source-map/base64');

  exports['test out of range encoding'] = function (assert, util) {
    assert.throws(function () {
      base64.encode(-1);
    });
    assert.throws(function () {
      base64.encode(64);
    });
  };

  exports['test out of range decoding'] = function (assert, util) {
    assert.throws(function () {
      base64.decode('=');
    });
  };

  exports['test normal encoding and decoding'] = function (assert, util) {
    for (var i = 0; i < 64; i++) {
      assert.equal(base64.decode(base64.encode(i)), i);
    }
  };

});
Some files were not shown because too many files have changed in this diff