all da files

This commit is contained in:
jllord
2013-05-27 13:45:59 -07:00
commit 59d3d30afa
6704 changed files with 1954956 additions and 0 deletions

1
node_modules/browserify/.npmignore generated vendored Normal file
View File

@@ -0,0 +1 @@
**/bundle.js

4
node_modules/browserify/.travis.yml generated vendored Normal file
View File

@@ -0,0 +1,4 @@
language: node_js
node_js:
- 0.8
- 0.9

63
node_modules/browserify/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,63 @@
Some pieces from builtins/ taken from node core under this license:
----
Copyright Joyent, Inc. and other Node contributors.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the
following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
----
buffer_ieee754.js has this license in it:
----
Copyright (c) 2008, Fair Oaks Labs, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Fair Oaks Labs, Inc. nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Modifications to writeIEEE754 to support negative zeroes made by Brian White
----

0
node_modules/browserify/_empty.js generated vendored Normal file
View File

113
node_modules/browserify/bin/cmd.js generated vendored Executable file
View File

@@ -0,0 +1,113 @@
#!/usr/bin/env node

// Command-line front-end for browserify: parses argv, configures a
// Browserify instance, and streams the resulting bundle to stdout or
// to the file given with -o/--outfile.

var browserify = require('../');
var fs = require('fs');
var path = require('path');
var JSONStream = require('JSONStream');
var spawn = require('child_process').spawn;
var parseShell = require('shell-quote').parse;
var duplexer = require('duplexer');

// Option parsing: every long flag is aliased onto its short form so the
// rest of the script may read either spelling.
var argv = require('optimist')
    .boolean(['deps','pack','ig','dg', 'im', 'd'])
    .alias('insert-globals', 'ig')
    .alias('detect-globals', 'dg')
    .alias('ignore-missing', 'im')
    .alias('debug', 'd')
    .alias('ig', 'fast')
    .default('ig', false)
    .default('im', false)
    .default('dg', true)
    .default('d', false)
    .argv
;

// -h/--help or no arguments at all: print usage and exit non-zero.
// Top-level `return` is valid here because CommonJS wraps the file in a
// function.
if (argv.h || argv.help || process.argv.length <= 2) {
    return fs.createReadStream(__dirname + '/usage.txt')
        .pipe(process.stdout)
        .on('close', function () { process.exit(1) })
    ;
}

// -v/--verbose: print the package version and stop.
if (argv.v || argv.verbose) {
    return console.log(require('../package.json').version);
}

// Positional arguments plus any -e/--entry flags become absolute entry
// file paths.
var entries = argv._.concat(argv.e).filter(Boolean).map(function(entry) {
    return path.resolve(process.cwd(), entry);
});

var b = browserify(entries);
b.on('error', function (err) {
    console.error(err);
    process.exit(1);
});

// Each of these flags may appear zero, one, or many times; [].concat
// normalizes scalar-or-array into an array.
[].concat(argv.i).concat(argv.ignore).filter(Boolean)
    .forEach(function (i) { b.ignore(i) })
;
[].concat(argv.r).concat(argv.require).filter(Boolean)
    .forEach(function (r) { b.require(r, { expose: r }) })
;
// resolve any external files and add them to the bundle as externals
[].concat(argv.x).concat(argv.external).filter(Boolean)
    .forEach(function (x) { b.external(path.resolve(process.cwd(), x)) })
;
[].concat(argv.t).concat(argv.transform).filter(Boolean)
    .forEach(function (t) { b.transform(t) })
;

// -c/--command: pipe each source file through an external shell command.
// The command receives the file path in $FILENAME, the source on stdin,
// and must write the transformed source to stdout.
[].concat(argv.c).concat(argv.command).filter(Boolean)
    .forEach(function (c) {
        var cmd = parseShell(c);
        b.transform(function (file) {
            // Copy the environment so FILENAME does not leak into ours.
            var env = Object.keys(process.env).reduce(function (acc, key) {
                acc[key] = process.env[key];
                return acc;
            }, {});
            env.FILENAME = file;
            var ps = spawn(cmd[0], cmd.slice(1), { env: env });

            var error = '';
            ps.stderr.on('data', function (buf) { error += buf });

            ps.on('exit', function (code) {
                if (code === 0) return;
                console.error([
                    'error running source transform command: ' + c,
                    error.split('\n').join('\n '),
                    ''
                ].join('\n'));
                process.exit(1);
            });
            // Expose the child's stdin/stdout pair as one duplex stream.
            return duplexer(ps.stdin, ps.stdout);
        });
    })
;

// --pack: stdin already carries a module-deps JSON stream; just pack it.
if (argv.pack) {
    process.stdin.pipe(b.pack()).pipe(process.stdout);
    process.stdin.resume();
    return;
}

// --deps: emit the dependency graph as JSON instead of a bundle.
if (argv.deps) {
    var stringify = JSONStream.stringify();
    b.deps().pipe(stringify).pipe(process.stdout);
    return;
}

var bundle = b.bundle({
    detectGlobals: argv['detect-globals'] !== false && argv.dg !== false,
    insertGlobals: argv['insert-globals'] || argv.ig,
    ignoreMissing: argv['ignore-missing'] || argv.im,
    debug: argv['debug'] || argv.d
});

var outfile = argv.o || argv.outfile;
if (outfile) {
    bundle.pipe(fs.createWriteStream(outfile));
}
else {
    bundle.pipe(process.stdout);
}

49
node_modules/browserify/bin/usage.txt generated vendored Normal file
View File

@@ -0,0 +1,49 @@
Usage: browserify [entry files] {OPTIONS}
Standard Options:
--outfile, -o Write the browserify bundle to this file.
If unspecified, browserify prints to stdout.
--require, -r A module name or file to bundle.require()
Optionally use a colon separator to set the target.
--entry, -e An entry point of your app
--ignore, -i Omit a file from the output bundle.
--external, -x Reference a file from another bundle.
--transform, -t Use a transform module on top-level files.
--command, -c Use a transform command on top-level files.
--help, -h Show this message
Advanced Options:
--insert-globals, --ig, --fast [default: false]
Skip detection and always insert definitions for process, global,
__filename, and __dirname.
benefit: faster builds
cost: extra bytes
--detect-globals, --dg [default: true]
Detect the presence of process, global, __filename, and __dirname and define
these values when present.
benefit: npm modules more likely to work
cost: slower builds
--ignore-missing, --im [default: false]
Ignore `require()` statements that don't resolve to anything.
--debug, -d [default: false]
Enable source maps that allow you to debug your files separately.
Specify a parameter.

View File

@@ -0,0 +1,2 @@
var robot = require('./robot');
console.log(robot('beep'));

View File

@@ -0,0 +1,2 @@
var robot = require('./robot');
console.log(robot('boop'));

4
node_modules/browserify/example/multiple_bundles/build.sh generated vendored Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Build a shared bundle exposing robot.js (-r), then two page bundles
# that omit robot.js (-i) and rely on common.js to provide it at runtime.
browserify -r ./robot.js > static/common.js
browserify -i ./robot.js beep.js > static/beep.js
browserify -i ./robot.js boop.js > static/boop.js

View File

@@ -0,0 +1 @@
module.exports = function (s) { return s.toUpperCase() + '!' };

View File

@@ -0,0 +1,2 @@
<script src="common.js"></script>
<script src="beep.js"></script>

View File

@@ -0,0 +1,6 @@
;(function(p,c,e){function r(n){if(!c[n]){if(!p[n])return;c[n]={exports:{}};p[n][0](function(x){return r(p[n][1][x])},c[n],c[n].exports);}return c[n].exports}for(var i=0;i<e.length;i++)r(e[i]);return r})({0:[function(require,module,exports){var robot = require('./robot');
console.log(robot('beep'));
},{"./robot":1}],1:[function(require,module,exports){module.exports = function (s) { return s.toUpperCase() + '!' };
},{}]},{},[0]);

View File

@@ -0,0 +1,2 @@
<script src="common.js"></script>
<script src="boop.js"></script>

View File

@@ -0,0 +1,6 @@
;(function(p,c,e){function r(n){if(!c[n]){if(!p[n])return;c[n]={exports:{}};p[n][0](function(x){return r(p[n][1][x])},c[n],c[n].exports);}return c[n].exports}for(var i=0;i<e.length;i++)r(e[i]);return r})({0:[function(require,module,exports){var robot = require('./robot');
console.log(robot('boop'));
},{"./robot":1}],1:[function(require,module,exports){module.exports = function (s) { return s.toUpperCase() + '!' };
},{}]},{},[0]);

View File

@@ -0,0 +1,3 @@
require=(function(o,r){return function(n){var x=r(n);if(x!==undefined)return x;if(o)return o(n);throw new Error("Cannot find module '"+n+"'")}})(typeof require!=="undefined"&&require,(function(p,c,e){function r(n){if(!c[n]){if(!p[n])return;c[n]={exports:{}};p[n][0](function(x){return r(p[n][1][x])},c[n],c[n].exports);}return c[n].exports}for(var i=0;i<e.length;i++)r(e[i]);return r})({"./robot.js":[function(require,module,exports){module.exports = function (s) { return s.toUpperCase() + '!' };
},{}]},{},[]));

10
node_modules/browserify/example/source_maps/build.js generated vendored Normal file
View File

@@ -0,0 +1,10 @@
// Example build script: bundle js/main.js with source maps enabled
// (debug: true) and write the result to js/build/bundle.js.
var browserify = require('../..'),
    path = require('path'),
    fs = require('fs'),
    bundlePath = path.join(__dirname, 'js', 'build', 'bundle.js');

browserify()
    .require(require.resolve('./js/main.js'), { entry: true })
    .bundle({ debug: true })
    .on('error', function (err) { console.error(err); })
    .pipe(fs.createWriteStream(bundlePath));

4
node_modules/browserify/example/source_maps/build.sh generated vendored Executable file
View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Bundle js/main.js with source maps (--debug) into js/build/bundle.js.
../../bin/cmd.js --debug -e ./js/main.js > js/build/bundle.js
echo bundle was generated with source maps, you can now open index.html

11
node_modules/browserify/example/source_maps/index.html generated vendored Normal file
View File

@@ -0,0 +1,11 @@
<!DOCTYPE html>
<html>
<head>
<meta charset=utf-8 />
<title></title>
<script type="text/javascript" src="./js/build/bundle.js"></script>
</head>
<body>
<p>Open your dev tools ;)</p>
</body>
</html>

View File

@@ -0,0 +1 @@
!.gitignore

View File

@@ -0,0 +1,7 @@
console.log('foo line 1');
var bar = require('./wunder/bar');
module.exports = function foo() {
console.log('hello from foo line 5');
bar();
};

View File

@@ -0,0 +1,4 @@
console.log('main line 1');
var foo = require('./foo.js');
foo();

View File

@@ -0,0 +1,8 @@
console.log('bar line 1');
'use strict';
// this is a meaningless comment to add some lines
module.exports = function bar() {
console.log('hello from bar line 7');
};

288
node_modules/browserify/index.js generated vendored Normal file
View File

@@ -0,0 +1,288 @@
var crypto = require('crypto');
var through = require('through');
var duplexer = require('duplexer');
var concatStream = require('concat-stream');
var checkSyntax = require('syntax-error');
var mdeps = require('module-deps');
var browserPack = require('browser-pack');
var browserResolve = require('browser-resolve');
var insertGlobals = require('insert-module-globals');
var path = require('path');
var inherits = require('inherits');
var EventEmitter = require('events').EventEmitter;
module.exports = function (files) {
return new Browserify(files);
};
/**
 * Short, stable identifier for a string: the first 6 characters of the
 * base64-encoded MD5 digest of `what`. Used to expose modules without
 * leaking full filesystem paths into the generated bundle.
 *
 * Fix: dropped the stray semicolon that followed the function
 * declaration (no-extra-semi).
 *
 * @param {string} what - value to hash (typically an absolute file path)
 * @returns {string} 6-character base64 digest prefix
 */
function hash(what) {
    return crypto.createHash('md5').update(what).digest('base64').slice(0, 6);
}
// Browserify extends EventEmitter so asynchronous require() resolution
// can signal completion via the internal '_ready' event.
inherits(Browserify, EventEmitter);

// Bundler state container. `files` may be a single path or an array;
// each entry is registered as an entry point via add().
function Browserify (files) {
    this.files = [];         // resolved module files to include in the bundle
    this.exports = {};       // file -> exposed public id (hash or name)
    this._pending = 0;       // count of outstanding async resolutions
    this._entries = [];      // entry-point files, in insertion order
    this._ignore = {};       // files to be replaced with the empty module
    this._external = {};     // files provided by some other bundle
    this._expose = {};       // file -> user-chosen expose name
    this._mapped = {};       // expose name -> file (shortcut for _resolve)
    this._transforms = [];   // registered source transforms
    [].concat(files).filter(Boolean).forEach(this.add.bind(this));
}
// Register `file` as an entry point of the bundle (chainable; require()
// already returns the bundler instance).
Browserify.prototype.add = function (file) {
    return this.require(file, { entry: true });
};
// Resolve `id` (a module name or file path) with browser-resolve and
// queue it for bundling. opts.expose exposes it under a stable public
// id, opts.external marks it as provided by another bundle, opts.entry
// makes it an entry point, opts.basedir sets the resolution root.
// Resolution is asynchronous: the _pending counter gates bundle().
Browserify.prototype.require = function (id, opts) {
    var self = this;
    // A bare require('x') defaults to exposing the module under its id.
    if (opts === undefined) opts = { expose: id };

    self._pending ++;

    var basedir = opts.basedir || process.cwd();
    // Fabricate a "requiring file" so resolution starts from basedir.
    var fromfile = basedir + '/_fake.js';

    var params = { filename: fromfile, packageFilter: packageFilter };
    browserResolve(id, params, function (err, file) {
        if (err) return self.emit('error', err);

        if (opts.expose) {
            // Hash of the resolved path becomes the public module id.
            self.exports[file] = hash(file);

            if (typeof opts.expose === 'string') {
                self._expose[file] = opts.expose;
                self._mapped[opts.expose] = file;
            }
        }
        if (opts.external) {
            self._external[file] = true;
        }
        else {
            self.files.push(file);
        }

        if (opts.entry) self._entries.push(file);

        // The last outstanding resolution releases queued bundle() calls.
        if (--self._pending === 0) self.emit('_ready');
    });

    return self;
};
// DEPRECATED: use require(file, { expose: name }) instead.
// Exposes `file` under the public id `name` and queues it for bundling.
Browserify.prototype.expose = function (name, file) {
    var self = this;
    self.exports[file] = name;
    self.files.push(file);
};
// Mark `id` as external: it is referenced from another bundle at runtime
// rather than included in this one. Delegates to require() with the
// external flag set.
Browserify.prototype.external = function (id, opts) {
    if (!opts) opts = {};
    opts.external = true;
    return this.require(id, opts);
};
// Replace `file` with an empty stub module in the output (chainable).
Browserify.prototype.ignore = function (file) {
    var self = this;
    self._ignore[file] = true;
    return self;
};
// Produce the bundled output stream. Recognized opts: insertGlobals
// (always define process/global/__filename/__dirname), detectGlobals
// (scan sources for them first), ignoreMissing, debug (source maps),
// and packageFilter (chained in front of the built-in filter). With a
// callback, the full bundle is buffered and handed to cb; otherwise the
// returned stream emits the bundle text.
Browserify.prototype.bundle = function (opts, cb) {
    var self = this;
    if (typeof opts === 'function') {
        cb = opts;
        opts = {};
    }
    if (!opts) opts = {};
    if (opts.insertGlobals === undefined) opts.insertGlobals = false;
    if (opts.detectGlobals === undefined) opts.detectGlobals = true;
    if (opts.ignoreMissing === undefined) opts.ignoreMissing = false;
    // Wiring consumed by module-deps downstream.
    opts.resolve = self._resolve.bind(self);
    opts.transform = self._transforms;
    opts.transformKey = [ 'browserify', 'transform' ];

    // Run any caller-supplied packageFilter before the built-in one.
    var parentFilter = opts.packageFilter;
    opts.packageFilter = function (pkg) {
        if (parentFilter) pkg = parentFilter(pkg);
        return packageFilter(pkg);
    };

    // require() calls still resolving: return a placeholder stream now
    // and pipe the real bundle through it once '_ready' fires.
    if (self._pending) {
        var tr = through();
        self.on('_ready', function () {
            var b = self.bundle(opts, cb);
            if (!cb) b.on('error', tr.emit.bind(tr, 'error'));
            b.pipe(tr);
        });
        return tr;
    }

    var d = self.deps(opts);
    // Skip the globals pass entirely when neither detection nor
    // insertion was requested.
    var g = opts.detectGlobals || opts.insertGlobals
        ? insertGlobals(self.files, {
            resolve: self._resolve.bind(self),
            always: opts.insertGlobals
        })
        : through()
    ;
    var p = self.pack(opts.debug);
    if (cb) {
        p.on('error', cb);
        p.pipe(concatStream(cb));
    }
    // Forward upstream errors so consumers only need to listen on p.
    d.on('error', p.emit.bind(p, 'error'));
    g.on('error', p.emit.bind(p, 'error'));

    d.pipe(g).pipe(p);

    return p;
};
// Register a source transform: a module name, a relative path (resolved
// against the cwd), or a transform function. Chainable.
Browserify.prototype.transform = function (t) {
    var isRelativePath = typeof t === 'string' && /^\./.test(t);
    this._transforms.push(isRelativePath ? path.resolve(t) : t);
    return this;
};
// Stream the dependency graph (module-deps rows), rewriting rows on the
// way through: ignored files get empty source, JSON files are wrapped
// with module.exports, exposed files gain an extra alias row, external
// files are dropped, and entry files are flagged with their order.
Browserify.prototype.deps = function (opts) {
    var self = this;
    var d = mdeps(self.files, opts);
    var tr = d.pipe(through(write));
    d.on('error', tr.emit.bind(tr, 'error'));
    return tr;

    function write (row) {
        // Ignored modules were resolved to the shared empty module.
        if (row.id === emptyModulePath) {
            row.source = '';
        }

        if (self._expose[row.id]) {
            // Emit an alias module so require('<name>') re-exports the
            // hashed id of the real file.
            this.queue({
                exposed: self._expose[row.id],
                deps: {},
                source: 'module.exports=require(\'' + hash(row.id) + '\');'
            });
        }

        if (self.exports[row.id]) row.exposed = self.exports[row.id];

        // skip adding this file if it is external
        if (self._external[row.id]) {
            return;
        }

        // JSON is not executable as-is; wrap it in an export statement.
        if (/\.json$/.test(row.id)) {
            row.source = 'module.exports=' + row.source;
        }

        var ix = self._entries.indexOf(row.id);
        row.entry = ix >= 0;
        if (ix >= 0) row.order = ix;
        this.queue(row);
    }
};
// Pack dependency rows into the final bundle text via browser-pack.
// Rewrites file-path ids to small integer ids (or exposed names/hashes),
// syntax-checks every row, comments out shebang lines, and attaches
// source-map fields when `debug` is set. Prepends 'require=' when the
// bundle exposes anything, ';' otherwise.
Browserify.prototype.pack = function (debug) {
    var self = this;
    var packer = browserPack({ raw: true });
    var ids = {};       // file path -> assigned numeric/public id
    var idIndex = 1;

    var input = through(function (row) {
        var ix;
        if (debug) {
            row.sourceRoot = 'file://localhost';
            row.sourceFile = row.id;
        }
        if (row.exposed) {
            // Exposed rows keep their public name as the id.
            ix = row.exposed;
        }
        else {
            // Reuse the id if this file was already numbered as a dep.
            ix = ids[row.id] !== undefined ? ids[row.id] : idIndex++;
        }
        if (ids[row.id] === undefined) ids[row.id] = ix;

        // A shebang line is not valid JS mid-bundle; neutralize it.
        if (/^#!/.test(row.source)) row.source = '//' + row.source;
        var err = checkSyntax(row.source, row.id);
        if (err) self.emit('error', err);

        row.id = ix;
        row.deps = Object.keys(row.deps).reduce(function (acc, key) {
            var file = row.deps[key];
            // reference external and exposed files directly by hash
            if (self._external[file] || self._expose[file]) {
                acc[key] = hash(file);
                return acc;
            }
            if (ids[file] === undefined) ids[file] = idIndex++;
            acc[key] = ids[file];
            return acc;
        }, {});
        this.queue(row);
    });

    var first = true;
    var hasExports = Object.keys(self.exports).length;
    var output = through(write, end);

    // Emitted exactly once: before the first chunk, or at end-of-stream
    // when no chunks arrived at all.
    function writePrelude () {
        if (!first) return;
        if (!hasExports) return output.queue(';');
        output.queue('require=');
    }

    input.pipe(packer);
    packer.pipe(output);
    return duplexer(input, output);

    function write (buf) {
        if (first) writePrelude();
        first = false;
        this.queue(buf);
    }
    function end () {
        if (first) writePrelude();
        this.queue('\n;');
        this.queue(null);
    }
};
// package.json filter: honor the legacy string-valued "browserify" field
// by copying it into the standard "browser" field when that is unset.
// Mutates and returns the same object.
var packageFilter = function (info) {
    var legacy = info.browserify;
    if (typeof legacy === 'string' && !info.browser) {
        info.browser = legacy;
    }
    return info;
};
// Stand-in module substituted for ignored files (see ignore/_resolve).
var emptyModulePath = require.resolve('./_empty');
// Resolver handed to module-deps / insert-module-globals. Exposed names
// short-circuit to their mapped files; ignored files resolve to the
// empty module; external files are flagged via a third `true` argument.
Browserify.prototype._resolve = function (id, parent, cb) {
    var self = this;
    var mapped = self._mapped[id];
    if (mapped) return cb(null, mapped);

    return browserResolve(id, parent, function (err, file) {
        if (err) return cb(err);
        if (self._ignore[file]) return cb(null, emptyModulePath);
        if (self._external[file]) return cb(null, file, true);
        cb(null, file);
    });
};

1
node_modules/browserify/node_modules/.bin/browser-pack generated vendored Symbolic link
View File

@@ -0,0 +1 @@
../browser-pack/cmd.js

View File

@@ -0,0 +1 @@
../insert-module-globals/bin/cmd.js

1
node_modules/browserify/node_modules/.bin/module-deps generated vendored Symbolic link
View File

@@ -0,0 +1 @@
../module-deps/cmd.js

View File

@@ -0,0 +1,2 @@
node_modules/*
node_modules

View File

@@ -0,0 +1,4 @@
language: node_js
node_js:
- 0.8

View File

@@ -0,0 +1,15 @@
Apache License, Version 2.0
Copyright (c) 2011 Dominic Tarr
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -0,0 +1,24 @@
The MIT License
Copyright (c) 2011 Dominic Tarr
Permission is hereby granted, free of charge,
to any person obtaining a copy of this software and
associated documentation files (the "Software"), to
deal in the Software without restriction, including
without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom
the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,13 @@
// Example: stream every row of a CouchDB _all_docs response through
// JSONStream.parse and log each row to stderr as it arrives.
var request = require('request')
  , JSONStream = require('JSONStream')
  , es = require('event-stream')

var parser = JSONStream.parse(['rows', true]) //emit parts that match this path (any element of the rows array)
  , req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
  , logger = es.mapSync(function (data) { //create a stream that logs to stderr,
      console.error(data)
      return data
    })

req.pipe(parser)
parser.pipe(logger)

View File

@@ -0,0 +1,191 @@
var Parser = require('jsonparse')
, Stream = require('stream').Stream
/*
the value of this.stack that creationix's jsonparse has is weird.
it makes this code ugly, but his problem is way harder that mine,
so i'll forgive him.
*/
// Create a writable/readable stream that parses incoming JSON text and
// emits 'data' for every value whose position in the document matches
// `path` — an array whose elements may be strings, regexes, predicate
// functions, or booleans (see check()). With no path, the whole root
// value is emitted once the document completes, via the onToken hook.
exports.parse = function (path) {
  var stream = new Stream()
  var parser = new Parser()
  var count = 0
  if(!path || !path.length)
    path = null

  parser.onValue = function () {
    // A value closing at depth 1 means we have seen the document root.
    if(!this.root && this.stack.length == 1){
      stream.root = this.value
    }
    if(!path || this.stack.length !== path.length)
      return
    // Match each ancestor key against the corresponding path element.
    var _path = []
    for( var i = 0; i < (path.length - 1); i++) {
      var key = path[i]
      var c = this.stack[1 + (+i)]
      if(!c) {
        return
      }
      var m = check(key, c.key)
      _path.push(c.key)
      if(!m)
        return
    }
    // The final path element matches the current (innermost) key.
    var c = this
    var key = path[path.length - 1]
    var m = check(key, c.key)
    if(!m)
      return
    _path.push(c.key)
    count ++
    stream.emit('data', this.value[this.key])
    // Drop already-emitted subtrees so memory stays bounded on big docs.
    for(var i in this.stack)
      this.stack[i].value = {}
  }

  // Wrap onToken to detect end-of-document (parser stack drained).
  parser._onToken = parser.onToken;
  parser.onToken = function (token, value) {
    parser._onToken(token, value);
    if (this.stack.length === 0) {
      if (stream.root) {
        // Pathless mode emits the entire root; either way announce it
        // on 'root' with the match count, then reset for the next doc.
        if(!path)
          stream.emit('data', stream.root)
        stream.emit('root', stream.root, count)
        count = 0;
        stream.root = null;
      }
    }
  }

  parser.onError = function (err) {
    stream.emit('error', err)
  }

  stream.readable = true
  stream.writable = true
  stream.write = function (chunk) {
    if('string' === typeof chunk) {
      // In a browser without Buffer, feed the parser raw char codes.
      if ('undefined' === typeof Buffer) {
        var buf = new Array(chunk.length)
        for (var i = 0; i < chunk.length; i++) buf[i] = chunk.charCodeAt(i)
        chunk = new Int32Array(buf)
      } else {
        chunk = new Buffer(chunk)
      }
    }
    parser.write(chunk)
  }
  stream.end = function (data) {
    if(data)
      stream.write(data)
    stream.emit('end')
  }
  stream.destroy = function () {
    stream.emit('close');
  }
  return stream
}
// Match one path element `x` against an object key `y`.
// Strings compare loosely (so numeric keys match their string form),
// regex-like objects return the raw exec() result, booleans match
// unconditionally by their own value, and functions act as predicates.
// Anything else never matches.
function check (x, y) {
  if ('string' === typeof x) {
    return y == x
  }
  if (x && 'function' === typeof x.exec) {
    return x.exec(y)
  }
  if ('boolean' === typeof x) {
    return x
  }
  if ('function' === typeof x) {
    return x(y)
  }
  return false
}
exports.stringify = function (op, sep, cl) {
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '[\n'
sep = '\n,\n'
cl = '\n]\n'
}
//else, what ever you like
var stream = new Stream ()
, first = true
, ended = false
, anyData = false
stream.write = function (data) {
anyData = true
var json = JSON.stringify(data)
if(first) { first = false ; stream.emit('data', op + json)}
else stream.emit('data', sep + json)
}
stream.end = function (data) {
if(ended)
return
ended = true
if(data) stream.write(data)
if(!anyData) stream.emit('data', op)
stream.emit('data', cl)
stream.emit('end')
}
stream.writable = true
stream.readable = true
return stream
}
exports.stringifyObject = function (op, sep, cl) {
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '{\n'
sep = '\n,\n'
cl = '\n}\n'
}
//else, what ever you like
var stream = new Stream ()
, first = true
, ended = false
, anyData = false
stream.write = function (data) {
anyData = true
var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1])
if(first) { first = false ; stream.emit('data', op + json)}
else stream.emit('data', sep + json)
}
stream.end = function (data) {
if(ended) return
ended = true
if(data) stream.write(data)
if(!anyData) stream.emit('data', op)
stream.emit('data', cl)
stream.emit('end')
}
stream.writable = true
stream.readable = true
return stream
}

View File

@@ -0,0 +1,24 @@
The MIT License
Copyright (c) 2012 Tim Caswell
Permission is hereby granted, free of charge,
to any person obtaining a copy of this software and
associated documentation files (the "Software"), to
deal in the Software without restriction, including
without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom
the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,11 @@
This is a streaming JSON parser. For a simpler, sax-based version see this gist: https://gist.github.com/1821394
The MIT License (MIT)
Copyright (c) 2011-2012 Tim Caswell
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,26 @@
// Crude benchmark: compare 1000 JSON.parse calls against 1000 streaming
// parser writes over the same document, printing the ratio each round.
// NOTE(review): the outer while (true) never terminates by design —
// kill the process to stop it.
var fs = require('fs'),
    Parser = require('./jsonparse');

var json = fs.readFileSync("samplejson/basic.json");

while (true) {
  var start = Date.now();
  for (var i = 0; i < 1000; i++) {
    JSON.parse(json);
  }
  var first = Date.now() - start;

  start = Date.now();
  var p = new Parser();
  for (var i = 0; i < 1000; i++) {
    p.write(json);
  }
  var second = Date.now() - start;

  console.log("JSON.parse took %s", first);
  console.log("streaming parser took %s", second);
  console.log("streaming is %s times slower", second / first);
}

View File

@@ -0,0 +1,30 @@
// Example: stream Twitter's public sample feed and print each status as
// "<text> - <user.name>" using the incremental parser's onValue hook.
// NOTE(review): uses the legacy Http.createClient API and the retired
// v1 streaming endpoint with basic auth; kept as-is (vendored example).
var Parser = require('../jsonparse');
var Http = require('http');
require('./colors');
var p = new Parser();
var cred = require('./credentials');

var client = Http.createClient(80, "stream.twitter.com");
var request = client.request("GET", "/1/statuses/sample.json", {
  "Host": "stream.twitter.com",
  "Authorization": (new Buffer(cred.username + ":" + cred.password)).toString("base64")
});
request.on('response', function (response) {
  console.log(response.statusCode);
  console.dir(response.headers);
  response.on('data', function (chunk) {
    p.write(chunk);
  });
  response.on('end', function () {
    console.log("END");
  });
});
request.end();

var text = "", name = "";
p.onValue = function (value) {
  // Capture the top-level "text" field and the nested "user.name" field.
  if (this.stack.length === 1 && this.key === 'text') { text = value; }
  if (this.stack.length === 2 && this.key === 'name' && this.stack[1].key === 'user') { name = value; }
  // Empty stack means one whole status object finished: print and reset.
  if (this.stack.length === 0) {
    console.log(text.blue + " - " + name.yellow);
    text = name = "";
  }
};

View File

@@ -0,0 +1,401 @@
/*global Buffer*/
// Named constants with unique integer values
// Named constants with unique integer values
var C = {};
// Tokens
var LEFT_BRACE = C.LEFT_BRACE = 0x1;
var RIGHT_BRACE = C.RIGHT_BRACE = 0x2;
var LEFT_BRACKET = C.LEFT_BRACKET = 0x3;
var RIGHT_BRACKET = C.RIGHT_BRACKET = 0x4;
var COLON = C.COLON = 0x5;
var COMMA = C.COMMA = 0x6;
var TRUE = C.TRUE = 0x7;
var FALSE = C.FALSE = 0x8;
var NULL = C.NULL = 0x9;
var STRING = C.STRING = 0xa;
var NUMBER = C.NUMBER = 0xb;
// Tokenizer States
var START = C.START = 0x11;
var TRUE1 = C.TRUE1 = 0x21;
var TRUE2 = C.TRUE2 = 0x22;
var TRUE3 = C.TRUE3 = 0x23;
var FALSE1 = C.FALSE1 = 0x31;
var FALSE2 = C.FALSE2 = 0x32;
var FALSE3 = C.FALSE3 = 0x33;
var FALSE4 = C.FALSE4 = 0x34;
// Bug fix: the names on C were swapped (C.NULL3 was 0x42 and C.NULL2 was
// 0x43), so toknam() reported the wrong tokenizer state in syntax errors
// raised while scanning a `null` literal. The local NULL2/NULL3 values
// are unchanged, so parsing behavior is unaffected.
var NULL1 = C.NULL1 = 0x41;
var NULL2 = C.NULL2 = 0x42;
var NULL3 = C.NULL3 = 0x43;
var NUMBER1 = C.NUMBER1 = 0x51;
var NUMBER2 = C.NUMBER2 = 0x52;
var NUMBER3 = C.NUMBER3 = 0x53;
var NUMBER4 = C.NUMBER4 = 0x54;
var NUMBER5 = C.NUMBER5 = 0x55;
var NUMBER6 = C.NUMBER6 = 0x56;
var NUMBER7 = C.NUMBER7 = 0x57;
var NUMBER8 = C.NUMBER8 = 0x58;
var STRING1 = C.STRING1 = 0x61;
var STRING2 = C.STRING2 = 0x62;
var STRING3 = C.STRING3 = 0x63;
var STRING4 = C.STRING4 = 0x64;
var STRING5 = C.STRING5 = 0x65;
var STRING6 = C.STRING6 = 0x66;
// Parser States
var VALUE = C.VALUE = 0x71;
var KEY = C.KEY = 0x72;
// Parser Modes
var OBJECT = C.OBJECT = 0x81;
var ARRAY = C.ARRAY = 0x82;

// Slow code to string converter (only used when throwing syntax errors).
// Returns the constant's name if `code` is one of the values in C,
// otherwise the code formatted as hex (or the falsy code itself).
function toknam(code) {
  var keys = Object.keys(C);
  for (var i = 0, l = keys.length; i < l; i++) {
    var key = keys[i];
    if (C[key] === code) { return key; }
  }
  return code && ("0x" + code.toString(16));
}
// Streaming JSON tokenizer/parser state. All parse state lives on the
// instance so write() can be fed chunks split at arbitrary byte
// boundaries (including mid multi-byte UTF-8 character).
function Parser() {
  this.tState = START;        // current tokenizer state
  this.value = undefined;

  this.string = undefined; // string data
  this.unicode = undefined; // unicode escapes

  // For number parsing
  this.negative = undefined;
  this.magnatude = undefined;
  this.position = undefined;
  this.exponent = undefined;
  this.negativeExponent = undefined;

  this.key = undefined;
  this.mode = undefined;      // OBJECT or ARRAY while inside a container
  this.stack = [];            // enclosing containers (value/key/mode)
  this.state = VALUE;         // parser state: expecting VALUE or KEY
  this.bytes_remaining = 0; // number of bytes remaining in multi byte utf8 char to read after split boundary
  this.bytes_in_sequence = 0; // bytes in multi byte utf8 char to read
  this.temp_buffs = { "2": new Buffer(2), "3": new Buffer(3), "4": new Buffer(4) }; // for rebuilding chars split before boundary is reached
}
var proto = Parser.prototype;

// Raise a descriptive syntax error for the unexpected byte at buffer[i],
// naming the tokenizer state via toknam().
proto.charError = function (buffer, i) {
  var ch = JSON.stringify(String.fromCharCode(buffer[i]));
  var msg = "Unexpected " + ch + " at position " + i + " in state " + toknam(this.tState);
  this.onError(new Error(msg));
};

// Default error behavior is to throw; consumers may override onError.
proto.onError = function (err) { throw err; };
// Tokenizer.  Consumes a Buffer (strings are converted to Buffers first) one
// byte at a time and emits tokens through this.onToken().  The current
// tokenizer state lives in this.tState; partially-read values accumulate in
// this.string / this.magnatude / this.position / this.exponent.
// NOTE(review): upstream spells "magnitude" as "magnatude" — it is a runtime
// property name shared across methods, so it is documented, not renamed.
proto.write = function (buffer) {
  if (typeof buffer === "string") buffer = new Buffer(buffer);
  //process.stdout.write("Input: ");
  //console.dir(buffer.toString());
  var n;
  for (var i = 0, l = buffer.length; i < l; i++) {
    if (this.tState === START){
      n = buffer[i];
      // Single-byte structural tokens are emitted immediately; literal and
      // number starts only switch state.
      if(n === 0x7b){ this.onToken(LEFT_BRACE, "{"); // {
      }else if(n === 0x7d){ this.onToken(RIGHT_BRACE, "}"); // }
      }else if(n === 0x5b){ this.onToken(LEFT_BRACKET, "["); // [
      }else if(n === 0x5d){ this.onToken(RIGHT_BRACKET, "]"); // ]
      }else if(n === 0x3a){ this.onToken(COLON, ":"); // :
      }else if(n === 0x2c){ this.onToken(COMMA, ","); // ,
      }else if(n === 0x74){ this.tState = TRUE1; // t
      }else if(n === 0x66){ this.tState = FALSE1; // f
      }else if(n === 0x6e){ this.tState = NULL1; // n
      }else if(n === 0x22){ this.string = ""; this.tState = STRING1; // "
      }else if(n === 0x2d){ this.negative = true; this.tState = NUMBER1; // -
      }else if(n === 0x30){ this.magnatude = 0; this.tState = NUMBER2; // 0
      }else{
        if (n > 0x30 && n < 0x40) { // 1-9
          this.magnatude = n - 0x30; this.tState = NUMBER3;
        } else if (n === 0x20 || n === 0x09 || n === 0x0a || n === 0x0d) {
          // whitespace
        } else { this.charError(buffer, i); }
      }
    }else if (this.tState === STRING1){ // After open quote
      n = buffer[i]; // get current byte from buffer
      // Check for carry-over of a multi-byte char split between data chunks:
      // fill the temp buffer with the start of this chunk, up to the boundary
      // limit recorded at the end of the previous write().
      // NOTE(review): this assumes the remainder of the split character fits
      // entirely in this chunk — a character split across three writes would
      // read past the data; confirm against upstream jsonparse fixes.
      if (this.bytes_remaining > 0) {
        for (var j = 0; j < this.bytes_remaining; j++) {
          this.temp_buffs[this.bytes_in_sequence][this.bytes_in_sequence - this.bytes_remaining + j] = buffer[j];
        }
        this.string += this.temp_buffs[this.bytes_in_sequence].toString();
        this.bytes_in_sequence = this.bytes_remaining = 0;
        i = i + j - 1;
      } else if (this.bytes_remaining === 0 && n >= 128) { // else if no remainder bytes carried over, parse multi byte (>=128) chars one at a time
        // UTF-8 lead-byte ranges determine the sequence length (2-4 bytes).
        if ((n >= 194) && (n <= 223)) this.bytes_in_sequence = 2;
        if ((n >= 224) && (n <= 239)) this.bytes_in_sequence = 3;
        if ((n >= 240) && (n <= 244)) this.bytes_in_sequence = 4;
        if ((this.bytes_in_sequence + i) > buffer.length) { // if bytes needed to complete char fall outside buffer length, we have a boundary split
          for (var k = 0; k <= (buffer.length - 1 - i); k++) {
            this.temp_buffs[this.bytes_in_sequence][k] = buffer[i + k]; // fill temp buffer of correct size with bytes available in this chunk
          }
          this.bytes_remaining = (i + this.bytes_in_sequence) - buffer.length;
          i = buffer.length - 1;
        } else {
          this.string += buffer.slice(i, (i + this.bytes_in_sequence)).toString();
          i = i + this.bytes_in_sequence - 1;
        }
      } else if (n === 0x22) { this.tState = START; this.onToken(STRING, this.string); this.string = undefined; }
      else if (n === 0x5c) { this.tState = STRING2; }
      else if (n >= 0x20) { this.string += String.fromCharCode(n); }
      else { this.charError(buffer, i); }
    }else if (this.tState === STRING2){ // After backslash
      n = buffer[i];
      if(n === 0x22){ this.string += "\""; this.tState = STRING1;
      }else if(n === 0x5c){ this.string += "\\"; this.tState = STRING1;
      }else if(n === 0x2f){ this.string += "\/"; this.tState = STRING1;
      }else if(n === 0x62){ this.string += "\b"; this.tState = STRING1;
      }else if(n === 0x66){ this.string += "\f"; this.tState = STRING1;
      }else if(n === 0x6e){ this.string += "\n"; this.tState = STRING1;
      }else if(n === 0x72){ this.string += "\r"; this.tState = STRING1;
      }else if(n === 0x74){ this.string += "\t"; this.tState = STRING1;
      }else if(n === 0x75){ this.unicode = ""; this.tState = STRING3;
      }else{
        this.charError(buffer, i);
      }
    }else if (this.tState === STRING3 || this.tState === STRING4 || this.tState === STRING5 || this.tState === STRING6){ // unicode hex codes
      // Collect the four hex digits of a \uXXXX escape; STRING3..STRING6 are
      // consecutive constants, so `this.tState++` walks through the digits.
      // Surrogate halves are appended as separate UTF-16 code units, which
      // combine correctly in a JS string.
      n = buffer[i];
      // 0-9 A-F a-f
      if ((n >= 0x30 && n < 0x40) || (n > 0x40 && n <= 0x46) || (n > 0x60 && n <= 0x66)) {
        this.unicode += String.fromCharCode(n);
        if (this.tState++ === STRING6) {
          this.string += String.fromCharCode(parseInt(this.unicode, 16));
          this.unicode = undefined;
          this.tState = STRING1;
        }
      } else {
        this.charError(buffer, i);
      }
    }else if (this.tState === NUMBER1){ // after minus
      n = buffer[i];
      if (n === 0x30) { this.magnatude = 0; this.tState = NUMBER2; }
      else if (n > 0x30 && n < 0x40) { this.magnatude = n - 0x30; this.tState = NUMBER3; }
      else { this.charError(buffer, i); }
    }else if (this.tState === NUMBER2){ // * After initial zero
      n = buffer[i];
      if(n === 0x2e){ // .
        this.position = 0.1; this.tState = NUMBER4;
      }else if(n === 0x65 || n === 0x45){ // e/E
        this.exponent = 0; this.tState = NUMBER6;
      }else{
        // Any other byte ends the number 0; re-process it (i--) in START.
        this.tState = START;
        this.onToken(NUMBER, 0);
        this.magnatude = undefined;
        this.negative = undefined;
        i--;
      }
    }else if (this.tState === NUMBER3){ // * After digit (before period)
      n = buffer[i];
      if(n === 0x2e){ // .
        this.position = 0.1; this.tState = NUMBER4;
      }else if(n === 0x65 || n === 0x45){ // e/E
        this.exponent = 0; this.tState = NUMBER6;
      }else{
        if (n >= 0x30 && n < 0x40) { this.magnatude = this.magnatude * 10 + n - 0x30; }
        else {
          // Non-digit ends the integer; apply sign, emit, and re-process byte.
          this.tState = START;
          if (this.negative) {
            this.magnatude = -this.magnatude;
            this.negative = undefined;
          }
          this.onToken(NUMBER, this.magnatude);
          this.magnatude = undefined;
          i--;
        }
      }
    }else if (this.tState === NUMBER4){ // After period
      n = buffer[i];
      if (n >= 0x30 && n < 0x40) { // 0-9
        // `position` is the place value of the next fractional digit.
        this.magnatude += this.position * (n - 0x30);
        this.position /= 10;
        this.tState = NUMBER5;
      } else { this.charError(buffer, i); }
    }else if (this.tState === NUMBER5){ // * After digit (after period)
      n = buffer[i];
      if (n >= 0x30 && n < 0x40) { // 0-9
        this.magnatude += this.position * (n - 0x30);
        this.position /= 10;
      }
      else if (n === 0x65 || n === 0x45) { this.exponent = 0; this.tState = NUMBER6; } // E/e
      else {
        this.tState = START;
        if (this.negative) {
          this.magnatude = -this.magnatude;
          this.negative = undefined;
        }
        // NOTE(review): `this.negative` was just cleared above, so the
        // ternary here always takes the (already signed) magnatude branch.
        this.onToken(NUMBER, this.negative ? -this.magnatude : this.magnatude);
        this.magnatude = undefined;
        this.position = undefined;
        i--;
      }
    }else if (this.tState === NUMBER6){ // After E
      n = buffer[i];
      if (n === 0x2b || n === 0x2d) { // +/-
        if (n === 0x2d) { this.negativeExponent = true; }
        this.tState = NUMBER7;
      }
      else if (n >= 0x30 && n < 0x40) {
        this.exponent = this.exponent * 10 + (n - 0x30);
        this.tState = NUMBER8;
      }
      else { this.charError(buffer, i); }
    }else if (this.tState === NUMBER7){ // After +/-
      n = buffer[i];
      if (n >= 0x30 && n < 0x40) { // 0-9
        this.exponent = this.exponent * 10 + (n - 0x30);
        this.tState = NUMBER8;
      }
      else { this.charError(buffer, i); }
    }else if (this.tState === NUMBER8){ // * After digit (after +/-)
      n = buffer[i];
      if (n >= 0x30 && n < 0x40) { // 0-9
        this.exponent = this.exponent * 10 + (n - 0x30);
      }
      else {
        // End of the exponent: apply exponent sign, scale, apply number sign,
        // emit, and re-process the terminating byte in START.
        if (this.negativeExponent) {
          this.exponent = -this.exponent;
          this.negativeExponent = undefined;
        }
        this.magnatude *= Math.pow(10, this.exponent);
        this.exponent = undefined;
        if (this.negative) {
          this.magnatude = -this.magnatude;
          this.negative = undefined;
        }
        this.tState = START;
        this.onToken(NUMBER, this.magnatude);
        this.magnatude = undefined;
        i--;
      }
    }else if (this.tState === TRUE1){ // r
      if (buffer[i] === 0x72) { this.tState = TRUE2; }
      else { this.charError(buffer, i); }
    }else if (this.tState === TRUE2){ // u
      if (buffer[i] === 0x75) { this.tState = TRUE3; }
      else { this.charError(buffer, i); }
    }else if (this.tState === TRUE3){ // e
      if (buffer[i] === 0x65) { this.tState = START; this.onToken(TRUE, true); }
      else { this.charError(buffer, i); }
    }else if (this.tState === FALSE1){ // a
      if (buffer[i] === 0x61) { this.tState = FALSE2; }
      else { this.charError(buffer, i); }
    }else if (this.tState === FALSE2){ // l
      if (buffer[i] === 0x6c) { this.tState = FALSE3; }
      else { this.charError(buffer, i); }
    }else if (this.tState === FALSE3){ // s
      if (buffer[i] === 0x73) { this.tState = FALSE4; }
      else { this.charError(buffer, i); }
    }else if (this.tState === FALSE4){ // e
      if (buffer[i] === 0x65) { this.tState = START; this.onToken(FALSE, false); }
      else { this.charError(buffer, i); }
    }else if (this.tState === NULL1){ // u
      if (buffer[i] === 0x75) { this.tState = NULL2; }
      else { this.charError(buffer, i); }
    }else if (this.tState === NULL2){ // l
      if (buffer[i] === 0x6c) { this.tState = NULL3; }
      else { this.charError(buffer, i); }
    }else if (this.tState === NULL3){ // l
      if (buffer[i] === 0x6c) { this.tState = START; this.onToken(NULL, null); }
      else { this.charError(buffer, i); }
    }
  }
};
// Tokenizer callback stub.  A full value-building implementation is assigned
// further down in this file; consumers that only want raw tokens can
// override this directly.
proto.onToken = function (token, value) {
  // Override this to get events
};
// Report a token that is invalid in the current parse state via onError.
// `value` is included in the message only when truthy.
proto.parseError = function (token, value) {
  var detail = value ? ("(" + JSON.stringify(value) + ")") : "";
  this.onError(new Error("Unexpected " + toknam(token) + detail + " in state " + toknam(this.state)));
};
proto.onError = function (err) { throw err; };
// Save the current container context (value under construction, pending key,
// and mode) before descending into a nested object or array.
proto.push = function () {
  var frame = { value: this.value, key: this.key, mode: this.mode };
  this.stack.push(frame);
};
// Leave the current container: restore the parent frame from the stack, emit
// the value that was just completed, and — when back at the top level (no
// mode) — reset the parse state to expect a fresh value.
proto.pop = function () {
  var finished = this.value;
  var frame = this.stack.pop();
  this.value = frame.value;
  this.key = frame.key;
  this.mode = frame.mode;
  this.emit(finished);
  if (!this.mode) { this.state = VALUE; }
};
// Deliver a completed value to onValue.  Inside a container the next
// expected token is a comma (or the closing brace/bracket), so the state is
// advanced before the callback runs.
proto.emit = function (value) {
  if (this.mode) {
    this.state = COMMA;
  }
  this.onValue(value);
};
// Value callback stub: invoked once for every completed JSON value
// (primitives and containers alike).  Consumers override this.
proto.onValue = function (value) {
  // Override me
};
// Value parser.  Receives tokens from the tokenizer and assembles JavaScript
// values, tracking grammar position in this.state (VALUE/KEY/COLON/COMMA)
// and the current container kind in this.mode (OBJECT/ARRAY/undefined).
// Completed values are delivered through emit() -> onValue().
proto.onToken = function (token, value) {
  //console.log("OnToken: state=%s token=%s %s", toknam(this.state), toknam(token), value?JSON.stringify(value):"");
  if(this.state === VALUE){
    if(token === STRING || token === NUMBER || token === TRUE || token === FALSE || token === NULL){
      // Primitive: store into the enclosing container (if any) and emit.
      if (this.value) {
        this.value[this.key] = value;
      }
      this.emit(value);
    }else if(token === LEFT_BRACE){
      // Open object: remember the parent frame, attach the new object to it
      // under the pending key, then expect the first key.
      this.push();
      if (this.value) {
        this.value = this.value[this.key] = {};
      } else {
        this.value = {};
      }
      this.key = undefined;
      this.state = KEY;
      this.mode = OBJECT;
    }else if(token === LEFT_BRACKET){
      // Open array: same as above, but keys are numeric indices from 0.
      this.push();
      if (this.value) {
        this.value = this.value[this.key] = [];
      } else {
        this.value = [];
      }
      this.key = 0;
      this.mode = ARRAY;
      this.state = VALUE;
    }else if(token === RIGHT_BRACE){
      if (this.mode === OBJECT) {
        this.pop();
      } else {
        this.parseError(token, value);
      }
    }else if(token === RIGHT_BRACKET){
      if (this.mode === ARRAY) {
        this.pop();
      } else {
        this.parseError(token, value);
      }
    }else{
      this.parseError(token, value);
    }
  }else if(this.state === KEY){
    // Expecting an object key; RIGHT_BRACE here allows the empty object {}.
    if (token === STRING) {
      this.key = value;
      this.state = COLON;
    } else if (token === RIGHT_BRACE) {
      this.pop();
    } else {
      this.parseError(token, value);
    }
  }else if(this.state === COLON){
    if (token === COLON) { this.state = VALUE; }
    else { this.parseError(token, value); }
  }else if(this.state === COMMA){
    // After a value inside a container: accept a separator or the closer
    // that matches the current mode.
    if (token === COMMA) {
      if (this.mode === ARRAY) { this.key++; this.state = VALUE; }
      else if (this.mode === OBJECT) { this.state = KEY; }
    } else if (token === RIGHT_BRACKET && this.mode === ARRAY || token === RIGHT_BRACE && this.mode === OBJECT) {
      this.pop();
    } else {
      this.parseError(token, value);
    }
  }else{
    this.parseError(token, value);
  }
};
module.exports = Parser;

View File

@@ -0,0 +1,36 @@
{
"name": "jsonparse",
"description": "This is a pure-js JSON streaming parser for node.js",
"tags": [
"json",
"stream"
],
"version": "0.0.5",
"author": {
"name": "Tim Caswell",
"email": "tim@creationix.com"
},
"repository": {
"type": "git",
"url": "http://github.com/creationix/jsonparse.git"
},
"devDependencies": {
"tape": "~0.1.1",
"tap": "~0.3.3"
},
"scripts": {
"test": "tap test/*.js"
},
"bugs": "http://github.com/creationix/jsonparse/issues",
"engines": [
"node >= 0.2.0"
],
"license": "MIT",
"main": "jsonparse.js",
"readme": "This is a streaming JSON parser. For a simpler, sax-based version see this gist: https://gist.github.com/1821394\n\nThe MIT License (MIT)\nCopyright (c) 2011-2012 Tim Caswell\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\n",
"_id": "jsonparse@0.0.5",
"dist": {
"shasum": "f19c1fbd65775bf20a6379b86a64b7745820c137"
},
"_from": "jsonparse@0.0.5"
}

View File

@@ -0,0 +1,167 @@
[
{
},
{
"image": [
{"shape": "rect", "fill": "#333", "stroke": "#999", "x": 0.5e+1, "y": 0.5, "z": 0.8e-0, "w": 0.5e5, "u": 2E10, "foo": 2E+1, "bar": 2E-0, "width": 47, "height": 47}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [2,6],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": true,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,47.5],[47.5,47.5],[47.5,0.5]]}
],
"solid": {
"1": [2,4],
"2": [1],
"3": [2],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": false,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,0.5],[47.5,47.5],[0.5,47.5]]}
],
"solid": {
"1": [2],
"2": [3],
"3": [2,6],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": true,"9": false}
},
{
"image": [
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,47.5],[47.5,0.5]]}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [2,6],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [3],
"7": [4,8],
"8": [7],
"9": [6,8]
},
"corners": {"1": false,"3": true,"7": true,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[0.5,47.5],[47.5,0.5]]}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [2,6],
"4": [1],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [9],
"9": [6,8]
},
"corners": {"1": true,"3": false,"7": true,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,47.5],[0.5,23.5],[24.5,23.5],[24.5,0.5],[47.5,0.5],[47.5,47.5]]}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [6,2],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [9],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": false,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,0.5],[23.5,0.5],[23.5,24.5],[47.5,24.5],[47.5,47.5],[0.5,47.5]]}
],
"jumpable": 3,
"solid": {
"1": [4,2],
"2": [],
"3": [2,6],
"4": [7],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": true,"9": false}
},
{
"image": [
{"shape": "circle", "fill": "#ff0", "stroke": "#ff8", "cx": 24, "cy": 24, "r": 18}
],
"item": true
},
{
"image": [
{"shape": "polygon", "fill": "#842", "stroke": "#f84", "points": [[4.5,0.5],[14.5,0.5],[14.5,17.5],[34,17.5],[33.5,0.5],[43.5,0.5],[43.5,47.5],[33.5,47.5],[33.5,30.5],[14.5,30.5],[14.5,47.5],[4.5,47.5]]}
],
"jumpable": 3
},
{
"image": [
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,0.5],[24,47.5]]}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [2,6],
"4": [1],
"5": [2,8,1,3,7,9,4,6],
"6": [3],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": false,"3": false,"7": true,"9": true}
},
{
"image": [
{"shape": "rect", "fill": "#114acb", "x": 0.5, "y": 0.5, "width": 47, "height": 47},
{"shape": "polygon", "fill": "rgba(255,255,255,0.30)", "points": [[0.5,0.5],[47.5,0.5],[40,8],[8,8],[8,40],[0.5,47.5]]},
{"shape": "polygon", "fill": "rgba(0,0,0,0.30)", "points": [[47.5,0.5],[48,48],[0.5,47.5],[8,40],[40,40],[40,8]]},
{"shape": "polygon", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "points": [[24,9],[35,20],[26,29],[26,33],[22,33],[22,27],[29,20],[24,15],[16,23],[13,20]]},
{"shape": "rect", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "x": 22, "y":35, "width": 4, "height": 4}
]
}
]

View File

@@ -0,0 +1,180 @@
[
{
},
{
"image": [
{"shape": "rect", "fill": "#333", "stroke": "#999", "x": 0.5, "y": 0.5, "width": 47, "height": 47}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [2,6],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": true,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,47.5],[47.5,47.5],[47.5,0.5]]}
],
"solid": {
"1": [2,4],
"2": [1],
"3": [2],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": false,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,0.5],[47.5,47.5],[0.5,47.5]]}
],
"solid": {
"1": [2],
"2": [3],
"3": [2,6],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": true,"9": false}
},
{
"image": [
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,47.5],[47.5,0.5]]}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [2,6],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [3],
"7": [4,8],
"8": [7],
"9": [6,8]
},
"corners": {"1": false,"3": true,"7": true,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[0.5,47.5],[47.5,0.5]]}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [2,6],
"4": [1],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [9],
"9": [6,8]
},
"corners": {"1": true,"3": false,"7": true,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,47.5],[0.5,23.5],[24.5,23.5],[24.5,0.5],[47.5,0.5],[47.5,47.5]]}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [6,2],
"4": [],
"5": [2,8,1,3,7,9,4,6],
"6": [9],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": false,"9": true}
},
{
"image": [
{"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,0.5],[23.5,0.5],[23.5,24.5],[47.5,24.5],[47.5,47.5],[0.5,47.5]]}
],
"jumpable": 3,
"solid": {
"1": [4,2],
"2": [],
"3": [2,6],
"4": [7],
"5": [2,8,1,3,7,9,4,6],
"6": [],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": true,"3": true,"7": true,"9": false}
},
{
"image": [
{"shape": "circle", "fill": "#ff0", "stroke": "#ff8", "cx": 24, "cy": 24, "r": 18}
],
"item": true
},
{
"image": [
{"shape": "polygon", "fill": "#842", "stroke": "#f84", "points": [[4.5,0.5],[14.5,0.5],[14.5,17.5],[34,17.5],[33.5,0.5],[43.5,0.5],[43.5,47.5],[33.5,47.5],[33.5,30.5],[14.5,30.5],[14.5,47.5],[4.5,47.5]]}
],
"jumpable": 3
},
{
"image": [
{"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,0.5],[24,47.5]]}
],
"jumpable": 3,
"solid": {
"1": [2,4],
"2": [],
"3": [2,6],
"4": [1],
"5": [2,8,1,3,7,9,4,6],
"6": [3],
"7": [4,8],
"8": [],
"9": [6,8]
},
"corners": {"1": false,"3": false,"7": true,"9": true}
},
{
"image": [
{"shape": "rect", "fill": "#114acb", "x": 0.5, "y": 0.5, "width": 47, "height": 47},
{"shape": "polygon", "fill": "rgba(255,255,255,0.30)", "points": [[0.5,0.5],[47.5,0.5],[40,8],[8,8],[8,40],[0.5,47.5]]},
{"shape": "polygon", "fill": "rgba(0,0,0,0.30)", "points": [[47.5,0.5],[48,48],[0.5,47.5],[8,40],[40,40],[40,8]]},
{"shape": "polygon", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "points": [[24,9],[35,20],[26,29],[26,33],[22,33],[22,27],[29,20],[24,15],[16,23],[13,20]]},
{"shape": "rect", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "x": 22, "y":35, "width": 4, "height": 4}
],
"item": true
},
{
"image": [
{"shape": "circle", "fill": "#80f", "stroke": "#88f", "cx": 24, "cy": 24, "r": 18}
],
"item": true
},
{
"image": [
{"shape": "circle", "fill": "#4f4", "stroke": "#8f8", "cx": 24, "cy": 24, "r": 18}
],
"item": true
}
]

View File

@@ -0,0 +1,110 @@
// Regression tests for multi-byte UTF-8 characters, including characters
// split across write() chunk boundaries (exercises the temp_buffs /
// bytes_remaining carry-over logic in the tokenizer).
var test = require('tape');
var Parser = require('../');

test('2 byte utf8 \'De\' character: д', function (t) {
  t.plan(1);
  var p = new Parser();
  p.onValue = function (value) {
    t.equal(value, 'д');
  };
  var de_buffer = new Buffer([0xd0, 0xb4]);
  p.write('"');
  p.write(de_buffer);
  p.write('"');
});

test('3 byte utf8 \'Han\' character: 我', function (t) {
  t.plan(1);
  var p = new Parser();
  p.onValue = function (value) {
    t.equal(value, '我');
  };
  var han_buffer = new Buffer([0xe6, 0x88, 0x91]);
  p.write('"');
  p.write(han_buffer);
  p.write('"');
});

test('4 byte utf8 character (unicode scalar U+2070E): 𠜎', function (t) {
  t.plan(1);
  var p = new Parser();
  p.onValue = function (value) {
    t.equal(value, '𠜎');
  };
  var Ux2070E_buffer = new Buffer([0xf0, 0xa0, 0x9c, 0x8e]);
  p.write('"');
  p.write(Ux2070E_buffer);
  p.write('"');
});

// The split-character cases: the tokenizer must buffer the partial sequence
// and complete it from the next chunk.
test('3 byte utf8 \'Han\' character chunked inbetween 2nd and 3rd byte: 我', function (t) {
  t.plan(1);
  var p = new Parser();
  p.onValue = function (value) {
    t.equal(value, '我');
  };
  var han_buffer_first = new Buffer([0xe6, 0x88]);
  var han_buffer_second = new Buffer([0x91]);
  p.write('"');
  p.write(han_buffer_first);
  p.write(han_buffer_second);
  p.write('"');
});

test('4 byte utf8 character (unicode scalar U+2070E) chunked inbetween 2nd and 3rd byte: 𠜎', function (t) {
  t.plan(1);
  var p = new Parser();
  p.onValue = function (value) {
    t.equal(value, '𠜎');
  };
  var Ux2070E_buffer_first = new Buffer([0xf0, 0xa0]);
  var Ux2070E_buffer_second = new Buffer([0x9c, 0x8e]);
  p.write('"');
  p.write(Ux2070E_buffer_first);
  p.write(Ux2070E_buffer_second);
  p.write('"');
});

// Randomised split point over a mixed 1-4-byte string; the offset is logged
// so a failing run can be reproduced.
test('1-4 byte utf8 character string chunked inbetween random bytes: Aж文𠜱B', function (t) {
  t.plan(1);
  var p = new Parser();
  p.onValue = function (value) {
    t.equal(value, 'Aж文𠜱B');
  };
  var eclectic_buffer = new Buffer([0x41, // A
                                    0xd0, 0xb6, // ж
                                    0xe6, 0x96, 0x87, // 文
                                    0xf0, 0xa0, 0x9c, 0xb1, // 𠜱
                                    0x42]); // B
  var rand_chunk = Math.floor(Math.random() * (eclectic_buffer.length));
  var first_buffer = eclectic_buffer.slice(0, rand_chunk);
  var second_buffer = eclectic_buffer.slice(rand_chunk);
  //console.log('eclectic_buffer: ' + eclectic_buffer)
  //console.log('sliced from 0 to ' + rand_chunk);
  //console.log(first_buffer);
  //console.log('then sliced from ' + rand_chunk + ' to the end');
  //console.log(second_buffer);
  console.log('chunked after offset ' + rand_chunk);
  p.write('"');
  p.write(first_buffer);
  p.write(second_buffer);
  p.write('"');
});

View File

@@ -0,0 +1,54 @@
// Primitive-value tests: each entry in `expected` is [key path, value] — the
// path of stack keys leading to the value, and the value itself, in the
// order onValue should observe them.
var test = require('tape');
var Parser = require('../');

var expected = [
  [ [], '' ],
  [ [], 'Hello' ],
  [ [], 'This"is' ],
  [ [], '\r\n\f\t\\/"' ],
  [ [], 'Λάμβδα' ],
  [ [], '\\' ],
  [ [], '/' ],
  [ [], '"' ],
  [ [ 0 ], 0 ],
  [ [ 1 ], 1 ],
  [ [ 2 ], -1 ],
  [ [], [ 0, 1, -1 ] ],
  [ [ 0 ], 1 ],
  [ [ 1 ], 1.1 ],
  [ [ 2 ], -1.1 ],
  [ [ 3 ], -1 ],
  [ [], [ 1, 1.1, -1.1, -1 ] ],
  [ [ 0 ], -1 ],
  [ [], [ -1 ] ],
  [ [ 0 ], -0.1 ],
  [ [], [ -0.1 ] ],
  [ [ 0 ], 6.019999999999999e+23 ],
  [ [], [ 6.019999999999999e+23 ] ]
];

test('primitives', function (t) {
  t.plan(23);
  var p = new Parser();
  p.onValue = function (value) {
    // Rebuild the key path from the parser's container stack (skipping the
    // synthetic root frame) plus the current key, and compare in order.
    var keys = this.stack
      .slice(1)
      .map(function (item) { return item.key })
      .concat(this.key !== undefined ? this.key : [])
    ;
    t.deepEqual(
      [ keys, value ],
      expected.shift()
    );
  };
  p.write('"""Hello""This\\"is""\\r\\n\\f\\t\\\\\\/\\""');
  p.write('"\\u039b\\u03ac\\u03bc\\u03b2\\u03b4\\u03b1"');
  p.write('"\\\\"');
  p.write('"\\/"');
  p.write('"\\""');
  p.write('[0,1,-1]');
  p.write('[1.0,1.1,-1.1,-1.0][-1][-0.1]');
  p.write('[6.02e23]');
});

View File

@@ -0,0 +1,38 @@
// UTF-8 decoding tests for in-band (non-escaped) multi-byte characters.
var test = require('tape');
var Parser = require('../');

test('3 bytes of utf8', function (t) {
  t.plan(1);
  var p = new Parser();
  p.onValue = function (value) {
    t.equal(value, '├──');
  };
  p.write('"├──"');
});

test('utf8 snowman', function (t) {
  t.plan(1);
  var p = new Parser();
  p.onValue = function (value) {
    t.equal(value, '☃');
  };
  p.write('"☃"');
});

test('utf8 with regular ascii', function (t) {
  t.plan(4);
  var p = new Parser();
  var expected = [ "snow: ☃!", "xyz", "¡que!" ];
  // The fourth onValue call is the completed array itself, so append a copy
  // of the three strings as the final expectation.
  expected.push(expected.slice());
  p.onValue = function (value) {
    t.deepEqual(value, expected.shift());
  };
  p.write('["snow: ☃!","xyz","¡que!"]');
});

View File

@@ -0,0 +1,38 @@
{
"name": "JSONStream",
"version": "0.4.4",
"description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
"homepage": "http://github.com/dominictarr/JSONStream",
"repository": {
"type": "git",
"url": "git://github.com/dominictarr/JSONStream.git"
},
"dependencies": {
"jsonparse": "0.0.5"
},
"devDependencies": {
"it-is": "~1",
"assertions": "~2.2.2",
"render": "~0.1.1",
"trees": "~0.0.3",
"event-stream": "~0.7.0"
},
"author": {
"name": "Dominic Tarr",
"email": "dominic.tarr@gmail.com",
"url": "http://bit.ly/dominictarr"
},
"scripts": {
"test": "set -e; for t in test/*.js; do echo '***' $t '***'; node $t; done"
},
"optionalDependencies": {},
"engines": {
"node": "*"
},
"readme": "# JSONStream\n\nstreaming JSON.parse and stringify\n\n<img src=https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master>\n\n## example\n\n```javascript\n\nvar request = require('request')\n , JSONStream = require('JSONStream')\n , es = require('event-stream')\n\nvar parser = JSONStream.parse(['rows', true])\n , req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})\n , logger = es.mapSync(function (data) {\n console.error(data)\n return data\n })\n```\n\nin node 0.4.x\n\n``` javascript\n\nreq.pipe(parser)\nparser.pipe(logger)\n\n```\n\nin node v0.5.x\n\n``` javascript\nreq.pipe(parser).pipe(logger)\n\n```\n\n## JSONStream.parse(path)\n\nusally, a json API will return a list of objects.\n\n`path` should be an array of property names, `RegExp`s, booleans, and/or functions.\nany object that matches the path will be emitted as 'data' (and `pipe`d down stream)\n\na 'root' event is emitted when all data has been received. The 'root' event passes the root object & the count of matched objects.\n\nif `path` is empty or null, no 'data' events are emitted.\n\n### example\n\nquery a couchdb view:\n\n``` bash\ncurl -sS localhost:5984/tests/_all_docs&include_docs=true\n```\nyou will get something like this:\n\n``` js\n{\"total_rows\":129,\"offset\":0,\"rows\":[\n { \"id\":\"change1_0.6995461115147918\"\n , \"key\":\"change1_0.6995461115147918\"\n , \"value\":{\"rev\":\"1-e240bae28c7bb3667f02760f6398d508\"}\n , \"doc\":{\n \"_id\": \"change1_0.6995461115147918\"\n , \"_rev\": \"1-e240bae28c7bb3667f02760f6398d508\",\"hello\":1}\n },\n { \"id\":\"change2_0.6995461115147918\"\n , \"key\":\"change2_0.6995461115147918\"\n , \"value\":{\"rev\":\"1-13677d36b98c0c075145bb8975105153\"}\n , \"doc\":{\n \"_id\":\"change2_0.6995461115147918\"\n , \"_rev\":\"1-13677d36b98c0c075145bb8975105153\"\n , \"hello\":2\n }\n },\n]}\n\n```\n\nwe are probably most interested in the `rows.*.docs`\n\ncreate a `Stream` that parses the documents from the feed like 
this:\n\n``` js\nvar stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc\n\nstream.on('data', function(data) {\n console.log('received:', data);\n});\n\nstream.on('root', function(root, count) {\n if (!count) {\n console.log('no matches found:', root);\n }\n});\n```\nawesome!\n\n## JSONStream.stringify(open, sep, close)\n\nCreate a writable stream.\n\nyou may pass in custom `open`, `close`, and `seperator` strings.\nBut, by default, `JSONStream.stringify()` will create an array,\n(with default options `open='[\\n', sep='\\n,\\n', close='\\n]\\n'`)\n\nIf you call `JSONStream.stringify(false)`\nthe elements will only be seperated by a newline.\n\nIf you only write one item this will be valid JSON.\n\nIf you write many items,\nyou can use a `RegExp` to split it into valid chunks.\n\n## JSONStream.stringifyObject(open, sep, close)\n\nVery much like `JSONStream.stringify`,\nbut creates a writable stream for objects instead of arrays.\n\nAccordingly, `open='{\\n', sep='\\n,\\n', close='\\n}\\n'`.\n\nWhen you `.write()` to the stream you must supply an array with `[ key, data ]`\nas the first argument.\n\n## numbers\n\nThere are occasional problems parsing and unparsing very precise numbers.\n\nI have opened an issue here:\n\nhttps://github.com/creationix/jsonparse/issues/2\n\n+1\n\n## Acknowlegements\n\nthis module depends on https://github.com/creationix/jsonparse\nby Tim Caswell\nand also thanks to Florent Jaby for teaching me about parsing with:\nhttps://github.com/Floby/node-json-streams\n\n## license\n\nMIT / APACHE2\n",
"_id": "JSONStream@0.4.4",
"dist": {
"shasum": "cc2cf119286c45be150423cbc128d480e9b54ae2"
},
"_from": "JSONStream@~0.4.3"
}

View File

@@ -0,0 +1,145 @@
# JSONStream
streaming JSON.parse and stringify
<img src=https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master>
## example
```javascript
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
var parser = JSONStream.parse(['rows', true])
, req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
, logger = es.mapSync(function (data) {
console.error(data)
return data
})
```
in node 0.4.x
``` javascript
req.pipe(parser)
parser.pipe(logger)
```
in node v0.5.x
``` javascript
req.pipe(parser).pipe(logger)
```
## JSONStream.parse(path)
usually, a JSON API will return a list of objects.
`path` should be an array of property names, `RegExp`s, booleans, and/or functions.
any object that matches the path will be emitted as 'data' (and `pipe`d down stream)
a 'root' event is emitted when all data has been received. The 'root' event passes the root object & the count of matched objects.
if `path` is empty or null, no 'data' events are emitted.
### example
query a couchdb view:
``` bash
curl -sS localhost:5984/tests/_all_docs&include_docs=true
```
you will get something like this:
``` js
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}
```
we are probably most interested in the `rows.*.docs`
create a `Stream` that parses the documents from the feed like this:
``` js
var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
stream.on('data', function(data) {
console.log('received:', data);
});
stream.on('root', function(root, count) {
if (!count) {
console.log('no matches found:', root);
}
});
```
awesome!
## JSONStream.stringify(open, sep, close)
Create a writable stream.
you may pass in custom `open`, `close`, and `separator` strings.
But, by default, `JSONStream.stringify()` will create an array,
(with default options `open='[\n', sep='\n,\n', close='\n]\n'`)
If you call `JSONStream.stringify(false)`
the elements will only be separated by a newline.
If you only write one item this will be valid JSON.
If you write many items,
you can use a `RegExp` to split it into valid chunks.
## JSONStream.stringifyObject(open, sep, close)
Very much like `JSONStream.stringify`,
but creates a writable stream for objects instead of arrays.
Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
When you `.write()` to the stream you must supply an array with `[ key, data ]`
as the first argument.
## numbers
There are occasional problems parsing and unparsing very precise numbers.
I have opened an issue here:
https://github.com/creationix/jsonparse/issues/2
+1
## Acknowledgements
this module depends on https://github.com/creationix/jsonparse
by Tim Caswell
and also thanks to Florent Jaby for teaching me about parsing with:
https://github.com/Floby/node-json-streams
## license
MIT / APACHE2

View File

@@ -0,0 +1,41 @@
// Round-trip test: stringify ten random objects with JSONStream.stringify()
// and parse them back, asserting the parsed lines match the originals.
// NOTE(review): fs, file, stringified, called, and ended are declared but
// never used in this script — likely copied from a sibling test.
var fs = require ('fs')
  , join = require('path').join
  , file = join(__dirname, 'fixtures','all_npm.json')
  , JSONStream = require('../')
  , it = require('it-is').style('colour')

// Produce either a small object literal or a nested array, at random.
function randomObj () {
  return (
    Math.random () < 0.4
    ? {hello: 'eonuhckmqjk',
       whatever: 236515,
       lies: true,
       nothing: [null],
//      stuff: [Math.random(),Math.random(),Math.random()]
      }
    : ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
  )
}

var expected = []
  , stringify = JSONStream.stringify()
  , es = require('event-stream')
  , stringified = ''
  , called = 0
  , count = 10
  , ended = false

while (count --)
  expected.push(randomObj())

// Pipe: source array -> stringify -> parse -> collect, then compare.
es.connect(
  es.readArray(expected),
  stringify,
  JSONStream.parse([true]),
  es.writeArray(function (err, lines) {
    it(lines).has(expected)
    console.error('PASSED')
  })
)

View File

@@ -0,0 +1,22 @@
// Regression test: destroying the client socket mid-stream must still cause
// the parser stream to emit 'close' (no hang, no crash).
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var JSONStream = require('../');

var server = net.createServer(function(client) {
  var parser = JSONStream.parse([]);
  parser.on('close', function() {
    console.error('PASSED');
    server.close();
  });
  client.pipe(parser);
  // Tear the connection down immediately so the parser sees a truncated stream.
  client.destroy();
});
server.listen(9999);

var client = net.connect({ port : 9999 }, function() {
  fs.createReadStream(file).pipe(client);
});

View File

@@ -0,0 +1,44 @@
// Isolation test: two independent parser instances fed different documents
// must not share state — the empty-array input yields no data events while
// the one-element input yields exactly one.
var JSONStream = require('../')
  , stream = require('stream')
  , it = require('it-is')

var output = [ [], [] ]

var parser1 = JSONStream.parse(['docs', /./])
parser1.on('data', function(data) {
  output[0].push(data)
})

var parser2 = JSONStream.parse(['docs', /./])
parser2.on('data', function(data) {
  output[1].push(data)
})

var pending = 2
// Runs after both parsers end; only then is the combined output checked.
function onend () {
  if (--pending > 0) return
  it(output).deepEqual([
    [], [{hello: 'world'}]
  ])
  console.error('PASSED')
}
parser1.on('end', onend)
parser2.on('end', onend)

// Minimal push-style readable stream good enough to drive .pipe().
function makeReadableStream() {
  var readStream = new stream.Stream()
  readStream.readable = true
  readStream.write = function (data) { this.emit('data', data) }
  readStream.end = function (data) { this.emit('end') }
  return readStream
}

var emptyArray = makeReadableStream()
emptyArray.pipe(parser1)
emptyArray.write('{"docs":[]}')
emptyArray.end()

var objectArray = makeReadableStream()
objectArray.pipe(parser2)
objectArray.write('{"docs":[{"hello":"world"}]}')
objectArray.end()

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,39 @@
// Parses fixtures/all_npm.json, selecting rows whose path component is a
// numeric index, and checks that every row with {id, value.rev, key} strings
// is emitted and that the full set of rows matches the fixture.
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
// Selector predicate: accept only keys that parse as integers (array indices).
function fn (s) {
return !isNaN(parseInt(s, 10))
}
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', fn])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
// BUG FIX: the original called `it.has({...})` without wrapping `data`,
// so the per-row shape assertion never inspected the row at all.
it(data).has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
// Final assertions run at process exit so all stream events have fired.
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

View File

@@ -0,0 +1,42 @@
// Streams the fixture three times over one socket (separated by whitespace)
// and checks the parser emits one 'root' event per JSON document (3) and
// sees every row key at least 3 times.
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var datas = {}
var server = net.createServer(function(client) {
var root_calls = 0;
var data_calls = 0;
var parser = JSONStream.parse(['rows', true, 'key']);
parser.on('root', function(root, count) {
++ root_calls;
});
parser.on('data', function(data) {
++ data_calls;
// Tally how often each key is seen across the three documents.
datas[data] = (datas[data] || 0) + 1
it(data).typeof('string')
});
parser.on('end', function() {
console.log('END')
// Every key must have appeared at least 3 times (once per document).
var min = Infinity
for (var d in datas)
min = min > datas[d] ? datas[d] : min
it(root_calls).equal(3);
it(min).equal(3);
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
// Three copies of the same document in a single TCP stream.
var msgs = str + ' ' + str + '\n\n' + str
client.end(msgs);
});

View File

@@ -0,0 +1,35 @@
// Error-handling test: sends one valid document followed by a stray '}' and
// expects the parser to emit 'error' (never a second 'root') so the server
// can close.
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var server = net.createServer(function(client) {
var root_calls = 0;
var data_calls = 0;
var parser = JSONStream.parse();
parser.on('root', function(root, count) {
++ root_calls;
// The trailing '}' is invalid, so a second root must never be parsed.
it(root_calls).notEqual(2);
});
parser.on('error', function(err) {
console.log(err);
server.close();
});
parser.on('end', function() {
console.log('END');
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
// Valid document plus one unbalanced closing brace.
var msgs = str + '}';
client.end(msgs);
});

View File

@@ -0,0 +1,28 @@
/*
sometimes jsonparse changes numbers slightly.
*/
// Compares jsonparse's number round-trip against core JSON, allowing the
// last digits to differ: only the first 12 characters of the decimal
// representation are asserted.
// NOTE(review): `r` is drawn once, outside the loop, so all 20 iterations
// exercise the same value -- possibly it was meant to be re-drawn per
// iteration; confirm intent before changing.
var r = Math.random()
, Parser = require('jsonparse')
, p = new Parser()
, assert = require('assert')
, times = 20
while (times --) {
assert.equal(JSON.parse(JSON.stringify(r)), r, 'core JSON')
// onValue fires for each parsed value (the number and the enclosing array).
p.onValue = function (v) {
console.error('parsed', v)
assert.equal(
String(v).slice(0,12),
String(r).slice(0,12)
)
}
console.error('correct', r)
p.write (new Buffer(JSON.stringify([r])))
}

View File

@@ -0,0 +1,41 @@
// Stringify-only test: pipes 10 random objects through JSONStream.stringify()
// and asserts that concatenating the emitted chunks yields JSON that parses
// back to the original array.
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
// Either a small object literal or a nested array, at random.
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
//JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
// Joined chunks must be one valid JSON array equal to the input.
it(JSON.parse(lines.join(''))).deepEqual(expected)
console.error('PASSED')
})
)

View File

@@ -0,0 +1,47 @@
// stringifyObject test: runs 10 pipelines in parallel; pipeline i writes i
// random [key, value] pairs through JSONStream.stringifyObject() and checks
// that the concatenated output parses back to the expected object.
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
, es = require('event-stream')
, pending = 10
, passed = true
// Either a small object literal or a nested array, at random.
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
// IIFE captures `count` (= ix) per pipeline.
for (var ix = 0; ix < pending; ix++) (function (count) {
var expected = {}
, stringify = JSONStream.stringifyObject()
es.connect(
stringify,
es.writeArray(function (err, lines) {
it(JSON.parse(lines.join(''))).deepEqual(expected)
// Print PASSED only when the last of the 10 pipelines completes.
if (--pending === 0) {
console.error('PASSED')
}
})
)
while (count --) {
// Random hex key; stringifyObject consumes [key, value] tuples.
var key = Math.random().toString(16).slice(2)
expected[key] = randomObj()
stringify.write([ key, expected[key] ])
}
process.nextTick(function () {
stringify.end()
})
})(ix)

View File

@@ -0,0 +1,35 @@
// Parses fixtures/all_npm.json, selecting rows whose path component matches
// /\d+/ (numeric indices), and checks every row is emitted with string
// id/value.rev/key fields and that the full set matches the fixture.
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
// BUG FIX: the original called `it.has({...})` without wrapping `data`,
// so the per-row shape assertion never inspected the row at all.
it(data).has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
// Final assertions run at process exit so all stream events have fired.
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

View File

@@ -0,0 +1,29 @@
// Parses ../package.json with selector [] (emit the single root value) and
// checks exactly one 'data' event fires, deep-equal to JSON.parse of the file.
var fs = require('fs');
var path = require('path');
var JSONStream = require('../');
var it = require('it-is');

var file = path.join(__dirname, '..', 'package.json');
var expected = JSON.parse(fs.readFileSync(file));
var parser = JSONStream.parse([]);
var called = 0;
var ended = false;
var parsed = [];

fs.createReadStream(file).pipe(parser);

parser.on('data', function (data) {
  called += 1;
  // The root value must equal the synchronously-parsed file.
  it(data).deepEqual(expected);
});

parser.on('end', function () {
  ended = true;
});

// Assert at exit so the stream has fully drained.
process.on('exit', function () {
  it(called).equal(1);
  console.error('PASSED');
});

View File

@@ -0,0 +1,41 @@
// Round-trip test: stringify 10 random objects, re-parse the stream with
// JSONStream.parse([/./]) (regex matches every array index), and assert the
// parsed lines include the originals.
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
// Either a small object literal or a nested array, at random.
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)

View File

@@ -0,0 +1 @@
node_modules

View File

@@ -0,0 +1,3 @@
language: node_js
node_js:
- 0.8

View File

@@ -0,0 +1,18 @@
This software is released under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

6
node_modules/browserify/node_modules/browser-pack/cmd.js generated vendored Executable file
View File

@@ -0,0 +1,6 @@
#!/usr/bin/env node
// CLI entry point: read a JSON array of module rows on stdin and write the
// packed browser bundle to stdout.
var pack = require('./')();
process.stdin.pipe(pack).pipe(process.stdout);
// Old-streams API: stdin starts paused; resume it so data begins flowing.
process.stdin.resume();

View File

@@ -0,0 +1,13 @@
[
{
"id": "a1b5af78",
"source": "console.log(require('./foo')(5))",
"deps": { "./foo": "b8f69fa5" },
"entry": true
},
{
"id": "b8f69fa5",
"source": "module.exports = function (n) { return n * 111 }",
"deps": {}
}
]

View File

@@ -0,0 +1 @@
(function(p,c,e){function r(n){if(!c[n]){c[n]={exports:{}};p[n][0](function(x){return r(p[n][1][x])},c[n],c[n].exports);}return c[n].exports}for(var i=0;i<e.length;i++)r(e[i]);return r})({"a1b5af78":[function(require,module,exports){console.log(require('./foo')(5))},{"./foo":"b8f69fa5"}],"b8f69fa5":[function(require,module,exports){module.exports = function (n) { return n * 111 }},{}]},{},["a1b5af78","b8f69fa5"])

View File

@@ -0,0 +1,15 @@
[
{
"id": "a1b5af78",
"source": "console.log(require('./foo')(5))",
"deps": { "./foo": "b8f69fa5" },
"entry": true,
"sourceFile": "wunder/bar.js"
},
{
"id": "b8f69fa5",
"source": "module.exports = function (n) { return n * 111 }",
"deps": {},
"sourceFile": "foo.js"
}
]

View File

@@ -0,0 +1,6 @@
(function(e,t,n,r){function i(r){if(!n[r]){if(!t[r]){if(e)return e(r);throw new Error("Cannot find module '"+r+"'")}var s=n[r]={exports:{}};t[r][0](function(e){var n=t[r][1][e];return i(n?n:e)},s,s.exports)}return n[r].exports}for(var s=0;s<r.length;s++)i(r[s]);return i})(typeof require!=="undefined"&&require,{"a1b5af78":[function(require,module,exports){
console.log(require('./foo')(5))
},{"./foo":"b8f69fa5"}],"b8f69fa5":[function(require,module,exports){
module.exports = function (n) { return n * 111 }
},{}]},{},["a1b5af78"])
//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJ3dW5kZXIvYmFyLmpzIiwiZm9vLmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7QUFBQTs7QUNBQSJ9

View File

@@ -0,0 +1,78 @@
var JSONStream = require('JSONStream');
var duplexer = require('duplexer');
var through = require('through');
var uglify = require('uglify-js');
var fs = require('fs');
var path = require('path');
var combineSourceMap = require('combine-source-map');
// Minified module-loader prelude, computed once at require time; the bundle
// body is appended after the opening '{' of the module table.
var prelude = (function () {
var src = fs.readFileSync(path.join(__dirname, 'prelude.js'), 'utf8');
return uglify(src) + '(typeof require!=="undefined"&&require,{';
})();
// Count the '\n' characters in src; any falsy input (empty string, null,
// undefined) counts as zero.
function newlinesIn(src) {
    if (!src) return 0;
    return src.split('\n').length - 1;
}
// Create a browser-pack stream: consumes module rows
// ({ id, source, deps, entry, order?, sourceFile? }) -- parsed from a JSON
// array unless opts.raw -- and emits the bundled JavaScript, appending an
// inline source-map comment when any row carries a sourceFile.
module.exports = function (opts) {
if (!opts) opts = {};
var parser = opts.raw ? through() : JSONStream.parse([ true ]);
var output = through(write, end);
parser.pipe(output);
var first = true;
var entries = [];
var order = [];
// Track the current output line so each file's map is offset correctly.
var lineno = 1 + newlinesIn(prelude);
var sourcemap;
// Duplex: callers write rows into `parser` and read bundle text from `output`.
return duplexer(parser, output);
function write (row) {
// Emit the loader prelude exactly once, before the first module.
if (first) this.queue(prelude);
if (row.sourceFile) {
// Lazily create the combined map and register this file at its offset.
sourcemap = sourcemap || combineSourceMap.create();
sourcemap.addFile(
{ sourceFile: row.sourceFile, source: row.source },
{ line: lineno }
);
}
// Produces: "id":[function(require,module,exports){ <source> }, deps]
// Existing source-map comments are stripped; the combined map replaces them.
var wrappedSource = [
(first ? '' : ','),
JSON.stringify(row.id),
':[',
'function(require,module,exports){\n',
combineSourceMap.removeComments(row.source),
'\n},',
JSON.stringify(row.deps || {}),
']'
].join('');
this.queue(wrappedSource);
lineno += newlinesIn(wrappedSource);
first = false;
// Entry modules run at load time; an explicit `order` pins their position.
if (row.entry && row.order !== undefined) {
entries.splice(row.order, 0, row.id);
}
else if (row.entry) entries.push(row.id);
}
function end () {
// Even an empty input stream must yield valid JavaScript.
if (first) this.queue(prelude);
this.queue('},{},' + JSON.stringify(entries) + ')');
if (sourcemap) this.queue('\n' + sourcemap.comment());
this.queue(null);
}
};

View File

@@ -0,0 +1 @@
../uglify-js/bin/uglifyjs

View File

@@ -0,0 +1,16 @@
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
pids
logs
results
node_modules
npm-debug.log
tmp

View File

@@ -0,0 +1,4 @@
language: node_js
node_js:
- 0.6
- 0.8

View File

@@ -0,0 +1,16 @@
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
pids
logs
results
node_modules
npm-debug.log
tmp

View File

@@ -0,0 +1,111 @@
# combine-source-map [![build status](https://secure.travis-ci.org/thlorenz/combine-source-map.png)](http://travis-ci.org/thlorenz/combine-source-map)
Add source maps of multiple files, offset them and then combine them into one source map.
```js
var convert = require('convert-source-map');
var combine = require('combine-source-map');
var fooComment = '//@ sourceMappingURL=data:application/json;base64,eyJ2Z [..] pzJylcbiJdfQ==';
var barComment = '//@ sourceMappingURL=data:application/json;base64,eyJ2Z [..] VjaycpXG4iXX0=';
var fooFile = {
source: '(function() {\n\n console.log(require(\'./bar.js\'));\n\n}).call(this);\n' + '\n' + fooComment
, sourceFile: 'foo.js'
};
var barFile = {
source: '(function() {\n\n console.log(alert(\'alerts suck\'));\n\n}).call(this);\n' + '\n' + barComment
, sourceFile: 'bar.js'
};
var offset = { line: 2 };
var base64 = combine
.create('bundle.js')
.addFile(fooFile, offset)
.addFile(barFile, { line: offset.line + 8 })
.base64();
var sm = convert.fromBase64(base64).toObject();
console.log(sm);
```
```
{ version: 3,
file: 'bundle.js',
sources: [ 'foo.coffee', 'bar.coffee' ],
names: [],
mappings: ';;;AAAA;CAAA;CAAA,CAAA,CAAA,IAAO,GAAK;CAAZ;;;;;ACAA;CAAA;CAAA,CAAA,CAAA,IAAO,GAAK;CAAZ',
sourcesContent:
[ 'console.log(require \'./bar.js\')\n',
'console.log(alert \'alerts suck\')\n' ] }
```
## Installation
npm install combine-source-map
## API
### create()
```
/**
* @name create
* @function
* @param file {String} optional name of the generated file
* @param sourceRoot { String} optional sourceRoot of the map to be generated
* @return {Object} Combiner instance to which source maps can be added and later combined
*/
```
### Combiner.prototype.addFile(opts, offset)
```
/**
* Adds map to underlying source map.
* If source contains a source map comment that has the source of the original file inlined it will offset these
* mappings and include them.
* If no source map comment is found or it has no source inlined, mappings for the file will be generated and included
*
* @name addMap
* @function
* @param opts {Object} { sourceFile: {String}, source: {String} }
* @param offset {Object} { line: {Number}, column: {Number} }
*/
```
### Combiner.prototype.base64()
```
/**
* @name base64
* @function
* @return {String} base64 encoded combined source map
*/
```
### Combiner.prototype.comment()
```
/**
* @name comment
* @function
* @return {String} base64 encoded sourceMappingUrl comment of the combined source map
*/
```
### removeComments(src)
```
/**
* @name removeComments
* @function
* @param src
* @return {String} src with all sourceMappingUrl comments removed
*/
```
## Example
Read and run the [more elaborate example](https://github.com/thlorenz/combine-source-map/blob/master/example/two-files.js)
in order to get a better idea how things work.

View File

@@ -0,0 +1,26 @@
'use strict';
// Example: combine two files whose sources end in inline base64 source-map
// comments, offsetting the first by 2 lines and the second by 10, then print
// the decoded combined map.
var convert = require('convert-source-map');
var combine = require('..');
var fooComment = '//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiZm9vLmNvZmZlZSJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiO0FBQUE7Q0FBQTtDQUFBLENBQUEsQ0FBQSxJQUFPLEdBQUs7Q0FBWiIsInNvdXJjZXNDb250ZW50IjpbImNvbnNvbGUubG9nKHJlcXVpcmUgJy4vYmFyLmpzJylcbiJdfQ==';
var barComment = '//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYmFyLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiYmFyLmNvZmZlZSJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiO0FBQUE7Q0FBQTtDQUFBLENBQUEsQ0FBQSxJQUFPLEdBQUs7Q0FBWiIsInNvdXJjZXNDb250ZW50IjpbImNvbnNvbGUubG9nKGFsZXJ0ICdhbGVydHMgc3VjaycpXG4iXX0=';
var fooFile = {
source: '(function() {\n\n console.log(require(\'./bar.js\'));\n\n}).call(this);\n' + '\n' + fooComment
, sourceFile: 'foo.js'
};
var barFile = {
source: '(function() {\n\n console.log(alert(\'alerts suck\'));\n\n}).call(this);\n' + '\n' + barComment
, sourceFile: 'bar.js'
};
var offset = { line: 2 };
var base64 = combine
.create('bundle.js')
.addFile(fooFile, offset)
.addFile(barFile, { line: offset.line + 8 })
.base64();
// Decode for human inspection.
var sm = convert.fromBase64(base64).toObject();
console.log(sm);
View File

@@ -0,0 +1,47 @@
'use strict';
// Example: build two source-map comments from explicit map objects, combine
// them with different line offsets, and print both the combined map and its
// decoded VLQ mappings.
var convert = require('convert-source-map');
var parse = require('parse-base64vlq-mappings');
var combine = require('..');
var foo = {
version : 3,
file : 'foo.js',
sourceRoot : '',
sources : [ 'foo.coffee' ],
names : [],
mappings : ';AAAA;CAAA;CAAA,CAAA,CAAA,IAAO,GAAK;CAAZ',
sourcesContent : [ 'console.log(require \'./bar.js\')\n' ] };
var bar = {
version : 3,
file : 'bar.js',
sourceRoot : '',
sources : [ 'bar.coffee' ],
names : [],
mappings : ';AAAA;CAAA;CAAA,CAAA,CAAA,IAAO,GAAK;CAAZ',
sourcesContent : [ 'console.log(alert \'alerts suck\')\n' ] };
// Turn the map objects into inline comments to append to each file's source.
var fooComment = convert.fromObject(foo).toComment();
var barComment = convert.fromObject(bar).toComment();
var fooFile = {
source: '(function() {\n\n console.log(require(\'./bar.js\'));\n\n}).call(this);\n' + '\n' + fooComment
, sourceFile: 'foo.js'
};
var barFile = {
source: '(function() {\n\n console.log(alert(\'alerts suck\'));\n\n}).call(this);\n' + '\n' + barComment
, sourceFile: 'bar.js'
};
var offset = { line: 2 };
var base64 = combine
.create('bundle.js')
.addFile(fooFile, offset)
.addFile(barFile, { line: offset.line + 8 })
.base64();
var sm = convert.fromBase64(base64).toObject();
console.log('Combined source maps:\n', sm);
console.log('\nMappings:\n', parse(sm.mappings));

View File

@@ -0,0 +1,97 @@
'use strict';
var parse = require('parse-base64vlq-mappings');
var convert = require('convert-source-map');
var createGenerator = require('inline-source-map');
// Extract an inline source map from `source`, if one is present.
// Returns the parsed map object, or null when no source-map comment is found.
function resolveMap(source) {
    var converter = convert.fromSource(source);
    if (!converter) return null;
    return converter.toObject();
}
// True when the parsed source map inlines the original source for its first
// file (i.e. sourcesContent[0] is a non-empty string).
// Improvement: always returns a real boolean -- the original returned
// `undefined` when `sourcesContent` was absent. Callers only test truthiness,
// so behavior is compatible.
function hasInlinedSource(existingMap) {
    return !!(existingMap.sourcesContent && existingMap.sourcesContent[0]);
}
// Combiner accumulates per-file mappings into one inline source map.
function Combiner(file, sourceRoot) {
// since we include the original code in the map sourceRoot actually not needed
this.generator = createGenerator({ file: file || 'generated.js', sourceRoot: sourceRoot });
}
// Generate 1:1 mappings for a file that carries no source map of its own,
// and inline its source content.
Combiner.prototype._addGeneratedMap = function (sourceFile, source, offset) {
this.generator.addGeneratedMappings(sourceFile, source, offset);
this.generator.addSourceContent(sourceFile, source);
return this;
};
// Re-offset the mappings of a file's existing source map and inline the
// original (pre-compilation) source that map references.
Combiner.prototype._addExistingMap = function (sourceFile, source, existingMap, offset) {
var mappings = parse(existingMap.mappings);
var originalSource = existingMap.sourcesContent[0]
, originalSourceFile = existingMap.sources[0];
// Prefer the original file name recorded in the map; fall back to sourceFile.
this.generator.addMappings(originalSourceFile || sourceFile, mappings, offset);
this.generator.addSourceContent(originalSourceFile || sourceFile, originalSource);
return this;
};
/**
* Adds map to underlying source map.
* If source contains a source map comment that has the source of the original file inlined it will offset these
* mappings and include them.
* If no source map comment is found or it has no source inlined, mappings for the file will be generated and included
*
* @name addMap
* @function
* @param opts {Object} { sourceFile: {String}, source: {String} }
* @param offset {Object} { line: {Number}, column: {Number} }
*/
// Add one file's mappings to the combined map, re-using its own inline map
// when one with inlined source is present, otherwise generating 1:1 mappings.
Combiner.prototype.addFile = function (opts, offset) {
    offset = offset || {};
    if (!offset.hasOwnProperty('line')) offset.line = 0;
    if (!offset.hasOwnProperty('column')) offset.column = 0;

    var existingMap = resolveMap(opts.source);
    if (existingMap && hasInlinedSource(existingMap)) {
        return this._addExistingMap(opts.sourceFile, opts.source, existingMap, offset);
    }
    return this._addGeneratedMap(opts.sourceFile, opts.source, offset);
};
/**
* @name base64
* @function
* @return {String} base64 encoded combined source map
*/
// Delegates the final encoding to inline-source-map's generator.
Combiner.prototype.base64 = function () {
return this.generator.base64Encode();
};
/**
* @name comment
* @function
* @return {String} base64 encoded sourceMappingUrl comment of the combined source map
*/
Combiner.prototype.comment = function () {
return this.generator.inlineMappingUrl();
};
/**
* @name create
* @function
* @param file {String} optional name of the generated file
* @param sourceRoot { String} optional sourceRoot of the map to be generated
* @return {Object} Combiner instance to which source maps can be added and later combined
*/
exports.create = function (file, sourceRoot) { return new Combiner(file, sourceRoot); };
/**
* @name removeComments
* @function
* @param src
* @return {String} src with all sourceMappingUrl comments removed
*/
exports.removeComments = function (src) {
// Values without a `replace` method (non-strings) pass through untouched.
if (!src.replace) return src;
return src.replace(convert.commentRegex, '');
};

View File

@@ -0,0 +1,16 @@
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
pids
logs
results
node_modules
npm-debug.log
tmp

View File

@@ -0,0 +1,4 @@
language: node_js
node_js:
- 0.6
- 0.8

View File

@@ -0,0 +1,23 @@
Copyright 2013 Thorsten Lorenz.
All rights reserved.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,85 @@
# convert-source-map [![build status](https://secure.travis-ci.org/thlorenz/convert-source-map.png)](http://travis-ci.org/thlorenz/convert-source-map)
Converts a source-map from/to different formats and allows adding/changing properties.
```js
var convert = require('convert-source-map');
var json = convert
.fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')
.toJSON();
var modified = convert
.fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')
.setProperty('sources', [ 'CONSOLE.LOG("HI");' ])
.toJSON();
console.log(json);
console.log(modified);
```
```json
{"version":3,"file":"foo.js","sources":["console.log(\"hi\");"],"names":[],"mappings":"AAAA","sourceRoot":"/"}
{"version":3,"file":"foo.js","sources":["CONSOLE.LOG(\"HI\");"],"names":[],"mappings":"AAAA","sourceRoot":"/"}
```
## API
### fromObject(obj)
Returns source map converter from given object.
### fromJSON(json)
Returns source map converter from given json string.
### fromBase64(base64)
Returns source map converter from given base64 encoded json string.
### fromComment()
Returns source map converter from given base64 encoded json string prefixed with `//@ sourceMappingURL=...`.
### fromSource()
Finds last sourcemap comment in file and returns source map converter or returns null if no source map comment was
found.
### toObject()
Returns a copy of the underlying source map.
### toJSON([space])
Converts source map to json string. If `space` is given (optional), this will be passed to
[JSON.stringify](https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Global_Objects/JSON/stringify) when the
JSON string is generated.
### toBase64()
Converts source map to base64 encoded json string.
### toComment()
Converts source map to base64 encoded json string prefixed with `//@ sourceMappingURL=...`.
### addProperty(key, value)
Adds given property to the source map. Throws an error if property already exists.
### setProperty(key, value)
Sets given property to the source map. If property doesn't exist it is added, otherwise its value is updated.
### getProperty(key)
Gets given property of the source map.
### removeComments(src)
Returns `src` with all source map comments removed
### commentRegex
Returns the regex used to find source map comments.

View File

@@ -0,0 +1,15 @@
'use strict';
var convert = require('..');
var json = convert
.fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')
.toJSON();
var modified = convert
.fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')
.setProperty('sources', [ 'CONSOLE.LOG("HI");' ])
.toJSON();
console.log(json);
console.log(modified);

View File

@@ -0,0 +1,89 @@
'use strict';
var commentRx = /^[ \t]*\/\/@[ \t]+sourceMappingURL=data:(?:application|text)\/json;base64,(.+)/mg;
// Decode a base64 string to utf8 text.
// Uses Buffer.from(): `new Buffer(string)` is a deprecated, unsafe
// allocation API in modern Node.
function decodeBase64(base64) {
  return Buffer.from(base64, 'base64').toString();
}
// Return the text after the last comma -- for a
// `//@ sourceMappingURL=data:...;base64,XXXX` comment that is the base64
// payload. Input without a comma is returned unchanged.
function stripComment(sm) {
  var parts = sm.split(',');
  return parts[parts.length - 1];
}
// Wraps a source map supplied in one of several encodings; the flags say how
// to normalize it (strip a sourceMappingURL comment, base64-decode,
// JSON.parse) before storing it on `this.sourcemap`.
function Converter (sourcemap, isEncoded, isJSON, hasComment) {
var sm = sourcemap;
try {
if (hasComment) sm = stripComment(sm);
if (isEncoded) sm = decodeBase64(sm);
if (isJSON || isEncoded) sm = JSON.parse(sm);
this.sourcemap = sm;
} catch(e) {
// NOTE(review): `return null` from a constructor invoked with `new` is
// ignored by JavaScript -- the new instance is returned anyway, with
// `this.sourcemap` left undefined. Parse errors are therefore silently
// swallowed here; confirm whether callers rely on that before changing.
return null;
}
}
// Serialize the underlying map as a JSON string; `space` is forwarded to
// JSON.stringify for optional pretty-printing.
Converter.prototype.toJSON = function (space) {
return JSON.stringify(this.sourcemap, null, space);
};
// Base64-encode the JSON form.
Converter.prototype.toBase64 = function () {
var json = this.toJSON();
return new Buffer(json).toString('base64');
};
// Render as a `//@ sourceMappingURL=...` inline comment.
Converter.prototype.toComment = function () {
var base64 = this.toBase64();
return '//@ sourceMappingURL=data:application/json;base64,' + base64;
};
// returns copy instead of original
Converter.prototype.toObject = function () {
return JSON.parse(this.toJSON());
};
// Add `key` to the source map; throws if the property already exists
// (use setProperty to overwrite). Chainable via setProperty's return value.
Converter.prototype.addProperty = function (key, value) {
  // BUG FIX: the original message contained a literal '%s' that was never
  // interpolated; include the offending key in the error text.
  if (this.sourcemap.hasOwnProperty(key)) {
    throw new Error('property "' + key + '" already exists on the sourcemap, use setProperty instead');
  }
  return this.setProperty(key, value);
};
// Set (add or overwrite) `key` on the underlying map; chainable.
Converter.prototype.setProperty = function (key, value) {
this.sourcemap[key] = value;
return this;
};
// Read `key` from the underlying map.
Converter.prototype.getProperty = function (key) {
return this.sourcemap[key];
};
// Factory helpers: each flag combination (isEncoded, isJSON, hasComment)
// tells the Converter how much normalization the input needs.
exports.fromObject = function (obj) {
return new Converter(obj, false, false, false);
};
exports.fromJSON = function (json) {
return new Converter(json, false, true, false);
};
exports.fromBase64 = function (base64) {
return new Converter(base64, true, false, false);
};
exports.fromComment = function (comment) {
return new Converter(comment, true, false, true);
};
// Finds last sourcemap comment in file or returns null if none was found
exports.fromSource = function (content) {
var m = content.match(commentRx);
// commentRx is a /g regex: reset lastIndex so later calls start from 0.
commentRx.lastIndex = 0;
// String.match with /g returns all matches; the last one wins.
return m ? exports.fromComment(m.pop()) : null;
};
exports.removeComments = function (src) {
commentRx.lastIndex = 0;
return src.replace(commentRx, '');
};
// Expose the regex through a getter so lastIndex is reset on every access.
exports.__defineGetter__('commentRegex', function () {
commentRx.lastIndex = 0;
return commentRx;
});

View File

@@ -0,0 +1,35 @@
{
"name": "convert-source-map",
"version": "0.2.5",
"description": "Converts a source-map from/to different formats and allows adding/changing properties.",
"main": "convert-source-map.js",
"scripts": {
"test": "node-trap test/*.js"
},
"repository": {
"type": "git",
"url": "git://github.com/thlorenz/convert-source-map.git"
},
"homepage": "https://github.com/thlorenz/convert-source-map",
"dependencies": {},
"devDependencies": {
"trap": "~0.4.2",
"inline-source-map": "~0.2.1"
},
"keywords": [],
"author": {
"name": "Thorsten Lorenz",
"email": "thlorenz@gmx.de",
"url": "http://thlorenz.com"
},
"license": "MIT",
"engine": {
"node": ">=0.6"
},
"readme": "# convert-source-map [![build status](https://secure.travis-ci.org/thlorenz/convert-source-map.png)](http://travis-ci.org/thlorenz/convert-source-map)\n\nConverts a source-map from/to different formats and allows adding/changing properties.\n\n```js\nvar convert = require('convert-source-map');\n\nvar json = convert\n .fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')\n .toJSON();\n\nvar modified = convert\n .fromComment('//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZm9vLmpzIiwic291cmNlcyI6WyJjb25zb2xlLmxvZyhcImhpXCIpOyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSIsInNvdXJjZVJvb3QiOiIvIn0=')\n .setProperty('sources', [ 'CONSOLE.LOG(\"HI\");' ])\n .toJSON();\n\nconsole.log(json);\nconsole.log(modified);\n```\n\n```json\n{\"version\":3,\"file\":\"foo.js\",\"sources\":[\"console.log(\\\"hi\\\");\"],\"names\":[],\"mappings\":\"AAAA\",\"sourceRoot\":\"/\"}\n{\"version\":3,\"file\":\"foo.js\",\"sources\":[\"CONSOLE.LOG(\\\"HI\\\");\"],\"names\":[],\"mappings\":\"AAAA\",\"sourceRoot\":\"/\"}\n```\n\n## API\n\n### fromObject(obj)\n\nReturns source map converter from given object.\n\n### fromJSON(json)\n\nReturns source map converter from given json string.\n\n### fromBase64(base64)\n\nReturns source map converter from given base64 encoded json string.\n\n### fromComment()\n\nReturns source map converter from given base64 encoded json string prefixed with `//@ sourceMappintURL=...`.\n\n### fromSource()\n\nFinds last sourcemap comment in file and returns source map converter or returns null if no source map comment was\nfound.\n\n### toObject()\n\nReturns a copy of the underlying source map.\n\n### toJSON([space])\n\nConverts source map to json string. 
If `space` is given (optional), this will be passed to\n[JSON.stringify](https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Global_Objects/JSON/stringify) when the\nJSON string is generated.\n\n### toBase64()\n\nConverts source map to base64 encoded json string.\n\n### toComment()\n\nConverts source map to base64 encoded json string prefixed with `//@ sourceMappingURL=...`.\n\n### addProperty(key, value)\n\nAdds given property to the source map. Throws an error if property already exists.\n\n### setProperty(key, value)\n\nSets given property to the source map. If property doesn't exist it is added, otherwise its value is updated.\n\n### getProperty(key)\n\nGets given property of the source map.\n\n### removeComments(src)\n\nReturns `src` with all source map comments removed\n\n### commentRegex\n\nReturns the regex used to find source map comments.\n",
"_id": "convert-source-map@0.2.5",
"dist": {
"shasum": "bcbcb50f1fba6c9acd0e7c2b5e1bf168b1e41bd4"
},
"_from": "convert-source-map@~0.2.3"
}

View File

@@ -0,0 +1,25 @@
'use strict';
/*jshint asi: true */
// Tests `commentRegex`: strings beginning with a sourceMappingURL comment
// (optionally indented) must match; prefixed/embedded variants must not.
var test = require('trap').test
, generator = require('inline-source-map')
, rx = require('..').commentRegex
// Builds a full comment by appending the sourceMappingURL tail to prefix `s`;
// lastIndex is reset because rx is a stateful /g regex.
function comment(s) {
rx.lastIndex = 0;
return rx.test(s + 'sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9')
}
test('comment regex', function (t) {
[ '//@ '
, ' //@ '
, '\t//@ '
].forEach(function (x) { t.ok(comment(x), 'matches ' + x) })
// if I don't assign this, I get: TypeError: Cannot read property ' @// @' of undefined
var a =
[ '///@ '
, '}}//@ '
, ' @// @'
].forEach(function (x) { t.ok(!comment(x), 'does not match ' + x) })
})

View File

@@ -0,0 +1,129 @@
'use strict';
/*jshint asi: true */

var test = require('trap').test
  , generator = require('inline-source-map')
  , convert = require('..')

// Shared fixtures: a generator holding one explicit and one auto-generated
// set of mappings, plus the same source map in every format under test.
var gen = generator()
  .addMappings('foo.js', [{ original: { line: 2, column: 3 } , generated: { line: 5, column: 10 } }], { line: 5 })
  .addGeneratedMappings('bar.js', 'var a = 2;\nconsole.log(a)', { line: 23, column: 22 })
  , base64 = gen.base64Encode()
  , comment = gen.inlineMappingUrl()
  , json = '{"version":3,"file":"","sources":["foo.js","bar.js"],"names":[],"mappings":";;;;;;;;;UACG;;;;;;;;;;;;;;sBCDH;sBACA"}'
  , obj = JSON.parse(json)
// Round-trip conversions between every supported representation
// (comment <-> base64 <-> json <-> object) must be lossless.
test('different formats', function (t) {
  t.equal(convert.fromComment(comment).toComment(), comment, 'comment -> comment')
  t.equal(convert.fromComment(comment).toBase64(), base64, 'comment -> base64')
  t.equal(convert.fromComment(comment).toJSON(), json, 'comment -> json')
  t.deepEqual(convert.fromComment(comment).toObject(), obj, 'comment -> object')

  t.equal(convert.fromBase64(base64).toBase64(), base64, 'base64 -> base64')
  t.equal(convert.fromBase64(base64).toComment(), comment, 'base64 -> comment')
  t.equal(convert.fromBase64(base64).toJSON(), json, 'base64 -> json')
  t.deepEqual(convert.fromBase64(base64).toObject(), obj, 'base64 -> object')

  t.equal(convert.fromJSON(json).toJSON(), json, 'json -> json')
  t.equal(convert.fromJSON(json).toBase64(), base64, 'json -> base64')
  t.equal(convert.fromJSON(json).toComment(), comment, 'json -> comment')
  t.deepEqual(convert.fromJSON(json).toObject(), obj, 'json -> object')
})

// toObject must hand back a copy, never the converter's internal state.
test('to object returns a copy', function (t) {
  var c = convert.fromJSON(json)
  var o = c.toObject()
  o.version = '99';
  // NOTE(review): compares against '3' (string) while the parsed map holds
  // the number 3 — presumably trap's t.equal is a loose (==) comparison; verify.
  t.equal(c.toObject().version, '3', 'setting property on returned object does not affect original')
})

// fromSource must locate the LAST source map comment wherever it sits.
test('from source', function (t) {
  var foo = [
      'function foo() {'
    , ' console.log("hello I am foo");'
    , ' console.log("who are you");'
    , '}'
    , ''
    , 'foo();'
    , ''
  ].join('\n')
  , map = '//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9'
  , otherMap = '//@ sourceMappingURL=data:application/json;base64,otherZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9'

  // null when no map was found, the map's own comment form otherwise.
  function getComment(src) {
    var map = convert.fromSource(src);
    return map ? map.toComment() : null;
  }

  t.equal(getComment(foo), null, 'no comment returns null')
  t.equal(getComment(foo + map), map, 'beginning of last line')
  t.equal(getComment(foo + ' ' + map), map, 'indented of last line')
  t.equal(getComment(foo + ' ' + map + '\n\n'), map, 'indented on last non empty line')
  t.equal(getComment(foo + map + '\nconsole.log("more code");\nfoo()\n'), map, 'in the middle of code')
  t.equal(getComment(foo + otherMap + '\n' + map), map, 'finds last map in source')
})

// removeComments must strip every source map comment, wherever it sits.
test('remove comments', function (t) {
  var foo = [
      'function foo() {'
    , ' console.log("hello I am foo");'
    , ' console.log("who are you");'
    , '}'
    , ''
    , 'foo();'
    , ''
  ].join('\n')
  , map = '//@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9'
  , otherMap = '//@ sourceMappingURL=data:application/json;base64,otherZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmdW5jdGlvbiBmb28oKSB7XG4gY29uc29sZS5sb2coXCJoZWxsbyBJIGFtIGZvb1wiKTtcbiBjb25zb2xlLmxvZyhcIndobyBhcmUgeW91XCIpO1xufVxuXG5mb28oKTtcbiJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiQUFBQSJ9'
  , extraCode = '\nconsole.log("more code");\nfoo()\n'

  t.equal(convert.removeComments(foo + map), foo, 'from last line')
  t.equal(convert.removeComments(foo + map + extraCode), foo + extraCode, 'from the middle of code')
  t.equal(convert.removeComments(foo + otherMap + extraCode + map + map), foo + extraCode, 'multiple comments from the middle of code')
})
// toJSON forwards its optional `space` argument to JSON.stringify.
test('pretty json', function (t) {
  var mod = convert.fromJSON(json).toJSON(2);
  t.equal(
      mod
    , '{\n  "version": 3,\n  "file": "",\n  "sources": [\n    "foo.js",\n    "bar.js"\n  ],\n  "names": [],\n  "mappings": ";;;;;;;;;UACG;;;;;;;;;;;;;;sBCDH;sBACA"\n}'
      // fixed assertion-message typo: "whe" -> "when"
    , 'pretty prints json when space is given')
})
// addProperty must append a brand-new key at the end of the map.
test('adding properties', function (t) {
  var mod = convert
    .fromJSON(json)
    .addProperty('foo', 'bar')
    .toJSON()
  t.equal(
      mod
    , '{"version":3,"file":"","sources":["foo.js","bar.js"],"names":[],"mappings":";;;;;;;;;UACG;;;;;;;;;;;;;;sBCDH;sBACA","foo":"bar"}'
    , 'includes added property'
  )
})

// setProperty must overwrite existing keys and add missing ones.
test('setting properties', function (t) {
  var mod = convert
    .fromJSON(json)
    .setProperty('version', '2')
    .setProperty('mappings', ';;;UACG')
    .setProperty('should add', 'this')
    .toJSON()
  t.equal(
      mod
    , '{"version":"2","file":"","sources":["foo.js","bar.js"],"names":[],"mappings":";;;UACG","should add":"this"}'
    , 'includes new property and changes existing properties'
  )
})

// getProperty reads single values out of the parsed map.
test('getting properties', function (t) {
  var sm = convert.fromJSON(json)
  t.equal(sm.getProperty('version'), 3, 'gets version')
  t.deepEqual(sm.getProperty('sources'), ['foo.js', 'bar.js'], 'gets sources')
})

View File

@@ -0,0 +1,16 @@
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
pids
logs
results
node_modules
npm-debug.log
tmp

View File

@@ -0,0 +1,4 @@
language: node_js
node_js:
- 0.6
- 0.8

View File

@@ -0,0 +1,23 @@
Copyright 2013 Thorsten Lorenz.
All rights reserved.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,88 @@
# inline-source-map [![build status](https://secure.travis-ci.org/thlorenz/inline-source-map.png)](http://travis-ci.org/thlorenz/inline-source-map)
Adds source mappings and base64 encodes them, so they can be inlined in your generated file.
```js
var generator = require('inline-source-map');
var gen = generator()
.addMappings('foo.js', [{ original: { line: 2, column: 3 } , generated: { line: 5, column: 10 } }], { line: 5 })
.addGeneratedMappings('bar.js', 'var a = 2;\nconsole.log(a)', { line: 23, column: 22 });
console.log('base64 mapping:', gen.base64Encode());
console.log('inline mapping url:', gen.inlineMappingUrl());
```
```
base64 mapping: eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmb28uanMiLCJiYXIuanMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O1VBQ0c7Ozs7Ozs7Ozs7Ozs7O3NCQ0RIO3NCQUNBIn0=
inline mapping url: //@ sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiIiwic291cmNlcyI6WyJmb28uanMiLCJiYXIuanMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O1VBQ0c7Ozs7Ozs7Ozs7Ozs7O3NCQ0RIO3NCQUNBIn0=
```
## API
### addMappings(sourceFile, mappings, offset)
```
/**
* Adds the given mappings to the generator and offsets them if offset is given
*
* @name addMappings
* @function
* @param sourceFile {String} name of the source file
* @param mappings {Array{{Object}} each object has the form { original: { line: _, column: _ }, generated: { line: _, column: _ } }
* @param offset {Object} offset to apply to each mapping. Has the form { line: _, column: _ }
* @return {Object} the generator to allow chaining
*/
```
### addGeneratedMappings(sourceFile, source, offset)
```
/**
* Generates mappings for the given source and adds them, assuming that no translation from original to generated is necessary.
*
* @name addGeneratedMappings
* @function
* @param sourceFile {String} name of the source file
* @param source {String} source of the file
* @param offset {Object} offset to apply to each mapping. Has the form { line: _, column: _ }
* @return {Object} the generator to allow chaining
*/
```
### addSourceContent(sourceFile, sourceContent)
```
/**
* Adds source content for the given source file.
*
* @name addSourceContent
* @function
* @param sourceFile {String} The source file for which a mapping is included
* @param sourceContent {String} The content of the source file
* @return {Object} The generator to allow chaining
*/
```
### base64Encode()
```
/**
* @name base64Encode
* @function
* @return {String} base64 encoded representation of the added mappings
*/
```
If source contents were added, this will be included in the encoded mappings.
### inlineMappingUrl()
```
/**
* @name inlineMappingUrl
* @function
* @return {String} comment with base64 encoded representation of the added mappings. Can be inlined at the end of the generated file.
*/
```

View File

@@ -0,0 +1,8 @@
// Minimal usage example: build a map with one explicit and one auto-generated
// set of mappings, then print the two inline-able representations.
var generator = require('..');

var gen = generator()
  .addMappings('foo.js', [{ original: { line: 2, column: 3 } , generated: { line: 5, column: 10 } }], { line: 5 })
  .addGeneratedMappings('bar.js', 'var a = 2;\nconsole.log(a)', { line: 23, column: 22 });

console.log('base64 mapping', gen.base64Encode());
console.log('inline mapping url', gen.inlineMappingUrl());

View File

@@ -0,0 +1,126 @@
'use strict';
var SourceMapGenerator = require('source-map').SourceMapGenerator;
// Shift a { line, column } position by the given offset.
function offsetMapping(mapping, offset) {
  var line = offset.line + mapping.line;
  var column = offset.column + mapping.column;
  return { line: line, column: column };
}
// Count '\n' characters in src; empty or missing input counts as zero.
function newlinesIn(src) {
  if (!src) return 0;
  var found = src.match(/\n/g);
  return found === null ? 0 : found.length;
}
/**
 * Thin wrapper around mozilla's SourceMapGenerator that can emit the map as
 * JSON, base64, or an inlineable sourceMappingURL comment.
 *
 * @name Generator
 * @function
 * @param opts {Object} (optional) supports `file` and `sourceRoot`, both defaulting to ''
 */
function Generator(opts) {
  opts = opts || {};
  this.generator = new SourceMapGenerator({ file: opts.file || '', sourceRoot: opts.sourceRoot || '' });
  // sourceFile -> content; stays undefined until addSourceContent is called
  // so toJSON can cheaply skip the sourcesContent field.
  this.sourcesContent = undefined;
}

/**
 * Adds the given mappings to the generator and offsets them if offset is given
 *
 * @name addMappings
 * @function
 * @param sourceFile {String} name of the source file
 * @param mappings {Array.<Object>} each object has the form { original: { line: _, column: _ }, generated: { line: _, column: _ } }
 * @param offset {Object} offset to apply to each mapping. Has the form { line: _, column: _ }
 * @return {Object} the generator to allow chaining
 */
Generator.prototype.addMappings = function (sourceFile, mappings, offset) {
  var generator = this.generator;
  // Fill in defaults on a local object instead of writing them back onto the
  // caller's argument (the previous implementation mutated `offset`).
  var given = offset || {};
  var normalizedOffset = {
      line   : given.hasOwnProperty('line') ? given.line : 0
    , column : given.hasOwnProperty('column') ? given.column : 0
  };

  mappings.forEach(function (m) {
    generator.addMapping({
        source    : sourceFile
      , original  : m.original
      , generated : offsetMapping(m.generated, normalizedOffset)
    });
  });
  return this;
};

/**
 * Generates mappings for the given source, assuming that no translation from original to generated is necessary.
 *
 * @name addGeneratedMappings
 * @function
 * @param sourceFile {String} name of the source file
 * @param source {String} source of the file
 * @param offset {Object} offset to apply to each mapping. Has the form { line: _, column: _ }
 * @return {Object} the generator to allow chaining
 */
Generator.prototype.addGeneratedMappings = function (sourceFile, source, offset) {
  var mappings = []
    , linesToGenerate = newlinesIn(source) + 1;

  // One identity (original === generated) mapping per line; lines are 1-based.
  for (var line = 1; line <= linesToGenerate; line++) {
    var location = { line: line, column: 0 };
    mappings.push({ original: location, generated: location });
  }

  return this.addMappings(sourceFile, mappings, offset);
};

/**
 * Adds source content for the given source file.
 *
 * @name addSourceContent
 * @function
 * @param sourceFile {String} The source file for which a mapping is included
 * @param sourcesContent {String} The content of the source file
 * @return {Object} The generator to allow chaining
 */
Generator.prototype.addSourceContent = function (sourceFile, sourcesContent) {
  this.sourcesContent = this.sourcesContent || {};
  this.sourcesContent[sourceFile] = sourcesContent;
  return this;
};

/**
 * @name base64Encode
 * @function
 * @return {String} base64 encoded representation of the added mappings
 */
Generator.prototype.base64Encode = function () {
  // `new Buffer` is deprecated in modern node, but is kept here because this
  // package still targets legacy node versions (see .travis.yml) — confirm
  // before switching to Buffer.from.
  var map = this.toString();
  return new Buffer(map).toString('base64');
};

/**
 * @name inlineMappingUrl
 * @function
 * @return {String} comment with base64 encoded representation of the added mappings. Can be inlined at the end of the generated file.
 */
Generator.prototype.inlineMappingUrl = function () {
  return '//@ sourceMappingURL=data:application/json;base64,' + this.base64Encode();
};

/**
 * @return {Object} the raw source map; includes a sourcesContent array
 *                  (aligned with sources, missing files as null) when any
 *                  content was registered via addSourceContent
 */
Generator.prototype.toJSON = function () {
  var map = this.generator.toJSON();
  if (!this.sourcesContent) return map;

  var toSourcesContent = (function (s) { return this.sourcesContent[s] || null; }).bind(this);
  map.sourcesContent = map.sources.map(toSourcesContent);
  return map;
};

// JSON.stringify picks up the toJSON hook above, so this serializes the map.
Generator.prototype.toString = function () {
  return JSON.stringify(this);
};

// Exposed for tests/debugging: the underlying generator's raw mappings.
Generator.prototype._mappings = function () {
  return this.generator._mappings;
};

// Exposed for tests/debugging: the wrapped SourceMapGenerator instance.
Generator.prototype.gen = function () {
  return this.generator;
};
// Factory: `require('inline-source-map')(opts)` returns a fresh Generator.
module.exports = function (opts) { return new Generator(opts); };
// The constructor is exposed for consumers that want to type-check or extend.
module.exports.Generator = Generator;

View File

@@ -0,0 +1,28 @@
Copyright (c) 2009-2011, Mozilla Foundation and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names of the Mozilla Foundation nor the names of project
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -0,0 +1,166 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
var path = require('path');
var fs = require('fs');
var copy = require('dryice').copy;
// Strip amdefine boilerplate so the bundled output has no node-only shim.
function removeAmdefine(src) {
  // Drop the `if (typeof define !== 'function') ...` amdefine preamble.
  var out = String(src).replace(
    /if\s*\(typeof\s*define\s*!==\s*'function'\)\s*{\s*var\s*define\s*=\s*require\('amdefine'\)\(module\);\s*}\s*/g,
    '');
  // Drop 'amdefine' entries from define(...) dependency lists.
  return out.replace(/\b(define\(.*)('amdefine',?)/gm, '$1');
}
// dryice flag: presumably applies this filter as each file is read — confirm
// against the dryice documentation.
removeAmdefine.onRead = true;
// Rewrite relative requires into 'source-map/...' module ids and drop
// '../../lib/' prefixes so modules resolve inside the flat bundle namespace.
function makeNonRelative(src) {
  var prefixed = src.replace(/require\('.\//g, 'require(\'source-map/');
  return prefixed.replace(/\.\.\/\.\.\/lib\//g, '');
}
makeNonRelative.onRead = true;
// Bundle the library for plain browsers: mini-require loader + the three
// public modules + the browser suffix, with amdefine boilerplate stripped.
function buildBrowser() {
  console.log('\nCreating dist/source-map.js');

  var project = copy.createCommonJsProject({
    roots: [ path.join(__dirname, 'lib') ]
  });

  copy({
    source: [
      'build/mini-require.js',
      {
        project: project,
        require: [ 'source-map/source-map-generator',
                   'source-map/source-map-consumer',
                   'source-map/source-node']
      },
      'build/suffix-browser.js'
    ],
    filter: [
      copy.filter.moduleDefines,
      removeAmdefine
    ],
    dest: 'dist/source-map.js'
  });
}
// Minify the bundle produced by buildBrowser() with dryice's uglifyjs filter;
// must therefore run after buildBrowser().
function buildBrowserMin() {
  console.log('\nCreating dist/source-map.min.js');

  copy({
    source: 'dist/source-map.js',
    filter: copy.filter.uglifyjs,
    dest: 'dist/source-map.min.js'
  });
}
// Build the Firefox artifacts: the SourceMap.jsm module, the shared test
// utility module, and one rewritten copy of each test file under dist/test.
function buildFirefox() {
  // 1. The main JSM: prefix + the three public modules + suffix, with
  //    amdefine stripped and relative requires made absolute.
  console.log('\nCreating dist/SourceMap.jsm');

  var project = copy.createCommonJsProject({
    roots: [ path.join(__dirname, 'lib') ]
  });

  copy({
    source: [
      'build/prefix-source-map.jsm',
      {
        project: project,
        require: [ 'source-map/source-map-consumer',
                   'source-map/source-map-generator',
                   'source-map/source-node' ]
      },
      'build/suffix-source-map.jsm'
    ],
    filter: [
      copy.filter.moduleDefines,
      removeAmdefine,
      makeNonRelative
    ],
    dest: 'dist/SourceMap.jsm'
  });

  // 2. The shared test utilities, bundled with the assert shim.
  // Create dist/test/Utils.jsm
  console.log('\nCreating dist/test/Utils.jsm');

  project = copy.createCommonJsProject({
    roots: [ __dirname, path.join(__dirname, 'lib') ]
  });

  copy({
    source: [
      'build/prefix-utils.jsm',
      'build/assert-shim.js',
      {
        project: project,
        require: [ 'test/source-map/util' ]
      },
      'build/suffix-utils.jsm'
    ],
    filter: [
      copy.filter.moduleDefines,
      removeAmdefine,
      makeNonRelative
    ],
    dest: 'dist/test/Utils.jsm'
  });

  // Only files named test-*.js are picked up as tests.
  function isTestFile(f) {
    return /^test\-.*?\.js/.test(f);
  }

  var testFiles = fs.readdirSync(path.join(__dirname, 'test', 'source-map')).filter(isTestFile);

  // 3. Each test file is wrapped in prefix/suffix, given an explicit AMD
  //    module id, and written out with '-' replaced by '_' in its name.
  testFiles.forEach(function (testFile) {
    console.log('\nCreating', path.join('dist', 'test', testFile.replace(/\-/g, '_')));

    copy({
      source: [
        'build/test-prefix.js',
        path.join('test', 'source-map', testFile),
        'build/test-suffix.js'
      ],
      filter: [
        removeAmdefine,
        makeNonRelative,
        // Name the anonymous define() after the test's path so the AMD
        // loader can address it.
        function (input, source) {
          return input.replace('define(',
            'define("'
            + path.join('test', 'source-map', testFile.replace(/\.js$/, ''))
            + '", ["require", "exports", "module"], ');
        },
        // Substitute the {THIS_MODULE} placeholder from test-prefix/suffix.
        function (input, source) {
          return input.replace('{THIS_MODULE}', function () {
            return "test/source-map/" + testFile.replace(/\.js$/, '');
          });
        }
      ],
      dest: path.join('dist', 'test', testFile.replace(/\-/g, '_'))
    });
  });
}
/**
 * Create directory `name` unless it already exists.
 *
 * The original implementation stat'ed first and then mkdir'ed, which races
 * with concurrent creators (TOCTOU). Here mkdir is attempted first and the
 * error is only re-thrown when `name` still is not a directory afterwards.
 */
function ensureDir(name) {
  try {
    // 511 === 0777; the legacy octal literal form is rejected in strict mode.
    fs.mkdirSync(name, 511);
  } catch (err) {
    var isDir = false;
    try {
      isDir = fs.statSync(name).isDirectory();
    } catch (ignored) {}
    if (!isDir) {
      throw err;
    }
  }
}
// Build everything into dist/: the Firefox JSM first (it also generates the
// test harness files), then the plain and minified browser bundles.
ensureDir("dist");
ensureDir("dist/test");
buildFirefox();
buildBrowser();
buildBrowserMin();

View File

@@ -0,0 +1,321 @@
# Source Map
This is a library to generate and consume the source map format
[described here][format].
[Learn more here][feature].
This library was written in the Asynchronous Module Definition
format. It should work in the following environments:
* Modern Browsers (either after the build, or with an AMD loader such as
RequireJS)
* Inside Firefox (as a JSM file, after the build)
* With NodeJS versions 0.8.X and higher
## Installing with NPM (for use with NodeJS)
Simply
$ npm install source-map
Or, if you'd like to hack on this library and have it installed via npm so you
can try out your changes:
$ git clone https://fitzgen@github.com/mozilla/source-map.git
$ cd source-map
$ npm link .
## Building from Source (for everywhere else)
Install Node and then run
$ git clone https://fitzgen@github.com/mozilla/source-map.git
$ cd source-map
$ npm link .
Next, run
    $ node Makefile.dryice.js
This should create the following files:
* `dist/source-map.js` - The unminified browser version.
* `dist/source-map.min.js` - The minified browser version.
* `dist/SourceMap.jsm` - The JavaScript Module for inclusion in Firefox
source.
## API
Get a reference to the module:
// NodeJS
var sourceMap = require('source-map');
// Browser builds
var sourceMap = window.sourceMap;
// Inside Firefox
let sourceMap = {};
Components.utils.import('resource:///modules/devtools/SourceMap.jsm', sourceMap);
### SourceMapConsumer
A SourceMapConsumer instance represents a parsed source map which we can query
for information about the original file positions by giving it a file position
in the generated source.
#### new SourceMapConsumer(rawSourceMap)
The only parameter is the raw source map (either as a string which can be
`JSON.parse`'d, or an object). According to the spec, source maps have the
following attributes:
* `version`: Which version of the source map spec this map is following.
* `sources`: An array of URLs to the original source files.
* `names`: An array of identifiers which can be referenced by individual
mappings.
* `sourceRoot`: Optional. The URL root from which all sources are relative.
* `sourcesContent`: Optional. An array of contents of the original source files.
* `mappings`: A string of base64 VLQs which contain the actual mappings.
* `file`: The generated filename this source map is associated with.
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition)
Returns the original source, line, and column information for the generated
source's line and column positions provided. The only argument is an object with
the following properties:
* `line`: The line number in the generated source.
* `column`: The column number in the generated source.
and an object is returned with the following properties:
* `source`: The original source file, or null if this information is not
available.
* `line`: The line number in the original source, or null if this information is
not available.
* `column`: The column number in the original source, or null if this
information is not available.
* `name`: The original identifier, or null if this information is not available.
#### SourceMapConsumer.prototype.generatedPositionFor(originalPosition)
Returns the generated line and column information for the original source,
line, and column positions provided. The only argument is an object with
the following properties:
* `source`: The filename of the original source.
* `line`: The line number in the original source.
* `column`: The column number in the original source.
and an object is returned with the following properties:
* `line`: The line number in the generated source, or null.
* `column`: The column number in the generated source, or null.
#### SourceMapConsumer.prototype.sourceContentFor(source)
Returns the original source content for the source provided. The only
argument is the URL of the original source file.
#### SourceMapConsumer.prototype.eachMapping(callback, context, order)
Iterate over each mapping between an original source/line/column and a
generated line/column in this source map.
* `callback`: The function that is called with each mapping.
* `context`: Optional. If specified, this object will be the value of `this`
every time that `callback` is called.
* `order`: Either `SourceMapConsumer.GENERATED_ORDER` or
`SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to iterate over
the mappings sorted by the generated file's line/column order or the
original's source/line/column order, respectively. Defaults to
`SourceMapConsumer.GENERATED_ORDER`.
### SourceMapGenerator
An instance of the SourceMapGenerator represents a source map which is being
built incrementally.
#### new SourceMapGenerator(startOfSourceMap)
To create a new one, you must pass an object with the following properties:
* `file`: The filename of the generated source that this source map is
associated with.
* `sourceRoot`: An optional root for all relative URLs in this source map.
#### SourceMapGenerator.fromSourceMap(sourceMapConsumer)
Creates a new SourceMapGenerator based on a SourceMapConsumer
* `sourceMapConsumer` The SourceMap.
#### SourceMapGenerator.prototype.addMapping(mapping)
Add a single mapping from original source line and column to the generated
source's line and column for this source map being created. The mapping object
should have the following properties:
* `generated`: An object with the generated line and column positions.
* `original`: An object with the original line and column positions.
* `source`: The original source file (relative to the sourceRoot).
* `name`: An optional original token name for this mapping.
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent)
Set the source content for an original source file.
* `sourceFile` the URL of the original source file.
* `sourceContent` the content of the source file.
#### SourceMapGenerator.prototype.applySourceMap(sourceMapConsumer[, sourceFile])
Applies a SourceMap for a source file to the SourceMap.
Each mapping to the supplied source file is rewritten using the
supplied SourceMap. Note: The resolution for the resulting mappings
is the minimum of this map and the supplied map.
* `sourceMapConsumer`: The SourceMap to be applied.
* `sourceFile`: Optional. The filename of the source file.
If omitted, sourceMapConsumer.file will be used.
#### SourceMapGenerator.prototype.toString()
Renders the source map being generated to a string.
### SourceNode
SourceNodes provide a way to abstract over interpolating and/or concatenating
snippets of generated JavaScript source code, while maintaining the line and
column information associated between those snippets and the original source
code. This is useful as the final intermediate representation a compiler might
use before outputting the generated JS and source map.
#### new SourceNode(line, column, source[, chunk[, name]])
* `line`: The original line number associated with this source node, or null if
it isn't associated with an original line.
* `column`: The original column number associated with this source node, or null
if it isn't associated with an original column.
* `source`: The original source's filename.
* `chunk`: Optional. Is immediately passed to `SourceNode.prototype.add`, see
below.
* `name`: Optional. The original identifier.
#### SourceNode.prototype.add(chunk)
Add a chunk of generated JS to this source node.
* `chunk`: A string snippet of generated JS code, another instance of
`SourceNode`, or an array where each member is one of those things.
#### SourceNode.prototype.prepend(chunk)
Prepend a chunk of generated JS to this source node.
* `chunk`: A string snippet of generated JS code, another instance of
`SourceNode`, or an array where each member is one of those things.
#### SourceNode.prototype.walk(fn)
Walk over the tree of JS snippets in this node and its children. The walking
function is called once for each snippet of JS and is passed that snippet and
its original associated source's line/column location.
* `fn`: The traversal function.
#### SourceNode.prototype.join(sep)
Like `Array.prototype.join` except for SourceNodes. Inserts the separator
between each of this source node's children.
* `sep`: The separator.
#### SourceNode.prototype.replaceRight(pattern, replacement)
Call `String.prototype.replace` on the very right-most source snippet. Useful
for trimming whitespace from the end of a source node, etc.
* `pattern`: The pattern to replace.
* `replacement`: The thing to replace the pattern with.
#### SourceNode.prototype.toString()
Return the string representation of this source node. Walks over the tree and
concatenates all the various snippets together to one string.
### SourceNode.prototype.toStringWithSourceMap(startOfSourceMap)
Returns the string representation of this tree of source nodes, plus a
SourceMapGenerator which contains all the mappings between the generated and
original sources.
The arguments are the same as those to `new SourceMapGenerator`.
## Tests
Install NodeJS version 0.8.0 or greater, then run `node test/run-tests.js`.
To add new tests, create a new file named `test/test-<your new test name>.js`
and export your test functions with names that start with "test", for example
exports["test doing the foo bar"] = function (assert, util) {
...
};
The new test will be located automatically when you run the suite.
The `util` argument is the test utility module located at `test/source-map/util`.
The `assert` argument is a cut down version of node's assert module. You have
access to the following assertion functions:
* `doesNotThrow`
* `equal`
* `ok`
* `strictEqual`
* `throws`
(The reason for the restricted set of test functions is because we need the
tests to run inside Firefox's test suite as well and so the assert module is
shimmed in that environment. See `build/assert-shim.js`.)
[format]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit
[feature]: https://wiki.mozilla.org/DevTools/Features/SourceMap
[Dryice]: https://github.com/mozilla/dryice

View File

@@ -0,0 +1,56 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
// AMD shim for a minimal subset of node's `assert`, used when the test suite
// runs inside Firefox's harness (bundled by Makefile.dryice.js).
define('test/source-map/assert', ['exports'], function (exports) {

  // How failures are reported; the harness replaces this via init().
  // (`let` outside strict ES is fine here: this file targets SpiderMonkey.)
  let do_throw = function (msg) {
    throw new Error(msg);
  };

  // Lets the Firefox harness hook failures into its own reporter.
  exports.init = function (throw_fn) {
    do_throw = throw_fn;
  };

  exports.doesNotThrow = function (fn) {
    try {
      fn();
    }
    catch (e) {
      do_throw(e.message);
    }
  };

  // Loose (==) comparison, mirroring node's assert.equal.
  exports.equal = function (actual, expected, msg) {
    msg = msg || String(actual) + ' != ' + String(expected);
    if (actual != expected) {
      do_throw(msg);
    }
  };

  exports.ok = function (val, msg) {
    msg = msg || String(val) + ' is falsey';
    if (!Boolean(val)) {
      do_throw(msg);
    }
  };

  // Strict (===) comparison, mirroring node's assert.strictEqual.
  exports.strictEqual = function (actual, expected, msg) {
    msg = msg || String(actual) + ' !== ' + String(expected);
    if (actual !== expected) {
      do_throw(msg);
    }
  };

  // NOTE(review): when fn does NOT throw, the default do_throw's Error is
  // caught by this very catch and swallowed — this presumably relies on the
  // harness-installed do_throw aborting the test instead of throwing; verify.
  exports.throws = function (fn) {
    try {
      fn();
      do_throw('Expected an error to be thrown, but it wasn\'t.');
    }
    catch (e) {
    }
  };

});

View File

@@ -0,0 +1,152 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
/**
* Define a module along with a payload.
* @param {string} moduleName Name for the payload
* @param {ignored} deps Ignored. For compatibility with CommonJS AMD Spec
* @param {function} payload Function with (require, exports, module) params
*/
/**
 * Define a module along with a payload.
 * @param {string} moduleName Name for the payload
 * @param {ignored} deps Ignored. For compatibility with CommonJS AMD Spec
 * @param {function} payload Function with (require, exports, module) params
 */
function define(moduleName, deps, payload) {
  if (typeof moduleName != "string") {
    throw new TypeError('Expected string, got: ' + moduleName);
  }

  // Two-argument form define(name, payload): deps were omitted.
  payload = arguments.length == 2 ? deps : payload;

  if (moduleName in define.modules) {
    throw new Error("Module already defined: " + moduleName);
  }
  define.modules[moduleName] = payload;
}

/**
 * The global store of un-instantiated modules
 */
define.modules = {};
/**
 * We invoke require() in the context of a Domain so we can have multiple
 * sets of modules running separate from each other.
 * Each Domain keeps its own cache of instantiated modules, so — unlike
 * singleton JSMs — the same payload can be loaded twice with separate data
 * (for example, two command lines with different command sets).
 */
function Domain() {
  // Instantiated modules for this domain, keyed by resolved module name.
  this.modules = {};
  // Name of the module currently being instantiated; used to resolve
  // relative requires. null while no payload is executing.
  this._currentModule = null;
}
(function () {
/**
 * Look up one or more module names, instantiating payloads as needed.
 * Two calling conventions, compatible with the CommonJS AMD spec and
 * RequireJS:
 *  - require(names, callback): resolve every name in the array, then invoke
 *    callback with the resolved modules as arguments; returns undefined.
 *  - require(name): resolve a single name synchronously and return it.
 * @param {string[]|string} deps A name, or names for the payload
 * @param {function|undefined} callback Function to call when the dependencies
 * are resolved
 * @return {undefined|object} The module required or undefined for
 * array/callback method
 */
Domain.prototype.require = function (deps, callback) {
  if (!Array.isArray(deps)) {
    // Single-name form: synchronous resolve-and-return.
    return this.lookup(deps);
  }
  const resolved = [];
  for (const dep of deps) {
    resolved.push(this.lookup(dep));
  }
  if (callback) {
    callback.apply(null, resolved);
  }
  return undefined;
};
/**
 * Collapse '.' and '..' segments in a slash-separated path.
 * The scan starts at index 1 so a leading '' (absolute path) or leading
 * segment is never spliced away on its own.
 * Bug fix: the original removed only ONE element for a '..' segment
 * (splice(i-1, 1)), which deleted the preceding segment but left the '..'
 * in the result — e.g. 'a/b/../c' became 'a/../c' instead of 'a/c'. We now
 * remove both the preceding segment and the '..' itself, and step back one
 * position so consecutive '..' chains ('a/b/../../c') also collapse.
 * @param {string} path
 * @return {string} The normalized path.
 */
function normalize(path) {
  var bits = path.split('/');
  var i = 1;
  while (i < bits.length) {
    if (bits[i] === '..') {
      // Drop the previous segment AND the '..'.
      bits.splice(i - 1, 2);
      // Re-examine from one position earlier, never before index 1.
      if (i > 1) {
        i--;
      }
    } else if (bits[i] === '.') {
      bits.splice(i, 1);
    } else {
      i++;
    }
  }
  return bits.join('/');
}
/**
 * Join two path fragments. Both sides are trimmed; an absolute second
 * fragment (leading '/') wins outright, otherwise any trailing slashes on
 * the left side are collapsed to exactly one before appending.
 * @param {string} a Base path fragment.
 * @param {string} b Path fragment to append.
 * @return {string} The joined path.
 */
function join(a, b) {
  const left = a.trim();
  const right = b.trim();
  if (right.charAt(0) === '/') {
    return right;
  }
  return left.replace(/\/*$/, '/') + right;
}
/**
 * Strip the final path segment: 'a/b/c' -> 'a/b', 'c' -> ''.
 * @param {string} path
 * @return {string} Everything before the last '/', or '' if there is none.
 */
function dirname(path) {
  const cut = path.lastIndexOf('/');
  return cut === -1 ? '' : path.slice(0, cut);
}
/**
 * Resolve a single module name to its instantiated module object.
 * Relative names ('./x', '../x') are resolved against the module currently
 * being instantiated. The first lookup runs the payload function with a
 * CommonJS-style (require, exports, module) environment; the result is
 * cached on this Domain for subsequent lookups.
 * @param {string} moduleName A name for the payload to lookup
 * @return {object} The module specified by moduleName.
 * @throws {Error} when no payload was define()d under the resolved name.
 */
Domain.prototype.lookup = function (moduleName) {
  let name = moduleName;
  if (name.charAt(0) === '.') {
    name = normalize(join(dirname(this._currentModule), name));
  }
  // Already instantiated in this Domain?
  if (name in this.modules) {
    return this.modules[name];
  }
  if (!(name in define.modules)) {
    throw new Error("Module not defined: " + name);
  }
  let module = define.modules[name];
  if (typeof module === "function") {
    // Run the payload, tracking the current module name so relative
    // requires made during instantiation resolve correctly.
    const exports = {};
    const previous = this._currentModule;
    this._currentModule = name;
    module(this.require.bind(this), exports, { id: name, uri: "" });
    this._currentModule = previous;
    module = exports;
  }
  // Cache the resulting module object for next time.
  this.modules[name] = module;
  return module;
};
}());
// Expose the Domain constructor and a shared default domain; the file-level
// require() below resolves modules against that global domain.
define.Domain = Domain;
define.globalDomain = new Domain();
var require = define.globalDomain.require.bind(define.globalDomain);

View File

@@ -0,0 +1,20 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
/*
* WARNING!
*
* Do not edit this file directly, it is built from the sources at
* https://github.com/mozilla/source-map/
*/
///////////////////////////////////////////////////////////////////////////////
// Names this JSM makes available to Components.utils.import() callers
// (Mozilla JavaScript-module convention).
var EXPORTED_SYMBOLS = [ "SourceMapConsumer", "SourceMapGenerator", "SourceNode" ];
// Load the mini CommonJS loader (define/require) — presumably the bundled
// source-map modules below register themselves with it; verify in the build.
Components.utils.import('resource://gre/modules/devtools/Require.jsm');

View File

@@ -0,0 +1,18 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
/*
* WARNING!
*
* Do not edit this file directly, it is built from the sources at
* https://github.com/mozilla/source-map/
*/
// Load the mini CommonJS loader and the source-map API this file builds on.
Components.utils.import('resource://gre/modules/devtools/Require.jsm');
Components.utils.import('resource://gre/modules/devtools/SourceMap.jsm');
// Names this JSM makes available to Components.utils.import() callers
// (Mozilla JavaScript-module convention).
let EXPORTED_SYMBOLS = [ "define", "runSourceMapTests" ];

View File

@@ -0,0 +1,8 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
///////////////////////////////////////////////////////////////////////////////
// Browser entry point: expose the public source-map API on the global
// window object under a single namespace.
window.sourceMap = {
  SourceMapConsumer: require('source-map/source-map-consumer').SourceMapConsumer,
  SourceMapGenerator: require('source-map/source-map-generator').SourceMapGenerator,
  SourceNode: require('source-map/source-node').SourceNode
};

View File

@@ -0,0 +1,6 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
///////////////////////////////////////////////////////////////////////////////
// Convenience top-level bindings for the three public source-map classes.
let SourceMapConsumer = require('source-map/source-map-consumer').SourceMapConsumer;
let SourceMapGenerator = require('source-map/source-map-generator').SourceMapGenerator;
let SourceNode = require('source-map/source-node').SourceNode;

Some files were not shown because too many files have changed in this diff Show More