mirror of
https://github.com/electron/node-gyp.git
synced 2025-09-15 21:53:38 +02:00
Commit node_modules.
For @billywhizz :) And cause it's just an all around good idea for command-line apps.
This commit is contained in:
parent
d39620999f
commit
24bde139e1
255 changed files with 20261 additions and 0 deletions
1
node_modules/.bin/nopt
generated
vendored
Symbolic link
1
node_modules/.bin/nopt
generated
vendored
Symbolic link
|
@ -0,0 +1 @@
|
|||
../nopt/bin/nopt.js
|
1
node_modules/.bin/which
generated
vendored
Symbolic link
1
node_modules/.bin/which
generated
vendored
Symbolic link
|
@ -0,0 +1 @@
|
|||
../which/bin/which
|
0
.gitignore → node_modules/ansi/.npmignore
generated
vendored
0
.gitignore → node_modules/ansi/.npmignore
generated
vendored
89
node_modules/ansi/README.md
generated
vendored
Normal file
89
node_modules/ansi/README.md
generated
vendored
Normal file
|
@ -0,0 +1,89 @@
|
|||
ansi.js
|
||||
=========
|
||||
### Advanced ANSI formatting tool for Node.js
|
||||
|
||||

|
||||
|
||||
`ansi.js` is a module for Node.js that provides an easy-to-use API for
|
||||
writing ANSI escape codes to `Stream` instances. ANSI escape codes are used to do
|
||||
fancy things in a terminal window, like render text in colors, delete characters,
|
||||
lines, the entire window, or hide and show the cursor, and lots more!
|
||||
|
||||
The code for the example in the screenshot above can be found in the `examples`
|
||||
directory.
|
||||
|
||||
#### Features:
|
||||
|
||||
* 256 color support for the terminal!
|
||||
* Works with *any* writable `Stream` instance.
|
||||
* Allows you to move the cursor anywhere on the terminal window.
|
||||
* Allows you to delete existing contents from the terminal window.
|
||||
* Allows you to hide and show the cursor.
|
||||
* Converts CSS color codes and RGB values into ANSI escape codes.
|
||||
* Low-level; you are in control of when escape codes are used, it's not abstracted.
|
||||
* Optional automatic cleanup of stream by before closing (still TODO).
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
Install with `npm`:
|
||||
|
||||
``` bash
|
||||
$ npm install ansi
|
||||
```
|
||||
|
||||
|
||||
Example
|
||||
-------
|
||||
|
||||
``` js
|
||||
var ansi = require('ansi')
|
||||
, cursor = ansi(process.stdout)
|
||||
|
||||
// You can chain your calls forever:
|
||||
cursor.red() // Set font color to red
|
||||
.bg.blue() // Set background color to blue
|
||||
.write('Hello World!') // Write 'Hello World!' to stdout
|
||||
.reset() // When a bg color is set, call reset() before
|
||||
// writing the trailing \n, to avoid Terminal glitches
|
||||
.write('\n') // And a final \n to wrap things up
|
||||
|
||||
// Rendering modes are persistent:
|
||||
cursor.green().bold()
|
||||
|
||||
// You can use the regular logging functions, text will be green
|
||||
console.log('This is green, bold text')
|
||||
|
||||
// To reset just the foreground color:
|
||||
cursor.fg.reset()
|
||||
|
||||
console.log('This will still be bold')
|
||||
```
|
||||
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
46
node_modules/ansi/examples/imgcat.js
generated
vendored
Executable file
46
node_modules/ansi/examples/imgcat.js
generated
vendored
Executable file
|
@ -0,0 +1,46 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
var ansi = require('../')
|
||||
, cursor = ansi(process.stdout)
|
||||
, tty = require('tty')
|
||||
, Canvas = require('canvas')
|
||||
, imageFile = process.argv[2] || __dirname + '/yoshi.png'
|
||||
, image = require('fs').readFileSync(imageFile)
|
||||
, pixel = ' '
|
||||
, alphaThreshold = 0
|
||||
|
||||
var img = new Canvas.Image();
|
||||
img.src = image;
|
||||
|
||||
function draw () {
|
||||
var width = process.stdout.getWindowSize()[0] / pixel.length | 0
|
||||
, scaleW = img.width > width ? width / img.width : 1
|
||||
, w = Math.floor(img.width * scaleW)
|
||||
, h = Math.floor(img.height * scaleW);
|
||||
|
||||
var canvas = new Canvas(w, h)
|
||||
, ctx = canvas.getContext('2d');
|
||||
|
||||
ctx.drawImage(img, 0, 0, w, h);
|
||||
|
||||
var data = ctx.getImageData(0, 0, w, h).data;
|
||||
|
||||
for (var i=0, l=data.length; i<l; i+=4) {
|
||||
var r = data[i]
|
||||
, g = data[i+1]
|
||||
, b = data[i+2]
|
||||
, alpha = data[i+3];
|
||||
if (alpha > alphaThreshold) {
|
||||
cursor.bg.rgb(r, g, b);
|
||||
} else {
|
||||
cursor.bg.reset();
|
||||
}
|
||||
process.stdout.write(pixel);
|
||||
if ((i/4|0) % w === (w-1)) {
|
||||
cursor.bg.reset();
|
||||
process.stdout.write('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
draw();
|
BIN
node_modules/ansi/examples/yoshi.png
generated
vendored
Normal file
BIN
node_modules/ansi/examples/yoshi.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.3 KiB |
280
node_modules/ansi/lib/ansi.js
generated
vendored
Normal file
280
node_modules/ansi/lib/ansi.js
generated
vendored
Normal file
|
@ -0,0 +1,280 @@
|
|||
|
||||
/**
|
||||
* Reference: http://en.wikipedia.org/wiki/ANSI_escape_code
|
||||
*/
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var prefix = '\033[' // For all escape codes
|
||||
, suffix = 'm'; // Only for color codes
|
||||
|
||||
/**
|
||||
* The ANSI escape sequences.
|
||||
*/
|
||||
|
||||
var codes = {
|
||||
up: 'A'
|
||||
, down: 'B'
|
||||
, forward: 'C'
|
||||
, back: 'D'
|
||||
, nextLine: 'E'
|
||||
, previousLine: 'F'
|
||||
, horizontalAbsolute: 'G'
|
||||
, eraseData: 'J'
|
||||
, eraseLine: 'K'
|
||||
, scrollUp: 'S'
|
||||
, scrollDown: 'T'
|
||||
, savePosition: 's'
|
||||
, restorePosition: 'u'
|
||||
, hide: '?25l'
|
||||
, show: '?25h'
|
||||
};
|
||||
|
||||
/**
|
||||
* Rendering ANSI codes.
|
||||
*/
|
||||
|
||||
var styles = {
|
||||
bold: 1
|
||||
, italic: 3
|
||||
, underline: 4
|
||||
, inverse: 7
|
||||
};
|
||||
|
||||
/**
|
||||
* The negating ANSI code for the rendering modes.
|
||||
*/
|
||||
|
||||
var reset = {
|
||||
bold: 22
|
||||
, italic: 23
|
||||
, underline: 24
|
||||
, inverse: 27
|
||||
, foreground: 39
|
||||
, background: 49
|
||||
};
|
||||
|
||||
/**
|
||||
* The standard, styleable ANSI colors.
|
||||
*/
|
||||
|
||||
var colors = {
|
||||
white: 37
|
||||
, grey: 90
|
||||
, black: 30
|
||||
, blue: 34
|
||||
, cyan: 36
|
||||
, green: 32
|
||||
, magenta: 35
|
||||
, red: 31
|
||||
, yellow: 33
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Creates a Cursor instance based off the given `writable stream` instance.
|
||||
*/
|
||||
|
||||
function ansi (stream, options) {
|
||||
return new Cursor(stream, options);
|
||||
}
|
||||
module.exports = exports = ansi;
|
||||
|
||||
/**
|
||||
* The `Cursor` class.
|
||||
*/
|
||||
|
||||
function Cursor (stream, options) {
|
||||
this.stream = stream;
|
||||
this.fg = this.foreground = new Foreground(this);
|
||||
this.bg = this.background = new Background(this);
|
||||
}
|
||||
exports.Cursor = Cursor;
|
||||
|
||||
/**
|
||||
* The `Foreground` class.
|
||||
*/
|
||||
|
||||
function Foreground (cursor) {
|
||||
this.cursor = cursor;
|
||||
}
|
||||
exports.Foreground = Foreground;
|
||||
|
||||
/**
|
||||
* The `Background` class.
|
||||
*/
|
||||
|
||||
function Background (cursor) {
|
||||
this.cursor = cursor;
|
||||
}
|
||||
exports.Background = Background;
|
||||
|
||||
/**
|
||||
* Helper function that calls `write()` on the underlying Stream.
|
||||
*/
|
||||
|
||||
Cursor.prototype.write = function () {
|
||||
this.stream.write.apply(this.stream, arguments);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up the positional ANSI codes.
|
||||
*/
|
||||
|
||||
Object.keys(codes).forEach(function (name) {
|
||||
var code = String(codes[name]);
|
||||
Cursor.prototype[name] = function () {
|
||||
var c = code;
|
||||
if (arguments.length > 0) {
|
||||
c = Math.round(arguments[0]) + code;
|
||||
}
|
||||
this.write(prefix + c);
|
||||
return this;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Set up the functions for the rendering ANSI codes.
|
||||
*/
|
||||
|
||||
Object.keys(styles).forEach(function (style) {
|
||||
var name = style[0].toUpperCase() + style.substring(1);
|
||||
|
||||
Cursor.prototype[style] = function () {
|
||||
this.write(prefix + styles[style] + suffix);
|
||||
return this;
|
||||
}
|
||||
|
||||
Cursor.prototype['reset'+name] = function () {
|
||||
this.write(prefix + reset[style] + suffix);
|
||||
return this;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Setup the functions for the standard colors.
|
||||
*/
|
||||
|
||||
Object.keys(colors).forEach(function (color) {
|
||||
Foreground.prototype[color] = function () {
|
||||
this.cursor.write(prefix + colors[color] + suffix);
|
||||
return this.cursor;
|
||||
}
|
||||
|
||||
var bgCode = colors[color] + 10;
|
||||
Background.prototype[color] = function () {
|
||||
this.cursor.write(prefix + bgCode + suffix);
|
||||
return this.cursor;
|
||||
}
|
||||
|
||||
Cursor.prototype[color] = function () {
|
||||
return this.foreground[color]();
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Makes a beep sound!
|
||||
*/
|
||||
|
||||
Cursor.prototype.beep = function () {
|
||||
this.write('\007');
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the foreground color.
|
||||
*/
|
||||
|
||||
Foreground.prototype.reset = function () {
|
||||
this.cursor.write(prefix + reset.foreground + suffix);
|
||||
return this.cursor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the background color.
|
||||
*/
|
||||
|
||||
Background.prototype.reset = function () {
|
||||
this.cursor.write(prefix + reset.background + suffix);
|
||||
return this.cursor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resets all ANSI formatting on the stream.
|
||||
*/
|
||||
|
||||
Cursor.prototype.reset = function () {
|
||||
this.write(prefix + '0' + suffix);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the foreground color with the given RGB values.
|
||||
* The closest match out of the 216 colors is picked.
|
||||
*/
|
||||
|
||||
Foreground.prototype.rgb = function (r, g, b) {
|
||||
this.cursor.write(prefix + '38;5;' + rgb(r, g, b) + suffix);
|
||||
return this.cursor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the background color with the given RGB values.
|
||||
* The closest match out of the 216 colors is picked.
|
||||
*/
|
||||
|
||||
Background.prototype.rgb = function (r, g, b) {
|
||||
this.cursor.write(prefix + '48;5;' + rgb(r, g, b) + suffix);
|
||||
return this.cursor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as `cursor.fg.rgb()`.
|
||||
*/
|
||||
|
||||
Cursor.prototype.rgb = function (r, g, b) {
|
||||
return this.foreground.rgb(r, g, b);
|
||||
}
|
||||
|
||||
/**
|
||||
* Accepts CSS color codes for use with ANSI escape codes.
|
||||
* For example: `#FF000` would be bright red.
|
||||
*/
|
||||
|
||||
Foreground.prototype.hex = Background.prototype.hex = function (color) {
|
||||
var rgb = hex(color);
|
||||
return this.rgb(rgb[0], rgb[1], rgb[2]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as `cursor.fg.hex()`.
|
||||
*/
|
||||
|
||||
Cursor.prototype.hex = function (color) {
|
||||
return this.foreground.hex(color);
|
||||
}
|
||||
|
||||
function rgb (r, g, b) {
|
||||
var red = r / 255 * 5
|
||||
, green = g / 255 * 5
|
||||
, blue = b / 255 * 5;
|
||||
return rgb5(red, green, blue);
|
||||
}
|
||||
|
||||
function rgb5 (r, g, b) {
|
||||
var red = Math.round(r)
|
||||
, green = Math.round(g)
|
||||
, blue = Math.round(b);
|
||||
return 16 + (red*36) + (green*6) + blue;
|
||||
}
|
||||
|
||||
function hex (color) {
|
||||
var c = color[0] === '#' ? color.substring(1) : color
|
||||
, r = c.substring(0, 2)
|
||||
, g = c.substring(2, 4)
|
||||
, b = c.substring(4, 6);
|
||||
return [parseInt(r, 16), parseInt(g, 16), parseInt(b, 16)];
|
||||
}
|
BIN
node_modules/ansi/nodejs.png
generated
vendored
Normal file
BIN
node_modules/ansi/nodejs.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 30 KiB |
16
node_modules/ansi/package.json
generated
vendored
Normal file
16
node_modules/ansi/package.json
generated
vendored
Normal file
|
@ -0,0 +1,16 @@
|
|||
{ "name": "ansi"
|
||||
, "description": "Advanced ANSI formatting tool for Node.js"
|
||||
, "keywords": [ "ansi", "formatting", "cursor", "color", "terminal", "rgb", "256", "stream" ]
|
||||
, "version": "0.0.3"
|
||||
, "author": "Nathan Rajlich <nathan@tootallnate.net> (http://tootallnate.net)"
|
||||
, "repository": { "type": "git", "url": "git://github.com/TooTallNate/ansi.js.git" }
|
||||
, "main": "./lib/ansi.js"
|
||||
, "scripts": {
|
||||
"test": "mocha --reporter spec"
|
||||
}
|
||||
, "devDependencies": {
|
||||
"mocha": "*"
|
||||
, "canvas": "*"
|
||||
}
|
||||
, "engines": { "node": ">= 0.4.0 && < 0.9.0" }
|
||||
}
|
51
node_modules/ansi/server.js
generated
vendored
Normal file
51
node_modules/ansi/server.js
generated
vendored
Normal file
|
@ -0,0 +1,51 @@
|
|||
var http = require('http')
|
||||
, ansi = require('./')
|
||||
, Canvas = require('canvas')
|
||||
, Image = Canvas.Image
|
||||
, imageFile = process.argv[2] || __dirname + '/examples/yoshi.png'
|
||||
, image = require('fs').readFileSync(imageFile)
|
||||
|
||||
var img = new Image()
|
||||
img.src = image
|
||||
|
||||
var server = http.createServer(function (req, res) {
|
||||
draw(res);
|
||||
})
|
||||
|
||||
server.listen(8080, function () {
|
||||
console.error('HTTP server listening on:', this.address())
|
||||
})
|
||||
|
||||
function draw (stream) {
|
||||
var cursor = ansi(stream)
|
||||
, pixel = ' '
|
||||
, width = img.width
|
||||
, scaleW = img.width > width ? width / img.width : 1
|
||||
, w = Math.floor(img.width * scaleW)
|
||||
, h = Math.floor(img.height * scaleW);
|
||||
|
||||
var canvas = new Canvas(w, h)
|
||||
, ctx = canvas.getContext('2d');
|
||||
|
||||
ctx.drawImage(img, 0, 0, w, h);
|
||||
|
||||
var data = ctx.getImageData(0, 0, w, h).data;
|
||||
|
||||
for (var i=0, l=data.length; i<l; i+=4) {
|
||||
var r = data[i]
|
||||
, g = data[i+1]
|
||||
, b = data[i+2]
|
||||
, alpha = data[i+3];
|
||||
if (alpha > 0) {
|
||||
cursor.bg.rgb(r, g, b);
|
||||
} else {
|
||||
cursor.bg.reset();
|
||||
}
|
||||
stream.write(pixel);
|
||||
if ((i/4|0) % w === (w-1)) {
|
||||
cursor.bg.reset();
|
||||
stream.write('\n');
|
||||
}
|
||||
}
|
||||
stream.end();
|
||||
}
|
2
node_modules/glob/.npmignore
generated
vendored
Normal file
2
node_modules/glob/.npmignore
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
.*.swp
|
||||
test/a/
|
4
node_modules/glob/.travis.yml
generated
vendored
Normal file
4
node_modules/glob/.travis.yml
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
language: node_js
|
||||
node_js:
|
||||
- 0.6
|
||||
- 0.7
|
114
node_modules/glob/README.md
generated
vendored
Normal file
114
node_modules/glob/README.md
generated
vendored
Normal file
|
@ -0,0 +1,114 @@
|
|||
# Glob
|
||||
|
||||
This is a glob implementation in JavaScript. It uses the `minimatch`
|
||||
library to do its matching.
|
||||
|
||||
## Attention: node-glob users!
|
||||
|
||||
The API has changed dramatically between 2.x and 3.x. This library is
|
||||
now 100% JavaScript, and the integer flags have been replaced with an
|
||||
options object.
|
||||
|
||||
Also, there's an event emitter class, proper tests, and all the other
|
||||
things you've come to expect from node modules.
|
||||
|
||||
And best of all, no compilation!
|
||||
|
||||
## Usage
|
||||
|
||||
```javascript
|
||||
var glob = require("glob")
|
||||
|
||||
// options is optional
|
||||
glob("**/*.js", options, function (er, files) {
|
||||
// files is an array of filenames.
|
||||
// If the `nonull` option is set, and nothing
|
||||
// was found, then files is ["**/*.js"]
|
||||
// er is an error object or null.
|
||||
})
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
Please see the [minimatch
|
||||
documentation](https://github.com/isaacs/minimatch) for more details.
|
||||
|
||||
Supports these glob features:
|
||||
|
||||
* Brace Expansion
|
||||
* Extended glob matching
|
||||
* "Globstar" `**` matching
|
||||
|
||||
See:
|
||||
|
||||
* `man sh`
|
||||
* `man bash`
|
||||
* `man 3 fnmatch`
|
||||
* `man 5 gitignore`
|
||||
* [minimatch documentation](https://github.com/isaacs/minimatch)
|
||||
|
||||
## Glob Class
|
||||
|
||||
Create a glob object by instanting the `glob.Glob` class.
|
||||
|
||||
```javascript
|
||||
var Glob = require("glob").Glob
|
||||
var mg = new Glob(pattern, options)
|
||||
```
|
||||
|
||||
It's an EventEmitter.
|
||||
|
||||
### Properties
|
||||
|
||||
* `minimatch` The minimatch object that the glob uses.
|
||||
* `options` The options object passed in.
|
||||
* `matches` A [FastList](https://github.com/isaacs/fast-list) object
|
||||
containing the matches as they are found.
|
||||
* `error` The error encountered. When an error is encountered, the
|
||||
glob object is in an undefined state, and should be discarded.
|
||||
* `aborted` Boolean which is set to true when calling `abort()`. There
|
||||
is no way at this time to continue a glob search after aborting.
|
||||
|
||||
### Events
|
||||
|
||||
* `end` When the matching is finished, this is emitted with all the
|
||||
matches found. If the `nonull` option is set, and no match was found,
|
||||
then the `matches` list contains the original pattern. The matches
|
||||
are sorted, unless the `nosort` flag is set.
|
||||
* `match` Every time a match is found, this is emitted with the pattern.
|
||||
* `partial` Emitted when a directory matches the start of a pattern, and
|
||||
is then searched for additional matches.
|
||||
* `error` Emitted when an unexpected error is encountered.
|
||||
* `abort` When `abort()` is called, this event is raised.
|
||||
|
||||
### Methods
|
||||
|
||||
* `abort` Stop the search.
|
||||
|
||||
### Options
|
||||
|
||||
All the options that can be passed to Minimatch can also be passed to
|
||||
Glob to change pattern matching behavior. Additionally, these ones
|
||||
are added which are glob-specific, or have glob-specific ramifcations.
|
||||
|
||||
All options are false by default.
|
||||
|
||||
* `cwd` The current working directory in which to search. Since, unlike
|
||||
Minimatch, Glob requires a working directory to start in, this
|
||||
defaults to `process.cwd()`.
|
||||
* `root` Since Glob requires a root setting, this defaults to
|
||||
`path.resolve(options.cwd, "/")`.
|
||||
* `mark` Add a `/` character to directory matches.
|
||||
* `follow` Use `stat` instead of `lstat`. This is only relevant if
|
||||
`stat` or `mark` are true.
|
||||
* `nosort` Don't sort the results.
|
||||
* `stat` Set to true to stat/lstat *all* results. This reduces performance
|
||||
somewhat, but guarantees that the results are files that actually
|
||||
exist.
|
||||
* `silent` When an error other than `ENOENT` or `ENOTDIR` is encountered
|
||||
when attempting to read a directory, a warning will be printed to
|
||||
stderr. Set the `silent` option to true to suppress these warnings.
|
||||
* `strict` When an error other than `ENOENT` or `ENOTDIR` is encountered
|
||||
when attempting to read a directory, the process will just continue on
|
||||
in search of other matches. Set the `strict` option to raise an error
|
||||
in these cases.
|
9
node_modules/glob/examples/g.js
generated
vendored
Normal file
9
node_modules/glob/examples/g.js
generated
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
var Glob = require("../").Glob
|
||||
|
||||
var pattern = "test/a/**/[cg]/../[cg]"
|
||||
console.log(pattern)
|
||||
|
||||
var mg = new Glob(pattern, {mark: true, sync:true}, function (er, matches) {
|
||||
console.log("matches", matches)
|
||||
})
|
||||
console.log("after")
|
9
node_modules/glob/examples/usr-local.js
generated
vendored
Normal file
9
node_modules/glob/examples/usr-local.js
generated
vendored
Normal file
|
@ -0,0 +1,9 @@
|
|||
var Glob = require("../").Glob
|
||||
|
||||
var pattern = "{./*/*,/*,/usr/local/*}"
|
||||
console.log(pattern)
|
||||
|
||||
var mg = new Glob(pattern, {mark: true}, function (er, matches) {
|
||||
console.log("matches", matches)
|
||||
})
|
||||
console.log("after")
|
402
node_modules/glob/glob.js
generated
vendored
Normal file
402
node_modules/glob/glob.js
generated
vendored
Normal file
|
@ -0,0 +1,402 @@
|
|||
module.exports = glob
|
||||
|
||||
var fs = require("graceful-fs")
|
||||
, minimatch = require("minimatch")
|
||||
, Minimatch = minimatch.Minimatch
|
||||
, inherits = require("inherits")
|
||||
, EE = require("events").EventEmitter
|
||||
, FastList = require("fast-list")
|
||||
, path = require("path")
|
||||
, isDir = {}
|
||||
|
||||
// Globbing is a *little* bit different than just matching, in some
|
||||
// key ways.
|
||||
//
|
||||
// First, and importantly, it matters a great deal whether a pattern
|
||||
// is "absolute" or "relative". Absolute patterns are patterns that
|
||||
// start with / on unix, or a full device/unc path on windows.
|
||||
//
|
||||
// Second, globs interact with the actual filesystem, so being able
|
||||
// to stop searching as soon as a match is no longer possible is of
|
||||
// the utmost importance. It would not do to traverse a large file
|
||||
// tree, and then eliminate all but one of the options, if it could
|
||||
// be possible to skip the traversal early.
|
||||
|
||||
// Get a Minimatch object from the pattern and options. Then, starting
|
||||
// from the options.root or the cwd, read the dir, and do a partial
|
||||
// match on all the files if it's a dir, or a regular match if it's not.
|
||||
|
||||
|
||||
function glob (pattern, options, cb) {
|
||||
if (typeof options === "function") cb = options, options = {}
|
||||
if (!options) options = {}
|
||||
|
||||
if (typeof options === "number") {
|
||||
deprecated()
|
||||
return
|
||||
}
|
||||
|
||||
var m = new Glob(pattern, options, cb)
|
||||
|
||||
if (options.sync) {
|
||||
return m.found
|
||||
} else {
|
||||
return m
|
||||
}
|
||||
}
|
||||
|
||||
glob.fnmatch = deprecated
|
||||
|
||||
function deprecated () {
|
||||
throw new Error("glob's interface has changed. Please see the docs.")
|
||||
}
|
||||
|
||||
glob.sync = globSync
|
||||
function globSync (pattern, options) {
|
||||
if (typeof options === "number") {
|
||||
deprecated()
|
||||
return
|
||||
}
|
||||
|
||||
options = options || {}
|
||||
options.sync = true
|
||||
return glob(pattern, options)
|
||||
}
|
||||
|
||||
|
||||
glob.Glob = Glob
|
||||
inherits(Glob, EE)
|
||||
function Glob (pattern, options, cb) {
|
||||
if (!(this instanceof Glob)) {
|
||||
return new Glob(pattern, options, cb)
|
||||
}
|
||||
|
||||
if (typeof cb === "function") {
|
||||
this.on("error", cb)
|
||||
this.on("end", function (matches) { cb(null, matches) })
|
||||
}
|
||||
|
||||
options = options || {}
|
||||
|
||||
if (!options.hasOwnProperty("maxDepth")) options.maxDepth = 1000
|
||||
if (!options.hasOwnProperty("maxLength")) options.maxLength = 4096
|
||||
|
||||
var cwd = this.cwd = options.cwd =
|
||||
options.cwd || process.cwd()
|
||||
|
||||
this.root = options.root =
|
||||
options.root || path.resolve(cwd, "/")
|
||||
|
||||
if (!pattern) {
|
||||
throw new Error("must provide pattern")
|
||||
}
|
||||
|
||||
var mm = this.minimatch = new Minimatch(pattern, options)
|
||||
options = this.options = mm.options
|
||||
pattern = this.pattern = mm.pattern
|
||||
|
||||
this.error = null
|
||||
this.aborted = false
|
||||
|
||||
this.matches = new FastList()
|
||||
EE.call(this)
|
||||
var me = this
|
||||
|
||||
this._checkedRoot = false
|
||||
|
||||
// if we have any patterns starting with /, then we need to
|
||||
// start at the root. If we don't, then we can take a short
|
||||
// cut and just start at the cwd.
|
||||
var start = this.cwd
|
||||
for (var i = 0, l = this.minimatch.set.length; i < l; i ++) {
|
||||
if (this.minimatch.set[i].absolute) {
|
||||
start = this.root
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (me.options.debug) {
|
||||
console.error("start =", start)
|
||||
}
|
||||
|
||||
this._process(start, 1, this._finish.bind(this))
|
||||
}
|
||||
|
||||
Glob.prototype._finish = _finish
|
||||
function _finish () {
|
||||
var me = this
|
||||
if (me.options.debug) {
|
||||
console.error("!!! GLOB top level cb", me)
|
||||
}
|
||||
if (me.options.nonull && me.matches.length === 0) {
|
||||
return me.emit("end", [pattern])
|
||||
}
|
||||
|
||||
var found = me.found = me.matches.slice()
|
||||
|
||||
found = me.found = found.map(function (m) {
|
||||
if (m.indexOf(me.options.cwd) === 0) {
|
||||
m = m.substr(me.options.cwd.length + 1)
|
||||
}
|
||||
return m
|
||||
})
|
||||
|
||||
if (!me.options.mark) return next()
|
||||
|
||||
// mark all directories with a /.
|
||||
// This may involve some stat calls for things that are unknown.
|
||||
var needStat = []
|
||||
found = me.found = found.map(function (f) {
|
||||
if (isDir[f] === undefined) needStat.push(f)
|
||||
else if (isDir[f] && f.slice(-1) !== "/") f += "/"
|
||||
return f
|
||||
})
|
||||
var c = needStat.length
|
||||
if (c === 0) return next()
|
||||
|
||||
var stat = me.options.follow ? "stat" : "lstat"
|
||||
needStat.forEach(function (f) {
|
||||
if (me.options.sync) {
|
||||
try {
|
||||
afterStat(f)(null, fs[stat + "Sync"](f))
|
||||
} catch (er) {
|
||||
afterStat(f)(er)
|
||||
}
|
||||
} else fs[stat](f, afterStat(f))
|
||||
})
|
||||
|
||||
function afterStat (f) { return function (er, st) {
|
||||
// ignore errors. if the user only wants to show
|
||||
// existing files, then set options.stat to exclude anything
|
||||
// that doesn't exist.
|
||||
if (st && st.isDirectory() && f.substr(-1) !== "/") {
|
||||
var i = found.indexOf(f)
|
||||
if (i !== -1) {
|
||||
found.splice(i, 1, f + "/")
|
||||
}
|
||||
}
|
||||
if (-- c <= 0) return next()
|
||||
}}
|
||||
|
||||
function next () {
|
||||
if (!me.options.nosort) {
|
||||
found = found.sort(alphasort)
|
||||
}
|
||||
me.emit("end", found)
|
||||
}
|
||||
}
|
||||
|
||||
function alphasort (a, b) {
|
||||
a = a.toLowerCase()
|
||||
b = b.toLowerCase()
|
||||
return a > b ? 1 : a < b ? -1 : 0
|
||||
}
|
||||
|
||||
Glob.prototype.abort = abort
|
||||
function abort () {
|
||||
this.aborted = true
|
||||
this.emit("abort")
|
||||
}
|
||||
|
||||
|
||||
Glob.prototype._process = _process
|
||||
function _process (f, depth, cb) {
|
||||
if (this.aborted) return cb()
|
||||
|
||||
var me = this
|
||||
|
||||
// if f matches, then it's a match. emit it, move on.
|
||||
// if it *partially* matches, then it might be a dir.
|
||||
//
|
||||
// possible optimization: don't just minimatch everything
|
||||
// against the full pattern. if a bit of the pattern is
|
||||
// not magical, it'd be good to reduce the number of stats
|
||||
// that had to be made. so, in the pattern: "a/*/b", we could
|
||||
// readdir a, then stat a/<child>/b in all of them.
|
||||
//
|
||||
// however, that'll require a lot of muddying between minimatch
|
||||
// and glob, and at least for the time being, it's kind of nice to
|
||||
// keep them a little bit separate.
|
||||
|
||||
// if this thing is a match, then add to the matches list.
|
||||
var match = me.minimatch.match(f)
|
||||
if (!match) {
|
||||
if (me.options.debug) {
|
||||
console.error("not a match", f)
|
||||
}
|
||||
return me._processPartial(f, depth, cb)
|
||||
}
|
||||
|
||||
if (match) {
|
||||
if (me.options.debug) {
|
||||
console.error(" %s matches %s", f, me.pattern)
|
||||
}
|
||||
// make sure it exists if asked.
|
||||
if (me.options.stat) {
|
||||
var stat = me.options.follow ? "stat" : "lstat"
|
||||
if (me.options.sync) {
|
||||
try {
|
||||
afterStat(f)(null, fs[stat + "Sync"](f))
|
||||
} catch (ex) {
|
||||
afterStat(f)(ex)
|
||||
}
|
||||
} else fs[stat](f, afterStat(f))
|
||||
} else if (me.options.sync) {
|
||||
emitMatch()
|
||||
} else {
|
||||
process.nextTick(emitMatch)
|
||||
}
|
||||
|
||||
return
|
||||
|
||||
function afterStat (f) { return function (er, st) {
|
||||
if (er) return cb()
|
||||
isDir[f] = st.isDirectory()
|
||||
emitMatch()
|
||||
}}
|
||||
|
||||
function emitMatch () {
|
||||
if (me.options.debug) {
|
||||
console.error("emitting match", f)
|
||||
}
|
||||
me.matches.push(f)
|
||||
me.emit("match", f)
|
||||
// move on, since it might also be a partial match
|
||||
// eg, a/**/c matches both a/c and a/c/d/c
|
||||
me._processPartial(f, depth, cb)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
Glob.prototype._processPartial = _processPartial
|
||||
function _processPartial (f, depth, cb) {
|
||||
if (this.aborted) return cb()
|
||||
|
||||
var me = this
|
||||
|
||||
var partial = me.minimatch.match(f, true)
|
||||
if (!partial) {
|
||||
if (me.options.debug) {
|
||||
console.error("not a partial", f)
|
||||
}
|
||||
|
||||
// if not a match or partial match, just move on.
|
||||
return cb()
|
||||
}
|
||||
|
||||
// partial match
|
||||
// however, this only matters if it's a dir.
|
||||
//if (me.options.debug)
|
||||
if (me.options.debug) {
|
||||
console.error("got a partial", f)
|
||||
}
|
||||
me.emit("partial", f)
|
||||
|
||||
me._processDir(f, depth, cb)
|
||||
}
|
||||
|
||||
Glob.prototype._processDir = _processDir
|
||||
function _processDir (f, depth, cb) {
|
||||
if (this.aborted) return cb()
|
||||
|
||||
// If we're already at the maximum depth, then don't read the dir.
|
||||
if (depth >= this.options.maxDepth) return cb()
|
||||
|
||||
// if the path is at the maximum length, then don't proceed, either.
|
||||
if (f.length >= this.options.maxLength) return cb()
|
||||
|
||||
// now the fun stuff.
|
||||
// if it's a dir, then we'll read all the children, and process them.
|
||||
// if it's not a dir, or we can't access it, then it'll fail.
|
||||
// We log a warning for EACCES and EPERM, but ENOTDIR and ENOENT are
|
||||
// expected and fine.
|
||||
cb = this._afterReaddir(f, depth, cb)
|
||||
if (this.options.sync) return this._processDirSync(f, depth, cb)
|
||||
fs.readdir(f, cb)
|
||||
}
|
||||
|
||||
Glob.prototype._processDirSync = _processDirSync
|
||||
function _processDirSync (f, depth, cb) {
|
||||
try {
|
||||
cb(null, fs.readdirSync(f))
|
||||
} catch (ex) {
|
||||
cb(ex)
|
||||
}
|
||||
}
|
||||
|
||||
Glob.prototype._afterReaddir = _afterReaddir
|
||||
function _afterReaddir (f, depth, cb) {
|
||||
var me = this
|
||||
return function afterReaddir (er, children) {
|
||||
if (er) switch (er.code) {
|
||||
case "UNKNOWN": // probably too deep
|
||||
case "ENOTDIR": // completely expected and normal.
|
||||
isDir[f] = false
|
||||
return cb()
|
||||
case "ENOENT": // should never happen.
|
||||
default: // some other kind of problem.
|
||||
if (!me.options.silent) console.error("glob error", er)
|
||||
if (me.options.strict) return cb(er)
|
||||
return cb()
|
||||
}
|
||||
|
||||
// at this point, we know it's a dir, so save a stat later if
|
||||
// mark is set.
|
||||
isDir[f] = true
|
||||
|
||||
me._processChildren(f, depth, children, cb)
|
||||
}
|
||||
}
|
||||
|
||||
Glob.prototype._processChildren = _processChildren
|
||||
function _processChildren (f, depth, children, cb) {
|
||||
var me = this
|
||||
|
||||
// note: the file ending with / might match, but only if
|
||||
// it's a directory, which we know it is at this point.
|
||||
// For example, /a/b/ or /a/b/** would match /a/b/ but not
|
||||
// /a/b. Note: it'll get the trailing "/" strictly based
|
||||
// on the "mark" param, but that happens later.
|
||||
// This is slightly different from bash's glob.
|
||||
if (!me.minimatch.match(f) && me.minimatch.match(f + "/")) {
|
||||
me.matches.push(f)
|
||||
me.emit("match", f)
|
||||
}
|
||||
|
||||
if (-1 === children.indexOf(".")) children.push(".")
|
||||
if (-1 === children.indexOf("..")) children.push("..")
|
||||
|
||||
var count = children.length
|
||||
if (me.options.debug) {
|
||||
console.error("count=%d %s", count, f, children)
|
||||
}
|
||||
|
||||
if (count === 0) {
|
||||
if (me.options.debug) {
|
||||
console.error("no children?", children, f)
|
||||
}
|
||||
return then()
|
||||
}
|
||||
|
||||
children.forEach(function (c) {
|
||||
if (f === "/") c = f + c
|
||||
else c = f + "/" + c
|
||||
|
||||
if (me.options.debug) {
|
||||
console.error(" processing", c)
|
||||
}
|
||||
me._process(c, depth + 1, then)
|
||||
})
|
||||
|
||||
function then (er) {
|
||||
count --
|
||||
if (me.options.debug) {
|
||||
console.error("%s THEN %s", f, count, count <= 0 ? "done" : "not done")
|
||||
}
|
||||
if (me.error) return
|
||||
if (er) return me.emit("error", me.error = er)
|
||||
if (count <= 0) cb()
|
||||
}
|
||||
}
|
1
node_modules/glob/node_modules/fast-list/.npmignore
generated
vendored
Normal file
1
node_modules/glob/node_modules/fast-list/.npmignore
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
node_modules/
|
4
node_modules/glob/node_modules/fast-list/.travis.yml
generated
vendored
Normal file
4
node_modules/glob/node_modules/fast-list/.travis.yml
generated
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
language: node_js
|
||||
node_js:
|
||||
- 0.4
|
||||
- 0.6
|
111
node_modules/glob/node_modules/fast-list/README.md
generated
vendored
Normal file
111
node_modules/glob/node_modules/fast-list/README.md
generated
vendored
Normal file
|
@ -0,0 +1,111 @@
|
|||
# The Problem
|
||||
|
||||
You've got some thing where you need to push a bunch of stuff into a
|
||||
queue and then shift it out. Or, maybe it's a stack, and you're just
|
||||
pushing and popping it.
|
||||
|
||||
Arrays work for this, but are a bit costly performance-wise.
|
||||
|
||||
# The Solution
|
||||
|
||||
A linked-list implementation that takes advantage of what v8 is good at:
|
||||
creating objects with a known shape.
|
||||
|
||||
This is faster for this use case. How much faster? About 50%.
|
||||
|
||||
$ node bench.js
|
||||
benchmarking /Users/isaacs/dev-src/js/fast-list/bench.js
|
||||
Please be patient.
|
||||
{ node: '0.6.2-pre',
|
||||
v8: '3.6.6.8',
|
||||
ares: '1.7.5-DEV',
|
||||
uv: '0.1',
|
||||
openssl: '0.9.8l' }
|
||||
Scores: (bigger is better)
|
||||
|
||||
new FastList()
|
||||
Raw:
|
||||
> 22556.39097744361
|
||||
> 23054.755043227666
|
||||
> 22770.398481973436
|
||||
> 23414.634146341465
|
||||
> 23099.133782483157
|
||||
Average (mean) 22979.062486293868
|
||||
|
||||
[]
|
||||
Raw:
|
||||
> 12195.121951219513
|
||||
> 12184.508268059182
|
||||
> 12173.91304347826
|
||||
> 12216.404886561955
|
||||
> 12184.508268059182
|
||||
Average (mean) 12190.891283475617
|
||||
|
||||
new Array()
|
||||
Raw:
|
||||
> 12131.715771230503
|
||||
> 12184.508268059182
|
||||
> 12216.404886561955
|
||||
> 12195.121951219513
|
||||
> 11940.298507462687
|
||||
Average (mean) 12133.609876906768
|
||||
|
||||
Winner: new FastList()
|
||||
Compared with next highest ([]), it's:
|
||||
46.95% faster
|
||||
1.88 times as fast
|
||||
0.28 order(s) of magnitude faster
|
||||
|
||||
Compared with the slowest (new Array()), it's:
|
||||
47.2% faster
|
||||
1.89 times as fast
|
||||
0.28 order(s) of magnitude faster
|
||||
|
||||
This lacks a lot of features that arrays have:
|
||||
|
||||
1. You can't specify the size at the outset.
|
||||
2. It's not indexable.
|
||||
3. There's no join, concat, etc.
|
||||
|
||||
If any of this matters for your use case, you're probably better off
|
||||
using an Array object.
|
||||
|
||||
## Installing
|
||||
|
||||
```
|
||||
npm install fast-list
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
```javascript
|
||||
var FastList = require("fast-list")
|
||||
var list = new FastList()
|
||||
list.push("foo")
|
||||
list.unshift("bar")
|
||||
list.push("baz")
|
||||
console.log(list.length) // 2
|
||||
console.log(list.pop()) // baz
|
||||
console.log(list.shift()) // bar
|
||||
console.log(list.shift()) // foo
|
||||
```
|
||||
|
||||
### Methods
|
||||
|
||||
* `push`: Just like Array.push, but only can take a single entry
|
||||
* `pop`: Just like Array.pop
|
||||
* `shift`: Just like Array.shift
|
||||
* `unshift`: Just like Array.unshift, but only can take a single entry
|
||||
* `drop`: Drop all entries
|
||||
* `item(n)`: Retrieve the nth item in the list. This involves a walk
|
||||
every time. It's very slow. If you find yourself using this,
|
||||
consider using a normal Array instead.
|
||||
* `slice(start, end)`: Retrieve an array of the items at this position.
|
||||
This involves a walk every time. It's very slow. If you find
|
||||
yourself using this, consider using a normal Array instead.
|
||||
|
||||
### Members
|
||||
|
||||
* `length`: The number of things in the list. Note that, unlike
|
||||
Array.length, this is not a getter/setter, but rather a counter that
|
||||
is internally managed. Setting it can only cause harm.
|
55
node_modules/glob/node_modules/fast-list/bench.js
generated
vendored
Normal file
55
node_modules/glob/node_modules/fast-list/bench.js
generated
vendored
Normal file
|
@ -0,0 +1,55 @@
|
|||
var bench = require("bench")
|
||||
|
||||
var l = 1000
|
||||
, FastList = require("./fast-list.js")
|
||||
|
||||
exports.countPerLap = l * 2
|
||||
|
||||
exports.compare =
|
||||
{ "[]": function () {
|
||||
var list = []
|
||||
for (var j = 0; j < l; j ++) {
|
||||
if (j % 2) list.push(j)
|
||||
else list.unshift(j)
|
||||
}
|
||||
for (var j = 0; j < l; j ++) {
|
||||
if (j % 2) list.shift(j)
|
||||
else list.pop(j)
|
||||
}
|
||||
}
|
||||
, "new Array()": function () {
|
||||
var list = new Array()
|
||||
for (var j = 0; j < l; j ++) {
|
||||
if (j % 2) list.push(j)
|
||||
else list.unshift(j)
|
||||
}
|
||||
for (var j = 0; j < l; j ++) {
|
||||
if (j % 2) list.shift(j)
|
||||
else list.pop(j)
|
||||
}
|
||||
}
|
||||
// , "FastList()": function () {
|
||||
// var list = FastList()
|
||||
// for (var j = 0; j < l; j ++) {
|
||||
// if (j % 2) list.push(j)
|
||||
// else list.unshift(j)
|
||||
// }
|
||||
// for (var j = 0; j < l; j ++) {
|
||||
// if (j % 2) list.shift(j)
|
||||
// else list.pop(j)
|
||||
// }
|
||||
// }
|
||||
, "new FastList()": function () {
|
||||
var list = new FastList()
|
||||
for (var j = 0; j < l; j ++) {
|
||||
if (j % 2) list.push(j)
|
||||
else list.unshift(j)
|
||||
}
|
||||
for (var j = 0; j < l; j ++) {
|
||||
if (j % 2) list.shift(j)
|
||||
else list.pop(j)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bench.runMain()
|
144
node_modules/glob/node_modules/fast-list/fast-list.js
generated
vendored
Normal file
144
node_modules/glob/node_modules/fast-list/fast-list.js
generated
vendored
Normal file
|
@ -0,0 +1,144 @@
|
|||
;(function() { // closure for web browsers
|
||||
|
||||
function Item (data, prev, next) {
|
||||
this.next = next
|
||||
if (next) next.prev = this
|
||||
this.prev = prev
|
||||
if (prev) prev.next = this
|
||||
this.data = data
|
||||
}
|
||||
|
||||
function FastList () {
|
||||
if (!(this instanceof FastList)) return new FastList
|
||||
this._head = null
|
||||
this._tail = null
|
||||
this.length = 0
|
||||
}
|
||||
|
||||
FastList.prototype =
|
||||
{ push: function (data) {
|
||||
this._tail = new Item(data, this._tail, null)
|
||||
if (!this._head) this._head = this._tail
|
||||
this.length ++
|
||||
}
|
||||
|
||||
, pop: function () {
|
||||
if (this.length === 0) return undefined
|
||||
var t = this._tail
|
||||
this._tail = t.prev
|
||||
if (t.prev) {
|
||||
t.prev = this._tail.next = null
|
||||
}
|
||||
this.length --
|
||||
if (this.length === 1) this._head = this._tail
|
||||
else if (this.length === 0) this._head = this._tail = null
|
||||
return t.data
|
||||
}
|
||||
|
||||
, unshift: function (data) {
|
||||
this._head = new Item(data, null, this._head)
|
||||
if (!this._tail) this._tail = this._head
|
||||
this.length ++
|
||||
}
|
||||
|
||||
, shift: function () {
|
||||
if (this.length === 0) return undefined
|
||||
var h = this._head
|
||||
this._head = h.next
|
||||
if (h.next) {
|
||||
h.next = this._head.prev = null
|
||||
}
|
||||
this.length --
|
||||
if (this.length === 1) this._tail = this._head
|
||||
else if (this.length === 0) this._head = this._tail = null
|
||||
return h.data
|
||||
}
|
||||
|
||||
, item: function (n) {
|
||||
if (n < 0) n = this.length + n
|
||||
var h = this._head
|
||||
while (n-- > 0 && h) h = h.next
|
||||
return h ? h.data : undefined
|
||||
}
|
||||
|
||||
, slice: function (n, m) {
|
||||
if (!n) n = 0
|
||||
if (!m) m = this.length
|
||||
if (m < 0) m = this.length + m
|
||||
if (n < 0) n = this.length + n
|
||||
|
||||
if (m === n) {
|
||||
return []
|
||||
}
|
||||
|
||||
if (m < n) {
|
||||
throw new Error("invalid offset: "+n+","+m+" (length="+this.length+")")
|
||||
}
|
||||
|
||||
var len = m - n
|
||||
, ret = new Array(len)
|
||||
, i = 0
|
||||
, h = this._head
|
||||
while (n-- > 0 && h) h = h.next
|
||||
while (i < len && h) {
|
||||
ret[i++] = h.data
|
||||
h = h.next
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
, drop: function () {
|
||||
FastList.call(this)
|
||||
}
|
||||
|
||||
, forEach: function (fn, thisp) {
|
||||
var p = this._head
|
||||
, i = 0
|
||||
, len = this.length
|
||||
while (i < len && p) {
|
||||
fn.call(thisp || this, p.data, i, this)
|
||||
p = p.next
|
||||
i ++
|
||||
}
|
||||
}
|
||||
|
||||
, map: function (fn, thisp) {
|
||||
var n = new FastList()
|
||||
this.forEach(function (v, i, me) {
|
||||
n.push(fn.call(thisp || me, v, i, me))
|
||||
})
|
||||
return n
|
||||
}
|
||||
|
||||
, filter: function (fn, thisp) {
|
||||
var n = new FastList()
|
||||
this.forEach(function (v, i, me) {
|
||||
if (fn.call(thisp || me, v, i, me)) n.push(v)
|
||||
})
|
||||
return n
|
||||
}
|
||||
|
||||
, reduce: function (fn, val, thisp) {
|
||||
var i = 0
|
||||
, p = this._head
|
||||
, len = this.length
|
||||
if (!val) {
|
||||
i = 1
|
||||
val = p && p.data
|
||||
p = p && p.next
|
||||
}
|
||||
while (i < len && p) {
|
||||
val = fn.call(thisp || this, val, p.data, this)
|
||||
i ++
|
||||
p = p.next
|
||||
}
|
||||
return val
|
||||
}
|
||||
}
|
||||
|
||||
if ("undefined" !== typeof(exports)) module.exports = FastList
|
||||
else if ("function" === typeof(define) && define.amd) {
|
||||
define("FastList", function() { return FastList })
|
||||
} else (function () { return this })().FastList = FastList
|
||||
|
||||
})()
|
20
node_modules/glob/node_modules/fast-list/package.json
generated
vendored
Normal file
20
node_modules/glob/node_modules/fast-list/package.json
generated
vendored
Normal file
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
|
||||
"name": "fast-list",
|
||||
"description": "A fast linked list (good for queues, stacks, etc.)",
|
||||
"version": "1.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/isaacs/fast-list.git"
|
||||
},
|
||||
"main": "fast-list.js",
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"bench": "~0.3.2",
|
||||
"tap": "~0.1.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "tap test.js",
|
||||
"bench": "node bench.js"
|
||||
}
|
||||
}
|
112
node_modules/glob/node_modules/fast-list/test.js
generated
vendored
Normal file
112
node_modules/glob/node_modules/fast-list/test.js
generated
vendored
Normal file
|
@ -0,0 +1,112 @@
|
|||
var tap = require("tap")
|
||||
, test = tap.test
|
||||
, FastList = require("./fast-list.js")
|
||||
|
||||
test(function (t) {
|
||||
var list = new FastList()
|
||||
list.push("foo")
|
||||
t.equal(list._head, list._tail, "should have only one thing")
|
||||
list.push("bar")
|
||||
list.push("baz")
|
||||
list.push("boo")
|
||||
list.push("asd")
|
||||
list.push("dsa")
|
||||
list.push("elf")
|
||||
list.push("fro")
|
||||
list.push("gap")
|
||||
list.push("hoo")
|
||||
list.push("ike")
|
||||
list.push("jut")
|
||||
list.push("kni")
|
||||
list.push("lam")
|
||||
list.push("mut")
|
||||
list.push("nop")
|
||||
list.push("orc")
|
||||
t.equal(list.length, 17, "length = 17")
|
||||
t.equal(list.pop(), "orc", "pop orc")
|
||||
t.equal(list.shift(), "foo", "shift foo")
|
||||
t.equal(list.length, 15, "length = 15")
|
||||
|
||||
t.equal(list.item(0), "bar", "item 0 = bar")
|
||||
|
||||
t.equal(list.item(1), "baz", "item 1 = baz")
|
||||
t.equal(list.item(-0), "bar", "item -0 = bar")
|
||||
t.equal(list.item(-1), "nop", "item -1 = nop")
|
||||
|
||||
t.equal(list.item(5), "elf", "item 5 = elf")
|
||||
|
||||
t.deepEqual(list.slice(),
|
||||
["bar"
|
||||
,"baz"
|
||||
,"boo"
|
||||
,"asd"
|
||||
,"dsa"
|
||||
,"elf"
|
||||
,"fro"
|
||||
,"gap"
|
||||
,"hoo"
|
||||
,"ike"
|
||||
,"jut"
|
||||
,"kni"
|
||||
,"lam"
|
||||
,"mut"
|
||||
,"nop"], "slice()")
|
||||
|
||||
t.deepEqual(list.slice(0), list.slice(), "slice(0) == slice()")
|
||||
|
||||
t.deepEqual(list.slice(0, 1), ["bar"], "slice(0, 1)")
|
||||
|
||||
t.deepEqual(list.slice(5, 10),
|
||||
["elf"
|
||||
,"fro"
|
||||
,"gap"
|
||||
,"hoo"
|
||||
,"ike"], "slice(5, 10)")
|
||||
|
||||
t.deepEqual(list.slice(5, -2),
|
||||
["elf"
|
||||
,"fro"
|
||||
,"gap"
|
||||
,"hoo"
|
||||
,"ike"
|
||||
,"jut"
|
||||
,"kni"
|
||||
,"lam"], "slice(5, -2)")
|
||||
|
||||
t.deepEqual(list.slice(-4),
|
||||
["kni"
|
||||
,"lam"
|
||||
,"mut"
|
||||
,"nop"], "slice(-4)")
|
||||
|
||||
// verify that map, reduce, and filter all match their
|
||||
// array counterparts. This implies forEach coverage as well,
|
||||
// since map and filter rely on it.
|
||||
function reduce (l, r) {
|
||||
l[r] = true
|
||||
return l
|
||||
}
|
||||
t.deepEqual( list.reduce(reduce, {})
|
||||
, list.slice().reduce(reduce, {})
|
||||
, "reduce")
|
||||
|
||||
// filter out the first three items
|
||||
function filter (v) {
|
||||
return v.charAt(0) !== "b"
|
||||
}
|
||||
t.deepEqual( list.filter(filter).slice()
|
||||
, list.slice().filter(filter)
|
||||
, "filter")
|
||||
|
||||
// double all the items
|
||||
function map (v) {
|
||||
return v + v
|
||||
}
|
||||
t.deepEqual( list.map(map).slice()
|
||||
, list.slice().map(map)
|
||||
, "map")
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
|
1
node_modules/glob/node_modules/graceful-fs/.npmignore
generated
vendored
Normal file
1
node_modules/glob/node_modules/graceful-fs/.npmignore
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
node_modules/
|
23
node_modules/glob/node_modules/graceful-fs/LICENSE
generated
vendored
Normal file
23
node_modules/glob/node_modules/graceful-fs/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
|
|||
Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
|
||||
All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
5
node_modules/glob/node_modules/graceful-fs/README.md
generated
vendored
Normal file
5
node_modules/glob/node_modules/graceful-fs/README.md
generated
vendored
Normal file
|
@ -0,0 +1,5 @@
|
|||
Just like node's `fs` module, but it does an incremental back-off when
|
||||
EMFILE is encountered.
|
||||
|
||||
Useful in asynchronous situations where one needs to try to open lots
|
||||
and lots of files.
|
256
node_modules/glob/node_modules/graceful-fs/graceful-fs.js
generated
vendored
Normal file
256
node_modules/glob/node_modules/graceful-fs/graceful-fs.js
generated
vendored
Normal file
|
@ -0,0 +1,256 @@
|
|||
// this keeps a queue of opened file descriptors, and will make
|
||||
// fs operations wait until some have closed before trying to open more.
|
||||
|
||||
var fs = require("fs")
|
||||
|
||||
// there is such a thing as TOO graceful.
|
||||
if (fs.open === gracefulOpen) return
|
||||
|
||||
var FastList = require("fast-list")
|
||||
, queue = new FastList()
|
||||
, curOpen = 0
|
||||
, constants = require("constants")
|
||||
|
||||
|
||||
exports = module.exports = fs
|
||||
|
||||
|
||||
fs.MIN_MAX_OPEN = 64
|
||||
fs.MAX_OPEN = 1024
|
||||
|
||||
var originalOpen = fs.open
|
||||
, originalOpenSync = fs.openSync
|
||||
, originalClose = fs.close
|
||||
, originalCloseSync = fs.closeSync
|
||||
|
||||
|
||||
// prevent EMFILE errors
|
||||
function OpenReq (path, flags, mode, cb) {
|
||||
this.path = path
|
||||
this.flags = flags
|
||||
this.mode = mode
|
||||
this.cb = cb
|
||||
}
|
||||
|
||||
function noop () {}
|
||||
|
||||
fs.open = gracefulOpen
|
||||
|
||||
function gracefulOpen (path, flags, mode, cb) {
|
||||
if (typeof mode === "function") cb = mode, mode = null
|
||||
if (typeof cb !== "function") cb = noop
|
||||
|
||||
if (curOpen >= fs.MAX_OPEN) {
|
||||
queue.push(new OpenReq(path, flags, mode, cb))
|
||||
setTimeout(flush)
|
||||
return
|
||||
}
|
||||
open(path, flags, mode, function (er, fd) {
|
||||
if (er && er.code === "EMFILE" && curOpen > fs.MIN_MAX_OPEN) {
|
||||
// that was too many. reduce max, get back in queue.
|
||||
// this should only happen once in a great while, and only
|
||||
// if the ulimit -n is set lower than 1024.
|
||||
fs.MAX_OPEN = curOpen - 1
|
||||
return fs.open(path, flags, mode, cb)
|
||||
}
|
||||
cb(er, fd)
|
||||
})
|
||||
}
|
||||
|
||||
function open (path, flags, mode, cb) {
|
||||
cb = cb || noop
|
||||
curOpen ++
|
||||
originalOpen.call(fs, path, flags, mode, function (er, fd) {
|
||||
if (er) {
|
||||
onclose()
|
||||
}
|
||||
|
||||
cb(er, fd)
|
||||
})
|
||||
}
|
||||
|
||||
fs.openSync = function (path, flags, mode) {
|
||||
curOpen ++
|
||||
return originalOpenSync.call(fs, path, flags, mode)
|
||||
}
|
||||
|
||||
function onclose () {
|
||||
curOpen --
|
||||
flush()
|
||||
}
|
||||
|
||||
function flush () {
|
||||
while (curOpen < fs.MAX_OPEN) {
|
||||
var req = queue.shift()
|
||||
if (!req) break
|
||||
open(req.path, req.flags || "r", req.mode || 0777, req.cb)
|
||||
}
|
||||
if (queue.length === 0) return
|
||||
}
|
||||
|
||||
fs.close = function (fd, cb) {
|
||||
cb = cb || noop
|
||||
originalClose.call(fs, fd, function (er) {
|
||||
onclose()
|
||||
cb(er)
|
||||
})
|
||||
}
|
||||
|
||||
fs.closeSync = function (fd) {
|
||||
onclose()
|
||||
return originalCloseSync.call(fs, fd)
|
||||
}
|
||||
|
||||
|
||||
// (re-)implement some things that are known busted or missing.
|
||||
|
||||
var constants = require("constants")
|
||||
|
||||
// lchmod, broken prior to 0.6.2
|
||||
// back-port the fix here.
|
||||
if (constants.hasOwnProperty('O_SYMLINK') &&
|
||||
process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
|
||||
fs.lchmod = function (path, mode, callback) {
|
||||
callback = callback || noop
|
||||
fs.open( path
|
||||
, constants.O_WRONLY | constants.O_SYMLINK
|
||||
, mode
|
||||
, function (err, fd) {
|
||||
if (err) {
|
||||
callback(err)
|
||||
return
|
||||
}
|
||||
// prefer to return the chmod error, if one occurs,
|
||||
// but still try to close, and report closing errors if they occur.
|
||||
fs.fchmod(fd, mode, function (err) {
|
||||
fs.close(fd, function(err2) {
|
||||
callback(err || err2)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fs.lchmodSync = function (path, mode) {
|
||||
var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
|
||||
|
||||
// prefer to return the chmod error, if one occurs,
|
||||
// but still try to close, and report closing errors if they occur.
|
||||
var err, err2
|
||||
try {
|
||||
var ret = fs.fchmodSync(fd, mode)
|
||||
} catch (er) {
|
||||
err = er
|
||||
}
|
||||
try {
|
||||
fs.closeSync(fd)
|
||||
} catch (er) {
|
||||
err2 = er
|
||||
}
|
||||
if (err || err2) throw (err || err2)
|
||||
return ret
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// lstat on windows, missing from early 0.5 versions
|
||||
// replacing with stat isn't quite perfect, but good enough to get by.
|
||||
if (process.platform === "win32" && !process.binding("fs").lstat) {
|
||||
fs.lstat = fs.stat
|
||||
fs.lstatSync = fs.statSync
|
||||
}
|
||||
|
||||
|
||||
// lutimes implementation, or no-op
|
||||
if (!fs.lutimes) {
|
||||
if (constants.hasOwnProperty("O_SYMLINK")) {
|
||||
fs.lutimes = function (path, at, mt, cb) {
|
||||
fs.open(path, constants.O_SYMLINK, function (er, fd) {
|
||||
cb = cb || noop
|
||||
if (er) return cb(er)
|
||||
fs.futimes(fd, at, mt, function (er) {
|
||||
fs.close(fd, function (er2) {
|
||||
return cb(er || er2)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fs.lutimesSync = function (path, at, mt) {
|
||||
var fd = fs.openSync(path, constants.O_SYMLINK)
|
||||
, err
|
||||
, err2
|
||||
, ret
|
||||
|
||||
try {
|
||||
var ret = fs.futimesSync(fd, at, mt)
|
||||
} catch (er) {
|
||||
err = er
|
||||
}
|
||||
try {
|
||||
fs.closeSync(fd)
|
||||
} catch (er) {
|
||||
err2 = er
|
||||
}
|
||||
if (err || err2) throw (err || err2)
|
||||
return ret
|
||||
}
|
||||
|
||||
} else if (fs.utimensat && constants.hasOwnProperty("AT_SYMLINK_NOFOLLOW")) {
|
||||
// maybe utimensat will be bound soonish?
|
||||
fs.lutimes = function (path, at, mt, cb) {
|
||||
fs.utimensat(path, at, mt, constants.AT_SYMLINK_NOFOLLOW, cb)
|
||||
}
|
||||
|
||||
fs.lutimesSync = function (path, at, mt) {
|
||||
return fs.utimensatSync(path, at, mt, constants.AT_SYMLINK_NOFOLLOW)
|
||||
}
|
||||
|
||||
} else {
|
||||
fs.lutimes = function (_a, _b, _c, cb) { process.nextTick(cb) }
|
||||
fs.lutimesSync = function () {}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// https://github.com/isaacs/node-graceful-fs/issues/4
|
||||
// Chown should not fail on einval or eperm if non-root.
|
||||
|
||||
fs.chown = chownFix(fs.chown)
|
||||
fs.fchown = chownFix(fs.fchown)
|
||||
fs.lchown = chownFix(fs.lchown)
|
||||
|
||||
fs.chownSync = chownFixSync(fs.chownSync)
|
||||
fs.fchownSync = chownFixSync(fs.fchownSync)
|
||||
fs.lchownSync = chownFixSync(fs.lchownSync)
|
||||
|
||||
function chownFix (orig) {
|
||||
if (!orig) return orig
|
||||
return function (target, uid, gid, cb) {
|
||||
return orig.call(fs, target, uid, gid, function (er, res) {
|
||||
if (chownErOk(er)) er = null
|
||||
cb(er, res)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function chownFixSync (orig) {
|
||||
if (!orig) return orig
|
||||
return function (target, uid, gid) {
|
||||
try {
|
||||
return orig.call(fs, target, uid, gid)
|
||||
} catch (er) {
|
||||
if (!chownErOk(er)) throw er
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function chownErOk (er) {
|
||||
// if there's no getuid, or if getuid() is something other than 0,
|
||||
// and the error is EINVAL or EPERM, then just ignore it.
|
||||
// This specific case is a silent failure in cp, install, tar,
|
||||
// and most other unix tools that manage permissions.
|
||||
// When running as root, or if other types of errors are encountered,
|
||||
// then it's strict.
|
||||
if (!er || (!process.getuid || process.getuid() !== 0)
|
||||
&& (er.code === "EINVAL" || er.code === "EPERM")) return true
|
||||
}
|

18 node_modules/glob/node_modules/graceful-fs/package.json generated vendored Normal file
@@ -0,0 +1,18 @@
{
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
  "name": "graceful-fs",
  "description": "fs monkey-patching to avoid EMFILE and other problems",
  "version": "1.1.5",
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/node-graceful-fs.git"
  },
  "main": "graceful-fs.js",
  "engines": {
    "node": ">=0.4.0"
  },
  "dependencies": {
    "fast-list": "1"
  },
  "devDependencies": {}
}

51 node_modules/glob/node_modules/inherits/README.md generated vendored Normal file
@@ -0,0 +1,51 @@
A dead simple way to do inheritance in JS.

    var inherits = require("inherits")

    function Animal () {
      this.alive = true
    }
    Animal.prototype.say = function (what) {
      console.log(what)
    }

    inherits(Dog, Animal)
    function Dog () {
      Dog.super.apply(this)
    }
    Dog.prototype.sniff = function () {
      this.say("sniff sniff")
    }
    Dog.prototype.bark = function () {
      this.say("woof woof")
    }

    inherits(Chihuahua, Dog)
    function Chihuahua () {
      Chihuahua.super.apply(this)
    }
    Chihuahua.prototype.bark = function () {
      this.say("yip yip")
    }

    // also works
    function Cat () {
      Cat.super.apply(this)
    }
    Cat.prototype.hiss = function () {
      this.say("CHSKKSS!!")
    }
    inherits(Cat, Animal, {
      meow: function () { this.say("miao miao") }
    })
    Cat.prototype.purr = function () {
      this.say("purr purr")
    }


    var c = new Chihuahua
    assert(c instanceof Chihuahua)
    assert(c instanceof Dog)
    assert(c instanceof Animal)

The actual function is laughably small. 10-lines small.

29 node_modules/glob/node_modules/inherits/inherits.js generated vendored Normal file
@@ -0,0 +1,29 @@
module.exports = inherits

function inherits (c, p, proto) {
  proto = proto || {}
  var e = {}
  ;[c.prototype, proto].forEach(function (s) {
    Object.getOwnPropertyNames(s).forEach(function (k) {
      e[k] = Object.getOwnPropertyDescriptor(s, k)
    })
  })
  c.prototype = Object.create(p.prototype, e)
  c.super = p
}

//function Child () {
//  Child.super.call(this)
//  console.error([this
//                ,this.constructor
//                ,this.constructor === Child
//                ,this.constructor.super === Parent
//                ,Object.getPrototypeOf(this) === Child.prototype
//                ,Object.getPrototypeOf(Object.getPrototypeOf(this))
//                 === Parent.prototype
//                ,this instanceof Child
//                ,this instanceof Parent])
//}
//function Parent () {}
//inherits(Child, Parent)
//new Child

7 node_modules/glob/node_modules/inherits/package.json generated vendored Normal file
@@ -0,0 +1,7 @@
{ "name" : "inherits"
, "description": "A tiny simple way to do classic inheritance in js"
, "version" : "1.0.0"
, "keywords" : ["inheritance", "class", "klass", "oop", "object-oriented"]
, "main" : "./inherits.js"
, "repository" : "https://github.com/isaacs/inherits"
, "author" : "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)" }

28 node_modules/glob/package.json generated vendored Normal file
@@ -0,0 +1,28 @@
{
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "name": "glob",
  "description": "a little globber",
  "version": "3.0.1",
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/node-glob.git"
  },
  "main": "glob.js",
  "engines": {
    "node": "*"
  },
  "dependencies": {
    "fast-list":"1",
    "minimatch": "0.1",
    "graceful-fs": "~1.1.2",
    "inherits": "1"
  },
  "devDependencies": {
    "tap": "0.1",
    "mkdirp":"0.2",
    "rimraf":"1"
  },
  "scripts": {
    "test": "tap test/*.js"
  }
}

61 node_modules/glob/test/00-setup.js generated vendored Normal file
@@ -0,0 +1,61 @@
// just a little pre-run script to set up the fixtures.
// zz-finish cleans it up

var mkdirp = require("mkdirp")
var path = require("path")
var i = 0
var tap = require("tap")
var fs = require("fs")
var rimraf = require("rimraf")

var files =
  [ "a/.abcdef/x/y/z/a"
  , "a/abcdef/g/h"
  , "a/abcfed/g/h"
  , "a/b/c/d"
  , "a/bc/e/f"
  , "a/c/d/c/b"
  , "a/cb/e/f"
  ]

var symlinkTo = path.resolve(__dirname, "a/symlink/a/b/c")
var symlinkFrom = "../.."

files = files.map(function (f) {
  return path.resolve(__dirname, f)
})

tap.test("remove fixtures", function (t) {
  rimraf(path.resolve(__dirname, "a"), function (er) {
    t.ifError(er, "remove fixtures")
    t.end()
  })
})

files.forEach(function (f) {
  tap.test(f, function (t) {
    var d = path.dirname(f)
    mkdirp(d, 0755, function (er) {
      if (er) {
        t.fail(er)
        return t.bailout()
      }
      fs.writeFile(f, "i like tests", function (er) {
        t.ifError(er, "make file")
        t.end()
      })
    })
  })
})

tap.test("symlinky", function (t) {
  var d = path.dirname(symlinkTo)
  console.error("mkdirp", d)
  mkdirp(d, 0755, function (er) {
    t.ifError(er)
    fs.symlink(symlinkFrom, symlinkTo, function (er) {
      t.ifError(er, "make symlink")
      t.end()
    })
  })
})

103 node_modules/glob/test/bash-comparison.js generated vendored Normal file
@@ -0,0 +1,103 @@
// basic test
|
||||
// show that it does the same thing by default as the shell.
|
||||
var tap = require("tap")
|
||||
, child_process = require("child_process")
|
||||
|
||||
// put more patterns here.
|
||||
, globs =
|
||||
["test/a/*/+(c|g)/./d"
|
||||
,"test/a/**/[cg]/../[cg]"
|
||||
,"test/a/{b,c,d,e,f}/**/g"
|
||||
,"test/a/b/**"
|
||||
,"test/**/g"
|
||||
,"test/a/abc{fed,def}/g/h"
|
||||
,"test/a/abc{fed/g,def}/**/"
|
||||
,"test/a/abc{fed/g,def}/**///**/"
|
||||
,"test/**/a/**/"
|
||||
,"test/+(a|b|c)/a{/,bc*}/**"
|
||||
,"test/*/*/*/f"
|
||||
,"test/**/f"
|
||||
,"test/a/symlink/a/b/c/a/b/c/a/b/c//a/b/c////a/b/c/**/b/c/**"
|
||||
,"{./*/*,/usr/local/*}"
|
||||
,"{/*,*}" // evil owl face! how you taunt me!
|
||||
]
|
||||
, glob = require("../")
|
||||
, path = require("path")
|
||||
|
||||
// run from the root of the project
|
||||
// this is usually where you're at anyway, but be sure.
|
||||
process.chdir(path.resolve(__dirname, ".."))
|
||||
|
||||
function alphasort (a, b) {
|
||||
a = a.toLowerCase()
|
||||
b = b.toLowerCase()
|
||||
return a > b ? 1 : a < b ? -1 : 0
|
||||
}
|
||||
|
||||
globs.forEach(function (pattern) {
|
||||
var echoOutput
|
||||
tap.test(pattern, function (t) {
|
||||
var bashPattern = pattern //.replace(/(\(|\||\))/g, "\\$1")
|
||||
, cmd = "shopt -s globstar && " +
|
||||
"shopt -s extglob && " +
|
||||
"shopt -s nullglob && " +
|
||||
// "shopt >&2; " +
|
||||
"eval \'for i in " + bashPattern + "; do echo $i; done\'"
|
||||
, cp = child_process.spawn("bash", ["-c",cmd])
|
||||
, out = []
|
||||
, globResult
|
||||
cp.stdout.on("data", function (c) {
|
||||
out.push(c)
|
||||
})
|
||||
cp.stderr.on("data", function (c) {
|
||||
process.stderr.write(c)
|
||||
})
|
||||
cp.on("exit", function () {
|
||||
echoOutput = flatten(out)
|
||||
if (!echoOutput) echoOutput = []
|
||||
else {
|
||||
echoOutput = echoOutput.split(/\r*\n/).map(function (m) {
|
||||
// Bash is a oddly inconsistent with slashes in the
|
||||
// the results. This implementation is a bit more
|
||||
// normalized. Account for this in the test results.
|
||||
return m.replace(/\/+/g, "/").replace(/\/$/, "")
|
||||
}).sort(alphasort).reduce(function (set, f) {
|
||||
if (f !== set[set.length - 1]) set.push(f)
|
||||
return set
|
||||
}, [])
|
||||
}
|
||||
next()
|
||||
})
|
||||
|
||||
glob(pattern, function (er, matches) {
|
||||
t.ifError(er, pattern + " should not error")
|
||||
globResult = matches
|
||||
next()
|
||||
})
|
||||
|
||||
function next () {
|
||||
if (!echoOutput || !globResult) return
|
||||
|
||||
t.deepEqual(globResult, echoOutput, "should match shell")
|
||||
t.end()
|
||||
}
|
||||
})
|
||||
|
||||
tap.test(pattern + " sync", function (t) {
|
||||
t.deepEqual(glob.sync(pattern), echoOutput, "should match shell")
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
function flatten (chunks) {
|
||||
var s = 0
|
||||
chunks.forEach(function (c) { s += c.length })
|
||||
var out = new Buffer(s)
|
||||
s = 0
|
||||
chunks.forEach(function (c) {
|
||||
c.copy(out, s)
|
||||
s += c.length
|
||||
})
|
||||
|
||||
return out.toString().trim()
|
||||
}
|

11 node_modules/glob/test/zz-cleanup.js generated vendored Normal file
@@ -0,0 +1,11 @@
// remove the fixtures
var tap = require("tap")
  , rimraf = require("rimraf")
  , path = require("path")

tap.test("cleanup fixtures", function (t) {
  rimraf(path.resolve(__dirname, "a"), function (er) {
    t.ifError(er, "removed")
    t.end()
  })
})

4 node_modules/minimatch/.travis.yml generated vendored Normal file
@@ -0,0 +1,4 @@
language: node_js
node_js:
  - 0.4
  - 0.6

23 node_modules/minimatch/LICENSE generated vendored Normal file
@@ -0,0 +1,23 @@
Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
|
||||
All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|

212 node_modules/minimatch/README.md generated vendored Normal file
@@ -0,0 +1,212 @@
# minimatch
|
||||
|
||||
A minimal matching utility.
|
||||
|
||||
[](http://travis-ci.org/isaacs/minimatch)
|
||||
|
||||
|
||||
This is the matching library used internally by npm.
|
||||
|
||||
Eventually, it will replace the C binding in node-glob.
|
||||
|
||||
It works by converting glob expressions into JavaScript `RegExp`
|
||||
objects.
|
||||
|
||||
## Usage
|
||||
|
||||
```javascript
|
||||
var minimatch = require("minimatch")
|
||||
|
||||
minimatch("bar.foo", "*.foo") // true!
|
||||
minimatch("bar.foo", "*.bar") // false!
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
Supports these glob features:
|
||||
|
||||
* Brace Expansion
|
||||
* Extended glob matching
|
||||
* "Globstar" `**` matching
|
||||
|
||||
See:
|
||||
|
||||
* `man sh`
|
||||
* `man bash`
|
||||
* `man 3 fnmatch`
|
||||
* `man 5 gitignore`
|
||||
|
||||
### Comparisons to other fnmatch/glob implementations
|
||||
|
||||
While strict compliance with the existing standards is a worthwhile
|
||||
goal, some discrepancies exist between minimatch and other
|
||||
implementations, and are intentional.
|
||||
|
||||
If the pattern starts with a `!` character, then it is negated. Set the
|
||||
`nonegate` flag to suppress this behavior, and treat leading `!`
|
||||
characters normally. This is perhaps relevant if you wish to start the
|
||||
pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
|
||||
characters at the start of a pattern will negate the pattern multiple
|
||||
times.
|
||||
|
||||
If a pattern starts with `#`, then it is treated as a comment, and
|
||||
will not match anything. Use `\#` to match a literal `#` at the
|
||||
start of a line, or set the `nocomment` flag to suppress this behavior.
|
||||
|
||||
The double-star character `**` is supported by default, unless the
|
||||
`noglobstar` flag is set. This is supported in the manner of bsdglob
|
||||
and bash 4.1, where `**` only has special significance if it is the only
|
||||
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
|
||||
`a/**b` will not. **Note that this is different from the way that `**` is
|
||||
handled by ruby's `Dir` class.**
|
||||
|
||||
If an escaped pattern has no matches, and the `null` flag is not set,
|
||||
then minimatch.match returns the pattern as-provided, rather than
|
||||
interpreting the character escapes. For example,
|
||||
`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
|
||||
`"*a?"`.
|
||||
|
||||
If brace expansion is not disabled, then it is performed before any
|
||||
other interpretation of the glob pattern. Thus, a pattern like
|
||||
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
|
||||
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
|
||||
checked for validity. Since those two are valid, matching proceeds.
|
||||
|
||||
|
||||
## Minimatch Class
|
||||
|
||||
Create a minimatch object by instanting the `minimatch.Minimatch` class.
|
||||
|
||||
```javascript
|
||||
var Minimatch = require("minimatch").Minimatch
|
||||
var mm = new Minimatch(pattern, options)
|
||||
```
|
||||
|
||||
### Properties
|
||||
|
||||
* `pattern` The original pattern the minimatch object represents.
|
||||
* `options` The options supplied to the constructor.
|
||||
* `set` A 2-dimensional array of regexp or string expressions.
|
||||
Each row in the
|
||||
array corresponds to a brace-expanded pattern. Each item in the row
|
||||
corresponds to a single path-part. For example, the pattern
|
||||
`{a,b/c}/d` would expand to a set of patterns like:
|
||||
|
||||
[ [ a, d ]
|
||||
, [ b, c, d ] ]
|
||||
|
||||
If a portion of the pattern doesn't have any "magic" in it
|
||||
(that is, it's something like `"foo"` rather than `fo*o?`), then it
|
||||
will be left as a string rather than converted to a regular
|
||||
expression.
|
||||
|
||||
* `regexp` Created by the `makeRe` method. A single regular expression
|
||||
expressing the entire pattern. This is useful in cases where you wish
|
||||
to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.
|
||||
* `negate` True if the pattern is negated.
|
||||
* `comment` True if the pattern is a comment.
|
||||
* `empty` True if the pattern is `""`.
|
||||
|
||||
### Methods
|
||||
|
||||
* `makeRe` Generate the `regexp` member if necessary, and return it.
|
||||
Will return `false` if the pattern is invalid.
|
||||
* `match(fname)` Return true if the filename matches the pattern, or
|
||||
false otherwise.
|
||||
* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
|
||||
filename, and match it against a single row in the `regExpSet`. This
|
||||
method is mainly for internal use, but is exposed so that it can be
|
||||
used by a glob-walker that needs to avoid excessive filesystem calls.
|
||||
|
||||
All other methods are internal, and will be called as necessary.
|
||||
|
||||
## Functions
|
||||
|
||||
The top-level exported function has a `cache` property, which is an LRU
|
||||
cache set to store 100 items. So, calling these methods repeatedly
|
||||
with the same pattern and options will use the same Minimatch object,
|
||||
saving the cost of parsing it multiple times.
|
||||
|
||||
### minimatch(path, pattern, options)
|
||||
|
||||
Main export. Tests a path against the pattern using the options.
|
||||
|
||||
```javascript
|
||||
var isJS = minimatch(file, "*.js", { matchBase: true })
|
||||
```
|
||||
|
||||
### minimatch.filter(pattern, options)
|
||||
|
||||
Returns a function that tests its
|
||||
supplied argument, suitable for use with `Array.filter`. Example:
|
||||
|
||||
```javascript
|
||||
var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
|
||||
```
|
||||
|
||||
### minimatch.match(list, pattern, options)
|
||||
|
||||
Match against the list of
|
||||
files, in the style of fnmatch or glob. If nothing is matched, then
|
||||
return the pattern (unless `{ null: true }` in the options.)
|
||||
|
||||
```javascript
|
||||
var javascripts = minimatch.match(fileList, "*.js", {matchBase: true}))
|
||||
```
|
||||
|
||||
### minimatch.makeRe(pattern, options)
|
||||
|
||||
Make a regular expression object from the pattern.
|
||||
|
||||
## Options
|
||||
|
||||
All options are `false` by default.
|
||||
|
||||
### debug
|
||||
|
||||
Dump a ton of stuff to stderr.
|
||||
|
||||
### nobrace
|
||||
|
||||
Do not expand `{a,b}` and `{1..3}` brace sets.
|
||||
|
||||
### noglobstar
|
||||
|
||||
Disable `**` matching against multiple folder names.
|
||||
|
||||
### dot
|
||||
|
||||
Allow patterns to match filenames starting with a period, even if
|
||||
the pattern does not explicitly have a period in that spot.
|
||||
|
||||
Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
|
||||
is set.
|
||||
|
||||
### noext
|
||||
|
||||
Disable "extglob" style patterns like `+(a|b)`.
|
||||
|
||||
### nocase
|
||||
|
||||
Perform a case-insensitive match.
|
||||
|
||||
### nonull
|
||||
|
||||
When a match is not found by `minimatch.match`, return a list containing
|
||||
the pattern itself. When set, an empty list is returned if there are
|
||||
no matches.
|
||||
|
||||
### matchBase
|
||||
|
||||
If set, then patterns without slashes will be matched
|
||||
against the basename of the path if it contains slashes. For example,
|
||||
`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
|
||||
|
||||
### nocomment
|
||||
|
||||
Suppress the behavior of treating `#` at the start of a pattern as a
|
||||
comment.
|
||||
|
||||
### nonegate
|
||||
|
||||
Suppress the behavior of treating a leading `!` character as negation.
|
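The options described above can be combined per call. A short illustrative sketch; the file names are made up and the expected results follow the behaviour documented in this README:

```js
var minimatch = require("minimatch")

minimatch("a/.d/b", "a/*/b")                          // false: dot files are skipped
minimatch("a/.d/b", "a/*/b", { dot: true })           // true
minimatch("Foo.JS", "*.js", { nocase: true })         // true
minimatch("/xyz/123/acb", "a?b", { matchBase: true }) // true: matches the basename
minimatch("!important", "!imp*", { nonegate: true })  // true: leading "!" is literal
```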

1010 node_modules/minimatch/minimatch.js generated vendored Normal file
File diff suppressed because it is too large.

1 node_modules/minimatch/node_modules/lru-cache/.npmignore generated vendored Normal file
@@ -0,0 +1 @@
/node_modules

23 node_modules/minimatch/node_modules/lru-cache/LICENSE generated vendored Normal file
@@ -0,0 +1,23 @@
Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
|
||||
All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|

12 node_modules/minimatch/node_modules/lru-cache/README.md generated vendored Normal file
@@ -0,0 +1,12 @@
# lru cache

A cache object that deletes the least-recently-used items.

Usage:

    var LRU = require("lru-cache")
      , cache = LRU(10) // max 10 items. default = Infinity
    cache.set("key", "value")
    cache.get("key") // "value"

RTFS for more info.
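A slightly fuller sketch of the same API, based on the methods defined in lib/lru-cache.js below (`set`, `get`, `del`, `reset`, and the resizable `maxLength` property); the keys are made up:

```js
var LRU = require("lru-cache")
  , cache = LRU(2)       // keep at most 2 items

cache.set("a", 1)
cache.set("b", 2)
cache.get("a")           // 1 -- "a" becomes the most recently used
cache.set("c", 3)        // evicts "b", the least recently used
cache.get("b")           // undefined
cache.del("a")           // explicit removal
cache.maxLength = 100    // raising the limit keeps current items
cache.reset()            // empty the cache; maxLength is unchanged
```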

100 node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js generated vendored Normal file
@@ -0,0 +1,100 @@
;(function () { // closure for web browsers
|
||||
|
||||
if (module) {
|
||||
module.exports = LRUCache
|
||||
} else {
|
||||
// just set the global for non-node platforms.
|
||||
;(function () { return this })().LRUCache = LRUCache
|
||||
}
|
||||
|
||||
function hOP (obj, key) {
|
||||
return Object.prototype.hasOwnProperty.call(obj, key)
|
||||
}
|
||||
|
||||
function LRUCache (maxLength) {
|
||||
if (!(this instanceof LRUCache)) {
|
||||
return new LRUCache(maxLength)
|
||||
}
|
||||
var cache = {} // hash of items by key
|
||||
, lruList = {} // list of items in order of use recency
|
||||
, lru = 0 // least recently used
|
||||
, mru = 0 // most recently used
|
||||
, length = 0 // number of items in the list
|
||||
|
||||
// resize the cache when the maxLength changes.
|
||||
Object.defineProperty(this, "maxLength",
|
||||
{ set : function (mL) {
|
||||
if (!mL || !(typeof mL === "number") || mL <= 0 ) mL = Infinity
|
||||
maxLength = mL
|
||||
// if it gets above double maxLength, trim right away.
|
||||
// otherwise, do it whenever it's convenient.
|
||||
if (length > maxLength) trim()
|
||||
}
|
||||
, get : function () { return maxLength }
|
||||
, enumerable : true
|
||||
})
|
||||
|
||||
this.maxLength = maxLength
|
||||
|
||||
Object.defineProperty(this, "length",
|
||||
{ get : function () { return length }
|
||||
, enumerable : true
|
||||
})
|
||||
|
||||
this.reset = function () {
|
||||
cache = {}
|
||||
lruList = {}
|
||||
lru = 0
|
||||
mru = 0
|
||||
length = 0
|
||||
}
|
||||
|
||||
this.set = function (key, value) {
|
||||
if (hOP(cache, key)) {
|
||||
this.get(key)
|
||||
cache[key].value = value
|
||||
return undefined
|
||||
}
|
||||
var hit = {key:key, value:value, lu:mru++}
|
||||
lruList[hit.lu] = cache[key] = hit
|
||||
length ++
|
||||
if (length > maxLength) trim()
|
||||
}
|
||||
|
||||
this.get = function (key) {
|
||||
if (!hOP(cache, key)) return undefined
|
||||
var hit = cache[key]
|
||||
delete lruList[hit.lu]
|
||||
if (hit.lu === lru) lruWalk()
|
||||
hit.lu = mru ++
|
||||
lruList[hit.lu] = hit
|
||||
return hit.value
|
||||
}
|
||||
|
||||
this.del = function (key) {
|
||||
if (!hOP(cache, key)) return undefined
|
||||
var hit = cache[key]
|
||||
delete cache[key]
|
||||
delete lruList[hit.lu]
|
||||
if (hit.lu === lru) lruWalk()
|
||||
length --
|
||||
}
|
||||
|
||||
function lruWalk () {
|
||||
// lru has been deleted, hop up to the next hit.
|
||||
lru = Object.keys(lruList).shift()
|
||||
}
|
||||
|
||||
function trim () {
|
||||
if (length <= maxLength) return undefined
|
||||
var prune = Object.keys(lruList).slice(0, length - maxLength)
|
||||
for (var i = 0, l = (length - maxLength); i < l; i ++) {
|
||||
delete cache[ lruList[prune[i]].key ]
|
||||
delete lruList[prune[i]]
|
||||
}
|
||||
length = maxLength
|
||||
lruWalk()
|
||||
}
|
||||
}
|
||||
|
||||
})()
|

13 node_modules/minimatch/node_modules/lru-cache/package.json generated vendored Normal file
@@ -0,0 +1,13 @@
{ "name": "lru-cache"
, "description": "A cache object that deletes the least-recently-used items."
, "version": "1.0.5"
, "author": "Isaac Z. Schlueter <i@izs.me>"
, "scripts": { "test": "tap test" }
, "main": "lib/lru-cache.js"
, "repository": "git://github.com/isaacs/node-lru-cache.git"
, "devDependencies": { "tap": "0.1" }
, "license":
  { "type": "MIT"
  , "url": "http://github.com/isaacs/node-lru-cache/raw/master/LICENSE"
  }
}

93 node_modules/minimatch/node_modules/lru-cache/test/basic.js generated vendored Normal file
@@ -0,0 +1,93 @@
var test = require('tap').test
|
||||
, LRU = require('../')
|
||||
|
||||
test('basic', function (t) {
|
||||
var cache = new LRU(10)
|
||||
cache.set("key", "value")
|
||||
t.equal(cache.get("key"), "value")
|
||||
t.equal(cache.get("nada"), undefined)
|
||||
t.equal(cache.length, 1)
|
||||
t.equal(cache.maxLength, 10)
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('least recently set', function (t) {
|
||||
var cache = new LRU(2)
|
||||
cache.set("a", "A")
|
||||
cache.set("b", "B")
|
||||
cache.set("c", "C")
|
||||
t.equal(cache.get("c"), "C")
|
||||
t.equal(cache.get("b"), "B")
|
||||
t.equal(cache.get("a"), undefined)
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('lru recently gotten', function (t) {
|
||||
var cache = new LRU(2)
|
||||
cache.set("a", "A")
|
||||
cache.set("b", "B")
|
||||
cache.get("a")
|
||||
cache.set("c", "C")
|
||||
t.equal(cache.get("c"), "C")
|
||||
t.equal(cache.get("b"), undefined)
|
||||
t.equal(cache.get("a"), "A")
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('del', function (t) {
|
||||
var cache = new LRU(2)
|
||||
cache.set("a", "A")
|
||||
cache.del("a")
|
||||
t.equal(cache.get("a"), undefined)
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('maxLength', function (t) {
|
||||
var cache = new LRU(3)
|
||||
|
||||
// test changing the maxLength, verify that the LRU items get dropped.
|
||||
cache.maxLength = 100
|
||||
for (var i = 0; i < 100; i ++) cache.set(i, i)
|
||||
t.equal(cache.length, 100)
|
||||
for (var i = 0; i < 100; i ++) {
|
||||
t.equal(cache.get(i), i)
|
||||
}
|
||||
cache.maxLength = 3
|
||||
t.equal(cache.length, 3)
|
||||
for (var i = 0; i < 97; i ++) {
|
||||
t.equal(cache.get(i), undefined)
|
||||
}
|
||||
for (var i = 98; i < 100; i ++) {
|
||||
t.equal(cache.get(i), i)
|
||||
}
|
||||
|
||||
// now remove the maxLength restriction, and try again.
|
||||
cache.maxLength = "hello"
|
||||
for (var i = 0; i < 100; i ++) cache.set(i, i)
|
||||
t.equal(cache.length, 100)
|
||||
for (var i = 0; i < 100; i ++) {
|
||||
t.equal(cache.get(i), i)
|
||||
}
|
||||
// should trigger an immediate resize
|
||||
cache.maxLength = 3
|
||||
t.equal(cache.length, 3)
|
||||
for (var i = 0; i < 97; i ++) {
|
||||
t.equal(cache.get(i), undefined)
|
||||
}
|
||||
for (var i = 98; i < 100; i ++) {
|
||||
t.equal(cache.get(i), i)
|
||||
}
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('reset', function (t) {
|
||||
var cache = new LRU(10)
|
||||
cache.set("a", "A")
|
||||
cache.set("b", "B")
|
||||
cache.reset()
|
||||
t.equal(cache.length, 0)
|
||||
t.equal(cache.maxLength, 10)
|
||||
t.equal(cache.get("a"), undefined)
|
||||
t.equal(cache.get("b"), undefined)
|
||||
t.end()
|
||||
})
|

29 node_modules/minimatch/package.json generated vendored Normal file
@@ -0,0 +1,29 @@
{
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
  "name": "minimatch",
  "description": "a glob matcher in javascript",
  "version": "0.1.5",
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/minimatch.git"
  },
  "main": "minimatch.js",
  "scripts": {
    "test": "tap test"
  },
  "engines": {
    "node": "*"
  },
  "dependencies": {
    "lru-cache": "~1.0.5"
  },
  "devDependencies": {
    "tap": "~0.1.3"
  },
  "licenses" : [
    {
      "type" : "MIT",
      "url" : "http://github.com/isaacs/minimatch/raw/master/LICENSE"
    }
  ]
}

261 node_modules/minimatch/test/basic.js generated vendored Normal file
@@ -0,0 +1,261 @@
// http://www.bashcookbook.com/bashinfo/source/bash-1.14.7/tests/glob-test
|
||||
//
|
||||
// TODO: Some of these tests do very bad things with backslashes, and will
|
||||
// most likely fail badly on windows. They should probably be skipped.
|
||||
|
||||
var tap = require("tap")
|
||||
, globalBefore = Object.keys(global)
|
||||
, mm = require("../")
|
||||
, files = [ "a", "b", "c", "d", "abc"
|
||||
, "abd", "abe", "bb", "bcd"
|
||||
, "ca", "cb", "dd", "de"
|
||||
, "bdir/", "bdir/cfile"]
|
||||
, next = files.concat([ "a-b", "aXb"
|
||||
, ".x", ".y" ])
|
||||
|
||||
tap.test("basic tests", function (t) {
|
||||
var start = Date.now()
|
||||
|
||||
// [ pattern, [matches], MM opts, files, TAP opts]
|
||||
; [ "http://www.bashcookbook.com/bashinfo" +
|
||||
"/source/bash-1.14.7/tests/glob-test"
|
||||
, ["a*", ["a", "abc", "abd", "abe"]]
|
||||
, ["X*", ["X*"], {nonull: true}]
|
||||
|
||||
// allow null glob expansion
|
||||
, ["X*", []]
|
||||
|
||||
// isaacs: Slightly different than bash/sh/ksh
|
||||
// \\* is not un-escaped to literal "*" in a failed match,
|
||||
// but it does make it get treated as a literal star
|
||||
, ["\\*", ["\\*"], {nonull: true}]
|
||||
, ["\\**", ["\\**"], {nonull: true}]
|
||||
, ["\\*\\*", ["\\*\\*"], {nonull: true}]
|
||||
|
||||
, ["b*/", ["bdir/"]]
|
||||
, ["c*", ["c", "ca", "cb"]]
|
||||
, ["**", files]
|
||||
|
||||
, ["\\.\\./*/", ["\\.\\./*/"], {nonull: true}]
|
||||
, ["s/\\..*//", ["s/\\..*//"], {nonull: true}]
|
||||
|
||||
, "legendary larry crashes bashes"
|
||||
, ["/^root:/{s/^[^:]*:[^:]*:\([^:]*\).*$/\\1/"
|
||||
, ["/^root:/{s/^[^:]*:[^:]*:\([^:]*\).*$/\\1/"], {nonull: true}]
|
||||
, ["/^root:/{s/^[^:]*:[^:]*:\([^:]*\).*$/\1/"
|
||||
, ["/^root:/{s/^[^:]*:[^:]*:\([^:]*\).*$/\1/"], {nonull: true}]
|
||||
|
||||
, "character classes"
|
||||
, ["[a-c]b*", ["abc", "abd", "abe", "bb", "cb"]]
|
||||
, ["[a-y]*[^c]", ["abd", "abe", "bb", "bcd",
|
||||
"bdir/", "ca", "cb", "dd", "de"]]
|
||||
, ["a*[^c]", ["abd", "abe"]]
|
||||
, function () { files.push("a-b", "aXb") }
|
||||
, ["a[X-]b", ["a-b", "aXb"]]
|
||||
, function () { files.push(".x", ".y") }
|
||||
, ["[^a-c]*", ["d", "dd", "de"]]
|
||||
, function () { files.push("a*b/", "a*b/ooo") }
|
||||
, ["a\\*b/*", ["a*b/ooo"]]
|
||||
, ["a\\*?/*", ["a*b/ooo"]]
|
||||
, ["*\\\\!*", [], {null: true}, ["echo !7"]]
|
||||
, ["*\\!*", ["echo !7"], null, ["echo !7"]]
|
||||
, ["*.\\*", ["r.*"], null, ["r.*"]]
|
||||
, ["a[b]c", ["abc"]]
|
||||
, ["a[\\b]c", ["abc"]]
|
||||
, ["a?c", ["abc"]]
|
||||
, ["a\\*c", [], {null: true}, ["abc"]]
|
||||
, ["", [""], { null: true }, [""]]
|
||||
|
||||
, "http://www.opensource.apple.com/source/bash/bash-23/" +
|
||||
"bash/tests/glob-test"
|
||||
, function () { files.push("man/", "man/man1/", "man/man1/bash.1") }
|
||||
, ["*/man*/bash.*", ["man/man1/bash.1"]]
|
||||
, ["man/man1/bash.1", ["man/man1/bash.1"]]
|
||||
, ["a***c", ["abc"], null, ["abc"]]
|
||||
, ["a*****?c", ["abc"], null, ["abc"]]
|
||||
, ["?*****??", ["abc"], null, ["abc"]]
|
||||
, ["*****??", ["abc"], null, ["abc"]]
|
||||
, ["?*****?c", ["abc"], null, ["abc"]]
|
||||
, ["?***?****c", ["abc"], null, ["abc"]]
|
||||
, ["?***?****?", ["abc"], null, ["abc"]]
|
||||
, ["?***?****", ["abc"], null, ["abc"]]
|
||||
, ["*******c", ["abc"], null, ["abc"]]
|
||||
, ["*******?", ["abc"], null, ["abc"]]
|
||||
, ["a*cd**?**??k", ["abcdecdhjk"], null, ["abcdecdhjk"]]
|
||||
, ["a**?**cd**?**??k", ["abcdecdhjk"], null, ["abcdecdhjk"]]
|
||||
, ["a**?**cd**?**??k***", ["abcdecdhjk"], null, ["abcdecdhjk"]]
|
||||
, ["a**?**cd**?**??***k", ["abcdecdhjk"], null, ["abcdecdhjk"]]
|
||||
, ["a**?**cd**?**??***k**", ["abcdecdhjk"], null, ["abcdecdhjk"]]
|
||||
, ["a****c**?**??*****", ["abcdecdhjk"], null, ["abcdecdhjk"]]
|
||||
, ["[-abc]", ["-"], null, ["-"]]
|
||||
, ["[abc-]", ["-"], null, ["-"]]
|
||||
, ["\\", ["\\"], null, ["\\"]]
|
||||
, ["[\\\\]", ["\\"], null, ["\\"]]
|
||||
, ["[[]", ["["], null, ["["]]
|
||||
, ["[", ["["], null, ["["]]
|
||||
, ["[*", ["[abc"], null, ["[abc"]]
|
||||
, "a right bracket shall lose its special meaning and\n" +
|
||||
"represent itself in a bracket expression if it occurs\n" +
|
||||
"first in the list. -- POSIX.2 2.8.3.2"
|
||||
, ["[]]", ["]"], null, ["]"]]
|
||||
, ["[]-]", ["]"], null, ["]"]]
|
||||
, ["[a-\z]", ["p"], null, ["p"]]
|
||||
, ["??**********?****?", [], { null: true }, ["abc"]]
|
||||
, ["??**********?****c", [], { null: true }, ["abc"]]
|
||||
, ["?************c****?****", [], { null: true }, ["abc"]]
|
||||
, ["*c*?**", [], { null: true }, ["abc"]]
|
||||
, ["a*****c*?**", [], { null: true }, ["abc"]]
|
||||
, ["a********???*******", [], { null: true }, ["abc"]]
|
||||
, ["[]", [], { null: true }, ["a"]]
|
||||
, ["[abc", [], { null: true }, ["["]]
|
||||
|
||||
, "nocase tests"
|
||||
, ["XYZ", ["xYz"], { nocase: true, null: true }
|
||||
, ["xYz", "ABC", "IjK"]]
|
||||
, ["ab*", ["ABC"], { nocase: true, null: true }
|
||||
, ["xYz", "ABC", "IjK"]]
|
||||
, ["[ia]?[ck]", ["ABC", "IjK"], { nocase: true, null: true }
|
||||
, ["xYz", "ABC", "IjK"]]
|
||||
|
||||
// [ pattern, [matches], MM opts, files, TAP opts]
|
||||
, "onestar/twostar"
|
||||
, ["{/*,*}", [], {null: true}, ["/asdf/asdf/asdf"]]
|
||||
, ["{/?,*}", ["/a", "bb"], {null: true}
|
||||
, ["/a", "/b/b", "/a/b/c", "bb"]]
|
||||
|
||||
, "dots should not match unless requested"
|
||||
, ["**", ["a/b"], {}, ["a/b", "a/.d", ".a/.d"]]
|
||||
|
||||
// .. and . can only match patterns starting with .,
|
||||
// even when options.dot is set.
|
||||
, function () {
|
||||
files = ["a/./b", "a/../b", "a/c/b", "a/.d/b"]
|
||||
}
|
||||
, ["a/*/b", ["a/c/b", "a/.d/b"], {dot: true}]
|
||||
, ["a/.*/b", ["a/./b", "a/../b", "a/.d/b"], {dot: true}]
|
||||
, ["a/*/b", ["a/c/b"], {dot:false}]
|
||||
, ["a/.*/b", ["a/./b", "a/../b", "a/.d/b"], {dot: false}]
|
||||
|
||||
|
||||
// this also tests that changing the options needs
|
||||
// to change the cache key, even if the pattern is
|
||||
// the same!
|
||||
, ["**", ["a/b","a/.d",".a/.d"], { dot: true }
|
||||
, [ ".a/.d", "a/.d", "a/b"]]
|
||||
|
||||
, "paren sets cannot contain slashes"
|
||||
, ["*(a/b)", ["*(a/b)"], {nonull: true}, ["a/b"]]
|
||||
|
||||
// brace sets trump all else.
|
||||
//
|
||||
// invalid glob pattern. fails on bash4 and bsdglob.
|
||||
// however, in this implementation, it's easier just
|
||||
// to do the intuitive thing, and let brace-expansion
|
||||
// actually come before parsing any extglob patterns,
|
||||
// like the documentation seems to say.
|
||||
//
|
||||
// XXX: if anyone complains about this, either fix it
|
||||
// or tell them to grow up and stop complaining.
|
||||
//
|
||||
// bash/bsdglob says this:
|
||||
// , ["*(a|{b),c)}", ["*(a|{b),c)}"], {}, ["a", "ab", "ac", "ad"]]
|
||||
// but we do this instead:
|
||||
, ["*(a|{b),c)}", ["a", "ab", "ac"], {}, ["a", "ab", "ac", "ad"]]
|
||||
|
||||
// test partial parsing in the presence of comment/negation chars
|
||||
, ["[!a*", ["[!ab"], {}, ["[!ab", "[ab"]]
|
||||
, ["[#a*", ["[#ab"], {}, ["[#ab", "[ab"]]
|
||||
|
||||
// like: {a,b|c\\,d\\\|e} except it's unclosed, so it has to be escaped.
|
||||
, ["+(a|*\\|c\\\\|d\\\\\\|e\\\\\\\\|f\\\\\\\\\\|g"
|
||||
, ["+(a|b\\|c\\\\|d\\\\|e\\\\\\\\|f\\\\\\\\|g"]
|
||||
, {}
|
||||
, ["+(a|b\\|c\\\\|d\\\\|e\\\\\\\\|f\\\\\\\\|g", "a", "b\\c"]]
|
||||
|
||||
|
||||
// crazy nested {,,} and *(||) tests.
|
||||
, function () {
|
||||
files = [ "a", "b", "c", "d"
|
||||
, "ab", "ac", "ad"
|
||||
, "bc", "cb"
|
||||
, "bc,d", "c,db", "c,d"
|
||||
, "d)", "(b|c", "*(b|c"
|
||||
, "b|c", "b|cc", "cb|c"
|
||||
, "x(a|b|c)", "x(a|c)"
|
||||
, "(a|b|c)", "(a|c)"]
|
||||
}
|
||||
, ["*(a|{b,c})", ["a", "b", "c", "ab", "ac"]]
|
||||
, ["{a,*(b|c,d)}", ["a","(b|c", "*(b|c", "d)"]]
|
||||
// a
|
||||
// *(b|c)
|
||||
// *(b|d)
|
||||
, ["{a,*(b|{c,d})}", ["a","b", "bc", "cb", "c", "d"]]
|
||||
, ["*(a|{b|c,c})", ["a", "b", "c", "ab", "ac", "bc", "cb"]]
|
||||
|
||||
|
||||
// test various flag settings.
|
||||
, [ "*(a|{b|c,c})", ["x(a|b|c)", "x(a|c)", "(a|b|c)", "(a|c)"]
|
||||
, { noext: true } ]
|
||||
, ["a?b", ["x/y/acb", "acb/"], {matchBase: true}
|
||||
, ["x/y/acb", "acb/", "acb/d/e", "x/y/acb/d"] ]
|
||||
, ["#*", ["#a", "#b"], {nocomment: true}, ["#a", "#b", "c#d"]]
|
||||
|
||||
|
||||
// begin channelling Boole and deMorgan...
|
||||
, "negation tests"
|
||||
, function () {
|
||||
files = ["d", "e", "!ab", "!abc", "a!b", "\\!a"]
|
||||
}
|
||||
|
||||
// anything that is NOT a* matches.
|
||||
, ["!a*", ["\\!a", "d", "e", "!ab", "!abc"]]
|
||||
|
||||
// anything that IS !a* matches.
|
||||
, ["!a*", ["!ab", "!abc"], {nonegate: true}]
|
||||
|
||||
// anything that IS a* matches
|
||||
, ["!!a*", ["a!b"]]
|
||||
|
||||
// anything that is NOT !a* matches
|
||||
, ["!\\!a*", ["a!b", "d", "e", "\\!a"]]
|
||||
|
||||
].forEach(function (c) {
|
||||
if (typeof c === "function") return c()
|
||||
if (typeof c === "string") return t.comment(c)
|
||||
|
||||
var pattern = c[0]
|
||||
, expect = c[1].sort(alpha)
|
||||
, options = c[2] || {}
|
||||
, f = c[3] || files
|
||||
, tapOpts = c[4] || {}
|
||||
|
||||
// options.debug = true
|
||||
var m = new mm.Minimatch(pattern, options)
|
||||
var r = m.makeRe()
|
||||
tapOpts.re = String(r) || JSON.stringify(r)
|
||||
tapOpts.files = JSON.stringify(f)
|
||||
tapOpts.pattern = pattern
|
||||
tapOpts.set = m.set
|
||||
tapOpts.negated = m.negate
|
||||
|
||||
var actual = mm.match(f, pattern, options)
|
||||
actual.sort(alpha)
|
||||
|
||||
t.equivalent( actual, expect
|
||||
, JSON.stringify(pattern) + " " + JSON.stringify(expect)
|
||||
, tapOpts )
|
||||
})
|
||||
|
||||
t.comment("time=" + (Date.now() - start) + "ms")
|
||||
t.end()
|
||||
})
|
||||
|
||||
tap.test("global leak test", function (t) {
|
||||
var globalAfter = Object.keys(global)
|
||||
t.equivalent(globalAfter, globalBefore, "no new globals, please")
|
||||
t.end()
|
||||
})
|
||||
|
||||
function alpha (a, b) {
|
||||
return a > b ? 1 : -1
|
||||
}
|

33 node_modules/minimatch/test/brace-expand.js generated vendored Normal file
@@ -0,0 +1,33 @@
var tap = require("tap")
  , minimatch = require("../")

tap.test("brace expansion", function (t) {
  // [ pattern, [expanded] ]
  ; [ [ "a{b,c{d,e},{f,g}h}x{y,z}"
      , [ "abxy"
        , "abxz"
        , "acdxy"
        , "acdxz"
        , "acexy"
        , "acexz"
        , "afhxy"
        , "afhxz"
        , "aghxy"
        , "aghxz" ] ]
    , [ "a{1..5}b"
      , [ "a1b"
        , "a2b"
        , "a3b"
        , "a4b"
        , "a5b" ] ]
    , [ "a{b}c", ["a{b}c"] ]
    ].forEach(function (tc) {
      var p = tc[0]
        , expect = tc[1]
      t.equivalent(minimatch.braceExpand(p), expect, p)
    })
  console.error("ending")
  t.end()
})



14 node_modules/minimatch/test/caching.js generated vendored Normal file
@@ -0,0 +1,14 @@
var Minimatch = require("../minimatch.js").Minimatch
var tap = require("tap")
tap.test("cache test", function (t) {
  var mm1 = new Minimatch("a?b")
  var mm2 = new Minimatch("a?b")
  t.equal(mm1, mm2, "should get the same object")
  // the lru should drop it after 100 entries
  for (var i = 0; i < 100; i ++) {
    new Minimatch("a"+i)
  }
  mm2 = new Minimatch("a?b")
  t.notEqual(mm1, mm2, "cache should have dropped")
  t.end()
})

2 node_modules/mkdirp/.gitignore.orig generated vendored Normal file
@@ -0,0 +1,2 @@
node_modules/
npm-debug.log

5 node_modules/mkdirp/.gitignore.rej generated vendored Normal file
@@ -0,0 +1,5 @@
--- /dev/null
+++ .gitignore
@@ -0,0 +1,2 @@
+node_modules/
+npm-debug.log

2 node_modules/mkdirp/.npmignore generated vendored Normal file
@@ -0,0 +1,2 @@
node_modules/
npm-debug.log

21 node_modules/mkdirp/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
Copyright 2010 James Halliday (mail@substack.net)
|
||||
|
||||
This project is free software released under the MIT/X11 license:
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|

54 node_modules/mkdirp/README.markdown generated vendored Normal file
@@ -0,0 +1,54 @@
mkdirp
======

Like `mkdir -p`, but in node.js!

example
=======

pow.js
------
    var mkdirp = require('mkdirp');

    mkdirp('/tmp/foo/bar/baz', function (err) {
        if (err) console.error(err)
        else console.log('pow!')
    });

Output
    pow!

And now /tmp/foo/bar/baz exists, huzzah!

methods
=======

var mkdirp = require('mkdirp');

mkdirp(dir, mode, cb)
---------------------

Create a new directory and any necessary subdirectories at `dir` with octal
permission string `mode`.

If `mode` isn't specified, it defaults to `0777 & (~process.umask())`.

mkdirp.sync(dir, mode)
----------------------

Synchronously create a new directory and any necessary subdirectories at `dir`
with octal permission string `mode`.

If `mode` isn't specified, it defaults to `0777 & (~process.umask())`.

install
=======

With [npm](http://npmjs.org) do:

    npm install mkdirp

license
=======

MIT/X11
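A small sketch combining the two calls documented above; the target paths are hypothetical:

```js
var mkdirp = require('mkdirp');

// async, with an explicit octal mode
mkdirp('/tmp/some/nested/dir', 0755, function (err) {
    if (err) console.error(err)
    else console.log('created')
});

// sync variant; when mode is omitted it defaults to 0777 & (~process.umask())
mkdirp.sync('/tmp/another/nested/dir');
```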

6 node_modules/mkdirp/examples/pow.js generated vendored Normal file
@@ -0,0 +1,6 @@
var mkdirp = require('mkdirp');

mkdirp('/tmp/foo/bar/baz', function (err) {
    if (err) console.error(err)
    else console.log('pow!')
});

6 node_modules/mkdirp/examples/pow.js.orig generated vendored Normal file
@@ -0,0 +1,6 @@
var mkdirp = require('mkdirp');

mkdirp('/tmp/foo/bar/baz', 0755, function (err) {
    if (err) console.error(err)
    else console.log('pow!')
});

19 node_modules/mkdirp/examples/pow.js.rej generated vendored Normal file
@@ -0,0 +1,19 @@
--- examples/pow.js
+++ examples/pow.js
@@ -1,6 +1,15 @@
-var mkdirp = require('mkdirp').mkdirp;
+var mkdirp = require('../').mkdirp,
+    mkdirpSync = require('../').mkdirpSync;
 
 mkdirp('/tmp/foo/bar/baz', 0755, function (err) {
     if (err) console.error(err)
     else console.log('pow!')
 });
+
+try {
+    mkdirpSync('/tmp/bar/foo/baz', 0755);
+    console.log('double pow!');
+}
+catch (ex) {
+    console.log(ex);
+}

79 node_modules/mkdirp/index.js generated vendored Normal file
@@ -0,0 +1,79 @@
var path = require('path');
|
||||
var fs = require('fs');
|
||||
|
||||
module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;
|
||||
|
||||
function mkdirP (p, mode, f) {
|
||||
if (typeof mode === 'function' || mode === undefined) {
|
||||
f = mode;
|
||||
mode = 0777 & (~process.umask());
|
||||
}
|
||||
|
||||
var cb = f || function () {};
|
||||
if (typeof mode === 'string') mode = parseInt(mode, 8);
|
||||
p = path.resolve(p);
|
||||
|
||||
fs.mkdir(p, mode, function (er) {
|
||||
if (!er) return cb();
|
||||
switch (er.code) {
|
||||
case 'ENOENT':
|
||||
mkdirP(path.dirname(p), mode, function (er) {
|
||||
if (er) cb(er);
|
||||
else mkdirP(p, mode, cb);
|
||||
});
|
||||
break;
|
||||
|
||||
case 'EEXIST':
|
||||
fs.stat(p, function (er2, stat) {
|
||||
// if the stat fails, then that's super weird.
|
||||
// let the original EEXIST be the failure reason.
|
||||
if (er2 || !stat.isDirectory()) cb(er)
|
||||
else cb();
|
||||
});
|
||||
break;
|
||||
|
||||
default:
|
||||
cb(er);
|
||||
break;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
mkdirP.sync = function sync (p, mode) {
|
||||
if (mode === undefined) {
|
||||
mode = 0777 & (~process.umask());
|
||||
}
|
||||
|
||||
if (typeof mode === 'string') mode = parseInt(mode, 8);
|
||||
p = path.resolve(p);
|
||||
|
||||
try {
|
||||
fs.mkdirSync(p, mode)
|
||||
}
|
||||
catch (err0) {
|
||||
switch (err0.code) {
|
||||
case 'ENOENT' :
|
||||
var err1 = sync(path.dirname(p), mode)
|
||||
if (err1) throw err1;
|
||||
else return sync(p, mode);
|
||||
break;
|
||||
|
||||
case 'EEXIST' :
|
||||
var stat;
|
||||
try {
|
||||
stat = fs.statSync(p);
|
||||
}
|
||||
catch (err1) {
|
||||
throw err0
|
||||
}
|
||||
if (!stat.isDirectory()) throw err0;
|
||||
else return null;
|
||||
break;
|
||||
default :
|
||||
throw err0
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|

23 node_modules/mkdirp/package.json generated vendored Normal file
@@ -0,0 +1,23 @@
{
    "name" : "mkdirp",
    "description" : "Recursively mkdir, like `mkdir -p`",
    "version" : "0.3.0",
    "author" : "James Halliday <mail@substack.net> (http://substack.net)",
    "main" : "./index",
    "keywords" : [
        "mkdir",
        "directory"
    ],
    "repository" : {
        "type" : "git",
        "url" : "http://github.com/substack/node-mkdirp.git"
    },
    "scripts" : {
        "test" : "tap test/*.js"
    },
    "devDependencies" : {
        "tap" : "0.0.x"
    },
    "license" : "MIT/X11",
    "engines": { "node": "*" }
}

38 node_modules/mkdirp/test/chmod.js generated vendored Normal file
@@ -0,0 +1,38 @@
var mkdirp = require('../').mkdirp;
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
var ps = [ '', 'tmp' ];
|
||||
|
||||
for (var i = 0; i < 25; i++) {
|
||||
var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
ps.push(dir);
|
||||
}
|
||||
|
||||
var file = ps.join('/');
|
||||
|
||||
test('chmod-pre', function (t) {
|
||||
var mode = 0744
|
||||
mkdirp(file, mode, function (er) {
|
||||
t.ifError(er, 'should not error');
|
||||
fs.stat(file, function (er, stat) {
|
||||
t.ifError(er, 'should exist');
|
||||
t.ok(stat && stat.isDirectory(), 'should be directory');
|
||||
t.equal(stat && stat.mode & 0777, mode, 'should be 0744');
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('chmod', function (t) {
|
||||
var mode = 0755
|
||||
mkdirp(file, mode, function (er) {
|
||||
t.ifError(er, 'should not error');
|
||||
fs.stat(file, function (er, stat) {
|
||||
t.ifError(er, 'should exist');
|
||||
t.ok(stat && stat.isDirectory(), 'should be directory');
|
||||
t.end();
|
||||
});
|
||||
});
|
||||
});
|

37 node_modules/mkdirp/test/clobber.js generated vendored Normal file
@@ -0,0 +1,37 @@
var mkdirp = require('../').mkdirp;
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
var ps = [ '', 'tmp' ];
|
||||
|
||||
for (var i = 0; i < 25; i++) {
|
||||
var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
ps.push(dir);
|
||||
}
|
||||
|
||||
var file = ps.join('/');
|
||||
|
||||
// a file in the way
|
||||
var itw = ps.slice(0, 3).join('/');
|
||||
|
||||
|
||||
test('clobber-pre', function (t) {
|
||||
console.error("about to write to "+itw)
|
||||
fs.writeFileSync(itw, 'I AM IN THE WAY, THE TRUTH, AND THE LIGHT.');
|
||||
|
||||
fs.stat(itw, function (er, stat) {
|
||||
t.ifError(er)
|
||||
t.ok(stat && stat.isFile(), 'should be file')
|
||||
t.end()
|
||||
})
|
||||
})
|
||||
|
||||
test('clobber', function (t) {
|
||||
t.plan(2);
|
||||
mkdirp(file, 0755, function (err) {
|
||||
t.ok(err);
|
||||
t.equal(err.code, 'ENOTDIR');
|
||||
t.end();
|
||||
});
|
||||
});
|

28 node_modules/mkdirp/test/mkdirp.js generated vendored Normal file
@@ -0,0 +1,28 @@
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('woo', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|

32 node_modules/mkdirp/test/perm.js generated vendored Normal file
@@ -0,0 +1,32 @@
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('async perm', function (t) {
|
||||
t.plan(2);
|
||||
var file = '/tmp/' + (Math.random() * (1<<30)).toString(16);
|
||||
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
test('async root perm', function (t) {
|
||||
mkdirp('/tmp', 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
t.end();
|
||||
});
|
||||
t.end();
|
||||
});
|

39 node_modules/mkdirp/test/perm_sync.js generated vendored Normal file
@@ -0,0 +1,39 @@
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('sync perm', function (t) {
|
||||
t.plan(2);
|
||||
var file = '/tmp/' + (Math.random() * (1<<30)).toString(16) + '.json';
|
||||
|
||||
mkdirp.sync(file, 0755);
|
||||
path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
test('sync root perm', function (t) {
|
||||
t.plan(1);
|
||||
|
||||
var file = '/tmp';
|
||||
mkdirp.sync(file, 0755);
|
||||
path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
});
|
||||
});
|

41 node_modules/mkdirp/test/race.js generated vendored Normal file
@@ -0,0 +1,41 @@
var mkdirp = require('../').mkdirp;
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('race', function (t) {
|
||||
t.plan(4);
|
||||
var ps = [ '', 'tmp' ];
|
||||
|
||||
for (var i = 0; i < 25; i++) {
|
||||
var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
ps.push(dir);
|
||||
}
|
||||
var file = ps.join('/');
|
||||
|
||||
var res = 2;
|
||||
mk(file, function () {
|
||||
if (--res === 0) t.end();
|
||||
});
|
||||
|
||||
mk(file, function () {
|
||||
if (--res === 0) t.end();
|
||||
});
|
||||
|
||||
function mk (file, cb) {
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
if (cb) cb();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
}
|
||||
});
|

32 node_modules/mkdirp/test/rel.js generated vendored Normal file
@@ -0,0 +1,32 @@
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('rel', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var cwd = process.cwd();
|
||||
process.chdir('/tmp');
|
||||
|
||||
var file = [x,y,z].join('/');
|
||||
|
||||
mkdirp(file, 0755, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
process.chdir(cwd);
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
27
node_modules/mkdirp/test/sync.js
generated
vendored
Normal file
27
node_modules/mkdirp/test/sync.js
generated
vendored
Normal file
|
@ -0,0 +1,27 @@
|
|||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('sync', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
var err = mkdirp.sync(file, 0755);
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0755);
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
28
node_modules/mkdirp/test/umask.js
generated
vendored
Normal file
28
node_modules/mkdirp/test/umask.js
generated
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('implicit mode from umask', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
mkdirp(file, function (err) {
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, 0777 & (~process.umask()));
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
27
node_modules/mkdirp/test/umask_sync.js
generated
vendored
Normal file
27
node_modules/mkdirp/test/umask_sync.js
generated
vendored
Normal file
|
@ -0,0 +1,27 @@
|
|||
var mkdirp = require('../');
|
||||
var path = require('path');
|
||||
var fs = require('fs');
|
||||
var test = require('tap').test;
|
||||
|
||||
test('umask sync modes', function (t) {
|
||||
t.plan(2);
|
||||
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
|
||||
|
||||
var file = '/tmp/' + [x,y,z].join('/');
|
||||
|
||||
var err = mkdirp.sync(file);
|
||||
if (err) t.fail(err);
|
||||
else path.exists(file, function (ex) {
|
||||
if (!ex) t.fail('file not created')
|
||||
else fs.stat(file, function (err, stat) {
|
||||
if (err) t.fail(err)
|
||||
else {
|
||||
t.equal(stat.mode & 0777, (0777 & (~process.umask())));
|
||||
t.ok(stat.isDirectory(), 'target not a directory');
|
||||
t.end();
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
0
node_modules/nopt/.npmignore
generated
vendored
Normal file
0
node_modules/nopt/.npmignore
generated
vendored
Normal file
23
node_modules/nopt/LICENSE
generated
vendored
Normal file
23
node_modules/nopt/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
All rights reserved.

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
208
node_modules/nopt/README.md
generated
vendored
Normal file
208
node_modules/nopt/README.md
generated
vendored
Normal file
|
@ -0,0 +1,208 @@
If you want to write an option parser, and have it be good, there are
two ways to do it. The Right Way, and the Wrong Way.

The Wrong Way is to sit down and write an option parser. We've all done
that.

The Right Way is to write some complex configurable program with so many
options that you go half-insane just trying to manage them all, and put
it off with duct-tape solutions until you see exactly to the core of the
problem, and finally snap and write an awesome option parser.

If you want to write an option parser, don't write an option parser.
Write a package manager, or a source control system, or a service
restarter, or an operating system. You probably won't end up with a
good one of those, but if you don't give up, and you are relentless and
diligent enough in your procrastination, you may just end up with a very
nice option parser.

## USAGE

    // my-program.js
    var nopt = require("nopt")
      , Stream = require("stream").Stream
      , path = require("path")
      , knownOpts = { "foo" : [String, null]
                    , "bar" : [Stream, Number]
                    , "baz" : path
                    , "bloo" : [ "big", "medium", "small" ]
                    , "flag" : Boolean
                    , "pick" : Boolean
                    , "many" : [String, Array]
                    }
      , shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
                     , "b7" : ["--bar", "7"]
                     , "m" : ["--bloo", "medium"]
                     , "p" : ["--pick"]
                     , "f" : ["--flag"]
                     }
      // everything is optional.
      // knownOpts and shorthands default to {}
      // arg list defaults to process.argv
      // slice defaults to 2
      , parsed = nopt(knownOpts, shortHands, process.argv, 2)
    console.log(parsed)

This would give you support for any of the following:

```bash
$ node my-program.js --foo "blerp" --no-flag
{ "foo" : "blerp", "flag" : false }

$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag
{ bar: 7, foo: "Mr. Hand", flag: true }

$ node my-program.js --foo "blerp" -f -----p
{ foo: "blerp", flag: true, pick: true }

$ node my-program.js -fp --foofoo
{ foo: "Mr. Foo", flag: true, pick: true }

$ node my-program.js --foofoo -- -fp # -- stops the flag parsing.
{ foo: "Mr. Foo", argv: { remain: ["-fp"] } }

$ node my-program.js --blatzk 1000 -fp # unknown opts are ok.
{ blatzk: 1000, flag: true, pick: true }

$ node my-program.js --blatzk true -fp # but they need a value
{ blatzk: true, flag: true, pick: true }

$ node my-program.js --no-blatzk -fp # unless they start with "no-"
{ blatzk: false, flag: true, pick: true }

$ node my-program.js --baz b/a/z # known paths are resolved.
{ baz: "/Users/isaacs/b/a/z" }

# if Array is one of the types, then it can take many
# values, and will always be an array. The other types provided
# specify what types are allowed in the list.

$ node my-program.js --many 1 --many null --many foo
{ many: ["1", "null", "foo"] }

$ node my-program.js --many foo
{ many: ["foo"] }
```

Read the tests at the bottom of `lib/nopt.js` for more examples of
what this puppy can do.

## Types

The following types are supported, and defined on `nopt.typeDefs`

* String: A normal string. No parsing is done.
* path: A file system path. Gets resolved against cwd if not absolute.
* url: A url. If it doesn't parse, it isn't accepted.
* Number: Must be numeric.
* Date: Must parse as a date. If it does, and `Date` is one of the options,
  then it will return a Date object, not a string.
* Boolean: Must be either `true` or `false`. If an option is a boolean,
  then it does not need a value, and its presence will imply `true` as
  the value. To negate boolean flags, do `--no-whatever` or `--whatever
  false`
* NaN: Means that the option is strictly not allowed. Any value will
  fail.
* Stream: An object matching the "Stream" class in node. Valuable
  for use when validating programmatically. (npm uses this to let you
  supply any WriteStream on the `outfd` and `logfd` config options.)
* Array: If `Array` is specified as one of the types, then the value
  will be parsed as a list of options. This means that multiple values
  can be specified, and that the value will always be an array.

If a type is an array of values not on this list, then those are
considered valid values. For instance, in the example above, the
`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`,
and any other value will be rejected.

When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be
interpreted as their JavaScript equivalents, and numeric values will be
interpreted as a number.

You can also mix types and values, or multiple types, in a list. For
instance `{ blah: [Number, null] }` would allow a value to be set to
either a Number or null.

To define a new type, add it to `nopt.typeDefs`. Each item in that
hash is an object with a `type` member and a `validate` method. The
`type` member is an object that matches what goes in the type list. The
`validate` method is a function that gets called with `validate(data,
key, val)`. Validate methods should assign `data[key]` to the valid
value of `val` if it can be handled properly, or return boolean
`false` if it cannot.

You can also call `nopt.clean(data, types, typeDefs)` to clean up a
config object and remove its invalid properties.
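
For illustration, here is a minimal sketch of a custom type added to `nopt.typeDefs`, following the contract described above. The `Semver` name, the version-string format, and the sample argv are invented for this example and are not part of nopt itself.

```javascript
var nopt = require("nopt")

// the `type` marker is what goes in an option's type list
function Semver () {}

function validateSemver (data, key, val) {
  // reject anything that is not three dot-separated numbers
  if (!/^\d+\.\d+\.\d+$/.test(String(val))) return false
  // otherwise assign the cleaned-up value, per the typeDefs contract
  data[key] = String(val)
}

nopt.typeDefs.Semver = { type: Semver, validate: validateSemver }

var parsed = nopt({ version: Semver }, {}, ["--version", "1.0.10"], 0)
console.log(parsed.version) // "1.0.10"; an invalid value would simply be dropped
```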

## Error Handling

By default, nopt outputs a warning to standard error when invalid
options are found. You can change this behavior by assigning a method
to `nopt.invalidHandler`. This method will be called with
the offending `nopt.invalidHandler(key, val, types)`.

If no `nopt.invalidHandler` is assigned, then it will console.error
its whining. If it is assigned to boolean `false` then the warning is
suppressed.
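
A hedged sketch of the hooks just described (the message text is invented, not nopt's own wording):

```javascript
var nopt = require("nopt")

// called as invalidHandler(key, val, types) whenever validation fails
nopt.invalidHandler = function (key, val, types) {
  console.error("ignoring invalid value for --" + key + ": " + JSON.stringify(val))
}

// or silence the warnings entirely:
// nopt.invalidHandler = false

var parsed = nopt({ logfd: Number }, {}, ["--logfd", "x"], 0)
// parsed.logfd is absent, because "x" did not validate as a Number
```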

## Abbreviations

Yes, they are supported. If you define options like this:

```javascript
{ "foolhardyelephants" : Boolean
, "pileofmonkeys" : Boolean }
```

Then this will work:

```bash
node program.js --foolhar --pil
node program.js --no-f --pileofmon
# etc.
```

## Shorthands

Shorthands are a hash of shorter option names to a snippet of args that
they expand to.

If multiple one-character shorthands are all combined, and the
combination does not unambiguously match any other option or shorthand,
then they will be broken up into their constituent parts. For example:

```json
{ "s" : ["--loglevel", "silent"]
, "g" : "--global"
, "f" : "--force"
, "p" : "--parseable"
, "l" : "--long"
}
```

```bash
npm ls -sgflp
# just like doing this:
npm ls --loglevel silent --global --force --long --parseable
```

## The Rest of the args

The config object returned by nopt is given a special member called
`argv`, which is an object with the following fields:

* `remain`: The remaining args after all the parsing has occurred.
* `original`: The args as they originally appeared.
* `cooked`: The args after flags and shorthands are expanded.
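
For example (the argv array below is passed in directly just to make the output predictable, reusing the `f`/`--flag` shorthand from the USAGE section):

```javascript
var nopt = require("nopt")

var parsed = nopt({ flag: Boolean }, { f: ["--flag"] },
                  ["node", "my-program.js", "-f", "extra.txt"])

console.log(parsed.argv.remain)   // [ 'extra.txt' ]
console.log(parsed.argv.original) // [ '-f', 'extra.txt' ]      (after the default slice of 2)
console.log(parsed.argv.cooked)   // [ '--flag', 'extra.txt' ]  (shorthands expanded)
```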

## Slicing

Node programs are called with more or less the exact argv as it appears
in C land, after the v8 and node-specific options have been plucked off.
As such, `argv[0]` is always `node` and `argv[1]` is always the
JavaScript program being run.

That's usually not very useful to you. So they're sliced off by
default. If you want them, then you can pass in `0` as the last
argument, or any other number that you'd like to slice off the start of
the list.
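
A small sketch of that last point, with a hand-built argv so the effect of passing `0` is visible:

```javascript
var nopt = require("nopt")

// a slice of 0 keeps "node" and the script name in the parse
var parsed = nopt({ flag: Boolean }, {}, ["node", "wrapper.js", "--flag"], 0)

console.log(parsed.flag)        // true
console.log(parsed.argv.remain) // [ 'node', 'wrapper.js' ]
```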
44
node_modules/nopt/bin/nopt.js
generated
vendored
Executable file
44
node_modules/nopt/bin/nopt.js
generated
vendored
Executable file
|
@ -0,0 +1,44 @@
|
|||
#!/usr/bin/env node
|
||||
var nopt = require("../lib/nopt")
|
||||
, types = { num: Number
|
||||
, bool: Boolean
|
||||
, help: Boolean
|
||||
, list: Array
|
||||
, "num-list": [Number, Array]
|
||||
, "str-list": [String, Array]
|
||||
, "bool-list": [Boolean, Array]
|
||||
, str: String }
|
||||
, shorthands = { s: [ "--str", "astring" ]
|
||||
, b: [ "--bool" ]
|
||||
, nb: [ "--no-bool" ]
|
||||
, tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ]
|
||||
, "?": ["--help"]
|
||||
, h: ["--help"]
|
||||
, H: ["--help"]
|
||||
, n: [ "--num", "125" ] }
|
||||
, parsed = nopt( types
|
||||
, shorthands
|
||||
, process.argv
|
||||
, 2 )
|
||||
|
||||
console.log("parsed", parsed)
|
||||
|
||||
if (parsed.help) {
|
||||
console.log("")
|
||||
console.log("nopt cli tester")
|
||||
console.log("")
|
||||
console.log("types")
|
||||
console.log(Object.keys(types).map(function M (t) {
|
||||
var type = types[t]
|
||||
if (Array.isArray(type)) {
|
||||
return [t, type.map(function (type) { return type.name })]
|
||||
}
|
||||
return [t, type && type.name]
|
||||
}).reduce(function (s, i) {
|
||||
s[i[0]] = i[1]
|
||||
return s
|
||||
}, {}))
|
||||
console.log("")
|
||||
console.log("shorthands")
|
||||
console.log(shorthands)
|
||||
}
|
30
node_modules/nopt/examples/my-program.js
generated
vendored
Executable file
30
node_modules/nopt/examples/my-program.js
generated
vendored
Executable file
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
//process.env.DEBUG_NOPT = 1
|
||||
|
||||
// my-program.js
|
||||
var nopt = require("../lib/nopt")
|
||||
, Stream = require("stream").Stream
|
||||
, path = require("path")
|
||||
, knownOpts = { "foo" : [String, null]
|
||||
, "bar" : [Stream, Number]
|
||||
, "baz" : path
|
||||
, "bloo" : [ "big", "medium", "small" ]
|
||||
, "flag" : Boolean
|
||||
, "pick" : Boolean
|
||||
}
|
||||
, shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
|
||||
, "b7" : ["--bar", "7"]
|
||||
, "m" : ["--bloo", "medium"]
|
||||
, "p" : ["--pick"]
|
||||
, "f" : ["--flag", "true"]
|
||||
, "g" : ["--flag"]
|
||||
, "s" : "--flag"
|
||||
}
|
||||
// everything is optional.
|
||||
// knownOpts and shorthands default to {}
|
||||
// arg list defaults to process.argv
|
||||
// slice defaults to 2
|
||||
, parsed = nopt(knownOpts, shortHands, process.argv, 2)
|
||||
|
||||
console.log("parsed =\n"+ require("util").inspect(parsed))
|
552
node_modules/nopt/lib/nopt.js
generated
vendored
Normal file
552
node_modules/nopt/lib/nopt.js
generated
vendored
Normal file
|
@ -0,0 +1,552 @@
|
|||
// info about each config option.
|
||||
|
||||
var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG
|
||||
? function () { console.error.apply(console, arguments) }
|
||||
: function () {}
|
||||
|
||||
var url = require("url")
|
||||
, path = require("path")
|
||||
, Stream = require("stream").Stream
|
||||
, abbrev = require("abbrev")
|
||||
|
||||
module.exports = exports = nopt
|
||||
exports.clean = clean
|
||||
|
||||
exports.typeDefs =
|
||||
{ String : { type: String, validate: validateString }
|
||||
, Boolean : { type: Boolean, validate: validateBoolean }
|
||||
, url : { type: url, validate: validateUrl }
|
||||
, Number : { type: Number, validate: validateNumber }
|
||||
, path : { type: path, validate: validatePath }
|
||||
, Stream : { type: Stream, validate: validateStream }
|
||||
, Date : { type: Date, validate: validateDate }
|
||||
}
|
||||
|
||||
function nopt (types, shorthands, args, slice) {
|
||||
args = args || process.argv
|
||||
types = types || {}
|
||||
shorthands = shorthands || {}
|
||||
if (typeof slice !== "number") slice = 2
|
||||
|
||||
debug(types, shorthands, args, slice)
|
||||
|
||||
args = args.slice(slice)
|
||||
var data = {}
|
||||
, key
|
||||
, remain = []
|
||||
, cooked = args
|
||||
, original = args.slice(0)
|
||||
|
||||
parse(args, data, remain, types, shorthands)
|
||||
// now data is full
|
||||
clean(data, types, exports.typeDefs)
|
||||
data.argv = {remain:remain,cooked:cooked,original:original}
|
||||
data.argv.toString = function () {
|
||||
return this.original.map(JSON.stringify).join(" ")
|
||||
}
|
||||
return data
|
||||
}
|
||||
|
||||
function clean (data, types, typeDefs) {
|
||||
typeDefs = typeDefs || exports.typeDefs
|
||||
var remove = {}
|
||||
, typeDefault = [false, true, null, String, Number]
|
||||
|
||||
Object.keys(data).forEach(function (k) {
|
||||
if (k === "argv") return
|
||||
var val = data[k]
|
||||
, isArray = Array.isArray(val)
|
||||
, type = types[k]
|
||||
if (!isArray) val = [val]
|
||||
if (!type) type = typeDefault
|
||||
if (type === Array) type = typeDefault.concat(Array)
|
||||
if (!Array.isArray(type)) type = [type]
|
||||
|
||||
debug("val=%j", val)
|
||||
debug("types=", type)
|
||||
val = val.map(function (val) {
|
||||
// if it's an unknown value, then parse false/true/null/numbers/dates
|
||||
if (typeof val === "string") {
|
||||
debug("string %j", val)
|
||||
val = val.trim()
|
||||
if ((val === "null" && ~type.indexOf(null))
|
||||
|| (val === "true" &&
|
||||
(~type.indexOf(true) || ~type.indexOf(Boolean)))
|
||||
|| (val === "false" &&
|
||||
(~type.indexOf(false) || ~type.indexOf(Boolean)))) {
|
||||
val = JSON.parse(val)
|
||||
debug("jsonable %j", val)
|
||||
} else if (~type.indexOf(Number) && !isNaN(val)) {
|
||||
debug("convert to number", val)
|
||||
val = +val
|
||||
} else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) {
|
||||
debug("convert to date", val)
|
||||
val = new Date(val)
|
||||
}
|
||||
}
|
||||
|
||||
if (!types.hasOwnProperty(k)) {
|
||||
return val
|
||||
}
|
||||
|
||||
// allow `--no-blah` to set 'blah' to null if null is allowed
|
||||
if (val === false && ~type.indexOf(null) &&
|
||||
!(~type.indexOf(false) || ~type.indexOf(Boolean))) {
|
||||
val = null
|
||||
}
|
||||
|
||||
var d = {}
|
||||
d[k] = val
|
||||
debug("prevalidated val", d, val, types[k])
|
||||
if (!validate(d, k, val, types[k], typeDefs)) {
|
||||
if (exports.invalidHandler) {
|
||||
exports.invalidHandler(k, val, types[k], data)
|
||||
} else if (exports.invalidHandler !== false) {
|
||||
debug("invalid: "+k+"="+val, types[k])
|
||||
}
|
||||
return remove
|
||||
}
|
||||
debug("validated val", d, val, types[k])
|
||||
return d[k]
|
||||
}).filter(function (val) { return val !== remove })
|
||||
|
||||
if (!val.length) delete data[k]
|
||||
else if (isArray) {
|
||||
debug(isArray, data[k], val)
|
||||
data[k] = val
|
||||
} else data[k] = val[0]
|
||||
|
||||
debug("k=%s val=%j", k, val, data[k])
|
||||
})
|
||||
}
|
||||
|
||||
function validateString (data, k, val) {
|
||||
data[k] = String(val)
|
||||
}
|
||||
|
||||
function validatePath (data, k, val) {
|
||||
data[k] = path.resolve(String(val))
|
||||
return true
|
||||
}
|
||||
|
||||
function validateNumber (data, k, val) {
|
||||
debug("validate Number %j %j %j", k, val, isNaN(val))
|
||||
if (isNaN(val)) return false
|
||||
data[k] = +val
|
||||
}
|
||||
|
||||
function validateDate (data, k, val) {
|
||||
debug("validate Date %j %j %j", k, val, Date.parse(val))
|
||||
var s = Date.parse(val)
|
||||
if (isNaN(s)) return false
|
||||
data[k] = new Date(val)
|
||||
}
|
||||
|
||||
function validateBoolean (data, k, val) {
|
||||
if (val instanceof Boolean) val = val.valueOf()
|
||||
else if (typeof val === "string") {
|
||||
if (!isNaN(val)) val = !!(+val)
|
||||
else if (val === "null" || val === "false") val = false
|
||||
else val = true
|
||||
} else val = !!val
|
||||
data[k] = val
|
||||
}
|
||||
|
||||
function validateUrl (data, k, val) {
|
||||
val = url.parse(String(val))
|
||||
if (!val.host) return false
|
||||
data[k] = val.href
|
||||
}
|
||||
|
||||
function validateStream (data, k, val) {
|
||||
if (!(val instanceof Stream)) return false
|
||||
data[k] = val
|
||||
}
|
||||
|
||||
function validate (data, k, val, type, typeDefs) {
|
||||
// arrays are lists of types.
|
||||
if (Array.isArray(type)) {
|
||||
for (var i = 0, l = type.length; i < l; i ++) {
|
||||
if (type[i] === Array) continue
|
||||
if (validate(data, k, val, type[i], typeDefs)) return true
|
||||
}
|
||||
delete data[k]
|
||||
return false
|
||||
}
|
||||
|
||||
// an array of anything?
|
||||
if (type === Array) return true
|
||||
|
||||
// NaN is poisonous. Means that something is not allowed.
|
||||
if (type !== type) {
|
||||
debug("Poison NaN", k, val, type)
|
||||
delete data[k]
|
||||
return false
|
||||
}
|
||||
|
||||
// explicit list of values
|
||||
if (val === type) {
|
||||
debug("Explicitly allowed %j", val)
|
||||
// if (isArray) (data[k] = data[k] || []).push(val)
|
||||
// else data[k] = val
|
||||
data[k] = val
|
||||
return true
|
||||
}
|
||||
|
||||
// now go through the list of typeDefs, validate against each one.
|
||||
var ok = false
|
||||
, types = Object.keys(typeDefs)
|
||||
for (var i = 0, l = types.length; i < l; i ++) {
|
||||
debug("test type %j %j %j", k, val, types[i])
|
||||
var t = typeDefs[types[i]]
|
||||
if (t && type === t.type) {
|
||||
var d = {}
|
||||
ok = false !== t.validate(d, k, val)
|
||||
val = d[k]
|
||||
if (ok) {
|
||||
// if (isArray) (data[k] = data[k] || []).push(val)
|
||||
// else data[k] = val
|
||||
data[k] = val
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
debug("OK? %j (%j %j %j)", ok, k, val, types[i])
|
||||
|
||||
if (!ok) delete data[k]
|
||||
return ok
|
||||
}
|
||||
|
||||
function parse (args, data, remain, types, shorthands) {
|
||||
debug("parse", args, data, remain)
|
||||
|
||||
var key = null
|
||||
, abbrevs = abbrev(Object.keys(types))
|
||||
, shortAbbr = abbrev(Object.keys(shorthands))
|
||||
|
||||
for (var i = 0; i < args.length; i ++) {
|
||||
var arg = args[i]
|
||||
debug("arg", arg)
|
||||
|
||||
if (arg.match(/^-{2,}$/)) {
|
||||
// done with keys.
|
||||
// the rest are args.
|
||||
remain.push.apply(remain, args.slice(i + 1))
|
||||
args[i] = "--"
|
||||
break
|
||||
}
|
||||
if (arg.charAt(0) === "-") {
|
||||
if (arg.indexOf("=") !== -1) {
|
||||
var v = arg.split("=")
|
||||
arg = v.shift()
|
||||
v = v.join("=")
|
||||
args.splice.apply(args, [i, 1].concat([arg, v]))
|
||||
}
|
||||
// see if it's a shorthand
|
||||
// if so, splice and back up to re-parse it.
|
||||
var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs)
|
||||
debug("arg=%j shRes=%j", arg, shRes)
|
||||
if (shRes) {
|
||||
debug(arg, shRes)
|
||||
args.splice.apply(args, [i, 1].concat(shRes))
|
||||
if (arg !== shRes[0]) {
|
||||
i --
|
||||
continue
|
||||
}
|
||||
}
|
||||
arg = arg.replace(/^-+/, "")
|
||||
var no = false
|
||||
while (arg.toLowerCase().indexOf("no-") === 0) {
|
||||
no = !no
|
||||
arg = arg.substr(3)
|
||||
}
|
||||
|
||||
if (abbrevs[arg]) arg = abbrevs[arg]
|
||||
|
||||
var isArray = types[arg] === Array ||
|
||||
Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1
|
||||
|
||||
var val
|
||||
, la = args[i + 1]
|
||||
|
||||
var isBool = no ||
|
||||
types[arg] === Boolean ||
|
||||
Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 ||
|
||||
(la === "false" &&
|
||||
(types[arg] === null ||
|
||||
Array.isArray(types[arg]) && ~types[arg].indexOf(null)))
|
||||
|
||||
if (isBool) {
|
||||
// just set and move along
|
||||
val = !no
|
||||
// however, also support --bool true or --bool false
|
||||
if (la === "true" || la === "false") {
|
||||
val = JSON.parse(la)
|
||||
la = null
|
||||
if (no) val = !val
|
||||
i ++
|
||||
}
|
||||
|
||||
// also support "foo":[Boolean, "bar"] and "--foo bar"
|
||||
if (Array.isArray(types[arg]) && la) {
|
||||
if (~types[arg].indexOf(la)) {
|
||||
// an explicit type
|
||||
val = la
|
||||
i ++
|
||||
} else if ( la === "null" && ~types[arg].indexOf(null) ) {
|
||||
// null allowed
|
||||
val = null
|
||||
i ++
|
||||
} else if ( !la.match(/^-{2,}[^-]/) &&
|
||||
!isNaN(la) &&
|
||||
~types[arg].indexOf(Number) ) {
|
||||
// number
|
||||
val = +la
|
||||
i ++
|
||||
} else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) {
|
||||
// string
|
||||
val = la
|
||||
i ++
|
||||
}
|
||||
}
|
||||
|
||||
if (isArray) (data[arg] = data[arg] || []).push(val)
|
||||
else data[arg] = val
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
if (la && la.match(/^-{2,}$/)) {
|
||||
la = undefined
|
||||
i --
|
||||
}
|
||||
|
||||
val = la === undefined ? true : la
|
||||
if (isArray) (data[arg] = data[arg] || []).push(val)
|
||||
else data[arg] = val
|
||||
|
||||
i ++
|
||||
continue
|
||||
}
|
||||
remain.push(arg)
|
||||
}
|
||||
}
|
||||
|
||||
function resolveShort (arg, shorthands, shortAbbr, abbrevs) {
|
||||
// handle single-char shorthands glommed together, like
|
||||
// npm ls -glp, but only if there is one dash, and only if
|
||||
// all of the chars are single-char shorthands, and it's
|
||||
// not a match to some other abbrev.
|
||||
arg = arg.replace(/^-+/, '')
|
||||
if (abbrevs[arg] && !shorthands[arg]) {
|
||||
return null
|
||||
}
|
||||
if (shortAbbr[arg]) {
|
||||
arg = shortAbbr[arg]
|
||||
} else {
|
||||
var singles = shorthands.___singles
|
||||
if (!singles) {
|
||||
singles = Object.keys(shorthands).filter(function (s) {
|
||||
return s.length === 1
|
||||
}).reduce(function (l,r) { l[r] = true ; return l }, {})
|
||||
shorthands.___singles = singles
|
||||
}
|
||||
var chrs = arg.split("").filter(function (c) {
|
||||
return singles[c]
|
||||
})
|
||||
if (chrs.join("") === arg) return chrs.map(function (c) {
|
||||
return shorthands[c]
|
||||
}).reduce(function (l, r) {
|
||||
return l.concat(r)
|
||||
}, [])
|
||||
}
|
||||
|
||||
if (shorthands[arg] && !Array.isArray(shorthands[arg])) {
|
||||
shorthands[arg] = shorthands[arg].split(/\s+/)
|
||||
}
|
||||
return shorthands[arg]
|
||||
}
|
||||
|
||||
if (module === require.main) {
|
||||
var assert = require("assert")
|
||||
, util = require("util")
|
||||
|
||||
, shorthands =
|
||||
{ s : ["--loglevel", "silent"]
|
||||
, d : ["--loglevel", "info"]
|
||||
, dd : ["--loglevel", "verbose"]
|
||||
, ddd : ["--loglevel", "silly"]
|
||||
, noreg : ["--no-registry"]
|
||||
, reg : ["--registry"]
|
||||
, "no-reg" : ["--no-registry"]
|
||||
, silent : ["--loglevel", "silent"]
|
||||
, verbose : ["--loglevel", "verbose"]
|
||||
, h : ["--usage"]
|
||||
, H : ["--usage"]
|
||||
, "?" : ["--usage"]
|
||||
, help : ["--usage"]
|
||||
, v : ["--version"]
|
||||
, f : ["--force"]
|
||||
, desc : ["--description"]
|
||||
, "no-desc" : ["--no-description"]
|
||||
, "local" : ["--no-global"]
|
||||
, l : ["--long"]
|
||||
, p : ["--parseable"]
|
||||
, porcelain : ["--parseable"]
|
||||
, g : ["--global"]
|
||||
}
|
||||
|
||||
, types =
|
||||
{ aoa: Array
|
||||
, nullstream: [null, Stream]
|
||||
, date: Date
|
||||
, str: String
|
||||
, browser : String
|
||||
, cache : path
|
||||
, color : ["always", Boolean]
|
||||
, depth : Number
|
||||
, description : Boolean
|
||||
, dev : Boolean
|
||||
, editor : path
|
||||
, force : Boolean
|
||||
, global : Boolean
|
||||
, globalconfig : path
|
||||
, group : [String, Number]
|
||||
, gzipbin : String
|
||||
, logfd : [Number, Stream]
|
||||
, loglevel : ["silent","win","error","warn","info","verbose","silly"]
|
||||
, long : Boolean
|
||||
, "node-version" : [false, String]
|
||||
, npaturl : url
|
||||
, npat : Boolean
|
||||
, "onload-script" : [false, String]
|
||||
, outfd : [Number, Stream]
|
||||
, parseable : Boolean
|
||||
, pre: Boolean
|
||||
, prefix: path
|
||||
, proxy : url
|
||||
, "rebuild-bundle" : Boolean
|
||||
, registry : url
|
||||
, searchopts : String
|
||||
, searchexclude: [null, String]
|
||||
, shell : path
|
||||
, t: [Array, String]
|
||||
, tag : String
|
||||
, tar : String
|
||||
, tmp : path
|
||||
, "unsafe-perm" : Boolean
|
||||
, usage : Boolean
|
||||
, user : String
|
||||
, username : String
|
||||
, userconfig : path
|
||||
, version : Boolean
|
||||
, viewer: path
|
||||
, _exit : Boolean
|
||||
}
|
||||
|
||||
; [["-v", {version:true}, []]
|
||||
,["---v", {version:true}, []]
|
||||
,["ls -s --no-reg connect -d",
|
||||
{loglevel:"info",registry:null},["ls","connect"]]
|
||||
,["ls ---s foo",{loglevel:"silent"},["ls","foo"]]
|
||||
,["ls --registry blargle", {}, ["ls"]]
|
||||
,["--no-registry", {registry:null}, []]
|
||||
,["--no-color true", {color:false}, []]
|
||||
,["--no-color false", {color:true}, []]
|
||||
,["--no-color", {color:false}, []]
|
||||
,["--color false", {color:false}, []]
|
||||
,["--color --logfd 7", {logfd:7,color:true}, []]
|
||||
,["--color=true", {color:true}, []]
|
||||
,["--logfd=10", {logfd:10}, []]
|
||||
,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]]
|
||||
,["--tmp=tmp -tar=gtar",
|
||||
{tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]]
|
||||
,["--logfd x", {}, []]
|
||||
,["a -true -- -no-false", {true:true},["a","-no-false"]]
|
||||
,["a -no-false", {false:false},["a"]]
|
||||
,["a -no-no-true", {true:true}, ["a"]]
|
||||
,["a -no-no-no-false", {false:false}, ["a"]]
|
||||
,["---NO-no-No-no-no-no-nO-no-no"+
|
||||
"-No-no-no-no-no-no-no-no-no"+
|
||||
"-no-no-no-no-NO-NO-no-no-no-no-no-no"+
|
||||
"-no-body-can-do-the-boogaloo-like-I-do"
|
||||
,{"body-can-do-the-boogaloo-like-I-do":false}, []]
|
||||
,["we are -no-strangers-to-love "+
|
||||
"--you-know the-rules --and so-do-i "+
|
||||
"---im-thinking-of=a-full-commitment "+
|
||||
"--no-you-would-get-this-from-any-other-guy "+
|
||||
"--no-gonna-give-you-up "+
|
||||
"-no-gonna-let-you-down=true "+
|
||||
"--no-no-gonna-run-around false "+
|
||||
"--desert-you=false "+
|
||||
"--make-you-cry false "+
|
||||
"--no-tell-a-lie "+
|
||||
"--no-no-and-hurt-you false"
|
||||
,{"strangers-to-love":false
|
||||
,"you-know":"the-rules"
|
||||
,"and":"so-do-i"
|
||||
,"you-would-get-this-from-any-other-guy":false
|
||||
,"gonna-give-you-up":false
|
||||
,"gonna-let-you-down":false
|
||||
,"gonna-run-around":false
|
||||
,"desert-you":false
|
||||
,"make-you-cry":false
|
||||
,"tell-a-lie":false
|
||||
,"and-hurt-you":false
|
||||
},["we", "are"]]
|
||||
,["-t one -t two -t three"
|
||||
,{t: ["one", "two", "three"]}
|
||||
,[]]
|
||||
,["-t one -t null -t three four five null"
|
||||
,{t: ["one", "null", "three"]}
|
||||
,["four", "five", "null"]]
|
||||
,["-t foo"
|
||||
,{t:["foo"]}
|
||||
,[]]
|
||||
,["--no-t"
|
||||
,{t:["false"]}
|
||||
,[]]
|
||||
,["-no-no-t"
|
||||
,{t:["true"]}
|
||||
,[]]
|
||||
,["-aoa one -aoa null -aoa 100"
|
||||
,{aoa:["one", null, 100]}
|
||||
,[]]
|
||||
,["-str 100"
|
||||
,{str:"100"}
|
||||
,[]]
|
||||
,["--color always"
|
||||
,{color:"always"}
|
||||
,[]]
|
||||
,["--no-nullstream"
|
||||
,{nullstream:null}
|
||||
,[]]
|
||||
,["--nullstream false"
|
||||
,{nullstream:null}
|
||||
,[]]
|
||||
,["--notadate 2011-01-25"
|
||||
,{notadate: "2011-01-25"}
|
||||
,[]]
|
||||
,["--date 2011-01-25"
|
||||
,{date: new Date("2011-01-25")}
|
||||
,[]]
|
||||
].forEach(function (test) {
|
||||
var argv = test[0].split(/\s+/)
|
||||
, opts = test[1]
|
||||
, rem = test[2]
|
||||
, actual = nopt(types, shorthands, argv, 0)
|
||||
, parsed = actual.argv
|
||||
delete actual.argv
|
||||
console.log(util.inspect(actual, false, 2, true), parsed.remain)
|
||||
for (var i in opts) {
|
||||
var e = JSON.stringify(opts[i])
|
||||
, a = JSON.stringify(actual[i] === undefined ? null : actual[i])
|
||||
if (e && typeof e === "object") {
|
||||
assert.deepEqual(e, a)
|
||||
} else {
|
||||
assert.equal(e, a)
|
||||
}
|
||||
}
|
||||
assert.deepEqual(rem, parsed.remain)
|
||||
})
|
||||
}
|
23
node_modules/nopt/node_modules/abbrev/README.md
generated
vendored
Normal file
23
node_modules/nopt/node_modules/abbrev/README.md
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
# abbrev-js

Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).

Usage:

    var abbrev = require("abbrev");
    abbrev("foo", "fool", "folding", "flop");

    // returns:
    { fl: 'flop'
    , flo: 'flop'
    , flop: 'flop'
    , fol: 'folding'
    , fold: 'folding'
    , foldi: 'folding'
    , foldin: 'folding'
    , folding: 'folding'
    , foo: 'foo'
    , fool: 'fool'
    }

This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.
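
For instance, a command dispatcher might look like the following sketch; the command names and handlers are made up for illustration.

    var abbrev = require("abbrev");

    var commands = { install: doInstall, uninstall: doUninstall, update: doUpdate };
    var lookup = abbrev(Object.keys(commands));

    function run (name) {
      var full = lookup[name]; // e.g. "inst" -> "install", "up" -> "update"
      if (!full) return console.error("unknown or ambiguous command: " + name);
      commands[full]();
    }

    function doInstall () { console.log("installing...") }
    function doUninstall () { console.log("uninstalling...") }
    function doUpdate () { console.log("updating...") }

    run(process.argv[2] || "up");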
106
node_modules/nopt/node_modules/abbrev/lib/abbrev.js
generated
vendored
Normal file
106
node_modules/nopt/node_modules/abbrev/lib/abbrev.js
generated
vendored
Normal file
|
@ -0,0 +1,106 @@
|
|||
|
||||
module.exports = exports = abbrev.abbrev = abbrev
|
||||
|
||||
abbrev.monkeyPatch = monkeyPatch
|
||||
|
||||
function monkeyPatch () {
|
||||
Array.prototype.abbrev = function () { return abbrev(this) }
|
||||
Object.prototype.abbrev = function () { return abbrev(Object.keys(this)) }
|
||||
}
|
||||
|
||||
function abbrev (list) {
|
||||
if (arguments.length !== 1 || !Array.isArray(list)) {
|
||||
list = Array.prototype.slice.call(arguments, 0)
|
||||
}
|
||||
for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
|
||||
args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
|
||||
}
|
||||
|
||||
// sort them lexicographically, so that they're next to their nearest kin
|
||||
args = args.sort(lexSort)
|
||||
|
||||
// walk through each, seeing how much it has in common with the next and previous
|
||||
var abbrevs = {}
|
||||
, prev = ""
|
||||
for (var i = 0, l = args.length ; i < l ; i ++) {
|
||||
var current = args[i]
|
||||
, next = args[i + 1] || ""
|
||||
, nextMatches = true
|
||||
, prevMatches = true
|
||||
if (current === next) continue
|
||||
for (var j = 0, cl = current.length ; j < cl ; j ++) {
|
||||
var curChar = current.charAt(j)
|
||||
nextMatches = nextMatches && curChar === next.charAt(j)
|
||||
prevMatches = prevMatches && curChar === prev.charAt(j)
|
||||
if (nextMatches || prevMatches) continue
|
||||
else {
|
||||
j ++
|
||||
break
|
||||
}
|
||||
}
|
||||
prev = current
|
||||
if (j === cl) {
|
||||
abbrevs[current] = current
|
||||
continue
|
||||
}
|
||||
for (var a = current.substr(0, j) ; j <= cl ; j ++) {
|
||||
abbrevs[a] = current
|
||||
a += current.charAt(j)
|
||||
}
|
||||
}
|
||||
return abbrevs
|
||||
}
|
||||
|
||||
function lexSort (a, b) {
|
||||
return a === b ? 0 : a > b ? 1 : -1
|
||||
}
|
||||
|
||||
|
||||
// tests
|
||||
if (module === require.main) {
|
||||
|
||||
var assert = require("assert")
|
||||
, sys
|
||||
sys = require("util")
|
||||
|
||||
console.log("running tests")
|
||||
function test (list, expect) {
|
||||
var actual = abbrev(list)
|
||||
assert.deepEqual(actual, expect,
|
||||
"abbrev("+sys.inspect(list)+") === " + sys.inspect(expect) + "\n"+
|
||||
"actual: "+sys.inspect(actual))
|
||||
actual = abbrev.apply(exports, list)
|
||||
assert.deepEqual(abbrev.apply(exports, list), expect,
|
||||
"abbrev("+list.map(JSON.stringify).join(",")+") === " + sys.inspect(expect) + "\n"+
|
||||
"actual: "+sys.inspect(actual))
|
||||
}
|
||||
|
||||
test([ "ruby", "ruby", "rules", "rules", "rules" ],
|
||||
{ rub: 'ruby'
|
||||
, ruby: 'ruby'
|
||||
, rul: 'rules'
|
||||
, rule: 'rules'
|
||||
, rules: 'rules'
|
||||
})
|
||||
test(["fool", "foom", "pool", "pope"],
|
||||
{ fool: 'fool'
|
||||
, foom: 'foom'
|
||||
, poo: 'pool'
|
||||
, pool: 'pool'
|
||||
, pop: 'pope'
|
||||
, pope: 'pope'
|
||||
})
|
||||
test(["a", "ab", "abc", "abcd", "abcde", "acde"],
|
||||
{ a: 'a'
|
||||
, ab: 'ab'
|
||||
, abc: 'abc'
|
||||
, abcd: 'abcd'
|
||||
, abcde: 'abcde'
|
||||
, ac: 'acde'
|
||||
, acd: 'acde'
|
||||
, acde: 'acde'
|
||||
})
|
||||
|
||||
console.log("pass")
|
||||
|
||||
}
|
8
node_modules/nopt/node_modules/abbrev/package.json
generated
vendored
Normal file
8
node_modules/nopt/node_modules/abbrev/package.json
generated
vendored
Normal file
|
@ -0,0 +1,8 @@
{ "name" : "abbrev"
, "version" : "1.0.3"
, "description" : "Like ruby's abbrev module, but in js"
, "author" : "Isaac Z. Schlueter <i@izs.me>"
, "main" : "./lib/abbrev.js"
, "scripts" : { "test" : "node lib/abbrev.js" }
, "repository" : "http://github.com/isaacs/abbrev-js"
}
12
node_modules/nopt/package.json
generated
vendored
Normal file
12
node_modules/nopt/package.json
generated
vendored
Normal file
|
@ -0,0 +1,12 @@
{ "name" : "nopt"
, "version" : "1.0.10"
, "description" : "Option parsing for Node, supporting types, shorthands, etc. Used by npm."
, "author" : "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)"
, "main" : "lib/nopt.js"
, "scripts" : { "test" : "node lib/nopt.js" }
, "repository" : "http://github.com/isaacs/nopt"
, "bin" : "./bin/nopt.js"
, "license" :
  { "type" : "MIT"
  , "url" : "https://github.com/isaacs/nopt/raw/master/LICENSE" }
, "dependencies" : { "abbrev" : "1" }}
55
node_modules/request/LICENSE
generated
vendored
Normal file
55
node_modules/request/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,55 @@
|
|||
Apache License
|
||||
|
||||
Version 2.0, January 2004
|
||||
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
|
||||
|
||||
You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||
|
||||
You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||
|
||||
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||
|
||||
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
286
node_modules/request/README.md
generated
vendored
Normal file
286
node_modules/request/README.md
generated
vendored
Normal file
|
@ -0,0 +1,286 @@
|
|||
# Request -- Simplified HTTP request method
|
||||
|
||||
## Install
|
||||
|
||||
<pre>
|
||||
npm install request
|
||||
</pre>
|
||||
|
||||
Or from source:
|
||||
|
||||
<pre>
|
||||
git clone git://github.com/mikeal/request.git
|
||||
cd request
|
||||
npm link
|
||||
</pre>
|
||||
|
||||
## Super simple to use
|
||||
|
||||
Request is designed to be the simplest way possible to make http calls. It support HTTPS and follows redirects by default.
|
||||
|
||||
```javascript
|
||||
var request = require('request');
|
||||
request('http://www.google.com', function (error, response, body) {
|
||||
if (!error && response.statusCode == 200) {
|
||||
console.log(body) // Print the google web page.
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
## Streaming
|
||||
|
||||
You can stream any response to a file stream.
|
||||
|
||||
```javascript
|
||||
request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))
|
||||
```
|
||||
|
||||
You can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types, in this case `application/json`, and use the proper content-type in the PUT request if one is not already provided in the headers.
|
||||
|
||||
```javascript
|
||||
fs.readStream('file.json').pipe(request.put('http://mysite.com/obj.json'))
|
||||
```
|
||||
|
||||
Request can also pipe to itself. When doing so the content-type and content-length will be preserved in the PUT headers.
|
||||
|
||||
```javascript
|
||||
request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'))
|
||||
```
|
||||
|
||||
Now let's get fancy.
|
||||
|
||||
```javascript
|
||||
http.createServer(function (req, resp) {
|
||||
if (req.url === '/doodle.png') {
|
||||
if (req.method === 'PUT') {
|
||||
req.pipe(request.put('http://mysite.com/doodle.png'))
|
||||
} else if (req.method === 'GET' || req.method === 'HEAD') {
|
||||
request.get('http://mysite.com/doodle.png').pipe(resp)
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
You can also pipe() from a http.ServerRequest instance and to a http.ServerResponse instance. The HTTP method and headers will be sent as well as the entity-body data. Which means that, if you don't really care about security, you can do:
|
||||
|
||||
```javascript
|
||||
http.createServer(function (req, resp) {
|
||||
if (req.url === '/doodle.png') {
|
||||
var x = request('http://mysite.com/doodle.png')
|
||||
req.pipe(x)
|
||||
x.pipe(resp)
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
And since pipe() returns the destination stream in node 0.5.x you can do one line proxying :)
|
||||
|
||||
```javascript
|
||||
req.pipe(request('http://mysite.com/doodle.png')).pipe(resp)
|
||||
```
|
||||
|
||||
Also, none of this new functionality conflicts with requests previous features, it just expands them.
|
||||
|
||||
```javascript
|
||||
var r = request.defaults({'proxy':'http://localproxy.com'})
|
||||
|
||||
http.createServer(function (req, resp) {
|
||||
if (req.url === '/doodle.png') {
|
||||
r.get('http://google.com/doodle.png').pipe(resp)
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
You can still use intermediate proxies, the requests will still follow HTTP forwards, etc.
|
||||
|
||||
## OAuth Signing
|
||||
|
||||
```javascript
|
||||
// Twitter OAuth
|
||||
var qs = require('querystring')
|
||||
, oauth =
|
||||
{ callback: 'http://mysite.com/callback/'
|
||||
, consumer_key: CONSUMER_KEY
|
||||
, consumer_secret: CONSUMER_SECRET
|
||||
}
|
||||
, url = 'https://api.twitter.com/oauth/request_token'
|
||||
;
|
||||
request.post({url:url, oauth:oauth}, function (e, r, body) {
|
||||
// Assume by some stretch of magic you aquired the verifier
|
||||
var access_token = qs.parse(body)
|
||||
, oauth =
|
||||
{ consumer_key: CONSUMER_KEY
|
||||
, consumer_secret: CONSUMER_SECRET
|
||||
, token: access_token.oauth_token
|
||||
, verifier: VERIFIER
|
||||
, token_secret: access_token.oauth_token_secret
|
||||
}
|
||||
, url = 'https://api.twitter.com/oauth/access_token'
|
||||
;
|
||||
request.post({url:url, oauth:oauth}, function (e, r, body) {
|
||||
var perm_token = qs.parse(body)
|
||||
, oauth =
|
||||
{ consumer_key: CONSUMER_KEY
|
||||
, consumer_secret: CONSUMER_SECRET
|
||||
, token: perm_token.oauth_token
|
||||
, token_secret: perm_token.oauth_token_secret
|
||||
}
|
||||
, url = 'https://api.twitter.com/1/users/show.json?'
|
||||
, params =
|
||||
{ screen_name: perm_token.screen_name
|
||||
, user_id: perm_token.user_id
|
||||
}
|
||||
;
|
||||
url += qs.stringify(params)
|
||||
request.get({url:url, oauth:oauth, json:true}, function (e, r, user) {
|
||||
console.log(user)
|
||||
})
|
||||
})
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
|
||||
### request(options, callback)
|
||||
|
||||
The first argument can be either a url or an options object. The only required option is uri, all others are optional.
|
||||
|
||||
* `uri` || `url` - fully qualified uri or a parsed url object from url.parse()
|
||||
* `method` - http method, defaults to GET
|
||||
* `headers` - http headers, defaults to {}
|
||||
* `body` - entity body for POST and PUT requests. Must be buffer or string.
|
||||
* `form` - sets `body` but to querystring representation of value and adds `Content-type: application/x-www-form-urlencoded; charset=utf-8` header.
|
||||
* `json` - sets `body` but to JSON representation of value and adds `Content-type: application/json` header.
|
||||
* `multipart` - (experimental) array of objects which contains their own headers and `body` attribute. Sends `multipart/related` request. See example below.
|
||||
* `followRedirect` - follow HTTP 3xx responses as redirects. defaults to true.
|
||||
* `maxRedirects` - the maximum number of redirects to follow, defaults to 10.
|
||||
* `onResponse` - If true the callback will be fired on the "response" event instead of "end". If a function it will be called on "response" and not effect the regular semantics of the main callback on "end".
|
||||
* `encoding` - Encoding to be used on `setEncoding` of response data. If set to `null`, the body is returned as a Buffer.
|
||||
* `pool` - A hash object containing the agents for these requests. If omitted this request will use the global pool which is set to node's default maxSockets.
|
||||
* `pool.maxSockets` - Integer containing the maximum amount of sockets in the pool.
|
||||
* `timeout` - Integer containing the number of milliseconds to wait for a request to respond before aborting the request
|
||||
* `proxy` - An HTTP proxy to be used. Support proxy Auth with Basic Auth the same way it's supported with the `url` parameter by embedding the auth info in the uri.
|
||||
* `oauth` - Options for OAuth HMAC-SHA1 signing, see documentation above.
|
||||
* `strictSSL` - Set to `true` to require that SSL certificates be valid. Note: to use your own certificate authority, you need to specify an agent that was created with that ca as an option.
|
||||
* `jar` - Set to `false` if you don't want cookies to be remembered for future use or define your custom cookie jar (see examples section)
|
||||
|
||||
|
||||
The callback argument gets 3 arguments. The first is an error when applicable (usually from the http.Client option not the http.ClientRequest object). The second in an http.ClientResponse object. The third is the response body String or Buffer.
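
As a hedged sketch that combines several of the options above with that callback signature (the host and payload are placeholders):

```javascript
var request = require('request')

request(
  { uri: 'http://mysite.com/api/widgets'
  , method: 'POST'
  , json: { name: 'sprocket' }   // serialized body plus a JSON content-type
  , timeout: 5000                // give up after five seconds
  , followRedirect: true
  , maxRedirects: 3
  }
, function (error, response, body) {
    if (error) return console.error(error)
    console.log(response.statusCode, body)
  }
)
```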
|
||||
|
||||
## Convenience methods
|
||||
|
||||
There are also shorthand methods for different HTTP METHODs and some other conveniences.
|
||||
|
||||
### request.defaults(options)
|
||||
|
||||
This method returns a wrapper around the normal request API that defaults to whatever options you pass in to it.
|
||||
|
||||
### request.put
|
||||
|
||||
Same as request() but defaults to `method: "PUT"`.
|
||||
|
||||
```javascript
|
||||
request.put(url)
|
||||
```
|
||||
|
||||
### request.post
|
||||
|
||||
Same as request() but defaults to `method: "POST"`.
|
||||
|
||||
```javascript
|
||||
request.post(url)
|
||||
```
|
||||
|
||||
### request.head
|
||||
|
||||
Same as request() but defaults to `method: "HEAD"`.
|
||||
|
||||
```javascript
|
||||
request.head(url)
|
||||
```
|
||||
|
||||
### request.del
|
||||
|
||||
Same as request() but defaults to `method: "DELETE"`.
|
||||
|
||||
```javascript
|
||||
request.del(url)
|
||||
```
|
||||
|
||||
### request.get
|
||||
|
||||
Alias to normal request method for uniformity.
|
||||
|
||||
```javascript
|
||||
request.get(url)
|
||||
```
|
||||
### request.cookie
|
||||
|
||||
Function that creates a new cookie.
|
||||
|
||||
```javascript
|
||||
request.cookie('cookie_string_here')
|
||||
```
|
||||
### request.jar
|
||||
|
||||
Function that creates a new cookie jar.
|
||||
|
||||
```javascript
|
||||
request.jar()
|
||||
```
|
||||
|
||||
|
||||
## Examples:
|
||||
|
||||
```javascript
|
||||
var request = require('request')
|
||||
, rand = Math.floor(Math.random()*100000000).toString()
|
||||
;
|
||||
request(
|
||||
{ method: 'PUT'
|
||||
, uri: 'http://mikeal.iriscouch.com/testjs/' + rand
|
||||
, multipart:
|
||||
[ { 'content-type': 'application/json'
|
||||
, body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
|
||||
}
|
||||
, { body: 'I am an attachment' }
|
||||
]
|
||||
}
|
||||
, function (error, response, body) {
|
||||
if(response.statusCode == 201){
|
||||
console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand)
|
||||
} else {
|
||||
console.log('error: '+ response.statusCode)
|
||||
console.log(body)
|
||||
}
|
||||
}
|
||||
)
|
||||
```
|
||||
Cookies are enabled by default (so they can be used in subsequent requests). To disable cookies, set `jar` to `false` (either in `defaults` or in the options you pass).
|
||||
|
||||
```javascript
|
||||
var request = request.defaults({jar: false})
|
||||
request('http://www.google.com', function () {
|
||||
request('http://images.google.com')
|
||||
})
|
||||
```
|
||||
|
||||
If you want to use a custom cookie jar (instead of letting request use its own global cookie jar), you do so by setting the `jar` default or by specifying it as an option:
|
||||
|
||||
```javascript
|
||||
var j = request.jar()
|
||||
var request = request.defaults({jar:j})
|
||||
request('http://www.google.com', function () {
|
||||
request('http://images.google.com')
|
||||
})
|
||||
```
|
||||
OR
|
||||
|
||||
```javascript
|
||||
var j = request.jar()
|
||||
var cookie = request.cookie('your_cookie_here')
|
||||
j.add(cookie)
|
||||
request({url: 'http://www.google.com', jar: j}, function () {
|
||||
request('http://images.google.com')
|
||||
})
|
||||
```
|
84
node_modules/request/forever.js
generated
vendored
Normal file
|
@@ -0,0 +1,84 @@
|
|||
module.exports = ForeverAgent
|
||||
|
||||
var util = require('util')
|
||||
, Agent = require('http').Agent
|
||||
, net = require('net')
|
||||
|
||||
function ForeverAgent(options) {
|
||||
var self = this
|
||||
self.options = options || {}
|
||||
self.requests = {}
|
||||
self.sockets = {}
|
||||
self.freeSockets = {}
|
||||
self.maxSockets = self.options.maxSockets || Agent.defaultMaxSockets
|
||||
self.minSockets = self.options.minSockets || ForeverAgent.defaultMinSockets
|
||||
self.on('free', function(socket, host, port) {
|
||||
var name = host + ':' + port
|
||||
if (self.requests[name] && self.requests[name].length) {
|
||||
self.requests[name].shift().onSocket(socket)
|
||||
} else if (self.sockets[name].length < self.minSockets) {
|
||||
if (!self.freeSockets[name]) self.freeSockets[name] = []
|
||||
self.freeSockets[name].push(socket)
|
||||
|
||||
// if an error happens while we don't use the socket anyway, meh, throw the socket away
|
||||
function onIdleError() {
|
||||
socket.destroy()
|
||||
}
|
||||
socket._onIdleError = onIdleError
|
||||
socket.on('error', onIdleError)
|
||||
} else {
|
||||
// If there are no pending requests just destroy the
|
||||
// socket and it will get removed from the pool. This
|
||||
// gets us out of timeout issues and allows us to
|
||||
// default to Connection:keep-alive.
|
||||
socket.destroy();
|
||||
}
|
||||
})
|
||||
self.createConnection = net.createConnection
|
||||
}
|
||||
util.inherits(ForeverAgent, Agent)
|
||||
|
||||
ForeverAgent.defaultMinSockets = 5
|
||||
|
||||
ForeverAgent.prototype.addRequestNoreuse = Agent.prototype.addRequest
|
||||
ForeverAgent.prototype.addRequest = function(req, host, port) {
|
||||
var name = host + ':' + port
|
||||
if (this.freeSockets[name] && this.freeSockets[name].length > 0 && !req.useChunkedEncodingByDefault) {
|
||||
var idleSocket = this.freeSockets[name].pop()
|
||||
idleSocket.removeListener('error', idleSocket._onIdleError)
|
||||
delete idleSocket._onIdleError
|
||||
req._reusedSocket = true
|
||||
req.onSocket(idleSocket)
|
||||
} else {
|
||||
this.addRequestNoreuse(req, host, port)
|
||||
}
|
||||
}
|
||||
|
||||
ForeverAgent.prototype.removeSocket = function(s, name, host, port) {
|
||||
if (this.sockets[name]) {
|
||||
var index = this.sockets[name].indexOf(s);
|
||||
if (index !== -1) {
|
||||
this.sockets[name].splice(index, 1);
|
||||
}
|
||||
} else if (this.sockets[name] && this.sockets[name].length === 0) {
|
||||
// don't leak
|
||||
delete this.sockets[name];
|
||||
delete this.requests[name];
|
||||
}
|
||||
|
||||
if (this.freeSockets[name]) {
|
||||
var index = this.freeSockets[name].indexOf(s)
|
||||
if (index !== -1) {
|
||||
this.freeSockets[name].splice(index, 1)
|
||||
if (this.freeSockets[name].length === 0) {
|
||||
delete this.freeSockets[name]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.requests[name] && this.requests[name].length) {
|
||||
// If we have pending requests and a socket gets closed a new one
|
||||
// needs to be created to take over in the pool for the one that closed.
|
||||
this.createSocket(name, host, port).emit('free');
|
||||
}
|
||||
}
|
652
node_modules/request/main.js
generated
vendored
Normal file
|
@@ -0,0 +1,652 @@
|
|||
// Copyright 2010-2011 Mikeal Rogers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
var http = require('http')
|
||||
, https = false
|
||||
, tls = false
|
||||
, url = require('url')
|
||||
, util = require('util')
|
||||
, stream = require('stream')
|
||||
, qs = require('querystring')
|
||||
, mimetypes = require('./mimetypes')
|
||||
, oauth = require('./oauth')
|
||||
, uuid = require('./uuid')
|
||||
, ForeverAgent = require('./forever')
|
||||
, Cookie = require('./vendor/cookie')
|
||||
, CookieJar = require('./vendor/cookie/jar')
|
||||
, cookieJar = new CookieJar
|
||||
;
|
||||
|
||||
if (process.logging) {
|
||||
var log = process.logging('request')
|
||||
}
|
||||
|
||||
try {
|
||||
https = require('https')
|
||||
} catch (e) {}
|
||||
|
||||
try {
|
||||
tls = require('tls')
|
||||
} catch (e) {}
|
||||
|
||||
function toBase64 (str) {
|
||||
return (new Buffer(str || "", "ascii")).toString("base64")
|
||||
}
|
||||
|
||||
// Hacky fix for pre-0.4.4 https
|
||||
if (https && !https.Agent) {
|
||||
https.Agent = function (options) {
|
||||
http.Agent.call(this, options)
|
||||
}
|
||||
util.inherits(https.Agent, http.Agent)
|
||||
https.Agent.prototype._getConnection = function(host, port, cb) {
|
||||
var s = tls.connect(port, host, this.options, function() {
|
||||
// do other checks here?
|
||||
if (cb) cb()
|
||||
})
|
||||
return s
|
||||
}
|
||||
}
|
||||
|
||||
function isReadStream (rs) {
|
||||
if (rs.readable && rs.path && rs.mode) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
function copy (obj) {
|
||||
var o = {}
|
||||
for (var i in obj) o[i] = obj[i]
|
||||
return o
|
||||
}
|
||||
|
||||
var isUrl = /^https?:/
|
||||
|
||||
var globalPool = {}
|
||||
|
||||
function Request (options) {
|
||||
stream.Stream.call(this)
|
||||
this.readable = true
|
||||
this.writable = true
|
||||
|
||||
if (typeof options === 'string') {
|
||||
options = {uri:options}
|
||||
}
|
||||
|
||||
for (var i in options) {
|
||||
this[i] = options[i]
|
||||
}
|
||||
if (!this.pool) this.pool = globalPool
|
||||
this.dests = []
|
||||
this.__isRequestRequest = true
|
||||
}
|
||||
util.inherits(Request, stream.Stream)
|
||||
Request.prototype.getAgent = function (host, port) {
|
||||
if (!this.pool[host+':'+port]) {
|
||||
this.pool[host+':'+port] = new this.httpModule.Agent({host:host, port:port})
|
||||
}
|
||||
return this.pool[host+':'+port]
|
||||
}
|
||||
Request.prototype.request = function () {
|
||||
var self = this
|
||||
|
||||
// Protect against double callback
|
||||
if (!self._callback && self.callback) {
|
||||
self._callback = self.callback
|
||||
self.callback = function () {
|
||||
if (self._callbackCalled) return // Print a warning maybe?
|
||||
self._callback.apply(self, arguments)
|
||||
self._callbackCalled = true
|
||||
}
|
||||
}
|
||||
|
||||
if (self.url) {
|
||||
// People use this property instead all the time so why not just support it.
|
||||
self.uri = self.url
|
||||
delete self.url
|
||||
}
|
||||
|
||||
if (!self.uri) {
|
||||
throw new Error("options.uri is a required argument")
|
||||
} else {
|
||||
if (typeof self.uri == "string") self.uri = url.parse(self.uri)
|
||||
}
|
||||
if (self.proxy) {
|
||||
if (typeof self.proxy == 'string') self.proxy = url.parse(self.proxy)
|
||||
}
|
||||
|
||||
self._redirectsFollowed = self._redirectsFollowed || 0
|
||||
self.maxRedirects = (self.maxRedirects !== undefined) ? self.maxRedirects : 10
|
||||
self.followRedirect = (self.followRedirect !== undefined) ? self.followRedirect : true
|
||||
if (self.followRedirect)
|
||||
self.redirects = self.redirects || []
|
||||
|
||||
self.headers = self.headers ? copy(self.headers) : {}
|
||||
|
||||
var setHost = false
|
||||
if (!self.headers.host) {
|
||||
self.headers.host = self.uri.hostname
|
||||
if (self.uri.port) {
|
||||
if ( !(self.uri.port === 80 && self.uri.protocol === 'http:') &&
|
||||
!(self.uri.port === 443 && self.uri.protocol === 'https:') )
|
||||
self.headers.host += (':'+self.uri.port)
|
||||
}
|
||||
setHost = true
|
||||
}
|
||||
|
||||
if (self.jar === false) {
|
||||
// disable cookies
|
||||
var cookies = false;
|
||||
self._disableCookies = true;
|
||||
} else if (self.jar) {
|
||||
// fetch cookie from the user defined cookie jar
|
||||
var cookies = self.jar.get({ url: self.uri.href })
|
||||
} else {
|
||||
// fetch cookie from the global cookie jar
|
||||
var cookies = cookieJar.get({ url: self.uri.href })
|
||||
}
|
||||
if (cookies) {
|
||||
var cookieString = cookies.map(function (c) {
|
||||
return c.name + "=" + c.value;
|
||||
}).join("; ");
|
||||
|
||||
self.headers.Cookie = cookieString;
|
||||
}
|
||||
|
||||
if (!self.uri.pathname) {self.uri.pathname = '/'}
|
||||
if (!self.uri.port) {
|
||||
if (self.uri.protocol == 'http:') {self.uri.port = 80}
|
||||
else if (self.uri.protocol == 'https:') {self.uri.port = 443}
|
||||
}
|
||||
|
||||
if (self.proxy) {
|
||||
self.port = self.proxy.port
|
||||
self.host = self.proxy.hostname
|
||||
} else {
|
||||
self.port = self.uri.port
|
||||
self.host = self.uri.hostname
|
||||
}
|
||||
|
||||
if (self.onResponse === true) {
|
||||
self.onResponse = self.callback
|
||||
delete self.callback
|
||||
}
|
||||
|
||||
var clientErrorHandler = function (error) {
|
||||
if (setHost) delete self.headers.host
|
||||
if (self.req._reusedSocket && error.code === 'ECONNRESET') {
|
||||
self.agent = {addRequest: ForeverAgent.prototype.addRequestNoreuse.bind(self.agent)}
|
||||
self.start()
|
||||
self.req.end()
|
||||
return
|
||||
}
|
||||
if (self.timeout && self.timeoutTimer) clearTimeout(self.timeoutTimer)
|
||||
self.emit('error', error)
|
||||
}
|
||||
if (self.onResponse) self.on('error', function (e) {self.onResponse(e)})
|
||||
if (self.callback) self.on('error', function (e) {self.callback(e)})
|
||||
|
||||
if (self.form) {
|
||||
self.headers['content-type'] = 'application/x-www-form-urlencoded; charset=utf-8'
|
||||
self.body = qs.stringify(self.form).toString('utf8')
|
||||
}
|
||||
|
||||
if (self.oauth) {
|
||||
var form
|
||||
if (self.headers['content-type'] &&
|
||||
self.headers['content-type'].slice(0, 'application/x-www-form-urlencoded'.length) ===
|
||||
'application/x-www-form-urlencoded'
|
||||
) {
|
||||
form = qs.parse(self.body)
|
||||
}
|
||||
if (self.uri.query) {
|
||||
form = qs.parse(self.uri.query)
|
||||
}
|
||||
if (!form) form = {}
|
||||
var oa = {}
|
||||
for (var i in form) oa[i] = form[i]
|
||||
for (var i in self.oauth) oa['oauth_'+i] = self.oauth[i]
|
||||
if (!oa.oauth_version) oa.oauth_version = '1.0'
|
||||
if (!oa.oauth_timestamp) oa.oauth_timestamp = Math.floor( (new Date()).getTime() / 1000 ).toString()
|
||||
if (!oa.oauth_nonce) oa.oauth_nonce = uuid().replace(/-/g, '')
|
||||
|
||||
oa.oauth_signature_method = 'HMAC-SHA1'
|
||||
|
||||
var consumer_secret = oa.oauth_consumer_secret
|
||||
delete oa.oauth_consumer_secret
|
||||
var token_secret = oa.oauth_token_secret
|
||||
delete oa.oauth_token_secret
|
||||
|
||||
var baseurl = self.uri.protocol + '//' + self.uri.host + self.uri.pathname
|
||||
var signature = oauth.hmacsign(self.method, baseurl, oa, consumer_secret, token_secret)
|
||||
|
||||
// oa.oauth_signature = signature
|
||||
for (var i in form) {
|
||||
if ( i.slice(0, 'oauth_') in self.oauth) {
|
||||
// skip
|
||||
} else {
|
||||
delete oa['oauth_'+i]
|
||||
}
|
||||
}
|
||||
self.headers.authorization =
|
||||
'OAuth '+Object.keys(oa).sort().map(function (i) {return i+'="'+oauth.rfc3986(oa[i])+'"'}).join(',')
|
||||
self.headers.authorization += ',oauth_signature="'+oauth.rfc3986(signature)+'"'
|
||||
}
|
||||
|
||||
if (self.uri.auth && !self.headers.authorization) {
|
||||
self.headers.authorization = "Basic " + toBase64(self.uri.auth.split(':').map(function(item){ return qs.unescape(item)}).join(':'))
|
||||
}
|
||||
if (self.proxy && self.proxy.auth && !self.headers['proxy-authorization']) {
|
||||
self.headers['proxy-authorization'] = "Basic " + toBase64(self.proxy.auth.split(':').map(function(item){ return qs.unescape(item)}).join(':'))
|
||||
}
|
||||
|
||||
if (self.uri.path) {
|
||||
self.path = self.uri.path
|
||||
} else {
|
||||
self.path = self.uri.pathname + (self.uri.search || "")
|
||||
}
|
||||
|
||||
if (self.path.length === 0) self.path = '/'
|
||||
|
||||
if (self.proxy) self.path = (self.uri.protocol + '//' + self.uri.host + self.path)
|
||||
|
||||
if (self.json) {
|
||||
self.headers['content-type'] = 'application/json'
|
||||
if (typeof self.json === 'boolean') {
|
||||
if (typeof self.body === 'object') self.body = JSON.stringify(self.body)
|
||||
} else {
|
||||
self.body = JSON.stringify(self.json)
|
||||
}
|
||||
|
||||
} else if (self.multipart) {
|
||||
self.body = []
|
||||
|
||||
if (!self.headers['content-type']) {
|
||||
self.headers['content-type'] = 'multipart/related;boundary="frontier"';
|
||||
} else {
|
||||
self.headers['content-type'] = self.headers['content-type'].split(';')[0] + ';boundary="frontier"';
|
||||
}
|
||||
|
||||
if (!self.multipart.forEach) throw new Error('Argument error, options.multipart.')
|
||||
|
||||
self.multipart.forEach(function (part) {
|
||||
var body = part.body
|
||||
if(!body) throw Error('Body attribute missing in multipart.')
|
||||
delete part.body
|
||||
var preamble = '--frontier\r\n'
|
||||
Object.keys(part).forEach(function(key){
|
||||
preamble += key + ': ' + part[key] + '\r\n'
|
||||
})
|
||||
preamble += '\r\n'
|
||||
self.body.push(new Buffer(preamble))
|
||||
self.body.push(new Buffer(body))
|
||||
self.body.push(new Buffer('\r\n'))
|
||||
})
|
||||
self.body.push(new Buffer('--frontier--'))
|
||||
}
|
||||
|
||||
if (self.body) {
|
||||
var length = 0
|
||||
if (!Buffer.isBuffer(self.body)) {
|
||||
if (Array.isArray(self.body)) {
|
||||
for (var i = 0; i < self.body.length; i++) {
|
||||
length += self.body[i].length
|
||||
}
|
||||
} else {
|
||||
self.body = new Buffer(self.body)
|
||||
length = self.body.length
|
||||
}
|
||||
} else {
|
||||
length = self.body.length
|
||||
}
|
||||
if (length) {
|
||||
self.headers['content-length'] = length
|
||||
} else {
|
||||
throw new Error('Argument error, options.body.')
|
||||
}
|
||||
}
|
||||
|
||||
var protocol = self.proxy ? self.proxy.protocol : self.uri.protocol
|
||||
, defaultModules = {'http:':http, 'https:':https}
|
||||
, httpModules = self.httpModules || {}
|
||||
;
|
||||
self.httpModule = httpModules[protocol] || defaultModules[protocol]
|
||||
|
||||
if (!self.httpModule) throw new Error("Invalid protocol")
|
||||
|
||||
if (self.pool === false) {
|
||||
self.agent = false
|
||||
} else {
|
||||
if (self.maxSockets) {
|
||||
// Don't use our pooling if node has the refactored client
|
||||
self.agent = self.agent || self.httpModule.globalAgent || self.getAgent(self.host, self.port)
|
||||
self.agent.maxSockets = self.maxSockets
|
||||
}
|
||||
if (self.pool.maxSockets) {
|
||||
// Don't use our pooling if node has the refactored client
|
||||
self.agent = self.agent || self.httpModule.globalAgent || self.getAgent(self.host, self.port)
|
||||
self.agent.maxSockets = self.pool.maxSockets
|
||||
}
|
||||
}
|
||||
|
||||
self.start = function () {
|
||||
self._started = true
|
||||
self.method = self.method || 'GET'
|
||||
self.href = self.uri.href
|
||||
if (log) log('%method %href', self)
|
||||
self.req = self.httpModule.request(self, function (response) {
|
||||
self.response = response
|
||||
response.request = self
|
||||
|
||||
if (self.httpModule === https &&
|
||||
self.strictSSL &&
|
||||
!response.client.authorized) {
|
||||
var sslErr = response.client.authorizationError
|
||||
self.emit('error', new Error('SSL Error: '+ sslErr))
|
||||
return
|
||||
}
|
||||
|
||||
if (setHost) delete self.headers.host
|
||||
if (self.timeout && self.timeoutTimer) clearTimeout(self.timeoutTimer)
|
||||
|
||||
if (response.headers['set-cookie'] && (!self._disableCookies)) {
|
||||
response.headers['set-cookie'].forEach(function(cookie) {
|
||||
if (self.jar) self.jar.add(new Cookie(cookie))
|
||||
else cookieJar.add(new Cookie(cookie))
|
||||
})
|
||||
}
|
||||
|
||||
if (response.statusCode >= 300 &&
|
||||
response.statusCode < 400 &&
|
||||
self.followRedirect &&
|
||||
self.method !== 'PUT' &&
|
||||
self.method !== 'POST' &&
|
||||
response.headers.location) {
|
||||
if (self._redirectsFollowed >= self.maxRedirects) {
|
||||
self.emit('error', new Error("Exceeded maxRedirects. Probably stuck in a redirect loop."))
|
||||
return
|
||||
}
|
||||
self._redirectsFollowed += 1
|
||||
|
||||
if (!isUrl.test(response.headers.location)) {
|
||||
response.headers.location = url.resolve(self.uri.href, response.headers.location)
|
||||
}
|
||||
self.uri = response.headers.location
|
||||
self.redirects.push(
|
||||
{ statusCode : response.statusCode
|
||||
, redirectUri: response.headers.location
|
||||
}
|
||||
)
|
||||
delete self.req
|
||||
delete self.agent
|
||||
delete self._started
|
||||
if (self.headers) {
|
||||
delete self.headers.host
|
||||
}
|
||||
if (log) log('Redirect to %uri', self)
|
||||
request(self, self.callback)
|
||||
return // Ignore the rest of the response
|
||||
} else {
|
||||
self._redirectsFollowed = self._redirectsFollowed || 0
|
||||
// Be a good stream and emit end when the response is finished.
|
||||
// Hack to emit end on close because of a core bug that never fires end
|
||||
response.on('close', function () {
|
||||
if (!self._ended) self.response.emit('end')
|
||||
})
|
||||
|
||||
if (self.encoding) {
|
||||
if (self.dests.length !== 0) {
|
||||
console.error("Ingoring encoding parameter as this stream is being piped to another stream which makes the encoding option invalid.")
|
||||
} else {
|
||||
response.setEncoding(self.encoding)
|
||||
}
|
||||
}
|
||||
|
||||
self.pipeDest = function (dest) {
|
||||
if (dest.headers) {
|
||||
dest.headers['content-type'] = response.headers['content-type']
|
||||
if (response.headers['content-length']) {
|
||||
dest.headers['content-length'] = response.headers['content-length']
|
||||
}
|
||||
}
|
||||
if (dest.setHeader) {
|
||||
for (var i in response.headers) {
|
||||
dest.setHeader(i, response.headers[i])
|
||||
}
|
||||
dest.statusCode = response.statusCode
|
||||
}
|
||||
if (self.pipefilter) self.pipefilter(response, dest)
|
||||
}
|
||||
|
||||
self.dests.forEach(function (dest) {
|
||||
self.pipeDest(dest)
|
||||
})
|
||||
|
||||
response.on("data", function (chunk) {
|
||||
self._destdata = true
|
||||
self.emit("data", chunk)
|
||||
})
|
||||
response.on("end", function (chunk) {
|
||||
self._ended = true
|
||||
self.emit("end", chunk)
|
||||
})
|
||||
response.on("close", function () {self.emit("close")})
|
||||
|
||||
self.emit('response', response)
|
||||
|
||||
if (self.onResponse) {
|
||||
self.onResponse(null, response)
|
||||
}
|
||||
if (self.callback) {
|
||||
var buffer = []
|
||||
var bodyLen = 0
|
||||
self.on("data", function (chunk) {
|
||||
buffer.push(chunk)
|
||||
bodyLen += chunk.length
|
||||
})
|
||||
self.on("end", function () {
|
||||
if (buffer.length && Buffer.isBuffer(buffer[0])) {
|
||||
var body = new Buffer(bodyLen)
|
||||
var i = 0
|
||||
buffer.forEach(function (chunk) {
|
||||
chunk.copy(body, i, 0, chunk.length)
|
||||
i += chunk.length
|
||||
})
|
||||
if (self.encoding === null) {
|
||||
response.body = body
|
||||
} else {
|
||||
response.body = body.toString()
|
||||
}
|
||||
} else if (buffer.length) {
|
||||
response.body = buffer.join('')
|
||||
}
|
||||
|
||||
if (self.json) {
|
||||
try {
|
||||
response.body = JSON.parse(response.body)
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
self.callback(null, response, response.body)
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (self.timeout && !self.timeoutTimer) {
|
||||
self.timeoutTimer = setTimeout(function() {
|
||||
self.req.abort()
|
||||
var e = new Error("ETIMEDOUT")
|
||||
e.code = "ETIMEDOUT"
|
||||
self.emit("error", e)
|
||||
}, self.timeout)
|
||||
}
|
||||
|
||||
self.req.on('error', clientErrorHandler)
|
||||
}
|
||||
|
||||
self.once('pipe', function (src) {
|
||||
if (self.ntick) throw new Error("You cannot pipe to this stream after the first nextTick() after creation of the request stream.")
|
||||
self.src = src
|
||||
if (isReadStream(src)) {
|
||||
if (!self.headers['content-type'] && !self.headers['Content-Type'])
|
||||
self.headers['content-type'] = mimetypes.lookup(src.path.slice(src.path.lastIndexOf('.')+1))
|
||||
} else {
|
||||
if (src.headers) {
|
||||
for (var i in src.headers) {
|
||||
if (!self.headers[i]) {
|
||||
self.headers[i] = src.headers[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
if (src.method && !self.method) {
|
||||
self.method = src.method
|
||||
}
|
||||
}
|
||||
|
||||
self.on('pipe', function () {
|
||||
console.error("You have already piped to this stream. Pipeing twice is likely to break the request.")
|
||||
})
|
||||
})
|
||||
|
||||
process.nextTick(function () {
|
||||
if (self.body) {
|
||||
if (Array.isArray(self.body)) {
|
||||
self.body.forEach(function(part) {
|
||||
self.write(part)
|
||||
})
|
||||
} else {
|
||||
self.write(self.body)
|
||||
}
|
||||
self.end()
|
||||
} else if (self.requestBodyStream) {
|
||||
console.warn("options.requestBodyStream is deprecated, please pass the request object to stream.pipe.")
|
||||
self.requestBodyStream.pipe(self)
|
||||
} else if (!self.src) {
|
||||
self.headers['content-length'] = 0
|
||||
self.end()
|
||||
}
|
||||
self.ntick = true
|
||||
})
|
||||
}
|
||||
Request.prototype.pipe = function (dest) {
|
||||
if (this.response) {
|
||||
if (this._destdata) {
|
||||
throw new Error("You cannot pipe after data has been emitted from the response.")
|
||||
} else if (this._ended) {
|
||||
throw new Error("You cannot pipe after the response has been ended.")
|
||||
} else {
|
||||
stream.Stream.prototype.pipe.call(this, dest)
|
||||
this.pipeDest(dest)
|
||||
return dest
|
||||
}
|
||||
} else {
|
||||
this.dests.push(dest)
|
||||
stream.Stream.prototype.pipe.call(this, dest)
|
||||
return dest
|
||||
}
|
||||
}
|
||||
Request.prototype.write = function () {
|
||||
if (!this._started) this.start()
|
||||
if (!this.req) throw new Error("This request has been piped before http.request() was called.")
|
||||
this.req.write.apply(this.req, arguments)
|
||||
}
|
||||
Request.prototype.end = function () {
|
||||
if (!this._started) this.start()
|
||||
if (!this.req) throw new Error("This request has been piped before http.request() was called.")
|
||||
this.req.end.apply(this.req, arguments)
|
||||
}
|
||||
Request.prototype.pause = function () {
|
||||
if (!this.response) throw new Error("This request has been piped before http.request() was called.")
|
||||
this.response.pause.apply(this.response, arguments)
|
||||
}
|
||||
Request.prototype.resume = function () {
|
||||
if (!this.response) throw new Error("This request has been piped before http.request() was called.")
|
||||
this.response.resume.apply(this.response, arguments)
|
||||
}
|
||||
|
||||
function request (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
if (callback) options.callback = callback
|
||||
var r = new Request(options)
|
||||
r.request()
|
||||
return r
|
||||
}
|
||||
|
||||
module.exports = request
|
||||
|
||||
request.defaults = function (options) {
|
||||
var def = function (method) {
|
||||
var d = function (opts, callback) {
|
||||
if (typeof opts === 'string') opts = {uri:opts}
|
||||
for (var i in options) {
|
||||
if (opts[i] === undefined) opts[i] = options[i]
|
||||
}
|
||||
return method(opts, callback)
|
||||
}
|
||||
return d
|
||||
}
|
||||
var de = def(request)
|
||||
de.get = def(request.get)
|
||||
de.post = def(request.post)
|
||||
de.put = def(request.put)
|
||||
de.head = def(request.head)
|
||||
de.del = def(request.del)
|
||||
de.cookie = def(request.cookie)
|
||||
de.jar = def(request.jar)
|
||||
return de
|
||||
}
|
||||
|
||||
request.forever = function (agentOptions, optionsArg) {
|
||||
var options = {}
|
||||
if (agentOptions) {
|
||||
for (option in optionsArg) {
|
||||
options[option] = optionsArg[option]
|
||||
}
|
||||
}
|
||||
options.agent = new ForeverAgent(agentOptions)
|
||||
return request.defaults(options)
|
||||
}
|
||||
|
||||
request.get = request
|
||||
request.post = function (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
options.method = 'POST'
|
||||
return request(options, callback)
|
||||
}
|
||||
request.put = function (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
options.method = 'PUT'
|
||||
return request(options, callback)
|
||||
}
|
||||
request.head = function (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
options.method = 'HEAD'
|
||||
if (options.body || options.requestBodyStream || options.json || options.multipart) {
|
||||
throw new Error("HTTP HEAD requests MUST NOT include a request body.")
|
||||
}
|
||||
return request(options, callback)
|
||||
}
|
||||
request.del = function (options, callback) {
|
||||
if (typeof options === 'string') options = {uri:options}
|
||||
options.method = 'DELETE'
|
||||
return request(options, callback)
|
||||
}
|
||||
request.jar = function () {
|
||||
return new CookieJar
|
||||
}
|
||||
request.cookie = function (str) {
|
||||
if (typeof str !== 'string') throw new Error("The cookie function only accepts STRING as param")
|
||||
return new Cookie(str)
|
||||
}
|
146
node_modules/request/mimetypes.js
generated
vendored
Normal file
|
@@ -0,0 +1,146 @@
|
|||
// from http://github.com/felixge/node-paperboy
|
||||
exports.types = {
|
||||
"aiff":"audio/x-aiff",
|
||||
"arj":"application/x-arj-compressed",
|
||||
"asf":"video/x-ms-asf",
|
||||
"asx":"video/x-ms-asx",
|
||||
"au":"audio/ulaw",
|
||||
"avi":"video/x-msvideo",
|
||||
"bcpio":"application/x-bcpio",
|
||||
"ccad":"application/clariscad",
|
||||
"cod":"application/vnd.rim.cod",
|
||||
"com":"application/x-msdos-program",
|
||||
"cpio":"application/x-cpio",
|
||||
"cpt":"application/mac-compactpro",
|
||||
"csh":"application/x-csh",
|
||||
"css":"text/css",
|
||||
"deb":"application/x-debian-package",
|
||||
"dl":"video/dl",
|
||||
"doc":"application/msword",
|
||||
"drw":"application/drafting",
|
||||
"dvi":"application/x-dvi",
|
||||
"dwg":"application/acad",
|
||||
"dxf":"application/dxf",
|
||||
"dxr":"application/x-director",
|
||||
"etx":"text/x-setext",
|
||||
"ez":"application/andrew-inset",
|
||||
"fli":"video/x-fli",
|
||||
"flv":"video/x-flv",
|
||||
"gif":"image/gif",
|
||||
"gl":"video/gl",
|
||||
"gtar":"application/x-gtar",
|
||||
"gz":"application/x-gzip",
|
||||
"hdf":"application/x-hdf",
|
||||
"hqx":"application/mac-binhex40",
|
||||
"html":"text/html",
|
||||
"ice":"x-conference/x-cooltalk",
|
||||
"ico":"image/x-icon",
|
||||
"ief":"image/ief",
|
||||
"igs":"model/iges",
|
||||
"ips":"application/x-ipscript",
|
||||
"ipx":"application/x-ipix",
|
||||
"jad":"text/vnd.sun.j2me.app-descriptor",
|
||||
"jar":"application/java-archive",
|
||||
"jpeg":"image/jpeg",
|
||||
"jpg":"image/jpeg",
|
||||
"js":"text/javascript",
|
||||
"json":"application/json",
|
||||
"latex":"application/x-latex",
|
||||
"lsp":"application/x-lisp",
|
||||
"lzh":"application/octet-stream",
|
||||
"m":"text/plain",
|
||||
"m3u":"audio/x-mpegurl",
|
||||
"man":"application/x-troff-man",
|
||||
"me":"application/x-troff-me",
|
||||
"midi":"audio/midi",
|
||||
"mif":"application/x-mif",
|
||||
"mime":"www/mime",
|
||||
"movie":"video/x-sgi-movie",
|
||||
"mustache":"text/plain",
|
||||
"mp4":"video/mp4",
|
||||
"mpg":"video/mpeg",
|
||||
"mpga":"audio/mpeg",
|
||||
"ms":"application/x-troff-ms",
|
||||
"nc":"application/x-netcdf",
|
||||
"oda":"application/oda",
|
||||
"ogm":"application/ogg",
|
||||
"pbm":"image/x-portable-bitmap",
|
||||
"pdf":"application/pdf",
|
||||
"pgm":"image/x-portable-graymap",
|
||||
"pgn":"application/x-chess-pgn",
|
||||
"pgp":"application/pgp",
|
||||
"pm":"application/x-perl",
|
||||
"png":"image/png",
|
||||
"pnm":"image/x-portable-anymap",
|
||||
"ppm":"image/x-portable-pixmap",
|
||||
"ppz":"application/vnd.ms-powerpoint",
|
||||
"pre":"application/x-freelance",
|
||||
"prt":"application/pro_eng",
|
||||
"ps":"application/postscript",
|
||||
"qt":"video/quicktime",
|
||||
"ra":"audio/x-realaudio",
|
||||
"rar":"application/x-rar-compressed",
|
||||
"ras":"image/x-cmu-raster",
|
||||
"rgb":"image/x-rgb",
|
||||
"rm":"audio/x-pn-realaudio",
|
||||
"rpm":"audio/x-pn-realaudio-plugin",
|
||||
"rtf":"text/rtf",
|
||||
"rtx":"text/richtext",
|
||||
"scm":"application/x-lotusscreencam",
|
||||
"set":"application/set",
|
||||
"sgml":"text/sgml",
|
||||
"sh":"application/x-sh",
|
||||
"shar":"application/x-shar",
|
||||
"silo":"model/mesh",
|
||||
"sit":"application/x-stuffit",
|
||||
"skt":"application/x-koan",
|
||||
"smil":"application/smil",
|
||||
"snd":"audio/basic",
|
||||
"sol":"application/solids",
|
||||
"spl":"application/x-futuresplash",
|
||||
"src":"application/x-wais-source",
|
||||
"stl":"application/SLA",
|
||||
"stp":"application/STEP",
|
||||
"sv4cpio":"application/x-sv4cpio",
|
||||
"sv4crc":"application/x-sv4crc",
|
||||
"svg":"image/svg+xml",
|
||||
"swf":"application/x-shockwave-flash",
|
||||
"tar":"application/x-tar",
|
||||
"tcl":"application/x-tcl",
|
||||
"tex":"application/x-tex",
|
||||
"texinfo":"application/x-texinfo",
|
||||
"tgz":"application/x-tar-gz",
|
||||
"tiff":"image/tiff",
|
||||
"tr":"application/x-troff",
|
||||
"tsi":"audio/TSP-audio",
|
||||
"tsp":"application/dsptype",
|
||||
"tsv":"text/tab-separated-values",
|
||||
"unv":"application/i-deas",
|
||||
"ustar":"application/x-ustar",
|
||||
"vcd":"application/x-cdlink",
|
||||
"vda":"application/vda",
|
||||
"vivo":"video/vnd.vivo",
|
||||
"vrm":"x-world/x-vrml",
|
||||
"wav":"audio/x-wav",
|
||||
"wax":"audio/x-ms-wax",
|
||||
"wma":"audio/x-ms-wma",
|
||||
"wmv":"video/x-ms-wmv",
|
||||
"wmx":"video/x-ms-wmx",
|
||||
"wrl":"model/vrml",
|
||||
"wvx":"video/x-ms-wvx",
|
||||
"xbm":"image/x-xbitmap",
|
||||
"xlw":"application/vnd.ms-excel",
|
||||
"xml":"text/xml",
|
||||
"xpm":"image/x-xpixmap",
|
||||
"xwd":"image/x-xwindowdump",
|
||||
"xyz":"chemical/x-pdb",
|
||||
"zip":"application/zip",
|
||||
};
|
||||
|
||||
exports.lookup = function(ext, defaultType) {
|
||||
defaultType = defaultType || 'application/octet-stream';
|
||||
|
||||
return (ext in exports.types)
|
||||
? exports.types[ext]
|
||||
: defaultType;
|
||||
};
|
34
node_modules/request/oauth.js
generated
vendored
Normal file
|
@@ -0,0 +1,34 @@
|
|||
var crypto = require('crypto')
|
||||
, qs = require('querystring')
|
||||
;
|
||||
|
||||
function sha1 (key, body) {
|
||||
return crypto.createHmac('sha1', key).update(body).digest('base64')
|
||||
}
|
||||
|
||||
function rfc3986 (str) {
|
||||
return encodeURIComponent(str)
|
||||
.replace('!','%21')
|
||||
.replace('*','%2A')
|
||||
.replace('(','%28')
|
||||
.replace(')','%29')
|
||||
.replace("'",'%27')
|
||||
;
|
||||
}
|
||||
|
||||
function hmacsign (httpMethod, base_uri, params, consumer_secret, token_secret, body) {
|
||||
// adapted from https://dev.twitter.com/docs/auth/oauth
|
||||
var base =
|
||||
httpMethod + "&" +
|
||||
encodeURIComponent( base_uri ) + "&" +
|
||||
Object.keys(params).sort().map(function (i) {
|
||||
// big WTF here with the escape + encoding but it's what twitter wants
|
||||
return escape(rfc3986(i)) + "%3D" + escape(rfc3986(params[i]))
|
||||
}).join("%26")
|
||||
var key = consumer_secret + '&'
|
||||
if (token_secret) key += token_secret
|
||||
return sha1(key, base)
|
||||
}
|
||||
|
||||
exports.hmacsign = hmacsign
|
||||
exports.rfc3986 = rfc3986
|
15
node_modules/request/package.json
generated
vendored
Normal file
|
@@ -0,0 +1,15 @@
|
|||
{ "name" : "request"
|
||||
, "description" : "Simplified HTTP request client."
|
||||
, "tags" : ["http", "simple", "util", "utility"]
|
||||
, "version" : "2.9.100"
|
||||
, "author" : "Mikeal Rogers <mikeal.rogers@gmail.com>"
|
||||
, "repository" :
|
||||
{ "type" : "git"
|
||||
, "url" : "http://github.com/mikeal/request.git"
|
||||
}
|
||||
, "bugs" :
|
||||
{ "url" : "http://github.com/mikeal/request/issues" }
|
||||
, "engines" : ["node >= 0.3.6"]
|
||||
, "main" : "./main"
|
||||
, "scripts": { "test": "bash tests/run.sh" }
|
||||
}
|
BIN
node_modules/request/tests/googledoodle.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 38 KiB |
6
node_modules/request/tests/run.sh
generated
vendored
Executable file
|
@@ -0,0 +1,6 @@
|
|||
FAILS=0
|
||||
for i in tests/test-*.js; do
|
||||
echo $i
|
||||
node $i || let FAILS++
|
||||
done
|
||||
exit $FAILS
|
75
node_modules/request/tests/server.js
generated
vendored
Normal file
|
@@ -0,0 +1,75 @@
|
|||
var fs = require('fs')
|
||||
, http = require('http')
|
||||
, path = require('path')
|
||||
, https = require('https')
|
||||
, events = require('events')
|
||||
, stream = require('stream')
|
||||
, assert = require('assert')
|
||||
;
|
||||
|
||||
exports.createServer = function (port) {
|
||||
port = port || 6767
|
||||
var s = http.createServer(function (req, resp) {
|
||||
s.emit(req.url, req, resp);
|
||||
})
|
||||
s.port = port
|
||||
s.url = 'http://localhost:'+port
|
||||
return s;
|
||||
}
|
||||
|
||||
exports.createSSLServer = function(port) {
|
||||
port = port || 16767
|
||||
|
||||
var options = { 'key' : fs.readFileSync(path.join(__dirname, 'ssl', 'test.key'))
|
||||
, 'cert': fs.readFileSync(path.join(__dirname, 'ssl', 'test.crt'))
|
||||
}
|
||||
|
||||
var s = https.createServer(options, function (req, resp) {
|
||||
s.emit(req.url, req, resp);
|
||||
})
|
||||
s.port = port
|
||||
s.url = 'https://localhost:'+port
|
||||
return s;
|
||||
}
|
||||
|
||||
exports.createPostStream = function (text) {
|
||||
var postStream = new stream.Stream();
|
||||
postStream.writeable = true;
|
||||
postStream.readable = true;
|
||||
setTimeout(function () {postStream.emit('data', new Buffer(text)); postStream.emit('end')}, 0);
|
||||
return postStream;
|
||||
}
|
||||
exports.createPostValidator = function (text) {
|
||||
var l = function (req, resp) {
|
||||
var r = '';
|
||||
req.on('data', function (chunk) {r += chunk})
|
||||
req.on('end', function () {
|
||||
if (r !== text) console.log(r, text);
|
||||
assert.equal(r, text)
|
||||
resp.writeHead(200, {'content-type':'text/plain'})
|
||||
resp.write('OK')
|
||||
resp.end()
|
||||
})
|
||||
}
|
||||
return l;
|
||||
}
|
||||
exports.createGetResponse = function (text, contentType) {
|
||||
var l = function (req, resp) {
|
||||
contentType = contentType || 'text/plain'
|
||||
resp.writeHead(200, {'content-type':contentType})
|
||||
resp.write(text)
|
||||
resp.end()
|
||||
}
|
||||
return l;
|
||||
}
|
||||
exports.createChunkResponse = function (chunks, contentType) {
|
||||
var l = function (req, resp) {
|
||||
contentType = contentType || 'text/plain'
|
||||
resp.writeHead(200, {'content-type':contentType})
|
||||
chunks.forEach(function (chunk) {
|
||||
resp.write(chunk)
|
||||
})
|
||||
resp.end()
|
||||
}
|
||||
return l;
|
||||
}
|
15
node_modules/request/tests/ssl/test.crt
generated
vendored
Normal file
|
@@ -0,0 +1,15 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIICQzCCAawCCQCO/XWtRFck1jANBgkqhkiG9w0BAQUFADBmMQswCQYDVQQGEwJU
|
||||
SDEQMA4GA1UECBMHQmFuZ2tvazEOMAwGA1UEBxMFU2lsb20xGzAZBgNVBAoTElRo
|
||||
ZSBSZXF1ZXN0IE1vZHVsZTEYMBYGA1UEAxMPcmVxdWVzdC5leGFtcGxlMB4XDTEx
|
||||
MTIwMzAyMjkyM1oXDTIxMTEzMDAyMjkyM1owZjELMAkGA1UEBhMCVEgxEDAOBgNV
|
||||
BAgTB0Jhbmdrb2sxDjAMBgNVBAcTBVNpbG9tMRswGQYDVQQKExJUaGUgUmVxdWVz
|
||||
dCBNb2R1bGUxGDAWBgNVBAMTD3JlcXVlc3QuZXhhbXBsZTCBnzANBgkqhkiG9w0B
|
||||
AQEFAAOBjQAwgYkCgYEAwmctddZqlA48+NXs0yOy92DijcQV1jf87zMiYAIlNUto
|
||||
wghVbTWgJU5r0pdKrD16AptnWJTzKanhItEX8XCCPgsNkq1afgTtJP7rNkwu3xcj
|
||||
eIMkhJg/ay4ZnkbnhYdsii5VTU5prix6AqWRAhbkBgoA+iVyHyof8wvZyKBoFTMC
|
||||
AwEAATANBgkqhkiG9w0BAQUFAAOBgQB6BybMJbpeiABgihDfEVBcAjDoQ8gUMgwV
|
||||
l4NulugfKTDmArqnR9aPd4ET5jX5dkMP4bwCHYsvrcYDeWEQy7x5WWuylOdKhua4
|
||||
L4cEi2uDCjqEErIG3cc1MCOk6Cl6Ld6tkIzQSf953qfdEACRytOeUqLNQcrXrqeE
|
||||
c7U8F6MWLQ==
|
||||
-----END CERTIFICATE-----
|
15
node_modules/request/tests/ssl/test.key
generated
vendored
Normal file
|
@@ -0,0 +1,15 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIICXgIBAAKBgQDCZy111mqUDjz41ezTI7L3YOKNxBXWN/zvMyJgAiU1S2jCCFVt
|
||||
NaAlTmvSl0qsPXoCm2dYlPMpqeEi0RfxcII+Cw2SrVp+BO0k/us2TC7fFyN4gySE
|
||||
mD9rLhmeRueFh2yKLlVNTmmuLHoCpZECFuQGCgD6JXIfKh/zC9nIoGgVMwIDAQAB
|
||||
AoGBALXFwfUf8vHTSmGlrdZS2AGFPvEtuvldyoxi9K5u8xmdFCvxnOcLsF2RsTHt
|
||||
Mu5QYWhUpNJoG+IGLTPf7RJdj/kNtEs7xXqWy4jR36kt5z5MJzqiK+QIgiO9UFWZ
|
||||
fjUb6oeDnTIJA9YFBdYi97MDuL89iU/UK3LkJN3hd4rciSbpAkEA+MCkowF5kSFb
|
||||
rkOTBYBXZfiAG78itDXN6DXmqb9XYY+YBh3BiQM28oxCeQYyFy6pk/nstnd4TXk6
|
||||
V/ryA2g5NwJBAMgRKTY9KvxJWbESeMEFe2iBIV0c26/72Amgi7ZKUCLukLfD4tLF
|
||||
+WSZdmTbbqI1079YtwaiOVfiLm45Q/3B0eUCQAaQ/0eWSGE+Yi8tdXoVszjr4GXb
|
||||
G81qBi91DMu6U1It+jNfIba+MPsiHLcZJMVb4/oWBNukN7bD1nhwFWdlnu0CQQCf
|
||||
Is9WHkdvz2RxbZDxb8verz/7kXXJQJhx5+rZf7jIYFxqX3yvTNv3wf2jcctJaWlZ
|
||||
fVZwB193YSivcgt778xlAkEAprYUz3jczjF5r2hrgbizPzPDR94tM5BTO3ki2v3w
|
||||
kbf+j2g7FNAx6kZiVN8XwfLc8xEeUGiPKwtq3ddPDFh17w==
|
||||
-----END RSA PRIVATE KEY-----
|
95
node_modules/request/tests/test-body.js
generated
vendored
Normal file
|
@@ -0,0 +1,95 @@
|
|||
var server = require('./server')
|
||||
, events = require('events')
|
||||
, stream = require('stream')
|
||||
, assert = require('assert')
|
||||
, request = require('../main.js')
|
||||
;
|
||||
|
||||
var s = server.createServer();
|
||||
|
||||
var tests =
|
||||
{ testGet :
|
||||
{ resp : server.createGetResponse("TESTING!")
|
||||
, expectBody: "TESTING!"
|
||||
}
|
||||
, testGetChunkBreak :
|
||||
{ resp : server.createChunkResponse(
|
||||
[ new Buffer([239])
|
||||
, new Buffer([163])
|
||||
, new Buffer([191])
|
||||
, new Buffer([206])
|
||||
, new Buffer([169])
|
||||
, new Buffer([226])
|
||||
, new Buffer([152])
|
||||
, new Buffer([131])
|
||||
])
|
||||
, expectBody: "Ω☃"
|
||||
}
|
||||
, testGetBuffer :
|
||||
{ resp : server.createGetResponse(new Buffer("TESTING!"))
|
||||
, encoding: null
|
||||
, expectBody: new Buffer("TESTING!")
|
||||
}
|
||||
, testGetJSON :
|
||||
{ resp : server.createGetResponse('{"test":true}', 'application/json')
|
||||
, json : true
|
||||
, expectBody: {"test":true}
|
||||
}
|
||||
, testPutString :
|
||||
{ resp : server.createPostValidator("PUTTINGDATA")
|
||||
, method : "PUT"
|
||||
, body : "PUTTINGDATA"
|
||||
}
|
||||
, testPutBuffer :
|
||||
{ resp : server.createPostValidator("PUTTINGDATA")
|
||||
, method : "PUT"
|
||||
, body : new Buffer("PUTTINGDATA")
|
||||
}
|
||||
, testPutJSON :
|
||||
{ resp : server.createPostValidator(JSON.stringify({foo: 'bar'}))
|
||||
, method: "PUT"
|
||||
, json: {foo: 'bar'}
|
||||
}
|
||||
, testPutMultipart :
|
||||
{ resp: server.createPostValidator(
|
||||
'--frontier\r\n' +
|
||||
'content-type: text/html\r\n' +
|
||||
'\r\n' +
|
||||
'<html><body>Oh hi.</body></html>' +
|
||||
'\r\n--frontier\r\n\r\n' +
|
||||
'Oh hi.' +
|
||||
'\r\n--frontier--'
|
||||
)
|
||||
, method: "PUT"
|
||||
, multipart:
|
||||
[ {'content-type': 'text/html', 'body': '<html><body>Oh hi.</body></html>'}
|
||||
, {'body': 'Oh hi.'}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
s.listen(s.port, function () {
|
||||
|
||||
var counter = 0
|
||||
|
||||
for (i in tests) {
|
||||
(function () {
|
||||
var test = tests[i]
|
||||
s.on('/'+i, test.resp)
|
||||
test.uri = s.url + '/' + i
|
||||
request(test, function (err, resp, body) {
|
||||
if (err) throw err
|
||||
if (test.expectBody) {
|
||||
assert.deepEqual(test.expectBody, body)
|
||||
}
|
||||
counter = counter - 1;
|
||||
if (counter === 0) {
|
||||
console.log(Object.keys(tests).length+" tests passed.")
|
||||
s.close()
|
||||
}
|
||||
})
|
||||
counter++
|
||||
})()
|
||||
}
|
||||
})
|
||||
|
29
node_modules/request/tests/test-cookie.js
generated
vendored
Normal file
|
@@ -0,0 +1,29 @@
|
|||
var Cookie = require('../vendor/cookie')
|
||||
, assert = require('assert');
|
||||
|
||||
var str = 'sid="s543qactge.wKE61E01Bs%2BKhzmxrwrnug="; path=/; httpOnly; expires=Sat, 04 Dec 2010 23:27:28 GMT';
|
||||
var cookie = new Cookie(str);
|
||||
|
||||
// test .toString()
|
||||
assert.equal(cookie.toString(), str);
|
||||
|
||||
// test .path
|
||||
assert.equal(cookie.path, '/');
|
||||
|
||||
// test .httpOnly
|
||||
assert.equal(cookie.httpOnly, true);
|
||||
|
||||
// test .name
|
||||
assert.equal(cookie.name, 'sid');
|
||||
|
||||
// test .value
|
||||
assert.equal(cookie.value, '"s543qactge.wKE61E01Bs%2BKhzmxrwrnug="');
|
||||
|
||||
// test .expires
|
||||
assert.equal(cookie.expires instanceof Date, true);
|
||||
|
||||
// test .path default
|
||||
var cookie = new Cookie('foo=bar', { url: 'http://foo.com/bar' });
|
||||
assert.equal(cookie.path, '/bar');
|
||||
|
||||
console.log('All tests passed');
|
90
node_modules/request/tests/test-cookiejar.js
generated
vendored
Normal file
|
@@ -0,0 +1,90 @@
|
|||
var Cookie = require('../vendor/cookie')
|
||||
, Jar = require('../vendor/cookie/jar')
|
||||
, assert = require('assert');
|
||||
|
||||
function expires(ms) {
|
||||
return new Date(Date.now() + ms).toUTCString();
|
||||
}
|
||||
|
||||
// test .get() expiration
|
||||
(function() {
|
||||
var jar = new Jar;
|
||||
var cookie = new Cookie('sid=1234; path=/; expires=' + expires(1000));
|
||||
jar.add(cookie);
|
||||
setTimeout(function(){
|
||||
var cookies = jar.get({ url: 'http://foo.com/foo' });
|
||||
assert.equal(cookies.length, 1);
|
||||
assert.equal(cookies[0], cookie);
|
||||
setTimeout(function(){
|
||||
var cookies = jar.get({ url: 'http://foo.com/foo' });
|
||||
assert.equal(cookies.length, 0);
|
||||
}, 1000);
|
||||
}, 5);
|
||||
})();
|
||||
|
||||
// test .get() path support
|
||||
(function() {
|
||||
var jar = new Jar;
|
||||
var a = new Cookie('sid=1234; path=/');
|
||||
var b = new Cookie('sid=1111; path=/foo/bar');
|
||||
var c = new Cookie('sid=2222; path=/');
|
||||
jar.add(a);
|
||||
jar.add(b);
|
||||
jar.add(c);
|
||||
|
||||
// should remove the duplicates
|
||||
assert.equal(jar.cookies.length, 2);
|
||||
|
||||
// same name, same path, latter prevails
|
||||
var cookies = jar.get({ url: 'http://foo.com/' });
|
||||
assert.equal(cookies.length, 1);
|
||||
assert.equal(cookies[0], c);
|
||||
|
||||
// same name, diff path, path specifity prevails, latter prevails
|
||||
var cookies = jar.get({ url: 'http://foo.com/foo/bar' });
|
||||
assert.equal(cookies.length, 1);
|
||||
assert.equal(cookies[0], b);
|
||||
|
||||
var jar = new Jar;
|
||||
var a = new Cookie('sid=1111; path=/foo/bar');
|
||||
var b = new Cookie('sid=1234; path=/');
|
||||
jar.add(a);
|
||||
jar.add(b);
|
||||
|
||||
var cookies = jar.get({ url: 'http://foo.com/foo/bar' });
|
||||
assert.equal(cookies.length, 1);
|
||||
assert.equal(cookies[0], a);
|
||||
|
||||
var cookies = jar.get({ url: 'http://foo.com/' });
|
||||
assert.equal(cookies.length, 1);
|
||||
assert.equal(cookies[0], b);
|
||||
|
||||
var jar = new Jar;
|
||||
var a = new Cookie('sid=1111; path=/foo/bar');
|
||||
var b = new Cookie('sid=3333; path=/foo/bar');
|
||||
var c = new Cookie('pid=3333; path=/foo/bar');
|
||||
var d = new Cookie('sid=2222; path=/foo/');
|
||||
var e = new Cookie('sid=1234; path=/');
|
||||
jar.add(a);
|
||||
jar.add(b);
|
||||
jar.add(c);
|
||||
jar.add(d);
|
||||
jar.add(e);
|
||||
|
||||
var cookies = jar.get({ url: 'http://foo.com/foo/bar' });
|
||||
assert.equal(cookies.length, 2);
|
||||
assert.equal(cookies[0], b);
|
||||
assert.equal(cookies[1], c);
|
||||
|
||||
var cookies = jar.get({ url: 'http://foo.com/foo/' });
|
||||
assert.equal(cookies.length, 1);
|
||||
assert.equal(cookies[0], d);
|
||||
|
||||
var cookies = jar.get({ url: 'http://foo.com/' });
|
||||
assert.equal(cookies.length, 1);
|
||||
assert.equal(cookies[0], e);
|
||||
})();
|
||||
|
||||
setTimeout(function() {
|
||||
console.log('All tests passed');
|
||||
}, 1200);
|
30
node_modules/request/tests/test-errors.js
generated
vendored
Normal file
|
@@ -0,0 +1,30 @@
|
|||
var server = require('./server')
|
||||
, events = require('events')
|
||||
, assert = require('assert')
|
||||
, request = require('../main.js')
|
||||
;
|
||||
|
||||
var local = 'http://localhost:8888/asdf'
|
||||
|
||||
try {
|
||||
request({uri:local, body:{}})
|
||||
assert.fail("Should have throw")
|
||||
} catch(e) {
|
||||
assert.equal(e.message, 'Argument error, options.body.')
|
||||
}
|
||||
|
||||
try {
|
||||
request({uri:local, multipart: 'foo'})
|
||||
assert.fail("Should have throw")
|
||||
} catch(e) {
|
||||
assert.equal(e.message, 'Argument error, options.multipart.')
|
||||
}
|
||||
|
||||
try {
|
||||
request({uri:local, multipart: [{}]})
|
||||
assert.fail("Should have throw")
|
||||
} catch(e) {
|
||||
assert.equal(e.message, 'Body attribute missing in multipart.')
|
||||
}
|
||||
|
||||
console.log("All tests passed.")
|
94
node_modules/request/tests/test-httpModule.js
generated
vendored
Normal file
|
@@ -0,0 +1,94 @@
|
|||
var http = require('http')
|
||||
, https = require('https')
|
||||
, server = require('./server')
|
||||
, assert = require('assert')
|
||||
, request = require('../main.js')
|
||||
|
||||
|
||||
var faux_requests_made = {'http':0, 'https':0}
|
||||
function wrap_request(name, module) {
|
||||
// Just like the http or https module, but note when a request is made.
|
||||
var wrapped = {}
|
||||
Object.keys(module).forEach(function(key) {
|
||||
var value = module[key];
|
||||
|
||||
if(key != 'request')
|
||||
wrapped[key] = value;
|
||||
else
|
||||
wrapped[key] = function(options, callback) {
|
||||
faux_requests_made[name] += 1
|
||||
return value.apply(this, arguments)
|
||||
}
|
||||
})
|
||||
|
||||
return wrapped;
|
||||
}
|
||||
|
||||
|
||||
var faux_http = wrap_request('http', http)
|
||||
, faux_https = wrap_request('https', https)
|
||||
, plain_server = server.createServer()
|
||||
, https_server = server.createSSLServer()
|
||||
|
||||
|
||||
plain_server.listen(plain_server.port, function() {
|
||||
plain_server.on('/plain', function (req, res) {
|
||||
res.writeHead(200)
|
||||
res.end('plain')
|
||||
})
|
||||
plain_server.on('/to_https', function (req, res) {
|
||||
res.writeHead(301, {'location':'https://localhost:'+https_server.port + '/https'})
|
||||
res.end()
|
||||
})
|
||||
|
||||
https_server.listen(https_server.port, function() {
|
||||
https_server.on('/https', function (req, res) {
|
||||
res.writeHead(200)
|
||||
res.end('https')
|
||||
})
|
||||
https_server.on('/to_plain', function (req, res) {
|
||||
res.writeHead(302, {'location':'http://localhost:'+plain_server.port + '/plain'})
|
||||
res.end()
|
||||
})
|
||||
|
||||
run_tests()
|
||||
run_tests({})
|
||||
run_tests({'http:':faux_http})
|
||||
run_tests({'https:':faux_https})
|
||||
run_tests({'http:':faux_http, 'https:':faux_https})
|
||||
})
|
||||
})
|
||||
|
||||
function run_tests(httpModules) {
|
||||
var to_https = {'httpModules':httpModules, 'uri':'http://localhost:'+plain_server.port+'/to_https'}
|
||||
var to_plain = {'httpModules':httpModules, 'uri':'https://localhost:'+https_server.port+'/to_plain'}
|
||||
|
||||
request(to_https, function (er, res, body) {
|
||||
assert.ok(!er, 'Bounce to SSL worked')
|
||||
assert.equal(body, 'https', 'Received HTTPS server body')
|
||||
done()
|
||||
})
|
||||
|
||||
request(to_plain, function (er, res, body) {
|
||||
assert.ok(!er, 'Bounce to plaintext server worked')
|
||||
assert.equal(body, 'plain', 'Received HTTPS server body')
|
||||
done()
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
var passed = 0;
|
||||
function done() {
|
||||
passed += 1
|
||||
var expected = 10
|
||||
|
||||
if(passed == expected) {
|
||||
plain_server.close()
|
||||
https_server.close()
|
||||
|
||||
assert.equal(faux_requests_made.http, 4, 'Wrapped http module called appropriately')
|
||||
assert.equal(faux_requests_made.https, 4, 'Wrapped https module called appropriately')
|
||||
|
||||
console.log((expected+2) + ' tests passed.')
|
||||
}
|
||||
}
|
86
node_modules/request/tests/test-https.js
generated
vendored
Normal file
|
@@ -0,0 +1,86 @@
|
|||
var server = require('./server')
|
||||
, assert = require('assert')
|
||||
, request = require('../main.js')
|
||||
|
||||
var s = server.createSSLServer();
|
||||
|
||||
var tests =
|
||||
{ testGet :
|
||||
{ resp : server.createGetResponse("TESTING!")
|
||||
, expectBody: "TESTING!"
|
||||
}
|
||||
, testGetChunkBreak :
|
||||
{ resp : server.createChunkResponse(
|
||||
[ new Buffer([239])
|
||||
, new Buffer([163])
|
||||
, new Buffer([191])
|
||||
, new Buffer([206])
|
||||
, new Buffer([169])
|
||||
, new Buffer([226])
|
||||
, new Buffer([152])
|
||||
, new Buffer([131])
|
||||
])
|
||||
, expectBody: "Ω☃"
|
||||
}
|
||||
, testGetJSON :
|
||||
{ resp : server.createGetResponse('{"test":true}', 'application/json')
|
||||
, json : true
|
||||
, expectBody: {"test":true}
|
||||
}
|
||||
, testPutString :
|
||||
{ resp : server.createPostValidator("PUTTINGDATA")
|
||||
, method : "PUT"
|
||||
, body : "PUTTINGDATA"
|
||||
}
|
||||
, testPutBuffer :
|
||||
{ resp : server.createPostValidator("PUTTINGDATA")
|
||||
, method : "PUT"
|
||||
, body : new Buffer("PUTTINGDATA")
|
||||
}
|
||||
, testPutJSON :
|
||||
{ resp : server.createPostValidator(JSON.stringify({foo: 'bar'}))
|
||||
, method: "PUT"
|
||||
, json: {foo: 'bar'}
|
||||
}
|
||||
, testPutMultipart :
|
||||
{ resp: server.createPostValidator(
|
||||
'--frontier\r\n' +
|
||||
'content-type: text/html\r\n' +
|
||||
'\r\n' +
|
||||
'<html><body>Oh hi.</body></html>' +
|
||||
'\r\n--frontier\r\n\r\n' +
|
||||
'Oh hi.' +
|
||||
'\r\n--frontier--'
|
||||
)
|
||||
, method: "PUT"
|
||||
, multipart:
|
||||
[ {'content-type': 'text/html', 'body': '<html><body>Oh hi.</body></html>'}
|
||||
, {'body': 'Oh hi.'}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
s.listen(s.port, function () {
|
||||
|
||||
var counter = 0
|
||||
|
||||
for (i in tests) {
|
||||
(function () {
|
||||
var test = tests[i]
|
||||
s.on('/'+i, test.resp)
|
||||
test.uri = s.url + '/' + i
|
||||
request(test, function (err, resp, body) {
|
||||
if (err) throw err
|
||||
if (test.expectBody) {
|
||||
assert.deepEqual(test.expectBody, body)
|
||||
}
|
||||
counter = counter - 1;
|
||||
if (counter === 0) {
|
||||
console.log(Object.keys(tests).length+" tests passed.")
|
||||
s.close()
|
||||
}
|
||||
})
|
||||
counter++
|
||||
})()
|
||||
}
|
||||
})
|
117
node_modules/request/tests/test-oauth.js
generated
vendored
Normal file
|
@@ -0,0 +1,117 @@
|
|||
var hmacsign = require('../oauth').hmacsign
  , assert = require('assert')
  , qs = require('querystring')
  , request = require('../main')
  ;

function getsignature (r) {
  var sign
  r.headers.authorization.slice('OAuth '.length).replace(/,\ /g, ',').split(',').forEach(function (v) {
    if (v.slice(0, 'oauth_signature="'.length) === 'oauth_signature="') sign = v.slice('oauth_signature="'.length, -1)
  })
  return decodeURIComponent(sign)
}

// Tests from Twitter documentation https://dev.twitter.com/docs/auth/oauth

var reqsign = hmacsign('POST', 'https://api.twitter.com/oauth/request_token',
  { oauth_callback: 'http://localhost:3005/the_dance/process_callback?service_provider_id=11'
  , oauth_consumer_key: 'GDdmIQH6jhtmLUypg82g'
  , oauth_nonce: 'QP70eNmVz8jvdPevU3oJD2AfF7R7odC2XJcn4XlZJqk'
  , oauth_signature_method: 'HMAC-SHA1'
  , oauth_timestamp: '1272323042'
  , oauth_version: '1.0'
  }, "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98")

console.log(reqsign)
console.log('8wUi7m5HFQy76nowoCThusfgB+Q=')
assert.equal(reqsign, '8wUi7m5HFQy76nowoCThusfgB+Q=')

var accsign = hmacsign('POST', 'https://api.twitter.com/oauth/access_token',
  { oauth_consumer_key: 'GDdmIQH6jhtmLUypg82g'
  , oauth_nonce: '9zWH6qe0qG7Lc1telCn7FhUbLyVdjEaL3MO5uHxn8'
  , oauth_signature_method: 'HMAC-SHA1'
  , oauth_token: '8ldIZyxQeVrFZXFOZH5tAwj6vzJYuLQpl0WUEYtWc'
  , oauth_timestamp: '1272323047'
  , oauth_verifier: 'pDNg57prOHapMbhv25RNf75lVRd6JDsni1AJJIDYoTY'
  , oauth_version: '1.0'
  }, "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98", "x6qpRnlEmW9JbQn4PQVVeVG8ZLPEx6A0TOebgwcuA")

console.log(accsign)
console.log('PUw/dHA4fnlJYM6RhXk5IU/0fCc=')
assert.equal(accsign, 'PUw/dHA4fnlJYM6RhXk5IU/0fCc=')

var upsign = hmacsign('POST', 'http://api.twitter.com/1/statuses/update.json',
  { oauth_consumer_key: "GDdmIQH6jhtmLUypg82g"
  , oauth_nonce: "oElnnMTQIZvqvlfXM56aBLAf5noGD0AQR3Fmi7Q6Y"
  , oauth_signature_method: "HMAC-SHA1"
  , oauth_token: "819797-Jxq8aYUDRmykzVKrgoLhXSq67TEa5ruc4GJC2rWimw"
  , oauth_timestamp: "1272325550"
  , oauth_version: "1.0"
  , status: 'setting up my twitter 私のさえずりを設定する'
  }, "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98", "J6zix3FfA9LofH0awS24M3HcBYXO5nI1iYe8EfBA")

console.log(upsign)
console.log('yOahq5m0YjDDjfjxHaXEsW9D+X0=')
assert.equal(upsign, 'yOahq5m0YjDDjfjxHaXEsW9D+X0=')


var rsign = request.post(
  { url: 'https://api.twitter.com/oauth/request_token'
  , oauth:
    { callback: 'http://localhost:3005/the_dance/process_callback?service_provider_id=11'
    , consumer_key: 'GDdmIQH6jhtmLUypg82g'
    , nonce: 'QP70eNmVz8jvdPevU3oJD2AfF7R7odC2XJcn4XlZJqk'
    , timestamp: '1272323042'
    , version: '1.0'
    , consumer_secret: "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98"
    }
  })

setTimeout(function () {
  console.log(getsignature(rsign))
  assert.equal(reqsign, getsignature(rsign))
})

var raccsign = request.post(
  { url: 'https://api.twitter.com/oauth/access_token'
  , oauth:
    { consumer_key: 'GDdmIQH6jhtmLUypg82g'
    , nonce: '9zWH6qe0qG7Lc1telCn7FhUbLyVdjEaL3MO5uHxn8'
    , signature_method: 'HMAC-SHA1'
    , token: '8ldIZyxQeVrFZXFOZH5tAwj6vzJYuLQpl0WUEYtWc'
    , timestamp: '1272323047'
    , verifier: 'pDNg57prOHapMbhv25RNf75lVRd6JDsni1AJJIDYoTY'
    , version: '1.0'
    , consumer_secret: "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98"
    , token_secret: "x6qpRnlEmW9JbQn4PQVVeVG8ZLPEx6A0TOebgwcuA"
    }
  })

setTimeout(function () {
  console.log(getsignature(raccsign))
  assert.equal(accsign, getsignature(raccsign))
}, 1)

var rupsign = request.post(
  { url: 'http://api.twitter.com/1/statuses/update.json'
  , oauth:
    { consumer_key: "GDdmIQH6jhtmLUypg82g"
    , nonce: "oElnnMTQIZvqvlfXM56aBLAf5noGD0AQR3Fmi7Q6Y"
    , signature_method: "HMAC-SHA1"
    , token: "819797-Jxq8aYUDRmykzVKrgoLhXSq67TEa5ruc4GJC2rWimw"
    , timestamp: "1272325550"
    , version: "1.0"
    , consumer_secret: "MCD8BKwGdgPHvAuvgvz4EQpqDAtx89grbuNMRd7Eh98"
    , token_secret: "J6zix3FfA9LofH0awS24M3HcBYXO5nI1iYe8EfBA"
    }
  , form: {status: 'setting up my twitter 私のさえずりを設定する'}
  })
setTimeout(function () {
  console.log(getsignature(rupsign))
  assert.equal(upsign, getsignature(rupsign))
}, 1)
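test-oauth.js pins `hmacsign` to the worked examples from Twitter's OAuth documentation, then checks that the `oauth` option on `request.post` produces the same `oauth_signature` in the Authorization header. As a rough sketch of the OAuth 1.0a HMAC-SHA1 algorithm those fixtures exercise (the name `sketchHmacsign` is made up here, and strict RFC 3986 percent-encoding of a few characters is glossed over, so the library's own `oauth.js` remains the reference):

``` js
var crypto = require('crypto')
  , qs = require('querystring')
  ;

// Illustrative only: builds an OAuth 1.0a signature base string and HMAC-SHA1 signs it.
function sketchHmacsign (httpMethod, base_uri, params, consumer_secret, token_secret) {
  // Sorted, encoded key=value pairs joined with '&' form the parameter string.
  var paramString = Object.keys(params).sort().map(function (k) {
    return qs.escape(k) + '=' + qs.escape(params[k])
  }).join('&')

  // Signature base string: METHOD & encoded URI & encoded parameter string.
  var base = [ httpMethod.toUpperCase()
             , qs.escape(base_uri)
             , qs.escape(paramString)
             ].join('&')

  // Signing key: consumer secret & token secret (the token secret may be absent).
  var key = qs.escape(consumer_secret) + '&' + (token_secret ? qs.escape(token_secret) : '')

  return crypto.createHmac('sha1', key).update(base).digest('base64')
}
```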
182
node_modules/request/tests/test-pipes.js
generated
vendored
Normal file
@@ -0,0 +1,182 @@
var server = require('./server')
  , events = require('events')
  , stream = require('stream')
  , assert = require('assert')
  , fs = require('fs')
  , request = require('../main.js')
  , path = require('path')
  , util = require('util')
  ;

var s = server.createServer(3453);

function ValidationStream(str) {
  this.str = str
  this.buf = ''
  this.on('data', function (data) {
    this.buf += data
  })
  this.on('end', function () {
    assert.equal(this.str, this.buf)
  })
  this.writable = true
}
util.inherits(ValidationStream, stream.Stream)
ValidationStream.prototype.write = function (chunk) {
  this.emit('data', chunk)
}
ValidationStream.prototype.end = function (chunk) {
  if (chunk) this.emit('data', chunk)
  this.emit('end')
}

s.listen(s.port, function () {
  counter = 0;

  var check = function () {
    counter = counter - 1
    if (counter === 0) {
      console.log('All tests passed.')
      setTimeout(function () {
        process.exit();
      }, 500)
    }
  }

  // Test piping to a request object
  s.once('/push', server.createPostValidator("mydata"));

  var mydata = new stream.Stream();
  mydata.readable = true

  counter++
  var r1 = request.put({url:'http://localhost:3453/push'}, function () {
    check();
  })
  mydata.pipe(r1)

  mydata.emit('data', 'mydata');
  mydata.emit('end');


  // Test piping from a request object.
  s.once('/pull', server.createGetResponse("mypulldata"));

  var mypulldata = new stream.Stream();
  mypulldata.writable = true

  counter++
  request({url:'http://localhost:3453/pull'}).pipe(mypulldata)

  var d = '';

  mypulldata.write = function (chunk) {
    d += chunk;
  }
  mypulldata.end = function () {
    assert.equal(d, 'mypulldata');
    check();
  };


  s.on('/cat', function (req, resp) {
    if (req.method === "GET") {
      resp.writeHead(200, {'content-type':'text/plain-test', 'content-length':4});
      resp.end('asdf')
    } else if (req.method === "PUT") {
      assert.equal(req.headers['content-type'], 'text/plain-test');
      assert.equal(req.headers['content-length'], 4)
      var validate = '';

      req.on('data', function (chunk) {validate += chunk})
      req.on('end', function () {
        resp.writeHead(201);
        resp.end();
        assert.equal(validate, 'asdf');
        check();
      })
    }
  })
  s.on('/pushjs', function (req, resp) {
    if (req.method === "PUT") {
      assert.equal(req.headers['content-type'], 'text/javascript');
      check();
    }
  })
  s.on('/catresp', function (req, resp) {
    request.get('http://localhost:3453/cat').pipe(resp)
  })
  s.on('/doodle', function (req, resp) {
    if (req.headers['x-oneline-proxy']) {
      resp.setHeader('x-oneline-proxy', 'yup')
    }
    resp.writeHead('200', {'content-type':'image/png'})
    fs.createReadStream(path.join(__dirname, 'googledoodle.png')).pipe(resp)
  })
  s.on('/onelineproxy', function (req, resp) {
    var x = request('http://localhost:3453/doodle')
    req.pipe(x)
    x.pipe(resp)
  })

  counter++
  fs.createReadStream(__filename).pipe(request.put('http://localhost:3453/pushjs'))

  counter++
  request.get('http://localhost:3453/cat').pipe(request.put('http://localhost:3453/cat'))

  counter++
  request.get('http://localhost:3453/catresp', function (e, resp, body) {
    assert.equal(resp.headers['content-type'], 'text/plain-test');
    assert.equal(resp.headers['content-length'], 4)
    check();
  })

  var doodleWrite = fs.createWriteStream(path.join(__dirname, 'test.png'))

  counter++
  request.get('http://localhost:3453/doodle').pipe(doodleWrite)

  doodleWrite.on('close', function () {
    assert.deepEqual(fs.readFileSync(path.join(__dirname, 'googledoodle.png')), fs.readFileSync(path.join(__dirname, 'test.png')))
    check()
  })

  process.on('exit', function () {
    fs.unlinkSync(path.join(__dirname, 'test.png'))
  })

  counter++
  request.get({uri:'http://localhost:3453/onelineproxy', headers:{'x-oneline-proxy':'nope'}}, function (err, resp, body) {
    assert.equal(resp.headers['x-oneline-proxy'], 'yup')
    check()
  })

  s.on('/afterresponse', function (req, resp) {
    resp.write('d')
    resp.end()
  })

  counter++
  var afterresp = request.post('http://localhost:3453/afterresponse').on('response', function () {
    var v = new ValidationStream('d')
    afterresp.pipe(v)
    v.on('end', check)
  })

  s.on('/forward1', function (req, resp) {
    resp.writeHead(302, {location:'/forward2'})
    resp.end()
  })
  s.on('/forward2', function (req, resp) {
    resp.writeHead('200', {'content-type':'image/png'})
    resp.write('d')
    resp.end()
  })

  counter++
  var validateForward = new ValidationStream('d')
  validateForward.on('end', check)
  request.get('http://localhost:3453/forward1').pipe(validateForward)

})
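test-pipes.js treats a request as both ends of a pipe: something readable can be piped into it, and its response can be piped somewhere else. A condensed sketch of the same idioms, with placeholder file names and `require('request')` standing in for the relative `../main.js` used by the suite:

``` js
var fs = require('fs')
  , request = require('request')
  ;

// Upload: pipe a readable stream into a PUT request.
fs.createReadStream('upload.json')
  .pipe(request.put('http://localhost:3453/push'))

// Download: pipe a GET response into a writable stream.
request.get('http://localhost:3453/doodle')
  .pipe(fs.createWriteStream('doodle.png'))

// Proxying, as the /onelineproxy handler does: pipe the incoming request
// through an outgoing one, and the upstream response back to the client.
// req.pipe(request('http://localhost:3453/doodle')).pipe(resp)
```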
39
node_modules/request/tests/test-proxy.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
var server = require('./server')
  , events = require('events')
  , stream = require('stream')
  , assert = require('assert')
  , fs = require('fs')
  , request = require('../main.js')
  , path = require('path')
  , util = require('util')
  ;

var port = 6768
  , called = false
  , proxiedHost = 'google.com'
  ;

var s = server.createServer(port)
s.listen(port, function () {
  s.on('http://google.com/', function (req, res) {
    called = true
    assert.equal(req.headers.host, proxiedHost)
    res.writeHeader(200)
    res.end()
  })
  request({
    url: 'http://'+proxiedHost,
    proxy: 'http://localhost:'+port
    /*
      // should behave as if these arguments were passed:
      url: 'http://localhost:'+port,
      headers: {host: proxiedHost}
    //*/
  }, function (err, res, body) {
    s.close()
  })
})

process.on('exit', function () {
  assert.ok(called, 'the request must be made to the proxy server')
})
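test-proxy.js exercises the `proxy` option: as its inline comment notes, the request should behave as if it were sent to the proxy address while the `Host` header still names the proxied host. A small sketch of that equivalence, with the port and callback body purely illustrative:

``` js
var request = require('request')

// The TCP connection goes to localhost:6768 (the proxy) ...
request({ url: 'http://google.com', proxy: 'http://localhost:6768' }, function (err, res, body) {
  // ... but the proxy sees Host: google.com, which is what the test
  // asserts with assert.equal(req.headers.host, proxiedHost).
})
```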
Some files were not shown because too many files have changed in this diff.