Mirror of https://github.com/maxswa/osrs-json-hiscores.git, synced 2025-10-15 10:19:04 +00:00
1.0.1
5  node_modules/formidable/.travis.yml  generated  vendored  Normal file
@@ -0,0 +1,5 @@
language: node_js
node_js:
  - 4
  - 6
  - 7
7  node_modules/formidable/LICENSE  generated  vendored  Normal file
@@ -0,0 +1,7 @@
Copyright (C) 2011 Felix Geisendörfer

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
336  node_modules/formidable/Readme.md  generated  vendored  Normal file
@@ -0,0 +1,336 @@
# Formidable

[Build Status](https://travis-ci.org/felixge/node-formidable)

## Purpose

A Node.js module for parsing form data, especially file uploads.

## Current status

**Maintainers Wanted:** Please see https://github.com/felixge/node-formidable/issues/412

This module was developed for [Transloadit](http://transloadit.com/), a service focused on uploading
and encoding images and videos. It has been battle-tested against hundreds of GB of file uploads from
a large variety of clients and is considered production-ready.

## Features

* Fast (~500 MB/sec), non-buffering multipart parser
* Automatically writes file uploads to disk
* Low memory footprint
* Graceful error handling
* Very high test coverage

## Installation

```sh
npm i -S formidable
```

This is a low-level package, and if you're using a high-level framework it may already be included. However, [Express v4](http://expressjs.com) does not include any multipart handling, nor does [body-parser](https://github.com/expressjs/body-parser).

Note: Formidable requires [gently](http://github.com/felixge/node-gently) to run the unit tests, but you won't need it for just using the library.

## Example

Parse an incoming file upload.
```javascript
var formidable = require('formidable'),
    http = require('http'),
    util = require('util');

http.createServer(function(req, res) {
  if (req.url == '/upload' && req.method.toLowerCase() == 'post') {
    // parse a file upload
    var form = new formidable.IncomingForm();

    form.parse(req, function(err, fields, files) {
      res.writeHead(200, {'content-type': 'text/plain'});
      res.write('received upload:\n\n');
      res.end(util.inspect({fields: fields, files: files}));
    });

    return;
  }

  // show a file upload form
  res.writeHead(200, {'content-type': 'text/html'});
  res.end(
    '<form action="/upload" enctype="multipart/form-data" method="post">'+
    '<input type="text" name="title"><br>'+
    '<input type="file" name="upload" multiple="multiple"><br>'+
    '<input type="submit" value="Upload">'+
    '</form>'
  );
}).listen(8080);
```
## API

### Formidable.IncomingForm
```javascript
var form = new formidable.IncomingForm()
```
Creates a new incoming form.

```javascript
form.encoding = 'utf-8';
```
Sets encoding for incoming form fields.

```javascript
form.uploadDir = "/my/dir";
```
Sets the directory for placing file uploads in. You can move them later on using
`fs.rename()`. The default is `os.tmpdir()`.

```javascript
form.keepExtensions = false;
```
If you want the files written to `form.uploadDir` to include the extensions of the original files, set this property to `true`.

```javascript
form.type
```
Either 'multipart' or 'urlencoded' depending on the incoming request.

```javascript
form.maxFieldsSize = 20 * 1024 * 1024;
```
Limits the amount of memory all fields together (except files) can allocate in bytes.
If this value is exceeded, an `'error'` event is emitted. The default
size is 20MB.

```javascript
form.maxFileSize = 200 * 1024 * 1024;
```
Limits the size of an uploaded file.
If this value is exceeded, an `'error'` event is emitted. The default
size is 200MB.

```javascript
form.maxFields = 1000;
```
Limits the number of fields that the querystring parser will decode. Defaults
to 1000 (0 for unlimited).
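
For illustration, a minimal sketch of wiring these limits together with the `'error'` event documented below; the values here are arbitrary examples, not recommended defaults:

```javascript
var formidable = require('formidable');

var form = new formidable.IncomingForm();
// example values only - tune them to your own workload
form.maxFieldsSize = 2 * 1024 * 1024;   // non-file field data, in bytes
form.maxFileSize   = 50 * 1024 * 1024;  // uploaded file data, in bytes
form.maxFields     = 100;               // number of decoded fields

form.on('error', function(err) {
  // fired when any of the limits above is exceeded
  console.error('upload rejected:', err.message);
});
```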

```javascript
form.hash = false;
```
If you want checksums calculated for incoming files, set this to either `'sha1'` or `'md5'`.
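
A minimal sketch of reading the digest back: judging by `lib/file.js` in this commit, the hex string replaces `file.hash` once the file has finished writing, so it is available by the time the `'file'` event fires:

```javascript
form.hash = 'sha1';

form.on('file', function(name, file) {
  // file.hash is the hex digest here (or null when hashing is disabled)
  console.log(name + ' sha1: ' + file.hash);
});
```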

```javascript
form.multiples = false;
```
If this option is enabled, when you call `form.parse`, the `files` argument will contain arrays of files for inputs which submit multiple files using the HTML5 `multiple` attribute.
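
Since `files[name]` can then be either a single file object or an array, a small normalization step keeps the callback simple; a sketch (the `asArray` helper is made up for this example):

```javascript
form.multiples = true;

form.parse(req, function(err, fields, files) {
  // hypothetical helper: always work with an array of file objects
  function asArray(entry) {
    return Array.isArray(entry) ? entry : (entry ? [entry] : []);
  }
  asArray(files.upload).forEach(function(file) {
    console.log(file.name, file.size);
  });
});
```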

```javascript
form.bytesReceived
```
The number of bytes received for this form so far.

```javascript
form.bytesExpected
```
The expected number of bytes in this form.

```javascript
form.parse(request, [cb]);
```
Parses an incoming node.js `request` containing form data. If `cb` is provided, all fields and files are collected and passed to the callback:

```javascript
form.parse(req, function(err, fields, files) {
  // ...
});
```

```javascript
form.onPart(part);
```
You may overwrite this method if you are interested in directly accessing the multipart stream. Doing so will disable any `'field'` / `'file'` events processing which would occur otherwise, making you fully responsible for handling the processing.

```javascript
form.onPart = function(part) {
  part.addListener('data', function() {
    // ...
  });
}
```
If you want to use formidable to only handle certain parts for you, you can do so:
```javascript
form.onPart = function(part) {
  if (!part.filename) {
    // let formidable handle all non-file parts
    form.handlePart(part);
  }
}
```
Check the code in this method for further inspiration.
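
As a rough sketch of that pattern, file parts can be streamed to a destination of your choosing while field parts still go through formidable (`part` is a readable stream with `filename` and `mime` set, per `lib/incoming_form.js` in this commit; the destination path below is only an example, and client-supplied names should be sanitized):

```javascript
var fs = require('fs');
var path = require('path');

form.onPart = function(part) {
  if (!part.filename) {
    // let formidable handle non-file parts as usual
    return form.handlePart(part);
  }
  var out = fs.createWriteStream(path.join('/tmp', path.basename(part.filename)));
  part.on('data', function(buffer) { out.write(buffer); });
  part.on('end', function() { out.end(); });
};
```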


### Formidable.File
```javascript
file.size = 0
```
The size of the uploaded file in bytes. If the file is still being uploaded (see `'fileBegin'` event), this property says how many bytes of the file have been written to disk so far.
```javascript
file.path = null
```
The path this file is being written to. You can modify this in the `'fileBegin'` event in
case you are unhappy with the way formidable generates a temporary path for your files.
```javascript
file.name = null
```
The name this file had according to the uploading client.
```javascript
file.type = null
```
The mime type of this file, according to the uploading client.
```javascript
file.lastModifiedDate = null
```
A date object (or `null`) containing the time this file was last written to. Mostly
here for compatibility with the [W3C File API Draft](http://dev.w3.org/2006/webapi/FileAPI/).
```javascript
file.hash = null
```
If hash calculation was set, you can read the hex digest out of this var.

#### Formidable.File#toJSON()

This method returns a JSON representation of the file, allowing you to
`JSON.stringify()` the file, which is useful for logging and responding
to requests.
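
For example, `JSON.stringify()` will call `toJSON()` on every file in the `files` object, so a handler can respond with the metadata directly (a sketch, reusing the `req`/`res` names from the example at the top):

```javascript
form.parse(req, function(err, fields, files) {
  res.writeHead(200, {'content-type': 'application/json'});
  // each File instance is serialized via File.prototype.toJSON
  res.end(JSON.stringify({fields: fields, files: files}));
});
```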

### Events

#### 'progress'

Emitted after each incoming chunk of data that has been parsed. Can be used to roll your own progress bar.

```javascript
form.on('progress', function(bytesReceived, bytesExpected) {
});
```
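
For example, a rough percentage indicator; note that `bytesExpected` can be `null` when the request has no `Content-Length` header (see `_parseContentLength` in `lib/incoming_form.js` in this commit):

```javascript
form.on('progress', function(bytesReceived, bytesExpected) {
  if (bytesExpected) {
    var percent = Math.floor(bytesReceived / bytesExpected * 100);
    console.log('upload ' + percent + '% complete');
  }
});
```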

#### 'field'

Emitted whenever a field / value pair has been received.

```javascript
form.on('field', function(name, value) {
});
```

#### 'fileBegin'

Emitted whenever a new file is detected in the upload stream. Use this event if
you want to stream the file to somewhere else while buffering the upload on
the file system.

```javascript
form.on('fileBegin', function(name, file) {
});
```

#### 'file'

Emitted whenever a field / file pair has been received. `file` is an instance of `File`.

```javascript
form.on('file', function(name, file) {
});
```

#### 'error'

Emitted when there is an error processing the incoming form. A request that experiences an error is automatically paused; you will have to manually call `request.resume()` if you want the request to continue firing `'data'` events.

```javascript
form.on('error', function(err) {
});
```
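
A minimal sketch of the `request.resume()` pattern mentioned above, assuming the surrounding handler still has the `req` object in scope:

```javascript
form.on('error', function(err) {
  console.error('form error:', err.message);
  // optionally drain the rest of the request so the socket can be reused
  req.resume();
});
```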

#### 'aborted'

Emitted when the request was aborted by the user. Right now this can be due to a 'timeout' or 'close' event on the socket. After this event is emitted, an `error` event will follow. In the future there will be a separate 'timeout' event (needs a change in the node core).

```javascript
form.on('aborted', function() {
});
```

#### 'end'

```javascript
form.on('end', function() {
});
```
Emitted when the entire request has been received, and all contained files have finished flushing to disk. This is a great place for you to send your response.
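
For example, when using the event API instead of the `parse` callback, results can be collected per file and the response sent once everything has been flushed (a sketch that mirrors what the callback form of `parse` does internally):

```javascript
var received = [];

form.on('file', function(name, file) {
  received.push({ field: name, path: file.path, size: file.size });
});

form.on('end', function() {
  res.writeHead(200, {'content-type': 'application/json'});
  res.end(JSON.stringify(received));
});

form.parse(req);
```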

## Changelog

### v1.1.1 (2017-01-15)

* Fix DeprecationWarning about os.tmpDir() (Christian)
* Update `buffer.write` order of arguments for Node 7 (Kornel Lesiński)
* JSON Parser emits error events to the IncomingForm (alessio.montagnani)
* Improved Content-Disposition parsing (Sebastien)
* Access WriteStream of fs during runtime instead of include time (Jonas Amundsen)
* Use built-in toString to convert buffer to hex (Charmander)
* Add hash to json if present (Nick Stamas)
* Add license to package.json (Simen Bekkhus)

### v1.0.14 (2013-05-03)

* Add failing hash tests. (Ben Trask)
* Enable hash calculation again (Eugene Girshov)
* Test for immediate data events (Tim Smart)
* Re-arrange IncomingForm#parse (Tim Smart)

### v1.0.13

* Only update hash if update method exists (Sven Lito)
* According to travis v0.10 needs to go quoted (Sven Lito)
* Bumping build node versions (Sven Lito)
* Additional fix for empty requests (Eugene Girshov)
* Change the default to 1000, to match the new Node behaviour. (OrangeDog)
* Add ability to control maxKeys in the querystring parser. (OrangeDog)
* Adjust test case to work with node 0.9.x (Eugene Girshov)
* Update package.json (Sven Lito)
* Path adjustment according to eb4468b (Markus Ast)

### v1.0.12

* Emit error on aborted connections (Eugene Girshov)
* Add support for empty requests (Eugene Girshov)
* Fix name/filename handling in Content-Disposition (jesperp)
* Tolerate malformed closing boundary in multipart (Eugene Girshov)
* Ignore preamble in multipart messages (Eugene Girshov)
* Add support for application/json (Mike Frey, Carlos Rodriguez)
* Add support for Base64 encoding (Elmer Bulthuis)
* Add File#toJSON (TJ Holowaychuk)
* Remove support for Node.js 0.4 & 0.6 (Andrew Kelley)
* Documentation improvements (Sven Lito, Andre Azevedo)
* Add support for application/octet-stream (Ion Lupascu, Chris Scribner)
* Use os.tmpdir() to get tmp directory (Andrew Kelley)
* Improve package.json (Andrew Kelley, Sven Lito)
* Fix benchmark script (Andrew Kelley)
* Fix scope issue in incoming_forms (Sven Lito)
* Fix file handle leak on error (OrangeDog)

## License

Formidable is licensed under the MIT license.

## Ports

* [multipart-parser](http://github.com/FooBarWidget/multipart-parser): a C++ parser based on formidable

## Credits

* [Ryan Dahl](http://twitter.com/ryah) for his work on [http-parser](http://github.com/ry/http-parser) which heavily inspired multipart_parser.js
1  node_modules/formidable/index.js  generated  vendored  Normal file
@@ -0,0 +1 @@
module.exports = require('./lib');
81  node_modules/formidable/lib/file.js  generated  vendored  Normal file
@@ -0,0 +1,81 @@
if (global.GENTLY) require = GENTLY.hijack(require);

var util = require('util'),
    fs = require('fs'),
    EventEmitter = require('events').EventEmitter,
    crypto = require('crypto');

function File(properties) {
  EventEmitter.call(this);

  this.size = 0;
  this.path = null;
  this.name = null;
  this.type = null;
  this.hash = null;
  this.lastModifiedDate = null;

  this._writeStream = null;

  for (var key in properties) {
    this[key] = properties[key];
  }

  if(typeof this.hash === 'string') {
    this.hash = crypto.createHash(properties.hash);
  } else {
    this.hash = null;
  }
}
module.exports = File;
util.inherits(File, EventEmitter);

File.prototype.open = function() {
  this._writeStream = new fs.WriteStream(this.path);
};

File.prototype.toJSON = function() {
  var json = {
    size: this.size,
    path: this.path,
    name: this.name,
    type: this.type,
    mtime: this.lastModifiedDate,
    length: this.length,
    filename: this.filename,
    mime: this.mime
  };
  if (this.hash && this.hash != "") {
    json.hash = this.hash;
  }
  return json;
};

File.prototype.write = function(buffer, cb) {
  var self = this;
  if (self.hash) {
    self.hash.update(buffer);
  }

  if (this._writeStream.closed) {
    return cb();
  }

  this._writeStream.write(buffer, function() {
    self.lastModifiedDate = new Date();
    self.size += buffer.length;
    self.emit('progress', self.size);
    cb();
  });
};

File.prototype.end = function(cb) {
  var self = this;
  if (self.hash) {
    self.hash = self.hash.digest('hex');
  }
  this._writeStream.end(function() {
    self.emit('end');
    cb();
  });
};
558  node_modules/formidable/lib/incoming_form.js  generated  vendored  Normal file
@@ -0,0 +1,558 @@
if (global.GENTLY) require = GENTLY.hijack(require);

var crypto = require('crypto');
var fs = require('fs');
var util = require('util'),
    path = require('path'),
    File = require('./file'),
    MultipartParser = require('./multipart_parser').MultipartParser,
    QuerystringParser = require('./querystring_parser').QuerystringParser,
    OctetParser = require('./octet_parser').OctetParser,
    JSONParser = require('./json_parser').JSONParser,
    StringDecoder = require('string_decoder').StringDecoder,
    EventEmitter = require('events').EventEmitter,
    Stream = require('stream').Stream,
    os = require('os');

function IncomingForm(opts) {
  if (!(this instanceof IncomingForm)) return new IncomingForm(opts);
  EventEmitter.call(this);

  opts=opts||{};

  this.error = null;
  this.ended = false;

  this.maxFields = opts.maxFields || 1000;
  this.maxFieldsSize = opts.maxFieldsSize || 20 * 1024 * 1024;
  this.maxFileSize = opts.maxFileSize || 200 * 1024 * 1024;
  this.keepExtensions = opts.keepExtensions || false;
  this.uploadDir = opts.uploadDir || (os.tmpdir && os.tmpdir()) || os.tmpDir();
  this.encoding = opts.encoding || 'utf-8';
  this.headers = null;
  this.type = null;
  this.hash = opts.hash || false;
  this.multiples = opts.multiples || false;

  this.bytesReceived = null;
  this.bytesExpected = null;

  this._parser = null;
  this._flushing = 0;
  this._fieldsSize = 0;
  this._fileSize = 0;
  this.openedFiles = [];

  return this;
}
util.inherits(IncomingForm, EventEmitter);
exports.IncomingForm = IncomingForm;

IncomingForm.prototype.parse = function(req, cb) {
  this.pause = function() {
    try {
      req.pause();
    } catch (err) {
      // the stream was destroyed
      if (!this.ended) {
        // before it was completed, crash & burn
        this._error(err);
      }
      return false;
    }
    return true;
  };

  this.resume = function() {
    try {
      req.resume();
    } catch (err) {
      // the stream was destroyed
      if (!this.ended) {
        // before it was completed, crash & burn
        this._error(err);
      }
      return false;
    }

    return true;
  };

  // Setup callback first, so we don't miss anything from data events emitted
  // immediately.
  if (cb) {
    var fields = {}, files = {};
    this
      .on('field', function(name, value) {
        fields[name] = value;
      })
      .on('file', function(name, file) {
        if (this.multiples) {
          if (files[name]) {
            if (!Array.isArray(files[name])) {
              files[name] = [files[name]];
            }
            files[name].push(file);
          } else {
            files[name] = file;
          }
        } else {
          files[name] = file;
        }
      })
      .on('error', function(err) {
        cb(err, fields, files);
      })
      .on('end', function() {
        cb(null, fields, files);
      });
  }

  // Parse headers and setup the parser, ready to start listening for data.
  this.writeHeaders(req.headers);

  // Start listening for data.
  var self = this;
  req
    .on('error', function(err) {
      self._error(err);
    })
    .on('aborted', function() {
      self.emit('aborted');
      self._error(new Error('Request aborted'));
    })
    .on('data', function(buffer) {
      self.write(buffer);
    })
    .on('end', function() {
      if (self.error) {
        return;
      }

      var err = self._parser.end();
      if (err) {
        self._error(err);
      }
    });

  return this;
};

IncomingForm.prototype.writeHeaders = function(headers) {
  this.headers = headers;
  this._parseContentLength();
  this._parseContentType();
};

IncomingForm.prototype.write = function(buffer) {
  if (this.error) {
    return;
  }
  if (!this._parser) {
    this._error(new Error('uninitialized parser'));
    return;
  }

  this.bytesReceived += buffer.length;
  this.emit('progress', this.bytesReceived, this.bytesExpected);

  var bytesParsed = this._parser.write(buffer);
  if (bytesParsed !== buffer.length) {
    this._error(new Error('parser error, '+bytesParsed+' of '+buffer.length+' bytes parsed'));
  }

  return bytesParsed;
};

IncomingForm.prototype.pause = function() {
  // this does nothing, unless overwritten in IncomingForm.parse
  return false;
};

IncomingForm.prototype.resume = function() {
  // this does nothing, unless overwritten in IncomingForm.parse
  return false;
};

IncomingForm.prototype.onPart = function(part) {
  // this method can be overwritten by the user
  this.handlePart(part);
};

IncomingForm.prototype.handlePart = function(part) {
  var self = this;

  // This MUST check exactly for undefined. You can not change it to !part.filename.
  if (part.filename === undefined) {
    var value = ''
      , decoder = new StringDecoder(this.encoding);

    part.on('data', function(buffer) {
      self._fieldsSize += buffer.length;
      if (self._fieldsSize > self.maxFieldsSize) {
        self._error(new Error('maxFieldsSize exceeded, received '+self._fieldsSize+' bytes of field data'));
        return;
      }
      value += decoder.write(buffer);
    });

    part.on('end', function() {
      self.emit('field', part.name, value);
    });
    return;
  }

  this._flushing++;

  var file = new File({
    path: this._uploadPath(part.filename),
    name: part.filename,
    type: part.mime,
    hash: self.hash
  });

  this.emit('fileBegin', part.name, file);

  file.open();
  this.openedFiles.push(file);

  part.on('data', function(buffer) {
    self._fileSize += buffer.length;
    if (self._fileSize > self.maxFileSize) {
      self._error(new Error('maxFileSize exceeded, received '+self._fileSize+' bytes of file data'));
      return;
    }
    if (buffer.length == 0) {
      return;
    }
    self.pause();
    file.write(buffer, function() {
      self.resume();
    });
  });

  part.on('end', function() {
    file.end(function() {
      self._flushing--;
      self.emit('file', part.name, file);
      self._maybeEnd();
    });
  });
};

function dummyParser(self) {
  return {
    end: function () {
      self.ended = true;
      self._maybeEnd();
      return null;
    }
  };
}

IncomingForm.prototype._parseContentType = function() {
  if (this.bytesExpected === 0) {
    this._parser = dummyParser(this);
    return;
  }

  if (!this.headers['content-type']) {
    this._error(new Error('bad content-type header, no content-type'));
    return;
  }

  if (this.headers['content-type'].match(/octet-stream/i)) {
    this._initOctetStream();
    return;
  }

  if (this.headers['content-type'].match(/urlencoded/i)) {
    this._initUrlencoded();
    return;
  }

  if (this.headers['content-type'].match(/multipart/i)) {
    var m = this.headers['content-type'].match(/boundary=(?:"([^"]+)"|([^;]+))/i);
    if (m) {
      this._initMultipart(m[1] || m[2]);
    } else {
      this._error(new Error('bad content-type header, no multipart boundary'));
    }
    return;
  }

  if (this.headers['content-type'].match(/json/i)) {
    this._initJSONencoded();
    return;
  }

  this._error(new Error('bad content-type header, unknown content-type: '+this.headers['content-type']));
};

IncomingForm.prototype._error = function(err) {
  if (this.error || this.ended) {
    return;
  }

  this.error = err;
  this.emit('error', err);

  if (Array.isArray(this.openedFiles)) {
    this.openedFiles.forEach(function(file) {
      file._writeStream.destroy();
      setTimeout(fs.unlink, 0, file.path, function(error) { });
    });
  }
};

IncomingForm.prototype._parseContentLength = function() {
  this.bytesReceived = 0;
  if (this.headers['content-length']) {
    this.bytesExpected = parseInt(this.headers['content-length'], 10);
  } else if (this.headers['transfer-encoding'] === undefined) {
    this.bytesExpected = 0;
  }

  if (this.bytesExpected !== null) {
    this.emit('progress', this.bytesReceived, this.bytesExpected);
  }
};

IncomingForm.prototype._newParser = function() {
  return new MultipartParser();
};

IncomingForm.prototype._initMultipart = function(boundary) {
  this.type = 'multipart';

  var parser = new MultipartParser(),
      self = this,
      headerField,
      headerValue,
      part;

  parser.initWithBoundary(boundary);

  parser.onPartBegin = function() {
    part = new Stream();
    part.readable = true;
    part.headers = {};
    part.name = null;
    part.filename = null;
    part.mime = null;

    part.transferEncoding = 'binary';
    part.transferBuffer = '';

    headerField = '';
    headerValue = '';
  };

  parser.onHeaderField = function(b, start, end) {
    headerField += b.toString(self.encoding, start, end);
  };

  parser.onHeaderValue = function(b, start, end) {
    headerValue += b.toString(self.encoding, start, end);
  };

  parser.onHeaderEnd = function() {
    headerField = headerField.toLowerCase();
    part.headers[headerField] = headerValue;

    // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
    var m = headerValue.match(/\bname=("([^"]*)"|([^\(\)<>@,;:\\"\/\[\]\?=\{\}\s\t/]+))/i);
    if (headerField == 'content-disposition') {
      if (m) {
        part.name = m[2] || m[3] || '';
      }

      part.filename = self._fileName(headerValue);
    } else if (headerField == 'content-type') {
      part.mime = headerValue;
    } else if (headerField == 'content-transfer-encoding') {
      part.transferEncoding = headerValue.toLowerCase();
    }

    headerField = '';
    headerValue = '';
  };

  parser.onHeadersEnd = function() {
    switch(part.transferEncoding){
      case 'binary':
      case '7bit':
      case '8bit':
        parser.onPartData = function(b, start, end) {
          part.emit('data', b.slice(start, end));
        };

        parser.onPartEnd = function() {
          part.emit('end');
        };
        break;

      case 'base64':
        parser.onPartData = function(b, start, end) {
          part.transferBuffer += b.slice(start, end).toString('ascii');

          /*
            four bytes (chars) in base64 converts to three bytes in binary
            encoding. So we should always work with a number of bytes that
            can be divided by 4; it will result in a number of bytes that
            can be divided by 3.
          */
          var offset = parseInt(part.transferBuffer.length / 4, 10) * 4;
          part.emit('data', new Buffer(part.transferBuffer.substring(0, offset), 'base64'));
          part.transferBuffer = part.transferBuffer.substring(offset);
        };

        parser.onPartEnd = function() {
          part.emit('data', new Buffer(part.transferBuffer, 'base64'));
          part.emit('end');
        };
        break;

      default:
        return self._error(new Error('unknown transfer-encoding'));
    }

    self.onPart(part);
  };


  parser.onEnd = function() {
    self.ended = true;
    self._maybeEnd();
  };

  this._parser = parser;
};

IncomingForm.prototype._fileName = function(headerValue) {
  // matches either a quoted-string or a token (RFC 2616 section 19.5.1)
  var m = headerValue.match(/\bfilename=("(.*?)"|([^\(\)<>@,;:\\"\/\[\]\?=\{\}\s\t/]+))($|;\s)/i);
  if (!m) return;

  var match = m[2] || m[3] || '';
  var filename = match.substr(match.lastIndexOf('\\') + 1);
  filename = filename.replace(/%22/g, '"');
  filename = filename.replace(/&#([\d]{4});/g, function(m, code) {
    return String.fromCharCode(code);
  });
  return filename;
};

IncomingForm.prototype._initUrlencoded = function() {
  this.type = 'urlencoded';

  var parser = new QuerystringParser(this.maxFields)
    , self = this;

  parser.onField = function(key, val) {
    self.emit('field', key, val);
  };

  parser.onEnd = function() {
    self.ended = true;
    self._maybeEnd();
  };

  this._parser = parser;
};

IncomingForm.prototype._initOctetStream = function() {
  this.type = 'octet-stream';
  var filename = this.headers['x-file-name'];
  var mime = this.headers['content-type'];

  var file = new File({
    path: this._uploadPath(filename),
    name: filename,
    type: mime
  });

  this.emit('fileBegin', filename, file);
  file.open();
  this.openedFiles.push(file);
  this._flushing++;

  var self = this;

  self._parser = new OctetParser();

  //Keep track of writes that haven't finished so we don't emit the file before it's done being written
  var outstandingWrites = 0;

  self._parser.on('data', function(buffer){
    self.pause();
    outstandingWrites++;

    file.write(buffer, function() {
      outstandingWrites--;
      self.resume();

      if(self.ended){
        self._parser.emit('doneWritingFile');
      }
    });
  });

  self._parser.on('end', function(){
    self._flushing--;
    self.ended = true;

    var done = function(){
      file.end(function() {
        self.emit('file', 'file', file);
        self._maybeEnd();
      });
    };

    if(outstandingWrites === 0){
      done();
    } else {
      self._parser.once('doneWritingFile', done);
    }
  });
};

IncomingForm.prototype._initJSONencoded = function() {
  this.type = 'json';

  var parser = new JSONParser(this)
    , self = this;

  parser.onField = function(key, val) {
    self.emit('field', key, val);
  };

  parser.onEnd = function() {
    self.ended = true;
    self._maybeEnd();
  };

  this._parser = parser;
};

IncomingForm.prototype._uploadPath = function(filename) {
  var buf = crypto.randomBytes(16);
  var name = 'upload_' + buf.toString('hex');

  if (this.keepExtensions) {
    var ext = path.extname(filename);
    ext = ext.replace(/(\.[a-z0-9]+).*/i, '$1');

    name += ext;
  }

  return path.join(this.uploadDir, name);
};

IncomingForm.prototype._maybeEnd = function() {
  if (!this.ended || this._flushing || this.error) {
    return;
  }

  this.emit('end');
};
3  node_modules/formidable/lib/index.js  generated  vendored  Normal file
@@ -0,0 +1,3 @@
var IncomingForm = require('./incoming_form').IncomingForm;
IncomingForm.IncomingForm = IncomingForm;
module.exports = IncomingForm;
30  node_modules/formidable/lib/json_parser.js  generated  vendored  Normal file
@@ -0,0 +1,30 @@
if (global.GENTLY) require = GENTLY.hijack(require);

var Buffer = require('buffer').Buffer;

function JSONParser(parent) {
  this.parent = parent;
  this.chunks = [];
  this.bytesWritten = 0;
}
exports.JSONParser = JSONParser;

JSONParser.prototype.write = function(buffer) {
  this.bytesWritten += buffer.length;
  this.chunks.push(buffer);
  return buffer.length;
};

JSONParser.prototype.end = function() {
  try {
    var fields = JSON.parse(Buffer.concat(this.chunks));
    for (var field in fields) {
      this.onField(field, fields[field]);
    }
  } catch (e) {
    this.parent.emit('error', e);
  }
  this.data = null;

  this.onEnd();
};
332  node_modules/formidable/lib/multipart_parser.js  generated  vendored  Normal file
@@ -0,0 +1,332 @@
var Buffer = require('buffer').Buffer,
    s = 0,
    S =
    { PARSER_UNINITIALIZED: s++,
      START: s++,
      START_BOUNDARY: s++,
      HEADER_FIELD_START: s++,
      HEADER_FIELD: s++,
      HEADER_VALUE_START: s++,
      HEADER_VALUE: s++,
      HEADER_VALUE_ALMOST_DONE: s++,
      HEADERS_ALMOST_DONE: s++,
      PART_DATA_START: s++,
      PART_DATA: s++,
      PART_END: s++,
      END: s++
    },

    f = 1,
    F =
    { PART_BOUNDARY: f,
      LAST_BOUNDARY: f *= 2
    },

    LF = 10,
    CR = 13,
    SPACE = 32,
    HYPHEN = 45,
    COLON = 58,
    A = 97,
    Z = 122,

    lower = function(c) {
      return c | 0x20;
    };

for (s in S) {
  exports[s] = S[s];
}

function MultipartParser() {
  this.boundary = null;
  this.boundaryChars = null;
  this.lookbehind = null;
  this.state = S.PARSER_UNINITIALIZED;

  this.index = null;
  this.flags = 0;
}
exports.MultipartParser = MultipartParser;

MultipartParser.stateToString = function(stateNumber) {
  for (var state in S) {
    var number = S[state];
    if (number === stateNumber) return state;
  }
};

MultipartParser.prototype.initWithBoundary = function(str) {
  this.boundary = new Buffer(str.length+4);
  this.boundary.write('\r\n--', 0);
  this.boundary.write(str, 4);
  this.lookbehind = new Buffer(this.boundary.length+8);
  this.state = S.START;

  this.boundaryChars = {};
  for (var i = 0; i < this.boundary.length; i++) {
    this.boundaryChars[this.boundary[i]] = true;
  }
};

MultipartParser.prototype.write = function(buffer) {
  var self = this,
      i = 0,
      len = buffer.length,
      prevIndex = this.index,
      index = this.index,
      state = this.state,
      flags = this.flags,
      lookbehind = this.lookbehind,
      boundary = this.boundary,
      boundaryChars = this.boundaryChars,
      boundaryLength = this.boundary.length,
      boundaryEnd = boundaryLength - 1,
      bufferLength = buffer.length,
      c,
      cl,

      mark = function(name) {
        self[name+'Mark'] = i;
      },
      clear = function(name) {
        delete self[name+'Mark'];
      },
      callback = function(name, buffer, start, end) {
        if (start !== undefined && start === end) {
          return;
        }

        var callbackSymbol = 'on'+name.substr(0, 1).toUpperCase()+name.substr(1);
        if (callbackSymbol in self) {
          self[callbackSymbol](buffer, start, end);
        }
      },
      dataCallback = function(name, clear) {
        var markSymbol = name+'Mark';
        if (!(markSymbol in self)) {
          return;
        }

        if (!clear) {
          callback(name, buffer, self[markSymbol], buffer.length);
          self[markSymbol] = 0;
        } else {
          callback(name, buffer, self[markSymbol], i);
          delete self[markSymbol];
        }
      };

  for (i = 0; i < len; i++) {
    c = buffer[i];
    switch (state) {
      case S.PARSER_UNINITIALIZED:
        return i;
      case S.START:
        index = 0;
        state = S.START_BOUNDARY;
      case S.START_BOUNDARY:
        if (index == boundary.length - 2) {
          if (c == HYPHEN) {
            flags |= F.LAST_BOUNDARY;
          } else if (c != CR) {
            return i;
          }
          index++;
          break;
        } else if (index - 1 == boundary.length - 2) {
          if (flags & F.LAST_BOUNDARY && c == HYPHEN){
            callback('end');
            state = S.END;
            flags = 0;
          } else if (!(flags & F.LAST_BOUNDARY) && c == LF) {
            index = 0;
            callback('partBegin');
            state = S.HEADER_FIELD_START;
          } else {
            return i;
          }
          break;
        }

        if (c != boundary[index+2]) {
          index = -2;
        }
        if (c == boundary[index+2]) {
          index++;
        }
        break;
      case S.HEADER_FIELD_START:
        state = S.HEADER_FIELD;
        mark('headerField');
        index = 0;
      case S.HEADER_FIELD:
        if (c == CR) {
          clear('headerField');
          state = S.HEADERS_ALMOST_DONE;
          break;
        }

        index++;
        if (c == HYPHEN) {
          break;
        }

        if (c == COLON) {
          if (index == 1) {
            // empty header field
            return i;
          }
          dataCallback('headerField', true);
          state = S.HEADER_VALUE_START;
          break;
        }

        cl = lower(c);
        if (cl < A || cl > Z) {
          return i;
        }
        break;
      case S.HEADER_VALUE_START:
        if (c == SPACE) {
          break;
        }

        mark('headerValue');
        state = S.HEADER_VALUE;
      case S.HEADER_VALUE:
        if (c == CR) {
          dataCallback('headerValue', true);
          callback('headerEnd');
          state = S.HEADER_VALUE_ALMOST_DONE;
        }
        break;
      case S.HEADER_VALUE_ALMOST_DONE:
        if (c != LF) {
          return i;
        }
        state = S.HEADER_FIELD_START;
        break;
      case S.HEADERS_ALMOST_DONE:
        if (c != LF) {
          return i;
        }

        callback('headersEnd');
        state = S.PART_DATA_START;
        break;
      case S.PART_DATA_START:
        state = S.PART_DATA;
        mark('partData');
      case S.PART_DATA:
        prevIndex = index;

        if (index === 0) {
          // boyer-moore derived algorithm to safely skip non-boundary data
          i += boundaryEnd;
          while (i < bufferLength && !(buffer[i] in boundaryChars)) {
            i += boundaryLength;
          }
          i -= boundaryEnd;
          c = buffer[i];
        }

        if (index < boundary.length) {
          if (boundary[index] == c) {
            if (index === 0) {
              dataCallback('partData', true);
            }
            index++;
          } else {
            index = 0;
          }
        } else if (index == boundary.length) {
          index++;
          if (c == CR) {
            // CR = part boundary
            flags |= F.PART_BOUNDARY;
          } else if (c == HYPHEN) {
            // HYPHEN = end boundary
            flags |= F.LAST_BOUNDARY;
          } else {
            index = 0;
          }
        } else if (index - 1 == boundary.length) {
          if (flags & F.PART_BOUNDARY) {
            index = 0;
            if (c == LF) {
              // unset the PART_BOUNDARY flag
              flags &= ~F.PART_BOUNDARY;
              callback('partEnd');
              callback('partBegin');
              state = S.HEADER_FIELD_START;
              break;
            }
          } else if (flags & F.LAST_BOUNDARY) {
            if (c == HYPHEN) {
              callback('partEnd');
              callback('end');
              state = S.END;
              flags = 0;
            } else {
              index = 0;
            }
          } else {
            index = 0;
          }
        }

        if (index > 0) {
          // when matching a possible boundary, keep a lookbehind reference
          // in case it turns out to be a false lead
          lookbehind[index-1] = c;
        } else if (prevIndex > 0) {
          // if our boundary turned out to be rubbish, the captured lookbehind
          // belongs to partData
          callback('partData', lookbehind, 0, prevIndex);
          prevIndex = 0;
          mark('partData');

          // reconsider the current character even though it interrupted the sequence -
          // it could be the beginning of a new sequence
          i--;
        }

        break;
      case S.END:
        break;
      default:
        return i;
    }
  }

  dataCallback('headerField');
  dataCallback('headerValue');
  dataCallback('partData');

  this.index = index;
  this.state = state;
  this.flags = flags;

  return len;
};

MultipartParser.prototype.end = function() {
  var callback = function(self, name) {
    var callbackSymbol = 'on'+name.substr(0, 1).toUpperCase()+name.substr(1);
    if (callbackSymbol in self) {
      self[callbackSymbol]();
    }
  };
  if ((this.state == S.HEADER_FIELD_START && this.index === 0) ||
      (this.state == S.PART_DATA && this.index == this.boundary.length)) {
    callback(this, 'partEnd');
    callback(this, 'end');
  } else if (this.state != S.END) {
    return new Error('MultipartParser.end(): stream ended unexpectedly: ' + this.explain());
  }
};

MultipartParser.prototype.explain = function() {
  return 'state = ' + MultipartParser.stateToString(this.state);
};
20  node_modules/formidable/lib/octet_parser.js  generated  vendored  Normal file
@@ -0,0 +1,20 @@
var EventEmitter = require('events').EventEmitter
  , util = require('util');

function OctetParser(options){
  if(!(this instanceof OctetParser)) return new OctetParser(options);
  EventEmitter.call(this);
}

util.inherits(OctetParser, EventEmitter);

exports.OctetParser = OctetParser;

OctetParser.prototype.write = function(buffer) {
  this.emit('data', buffer);
  return buffer.length;
};

OctetParser.prototype.end = function() {
  this.emit('end');
};
27  node_modules/formidable/lib/querystring_parser.js  generated  vendored  Normal file
@@ -0,0 +1,27 @@
if (global.GENTLY) require = GENTLY.hijack(require);

// This is a buffering parser, not quite as nice as the multipart one.
// If I find time I'll rewrite this to be fully streaming as well
var querystring = require('querystring');

function QuerystringParser(maxKeys) {
  this.maxKeys = maxKeys;
  this.buffer = '';
}
exports.QuerystringParser = QuerystringParser;

QuerystringParser.prototype.write = function(buffer) {
  this.buffer += buffer.toString('ascii');
  return buffer.length;
};

QuerystringParser.prototype.end = function() {
  var fields = querystring.parse(this.buffer, '&', '=', { maxKeys: this.maxKeys });
  for (var field in fields) {
    this.onField(field, fields[field]);
  }
  this.buffer = '';

  this.onEnd();
};
57  node_modules/formidable/package.json  generated  vendored  Normal file
@@ -0,0 +1,57 @@
{
  "_from": "formidable@^1.2.0",
  "_id": "formidable@1.2.1",
  "_inBundle": false,
  "_integrity": "sha512-Fs9VRguL0gqGHkXS5GQiMCr1VhZBxz0JnJs4JmMp/2jL18Fmbzvv7vOFRU+U8TBkHEE/CX1qDXzJplVULgsLeg==",
  "_location": "/formidable",
  "_phantomChildren": {},
  "_requested": {
    "type": "range",
    "registry": true,
    "raw": "formidable@^1.2.0",
    "name": "formidable",
    "escapedName": "formidable",
    "rawSpec": "^1.2.0",
    "saveSpec": null,
    "fetchSpec": "^1.2.0"
  },
  "_requiredBy": [
    "/superagent"
  ],
  "_resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.1.tgz",
  "_shasum": "70fb7ca0290ee6ff961090415f4b3df3d2082659",
  "_spec": "formidable@^1.2.0",
  "_where": "C:\\Users\\mkswa\\Documents\\osrs-json-hiscores\\node_modules\\superagent",
  "bugs": {
    "url": "http://github.com/felixge/node-formidable/issues"
  },
  "bundleDependencies": false,
  "dependencies": {},
  "deprecated": false,
  "description": "A node.js module for parsing form data, especially file uploads.",
  "devDependencies": {
    "findit": "^0.1.2",
    "gently": "^0.8.0",
    "hashish": "^0.0.4",
    "request": "^2.11.4",
    "urun": "^0.0.6",
    "utest": "^0.0.8"
  },
  "directories": {
    "lib": "./lib"
  },
  "homepage": "https://github.com/felixge/node-formidable",
  "license": "MIT",
  "main": "./lib/index",
  "name": "formidable",
  "optionalDependencies": {},
  "repository": {
    "type": "git",
    "url": "git://github.com/felixge/node-formidable.git"
  },
  "scripts": {
    "clean": "rm test/tmp/*",
    "test": "node test/run.js"
  },
  "version": "1.2.1"
}
2891  node_modules/formidable/yarn.lock  generated  vendored  Normal file
File diff suppressed because it is too large