nodejs/lib/_http_common.js
cjihrig 6ac8bdc0ab lib: reduce util.is*() usage
Many of the util.is*() methods used to check data types
simply compare against a single value or the result of
typeof. This commit replaces calls to these methods with
equivalent checks. This commit does not touch calls to the
more complex methods (isRegExp(), isDate(), etc.).

Fixes: https://github.com/iojs/io.js/issues/607
PR-URL: https://github.com/iojs/io.js/pull/647
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
2015-01-31 23:47:29 -05:00
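The substitution described in this commit message looks roughly like this (an illustrative sketch with made-up function names, not an excerpt from the actual diff):

var util = require('util');

// Before: util.is*() helpers that merely wrap a typeof or equality check.
function describeBefore(value) {
  if (util.isString(value)) return 'string';
  if (util.isUndefined(value)) return 'undefined';
  if (util.isNull(value)) return 'null';
  return 'other';
}

// After: the equivalent inline checks this commit switches to.
function describeAfter(value) {
  if (typeof value === 'string') return 'string';
  if (value === undefined) return 'undefined';
  if (value === null) return 'null';
  return 'other';
}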


'use strict';
const FreeList = require('freelist').FreeList;
const HTTPParser = process.binding('http_parser').HTTPParser;
const incoming = require('_http_incoming');
const IncomingMessage = incoming.IncomingMessage;
const readStart = incoming.readStart;
const readStop = incoming.readStop;
const debug = require('util').debuglog('http');
exports.debug = debug;
exports.CRLF = '\r\n';
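// Matched case-insensitively against the Transfer-Encoding and Expect header
// values to detect chunked bodies and `Expect: 100-continue` requests.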
exports.chunkExpression = /chunk/i;
exports.continueExpression = /100-continue/i;
exports.methods = HTTPParser.methods;
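
// Callback slot indices used to attach the parser callbacks below;
// `| 0` coerces the values to integers.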
const kOnHeaders = HTTPParser.kOnHeaders | 0;
const kOnHeadersComplete = HTTPParser.kOnHeadersComplete | 0;
const kOnBody = HTTPParser.kOnBody | 0;
const kOnMessageComplete = HTTPParser.kOnMessageComplete | 0;
// Only called in the slow case where slow means
// that the request headers were either fragmented
// across multiple TCP packets or too large to be
// processed in a single run. This method is also
// called to process trailing HTTP headers.
function parserOnHeaders(headers, url) {
  // Once we've exceeded the header limit, stop collecting headers.
  if (this.maxHeaderPairs <= 0 ||
      this._headers.length < this.maxHeaderPairs) {
    this._headers = this._headers.concat(headers);
  }

  this._url += url;
}

// `headers` and `url` are set only if .onHeaders() has not been called for
// this request.
// `url` is not set for response parsers but that's not applicable here since
// all our parsers are request parsers.
function parserOnHeadersComplete(versionMajor, versionMinor, headers, method,
                                 url, statusCode, statusMessage, upgrade,
                                 shouldKeepAlive) {
  var parser = this;

  if (!headers) {
    headers = parser._headers;
    parser._headers = [];
  }

  if (!url) {
    url = parser._url;
    parser._url = '';
  }

  parser.incoming = new IncomingMessage(parser.socket);
  parser.incoming.httpVersionMajor = versionMajor;
  parser.incoming.httpVersionMinor = versionMinor;
  parser.incoming.httpVersion = versionMajor + '.' + versionMinor;
  parser.incoming.url = url;

  var n = headers.length;

  // If parser.maxHeaderPairs <= 0, assume that there's no limit.
  if (parser.maxHeaderPairs > 0)
    n = Math.min(n, parser.maxHeaderPairs);

  parser.incoming._addHeaderLines(headers, n);

  if (typeof method === 'number') {
    // server only
    parser.incoming.method = HTTPParser.methods[method];
  } else {
    // client only
    parser.incoming.statusCode = statusCode;
    parser.incoming.statusMessage = statusMessage;
  }

  parser.incoming.upgrade = upgrade;

  var skipBody = false; // response to HEAD or CONNECT

  if (!upgrade) {
    // For upgraded connections and CONNECT method requests, we'll emit this
    // after parser.execute so that we can capture the first part of the new
    // protocol.
    skipBody = parser.onIncoming(parser.incoming, shouldKeepAlive);
  }

  return skipBody;
}

// XXX This is a mess.
// TODO: http.Parser should be a Writable that emits request/response events.
function parserOnBody(b, start, len) {
  var parser = this;
  var stream = parser.incoming;

  // if the stream has already been removed, then drop it.
  if (!stream)
    return;

  var socket = stream.socket;

  // pretend this was the result of a stream._read call.
  if (len > 0 && !stream._dumped) {
    var slice = b.slice(start, start + len);
    var ret = stream.push(slice);
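    // Backpressure: push() returned false because the readable buffer is
    // full, so stop reading from the socket for now.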
    if (!ret)
      readStop(socket);
  }
}

function parserOnMessageComplete() {
  var parser = this;
  var stream = parser.incoming;

  if (stream) {
    stream.complete = true;
    // Emit any trailing headers.
    var headers = parser._headers;
    if (headers) {
      parser.incoming._addHeaderLines(headers, headers.length);
      parser._headers = [];
      parser._url = '';
    }

    if (!stream.upgrade)
      // For upgraded connections, also emit this after parser.execute
      stream.push(null);
  }

  if (stream && !parser.incoming._pendings.length) {
    // Emit the 'end' event.
    stream.push(null);
  }

  // Force reading of the next incoming message.
  readStart(parser.socket);
}

var parsers = new FreeList('parsers', 1000, function() {
  var parser = new HTTPParser(HTTPParser.REQUEST);

  parser._headers = [];
  parser._url = '';

  // Only called in the slow case where slow means
  // that the request headers were either fragmented
  // across multiple TCP packets or too large to be
  // processed in a single run. This method is also
  // called to process trailing HTTP headers.
  parser[kOnHeaders] = parserOnHeaders;
  parser[kOnHeadersComplete] = parserOnHeadersComplete;
  parser[kOnBody] = parserOnBody;
  parser[kOnMessageComplete] = parserOnMessageComplete;

  return parser;
});
exports.parsers = parsers;
// Free the parser and also break any links that it
// might have to any other things.
// TODO: All parser data should be attached to a
// single object, so that it can be easily cleaned
// up by doing `parser.data = {}`, which should
// be done in FreeList.free. `parsers.free(parser)`
// should be all that is needed.
function freeParser(parser, req, socket) {
  if (parser) {
    parser._headers = [];
    parser.onIncoming = null;
    if (parser.socket)
      parser.socket.parser = null;
    parser.socket = null;
    parser.incoming = null;
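    // Return the parser to the pool; if the pool is already full, destroy it.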
    if (parsers.free(parser) === false)
      parser.close();
    parser = null;
  }
  if (req) {
    req.parser = null;
  }
  if (socket) {
    socket.parser = null;
  }
}
exports.freeParser = freeParser;
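
// Re-emit 'drain' from the socket on the HTTP message currently being
// written to it, if any.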
function ondrain() {
  if (this._httpMessage) this._httpMessage.emit('drain');
}
function httpSocketSetup(socket) {
  socket.removeListener('drain', ondrain);
  socket.on('drain', ondrain);
}
exports.httpSocketSetup = httpSocketSetup;
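
For context, here is a rough, illustrative sketch of how the internal _http_server.js of this era wires a pooled parser to a connection and later releases it with freeParser(); the setupConnection() wrapper, the onIncoming body, and the 2000 header-pair limit are assumptions for illustration, not part of this file:

// Illustrative sketch only -- approximates internal usage, not an exact excerpt.
const HTTPParser = process.binding('http_parser').HTTPParser;
const common = require('_http_common');

function setupConnection(socket) {
  var parser = common.parsers.alloc();       // take a parser from the FreeList pool
  parser.reinitialize(HTTPParser.REQUEST);   // reset any state left by a previous request
  parser.socket = socket;
  socket.parser = parser;
  parser.incoming = null;
  parser.maxHeaderPairs = 2000;              // cap enforced by parserOnHeaders above

  // Called from parserOnHeadersComplete once a full header block is parsed.
  parser.onIncoming = function(req, shouldKeepAlive) {
    // ...dispatch the request to the server's 'request' listeners...
    return false;                            // false: do not skip the body
  };

  socket.on('close', function() {
    // Break parser<->socket links and return the parser to the pool.
    common.freeParser(parser, null, socket);
  });
}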