Mirror of https://github.com/nodejs/node.git
More fast buffer work

commit 7b772f3f68
parent d5bdda74c1
@@ -90,19 +90,22 @@ function allocPool () {
 }
 
-function Buffer (subject, encoding, legacy, slice_legacy) {
+function Buffer (subject, encoding, offset) {
   if (!(this instanceof Buffer)) {
-    return new Buffer(subject, encoding, legacy, slice_legacy);
+    return new Buffer(subject, encoding, offset);
   }
 
   var length, type;
 
   // Are we slicing?
-  if (typeof legacy === 'number') {
-    this.parent = subject;
+  if (typeof offset === 'number') {
     this.length = encoding;
-    this.offset = legacy;
-    legacy = slice_legacy;
+    Object.defineProperty(this, "parent", { enumerable: false,
+                                            value: subject,
+                                            writable: false });
+    Object.defineProperty(this, "offset", { enumerable: false,
+                                            value: offset,
+                                            writable: false });
   } else {
     // Find the length
     switch (type = typeof subject) {
@@ -111,7 +114,7 @@ function Buffer (subject, encoding, legacy, slice_legacy) {
       break;
 
     case 'string':
-      length = Buffer.byteLength(subject);
+      length = Buffer.byteLength(subject, encoding);
       break;
 
     case 'object': // Assume object is an array
@@ -126,13 +129,22 @@ function Buffer (subject, encoding, legacy, slice_legacy) {
 
     if (length > POOLSIZE) {
       // Big buffer, just alloc one.
-      this.parent = new SlowBuffer(subject, encoding);
-      this.offset = 0;
+      var parent = new SlowBuffer(subject, encoding);
+      Object.defineProperty(this, "parent", { enumerable: false,
+                                              value: parent,
+                                              writable: false });
+      Object.defineProperty(this, "offset", { enumerable: false,
+                                              value: 0,
+                                              writable: false });
     } else {
       // Small buffer.
       if (!pool || pool.length - pool.used < length) allocPool();
-      this.parent = pool;
-      this.offset = pool.used;
+      Object.defineProperty(this, "parent", { enumerable: false,
+                                              value: pool,
+                                              writable: false });
+      Object.defineProperty(this, "offset", { enumerable: false,
+                                              value: pool.used,
+                                              writable: false });
       pool.used += length;
 
       // Do we need to write stuff?
@@ -150,11 +162,7 @@ function Buffer (subject, encoding, legacy, slice_legacy) {
     }
   }
 
-  // Make sure the api is equivilent to old buffers, unless user doesn't
-  // want overhead
-  if (legacy !== false) {
-    SlowBuffer.makeFastBuffer(this.parent, this, this.offset, this.length);
-  }
+  SlowBuffer.makeFastBuffer(this.parent, this, this.offset, this.length);
 }
 
 exports.Buffer = Buffer;
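Annotation, not part of the diff: assuming the hunks above are lib/buffer.js and that POOLSIZE is the shared-pool size defined earlier in that file, the caller-visible effect of the new constructor is roughly the following sketch.

    var Buffer = require('buffer').Buffer;

    var small = new Buffer(64);          // length <= POOLSIZE: carved out of the
                                         // shared pool, so small.parent === pool
    var big   = new Buffer(1024 * 1024); // assuming this exceeds POOLSIZE:
                                         // backed by its own SlowBuffer

    // parent and offset still exist, but are now defined with enumerable: false
    // and writable: false, so they no longer show up in for..in loops and
    // cannot be reassigned afterwards.
    small.parent;   // the pool SlowBuffer
    small.offset;   // where this buffer starts inside the pool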
@@ -197,23 +205,35 @@ Buffer.prototype.write = function write (string, offset, encoding) {
     offset = swap;
   }
 
-  offset || (offset = 0);
-  encoding || (encoding = 'utf8');
+  offset = +offset || 0;
+  encoding = String(encoding || 'utf8').toLowerCase();
 
   // Make sure we are not going to overflow
-  var max_length = this.length - offset;
-  if (Buffer.byteLength(string) > max_length) {
-    // FIXME: Char length !== byte length
-    string = string.slice(0, max_length);
-  }
+  var maxLength = this.length - offset;
 
-  return this.parent.write(string, this.offset + offset, encoding);
+  switch (encoding) {
+    case 'utf8':
+    case 'utf-8':
+      return this.parent.utf8Write(string, this.offset + offset, maxLength);
+
+    case 'ascii':
+      return this.parent.asciiWrite(string, this.offset + offset, maxLength);
+
+    case 'binary':
+      return this.parent.binaryWrite(string, this.offset + offset, maxLength);
+
+    case 'base64':
+      return this.parent.base64Write(string, this.offset + offset, maxLength);
+
+    default:
+      throw new Error('Unknown encoding');
+  }
 };
 
 
 // toString(encoding, start=0, end=buffer.length)
 Buffer.prototype.toString = function (encoding, start, end) {
-  if (typeof encoding == 'undefined') encoding = 'utf8';
+  encoding = String(encoding || 'utf8').toLowerCase();
 
   if (typeof start == 'undefined' || start < 0) {
     start = 0;
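Annotation, not part of the diff: write() now normalizes offset and encoding itself and dispatches straight to the parent's per-encoding write methods. A small sketch, assuming lib/buffer.js as above:

    var Buffer = require('buffer').Buffer;
    var b = new Buffer(64);

    var written = b.write('hello');   // offset defaults to 0, encoding to 'utf8'
    b.write('hello', 0, 'UTF-8');     // encodings are lowercased before the switch
    b.write('world', 'ascii', 6);     // legacy (string, encoding, offset) order is
                                      // still accepted via the isFinite() swap above
    // b.write('x', 0, 'rot13') now throws new Error('Unknown encoding')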
@@ -227,7 +247,26 @@ Buffer.prototype.toString = function (encoding, start, end) {
     end = 0;
   }
 
-  return this.parent.toString(encoding, start + this.offset, end + this.offset);
+  start = start + this.offset;
+  end = end + this.offset;
+
+  switch (encoding) {
+    case 'utf8':
+    case 'utf-8':
+      return this.parent.utf8Slice(start, end);
+
+    case 'ascii':
+      return this.parent.asciiSlice(start, end);
+
+    case 'binary':
+      return this.parent.binarySlice(start, end);
+
+    case 'base64':
+      return this.parent.base64Slice(start, end);
+
+    default:
+      throw new Error('Unknown encoding');
+  }
 };
 
 
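Annotation, not part of the diff: toString() mirrors the same pattern, translating start/end into parent coordinates and calling the matching *Slice method instead of parent.toString(). A sketch, assuming lib/buffer.js:

    var Buffer = require('buffer').Buffer;
    var b = new Buffer(11);
    b.write('hello world', 0, 'ascii');

    b.toString();                  // 'hello world' (defaults: utf8, whole buffer)
    b.toString('ascii', 0, 5);     // 'hello'
    b.toString('base64', 0, 5);    // 'aGVsbG8='
    // b.toString('rot13') throws new Error('Unknown encoding')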
@@ -276,7 +315,7 @@ Buffer.prototype.copy = function copy (target, target_start, start, end) {
 
 
 // slice(start, end)
-Buffer.prototype.slice = function (start, end, legacy) {
+Buffer.prototype.slice = function (start, end) {
   if (end > this.length) {
     throw new Error("oob");
   }
@@ -284,6 +323,6 @@ Buffer.prototype.slice = function (start, end, legacy) {
     throw new Error("oob");
   }
 
-  return new Buffer(this.parent, end - start, +start + this.offset, legacy);
+  return new Buffer(this.parent, end - start, +start + this.offset);
 };
 
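Annotation, not part of the diff: slice() drops the legacy flag but still returns a view onto the same parent memory, so writes through the slice stay visible through the original. A minimal sketch, assuming lib/buffer.js:

    var Buffer = require('buffer').Buffer;
    var b = new Buffer(16);
    b.write('abcdefgh', 0, 'ascii');

    var s = b.slice(2, 6);         // 4-byte view, no copy
    s.length;                      // 4
    s.parent === b.parent;         // true: same underlying SlowBuffer / pool
    s[0] = 0x7a;                   // 'z', lands in the shared memory
    b.toString('ascii', 2, 3);     // 'z'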
@@ -505,7 +505,7 @@ Handle<Value> Buffer::Copy(const Arguments &args) {
 }
 
 
-// var charsWritten = buffer.utf8Write(string, offset);
+// var charsWritten = buffer.utf8Write(string, offset, [maxLength]);
 Handle<Value> Buffer::Utf8Write(const Arguments &args) {
   HandleScope scope;
   Buffer *buffer = ObjectWrap::Unwrap<Buffer>(args.This());
@@ -517,19 +517,23 @@ Handle<Value> Buffer::Utf8Write(const Arguments &args) {
 
   Local<String> s = args[0]->ToString();
 
-  size_t offset = args[1]->Int32Value();
+  size_t offset = args[1]->Uint32Value();
 
   if (s->Utf8Length() > 0 && offset >= buffer->length_) {
     return ThrowException(Exception::TypeError(String::New(
         "Offset is out of bounds")));
   }
 
+  size_t max_length = args[2].IsEmpty() ? buffer->length_ - offset
+                                        : args[2]->Uint32Value();
+  max_length = MIN(buffer->length_ - offset, max_length);
+
   char* p = buffer->data() + offset;
 
   int char_written;
 
   int written = s->WriteUtf8(reinterpret_cast<char*>(p),
-                             buffer->length_ - offset,
+                             max_length,
                              &char_written,
                              String::HINT_MANY_WRITES_EXPECTED);
 
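Annotation, not part of the diff: offset is now read as an unsigned 32-bit value, and a non-empty write at or past the end of the buffer is rejected before anything is clamped. A sketch against the binding directly (assuming this hunk is src/node_buffer.cc and that lib/buffer.js exports SlowBuffer):

    var SlowBuffer = require('buffer').SlowBuffer;
    var sb = new SlowBuffer(4);

    sb.utf8Write('hi', 0);         // writes 'hi' at the start
    sb.utf8Write('hi', 2, 1);      // new optional third arg: write at most 1 byte
    // sb.utf8Write('hi', 4) throws TypeError('Offset is out of bounds'):
    // the string is non-empty and offset >= the buffer's length.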
@@ -562,18 +566,20 @@ Handle<Value> Buffer::AsciiWrite(const Arguments &args) {
         "Offset is out of bounds")));
   }
 
-  char *p = buffer->data() + offset;
+  size_t max_length = args[2].IsEmpty() ? buffer->length_ - offset
+                                        : args[2]->Uint32Value();
+  max_length = MIN(s->Length(), MIN(buffer->length_ - offset, max_length));
 
-  size_t towrite = MIN((unsigned long) s->Length(), buffer->length_ - offset);
+  char *p = buffer->data() + offset;
 
   int written = s->WriteAscii(reinterpret_cast<char*>(p),
                               0,
-                              towrite,
+                              max_length,
                               String::HINT_MANY_WRITES_EXPECTED);
   return scope.Close(Integer::New(written));
 }
 
-// var bytesWritten = buffer.base64Write(string, offset);
+// var bytesWritten = buffer.base64Write(string, offset, [maxLength]);
 Handle<Value> Buffer::Base64Write(const Arguments &args) {
   HandleScope scope;
 
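Annotation, not part of the diff: the new maxLength argument to utf8Write/asciiWrite/base64Write is clamped to the space left after offset, and it is what lets the JavaScript write() above pass this.length - offset so a small pooled buffer cannot spill into its neighbours' bytes. A sketch, assuming lib/buffer.js as in the earlier hunks:

    var Buffer = require('buffer').Buffer;
    var b = new Buffer(8);         // small buffer, likely carved from the pool

    // Only the bytes between offset and the end of *this* buffer are written,
    // even though the parent pool has plenty of room past it.
    var written = b.write('0123456789abcdef', 4, 'ascii');
    written;                       // 4, clamped by maxLength = 8 - 4
    b.toString('ascii', 4, 8);     // '0123'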
@@ -666,70 +672,6 @@ Handle<Value> Buffer::BinaryWrite(const Arguments &args) {
 }
 
 
-// buffer.unpack(format, index);
-// Starting at 'index', unpacks binary from the buffer into an array.
-// 'format' is a string
-//
-// FORMAT  RETURNS
-//    N    uint32_t   a 32bit unsigned integer in network byte order
-//    n    uint16_t   a 16bit unsigned integer in network byte order
-//    o    uint8_t    a 8bit unsigned integer
-Handle<Value> Buffer::Unpack(const Arguments &args) {
-  HandleScope scope;
-  Buffer *buffer = ObjectWrap::Unwrap<Buffer>(args.This());
-
-  if (!args[0]->IsString()) {
-    return ThrowException(Exception::TypeError(String::New(
-        "Argument must be a string")));
-  }
-
-  String::AsciiValue format(args[0]->ToString());
-  uint32_t index = args[1]->Uint32Value();
-
-#define OUT_OF_BOUNDS ThrowException(Exception::Error(String::New("Out of bounds")))
-
-  Local<Array> array = Array::New(format.length());
-
-  uint8_t  uint8;
-  uint16_t uint16;
-  uint32_t uint32;
-
-  for (int i = 0; i < format.length(); i++) {
-    switch ((*format)[i]) {
-      // 32bit unsigned integer in network byte order
-      case 'N':
-        if (index + 3 >= buffer->length_) return OUT_OF_BOUNDS;
-        uint32 = htonl(*(uint32_t*)(buffer->data() + index));
-        array->Set(Integer::New(i), Integer::NewFromUnsigned(uint32));
-        index += 4;
-        break;
-
-      // 16bit unsigned integer in network byte order
-      case 'n':
-        if (index + 1 >= buffer->length_) return OUT_OF_BOUNDS;
-        uint16 = htons(*(uint16_t*)(buffer->data() + index));
-        array->Set(Integer::New(i), Integer::NewFromUnsigned(uint16));
-        index += 2;
-        break;
-
-      // a single octet, unsigned.
-      case 'o':
-        if (index >= buffer->length_) return OUT_OF_BOUNDS;
-        uint8 = (uint8_t)buffer->data()[index];
-        array->Set(Integer::New(i), Integer::NewFromUnsigned(uint8));
-        index += 1;
-        break;
-
-      default:
-        return ThrowException(Exception::Error(
-            String::New("Unknown format character")));
-    }
-  }
-
-  return scope.Close(array);
-}
-
-
 // var nbytes = Buffer.byteLength("string", "utf8")
 Handle<Value> Buffer::ByteLength(const Arguments &args) {
   HandleScope scope;
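Annotation, not part of the diff: buffer.unpack() is removed outright (its prototype registration goes away in the next hunk, and its tests further down). Code that relied on it can do the same reads with plain byte arithmetic; a hypothetical replacement for b.unpack('N', index) might look like:

    var Buffer = require('buffer').Buffer;

    // Hypothetical helper, not part of the commit; mirrors the removed
    // bounds check before reading four bytes in network byte order.
    function readUInt32BE (buf, index) {
      if (index + 3 >= buf.length) throw new Error('Out of bounds');
      return (buf[index] * 0x1000000) +   // * instead of << 24 to stay unsigned
             (buf[index + 1] << 16) +
             (buf[index + 2] << 8) +
              buf[index + 3];
    }

    var b = new Buffer([0xDE, 0xAD, 0xBE, 0xEF]);
    readUInt32BE(b, 0);            // 0xDEADBEEF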
@@ -741,11 +683,26 @@ Handle<Value> Buffer::ByteLength(const Arguments &args) {
 
   Local<String> s = args[0]->ToString();
   enum encoding e = ParseEncoding(args[1], UTF8);
+  String::Utf8Value v(s);
 
-  Local<Integer> length =
-    Integer::New(e == UTF8 ? s->Utf8Length() : s->Length());
+  size_t length;
+
+  switch (e) {
+    case UTF8:
+      length = s->Utf8Length();
+      break;
 
-  return scope.Close(length);
+    case BASE64:
+      length = base64_decoded_size(*v, v.length());
+      break;
+
+    default:
+      length = s->Length();
+      break;
+  }
+
+
+  return scope.Close(Integer::New(length));
 }
 
 
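Annotation, not part of the diff: byteLength now reports the decoded size for base64 input instead of the raw string length, alongside the existing utf8 behaviour. A sketch, assuming Buffer.byteLength in lib/buffer.js forwards its encoding argument to this binding:

    var Buffer = require('buffer').Buffer;

    Buffer.byteLength('hello');                 // 5  (utf8, all ASCII)
    Buffer.byteLength('\u00F6\u65E5', 'utf8');  // 5  (2-byte + 3-byte sequence)
    Buffer.byteLength('aGVsbG8=', 'base64');    // 5  ('hello' once decoded)
    Buffer.byteLength('aGVsbG8=', 'ascii');     // 8  (default case: s->Length())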
@@ -801,7 +758,6 @@ void Buffer::Initialize(Handle<Object> target) {
   NODE_SET_PROTOTYPE_METHOD(constructor_template, "asciiWrite", Buffer::AsciiWrite);
   NODE_SET_PROTOTYPE_METHOD(constructor_template, "binaryWrite", Buffer::BinaryWrite);
   NODE_SET_PROTOTYPE_METHOD(constructor_template, "base64Write", Buffer::Base64Write);
-  NODE_SET_PROTOTYPE_METHOD(constructor_template, "unpack", Buffer::Unpack);
   NODE_SET_PROTOTYPE_METHOD(constructor_template, "copy", Buffer::Copy);
 
   NODE_SET_METHOD(constructor_template->GetFunction(),
@@ -161,7 +161,7 @@ for (var j = 0; j < 500; j++) {
   var asciiSlice = b.toString('ascii', 0, asciiString.length);
   assert.equal(asciiString, asciiSlice);
 
-  var written = b.asciiWrite(asciiString, offset);
+  var written = b.write(asciiString, offset, 'ascii');
   assert.equal(asciiString.length, written);
   var asciiSlice = b.toString('ascii', offset, offset+asciiString.length);
   assert.equal(asciiString, asciiSlice);
@@ -185,35 +185,11 @@ for (var j = 0; j < 100; j++) {
 }
 
 
-// unpack
-
-var b = new Buffer(10);
-b[0] = 0x00;
-b[1] = 0x01;
-b[2] = 0x03;
-b[3] = 0x00;
-
-assert.deepEqual([0x0001], b.unpack('n', 0));
-assert.deepEqual([0x0001, 0x0300], b.unpack('nn', 0));
-assert.deepEqual([0x0103], b.unpack('n', 1));
-assert.deepEqual([0x0300], b.unpack('n', 2));
-assert.deepEqual([0x00010300], b.unpack('N', 0));
-assert.throws(function () {
-  b.unpack('N', 8);
-});
-
-b[4] = 0xDE;
-b[5] = 0xAD;
-b[6] = 0xBE;
-b[7] = 0xEF;
-
-assert.deepEqual([0xDEADBEEF], b.unpack('N', 4));
-
 
 // Bug regression test
 var testValue = '\u00F6\u65E5\u672C\u8A9E'; // ö日本語
 var buffer = new Buffer(32);
-var size = buffer.utf8Write(testValue, 0);
+var size = buffer.write(testValue, 0, 'utf8');
 console.log('bytes written to buffer: ' + size);
 var slice = buffer.toString('utf8', 0, size);
 assert.equal(slice, testValue);
@@ -239,9 +215,11 @@ assert.equal(d[1], 42);
 assert.equal(d[2], 255);
 
 var e = new Buffer('über');
+console.error("uber: '%s'", e.toString());
 assert.deepEqual(e, new Buffer([195, 188, 98, 101, 114]));
 
 var f = new Buffer('über', 'ascii');
+console.error("f.length: %d (should be 4)", f.length);
 assert.deepEqual(f, new Buffer([252, 98, 101, 114]));
 
 
@@ -257,8 +235,8 @@ assert.equal(expected, (new Buffer(quote)).toString('base64'));
 
 b = new Buffer(1024);
 bytesWritten = b.write(expected, 0, 'base64');
-assert.equal(quote, b.toString('ascii', 0, quote.length));
 assert.equal(quote.length, bytesWritten);
+assert.equal(quote, b.toString('ascii', 0, quote.length));
 
 assert.equal(new Buffer('', 'base64').toString(), '');
 assert.equal(new Buffer('K', 'base64').toString(), '');