I have been testing two methods of array creation and initialization in JavaScript using Node.js. One method involves dynamically pushing elements into the array, while the other method involves pre-allocating the array to a specific size and then setting each element. Surprisingly, the pre-allocated array method performed significantly worse, and I'm unsure why this is the case.
Here is the code I used for testing:
function testPush(size) {
  let start = process.hrtime.bigint();
  let array = [];
  for (let i = 0; i < size; i++) {
    array.push(i);
  }
  let end = process.hrtime.bigint();
  return Number(end - start) / 1e6; // Convert to milliseconds
}

function testPreAllocated(size) {
  let start = process.hrtime.bigint();
  let array = new Array(size);
  // let array = new Uint32Array(size);
  // let array = Array.from({ length: size });
  for (let i = 0; i < size; i++) {
    array[i] = i;
  }
  let end = process.hrtime.bigint();
  return Number(end - start) / 1e6; // Convert to milliseconds
}

function compareArrays() {
  const size = 1e8; // 100 million
  console.log(`Testing with array size: ${size}`);
  console.log("Starting push test...");
  let pushTime = testPush(size);
  console.log(`Push test completed in ${pushTime.toFixed(2)} ms`);
  console.log("Starting pre-allocated test...");
  let preAllocatedTime = testPreAllocated(size);
  console.log(`Pre-allocated test completed in ${preAllocatedTime.toFixed(2)} ms`);
}

compareArrays();
Output:
Testing with array size: 100000000
Starting push test...
Push test completed in 1624.93 ms
Starting pre-allocated test...
Pre-allocated test completed in 4824.86 ms
I am using Node.js version 20.12.2. My expectation was that pre-allocating the array would be faster because the JavaScript engine wouldn't need to resize the array continually. However, the results show the opposite. Why does pre-allocating an array result in worse performance compared to dynamically pushing elements into an array in this case?
When you do new Array(size) with size being more than 32 million (the constant kMaxFastArrayLength in V8, to be precise), V8 creates a dictionary-like object rather than a "real" array. The behavior is the same as far as users are concerned (i.e., you can still call push, pop, map, iterate over it, access elements with [], etc.), but performance is much worse (though memory usage is much better if you end up not populating the whole array).
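If pre-allocation is the goal and the data is numeric, a typed array (like the Uint32Array line commented out in the question) sidesteps this entirely: typed arrays are backed by a contiguous buffer and never fall back to the dictionary representation. A minimal sketch of that variant (not something I benchmarked here):

// Sketch: pre-allocate with a typed array; its backing buffer is contiguous
// and never degrades to dictionary elements, regardless of size.
function testTypedArray(size) {
  let start = process.hrtime.bigint();
  let array = new Uint32Array(size); // Uint32Array chosen to match the integer data
  for (let i = 0; i < size; i++) {
    array[i] = i;
  }
  let end = process.hrtime.bigint();
  return Number(end - start) / 1e6; // Convert to milliseconds
}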
This conversion doesn't happen when you repeatedly call array.push: the underlying V8 object remains a proper array.
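By the same logic, starting from an empty array and assigning sequential indices (instead of pre-sizing it) should also keep the fast representation, since the backing store still grows incrementally as you write; here is a sketch of that variant, which I haven't measured myself:

// Sketch: grow an initially empty array by assigning sequential indices.
// Because the array grows incrementally, it should stay a "real" array
// (an assumption worth double-checking with %DebugPrint, shown further down).
function testAssignFromEmpty(size) {
  let start = process.hrtime.bigint();
  let array = [];
  for (let i = 0; i < size; i++) {
    array[i] = i; // always writes at index === array.length, never skipping ahead
  }
  let end = process.hrtime.bigint();
  return Number(end - start) / 1e6; // Convert to milliseconds
}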
You can confirm the threshold by setting size to 32 * 1024 * 1024 and then to 32 * 1024 * 1024 + 1: the running time of testPreAllocated jumps from 151 ms to 987 ms (on my machine).
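For instance, reusing the testPreAllocated function from the question, a check along these lines should show the jump right at the threshold (the exact timings will of course differ per machine):

// Sketch: time testPreAllocated just below and just above kMaxFastArrayLength.
const threshold = 32 * 1024 * 1024;
console.log(`size = ${threshold}: ${testPreAllocated(threshold).toFixed(2)} ms`);
console.log(`size = ${threshold + 1}: ${testPreAllocated(threshold + 1).toFixed(2)} ms`);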
You can also confirm this by running something like this in V8 on the command line (run with --allow-natives-syntax):
let fast_arr = new Array(32 * 1024 * 1024);
%DebugPrint(fast_arr);
let slow_arr = new Array(32 * 1024 * 1024 + 1);
%DebugPrint(slow_arr);
The first one prints map: 0x1b6e0018cc81 <Map[16](HOLEY_SMI_ELEMENTS)> (which means that the array is indeed represented internally as a proper array), while the second one prints map: 0x1b6e00198701 <Map[16](DICTIONARY_ELEMENTS)> (which means that the array is represented internally as a dictionary).
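The same trick can be used to double-check the claim about push: an array grown by pushing past the threshold should still print a fast elements kind (I would expect something like PACKED_SMI_ELEMENTS rather than DICTIONARY_ELEMENTS, though the exact map name can vary between V8 versions):

// Sketch (run with --allow-natives-syntax): an array grown via push past the
// 32M threshold should keep a fast elements kind, not DICTIONARY_ELEMENTS.
let pushed_arr = [];
for (let i = 0; i < 32 * 1024 * 1024 + 1; i++) {
  pushed_arr.push(i);
}
%DebugPrint(pushed_arr);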