The straightforward way to do it is with a for-loop:
function copy(a, t, start, end) {
  // Overwrite a[start..end) with the values of t at the same indices.
  // Mutates `a` in place; indices outside [start, end) are untouched.
  for (let i = start; i < end; i++) {
    a[i] = t[i];
  }
}
I thought a better implementation would be to use spreads + splice:
function copy_splice(a, t, start, end) {
  // Replace a[start..end) with the corresponding slice of t in a
  // single splice call. Mutates `a` in place; length is preserved
  // because exactly (end - start) elements are removed and inserted.
  const replacement = t.slice(start, end);
  a.splice(start, end - start, ...replacement);
}
However, when I test this with the following code, I get significantly worse performance:
function randArray(length) {
  // Build an array of `length` random integers in [0, 60000).
  return Array.from({ length }, () => Math.floor(60000 * Math.random()));
}
function test_func(copy_func) {
  // Benchmark copy_func over several array sizes: for each size, copy
  // the [length/10, length/2) range from an Int32Array into a plain
  // array 500 times and log the elapsed milliseconds.
  const sizes = [10, 100, 1000, 100000];
  for (const length of sizes) {
    const a = randArray(length);
    const t = new Int32Array(randArray(length));
    const start = length / 10;
    const end = length / 2;
    const before = performance.now();
    let repeats = 500;
    while (repeats-- > 0) {
      copy_func(a, t, start, end);
    }
    const after = performance.now();
    console.log(copy_func.name + ' timing: ' + (after - before));
  }
}
// Test first: both implementations must produce identical arrays.
var a1 = randArray(100);
var a2 = a1.slice();
var t = new Int32Array(randArray(100));
copy(a1, t, 10, 50);
copy_splice(a2, t, 10, 50);
// Compare element-by-element: the previous sum-based check could pass
// even when the two results differed (e.g. values swapped between
// positions, or errors that cancel out in the total).
console.assert(
  a1.length === a2.length && a1.every((v, i) => v === a2[i])
);
test_func(copy);
test_func(copy_splice);
Results in Firefox:
copy timing: 1
copy timing: 0
copy timing: 37
copy_splice timing: 4
copy_splice timing: 20
copy_splice timing: 1140
Results in Nodejs:
copy timing: 0.08499000035226345
copy timing: 1.3703359998762608
copy timing: 0.8646280001848936
copy timing: 24.584946000017226
copy_splice timing: 0.8248800002038479
copy_splice timing: 2.532259000465274
copy_splice timing: 5.594846999272704
copy_splice timing: 529.5111650004983
So my questions are:
- Is there any better implementation than these mentioned above?
- (optional) Why are array spreads + `.splice` performing worse here?