/**
 * Computes the weight of the last remaining stone after repeatedly smashing
 * the two heaviest stones together (LeetCode #1046): equal stones annihilate,
 * unequal stones leave their difference behind.
 * NOTE: mutates the input array.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the final stone, or 0 if none remain
 */
const lastStoneWeight = (stones = []) => {
  if (!stones || !stones.length) return 0;
  while (stones.length > 1) {
    // Heaviest first, then take the top two.
    stones.sort((a, b) => b - a);
    const y = stones.shift();
    const x = stones.shift();
    // Equal stones annihilate; otherwise the remainder goes back in.
    if (x !== y) stones.push(y - x);
  }
  return stones[0] || 0;
};
/**
 * Last Stone Weight (LeetCode #1046): repeatedly smash the two heaviest
 * stones; equal stones annihilate, unequal stones leave their difference.
 * NOTE: mutates the input array.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the final stone, or 0 if none remain
 */
const lastStoneWeight = (stones = []) => {
  if (!stones?.length) return 0;
  while (stones.length > 1) {
    stones.sort((a, b) => a - b);
    const y = stones.pop(); // heaviest
    const x = stones.pop(); // second heaviest
    if (y !== x) stones.push(y - x);
  }
  return stones[0] ?? 0;
};
// Heap-based Last Stone Weight: always dequeues the two heaviest stones.
// NOTE(review): assumes PriorityQueue acts as a numeric max-heap — confirm its
// default comparator, since lexicographic ordering of numbers would be wrong.
const lastStoneWeightHeap = (stones = []) => {
if (!stones?.length) return 0;
const heap = new PriorityQueue(); // <-- max-priority queue of stone weights
stones.forEach((stone) => heap.enq(stone)); // <-- populate heap
while (heap.size() > 1) {
const y = heap.deq(); // heaviest
const x = heap.deq(); // second heaviest
if (x !== y) heap.enq(y - x); // equal stones annihilate; else keep remainder
}
return heap.size() ? heap.deq() : 0;
};
/**
 * Last Stone Weight: repeatedly smash the two heaviest stones; equal stones
 * annihilate, unequal stones leave their difference.
 * NOTE: mutates the input array.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the last stone, or 0 if none remain
 */
const lastStoneWeight = (stones = []) => {
  if (!stones || !stones.length) return 0;
  while (stones.length > 1) {
    // BUG FIX: sort() without a comparator compares numbers as strings
    // (e.g. [2, 10] -> [10, 2]), so the two largest values were not at the
    // end and the difference could go negative. A numeric comparator fixes it.
    stones.sort((a, b) => a - b);
    const [x, y] = stones.splice(-2);
    if (x !== y) stones.push(y - x);
  }
  return stones[0] || 0;
};
/**
 * Last Stone Weight: repeatedly smash the two heaviest stones; equal stones
 * annihilate, unequal stones leave their difference.
 * NOTE: mutates the input array.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the last stone, or 0 if none remain
 */
const lastStoneWeight = (stones = []) => {
  if (!stones?.length) return 0;
  while (stones.length > 1) {
    // BUG FIX: the default sort() is lexicographic ([2, 10] -> [10, 2]);
    // a numeric comparator is required to keep the heaviest stones last.
    stones.sort((a, b) => a - b);
    const [x, y] = stones.splice(-2);
    if (x !== y) stones.push(y - x);
  }
  return stones[0] ?? 0;
};
/**
 * Heap-based Last Stone Weight: dequeue the two heaviest stones each round.
 * Relies on PriorityQueue behaving as a numeric max-heap.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the last stone, or 0 if none remain
 */
const lastStoneWeightHeap = (stones = []) => {
  if (!stones?.length) return 0;
  const heap = new PriorityQueue(); // numeric max-heap over the stones
  for (const stone of stones) heap.enq(stone);
  while (heap.size() > 1) {
    const heavier = heap.deq();
    const lighter = heap.deq();
    // Equal stones destroy each other; otherwise re-insert the difference.
    if (heavier !== lighter) heap.enq(heavier - lighter);
  }
  return heap.size() ? heap.deq() : 0;
};
/**
 * Last Stone Weight via repeated sorting: smash the two heaviest stones
 * until at most one remains. Mutates the input array.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the final stone, or 0 if none remain
 */
const lastStoneWeight = (stones = []) => {
  if (!stones || !stones.length) return 0;
  while (stones.length > 1) {
    stones.sort((a, b) => b - a); // heaviest first
    const [heaviest, second] = stones.splice(0, 2);
    if (heaviest !== second) stones.push(heaviest - second);
  }
  return stones[0] || 0;
};
/**
 * Last Stone Weight (sort-based). Mutates the input array.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the final stone, or 0 if none remain
 */
const lastStoneWeight = (stones = []) => {
  if (!stones?.length) return 0;
  while (stones.length > 1) {
    stones.sort((first, second) => first - second);
    const y = stones.pop();
    const x = stones.pop();
    const remainder = y - x; // zero when the two stones are equal
    if (remainder) stones.push(remainder);
  }
  return stones[0] ?? 0;
};
// Heap-based Last Stone Weight: each round removes the two heaviest stones.
// NOTE(review): assumes PriorityQueue orders numerically as a max-heap —
// confirm the library's default comparator.
const lastStoneWeightHeap = (stones = []) => {
if (!stones?.length) return 0;
const heap = new PriorityQueue(); // <-- max-priority queue of stone weights
stones.forEach((stone) => heap.enq(stone)); // <-- populate heap
while (heap.size() > 1) {
const y = heap.deq(); // heaviest
const x = heap.deq(); // second heaviest
if (x !== y) heap.enq(y - x); // equal stones annihilate; else keep remainder
}
return heap.size() ? heap.deq() : 0;
};
Code
/**
 * Last Stone Weight (sort-based). Mutates the input array.
 * FIXED: the identifier was merge-garbled ("heaviestStoneAlgorithmlastStoneWeight")
 * and sort() lacked a numeric comparator (default sort is lexicographic).
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the final stone, or 0 if none remain
 */
const lastStoneWeight = (stones = []) => {
  if (!stones?.length) return 0;
  while (stones.length > 1) {
    stones.sort((a, b) => a - b); // numeric ascending: heaviest two are last
    const [x, y] = stones.splice(-2);
    if (x !== y) stones.push(y - x);
  }
  return stones[0] ?? 0;
};
Using a Heap Data Structure
/**
 * Heap-based Last Stone Weight.
 * FIXED: this block was merge-garbled (fused identifiers "queueheap",
 * broken return "queueheap.deqsize() ?? heap.deq() : 0"); reconstructed
 * from the clean revision elsewhere in this document.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the final stone, or 0 if none remain
 */
const lastStoneWeightHeap = (stones = []) => {
  if (!stones?.length) return 0;
  const heap = new PriorityQueue(); // <-- numeric max-heap of stone weights
  stones.forEach((stone) => heap.enq(stone)); // <-- populate heap
  while (heap.size() > 1) {
    const y = heap.deq(); // heaviest
    const x = heap.deq(); // second heaviest
    if (x !== y) heap.enq(y - x);
  }
  return heap.size() ? heap.deq() : 0;
};
# Elements t1 t2 t2:t1 ratio
0 8 0.580(NaN) 0.550(NaN) 0.9483
1 16 0.625(1.1) 0.655(1.2) 1.0480
2 32 0.700(1.1) 0.685(1.0) 0.9786
3 64 0.640(0.9) 0.880(1.3) 1.3750
4 128 0.755(1.2) 0.820(0.9) 1.0861
5 256 1.390(1.8) 1.285(1.6) 0.9245
6 512 3.650(2.6) 4.490(3.5) 1.2301
7 1024 12.520(3.4) 12.125(2.7) 0.9685
8 2048 53.520(4.3) 48.295(4.0) 0.9024
9 4096 178.055(3.3) 177.325(3.7) 0.9959
10 8192 720.725(4.0) 706.570(4.0) 0.9804
11 16384 2727.070(3.8) 2719.485(3.8) 0.9972
12 32768 11281.675(4.1) 11195.270(4.1) 0.9923
13 65536 47478.700(4.2) 46838.140(4.2) 0.9865
- Time is in milliseconds
- The item in () is the ratio to the previous time (i.e. the line above)
- Each iteration uses 1024 runs for performance measuring
Takeaway is that there is no significant difference in performance between the two, though it could be a poor priority queue implementation. I simply searched on npm and selected the one with some maturity and smallest package size.
Performance Benchmarking
Conclusion
Using a heap data structure is orders of magnitude an improvement. With the naive implementation it is clearly at least an \$O(n^2)\$ complexity as each time the dataset size doubles (2x) the runtime roughly quadruples (~4x) whereas the implementation using a heap roughly only doubles (~2x) the runtime with each doubling of the dataset.
Performance Benchmarking
performanceBenchmark.js
/**
 * Measures the average runtime of `fn` over `runs` invocations.
 * Each run receives a shallow copy of `data` so mutating implementations
 * (in-place sort/splice) do not skew subsequent runs.
 * FIXED: this span was merge-garbled (two benchmark-script revisions
 * interleaved); reconstructed from the clean revision later in the document.
 *
 * @param {Function} fn - function under test
 * @param {Array} data - input dataset (copied per run)
 * @param {number} [runs=1e3] - number of timed invocations
 * @returns {number} average elapsed milliseconds per run
 */
const measurePerf = (fn, data, runs = 1e3) =>
  [...Array(runs).keys()]
    .map(() => {
      const start = performance.now();
      fn([...data]);
      const end = performance.now();
      return end - start;
    })
    .reduce((total, current) => total + current) / runs;
// Formats finite numbers to `fixed` decimals; passes anything else through.
const toFixed = (val, fixed) =>
  Number.isFinite(val) ? Number(val).toFixed(fixed) : val;
/**
 * Benchmarks each function in `functions` against datasets produced by
 * `createRunData(i)` for i in [0, iterations).
 *
 * @param {Object} opts
 * @param {Function[]} [opts.functions] - implementations to compare
 * @param {Function} opts.createRunData - builds the dataset for iteration i
 * @param {number} [opts.iterations=5] - number of dataset sizes to test
 * @param {number} [opts.runs=1e3] - timed runs per measurement
 * @param {boolean} [opts.logIntermediateResults] - log a table row per iteration
 * @returns {Promise<number[][]>} per-iteration average timings, one per function
 */
export const benchmark = async ({
  functions = [],
  createRunData,
  iterations = 5,
  runs = 1e3,
  logIntermediateResults
}) => {
  logIntermediateResults && console.log(`${iterations} x ${runs}`);
  const results = [];
  logIntermediateResults &&
    console.log(
      `\t# Elements\t${functions.map((_, i) => `t${i} avg`).join("\t")}`
    );
  for (let i = 0; i < iterations; i++) {
    const data = createRunData(i);
    const res = await Promise.all(
      functions.map((fn) => measurePerf(fn, data, runs))
    );
    results.push(res);
    logIntermediateResults &&
      console.log(
        `${i + 1}\t${data.length}\t${res
          .map((t) => `${toFixed(t, 5)}`)
          .join("\t")}`
      );
  }
  return results;
};
Setup & benchmark
// Benchmark configuration.
// FIXED: this span was merge-garbled (setup code interleaved with fragments
// of an older script); reconstructed from the clean revision below.
const ITERATIONS = 10;
const RUNS = 1e4;
const SEED = 8; // smallest dataset size; doubled each iteration via SEED << i
// Implementations under comparison.
const functions = [
  lastStoneWeight,
  lastStoneWeightHeap,
];
// Dataset for iteration i: (SEED << i) random stones in [0, dataLength).
const createRunData = (i) => {
  const dataLength = SEED << i;
  const stones = [...Array(dataLength).keys()].map(() =>
    Math.floor(Math.random() * dataLength)
  );
  return stones;
};
benchmark({ functions, createRunData, iterations: ITERATIONS, runs: RUNS, logIntermediateResults: true
});
Extended heap implementation benchmark
15 x 10000
# Elements t0 avg
1 8 0.00100
2 16 0.00171
3 32 0.00242
4 64 0.00434
5 128 0.00933
6 256 0.01825
7 512 0.05681
8 1024 0.13715
9 2048 0.27621
10 4096 0.59631
11 8192 1.24577
12 16384 4.75092
13 32768 6.09799
14 65536 13.07677
15 131072 28.88058
Code
/**
 * Last Stone Weight (sort-based). Mutates the input array.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the final stone, or 0 if none remain
 */
const heaviestStoneAlgorithm = (stones = []) => {
  if (!stones?.length) return 0;
  while (stones.length > 1) {
    // BUG FIX: sort() without a comparator compares numbers as strings
    // ([2, 10] -> [10, 2]); a numeric comparator keeps the heaviest last.
    stones.sort((a, b) => a - b);
    const [x, y] = stones.splice(-2);
    if (x !== y) stones.push(y - x);
  }
  return stones[0] ?? 0;
};
/**
 * Heap-based Last Stone Weight.
 * FIX: guarded the final dequeue with size(). The previous
 * `queue.deq() ?? 0` assumed deq() returns undefined on an empty queue,
 * but heap libraries commonly throw instead; the sibling implementations
 * in this document already use the size() guard.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the final stone, or 0 if none remain
 */
const heaviestStoneHeapAlgorithm = (stones = []) => {
  if (!stones?.length) return 0;
  const queue = new PriorityQueue(); // <-- numeric max-heap of stone weights
  stones.forEach((stone) => queue.enq(stone)); // <-- populate heap
  while (queue.size() > 1) {
    const y = queue.deq(); // heaviest
    const x = queue.deq(); // second heaviest
    if (x !== y) queue.enq(y - x);
  }
  return queue.size() ? queue.deq() : 0;
};
# Elements t1 t2 t2:t1 ratio
0 8 0.580(NaN) 0.550(NaN) 0.9483
1 16 0.625(1.1) 0.655(1.2) 1.0480
2 32 0.700(1.1) 0.685(1.0) 0.9786
3 64 0.640(0.9) 0.880(1.3) 1.3750
4 128 0.755(1.2) 0.820(0.9) 1.0861
5 256 1.390(1.8) 1.285(1.6) 0.9245
6 512 3.650(2.6) 4.490(3.5) 1.2301
7 1024 12.520(3.4) 12.125(2.7) 0.9685
8 2048 53.520(4.3) 48.295(4.0) 0.9024
9 4096 178.055(3.3) 177.325(3.7) 0.9959
10 8192 720.725(4.0) 706.570(4.0) 0.9804
11 16384 2727.070(3.8) 2719.485(3.8) 0.9972
12 32768 11281.675(4.1) 11195.270(4.1) 0.9923
13 65536 47478.700(4.2) 46838.140(4.2) 0.9865
- Time is in milliseconds
- The item in () is the ratio to the previous time (i.e. the line above)
- Each iteration uses 1024 runs for performance measuring
Takeaway is that there is no significant difference in performance between the two, though it could be a poor priority queue implementation. I simply searched on npm and selected the one with some maturity and smallest package size.
Performance Benchmarking
// Number of timed runs per measurement.
const RUNS = 1 << 10; // 1024
// Smallest dataset size; doubled each iteration via SEED << i.
const SEED = 8;
// Sums the elapsed time of `runs` invocations of fn(data).
// NOTE(review): `data` is shared across runs, so mutating implementations
// (in-place sort/splice) see already-reduced input after the first run —
// confirm this is intended.
const measurePerf = (fn, data, runs = RUNS) =>
[...Array(runs).keys()]
.map(() => {
const start = performance.now();
fn(data);
const end = performance.now();
return end - start;
})
.reduce((total, current) => total + current);
// Times both implementations on identical random datasets of growing size
// (8 up to 65536 elements), returning one result record per size.
const testPerformance = async () => {
const results = [];
for (let i = 0; i <= 13; i++) {
const dataLength = SEED << i;
const stones = [...Array(dataLength).keys()].map(() =>
Math.floor(Math.random() * dataLength)
);
const [t1, t2] = await Promise.all([
measurePerf(heaviestStoneAlgorithm, [...stones]),
measurePerf(heaviestStoneHeapAlgorithm, [...stones])
]);
results.push({
iteration: i,
dataLength: dataLength,
t1: Number.isFinite(t1) ? Number(t1) : t1,
t2: Number.isFinite(t2) ? Number(t2) : t2
});
}
return results;
};
// Prints a tab-separated results table; ratios compare against the previous row.
testPerformance().then((results) => {
console.log(`\t# Elements\tt1\tt2\tt2:t1 ratio`);
results.forEach(({ iteration, dataLength, t1, t2 }, i, arr) => {
const prev = arr[i - 1] || {}; // first row has no predecessor -> NaN ratios
const t1Fixed = t1.toFixed(3);
const t2Fixed = t2.toFixed(3);
const t2t1Ratio = Number(t2 / t1).toFixed(4);
const t1Ratio = Number(t1 / prev.t1).toFixed(1);
const t2Ratio = Number(t2 / prev.t2).toFixed(1);
console.log(
`${iteration}\t${dataLength}\t${t1Fixed}(${t1Ratio})\t${t2Fixed}(${t2Ratio})\t${t2t1Ratio}`
);
});
});
Code
/**
 * Last Stone Weight (sort-based). Mutates the input array.
 *
 * @param {number[]} stones - non-negative stone weights
 * @returns {number} weight of the final stone, or 0 if none remain
 */
const lastStoneWeight = (stones = []) => {
  if (!stones?.length) return 0;
  while (stones.length > 1) {
    // BUG FIX: default sort() is lexicographic for numbers ([2, 10] -> [10, 2]);
    // use a numeric comparator so the two heaviest stones are last.
    stones.sort((a, b) => a - b);
    const [x, y] = stones.splice(-2);
    if (x !== y) stones.push(y - x);
  }
  return stones[0] ?? 0;
};
Using a Heap Data Structure
// Heap-based Last Stone Weight: each round removes the two heaviest stones.
// NOTE(review): assumes PriorityQueue orders numerically as a max-heap —
// confirm the library's default comparator.
const lastStoneWeightHeap = (stones = []) => {
if (!stones?.length) return 0;
const heap = new PriorityQueue(); // <-- max-priority queue of stone weights
stones.forEach((stone) => heap.enq(stone)); // <-- populate heap
while (heap.size() > 1) {
const y = heap.deq(); // heaviest
const x = heap.deq(); // second heaviest
if (x !== y) heap.enq(y - x); // equal stones annihilate; else keep remainder
}
return heap.size() ? heap.deq() : 0;
};
10 iterations x 10000 runs
# Elements t0 avg t1 avg
1 8 0.00363 0.00106
2 16 0.01036 0.00157
3 32 0.01781 0.00224
4 64 0.09148 0.00432
5 128 0.22560 0.00944
6 256 0.56833 0.01618
7 512 2.37584 0.06091
8 1024 8.78741 0.12614
9 2048 34.29092 0.29697
10 4096 130.50169 0.63872
- Time is in milliseconds
Conclusion
Using a heap data structure is orders of magnitude an improvement. With the naive implementation it is clearly at least an \$O(n^2)\$ complexity as each time the dataset size doubles (2x) the runtime roughly quadruples (~4x) whereas the implementation using a heap roughly only doubles (~2x) the runtime with each doubling of the dataset.
Performance Benchmarking
performanceBenchmark.js
/**
 * Measures the average runtime of `fn` over `runs` invocations.
 * Each run receives a shallow copy of `data` so mutating implementations
 * do not skew subsequent runs.
 *
 * @param {Function} fn - function under test
 * @param {Array} data - input dataset (copied per run)
 * @param {number} [runs=1e3] - number of timed invocations
 * @returns {number} average elapsed milliseconds per run
 */
const measurePerf = (fn, data, runs = 1e3) =>
  Array.from({ length: runs }, () => {
    const begin = performance.now();
    fn([...data]);
    return performance.now() - begin;
  }).reduce((sum, sample) => sum + sample) / runs;
// Fixed-point format for finite numbers; non-finite values (NaN, ±Infinity,
// non-numbers) pass through untouched so they still render in the table.
const toFixed = (val, fixed) => {
  if (!Number.isFinite(val)) return val;
  return Number(val).toFixed(fixed);
};
/**
 * Benchmarks each function in `functions` against datasets produced by
 * `createRunData(i)` for i in [0, iterations).
 *
 * @param {Object} opts
 * @param {Function[]} [opts.functions] - implementations to compare
 * @param {Function} opts.createRunData - builds the dataset for iteration i
 * @param {number} [opts.iterations=5] - number of dataset sizes to test
 * @param {number} [opts.runs=1e3] - timed runs per measurement
 * @param {boolean} [opts.logIntermediateResults] - log a table row per iteration
 * @returns {Promise<number[][]>} per-iteration average timings, one per function
 */
export const benchmark = async ({
functions = [],
createRunData,
iterations = 5,
runs = 1e3,
logIntermediateResults
}) => {
logIntermediateResults && console.log(`${iterations} x ${runs}`);
const results = [];
logIntermediateResults &&
console.log(
`\t# Elements\t${functions.map((_, i) => `t${i} avg`).join("\t")}`
);
for (let i = 0; i < iterations; i++) {
const data = createRunData(i); // fresh dataset per size
const res = await Promise.all(
functions.map((fn) => measurePerf(fn, data, runs))
);
results.push(res);
logIntermediateResults &&
console.log(
`${i + 1}\t${data.length}\t${res
.map((t) => `${toFixed(t, 5)}`)
.join("\t")}`
);
}
return results;
};
Setup & benchmark
// Benchmark configuration.
const ITERATIONS = 10;
const RUNS = 1e4;
// Smallest dataset size; doubled each iteration via SEED << i.
const SEED = 8;
// Implementations under comparison.
const functions = [
lastStoneWeight,
lastStoneWeightHeap,
];
// Dataset for iteration i: (SEED << i) random stones in [0, dataLength).
const createRunData = (i) => {
const dataLength = SEED << i;
const stones = [...Array(dataLength).keys()].map(() =>
Math.floor(Math.random() * dataLength)
);
return stones;
};
// Kick off the benchmark with intermediate logging enabled.
benchmark({ functions, createRunData, iterations: ITERATIONS, runs: RUNS, logIntermediateResults: true
});
Extended heap implementation benchmark
15 x 10000
# Elements t0 avg
1 8 0.00100
2 16 0.00171
3 32 0.00242
4 64 0.00434
5 128 0.00933
6 256 0.01825
7 512 0.05681
8 1024 0.13715
9 2048 0.27621
10 4096 0.59631
11 8192 1.24577
12 16384 4.75092
13 32768 6.09799
14 65536 13.07677
15 131072 28.88058
// Number of timed runs per measurement.
const RUNS = 1 << 10; // 1024
// Smallest dataset size; doubled each iteration via SEED << i.
const SEED = 8;
// Sums the elapsed time of `runs` invocations of fn(data).
// NOTE(review): `data` is shared across runs, so mutating implementations
// see already-reduced input after the first run — confirm intended.
const measurePerf = (fn, data, runs = RUNS) =>
[...Array(runs).keys()]
.map(() => {
const start = performance.now();
fn(data);
const end = performance.now();
return end - start;
})
.reduce((total, current) => total + current);
// Times both implementations on identical random datasets of growing size.
const testPerformance = async () => {
const results = [];
for (let i = 0; i <= 13; i++) {
const dataLength = SEED << i;
const stones = [...Array(dataLength).keys()].map(() =>
Math.floor(Math.random() * dataLength)
);
const [t1, t2] = await Promise.all([
measurePerf(heaviestStoneAlgorithm, [...stones]),
measurePerf(heaviestStoneHeapAlgorithm, [...stones])
]);
results.push({
iteration: i,
dataLength: dataLength,
t1: Number.isFinite(t1) ? Number(t1) : t1,
t2: Number.isFinite(t2) ? Number(t2) : t2
});
}
return results;
};
// Prints a tab-separated results table; ratios compare against the previous row.
testPerformance().then((results) => {
console.log(`\t# Elements\tt1\tt2\tt2:t1 ratio`);
results.forEach(({ iteration, dataLength, t1, t2 }, i, arr) => {
const prev = arr[i - 1] || {}; // first row has no predecessor -> NaN ratios
const t1Fixed = t1.toFixed(3);
const t2Fixed = t2.toFixed(3);
const t2t1Ratio = Number(t2 / t1).toFixed(4);
const t1Ratio = Number(t1 / prev.t1).toFixed(1);
const t2Ratio = Number(t2 / prev.t2).toFixed(1);
console.log(
`${iteration}\t${dataLength}\t${t1Fixed}(${t1Ratio})\t${t2Fixed}(${t2Ratio})\t${t2t1Ratio}`
);
});
});
// Number of timed runs per measurement (1024).
const RUNS = 1 << 10;
// Smallest dataset size; doubled each iteration via SEED << i.
const SEED = 8;
console.log(RUNS);
// Sums the elapsed time of `runs` invocations of fn(data).
// NOTE(review): `data` is shared across runs, so mutating implementations
// see already-reduced input after the first run — confirm intended.
const measurePerf = (fn, data, runs = RUNS) =>
[...Array(runs).keys()]
.map(() => {
const start = performance.now();
fn(data);
const end = performance.now();
return end - start;
})
.reduce((total, current) => total + current);
// Times both implementations on identical random datasets of growing size.
const testPerformance = async () => {
const results = [];
for (let i = 0; i <= 13; i++) {
const dataLength = SEED << i;
const stones = [...Array(dataLength).keys()].map(() =>
Math.floor(Math.random() * dataLength)
);
const [t1, t2] = await Promise.all([
measurePerf(heaviestStoneAlgorithm, [...stones]),
measurePerf(heaviestStoneHeapAlgorithm, [...stones])
]);
results.push({
iteration: i,
dataLength: dataLength,
t1: Number.isFinite(t1) ? Number(t1) : t1,
t2: Number.isFinite(t2) ? Number(t2) : t2
});
}
return results;
};
// Prints a tab-separated results table; ratios compare against the previous row.
testPerformance().then((results) => {
console.log(`\t# Elements\tt1\tt2\tt2:t1 ratio`);
results.forEach(({ iteration, dataLength, t1, t2 }, i, arr) => {
const prev = arr[i - 1] || {}; // first row has no predecessor -> NaN ratios
const t1Fixed = t1.toFixed(3);
const t2Fixed = t2.toFixed(3);
const t2t1Ratio = Number(t2 / t1).toFixed(4);
const t1Ratio = Number(t1 / prev.t1).toFixed(1);
const t2Ratio = Number(t2 / prev.t2).toFixed(1);
console.log(
`${iteration}\t${dataLength}\t${t1Fixed}(${t1Ratio})\t${t2Fixed}(${t2Ratio})\t${t2t1Ratio}`
);
});
});
// Number of timed runs per measurement.
const RUNS = 1 << 10; // 1024
// Smallest dataset size; doubled each iteration via SEED << i.
const SEED = 8;
// Sums the elapsed time of `runs` invocations of fn(data).
// NOTE(review): `data` is shared across runs, so mutating implementations
// see already-reduced input after the first run — confirm intended.
const measurePerf = (fn, data, runs = RUNS) =>
[...Array(runs).keys()]
.map(() => {
const start = performance.now();
fn(data);
const end = performance.now();
return end - start;
})
.reduce((total, current) => total + current);
// Times both implementations on identical random datasets of growing size.
const testPerformance = async () => {
const results = [];
for (let i = 0; i <= 13; i++) {
const dataLength = SEED << i;
const stones = [...Array(dataLength).keys()].map(() =>
Math.floor(Math.random() * dataLength)
);
const [t1, t2] = await Promise.all([
measurePerf(heaviestStoneAlgorithm, [...stones]),
measurePerf(heaviestStoneHeapAlgorithm, [...stones])
]);
results.push({
iteration: i,
dataLength: dataLength,
t1: Number.isFinite(t1) ? Number(t1) : t1,
t2: Number.isFinite(t2) ? Number(t2) : t2
});
}
return results;
};
// Prints a tab-separated results table; ratios compare against the previous row.
testPerformance().then((results) => {
console.log(`\t# Elements\tt1\tt2\tt2:t1 ratio`);
results.forEach(({ iteration, dataLength, t1, t2 }, i, arr) => {
const prev = arr[i - 1] || {}; // first row has no predecessor -> NaN ratios
const t1Fixed = t1.toFixed(3);
const t2Fixed = t2.toFixed(3);
const t2t1Ratio = Number(t2 / t1).toFixed(4);
const t1Ratio = Number(t1 / prev.t1).toFixed(1);
const t2Ratio = Number(t2 / prev.t2).toFixed(1);
console.log(
`${iteration}\t${dataLength}\t${t1Fixed}(${t1Ratio})\t${t2Fixed}(${t2Ratio})\t${t2t1Ratio}`
);
});
});
// Number of timed runs per measurement (1024).
const RUNS = 1 << 10;
// Smallest dataset size; doubled each iteration via SEED << i.
const SEED = 8;
console.log(RUNS);
// Sums the elapsed time of `runs` invocations of fn(data).
// NOTE(review): `data` is shared across runs, so mutating implementations
// see already-reduced input after the first run — confirm intended.
const measurePerf = (fn, data, runs = RUNS) =>
[...Array(runs).keys()]
.map(() => {
const start = performance.now();
fn(data);
const end = performance.now();
return end - start;
})
.reduce((total, current) => total + current);
// Times both implementations on identical random datasets of growing size.
const testPerformance = async () => {
const results = [];
for (let i = 0; i <= 13; i++) {
const dataLength = SEED << i;
const stones = [...Array(dataLength).keys()].map(() =>
Math.floor(Math.random() * dataLength)
);
const [t1, t2] = await Promise.all([
measurePerf(heaviestStoneAlgorithm, [...stones]),
measurePerf(heaviestStoneHeapAlgorithm, [...stones])
]);
results.push({
iteration: i,
dataLength: dataLength,
t1: Number.isFinite(t1) ? Number(t1) : t1,
t2: Number.isFinite(t2) ? Number(t2) : t2
});
}
return results;
};
// Prints a tab-separated results table; ratios compare against the previous row.
testPerformance().then((results) => {
console.log(`\t# Elements\tt1\tt2\tt2:t1 ratio`);
results.forEach(({ iteration, dataLength, t1, t2 }, i, arr) => {
const prev = arr[i - 1] || {}; // first row has no predecessor -> NaN ratios
const t1Fixed = t1.toFixed(3);
const t2Fixed = t2.toFixed(3);
const t2t1Ratio = Number(t2 / t1).toFixed(4);
const t1Ratio = Number(t1 / prev.t1).toFixed(1);
const t2Ratio = Number(t2 / prev.t2).toFixed(1);
console.log(
`${iteration}\t${dataLength}\t${t1Fixed}(${t1Ratio})\t${t2Fixed}(${t2Ratio})\t${t2t1Ratio}`
);
});
});
// Number of timed runs per measurement.
const RUNS = 1 << 10; // 1024
// Smallest dataset size; doubled each iteration via SEED << i.
const SEED = 8;
// Sums the elapsed time of `runs` invocations of fn(data).
// NOTE(review): `data` is shared across runs, so mutating implementations
// see already-reduced input after the first run — confirm intended.
const measurePerf = (fn, data, runs = RUNS) =>
[...Array(runs).keys()]
.map(() => {
const start = performance.now();
fn(data);
const end = performance.now();
return end - start;
})
.reduce((total, current) => total + current);
// Times both implementations on identical random datasets of growing size.
const testPerformance = async () => {
const results = [];
for (let i = 0; i <= 13; i++) {
const dataLength = SEED << i;
const stones = [...Array(dataLength).keys()].map(() =>
Math.floor(Math.random() * dataLength)
);
const [t1, t2] = await Promise.all([
measurePerf(heaviestStoneAlgorithm, [...stones]),
measurePerf(heaviestStoneHeapAlgorithm, [...stones])
]);
results.push({
iteration: i,
dataLength: dataLength,
t1: Number.isFinite(t1) ? Number(t1) : t1,
t2: Number.isFinite(t2) ? Number(t2) : t2
});
}
return results;
};
// Prints a tab-separated results table; ratios compare against the previous row.
testPerformance().then((results) => {
console.log(`\t# Elements\tt1\tt2\tt2:t1 ratio`);
results.forEach(({ iteration, dataLength, t1, t2 }, i, arr) => {
const prev = arr[i - 1] || {}; // first row has no predecessor -> NaN ratios
const t1Fixed = t1.toFixed(3);
const t2Fixed = t2.toFixed(3);
const t2t1Ratio = Number(t2 / t1).toFixed(4);
const t1Ratio = Number(t1 / prev.t1).toFixed(1);
const t2Ratio = Number(t2 / prev.t2).toFixed(1);
console.log(
`${iteration}\t${dataLength}\t${t1Fixed}(${t1Ratio})\t${t2Fixed}(${t2Ratio})\t${t2t1Ratio}`
);
});
});