A few painfully obvious edits of my own:
The frivolous for-loop:
// (Removed in the revision) Pre-filled d with empty arrays and built readers
// with a no-op callback; both are redone from scratch in the revised code
// below, so this loop accomplished nothing.
for (var a = 0; a < regions.length; a++) {
d.push([]);
readers.push(new readBinaryVCF(tabix, vcf, function(x) {x = x;}));
}
Why, why did I think this was a good idea? It's entirely unnecessary.
Omitted.
Callback-waiting:
// (Removed in the revision) Assumed the callback for the LAST region index
// also finishes last; async callbacks complete in arbitrary order, so this
// could render before all region data had arrived.
if (i == regions.length - 1) {
var finalText = "";
for (var j = 0; j < d.length; j++) {
for (var k = 0; k < d[j].length; k++) {
finalText = finalText + d[j][k] + "<br />";
}
}
$(".text").html(finalText);
}
I thought I could wait until the "last" callback had ended before parsing my data, not realizing that—because they're, again, callbacks—they won't finish in order. This answer inspired me to use a manual counter to determine when they've all finished.
Revised code:
// Fetch records for every region in parallel; d[i] receives the records for
// regions[i], and processData(d) runs exactly once, after the final callback
// completes (callbacks may finish in any order — hence the countdown).
var d = [];
var readers = [];
var callbacksRemaining = regions.length;
regions.forEach(function(item, i) {
readers[i] = new readBinaryVCF(tabix, vcf, function(tabixReader) {
// FIX: `reader` and `key` were implicit globals; with several callbacks
// in flight, each could clobber the others' values. Declare them locally.
var reader = readers[i];
// Look up the index for this region's name in the tabix name hash.
// (Presumably regions[i] is [name, start, end] — verify against caller.)
var index = 0;
for (var key in tabixReader.idxContent.namehash) {
if (key === regions[i][0]) {
index = tabixReader.idxContent.namehash[key];
}
}
// FIX: removed stray ";" that followed the "{" of this callback.
reader.getRecords(index, regions[i][1], regions[i][2], function(data) {
d[i] = data;
// Manual countdown: only the last-to-finish callback triggers processing.
callbacksRemaining--;
if (callbacksRemaining <= 0) {
processData(d);
}
});
});
});
Summary:
I've made these few minor edits, but my code still isn't running as fast as I'd like. I'm sure there are many more changes I could and should make—I'm still hoping for advice!
A few painfully obvious edits of my own:
The frivolous for-loop:
// (Removed in the revision) Pre-filled d with empty arrays and built readers
// with a no-op callback; both are redone from scratch in the revised code
// below, so this loop accomplished nothing.
for (var a = 0; a < regions.length; a++) {
d.push([]);
readers.push(new readBinaryVCF(tabix, vcf, function(x) {x = x;}));
}
Why, why did I think this was a good idea? It's entirely unnecessary.
Omitted.
Callback-waiting:
// (Removed in the revision) Assumed the callback for the LAST region index
// also finishes last; async callbacks complete in arbitrary order, so this
// could render before all region data had arrived.
if (i == regions.length - 1) {
var finalText = "";
for (var j = 0; j < d.length; j++) {
for (var k = 0; k < d[j].length; k++) {
finalText = finalText + d[j][k] + "<br />";
}
}
$(".text").html(finalText);
}
I thought I could wait until the "last" callback had ended before parsing my data, not realizing that—because they're, again, callbacks—they won't finish in order. This answer inspired me to use a manual counter to determine when they've all finished.
Revised code:
// Fetch records for every region in parallel; d[i] receives the records for
// regions[i], and processData(d) runs exactly once, after the final callback
// completes (callbacks may finish in any order — hence the countdown).
var d = [];
var readers = [];
var callbacksRemaining = regions.length;
regions.forEach(function(item, i) {
readers[i] = new readBinaryVCF(tabix, vcf, function(tabixReader) {
// FIX: `reader` and `key` were implicit globals; with several callbacks
// in flight, each could clobber the others' values. Declare them locally.
var reader = readers[i];
// Look up the index for this region's name in the tabix name hash.
// (Presumably regions[i] is [name, start, end] — verify against caller.)
var index = 0;
for (var key in tabixReader.idxContent.namehash) {
if (key === regions[i][0]) {
index = tabixReader.idxContent.namehash[key];
}
}
// FIX: removed stray ";" that followed the "{" of this callback.
reader.getRecords(index, regions[i][1], regions[i][2], function(data) {
d[i] = data;
// Manual countdown: only the last-to-finish callback triggers processing.
callbacksRemaining--;
if (callbacksRemaining <= 0) {
processData(d);
}
});
});
});
Summary:
I've made these few minor edits, but my code still isn't running as fast as I'd like. I'm sure there are many more changes I could and should make—I'm still hoping for advice!
A few painfully obvious edits of my own:
The frivolous for-loop:
// (Removed in the revision) Pre-filled d with empty arrays and built readers
// with a no-op callback; both are redone from scratch in the revised code
// below, so this loop accomplished nothing.
for (var a = 0; a < regions.length; a++) {
d.push([]);
readers.push(new readBinaryVCF(tabix, vcf, function(x) {x = x;}));
}
Why, why did I think this was a good idea? It's entirely unnecessary.
Omitted.
Callback-waiting:
// (Removed in the revision) Assumed the callback for the LAST region index
// also finishes last; async callbacks complete in arbitrary order, so this
// could render before all region data had arrived.
if (i == regions.length - 1) {
var finalText = "";
for (var j = 0; j < d.length; j++) {
for (var k = 0; k < d[j].length; k++) {
finalText = finalText + d[j][k] + "<br />";
}
}
$(".text").html(finalText);
}
I thought I could wait until the "last" callback had ended before parsing my data, not realizing that—because they're, again, callbacks—they won't finish in order. This answer inspired me to use a manual counter to determine when they've all finished.
Revised code:
// Fetch records for every region in parallel; d[i] receives the records for
// regions[i], and processData(d) runs exactly once, after the final callback
// completes (callbacks may finish in any order — hence the countdown).
var d = [];
var readers = [];
var callbacksRemaining = regions.length;
regions.forEach(function(item, i) {
readers[i] = new readBinaryVCF(tabix, vcf, function(tabixReader) {
// FIX: `reader` and `key` were implicit globals; with several callbacks
// in flight, each could clobber the others' values. Declare them locally.
var reader = readers[i];
// Look up the index for this region's name in the tabix name hash.
// (Presumably regions[i] is [name, start, end] — verify against caller.)
var index = 0;
for (var key in tabixReader.idxContent.namehash) {
if (key === regions[i][0]) {
index = tabixReader.idxContent.namehash[key];
}
}
// FIX: removed stray ";" that followed the "{" of this callback.
reader.getRecords(index, regions[i][1], regions[i][2], function(data) {
d[i] = data;
// Manual countdown: only the last-to-finish callback triggers processing.
callbacksRemaining--;
if (callbacksRemaining <= 0) {
processData(d);
}
});
});
});
Summary:
I've made these few minor edits, but my code still isn't running as fast as I'd like. I'm sure there are many more changes I could and should make—I'm still hoping for advice!
A few painfully obvious edits of my own:
The frivolous for-loop:
// (Removed in the revision) Pre-filled d with empty arrays and built readers
// with a no-op callback; both are redone from scratch in the revised code
// below, so this loop accomplished nothing.
for (var a = 0; a < regions.length; a++) {
d.push([]);
readers.push(new readBinaryVCF(tabix, vcf, function(x) {x = x;}));
}
Why, why did I think this was a good idea? It's entirely unnecessary.
Omitted.
Callback-waiting:
// (Removed in the revision) Assumed the callback for the LAST region index
// also finishes last; async callbacks complete in arbitrary order, so this
// could render before all region data had arrived.
if (i == regions.length - 1) {
var finalText = "";
for (var j = 0; j < d.length; j++) {
for (var k = 0; k < d[j].length; k++) {
finalText = finalText + d[j][k] + "<br />";
}
}
$(".text").html(finalText);
}
I thought I could wait until the "last" callback had ended before parsing my data, not realizing that—because they're, again, callbacks—they won't finish in order. This answer inspired me to use a manual counter to determine when they've all finished.
Revised code:
// Fetch records for every region in parallel; d[i] receives the records for
// regions[i], and processData(d) runs exactly once, after the final callback
// completes (callbacks may finish in any order — hence the countdown).
var d = [];
var readers = [];
var callbacksRemaining = regions.length;
regions.forEach(function(item, i) {
readers[i] = new readBinaryVCF(tabix, vcf, function(tabixReader) {
// FIX: `reader` and `key` were implicit globals; with several callbacks
// in flight, each could clobber the others' values. Declare them locally.
var reader = readers[i];
// Look up the index for this region's name in the tabix name hash.
// (Presumably regions[i] is [name, start, end] — verify against caller.)
var index = 0;
for (var key in tabixReader.idxContent.namehash) {
if (key === regions[i][0]) {
index = tabixReader.idxContent.namehash[key];
}
}
// FIX: removed stray ";" that followed the "{" of this callback.
reader.getRecords(index, regions[i][1], regions[i][2], function(data) {
d[i] = data;
// Manual countdown: only the last-to-finish callback triggers processing.
callbacksRemaining--;
if (callbacksRemaining <= 0) {
processData(d);
}
});
});
});
Summary:
I've made these few minor edits, but my code still isn't running as fast as I'd like. I'm sure there are many more changes I could and should make—I'm still hoping for advice!