Regarding the time measurements: I like the math behind the way you compute the elapsed time by summing:
long start = 0;
long end = 0;
for (int i = 0; i < count; i++) {
    start += System.nanoTime();
    fileToSet(path, hs);
    end += System.nanoTime();
    hs.clear();
}
long average = (end - start) / count;
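As a side note (my own quick check, not part of your code): a single nanoTime() reading is already a huge number on most systems, since it counts nanoseconds from some arbitrary origin such as system boot, so those two running sums grow very fast:
// Rough illustration only: how few readings fit into a long before a running sum overflows.
long sample = System.nanoTime();
System.out.println("one nanoTime() reading: " + sample);
if (sample > 0) {
    System.out.println("readings until the sum overflows: " + (Long.MAX_VALUE / sample));
}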
The reason you should not do this, however, is that you can quickly run out of the long range. Slightly refined:
long elapsed = 0;
for (int i = 0; i < count; i++) {
    long start = System.nanoTime();
    fileToSet(path, hs);
    elapsed += System.nanoTime() - start;
    hs.clear();
}
long average = elapsed / count;
But what can happen here is that if fileToSet() is faster than the system timer granularity, you may end up summing zeroes. That's why you'd be better off with an inverse approach instead: measure everything that happens outside the benchmarked code, then subtract that from the total time:
long start = System.nanoTime();
long end = start;
long exclude = 0;
for (int i = 0; i < count; i++) {
    // time spent outside fileToSet() since the last checkpoint (hs.clear() plus loop overhead)
    exclude += System.nanoTime() - end;
    fileToSet(path, hs);
    end = System.nanoTime();
    hs.clear();
}
long elapsed = (end - start) - exclude;
long average = elapsed / count;
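For completeness, here is a minimal self-contained version of that last approach that compiles and runs as-is. The fileToSet() body, the file contents and the count are just my placeholders, since I don't know your actual implementation; only the timing skeleton matters:
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class FileToSetBench {

    // Hypothetical stand-in for the method under test: just reads all lines into the set.
    static void fileToSet(Path path, Set<String> hs) throws IOException {
        hs.addAll(Files.readAllLines(path));
    }

    public static void main(String[] args) throws IOException {
        Path path = Files.createTempFile("bench", ".txt");
        Files.write(path, List.of("alpha", "beta", "gamma"));
        Set<String> hs = new HashSet<>();
        int count = 10_000;

        long start = System.nanoTime();
        long end = start;
        long exclude = 0;
        for (int i = 0; i < count; i++) {
            // accumulate time spent outside fileToSet() since the last checkpoint
            exclude += System.nanoTime() - end;
            fileToSet(path, hs);
            end = System.nanoTime();
            hs.clear();
        }
        long elapsed = (end - start) - exclude;
        long average = elapsed / count;

        System.out.println("average fileToSet() time: " + average + " ns");
        Files.deleteIfExists(path);
    }
}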