redis-benchmark: default precision=1, integer ms after 2 milliseconds.

Related to discussion and PR #5840.
antirez 2019-02-14 13:19:45 +01:00
parent e58ed84589
commit 78a2115c4c


@@ -458,10 +458,21 @@ static void showLatencyReport(void) {
         qsort(config.latency,config.requests,sizeof(long long),compareLatency);
         for (i = 0; i < config.requests; i++) {
-            if (config.latency[i]/usbetweenlat != curlat || i == (config.requests-1)) {
+            if (config.latency[i]/usbetweenlat != curlat ||
+                i == (config.requests-1))
+            {
                 curlat = config.latency[i]/usbetweenlat;
                 perc = ((float)(i+1)*100)/config.requests;
-                printf("%.2f%% <= %.*f milliseconds\n", perc, config.precision, curlat/pow(10.0, config.precision));
+                printf("%.2f%% <= %.*f milliseconds\n", perc, config.precision,
+                    curlat/pow(10.0, config.precision));
+                /* After the 2 milliseconds latency to have percentages split
+                 * by decimals will just add a lot of noise to the output. */
+                if (config.latency[i] > 2000) {
+                    config.precision = 0;
+                    usbetweenlat = ipow(10,
+                        MAX_LATENCY_PRECISION-config.precision);
+                }
             }
         }
         printf("%.2f requests per second\n\n", reqpersec);
@@ -699,7 +710,7 @@ int main(int argc, const char **argv) {
     config.tests = NULL;
     config.dbnum = 0;
     config.auth = NULL;
-    config.precision = 0;
+    config.precision = 1;
     i = parseOptions(argc,argv);
     argc -= i;
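
A quick sketch of the arithmetic behind the new default, again assuming MAX_LATENCY_PRECISION is 3 (the constant's value is not shown in this diff):

    #include <stdio.h>

    #define MAX_LATENCY_PRECISION 3 /* assumed value, not shown in this diff */

    static long long ipow(long long base, int exp) {
        long long result = 1;
        while (exp-- > 0) result *= base;
        return result;
    }

    int main(void) {
        for (int precision = 1; precision >= 0; precision--) {
            long long usbetweenlat = ipow(10, MAX_LATENCY_PRECISION-precision);
            printf("precision=%d -> buckets of %lld us (%.1f ms)\n",
                precision, usbetweenlat, usbetweenlat/1000.0);
        }
        return 0;
    }

With precision=1 the buckets are 100 us (0.1 ms) wide, so the new default reports percentiles in tenths of a millisecond instead of whole milliseconds, until the 2 ms cutoff above drops the precision back to 0 and 1000 us buckets.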