Skip to content

Commit

Permalink
Update results.cc | Add another significant digit to percentile latency display (#2066)
Browse files Browse the repository at this point in the history
  • Loading branch information
arjunsuresh authored Feb 1, 2025
1 parent 10d123c commit d7bbf88
Showing 1 changed file with 8 additions and 8 deletions.
16 changes: 8 additions & 8 deletions loadgen/results.cc
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ bool PerformanceSummary::EarlyStopping(
std::to_string(queries_issued) + ").\n" + " * Would discard " +
std::to_string(t - 1) + " highest latency queries.\n" +
" * Early stopping " +
DoubleToString(target_latency_percentile.percentile * 100, 0) +
DoubleToString(target_latency_percentile.percentile * 100, 1) +
"th percentile estimate: " + std::to_string(percentile_estimate);
early_stopping_latency_ss = percentile_estimate;

Expand All @@ -202,7 +202,7 @@ bool PerformanceSummary::EarlyStopping(
if (queries_issued < h_min + 1) {
*recommendation +=
"\n * Not enough queries processed for " +
DoubleToString(multi_stream_percentile * 100, 0) +
DoubleToString(multi_stream_percentile * 100, 1) +
"th percentile\n" +
" early stopping estimate (would need to process at\n least " +
std::to_string(h_min + 1) + " total queries).";
Expand All @@ -218,7 +218,7 @@ bool PerformanceSummary::EarlyStopping(
percentile_estimate = (*sample_latencies)[queries_issued - t];
*recommendation +=
"\n * Early stopping " +
DoubleToString(multi_stream_percentile * 100, 0) +
DoubleToString(multi_stream_percentile * 100, 1) +
"th percentile estimate: " + std::to_string(percentile_estimate);
early_stopping_latency_ms = percentile_estimate;
}
Expand Down Expand Up @@ -273,7 +273,7 @@ bool PerformanceSummary::EarlyStopping(
std::to_string(queries_issued) + ").\n" + " * Would discard " +
std::to_string(t - 1) + " highest latency queries.\n" +
" * Early stopping " +
DoubleToString(target_latency_percentile.percentile * 100, 0) +
DoubleToString(target_latency_percentile.percentile * 100, 1) +
"th percentile estimate: " + std::to_string(percentile_estimate);
early_stopping_latency_ms = percentile_estimate;
break;
Expand Down Expand Up @@ -392,13 +392,13 @@ void PerformanceSummary::LogSummary(AsyncSummary& summary) {

switch (settings.scenario) {
case TestScenario::SingleStream: {
summary(DoubleToString(target_latency_percentile.percentile * 100, 0) +
summary(DoubleToString(target_latency_percentile.percentile * 100, 1) +
"th percentile latency (ns) : ",
target_latency_percentile.sample_latency);
break;
}
case TestScenario::MultiStream: {
summary(DoubleToString(target_latency_percentile.percentile * 100, 0) +
summary(DoubleToString(target_latency_percentile.percentile * 100, 1) +
"th percentile latency (ns) : ",
target_latency_percentile.query_latency);
break;
Expand Down Expand Up @@ -430,14 +430,14 @@ void PerformanceSummary::LogSummary(AsyncSummary& summary) {
switch (settings.scenario) {
case TestScenario::SingleStream: {
summary(DoubleToString(token_target_latency_percentile.percentile * 100,
0) +
1) +
"th first token percentile latency (ns) : ",
token_target_latency_percentile.sample_latency);
break;
}
case TestScenario::MultiStream: {
summary(DoubleToString(token_target_latency_percentile.percentile * 100,
0) +
1) +
"th first token percentile latency (ns) : ",
token_target_latency_percentile.sample_latency);
break;
Expand Down

0 comments on commit d7bbf88

Please sign in to comment.