diff --git a/README.md b/README.md
index b760cfc22..ea651516d 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ Bencher could have prevented that from happening.
Bencher allows you to detect and prevent performance regressions _before_ they hit production.
- **Run**: Run your benchmarks locally or in CI using your favorite benchmarking tools. The `bencher` CLI simply wraps your existing benchmark harness and stores its results.
-- **Track**: Track the results of your benchmarks over time. Monitor, query, and graph the results using the Bencher web console based on the source branch, testbed, and metric kind.
+- **Track**: Track the results of your benchmarks over time. Monitor, query, and graph the results using the Bencher web console based on the source branch, testbed, and measure.
- **Catch**: Catch performance regressions in CI. Bencher uses state of the art, customizable analytics to detect performance regressions before they make it to production.
For the same reasons that unit tests are run in CI to prevent feature regressions, benchmarks should be run in CI with Bencher to prevent performance regressions. Performance bugs are bugs!
@@ -192,23 +192,23 @@ bencher run --project my-project-slug --github-actions "${{ secrets.GITHUB_TOKEN
JsonAdapter::Json
- 🚨 (view plot | view alert )
+ 🚨 (view plot | view alert )
JsonAdapter::Magic (JSON)
- ✅ (view plot )
+ ✅ (view plot )
JsonAdapter::Magic (Rust)
- ✅ (view plot )
+ ✅ (view plot )
JsonAdapter::Rust
- ✅ (view plot )
+ ✅ (view plot )
JsonAdapter::RustBench
- 🚨 (view plot | view alert )
+ 🚨 (view plot | view alert )
@@ -245,9 +245,9 @@ Do **not** specify an exact version if using Bencher _Cloud_ as there are still
All public projects have their own [perf page](https://bencher.dev/perf). These results can easily be shared with an auto-updating perf image. Perfect for your README!
-
+
diff --git a/lib/bencher_adapter/src/adapters/java/jmh.rs b/lib/bencher_adapter/src/adapters/java/jmh.rs
index cacb04cb2..f18a0ce05 100644
--- a/lib/bencher_adapter/src/adapters/java/jmh.rs
+++ b/lib/bencher_adapter/src/adapters/java/jmh.rs
@@ -5,7 +5,7 @@ use serde::Deserialize;
use crate::{
adapters::util::{latency_as_nanos, throughput_as_secs},
- results::adapter_results::{AdapterMetricKind, AdapterResults},
+ results::adapter_results::{AdapterMeasure, AdapterResults},
Adaptable, AdapterError, Settings,
};
@@ -66,7 +66,7 @@ impl TryFrom for Option {
score_unit,
} = primary_metric;
- let metric_kind = if let Some((unit, slash_op)) = score_unit.split_once("/op") {
+ let measure = if let Some((unit, slash_op)) = score_unit.split_once("/op") {
if !slash_op.is_empty() {
return Err(AdapterError::BenchmarkUnits(slash_op.into()));
}
@@ -80,7 +80,7 @@ impl TryFrom for Option {
lower_value: Some(lower_value),
upper_value: Some(upper_value),
};
- AdapterMetricKind::Latency(json_metric)
+ AdapterMeasure::Latency(json_metric)
} else if let Some((ops_slash, unit)) = score_unit.split_once("ops/") {
if !ops_slash.is_empty() {
return Err(AdapterError::BenchmarkUnits(ops_slash.into()));
@@ -95,12 +95,12 @@ impl TryFrom for Option {
lower_value: Some(lower_value),
upper_value: Some(upper_value),
};
- AdapterMetricKind::Throughput(json_metric)
+ AdapterMeasure::Throughput(json_metric)
} else {
return Err(AdapterError::BenchmarkUnits(score_unit));
};
- benchmark_metrics.push((benchmark_name, metric_kind));
+ benchmark_metrics.push((benchmark_name, measure));
}
Ok(AdapterResults::new(benchmark_metrics))
diff --git a/lib/bencher_adapter/src/adapters/mod.rs b/lib/bencher_adapter/src/adapters/mod.rs
index 4f11c5281..daa659ccd 100644
--- a/lib/bencher_adapter/src/adapters/mod.rs
+++ b/lib/bencher_adapter/src/adapters/mod.rs
@@ -25,7 +25,7 @@ fn print_ln(input: &str) -> IResult<&str, ()> {
#[allow(clippy::panic, clippy::unwrap_used)]
pub(crate) mod test_util {
use bencher_json::project::{
- metric_kind::{LATENCY_SLUG_STR, THROUGHPUT_SLUG_STR},
+ measure::{LATENCY_SLUG_STR, THROUGHPUT_SLUG_STR},
report::JsonAverage,
};
use ordered_float::OrderedFloat;
diff --git a/lib/bencher_adapter/src/adapters/rust/iai.rs b/lib/bencher_adapter/src/adapters/rust/iai.rs
index d4052ee59..fafae9fa3 100644
--- a/lib/bencher_adapter/src/adapters/rust/iai.rs
+++ b/lib/bencher_adapter/src/adapters/rust/iai.rs
@@ -1,6 +1,6 @@
use bencher_json::{
project::{
- metric_kind::{
+ measure::{
ESTIMATED_CYCLES_NAME_STR, INSTRUCTIONS_NAME_STR, L1_ACCESSES_NAME_STR,
L2_ACCESSES_NAME_STR, RAM_ACCESSES_NAME_STR,
},
@@ -19,7 +19,7 @@ use nom::{
use crate::{
adapters::util::{parse_f64, parse_u64},
- results::adapter_results::{AdapterResults, IaiMetricKind},
+ results::adapter_results::{AdapterResults, IaiMeasure},
Adaptable, Settings,
};
@@ -55,7 +55,7 @@ impl Adaptable for AdapterRustIai {
fn parse_iai_lines(
lines: [&str; IAI_METRICS_LINE_COUNT],
-) -> Option<(BenchmarkName, Vec)> {
+) -> Option<(BenchmarkName, Vec)> {
let [benchmark_name_line, instructions_line, l1_accesses_line, l2_accesses_line, ram_accesses_line, estimated_cycles_line] =
lines;
@@ -65,33 +65,32 @@ fn parse_iai_lines(
(
INSTRUCTIONS_NAME_STR,
instructions_line,
- IaiMetricKind::Instructions as fn(JsonMetric) -> IaiMetricKind,
+ IaiMeasure::Instructions as fn(JsonMetric) -> IaiMeasure,
),
(
L1_ACCESSES_NAME_STR,
l1_accesses_line,
- IaiMetricKind::L1Accesses,
+ IaiMeasure::L1Accesses,
),
(
L2_ACCESSES_NAME_STR,
l2_accesses_line,
- IaiMetricKind::L2Accesses,
+ IaiMeasure::L2Accesses,
),
(
RAM_ACCESSES_NAME_STR,
ram_accesses_line,
- IaiMetricKind::RamAccesses,
+ IaiMeasure::RamAccesses,
),
(
ESTIMATED_CYCLES_NAME_STR,
estimated_cycles_line,
- IaiMetricKind::EstimatedCycles,
+ IaiMeasure::EstimatedCycles,
),
]
.into_iter()
- .map(|(metric_kind, input, into_variant)| {
- parse_iai_metric(input, metric_kind)
- .map(|(_remainder, json_metric)| into_variant(json_metric))
+ .map(|(measure, input, into_variant)| {
+ parse_iai_metric(input, measure).map(|(_remainder, json_metric)| into_variant(json_metric))
})
.collect::, _>>()
.ok()?;
@@ -100,11 +99,11 @@ fn parse_iai_lines(
}
#[allow(clippy::cast_precision_loss)]
-fn parse_iai_metric<'a>(input: &'a str, metric_kind: &'static str) -> IResult<&'a str, JsonMetric> {
+fn parse_iai_metric<'a>(input: &'a str, measure: &'static str) -> IResult<&'a str, JsonMetric> {
map(
tuple((
space0,
- tag(metric_kind),
+ tag(measure),
tag(":"),
space1,
parse_u64,
@@ -147,7 +146,7 @@ pub(crate) mod test_rust_iai {
Adaptable, AdapterResults,
};
use bencher_json::{
- project::metric_kind::{
+ project::measure::{
ESTIMATED_CYCLES_SLUG_STR, INSTRUCTIONS_NAME_STR, INSTRUCTIONS_SLUG_STR,
L1_ACCESSES_SLUG_STR, L2_ACCESSES_SLUG_STR, RAM_ACCESSES_SLUG_STR,
},
diff --git a/lib/bencher_adapter/src/results/adapter_metrics.rs b/lib/bencher_adapter/src/results/adapter_metrics.rs
index 8a162e28a..86297ba54 100644
--- a/lib/bencher_adapter/src/results/adapter_metrics.rs
+++ b/lib/bencher_adapter/src/results/adapter_metrics.rs
@@ -3,7 +3,7 @@ use std::{collections::HashMap, str::FromStr};
use bencher_json::JsonMetric;
use serde::{Deserialize, Serialize};
-use super::{CombinedKind, MetricKind, OrdKind};
+use super::{CombinedKind, Measure, OrdKind};
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct AdapterMetrics {
@@ -11,7 +11,7 @@ pub struct AdapterMetrics {
pub inner: MetricsMap,
}
-pub type MetricsMap = HashMap;
+pub type MetricsMap = HashMap;
impl From for AdapterMetrics {
fn from(inner: MetricsMap) -> Self {
@@ -22,8 +22,8 @@ impl From for AdapterMetrics {
impl AdapterMetrics {
pub(crate) fn combined(self, mut other: Self, kind: CombinedKind) -> Self {
let mut metric_map = HashMap::new();
- for (metric_kind, metric) in self.inner {
- let other_metric = other.inner.remove(&metric_kind);
+ for (measure, metric) in self.inner {
+ let other_metric = other.inner.remove(&measure);
let combined_metric = if let Some(other_metric) = other_metric {
match kind {
CombinedKind::Ord(ord_kind) => match ord_kind {
@@ -35,14 +35,14 @@ impl AdapterMetrics {
} else {
metric
};
- metric_map.insert(metric_kind, combined_metric);
+ metric_map.insert(measure, combined_metric);
}
metric_map.extend(other.inner);
metric_map.into()
}
pub fn get(&self, key: &str) -> Option<&JsonMetric> {
- self.inner.get(&MetricKind::from_str(key).ok()?)
+ self.inner.get(&Measure::from_str(key).ok()?)
}
}
@@ -51,8 +51,8 @@ impl std::ops::Div for AdapterMetrics {
fn div(self, rhs: usize) -> Self::Output {
let mut metric_map = HashMap::new();
- for (metric_kind, metric) in self.inner {
- metric_map.insert(metric_kind, metric / rhs);
+ for (measure, metric) in self.inner {
+ metric_map.insert(measure, metric / rhs);
}
metric_map.into()
}
diff --git a/lib/bencher_adapter/src/results/adapter_results.rs b/lib/bencher_adapter/src/results/adapter_results.rs
index 073f0d207..3fa681fe2 100644
--- a/lib/bencher_adapter/src/results/adapter_results.rs
+++ b/lib/bencher_adapter/src/results/adapter_results.rs
@@ -2,11 +2,11 @@ use std::{collections::HashMap, str::FromStr};
use bencher_json::{
project::{
- metric::Mean,
- metric_kind::{
+ measure::{
ESTIMATED_CYCLES_SLUG_STR, INSTRUCTIONS_SLUG_STR, L1_ACCESSES_SLUG_STR,
L2_ACCESSES_SLUG_STR, LATENCY_SLUG_STR, RAM_ACCESSES_SLUG_STR, THROUGHPUT_SLUG_STR,
},
+ metric::Mean,
},
BenchmarkName, JsonMetric, ResourceId,
};
@@ -16,38 +16,35 @@ use serde::{Deserialize, Serialize};
use super::{adapter_metrics::AdapterMetrics, CombinedKind};
-const METRIC_KIND_SLUG_ERROR: &str = "Failed to parse metric kind slug.";
+const MEASURE_SLUG_ERROR: &str = "Failed to parse measure slug.";
#[allow(clippy::expect_used)]
pub static LATENCY_RESOURCE_ID: Lazy =
- Lazy::new(|| LATENCY_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR));
+ Lazy::new(|| LATENCY_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR));
#[allow(clippy::expect_used)]
pub static THROUGHPUT_RESOURCE_ID: Lazy =
- Lazy::new(|| THROUGHPUT_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR));
+ Lazy::new(|| THROUGHPUT_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR));
#[allow(clippy::expect_used)]
pub static INSTRUCTIONS_RESOURCE_ID: Lazy =
- Lazy::new(|| INSTRUCTIONS_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR));
+ Lazy::new(|| INSTRUCTIONS_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR));
#[allow(clippy::expect_used)]
pub static L1_ACCESSES_RESOURCE_ID: Lazy =
- Lazy::new(|| L1_ACCESSES_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR));
+ Lazy::new(|| L1_ACCESSES_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR));
#[allow(clippy::expect_used)]
pub static L2_ACCESSES_RESOURCE_ID: Lazy =
- Lazy::new(|| L2_ACCESSES_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR));
+ Lazy::new(|| L2_ACCESSES_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR));
#[allow(clippy::expect_used)]
pub static RAM_ACCESSES_RESOURCE_ID: Lazy =
- Lazy::new(|| RAM_ACCESSES_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR));
+ Lazy::new(|| RAM_ACCESSES_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR));
#[allow(clippy::expect_used)]
-pub static ESTIMATED_CYCLES_RESOURCE_ID: Lazy = Lazy::new(|| {
- ESTIMATED_CYCLES_SLUG_STR
- .parse()
- .expect(METRIC_KIND_SLUG_ERROR)
-});
+pub static ESTIMATED_CYCLES_RESOURCE_ID: Lazy =
+ Lazy::new(|| ESTIMATED_CYCLES_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR));
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct AdapterResults {
@@ -64,13 +61,13 @@ impl From for AdapterResults {
}
#[derive(Debug, Clone)]
-pub enum AdapterMetricKind {
+pub enum AdapterMeasure {
Latency(JsonMetric),
Throughput(JsonMetric),
}
#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum IaiMetricKind {
+pub enum IaiMeasure {
Instructions(JsonMetric),
L1Accesses(JsonMetric),
L2Accesses(JsonMetric),
@@ -79,21 +76,21 @@ pub enum IaiMetricKind {
}
impl AdapterResults {
- pub fn new(benchmark_metrics: Vec<(BenchmarkName, AdapterMetricKind)>) -> Option {
+ pub fn new(benchmark_metrics: Vec<(BenchmarkName, AdapterMeasure)>) -> Option {
if benchmark_metrics.is_empty() {
return None;
}
let mut results_map = HashMap::new();
- for (benchmark_name, metric_kind) in benchmark_metrics {
+ for (benchmark_name, measure) in benchmark_metrics {
let adapter_metrics = AdapterMetrics {
- inner: match metric_kind {
- AdapterMetricKind::Latency(json_metric) => {
+ inner: match measure {
+ AdapterMeasure::Latency(json_metric) => {
hmap! {
LATENCY_RESOURCE_ID.clone() => json_metric
}
},
- AdapterMetricKind::Throughput(json_metric) => {
+ AdapterMeasure::Throughput(json_metric) => {
hmap! {
THROUGHPUT_RESOURCE_ID.clone() => json_metric
}
@@ -111,7 +108,7 @@ impl AdapterResults {
benchmark_metrics
.into_iter()
.map(|(benchmark_name, json_metric)| {
- (benchmark_name, AdapterMetricKind::Latency(json_metric))
+ (benchmark_name, AdapterMeasure::Latency(json_metric))
})
.collect(),
)
@@ -122,13 +119,13 @@ impl AdapterResults {
benchmark_metrics
.into_iter()
.map(|(benchmark_name, json_metric)| {
- (benchmark_name, AdapterMetricKind::Throughput(json_metric))
+ (benchmark_name, AdapterMeasure::Throughput(json_metric))
})
.collect(),
)
}
- pub fn new_iai(benchmark_metrics: Vec<(BenchmarkName, Vec)>) -> Option {
+ pub fn new_iai(benchmark_metrics: Vec<(BenchmarkName, Vec)>) -> Option {
if benchmark_metrics.is_empty() {
return None;
}
@@ -140,19 +137,19 @@ impl AdapterResults {
.or_insert_with(AdapterMetrics::default);
for metric in metrics {
let (resource_id, metric) = match metric {
- IaiMetricKind::Instructions(json_metric) => {
+ IaiMeasure::Instructions(json_metric) => {
(INSTRUCTIONS_RESOURCE_ID.clone(), json_metric)
},
- IaiMetricKind::L1Accesses(json_metric) => {
+ IaiMeasure::L1Accesses(json_metric) => {
(L1_ACCESSES_RESOURCE_ID.clone(), json_metric)
},
- IaiMetricKind::L2Accesses(json_metric) => {
+ IaiMeasure::L2Accesses(json_metric) => {
(L2_ACCESSES_RESOURCE_ID.clone(), json_metric)
},
- IaiMetricKind::RamAccesses(json_metric) => {
+ IaiMeasure::RamAccesses(json_metric) => {
(RAM_ACCESSES_RESOURCE_ID.clone(), json_metric)
},
- IaiMetricKind::EstimatedCycles(json_metric) => {
+ IaiMeasure::EstimatedCycles(json_metric) => {
(ESTIMATED_CYCLES_RESOURCE_ID.clone(), json_metric)
},
};
diff --git a/lib/bencher_adapter/src/results/mod.rs b/lib/bencher_adapter/src/results/mod.rs
index 473e96006..a58d399d1 100644
--- a/lib/bencher_adapter/src/results/mod.rs
+++ b/lib/bencher_adapter/src/results/mod.rs
@@ -17,7 +17,7 @@ pub mod results_reducer;
use adapter_results::{AdapterResults, ResultsMap};
use results_reducer::ResultsReducer;
-pub type MetricKind = ResourceId;
+pub type Measure = ResourceId;
#[derive(Debug, Clone)]
pub struct AdapterResultsArray {
diff --git a/lib/bencher_adapter/src/results/results_reducer.rs b/lib/bencher_adapter/src/results/results_reducer.rs
index 3cacc6418..061606a3e 100644
--- a/lib/bencher_adapter/src/results/results_reducer.rs
+++ b/lib/bencher_adapter/src/results/results_reducer.rs
@@ -3,13 +3,12 @@ use std::collections::HashMap;
use bencher_json::{project::metric::Median, BenchmarkName, JsonMetric};
use super::{
- adapter_metrics::AdapterMetrics, adapter_results::AdapterResults, AdapterResultsArray,
- MetricKind,
+ adapter_metrics::AdapterMetrics, adapter_results::AdapterResults, AdapterResultsArray, Measure,
};
#[derive(Debug, Clone, Default)]
pub struct ResultsReducer {
- pub inner: HashMap,
+ pub inner: HashMap,
}
impl From for ResultsReducer {
@@ -25,23 +24,23 @@ impl From for ResultsReducer {
impl ResultsReducer {
fn reduce(&mut self, results: AdapterResults) {
for (benchmark_name, metrics) in results.inner {
- if let Some(metric_kind_map) = self.inner.get_mut(&benchmark_name) {
- for (metric_kind, metric) in metrics.inner {
- if let Some(list) = metric_kind_map.inner.get_mut(&metric_kind) {
+ if let Some(measures_map) = self.inner.get_mut(&benchmark_name) {
+ for (measure, metric) in metrics.inner {
+ if let Some(list) = measures_map.inner.get_mut(&measure) {
list.push(metric);
} else {
- metric_kind_map.inner.insert(metric_kind, vec![metric]);
+ measures_map.inner.insert(measure, vec![metric]);
}
}
} else {
- let mut metric_kind_map = HashMap::new();
- for (metric_kind, metric) in metrics.inner {
- metric_kind_map.insert(metric_kind, vec![metric]);
+ let mut measures_map = HashMap::new();
+ for (measure, metric) in metrics.inner {
+ measures_map.insert(measure, vec![metric]);
}
self.inner.insert(
benchmark_name,
- MetricKindMap {
- inner: metric_kind_map,
+ MeasuresMap {
+ inner: measures_map,
},
);
}
@@ -50,16 +49,16 @@ impl ResultsReducer {
}
#[derive(Debug, Clone)]
-pub struct MetricKindMap {
- pub inner: HashMap>,
+pub struct MeasuresMap {
+ pub inner: HashMap>,
}
-impl MetricKindMap {
+impl MeasuresMap {
pub(crate) fn median(self) -> AdapterMetrics {
let mut metric_map = HashMap::new();
- for (metric_kind, metric) in self.inner {
+ for (measure, metric) in self.inner {
if let Some(median) = JsonMetric::median(metric) {
- metric_map.insert(metric_kind, median);
+ metric_map.insert(measure, median);
}
}
metric_map.into()
diff --git a/lib/bencher_client/src/lib.rs b/lib/bencher_client/src/lib.rs
index 4df2a7452..29790f8e7 100644
--- a/lib/bencher_client/src/lib.rs
+++ b/lib/bencher_client/src/lib.rs
@@ -47,10 +47,10 @@ from_client!(
OrganizationUuid,
ProjectUuid,
ReportUuid,
- MetricKindUuid,
BranchUuid,
TestbedUuid,
BenchmarkUuid,
+ MeasureUuid,
ThresholdUuid,
StatisticUuid,
AlertUuid,
@@ -96,8 +96,8 @@ try_from_client!(
JsonBenchmark,
JsonBranches,
JsonBranch,
- JsonMetricKinds,
- JsonMetricKind,
+ JsonMeasures,
+ JsonMeasure,
JsonProjects,
JsonProject,
JsonPerf,
diff --git a/lib/bencher_comment/src/lib.rs b/lib/bencher_comment/src/lib.rs
index adad844d0..f3b2588dc 100644
--- a/lib/bencher_comment/src/lib.rs
+++ b/lib/bencher_comment/src/lib.rs
@@ -3,7 +3,7 @@ use std::{collections::BTreeMap, time::Duration};
use bencher_json::{
project::perf::{LOWER_BOUNDARY, UPPER_BOUNDARY},
AlertUuid, BenchmarkName, BenchmarkUuid, BranchUuid, DateTime, JsonBoundary, JsonPerfQuery,
- JsonReport, MetricKindUuid, NonEmpty, Slug, TestbedUuid,
+ JsonReport, MeasureUuid, NonEmpty, Slug, TestbedUuid,
};
use url::Url;
@@ -30,12 +30,12 @@ impl ReportComment {
let mut comment = String::new();
comment.push_str("View results:");
- for (benchmark, metric_kinds) in &self.benchmark_urls.0 {
- for (metric_kind, MetricKindData { console_url, .. }) in metric_kinds {
+ for (benchmark, measures) in &self.benchmark_urls.0 {
+ for (measure, MeasureData { console_url, .. }) in measures {
comment.push_str(&format!(
- "\n- {benchmark_name} ({metric_kind_name}): {console_url}",
+ "\n- {benchmark_name} ({measure_name}): {console_url}",
benchmark_name = benchmark.name,
- metric_kind_name = metric_kind.name
+ measure_name = measure.name
));
}
}
@@ -45,11 +45,11 @@ impl ReportComment {
}
comment.push_str("\nView alerts:");
- for ((benchmark, metric_kind), AlertData { console_url, .. }) in &self.alert_urls.0 {
+ for ((benchmark, measure), AlertData { console_url, .. }) in &self.alert_urls.0 {
comment.push_str(&format!(
- "\n- {benchmark_name} ({metric_kind_name}): {console_url}",
+ "\n- {benchmark_name} ({measure_name}): {console_url}",
benchmark_name = benchmark.name,
- metric_kind_name = metric_kind.name,
+ measure_name = measure.name,
));
}
@@ -142,14 +142,14 @@ impl ReportComment {
require_threshold: bool,
public_links: bool,
) {
- let Some((_benchmark, metric_kinds)) = self.benchmark_urls.0.first_key_value() else {
+ let Some((_benchmark, measures)) = self.benchmark_urls.0.first_key_value() else {
html.push_str("No benchmarks found! ");
return;
};
html.push_str("");
self.html_benchmarks_table_header(
html,
- metric_kinds,
+ measures,
with_metrics,
require_threshold,
public_links,
@@ -161,45 +161,43 @@ impl ReportComment {
fn html_benchmarks_table_header(
&self,
html: &mut String,
- metric_kinds: &MetricKindsMap,
+ measures: &MeasuresMap,
with_metrics: bool,
require_threshold: bool,
public_links: bool,
) {
html.push_str("");
html.push_str("Benchmark ");
- for (metric_kind, MetricKindData { boundary, .. }) in metric_kinds {
+ for (measure, MeasureData { boundary, .. }) in measures {
if require_threshold && !BenchmarkUrls::boundary_has_threshold(*boundary) {
continue;
}
- let metric_kind_name = &metric_kind.name;
+ let measure_name = &measure.name;
if public_links {
- html.push_str(&format!("{metric_kind_name} "));
+ html.push_str(&format!("{measure_name} "));
} else {
- let metric_kind_path = format!(
- "/console/projects/{}/metric-kinds/{}",
- self.project_slug, metric_kind.slug
+ let measure_path = format!(
+ "/console/projects/{}/measures/{}",
+ self.project_slug, measure.slug
);
let url = self.endpoint_url.clone();
- let url = url.join(&metric_kind_path).unwrap_or(url);
- html.push_str(&format!(
- r#"{metric_kind_name} "#
- ));
+ let url = url.join(&measure_path).unwrap_or(url);
+ html.push_str(&format!(r#"{measure_name} "#));
}
if with_metrics {
- let units = &metric_kind.units;
+ let units = &measure.units;
html.push_str(&format!(
- "{metric_kind_name} Results {units} | (Δ%) ",
+ "{measure_name} Results {units} | (Δ%) ",
));
if boundary.lower_limit.is_some() {
html.push_str(&format!(
- "{metric_kind_name} Lower Boundary {units} | (%) "
+ "{measure_name} Lower Boundary {units} | (%) "
));
}
if boundary.upper_limit.is_some() {
html.push_str(&format!(
- "{metric_kind_name} Upper Boundary {units} | (%) "
+ "{measure_name} Upper Boundary {units} | (%) "
));
}
}
@@ -214,7 +212,7 @@ impl ReportComment {
require_threshold: bool,
public_links: bool,
) {
- for (benchmark, metric_kinds) in &self.benchmark_urls.0 {
+ for (benchmark, measures) in &self.benchmark_urls.0 {
html.push_str(" ");
if public_links {
html.push_str(&format!("{name} ", name = benchmark.name,));
@@ -231,14 +229,14 @@ impl ReportComment {
));
}
for (
- metric_kind,
- MetricKindData {
+ measure,
+ MeasureData {
public_url,
console_url,
value,
boundary,
},
- ) in metric_kinds
+ ) in measures
{
if require_threshold && !BenchmarkUrls::boundary_has_threshold(*boundary) {
continue;
@@ -251,7 +249,7 @@ impl ReportComment {
let alert_url = self
.alert_urls
.0
- .get(&(benchmark.clone(), metric_kind.clone()))
+ .get(&(benchmark.clone(), measure.clone()))
.map(
|AlertData {
public_url,
@@ -354,8 +352,8 @@ impl ReportComment {
}
}
-pub struct BenchmarkUrls(BTreeMap);
-pub type MetricKindsMap = BTreeMap;
+pub struct BenchmarkUrls(BTreeMap);
+pub type MeasuresMap = BTreeMap;
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
struct Benchmark {
@@ -364,14 +362,14 @@ struct Benchmark {
}
#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub struct MetricKind {
+pub struct Measure {
name: NonEmpty,
slug: Slug,
units: NonEmpty,
}
#[derive(Clone)]
-pub struct MetricKindData {
+pub struct MeasureData {
pub public_url: Url,
pub console_url: Url,
pub value: f64,
@@ -392,10 +390,10 @@ impl BenchmarkUrls {
let mut urls = BTreeMap::new();
if let Some(iteration) = json_report.results.first() {
for result in iteration {
- let metric_kind = MetricKind {
- name: result.metric_kind.name.clone(),
- slug: result.metric_kind.slug.clone(),
- units: result.metric_kind.units.clone(),
+ let measure = Measure {
+ name: result.measure.name.clone(),
+ slug: result.measure.slug.clone(),
+ units: result.measure.units.clone(),
};
for benchmark_metric in &result.benchmarks {
let benchmark = Benchmark {
@@ -405,21 +403,21 @@ impl BenchmarkUrls {
let benchmark_urls = urls.entry(benchmark).or_insert_with(BTreeMap::new);
let boundary = benchmark_metric.boundary.into();
- let data = MetricKindData {
+ let data = MeasureData {
public_url: benchmark_url.to_public_url(
- result.metric_kind.uuid,
benchmark_metric.uuid,
+ result.measure.uuid,
boundary,
),
console_url: benchmark_url.to_console_url(
- result.metric_kind.uuid,
benchmark_metric.uuid,
+ result.measure.uuid,
boundary,
),
value: benchmark_metric.metric.value.into(),
boundary,
};
- benchmark_urls.insert(metric_kind.clone(), data);
+ benchmark_urls.insert(measure.clone(), data);
}
}
}
@@ -431,10 +429,10 @@ impl BenchmarkUrls {
self.0.values().any(Self::benchmark_has_threshold)
}
- fn benchmark_has_threshold(metric_kinds: &MetricKindsMap) -> bool {
- metric_kinds
+ fn benchmark_has_threshold(measures: &MeasuresMap) -> bool {
+ measures
.values()
- .any(|MetricKindData { boundary, .. }| Self::boundary_has_threshold(*boundary))
+ .any(|MeasureData { boundary, .. }| Self::boundary_has_threshold(*boundary))
}
fn boundary_has_threshold(boundary: Boundary) -> bool {
@@ -475,34 +473,34 @@ impl BenchmarkUrl {
fn to_public_url(
&self,
- metric_kind: MetricKindUuid,
benchmark: BenchmarkUuid,
+ measure: MeasureUuid,
boundary: Boundary,
) -> Url {
- self.to_url(metric_kind, benchmark, boundary, true)
+ self.to_url(benchmark, measure, boundary, true)
}
fn to_console_url(
&self,
- metric_kind: MetricKindUuid,
benchmark: BenchmarkUuid,
+ measure: MeasureUuid,
boundary: Boundary,
) -> Url {
- self.to_url(metric_kind, benchmark, boundary, false)
+ self.to_url(benchmark, measure, boundary, false)
}
fn to_url(
&self,
- metric_kind: MetricKindUuid,
benchmark: BenchmarkUuid,
+ measure: MeasureUuid,
boundary: Boundary,
public_links: bool,
) -> Url {
let json_perf_query = JsonPerfQuery {
- metric_kinds: vec![metric_kind],
branches: vec![self.branch],
testbeds: vec![self.testbed],
benchmarks: vec![benchmark],
+ measures: vec![measure],
start_time: Some((self.start_time.into_inner() - DEFAULT_REPORT_HISTORY).into()),
end_time: Some(self.end_time),
};
@@ -558,7 +556,7 @@ impl Boundary {
}
}
-pub struct AlertUrls(BTreeMap<(Benchmark, MetricKind), AlertData>);
+pub struct AlertUrls(BTreeMap<(Benchmark, Measure), AlertData>);
#[derive(Clone)]
pub struct AlertData {
@@ -575,10 +573,10 @@ impl AlertUrls {
name: alert.benchmark.name.clone(),
slug: alert.benchmark.slug.clone(),
};
- let metric_kind = MetricKind {
- name: alert.threshold.metric_kind.name.clone(),
- slug: alert.threshold.metric_kind.slug.clone(),
- units: alert.threshold.metric_kind.units.clone(),
+ let measure = Measure {
+ name: alert.threshold.measure.name.clone(),
+ slug: alert.threshold.measure.slug.clone(),
+ units: alert.threshold.measure.units.clone(),
};
let public_url =
Self::to_public_url(endpoint_url.clone(), &json_report.project.slug, alert.uuid);
@@ -588,7 +586,7 @@ impl AlertUrls {
public_url,
console_url,
};
- urls.insert((benchmark, metric_kind), data);
+ urls.insert((benchmark, measure), data);
}
Self(urls)
diff --git a/lib/bencher_json/src/lib.rs b/lib/bencher_json/src/lib.rs
index 5d0724c45..280e75c4f 100644
--- a/lib/bencher_json/src/lib.rs
+++ b/lib/bencher_json/src/lib.rs
@@ -35,8 +35,8 @@ pub use project::{
benchmark::{BenchmarkUuid, JsonBenchmark, JsonBenchmarks},
boundary::{BoundaryUuid, JsonBoundaries, JsonBoundary},
branch::{BranchUuid, JsonBranch, JsonBranches, JsonNewBranch, VersionUuid},
- metric::{JsonMetric, JsonMetricsMap, JsonResultsMap, MetricKind, MetricUuid},
- metric_kind::{JsonMetricKind, JsonMetricKinds, JsonNewMetricKind, MetricKindUuid},
+ measure::{JsonMeasure, JsonMeasures, JsonNewMeasure, MeasureUuid},
+ metric::{JsonMetric, JsonMetricsMap, JsonResultsMap, Measure, MetricUuid},
perf::{JsonPerf, JsonPerfQuery, PerfUuid},
report::{JsonNewReport, JsonReport, JsonReports, ReportUuid},
testbed::{JsonNewTestbed, JsonTestbed, JsonTestbeds, TestbedUuid},
diff --git a/lib/bencher_json/src/project/metric_kind.rs b/lib/bencher_json/src/project/measure.rs
similarity index 62%
rename from lib/bencher_json/src/project/metric_kind.rs
rename to lib/bencher_json/src/project/measure.rs
index c23970430..d2180a633 100644
--- a/lib/bencher_json/src/project/metric_kind.rs
+++ b/lib/bencher_json/src/project/measure.rs
@@ -10,122 +10,102 @@ use serde::{Deserialize, Serialize};
use crate::ProjectUuid;
-const METRIC_KIND_NAME_ERROR: &str = "Failed to parse metric kind name.";
-const METRIC_KIND_SLUG_ERROR: &str = "Failed to parse metric kind slug.";
-const METRIC_KIND_UNITS_ERROR: &str = "Failed to parse metric kind units.";
+const MEASURE_NAME_ERROR: &str = "Failed to parse measure name.";
+const MEASURE_SLUG_ERROR: &str = "Failed to parse measure slug.";
+const MEASURE_UNITS_ERROR: &str = "Failed to parse measure units.";
pub const LATENCY_NAME_STR: &str = "Latency";
pub const LATENCY_SLUG_STR: &str = "latency";
pub const LATENCY_UNITS_STR: &str = "nanoseconds (ns)";
static LATENCY_NAME: Lazy =
- Lazy::new(|| LATENCY_NAME_STR.parse().expect(METRIC_KIND_NAME_ERROR));
+ Lazy::new(|| LATENCY_NAME_STR.parse().expect(MEASURE_NAME_ERROR));
static LATENCY_SLUG: Lazy> =
- Lazy::new(|| Some(LATENCY_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR)));
+ Lazy::new(|| Some(LATENCY_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR)));
static LATENCY_UNITS: Lazy =
- Lazy::new(|| LATENCY_UNITS_STR.parse().expect(METRIC_KIND_UNITS_ERROR));
+ Lazy::new(|| LATENCY_UNITS_STR.parse().expect(MEASURE_UNITS_ERROR));
pub const THROUGHPUT_NAME_STR: &str = "Throughput";
pub const THROUGHPUT_SLUG_STR: &str = "throughput";
pub const THROUGHPUT_UNITS_STR: &str = "operations / second (ops/s)";
static THROUGHPUT_NAME: Lazy =
- Lazy::new(|| THROUGHPUT_NAME_STR.parse().expect(METRIC_KIND_NAME_ERROR));
+ Lazy::new(|| THROUGHPUT_NAME_STR.parse().expect(MEASURE_NAME_ERROR));
static THROUGHPUT_SLUG: Lazy> =
- Lazy::new(|| Some(THROUGHPUT_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR)));
+ Lazy::new(|| Some(THROUGHPUT_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR)));
static THROUGHPUT_UNITS: Lazy =
- Lazy::new(|| THROUGHPUT_UNITS_STR.parse().expect(METRIC_KIND_UNITS_ERROR));
+ Lazy::new(|| THROUGHPUT_UNITS_STR.parse().expect(MEASURE_UNITS_ERROR));
-// Iai metric kinds
+// Iai measures
pub const INSTRUCTIONS_NAME_STR: &str = "Instructions";
pub const INSTRUCTIONS_SLUG_STR: &str = "instructions";
pub const INSTRUCTIONS_UNITS_STR: &str = "instructions";
static INSTRUCTIONS_NAME: Lazy =
- Lazy::new(|| INSTRUCTIONS_NAME_STR.parse().expect(METRIC_KIND_NAME_ERROR));
+ Lazy::new(|| INSTRUCTIONS_NAME_STR.parse().expect(MEASURE_NAME_ERROR));
static INSTRUCTIONS_SLUG: Lazy> =
- Lazy::new(|| Some(INSTRUCTIONS_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR)));
-static INSTRUCTIONS_UNITS: Lazy = Lazy::new(|| {
- INSTRUCTIONS_UNITS_STR
- .parse()
- .expect(METRIC_KIND_UNITS_ERROR)
-});
+ Lazy::new(|| Some(INSTRUCTIONS_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR)));
+static INSTRUCTIONS_UNITS: Lazy =
+ Lazy::new(|| INSTRUCTIONS_UNITS_STR.parse().expect(MEASURE_UNITS_ERROR));
pub const L1_ACCESSES_NAME_STR: &str = "L1 Accesses";
pub const L1_ACCESSES_SLUG_STR: &str = "l1-accesses";
pub const L1_ACCESSES_UNITS_STR: &str = "accesses";
static L1_ACCESSES_NAME: Lazy =
- Lazy::new(|| L1_ACCESSES_NAME_STR.parse().expect(METRIC_KIND_NAME_ERROR));
+ Lazy::new(|| L1_ACCESSES_NAME_STR.parse().expect(MEASURE_NAME_ERROR));
static L1_ACCESSES_SLUG: Lazy> =
- Lazy::new(|| Some(L1_ACCESSES_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR)));
-static L1_ACCESSES_UNITS: Lazy = Lazy::new(|| {
- L1_ACCESSES_UNITS_STR
- .parse()
- .expect(METRIC_KIND_UNITS_ERROR)
-});
+ Lazy::new(|| Some(L1_ACCESSES_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR)));
+static L1_ACCESSES_UNITS: Lazy =
+ Lazy::new(|| L1_ACCESSES_UNITS_STR.parse().expect(MEASURE_UNITS_ERROR));
pub const L2_ACCESSES_NAME_STR: &str = "L2 Accesses";
pub const L2_ACCESSES_SLUG_STR: &str = "l2-accesses";
pub const L2_ACCESSES_UNITS_STR: &str = "accesses";
static L2_ACCESSES_NAME: Lazy =
- Lazy::new(|| L2_ACCESSES_NAME_STR.parse().expect(METRIC_KIND_NAME_ERROR));
+ Lazy::new(|| L2_ACCESSES_NAME_STR.parse().expect(MEASURE_NAME_ERROR));
static L2_ACCESSES_SLUG: Lazy> =
- Lazy::new(|| Some(L2_ACCESSES_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR)));
-static L2_ACCESSES_UNITS: Lazy = Lazy::new(|| {
- L2_ACCESSES_UNITS_STR
- .parse()
- .expect(METRIC_KIND_UNITS_ERROR)
-});
+ Lazy::new(|| Some(L2_ACCESSES_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR)));
+static L2_ACCESSES_UNITS: Lazy =
+ Lazy::new(|| L2_ACCESSES_UNITS_STR.parse().expect(MEASURE_UNITS_ERROR));
pub const RAM_ACCESSES_NAME_STR: &str = "RAM Accesses";
pub const RAM_ACCESSES_SLUG_STR: &str = "ram-accesses";
pub const RAM_ACCESSES_UNITS_STR: &str = "accesses";
static RAM_ACCESSES_NAME: Lazy =
- Lazy::new(|| RAM_ACCESSES_NAME_STR.parse().expect(METRIC_KIND_NAME_ERROR));
+ Lazy::new(|| RAM_ACCESSES_NAME_STR.parse().expect(MEASURE_NAME_ERROR));
static RAM_ACCESSES_SLUG: Lazy> =
- Lazy::new(|| Some(RAM_ACCESSES_SLUG_STR.parse().expect(METRIC_KIND_SLUG_ERROR)));
-static RAM_ACCESSES_UNITS: Lazy = Lazy::new(|| {
- RAM_ACCESSES_UNITS_STR
- .parse()
- .expect(METRIC_KIND_UNITS_ERROR)
-});
+ Lazy::new(|| Some(RAM_ACCESSES_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR)));
+static RAM_ACCESSES_UNITS: Lazy =
+ Lazy::new(|| RAM_ACCESSES_UNITS_STR.parse().expect(MEASURE_UNITS_ERROR));
pub const ESTIMATED_CYCLES_NAME_STR: &str = "Estimated Cycles";
pub const ESTIMATED_CYCLES_SLUG_STR: &str = "estimated-cycles";
pub const ESTIMATED_CYCLES_UNITS_STR: &str = "estimated cycles";
-static ESTIMATED_CYCLES_NAME: Lazy = Lazy::new(|| {
- ESTIMATED_CYCLES_NAME_STR
- .parse()
- .expect(METRIC_KIND_NAME_ERROR)
-});
-static ESTIMATED_CYCLES_SLUG: Lazy> = Lazy::new(|| {
- Some(
- ESTIMATED_CYCLES_SLUG_STR
- .parse()
- .expect(METRIC_KIND_SLUG_ERROR),
- )
-});
+static ESTIMATED_CYCLES_NAME: Lazy =
+ Lazy::new(|| ESTIMATED_CYCLES_NAME_STR.parse().expect(MEASURE_NAME_ERROR));
+static ESTIMATED_CYCLES_SLUG: Lazy> =
+ Lazy::new(|| Some(ESTIMATED_CYCLES_SLUG_STR.parse().expect(MEASURE_SLUG_ERROR)));
static ESTIMATED_CYCLES_UNITS: Lazy = Lazy::new(|| {
ESTIMATED_CYCLES_UNITS_STR
.parse()
- .expect(METRIC_KIND_UNITS_ERROR)
+ .expect(MEASURE_UNITS_ERROR)
});
-crate::typed_uuid::typed_uuid!(MetricKindUuid);
+crate::typed_uuid::typed_uuid!(MeasureUuid);
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(JsonSchema))]
-pub struct JsonNewMetricKind {
+pub struct JsonNewMeasure {
pub name: NonEmpty,
pub slug: Option,
pub units: NonEmpty,
}
-impl JsonNewMetricKind {
+impl JsonNewMeasure {
pub fn latency() -> Self {
Self {
name: LATENCY_NAME.clone(),
@@ -185,15 +165,15 @@ impl JsonNewMetricKind {
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(JsonSchema))]
-pub struct JsonMetricKinds(pub Vec);
+pub struct JsonMeasures(pub Vec);
-crate::from_vec!(JsonMetricKinds[JsonMetricKind]);
+crate::from_vec!(JsonMeasures[JsonMeasure]);
#[typeshare::typeshare]
#[derive(Debug, Clone, Deserialize, Serialize)]
#[cfg_attr(feature = "schema", derive(JsonSchema))]
-pub struct JsonMetricKind {
- pub uuid: MetricKindUuid,
+pub struct JsonMeasure {
+ pub uuid: MeasureUuid,
pub project: ProjectUuid,
pub name: NonEmpty,
pub slug: Slug,
@@ -202,7 +182,7 @@ pub struct JsonMetricKind {
pub modified: DateTime,
}
-impl fmt::Display for JsonMetricKind {
+impl fmt::Display for JsonMeasure {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}: {}", self.name, self.units)
}
@@ -210,7 +190,7 @@ impl fmt::Display for JsonMetricKind {
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(JsonSchema))]
-pub struct JsonUpdateMetricKind {
+pub struct JsonUpdateMeasure {
pub name: Option,
pub slug: Option,
pub units: Option,
diff --git a/lib/bencher_json/src/project/metric/mod.rs b/lib/bencher_json/src/project/metric/mod.rs
index ca46a664d..9b9d78d6a 100644
--- a/lib/bencher_json/src/project/metric/mod.rs
+++ b/lib/bencher_json/src/project/metric/mod.rs
@@ -18,10 +18,10 @@ crate::typed_uuid::typed_uuid!(MetricUuid);
pub type JsonResultsMap = HashMap;
#[typeshare::typeshare]
-pub type JsonMetricsMap = HashMap;
+pub type JsonMetricsMap = HashMap;
#[typeshare::typeshare]
-pub type MetricKind = ResourceId;
+pub type Measure = ResourceId;
#[typeshare::typeshare]
#[derive(Debug, Copy, Clone, Default, Eq, Serialize, Deserialize)]
diff --git a/lib/bencher_json/src/project/mod.rs b/lib/bencher_json/src/project/mod.rs
index 169ef3b0c..4c632dd87 100644
--- a/lib/bencher_json/src/project/mod.rs
+++ b/lib/bencher_json/src/project/mod.rs
@@ -12,8 +12,8 @@ pub mod alert;
pub mod benchmark;
pub mod boundary;
pub mod branch;
+pub mod measure;
pub mod metric;
-pub mod metric_kind;
pub mod perf;
pub mod report;
pub mod testbed;
diff --git a/lib/bencher_json/src/project/perf.rs b/lib/bencher_json/src/project/perf.rs
index 7b096b4a5..248648d8f 100644
--- a/lib/bencher_json/src/project/perf.rs
+++ b/lib/bencher_json/src/project/perf.rs
@@ -6,8 +6,8 @@ use url::Url;
use crate::urlencoded::{from_urlencoded_list, to_urlencoded, to_urlencoded_list, UrlEncodedError};
use crate::{
- BenchmarkUuid, BranchUuid, DateTime, DateTimeMillis, JsonBenchmark, JsonBranch, JsonMetricKind,
- JsonProject, JsonTestbed, MetricKindUuid, ReportUuid, TestbedUuid,
+ BenchmarkUuid, BranchUuid, DateTime, DateTimeMillis, JsonBenchmark, JsonBranch, JsonMeasure,
+ JsonProject, JsonTestbed, MeasureUuid, ReportUuid, TestbedUuid,
};
use super::alert::JsonPerfAlert;
@@ -27,10 +27,12 @@ crate::typed_uuid::typed_uuid!(PerfUuid);
#[cfg_attr(feature = "schema", derive(JsonSchema))]
pub struct JsonPerfQueryParams {
pub title: Option,
- pub metric_kinds: String,
pub branches: String,
pub testbeds: String,
pub benchmarks: String,
+ // TODO remove in due time
+ #[serde(alias = "metric_kinds")]
+ pub measures: String,
pub start_time: Option,
pub end_time: Option,
}
@@ -40,10 +42,10 @@ pub struct JsonPerfQueryParams {
#[typeshare::typeshare]
#[derive(Debug, Clone)]
pub struct JsonPerfQuery {
- pub metric_kinds: Vec,
pub branches: Vec,
pub testbeds: Vec,
pub benchmarks: Vec,
+ pub measures: Vec,
pub start_time: Option,
pub end_time: Option,
}
@@ -54,24 +56,24 @@ impl TryFrom for JsonPerfQuery {
fn try_from(query_params: JsonPerfQueryParams) -> Result {
let JsonPerfQueryParams {
title: _,
- metric_kinds,
branches,
testbeds,
benchmarks,
+ measures,
start_time,
end_time,
} = query_params;
- let metric_kinds = from_urlencoded_list(&metric_kinds)?;
let branches = from_urlencoded_list(&branches)?;
let testbeds = from_urlencoded_list(&testbeds)?;
let benchmarks = from_urlencoded_list(&benchmarks)?;
+ let measures = from_urlencoded_list(&measures)?;
Ok(Self {
- metric_kinds,
branches,
testbeds,
benchmarks,
+ measures,
start_time: start_time.map(Into::into),
end_time: end_time.map(Into::into),
})
@@ -118,10 +120,10 @@ impl JsonPerfQuery {
QUERY_KEYS
.into_iter()
.zip([
- Some(self.metric_kinds()),
Some(self.branches()),
Some(self.testbeds()),
Some(self.benchmarks()),
+ Some(self.measures()),
self.start_time_str(),
self.end_time_str(),
])
@@ -130,10 +132,6 @@ impl JsonPerfQuery {
.map_err(UrlEncodedError::Vec)
}
- pub fn metric_kinds(&self) -> String {
- to_urlencoded_list(&self.metric_kinds)
- }
-
pub fn branches(&self) -> String {
to_urlencoded_list(&self.branches)
}
@@ -146,6 +144,10 @@ impl JsonPerfQuery {
to_urlencoded_list(&self.benchmarks)
}
+ pub fn measures(&self) -> String {
+ to_urlencoded_list(&self.measures)
+ }
+
pub fn start_time(&self) -> Option {
self.start_time.map(Into::into)
}
@@ -168,30 +170,27 @@ impl JsonPerfQuery {
#[cfg_attr(feature = "schema", derive(JsonSchema))]
#[serde(rename_all = "snake_case")]
pub enum PerfQueryKey {
- MetricKinds,
Branches,
Testbeds,
Benchmarks,
+ Measures,
StartTime,
EndTime,
// Console Keys
LowerBoundary,
UpperBoundary,
+ /// TODO remove in due time
+ MetricKinds,
}
-pub const METRIC_KINDS: &str = "metric_kinds";
pub const BRANCHES: &str = "branches";
pub const TESTBEDS: &str = "testbeds";
pub const BENCHMARKS: &str = "benchmarks";
+pub const MEASURES: &str = "measures";
pub const START_TIME: &str = "start_time";
pub const END_TIME: &str = "end_time";
const QUERY_KEYS: [&str; 6] = [
- METRIC_KINDS,
- BRANCHES,
- TESTBEDS,
- BENCHMARKS,
- START_TIME,
- END_TIME,
+ BRANCHES, TESTBEDS, BENCHMARKS, MEASURES, START_TIME, END_TIME,
];
// Console Keys
pub const LOWER_BOUNDARY: &str = "lower_boundary";
@@ -211,10 +210,10 @@ pub struct JsonPerf {
#[derive(Debug, Clone, Deserialize, Serialize)]
#[cfg_attr(feature = "schema", derive(JsonSchema))]
pub struct JsonPerfMetrics {
- pub metric_kind: JsonMetricKind,
pub branch: JsonBranch,
pub testbed: JsonTestbed,
pub benchmark: JsonBenchmark,
+ pub measure: JsonMeasure,
pub metrics: Vec,
}
@@ -288,8 +287,8 @@ pub mod table {
use tabled::{Table, Tabled};
use crate::{
- project::branch::VersionNumber, DateTime, JsonBenchmark, JsonBranch, JsonMetric,
- JsonMetricKind, JsonPerf, JsonProject, JsonTestbed,
+ project::branch::VersionNumber, DateTime, JsonBenchmark, JsonBranch, JsonMeasure,
+ JsonMetric, JsonPerf, JsonProject, JsonTestbed,
};
use super::Iteration;
@@ -301,10 +300,10 @@ pub mod table {
for metric in result.metrics {
perf_table.push(PerfTable {
project: json_perf.project.clone(),
- metric_kind: result.metric_kind.clone(),
branch: result.branch.clone(),
testbed: result.testbed.clone(),
benchmark: result.benchmark.clone(),
+ measure: result.measure.clone(),
iteration: metric.iteration,
start_time: metric.start_time,
end_time: metric.end_time,
@@ -324,14 +323,14 @@ pub mod table {
pub struct PerfTable {
#[tabled(rename = "Project")]
pub project: JsonProject,
- #[tabled(rename = "Metric Kind")]
- pub metric_kind: JsonMetricKind,
#[tabled(rename = "Branch")]
pub branch: JsonBranch,
#[tabled(rename = "Testbed")]
pub testbed: JsonTestbed,
#[tabled(rename = "Benchmark")]
pub benchmark: JsonBenchmark,
+ #[tabled(rename = "Measure")]
+ pub measure: JsonMeasure,
#[tabled(rename = "Iteration")]
pub iteration: Iteration,
#[tabled(rename = "Start Time")]
diff --git a/lib/bencher_json/src/project/report.rs b/lib/bencher_json/src/project/report.rs
index 3de9ed923..8216fd07c 100644
--- a/lib/bencher_json/src/project/report.rs
+++ b/lib/bencher_json/src/project/report.rs
@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
use crate::{
urlencoded::{from_urlencoded, to_urlencoded, UrlEncodedError},
- JsonAlert, JsonMetricKind, JsonProject, JsonTestbed, JsonUser, ResourceId,
+ JsonAlert, JsonMeasure, JsonProject, JsonTestbed, JsonUser, ResourceId,
};
use super::{
@@ -241,7 +241,10 @@ pub type JsonReportIteration = Vec;
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(JsonSchema))]
pub struct JsonReportResult {
- pub metric_kind: JsonMetricKind,
+ /// TODO remove in due time
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub metric_kind: Option,
+ pub measure: JsonMeasure,
// The threshold should be the same for all the benchmark results
pub threshold: Option,
pub benchmarks: Vec,
diff --git a/lib/bencher_json/src/project/threshold.rs b/lib/bencher_json/src/project/threshold.rs
index deff47d46..56730e8b8 100644
--- a/lib/bencher_json/src/project/threshold.rs
+++ b/lib/bencher_json/src/project/threshold.rs
@@ -3,7 +3,7 @@ use bencher_valid::{Boundary, DateTime, SampleSize, Window};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use crate::{JsonBranch, JsonMetricKind, JsonTestbed, ProjectUuid, ResourceId};
+use crate::{JsonBranch, JsonMeasure, JsonTestbed, ProjectUuid, ResourceId};
crate::typed_uuid::typed_uuid!(ThresholdUuid);
crate::typed_uuid::typed_uuid!(StatisticUuid);
@@ -11,9 +11,9 @@ crate::typed_uuid::typed_uuid!(StatisticUuid);
#[derive(Debug, Clone, Deserialize, Serialize)]
#[cfg_attr(feature = "schema", derive(JsonSchema))]
pub struct JsonNewThreshold {
- pub metric_kind: ResourceId,
pub branch: ResourceId,
pub testbed: ResourceId,
+ pub measure: ResourceId,
#[serde(flatten)]
pub statistic: JsonNewStatistic,
}
@@ -65,9 +65,12 @@ crate::from_vec!(JsonThresholds[JsonThreshold]);
pub struct JsonThreshold {
pub uuid: ThresholdUuid,
pub project: ProjectUuid,
- pub metric_kind: JsonMetricKind,
pub branch: JsonBranch,
pub testbed: JsonTestbed,
+ /// TODO remove in due time
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub metric_kind: Option,
+ pub measure: JsonMeasure,
pub statistic: JsonStatistic,
pub created: DateTime,
pub modified: DateTime,
diff --git a/lib/bencher_plot/decimal.json b/lib/bencher_plot/decimal.json
index a4d00f4c9..98f507183 100644
--- a/lib/bencher_plot/decimal.json
+++ b/lib/bencher_plot/decimal.json
@@ -13,15 +13,6 @@
"end_time": null,
"results": [
{
- "metric_kind": {
- "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
- "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
- "name": "Latency",
- "slug": "latency",
- "units": "nanoseconds (ns)",
- "created": "2023-07-02T12:53:33Z",
- "modified": "2023-07-02T12:53:33Z"
- },
"branch": {
"uuid": "ff685107-8b8b-4034-9133-c41f644a0538",
"project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
@@ -46,6 +37,15 @@
"created": "2023-07-02T12:53:33Z",
"modified": "2023-07-02T12:53:33Z"
},
+ "measure": {
+ "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
+ "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
+ "name": "Latency",
+ "slug": "latency",
+ "units": "nanoseconds (ns)",
+ "created": "2023-07-02T12:53:33Z",
+ "modified": "2023-07-02T12:53:33Z"
+ },
"metrics": [
{
"report": "5fcb6b56-d8a3-4507-a997-5f1ce98140ba",
@@ -155,15 +155,6 @@
]
},
{
- "metric_kind": {
- "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
- "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
- "name": "Latency",
- "slug": "latency",
- "units": "nanoseconds (ns)",
- "created": "2023-07-02T12:53:33Z",
- "modified": "2023-07-02T12:53:33Z"
- },
"branch": {
"uuid": "ff685107-8b8b-4034-9133-c41f644a0538",
"project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
@@ -188,6 +179,15 @@
"created": "2023-07-02T12:53:33Z",
"modified": "2023-07-02T12:53:33Z"
},
+ "measure": {
+ "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
+ "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
+ "name": "Latency",
+ "slug": "latency",
+ "units": "nanoseconds (ns)",
+ "created": "2023-07-02T12:53:33Z",
+ "modified": "2023-07-02T12:53:33Z"
+ },
"metrics": [
{
"report": "5fcb6b56-d8a3-4507-a997-5f1ce98140ba",
@@ -297,15 +297,6 @@
]
},
{
- "metric_kind": {
- "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
- "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
- "name": "Latency",
- "slug": "latency",
- "units": "nanoseconds (ns)",
- "created": "2023-07-02T12:53:33Z",
- "modified": "2023-07-02T12:53:33Z"
- },
"branch": {
"uuid": "ff685107-8b8b-4034-9133-c41f644a0538",
"project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
@@ -330,6 +321,15 @@
"created": "2023-07-02T12:53:33Z",
"modified": "2023-07-02T12:53:33Z"
},
+ "measure": {
+ "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
+ "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
+ "name": "Latency",
+ "slug": "latency",
+ "units": "nanoseconds (ns)",
+ "created": "2023-07-02T12:53:33Z",
+ "modified": "2023-07-02T12:53:33Z"
+ },
"metrics": [
{
"report": "5fcb6b56-d8a3-4507-a997-5f1ce98140ba",
diff --git a/lib/bencher_plot/perf.json b/lib/bencher_plot/perf.json
index 23efc896f..8cb3e4625 100644
--- a/lib/bencher_plot/perf.json
+++ b/lib/bencher_plot/perf.json
@@ -13,15 +13,6 @@
"end_time": null,
"results": [
{
- "metric_kind": {
- "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
- "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
- "name": "Latency",
- "slug": "latency",
- "units": "nanoseconds (ns)",
- "created": "2023-07-02T12:53:33Z",
- "modified": "2023-07-02T12:53:33Z"
- },
"branch": {
"uuid": "7d7e73de-78c2-43f7-bc2a-da31a5b9a819",
"project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
@@ -46,6 +37,15 @@
"created": "2023-07-02T12:53:33Z",
"modified": "2023-07-02T12:53:33Z"
},
+ "measure": {
+ "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
+ "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
+ "name": "Latency",
+ "slug": "latency",
+ "units": "nanoseconds (ns)",
+ "created": "2023-07-02T12:53:33Z",
+ "modified": "2023-07-02T12:53:33Z"
+ },
"metrics": [
{
"report": "ef582192-c7f4-47a0-8668-55cf7d99d8cc",
@@ -218,15 +218,6 @@
]
},
{
- "metric_kind": {
- "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
- "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
- "name": "Latency",
- "slug": "latency",
- "units": "nanoseconds (ns)",
- "created": "2023-07-02T12:53:33Z",
- "modified": "2023-07-02T12:53:33Z"
- },
"branch": {
"uuid": "7d7e73de-78c2-43f7-bc2a-da31a5b9a819",
"project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
@@ -251,6 +242,15 @@
"created": "2023-07-02T12:53:33Z",
"modified": "2023-07-02T12:53:33Z"
},
+ "measure": {
+ "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
+ "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
+ "name": "Latency",
+ "slug": "latency",
+ "units": "nanoseconds (ns)",
+ "created": "2023-07-02T12:53:33Z",
+ "modified": "2023-07-02T12:53:33Z"
+ },
"metrics": [
{
"report": "ef582192-c7f4-47a0-8668-55cf7d99d8cc",
@@ -423,15 +423,6 @@
]
},
{
- "metric_kind": {
- "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
- "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
- "name": "Latency",
- "slug": "latency",
- "units": "nanoseconds (ns)",
- "created": "2023-07-02T12:53:33Z",
- "modified": "2023-07-02T12:53:33Z"
- },
"branch": {
"uuid": "7d7e73de-78c2-43f7-bc2a-da31a5b9a819",
"project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
@@ -456,6 +447,15 @@
"created": "2023-07-02T12:53:33Z",
"modified": "2023-07-02T12:53:33Z"
},
+ "measure": {
+ "uuid": "61a385d0-f19d-4f20-895a-e3c684ec6cbc",
+ "project": "c7fd3581-73d1-443c-b30f-6aa5c1c516cf",
+ "name": "Latency",
+ "slug": "latency",
+ "units": "nanoseconds (ns)",
+ "created": "2023-07-02T12:53:33Z",
+ "modified": "2023-07-02T12:53:33Z"
+ },
"metrics": [
{
"report": "ef582192-c7f4-47a0-8668-55cf7d99d8cc",
diff --git a/lib/bencher_plot/src/line.rs b/lib/bencher_plot/src/line.rs
index 5639a4237..261506601 100644
--- a/lib/bencher_plot/src/line.rs
+++ b/lib/bencher_plot/src/line.rs
@@ -282,8 +282,8 @@ impl PerfData {
let y_desc = json_perf
.results
.first()
- .map_or("Units: unitless".to_owned(), |result| {
- result.metric_kind.to_string()
+ .map_or("Measure: unitless".to_owned(), |result| {
+ result.measure.to_string()
});
Some(PerfData {
lines,
diff --git a/lib/bencher_valid/swagger.json b/lib/bencher_valid/swagger.json
index c317ea822..c309004e4 100644
--- a/lib/bencher_valid/swagger.json
+++ b/lib/bencher_valid/swagger.json
@@ -2981,13 +2981,13 @@
}
}
},
- "/v0/projects/{project}/metric-kinds": {
+ "/v0/projects/{project}/measures": {
"get": {
"tags": [
"projects",
- "metric kinds"
+ "measures"
],
- "operationId": "proj_metric_kinds_get",
+ "operationId": "proj_measures_get",
"parameters": [
{
"in": "path",
@@ -3028,7 +3028,7 @@
"in": "query",
"name": "sort",
"schema": {
- "$ref": "#/components/schemas/ProjMetricKindsSort"
+ "$ref": "#/components/schemas/ProjMeasuresSort"
}
},
{
@@ -3075,7 +3075,7 @@
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/JsonMetricKinds"
+ "$ref": "#/components/schemas/JsonMeasures"
}
}
}
@@ -3091,9 +3091,9 @@
"post": {
"tags": [
"projects",
- "metric kinds"
+ "measures"
],
- "operationId": "proj_metric_kind_post",
+ "operationId": "proj_measure_post",
"parameters": [
{
"in": "path",
@@ -3108,7 +3108,7 @@
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/JsonNewMetricKind"
+ "$ref": "#/components/schemas/JsonNewMeasure"
}
}
},
@@ -3150,7 +3150,7 @@
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/JsonMetricKind"
+ "$ref": "#/components/schemas/JsonMeasure"
}
}
}
@@ -3164,17 +3164,17 @@
}
}
},
- "/v0/projects/{project}/metric-kinds/{metric_kind}": {
+ "/v0/projects/{project}/measures/{measure}": {
"get": {
"tags": [
"projects",
- "metric kinds"
+ "measures"
],
- "operationId": "proj_metric_kind_get",
+ "operationId": "proj_measure_get",
"parameters": [
{
"in": "path",
- "name": "metric_kind",
+ "name": "measure",
"required": true,
"schema": {
"$ref": "#/components/schemas/ResourceId"
@@ -3225,7 +3225,7 @@
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/JsonMetricKind"
+ "$ref": "#/components/schemas/JsonMeasure"
}
}
}
@@ -3241,13 +3241,13 @@
"delete": {
"tags": [
"projects",
- "metric kinds"
+ "measures"
],
- "operationId": "proj_metric_kind_delete",
+ "operationId": "proj_measure_delete",
"parameters": [
{
"in": "path",
- "name": "metric_kind",
+ "name": "measure",
"required": true,
"schema": {
"$ref": "#/components/schemas/ResourceId"
@@ -3314,13 +3314,13 @@
"patch": {
"tags": [
"projects",
- "metric kinds"
+ "measures"
],
- "operationId": "proj_metric_kind_patch",
+ "operationId": "proj_measure_patch",
"parameters": [
{
"in": "path",
- "name": "metric_kind",
+ "name": "measure",
"required": true,
"schema": {
"$ref": "#/components/schemas/ResourceId"
@@ -3339,7 +3339,7 @@
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/JsonUpdateMetricKind"
+ "$ref": "#/components/schemas/JsonUpdateMeasure"
}
}
},
@@ -3381,7 +3381,7 @@
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/JsonMetricKind"
+ "$ref": "#/components/schemas/JsonMeasure"
}
}
}
@@ -3436,7 +3436,7 @@
},
{
"in": "query",
- "name": "metric_kinds",
+ "name": "measures",
"required": true,
"schema": {
"type": "string"
@@ -3557,7 +3557,7 @@
},
{
"in": "query",
- "name": "metric_kinds",
+ "name": "measures",
"required": true,
"schema": {
"type": "string"
@@ -6742,6 +6742,47 @@
"email"
]
},
+ "JsonMeasure": {
+ "type": "object",
+ "properties": {
+ "created": {
+ "$ref": "#/components/schemas/DateTime"
+ },
+ "modified": {
+ "$ref": "#/components/schemas/DateTime"
+ },
+ "name": {
+ "$ref": "#/components/schemas/NonEmpty"
+ },
+ "project": {
+ "$ref": "#/components/schemas/ProjectUuid"
+ },
+ "slug": {
+ "$ref": "#/components/schemas/Slug"
+ },
+ "units": {
+ "$ref": "#/components/schemas/NonEmpty"
+ },
+ "uuid": {
+ "$ref": "#/components/schemas/MeasureUuid"
+ }
+ },
+ "required": [
+ "created",
+ "modified",
+ "name",
+ "project",
+ "slug",
+ "units",
+ "uuid"
+ ]
+ },
+ "JsonMeasures": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/JsonMeasure"
+ }
+ },
"JsonMember": {
"type": "object",
"properties": {
@@ -6805,47 +6846,6 @@
"value"
]
},
- "JsonMetricKind": {
- "type": "object",
- "properties": {
- "created": {
- "$ref": "#/components/schemas/DateTime"
- },
- "modified": {
- "$ref": "#/components/schemas/DateTime"
- },
- "name": {
- "$ref": "#/components/schemas/NonEmpty"
- },
- "project": {
- "$ref": "#/components/schemas/ProjectUuid"
- },
- "slug": {
- "$ref": "#/components/schemas/Slug"
- },
- "units": {
- "$ref": "#/components/schemas/NonEmpty"
- },
- "uuid": {
- "$ref": "#/components/schemas/MetricKindUuid"
- }
- },
- "required": [
- "created",
- "modified",
- "name",
- "project",
- "slug",
- "units",
- "uuid"
- ]
- },
- "JsonMetricKinds": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/JsonMetricKind"
- }
- },
"JsonNewBenchmark": {
"type": "object",
"properties": {
@@ -6896,50 +6896,50 @@
"name"
]
},
- "JsonNewMember": {
+ "JsonNewMeasure": {
"type": "object",
"properties": {
- "email": {
- "$ref": "#/components/schemas/Email"
- },
"name": {
+ "$ref": "#/components/schemas/NonEmpty"
+ },
+ "slug": {
"nullable": true,
"allOf": [
{
- "$ref": "#/components/schemas/UserName"
+ "$ref": "#/components/schemas/Slug"
}
]
},
- "role": {
- "$ref": "#/components/schemas/OrganizationRole"
+ "units": {
+ "$ref": "#/components/schemas/NonEmpty"
}
},
"required": [
- "email",
- "role"
+ "name",
+ "units"
]
},
- "JsonNewMetricKind": {
+ "JsonNewMember": {
"type": "object",
"properties": {
- "name": {
- "$ref": "#/components/schemas/NonEmpty"
+ "email": {
+ "$ref": "#/components/schemas/Email"
},
- "slug": {
+ "name": {
"nullable": true,
"allOf": [
{
- "$ref": "#/components/schemas/Slug"
+ "$ref": "#/components/schemas/UserName"
}
]
},
- "units": {
- "$ref": "#/components/schemas/NonEmpty"
+ "role": {
+ "$ref": "#/components/schemas/OrganizationRole"
}
},
"required": [
- "name",
- "units"
+ "email",
+ "role"
]
},
"JsonNewOrganization": {
@@ -7123,7 +7123,7 @@
}
]
},
- "metric_kind": {
+ "measure": {
"$ref": "#/components/schemas/ResourceId"
},
"min_sample_size": {
@@ -7159,7 +7159,7 @@
},
"required": [
"branch",
- "metric_kind",
+ "measure",
"test",
"testbed"
]
@@ -7398,8 +7398,8 @@
"branch": {
"$ref": "#/components/schemas/JsonBranch"
},
- "metric_kind": {
- "$ref": "#/components/schemas/JsonMetricKind"
+ "measure": {
+ "$ref": "#/components/schemas/JsonMeasure"
},
"metrics": {
"type": "array",
@@ -7414,7 +7414,7 @@
"required": [
"benchmark",
"branch",
- "metric_kind",
+ "measure",
"metrics",
"testbed"
]
@@ -7726,8 +7726,17 @@
"$ref": "#/components/schemas/JsonBenchmarkMetric"
}
},
+ "measure": {
+ "$ref": "#/components/schemas/JsonMeasure"
+ },
"metric_kind": {
- "$ref": "#/components/schemas/JsonMetricKind"
+ "nullable": true,
+ "description": "TODO remove in due time",
+ "allOf": [
+ {
+ "$ref": "#/components/schemas/JsonMeasure"
+ }
+ ]
},
"threshold": {
"nullable": true,
@@ -7740,7 +7749,7 @@
},
"required": [
"benchmarks",
- "metric_kind"
+ "measure"
]
},
"JsonReportSettings": {
@@ -8112,8 +8121,17 @@
"created": {
"$ref": "#/components/schemas/DateTime"
},
+ "measure": {
+ "$ref": "#/components/schemas/JsonMeasure"
+ },
"metric_kind": {
- "$ref": "#/components/schemas/JsonMetricKind"
+ "nullable": true,
+ "description": "TODO remove in due time",
+ "allOf": [
+ {
+ "$ref": "#/components/schemas/JsonMeasure"
+ }
+ ]
},
"modified": {
"$ref": "#/components/schemas/DateTime"
@@ -8134,7 +8152,7 @@
"required": [
"branch",
"created",
- "metric_kind",
+ "measure",
"modified",
"project",
"statistic",
@@ -8338,20 +8356,7 @@
"config"
]
},
- "JsonUpdateMember": {
- "type": "object",
- "properties": {
- "role": {
- "nullable": true,
- "allOf": [
- {
- "$ref": "#/components/schemas/OrganizationRole"
- }
- ]
- }
- }
- },
- "JsonUpdateMetricKind": {
+ "JsonUpdateMeasure": {
"type": "object",
"properties": {
"name": {
@@ -8380,6 +8385,19 @@
}
}
},
+ "JsonUpdateMember": {
+ "type": "object",
+ "properties": {
+ "role": {
+ "nullable": true,
+ "allOf": [
+ {
+ "$ref": "#/components/schemas/OrganizationRole"
+ }
+ ]
+ }
+ }
+ },
"JsonUpdateOrganization": {
"anyOf": [
{
@@ -8597,7 +8615,7 @@
"critical"
]
},
- "MetricKindUuid": {
+ "MeasureUuid": {
"type": "string",
"format": "uuid"
},
@@ -8852,7 +8870,7 @@
"name"
]
},
- "ProjMetricKindsSort": {
+ "ProjMeasuresSort": {
"type": "string",
"enum": [
"name"
@@ -8931,11 +8949,11 @@
"name": "endpoint"
},
{
- "name": "members"
+ "name": "measures",
+ "description": "Measures"
},
{
- "name": "metric kinds",
- "description": "Metric Kinds"
+ "name": "members"
},
{
"name": "organizations",
diff --git a/services/api/migrations/2023-12-04-101416_measure/down.sql b/services/api/migrations/2023-12-04-101416_measure/down.sql
new file mode 100644
index 000000000..b3dc93f82
--- /dev/null
+++ b/services/api/migrations/2023-12-04-101416_measure/down.sql
@@ -0,0 +1,111 @@
+PRAGMA foreign_keys = off;
+-- metric kind
+CREATE TABLE metric_kind (
+ id INTEGER PRIMARY KEY NOT NULL,
+ uuid TEXT NOT NULL UNIQUE,
+ project_id INTEGER NOT NULL,
+ name TEXT NOT NULL,
+ slug TEXT NOT NULL,
+ units TEXT NOT NULL,
+ created BIGINT NOT NULL,
+ modified BIGINT NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE CASCADE,
+ UNIQUE(project_id, name),
+ UNIQUE(project_id, slug)
+);
+INSERT INTO metric_kind(
+ id,
+ uuid,
+ project_id,
+ name,
+ slug,
+ units,
+ created,
+ modified
+ )
+SELECT id,
+ uuid,
+ project_id,
+ name,
+ slug,
+ units,
+ created,
+ modified
+FROM measure;
+DROP TABLE measure;
+-- metric
+CREATE TABLE down_metric (
+ id INTEGER PRIMARY KEY NOT NULL,
+ uuid TEXT NOT NULL UNIQUE,
+ perf_id INTEGER NOT NULL,
+ metric_kind_id INTEGER NOT NULL,
+ value DOUBLE NOT NULL,
+ lower_value DOUBLE,
+ upper_value DOUBLE,
+ FOREIGN KEY (perf_id) REFERENCES perf (id) ON DELETE CASCADE,
+ FOREIGN KEY (metric_kind_id) REFERENCES metric_kind (id),
+ UNIQUE(perf_id, metric_kind_id)
+);
+INSERT INTO down_metric(
+ id,
+ uuid,
+ perf_id,
+ metric_kind_id,
+ value,
+ lower_value,
+ upper_value
+ )
+SELECT id,
+ uuid,
+ perf_id,
+ measure_id,
+ value,
+ lower_value,
+ upper_value
+FROM metric;
+DROP TABLE metric;
+ALTER TABLE down_metric
+ RENAME TO metric;
+-- threshold
+CREATE TABLE down_threshold (
+ id INTEGER PRIMARY KEY NOT NULL,
+ uuid TEXT NOT NULL UNIQUE,
+ project_id INTEGER NOT NULL,
+ metric_kind_id INTEGER NOT NULL,
+ branch_id INTEGER NOT NULL,
+ testbed_id INTEGER NOT NULL,
+ statistic_id INTEGER,
+ created BIGINT NOT NULL,
+ modified BIGINT NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE CASCADE,
+ FOREIGN KEY (metric_kind_id) REFERENCES metric_kind (id),
+ FOREIGN KEY (branch_id) REFERENCES branch (id),
+ FOREIGN KEY (testbed_id) REFERENCES testbed (id),
+ FOREIGN KEY (statistic_id) REFERENCES statistic (id),
+ UNIQUE(metric_kind_id, branch_id, testbed_id)
+);
+INSERT INTO down_threshold(
+ id,
+ uuid,
+ project_id,
+ metric_kind_id,
+ branch_id,
+ testbed_id,
+ statistic_id,
+ created,
+ modified
+ )
+SELECT id,
+ uuid,
+ project_id,
+ measure_id,
+ branch_id,
+ testbed_id,
+ statistic_id,
+ created,
+ modified
+FROM threshold;
+DROP TABLE threshold;
+ALTER TABLE down_threshold
+ RENAME TO threshold;
+PRAGMA foreign_keys = on;
\ No newline at end of file
diff --git a/services/api/migrations/2023-12-04-101416_measure/up.sql b/services/api/migrations/2023-12-04-101416_measure/up.sql
new file mode 100644
index 000000000..e7ea7db6b
--- /dev/null
+++ b/services/api/migrations/2023-12-04-101416_measure/up.sql
@@ -0,0 +1,111 @@
+PRAGMA foreign_keys = off;
+-- measure
+CREATE TABLE measure (
+ id INTEGER PRIMARY KEY NOT NULL,
+ uuid TEXT NOT NULL UNIQUE,
+ project_id INTEGER NOT NULL,
+ name TEXT NOT NULL,
+ slug TEXT NOT NULL,
+ units TEXT NOT NULL,
+ created BIGINT NOT NULL,
+ modified BIGINT NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE CASCADE,
+ UNIQUE(project_id, name),
+ UNIQUE(project_id, slug)
+);
+INSERT INTO measure(
+ id,
+ uuid,
+ project_id,
+ name,
+ slug,
+ units,
+ created,
+ modified
+ )
+SELECT id,
+ uuid,
+ project_id,
+ name,
+ slug,
+ units,
+ created,
+ modified
+FROM metric_kind;
+DROP TABLE metric_kind;
+-- metric
+CREATE TABLE up_metric (
+ id INTEGER PRIMARY KEY NOT NULL,
+ uuid TEXT NOT NULL UNIQUE,
+ perf_id INTEGER NOT NULL,
+ measure_id INTEGER NOT NULL,
+ value DOUBLE NOT NULL,
+ lower_value DOUBLE,
+ upper_value DOUBLE,
+ FOREIGN KEY (perf_id) REFERENCES perf (id) ON DELETE CASCADE,
+ FOREIGN KEY (measure_id) REFERENCES measure (id),
+ UNIQUE(perf_id, measure_id)
+);
+INSERT INTO up_metric(
+ id,
+ uuid,
+ perf_id,
+ measure_id,
+ value,
+ lower_value,
+ upper_value
+ )
+SELECT id,
+ uuid,
+ perf_id,
+ metric_kind_id,
+ value,
+ lower_value,
+ upper_value
+FROM metric;
+DROP TABLE metric;
+ALTER TABLE up_metric
+ RENAME TO metric;
+-- threshold
+CREATE TABLE up_threshold (
+ id INTEGER PRIMARY KEY NOT NULL,
+ uuid TEXT NOT NULL UNIQUE,
+ project_id INTEGER NOT NULL,
+ branch_id INTEGER NOT NULL,
+ testbed_id INTEGER NOT NULL,
+ measure_id INTEGER NOT NULL,
+ statistic_id INTEGER,
+ created BIGINT NOT NULL,
+ modified BIGINT NOT NULL,
+ FOREIGN KEY (project_id) REFERENCES project (id) ON DELETE CASCADE,
+ FOREIGN KEY (branch_id) REFERENCES branch (id),
+ FOREIGN KEY (testbed_id) REFERENCES testbed (id),
+ FOREIGN KEY (measure_id) REFERENCES measure (id),
+ FOREIGN KEY (statistic_id) REFERENCES statistic (id),
+ UNIQUE(branch_id, testbed_id, measure_id)
+);
+INSERT INTO up_threshold(
+ id,
+ uuid,
+ project_id,
+ branch_id,
+ testbed_id,
+ measure_id,
+ statistic_id,
+ created,
+ modified
+ )
+SELECT id,
+ uuid,
+ project_id,
+ branch_id,
+ testbed_id,
+ metric_kind_id,
+ statistic_id,
+ created,
+ modified
+FROM threshold;
+DROP TABLE threshold;
+ALTER TABLE up_threshold
+ RENAME TO threshold;
+PRAGMA foreign_keys = on;
\ No newline at end of file
diff --git a/services/api/src/bin/swagger.rs b/services/api/src/bin/swagger.rs
index 06b290e56..b8280eae3 100644
--- a/services/api/src/bin/swagger.rs
+++ b/services/api/src/bin/swagger.rs
@@ -38,12 +38,12 @@ fn main() -> Result<(), SwaggerError> {
"auth" => TagDetails { description: Some("Auth".into()), external_docs: None},
"organizations" => TagDetails { description: Some("Organizations".into()), external_docs: None},
"projects" => TagDetails { description: Some("Projects".into()), external_docs: None},
- "perf" => TagDetails { description: Some("Perf Metrics".into()), external_docs: None},
"reports" => TagDetails { description: Some("Reports".into()), external_docs: None},
- "metric kinds" => TagDetails { description: Some("Metric Kinds".into()), external_docs: None},
+ "perf" => TagDetails { description: Some("Perf Metrics".into()), external_docs: None},
"branches" => TagDetails { description: Some("Branches".into()), external_docs: None},
"testbeds" => TagDetails { description: Some("Testbeds".into()), external_docs: None},
"benchmarks" => TagDetails { description: Some("Benchmarks".into()), external_docs: None},
+ "measures" => TagDetails { description: Some("Measures".into()), external_docs: None},
"thresholds" => TagDetails { description: Some("Thresholds".into()), external_docs: None},
"statistics" => TagDetails { description: Some("Statistics".into()), external_docs: None},
"alerts" => TagDetails { description: Some("Alerts".into()), external_docs: None},
diff --git a/services/api/src/endpoints/mod.rs b/services/api/src/endpoints/mod.rs
index 497e48876..5be4a049c 100644
--- a/services/api/src/endpoints/mod.rs
+++ b/services/api/src/endpoints/mod.rs
@@ -108,18 +108,6 @@ impl Api {
}
api.register(project::allowed::proj_allowed_get)?;
- // Perf
- if http_options {
- api.register(project::perf::proj_perf_options)?;
- }
- api.register(project::perf::proj_perf_get)?;
-
- // Perf Image
- if http_options {
- api.register(project::perf::img::proj_perf_img_options)?;
- }
- api.register(project::perf::img::proj_perf_img_get)?;
-
// Reports
if http_options {
api.register(project::reports::proj_reports_options)?;
@@ -130,16 +118,17 @@ impl Api {
api.register(project::reports::proj_report_get)?;
api.register(project::reports::proj_report_delete)?;
- // Metric Kinds
+ // Perf
+ if http_options {
+ api.register(project::perf::proj_perf_options)?;
+ }
+ api.register(project::perf::proj_perf_get)?;
+
+ // Perf Image
if http_options {
- api.register(project::metric_kinds::proj_metric_kinds_options)?;
- api.register(project::metric_kinds::proj_metric_kind_options)?;
+ api.register(project::perf::img::proj_perf_img_options)?;
}
- api.register(project::metric_kinds::proj_metric_kinds_get)?;
- api.register(project::metric_kinds::proj_metric_kind_post)?;
- api.register(project::metric_kinds::proj_metric_kind_get)?;
- api.register(project::metric_kinds::proj_metric_kind_patch)?;
- api.register(project::metric_kinds::proj_metric_kind_delete)?;
+ api.register(project::perf::img::proj_perf_img_get)?;
// Branches
if http_options {
@@ -174,6 +163,17 @@ impl Api {
api.register(project::benchmarks::proj_benchmark_patch)?;
api.register(project::benchmarks::proj_benchmark_delete)?;
+ // Measures
+ if http_options {
+ api.register(project::measures::proj_measures_options)?;
+ api.register(project::measures::proj_measure_options)?;
+ }
+ api.register(project::measures::proj_measures_get)?;
+ api.register(project::measures::proj_measure_post)?;
+ api.register(project::measures::proj_measure_get)?;
+ api.register(project::measures::proj_measure_patch)?;
+ api.register(project::measures::proj_measure_delete)?;
+
// Thresholds
if http_options {
api.register(project::thresholds::proj_thresholds_options)?;
diff --git a/services/api/src/endpoints/organization/projects.rs b/services/api/src/endpoints/organization/projects.rs
index c1e473482..60f478bec 100644
--- a/services/api/src/endpoints/organization/projects.rs
+++ b/services/api/src/endpoints/organization/projects.rs
@@ -19,7 +19,7 @@ use crate::{
organization::QueryOrganization,
project::{
branch::{InsertBranch, QueryBranch},
- metric_kind::{InsertMetricKind, QueryMetricKind},
+ measure::{InsertMeasure, QueryMeasure},
project_role::InsertProjectRole,
testbed::{InsertTestbed, QueryTestbed},
threshold::InsertThreshold,
@@ -221,37 +221,25 @@ async fn post_inner(
.map_err(resource_conflict_err!(Testbed, insert_testbed))?;
let testbed_id = QueryTestbed::get_id(conn, insert_testbed.uuid)?;
- // Add a `latency` metric kind to the project
- let insert_metric_kind = InsertMetricKind::latency(conn, query_project.id)?;
- diesel::insert_into(schema::metric_kind::table)
- .values(&insert_metric_kind)
+ // Add a `latency` measure to the project
+ let insert_measure = InsertMeasure::latency(conn, query_project.id)?;
+ diesel::insert_into(schema::measure::table)
+ .values(&insert_measure)
.execute(conn)
- .map_err(resource_conflict_err!(MetricKind, insert_metric_kind))?;
- let metric_kind_id = QueryMetricKind::get_id(conn, insert_metric_kind.uuid)?;
+ .map_err(resource_conflict_err!(Measure, insert_measure))?;
+ let measure_id = QueryMeasure::get_id(conn, insert_measure.uuid)?;
// Add a `latency` threshold to the project
- InsertThreshold::upper_boundary(
- conn,
- query_project.id,
- metric_kind_id,
- branch_id,
- testbed_id,
- )?;
+ InsertThreshold::upper_boundary(conn, query_project.id, branch_id, testbed_id, measure_id)?;
- // Add a `throughput` metric kind to the project
- let insert_metric_kind = InsertMetricKind::throughput(conn, query_project.id)?;
- diesel::insert_into(schema::metric_kind::table)
- .values(&insert_metric_kind)
+ // Add a `throughput` measure to the project
+ let insert_measure = InsertMeasure::throughput(conn, query_project.id)?;
+ diesel::insert_into(schema::measure::table)
+ .values(&insert_measure)
.execute(conn)
- .map_err(resource_conflict_err!(MetricKind, insert_metric_kind))?;
- let metric_kind_id = QueryMetricKind::get_id(conn, insert_metric_kind.uuid)?;
+ .map_err(resource_conflict_err!(Measure, insert_measure))?;
+ let measure_id = QueryMeasure::get_id(conn, insert_measure.uuid)?;
// Add a `throughput` threshold to the project
- InsertThreshold::lower_boundary(
- conn,
- query_project.id,
- metric_kind_id,
- branch_id,
- testbed_id,
- )?;
+ InsertThreshold::lower_boundary(conn, query_project.id, branch_id, testbed_id, measure_id)?;
query_project.into_json(conn)
}
diff --git a/services/api/src/endpoints/project/metric_kinds.rs b/services/api/src/endpoints/project/measures.rs
similarity index 50%
rename from services/api/src/endpoints/project/metric_kinds.rs
rename to services/api/src/endpoints/project/measures.rs
index 36cd7d28b..83edb232e 100644
--- a/services/api/src/endpoints/project/metric_kinds.rs
+++ b/services/api/src/endpoints/project/measures.rs
@@ -1,6 +1,6 @@
use bencher_json::{
- project::metric_kind::JsonUpdateMetricKind, JsonDirection, JsonEmpty, JsonMetricKind,
- JsonMetricKinds, JsonNewMetricKind, JsonPagination, NonEmpty, ResourceId,
+ project::measure::JsonUpdateMeasure, JsonDirection, JsonEmpty, JsonMeasure, JsonMeasures,
+ JsonNewMeasure, JsonPagination, NonEmpty, ResourceId,
};
use bencher_rbac::project::Permission;
use diesel::{BelongingToDsl, ExpressionMethods, QueryDsl, RunQueryDsl};
@@ -18,7 +18,7 @@ use crate::{
model::user::auth::{AuthUser, PubBearerToken},
model::{
project::{
- metric_kind::{InsertMetricKind, QueryMetricKind, UpdateMetricKind},
+ measure::{InsertMeasure, QueryMeasure, UpdateMeasure},
QueryProject,
},
user::auth::BearerToken,
@@ -27,50 +27,50 @@ use crate::{
};
#[derive(Deserialize, JsonSchema)]
-pub struct ProjMetricKindsParams {
+pub struct ProjMeasuresParams {
pub project: ResourceId,
}
-pub type ProjMetricKindsPagination = JsonPagination;
+pub type ProjMeasuresPagination = JsonPagination;
#[derive(Clone, Copy, Default, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
-pub enum ProjMetricKindsSort {
+pub enum ProjMeasuresSort {
#[default]
Name,
}
#[derive(Deserialize, JsonSchema)]
-pub struct ProjMetricKindsQuery {
+pub struct ProjMeasuresQuery {
pub name: Option,
}
#[allow(clippy::unused_async)]
#[endpoint {
method = OPTIONS,
- path = "/v0/projects/{project}/metric-kinds",
- tags = ["projects", "metric kinds"]
+ path = "/v0/projects/{project}/measures",
+ tags = ["projects", "measures"]
}]
-pub async fn proj_metric_kinds_options(
+pub async fn proj_measures_options(
_rqctx: RequestContext,
- _path_params: Path,
- _pagination_params: Query,
- _query_params: Query,
+ _path_params: Path,
+ _pagination_params: Query,
+ _query_params: Query,
) -> Result {
Ok(Endpoint::cors(&[Get.into(), Post.into()]))
}
#[endpoint {
method = GET,
- path = "/v0/projects/{project}/metric-kinds",
- tags = ["projects", "metric kinds"]
+ path = "/v0/projects/{project}/measures",
+ tags = ["projects", "measures"]
}]
-pub async fn proj_metric_kinds_get(
+pub async fn proj_measures_get(
rqctx: RequestContext,
- path_params: Path,
- pagination_params: Query,
- query_params: Query,
-) -> Result, HttpError> {
+ path_params: Path,
+ pagination_params: Query,
+ query_params: Query,
+) -> Result, HttpError> {
let auth_user = AuthUser::new_pub(&rqctx).await?;
let json = get_ls_inner(
rqctx.context(),
@@ -86,25 +86,25 @@ pub async fn proj_metric_kinds_get(
async fn get_ls_inner(
context: &ApiContext,
auth_user: Option<&AuthUser>,
- path_params: ProjMetricKindsParams,
- pagination_params: ProjMetricKindsPagination,
- query_params: ProjMetricKindsQuery,
-) -> Result {
+ path_params: ProjMeasuresParams,
+ pagination_params: ProjMeasuresPagination,
+ query_params: ProjMeasuresQuery,
+) -> Result {
let conn = &mut *context.conn().await;
let query_project =
QueryProject::is_allowed_public(conn, &context.rbac, &path_params.project, auth_user)?;
- let mut query = QueryMetricKind::belonging_to(&query_project).into_boxed();
+ let mut query = QueryMeasure::belonging_to(&query_project).into_boxed();
if let Some(name) = query_params.name.as_ref() {
- query = query.filter(schema::metric_kind::name.eq(name.as_ref()));
+ query = query.filter(schema::measure::name.eq(name.as_ref()));
}
query = match pagination_params.order() {
- ProjMetricKindsSort::Name => match pagination_params.direction {
- Some(JsonDirection::Asc) | None => query.order(schema::metric_kind::name.asc()),
- Some(JsonDirection::Desc) => query.order(schema::metric_kind::name.desc()),
+ ProjMeasuresSort::Name => match pagination_params.direction {
+ Some(JsonDirection::Asc) | None => query.order(schema::measure::name.asc()),
+ Some(JsonDirection::Desc) => query.order(schema::measure::name.desc()),
},
};
@@ -112,24 +112,24 @@ async fn get_ls_inner(
Ok(query
.offset(pagination_params.offset())
.limit(pagination_params.limit())
- .load::(conn)
- .map_err(resource_not_found_err!(MetricKind, project))?
+ .load::(conn)
+ .map_err(resource_not_found_err!(Measure, project))?
.into_iter()
- .map(|metric_kind| metric_kind.into_json_for_project(project))
+ .map(|measure| measure.into_json_for_project(project))
.collect())
}
#[endpoint {
method = POST,
- path = "/v0/projects/{project}/metric-kinds",
- tags = ["projects", "metric kinds"]
+ path = "/v0/projects/{project}/measures",
+ tags = ["projects", "measures"]
}]
-pub async fn proj_metric_kind_post(
+pub async fn proj_measure_post(
rqctx: RequestContext,
bearer_token: BearerToken,
- path_params: Path,
- body: TypedBody,
-) -> Result, HttpError> {
+ path_params: Path,
+ body: TypedBody,
+) -> Result, HttpError> {
let auth_user = AuthUser::from_token(rqctx.context(), bearer_token).await?;
let json = post_inner(
rqctx.context(),
@@ -143,10 +143,10 @@ pub async fn proj_metric_kind_post(
async fn post_inner(
context: &ApiContext,
- path_params: ProjMetricKindsParams,
- json_metric_kind: JsonNewMetricKind,
+ path_params: ProjMeasuresParams,
+ json_measure: JsonNewMeasure,
auth_user: &AuthUser,
-) -> Result {
+) -> Result {
let conn = &mut *context.conn().await;
// Verify that the user is allowed
@@ -158,49 +158,49 @@ async fn post_inner(
Permission::Create,
)?;
- let insert_metric_kind = InsertMetricKind::from_json(conn, query_project.id, json_metric_kind)?;
+ let insert_measure = InsertMeasure::from_json(conn, query_project.id, json_measure)?;
- diesel::insert_into(schema::metric_kind::table)
- .values(&insert_metric_kind)
+ diesel::insert_into(schema::measure::table)
+ .values(&insert_measure)
.execute(conn)
- .map_err(resource_conflict_err!(MetricKind, insert_metric_kind))?;
+ .map_err(resource_conflict_err!(Measure, insert_measure))?;
- schema::metric_kind::table
- .filter(schema::metric_kind::uuid.eq(&insert_metric_kind.uuid))
- .first::(conn)
- .map(|metric_kind| metric_kind.into_json_for_project(&query_project))
- .map_err(resource_not_found_err!(MetricKind, insert_metric_kind))
+ schema::measure::table
+ .filter(schema::measure::uuid.eq(&insert_measure.uuid))
+ .first::(conn)
+ .map(|measure| measure.into_json_for_project(&query_project))
+ .map_err(resource_not_found_err!(Measure, insert_measure))
}
#[derive(Deserialize, JsonSchema)]
-pub struct ProjMetricKindParams {
+pub struct ProjMeasureParams {
pub project: ResourceId,
- pub metric_kind: ResourceId,
+ pub measure: ResourceId,
}
#[allow(clippy::unused_async)]
#[endpoint {
method = OPTIONS,
- path = "/v0/projects/{project}/metric-kinds/{metric_kind}",
- tags = ["projects", "metric kinds"]
+ path = "/v0/projects/{project}/measures/{measure}",
+ tags = ["projects", "measures"]
}]
-pub async fn proj_metric_kind_options(
+pub async fn proj_measure_options(
_rqctx: RequestContext,
- _path_params: Path,
+ _path_params: Path,
) -> Result {
Ok(Endpoint::cors(&[Get.into(), Patch.into(), Delete.into()]))
}
#[endpoint {
method = GET,
- path = "/v0/projects/{project}/metric-kinds/{metric_kind}",
- tags = ["projects", "metric kinds"]
+ path = "/v0/projects/{project}/measures/{measure}",
+ tags = ["projects", "measures"]
}]
-pub async fn proj_metric_kind_get(
+pub async fn proj_measure_get(
rqctx: RequestContext,
bearer_token: PubBearerToken,
- path_params: Path,
-) -> Result, HttpError> {
+ path_params: Path,
+) -> Result, HttpError> {
let auth_user = AuthUser::from_pub_token(rqctx.context(), bearer_token).await?;
let json = get_one_inner(
rqctx.context(),
@@ -213,35 +213,35 @@ pub async fn proj_metric_kind_get(
async fn get_one_inner(
context: &ApiContext,
- path_params: ProjMetricKindParams,
+ path_params: ProjMeasureParams,
auth_user: Option<&AuthUser>,
-) -> Result {
+) -> Result {
let conn = &mut *context.conn().await;
let query_project =
QueryProject::is_allowed_public(conn, &context.rbac, &path_params.project, auth_user)?;
- QueryMetricKind::belonging_to(&query_project)
- .filter(QueryMetricKind::resource_id(&path_params.metric_kind)?)
- .first::(conn)
- .map(|metric_kind| metric_kind.into_json_for_project(&query_project))
+ QueryMeasure::belonging_to(&query_project)
+ .filter(QueryMeasure::resource_id(&path_params.measure)?)
+ .first::(conn)
+ .map(|measure| measure.into_json_for_project(&query_project))
.map_err(resource_not_found_err!(
- MetricKind,
- (&query_project, path_params.metric_kind)
+ Measure,
+ (&query_project, path_params.measure)
))
}
#[endpoint {
method = PATCH,
- path = "/v0/projects/{project}/metric-kinds/{metric_kind}",
- tags = ["projects", "metric kinds"]
+ path = "/v0/projects/{project}/measures/{measure}",
+ tags = ["projects", "measures"]
}]
-pub async fn proj_metric_kind_patch(
+pub async fn proj_measure_patch(
rqctx: RequestContext,
bearer_token: BearerToken,
- path_params: Path,
- body: TypedBody,
-) -> Result, HttpError> {
+ path_params: Path,
+ body: TypedBody,
+) -> Result, HttpError> {
let auth_user = AuthUser::from_token(rqctx.context(), bearer_token).await?;
let json = patch_inner(
rqctx.context(),
@@ -255,10 +255,10 @@ pub async fn proj_metric_kind_patch(
async fn patch_inner(
context: &ApiContext,
- path_params: ProjMetricKindParams,
- json_metric_kind: JsonUpdateMetricKind,
+ path_params: ProjMeasureParams,
+ json_measure: JsonUpdateMeasure,
auth_user: &AuthUser,
-) -> Result {
+) -> Result {
let conn = &mut *context.conn().await;
// Verify that the user is allowed
@@ -270,33 +270,31 @@ async fn patch_inner(
Permission::Edit,
)?;
- let query_metric_kind =
- QueryMetricKind::from_resource_id(conn, query_project.id, &path_params.metric_kind)?;
+ let query_measure =
+ QueryMeasure::from_resource_id(conn, query_project.id, &path_params.measure)?;
- diesel::update(
- schema::metric_kind::table.filter(schema::metric_kind::id.eq(query_metric_kind.id)),
- )
- .set(&UpdateMetricKind::from(json_metric_kind.clone()))
- .execute(conn)
- .map_err(resource_conflict_err!(
- MetricKind,
- (&query_metric_kind, &json_metric_kind)
- ))?;
-
- QueryMetricKind::get(conn, query_metric_kind.id)
- .map(|metric_kind| metric_kind.into_json_for_project(&query_project))
- .map_err(resource_not_found_err!(MetricKind, query_metric_kind))
+ diesel::update(schema::measure::table.filter(schema::measure::id.eq(query_measure.id)))
+ .set(&UpdateMeasure::from(json_measure.clone()))
+ .execute(conn)
+ .map_err(resource_conflict_err!(
+ Measure,
+ (&query_measure, &json_measure)
+ ))?;
+
+ QueryMeasure::get(conn, query_measure.id)
+ .map(|measure| measure.into_json_for_project(&query_project))
+ .map_err(resource_not_found_err!(Measure, query_measure))
}
#[endpoint {
method = DELETE,
- path = "/v0/projects/{project}/metric-kinds/{metric_kind}",
- tags = ["projects", "metric kinds"]
+ path = "/v0/projects/{project}/measures/{measure}",
+ tags = ["projects", "measures"]
}]
-pub async fn proj_metric_kind_delete(
+pub async fn proj_measure_delete(
rqctx: RequestContext,
bearer_token: BearerToken,
- path_params: Path,
+ path_params: Path,
) -> Result, HttpError> {
let auth_user = AuthUser::from_token(rqctx.context(), bearer_token).await?;
let json = delete_inner(rqctx.context(), path_params.into_inner(), &auth_user).await?;
@@ -305,7 +303,7 @@ pub async fn proj_metric_kind_delete(
async fn delete_inner(
context: &ApiContext,
- path_params: ProjMetricKindParams,
+ path_params: ProjMeasureParams,
auth_user: &AuthUser,
) -> Result {
let conn = &mut *context.conn().await;
@@ -319,14 +317,12 @@ async fn delete_inner(
Permission::Delete,
)?;
- let query_metric_kind =
- QueryMetricKind::from_resource_id(conn, query_project.id, &path_params.metric_kind)?;
+ let query_measure =
+ QueryMeasure::from_resource_id(conn, query_project.id, &path_params.measure)?;
- diesel::delete(
- schema::metric_kind::table.filter(schema::metric_kind::id.eq(query_metric_kind.id)),
- )
- .execute(conn)
- .map_err(resource_conflict_err!(MetricKind, query_metric_kind))?;
+ diesel::delete(schema::measure::table.filter(schema::measure::id.eq(query_measure.id)))
+ .execute(conn)
+ .map_err(resource_conflict_err!(Measure, query_measure))?;
Ok(JsonEmpty {})
}
diff --git a/services/api/src/endpoints/project/mod.rs b/services/api/src/endpoints/project/mod.rs
index 808bddcb2..d3248ef40 100644
--- a/services/api/src/endpoints/project/mod.rs
+++ b/services/api/src/endpoints/project/mod.rs
@@ -1,7 +1,7 @@
pub mod allowed;
pub mod benchmarks;
pub mod branches;
-pub mod metric_kinds;
+pub mod measures;
pub mod perf;
pub mod projects;
pub mod reports;
diff --git a/services/api/src/endpoints/project/perf/mod.rs b/services/api/src/endpoints/project/perf/mod.rs
index 245febb59..4960eccc8 100644
--- a/services/api/src/endpoints/project/perf/mod.rs
+++ b/services/api/src/endpoints/project/perf/mod.rs
@@ -4,8 +4,8 @@ use bencher_json::{
branch::{JsonVersion, VersionNumber},
perf::{Iteration, JsonPerfMetric, JsonPerfMetrics, JsonPerfQueryParams},
},
- BenchmarkUuid, BranchUuid, DateTime, GitHash, JsonPerf, JsonPerfQuery, MetricKindUuid,
- ReportUuid, ResourceId, TestbedUuid,
+ BenchmarkUuid, BranchUuid, DateTime, GitHash, JsonPerf, JsonPerfQuery, MeasureUuid, ReportUuid,
+ ResourceId, TestbedUuid,
};
use diesel::{
ExpressionMethods, NullableExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper,
@@ -26,8 +26,8 @@ use crate::{
project::{
benchmark::QueryBenchmark,
branch::QueryBranch,
+ measure::QueryMeasure,
metric::QueryMetric,
- metric_kind::QueryMetricKind,
testbed::QueryTestbed,
threshold::{
alert::QueryAlert, boundary::QueryBoundary, statistic::QueryStatistic,
@@ -103,10 +103,10 @@ async fn get_inner(
QueryProject::is_allowed_public(conn, &context.rbac, &path_params.project, auth_user)?;
let JsonPerfQuery {
- metric_kinds,
branches,
testbeds,
benchmarks,
+ measures,
start_time,
end_time,
} = json_perf_query;
@@ -119,10 +119,10 @@ async fn get_inner(
let results = perf_results(
conn,
&project,
- &metric_kinds,
&branches,
&testbeds,
&benchmarks,
+ &measures,
times,
)?;
@@ -143,24 +143,24 @@ struct Times {
fn perf_results(
conn: &mut DbConnection,
project: &QueryProject,
- metric_kinds: &[MetricKindUuid],
branches: &[BranchUuid],
testbeds: &[TestbedUuid],
benchmarks: &[BenchmarkUuid],
+ measures: &[MeasureUuid],
times: Times,
) -> Result, HttpError> {
- let permutations = metric_kinds.len() * branches.len() * testbeds.len() * benchmarks.len();
+ let permutations = branches.len() * testbeds.len() * benchmarks.len() * measures.len();
let gt_max_permutations = permutations > MAX_PERMUTATIONS;
let mut results = Vec::with_capacity(permutations.min(MAX_PERMUTATIONS));
- for (metric_kind_index, metric_kind_uuid) in metric_kinds.iter().enumerate() {
- for (branch_index, branch_uuid) in branches.iter().enumerate() {
- for (testbed_index, testbed_uuid) in testbeds.iter().enumerate() {
- for (benchmark_index, benchmark_uuid) in benchmarks.iter().enumerate() {
+ for (branch_index, branch_uuid) in branches.iter().enumerate() {
+ for (testbed_index, testbed_uuid) in testbeds.iter().enumerate() {
+ for (benchmark_index, benchmark_uuid) in benchmarks.iter().enumerate() {
+ for (measure_index, measure_uuid) in measures.iter().enumerate() {
if gt_max_permutations
- && (metric_kind_index + 1)
- * (branch_index + 1)
+ && (branch_index + 1)
* (testbed_index + 1)
* (benchmark_index + 1)
+ * (measure_index + 1)
> MAX_PERMUTATIONS
{
return Ok(results);
@@ -169,10 +169,10 @@ fn perf_results(
if let Some(perf_metrics) = perf_query(
conn,
project,
- *metric_kind_uuid,
*branch_uuid,
*testbed_uuid,
*benchmark_uuid,
+ *measure_uuid,
times,
)?
.into_iter()
@@ -201,14 +201,13 @@ fn perf_results(
fn perf_query(
conn: &mut DbConnection,
project: &QueryProject,
- metric_kind_uuid: MetricKindUuid,
branch_uuid: BranchUuid,
testbed_uuid: TestbedUuid,
benchmark_uuid: BenchmarkUuid,
+ measure_uuid: MeasureUuid,
times: Times,
) -> Result, HttpError> {
let mut query = schema::metric::table
- .inner_join(schema::metric_kind::table)
.inner_join(
schema::perf::table.inner_join(
schema::report::table
@@ -221,13 +220,14 @@ fn perf_query(
)
.inner_join(schema::benchmark::table)
)
+ .inner_join(schema::measure::table)
// It is important to filter for the branch on the `branch_version` table and not on the branch in the `report` table.
// This is because the `branch_version` table is the one that is updated when a branch is cloned/used as a start point.
// In contrast, the `report` table is only set to a single branch when the report is created.
- .filter(schema::metric_kind::uuid.eq(metric_kind_uuid))
.filter(schema::branch::uuid.eq(branch_uuid))
.filter(schema::testbed::uuid.eq(testbed_uuid))
.filter(schema::benchmark::uuid.eq(benchmark_uuid))
+ .filter(schema::measure::uuid.eq(measure_uuid))
// There may or may not be a boundary for any given metric
.left_join(
schema::boundary::table
@@ -259,10 +259,10 @@ fn perf_query(
schema::perf::iteration,
))
.select((
- QueryMetricKind::as_select(),
QueryBranch::as_select(),
QueryTestbed::as_select(),
QueryBenchmark::as_select(),
+ QueryMeasure::as_select(),
schema::report::uuid,
schema::perf::iteration,
schema::report::start_time,
@@ -284,7 +284,7 @@ fn perf_query(
schema::threshold::id,
schema::threshold::uuid,
schema::threshold::project_id,
- schema::threshold::metric_kind_id,
+ schema::threshold::measure_id,
schema::threshold::branch_id,
schema::threshold::testbed_id,
schema::threshold::statistic_id,
@@ -315,14 +315,14 @@ fn perf_query(
QueryMetric::as_select(),
))
.load::(conn)
- .map_err(resource_not_found_err!(Metric, (project, metric_kind_uuid, branch_uuid, testbed_uuid, benchmark_uuid)))
+ .map_err(resource_not_found_err!(Metric, (project, branch_uuid, testbed_uuid, benchmark_uuid, measure_uuid)))
}
type PerfQuery = (
- QueryMetricKind,
QueryBranch,
QueryTestbed,
QueryBenchmark,
+ QueryMeasure,
ReportUuid,
Iteration,
DateTime,
@@ -339,10 +339,10 @@ type PerfQuery = (
);
struct QueryDimensions {
- metric_kind: QueryMetricKind,
branch: QueryBranch,
testbed: QueryTestbed,
benchmark: QueryBenchmark,
+ measure: QueryMeasure,
}
type PerfMetricQuery = (
@@ -363,10 +363,10 @@ type PerfMetricQuery = (
fn split_perf_query(
(
- metric_kind,
branch,
testbed,
benchmark,
+ measure,
report_uuid,
iteration,
start_time,
@@ -378,10 +378,10 @@ fn split_perf_query(
): PerfQuery,
) -> (QueryDimensions, PerfMetricQuery) {
let query_dimensions = QueryDimensions {
- metric_kind,
branch,
testbed,
benchmark,
+ measure,
};
let metric_query = (
report_uuid,
@@ -402,16 +402,16 @@ fn new_perf_metrics(
metric: JsonPerfMetric,
) -> JsonPerfMetrics {
let QueryDimensions {
- metric_kind,
branch,
testbed,
benchmark,
+ measure,
} = query_dimensions;
JsonPerfMetrics {
- metric_kind: metric_kind.into_json_for_project(project),
branch: branch.into_json_for_project(project),
testbed: testbed.into_json_for_project(project),
benchmark: benchmark.into_json_for_project(project),
+ measure: measure.into_json_for_project(project),
metrics: vec![metric],
}
}
diff --git a/services/api/src/endpoints/project/thresholds/mod.rs b/services/api/src/endpoints/project/thresholds/mod.rs
index ab2a7bc2e..5866fd292 100644
--- a/services/api/src/endpoints/project/thresholds/mod.rs
+++ b/services/api/src/endpoints/project/thresholds/mod.rs
@@ -19,7 +19,7 @@ use crate::{
model::{
project::{
branch::QueryBranch,
- metric_kind::QueryMetricKind,
+ measure::QueryMeasure,
testbed::QueryTestbed,
threshold::{
statistic::InsertStatistic, InsertThreshold, QueryThreshold, UpdateThreshold,
@@ -168,19 +168,18 @@ async fn post_inner(
)?;
let project_id = query_project.id;
- // Verify that the branch, testbed, and metric kind are part of the same project
+ // Verify that the branch, testbed, and measure are part of the same project
let branch_id = QueryBranch::from_resource_id(conn, project_id, &json_threshold.branch)?.id;
let testbed_id = QueryTestbed::from_resource_id(conn, project_id, &json_threshold.testbed)?.id;
- let metric_kind_id =
- QueryMetricKind::from_resource_id(conn, project_id, &json_threshold.metric_kind)?.id;
+ let measure_id = QueryMeasure::from_resource_id(conn, project_id, &json_threshold.measure)?.id;
// Create the new threshold
let threshold_id = InsertThreshold::insert_from_json(
conn,
project_id,
- metric_kind_id,
branch_id,
testbed_id,
+ measure_id,
json_threshold.statistic,
)?;
diff --git a/services/api/src/error.rs b/services/api/src/error.rs
index c8a3d3e0d..11e111333 100644
--- a/services/api/src/error.rs
+++ b/services/api/src/error.rs
@@ -12,13 +12,13 @@ pub enum BencherResource {
Project,
ProjectRole,
Report,
- MetricKind,
+ Perf,
Branch,
Version,
BranchVersion,
Testbed,
Benchmark,
- Perf,
+ Measure,
Metric,
Threshold,
Statistic,
@@ -43,13 +43,13 @@ impl fmt::Display for BencherResource {
Self::Project => "Project",
Self::ProjectRole => "Project Role",
Self::Report => "Report",
- Self::MetricKind => "Metric Kind",
+ Self::Perf => "Perf",
Self::Branch => "Branch",
Self::Version => "Version",
Self::BranchVersion => "Branch Version",
Self::Testbed => "Testbed",
Self::Benchmark => "Benchmark",
- Self::Perf => "Perf",
+ Self::Measure => "Measure",
Self::Metric => "Metric",
Self::Threshold => "Threshold",
Self::Statistic => "Statistic",
diff --git a/services/api/src/model/project/branch.rs b/services/api/src/model/project/branch.rs
index eaa17647a..838e10359 100644
--- a/services/api/src/model/project/branch.rs
+++ b/services/api/src/model/project/branch.rs
@@ -191,9 +191,9 @@ impl InsertBranch {
// Clone the threshold for the new branch
let insert_threshold = InsertThreshold::new(
self.project_id,
- query_threshold.metric_kind_id,
new_branch_id,
query_threshold.testbed_id,
+ query_threshold.measure_id,
);
// Create the new threshold
diff --git a/services/api/src/model/project/metric_kind.rs b/services/api/src/model/project/measure.rs
similarity index 58%
rename from services/api/src/model/project/metric_kind.rs
rename to services/api/src/model/project/measure.rs
index 91f848888..a18a1a942 100644
--- a/services/api/src/model/project/metric_kind.rs
+++ b/services/api/src/model/project/measure.rs
@@ -1,11 +1,11 @@
use bencher_json::{
- project::metric_kind::{
- JsonUpdateMetricKind, MetricKindUuid, ESTIMATED_CYCLES_NAME_STR, ESTIMATED_CYCLES_SLUG_STR,
+ project::measure::{
+ JsonUpdateMeasure, MeasureUuid, ESTIMATED_CYCLES_NAME_STR, ESTIMATED_CYCLES_SLUG_STR,
INSTRUCTIONS_NAME_STR, INSTRUCTIONS_SLUG_STR, L1_ACCESSES_NAME_STR, L1_ACCESSES_SLUG_STR,
L2_ACCESSES_NAME_STR, L2_ACCESSES_SLUG_STR, LATENCY_NAME_STR, LATENCY_SLUG_STR,
RAM_ACCESSES_NAME_STR, RAM_ACCESSES_SLUG_STR, THROUGHPUT_NAME_STR, THROUGHPUT_SLUG_STR,
},
- DateTime, JsonMetricKind, JsonNewMetricKind, NonEmpty, ResourceId, Slug,
+ DateTime, JsonMeasure, JsonNewMeasure, NonEmpty, ResourceId, Slug,
};
use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl};
use dropshot::HttpError;
@@ -15,7 +15,7 @@ use crate::{
error::{assert_parentage, resource_conflict_err, BencherResource},
model::project::QueryProject,
schema,
- schema::metric_kind as metric_kind_table,
+ schema::measure as measure_table,
util::{
fn_get::{fn_get, fn_get_id, fn_get_uuid},
resource_id::{fn_from_resource_id, fn_resource_id},
@@ -25,16 +25,16 @@ use crate::{
use super::ProjectId;
-crate::util::typed_id::typed_id!(MetricKindId);
+crate::util::typed_id::typed_id!(MeasureId);
#[derive(
Debug, Clone, diesel::Queryable, diesel::Identifiable, diesel::Associations, diesel::Selectable,
)]
-#[diesel(table_name = metric_kind_table)]
+#[diesel(table_name = measure_table)]
#[diesel(belongs_to(QueryProject, foreign_key = project_id))]
-pub struct QueryMetricKind {
- pub id: MetricKindId,
- pub uuid: MetricKindUuid,
+pub struct QueryMeasure {
+ pub id: MeasureId,
+ pub uuid: MeasureUuid,
pub project_id: ProjectId,
pub name: NonEmpty,
pub slug: Slug,
@@ -43,58 +43,58 @@ pub struct QueryMetricKind {
pub modified: DateTime,
}
-impl QueryMetricKind {
- fn_resource_id!(metric_kind);
- fn_from_resource_id!(metric_kind, MetricKind);
+impl QueryMeasure {
+ fn_resource_id!(measure);
+ fn_from_resource_id!(measure, Measure);
- fn_get!(metric_kind, MetricKindId);
- fn_get_id!(metric_kind, MetricKindId, MetricKindUuid);
- fn_get_uuid!(metric_kind, MetricKindId, MetricKindUuid);
+ fn_get!(measure, MeasureId);
+ fn_get_id!(measure, MeasureId, MeasureUuid);
+ fn_get_uuid!(measure, MeasureId, MeasureUuid);
pub fn get_or_create(
conn: &mut DbConnection,
project_id: ProjectId,
- metric_kind: &ResourceId,
- ) -> Result {
- let query_metric_kind = Self::from_resource_id(conn, project_id, metric_kind);
+ measure: &ResourceId,
+ ) -> Result {
+ let query_measure = Self::from_resource_id(conn, project_id, measure);
- let http_error = match query_metric_kind {
- Ok(metric_kind) => return Ok(metric_kind.id),
+ let http_error = match query_measure {
+ Ok(measure) => return Ok(measure.id),
Err(e) => e,
};
- // Dynamically create adapter specific metric kinds
- // Or recreate default metric kinds if they were previously deleted
- let insert_metric_kind = match metric_kind.as_ref() {
+ // Dynamically create adapter specific measures
+ // Or recreate default measures if they were previously deleted
+ let insert_measure = match measure.as_ref() {
// Recreate
- LATENCY_SLUG_STR => InsertMetricKind::latency(conn, project_id),
- THROUGHPUT_SLUG_STR => InsertMetricKind::throughput(conn, project_id),
+ LATENCY_SLUG_STR => InsertMeasure::latency(conn, project_id),
+ THROUGHPUT_SLUG_STR => InsertMeasure::throughput(conn, project_id),
// Adapter specific
- INSTRUCTIONS_SLUG_STR => InsertMetricKind::instructions(conn, project_id),
- L1_ACCESSES_SLUG_STR => InsertMetricKind::l1_accesses(conn, project_id),
- L2_ACCESSES_SLUG_STR => InsertMetricKind::l2_accesses(conn, project_id),
- RAM_ACCESSES_SLUG_STR => InsertMetricKind::ram_accesses(conn, project_id),
- ESTIMATED_CYCLES_SLUG_STR => InsertMetricKind::estimated_cycles(conn, project_id),
+ INSTRUCTIONS_SLUG_STR => InsertMeasure::instructions(conn, project_id),
+ L1_ACCESSES_SLUG_STR => InsertMeasure::l1_accesses(conn, project_id),
+ L2_ACCESSES_SLUG_STR => InsertMeasure::l2_accesses(conn, project_id),
+ RAM_ACCESSES_SLUG_STR => InsertMeasure::ram_accesses(conn, project_id),
+ ESTIMATED_CYCLES_SLUG_STR => InsertMeasure::estimated_cycles(conn, project_id),
_ => return Err(http_error),
}?;
- diesel::insert_into(schema::metric_kind::table)
- .values(&insert_metric_kind)
+ diesel::insert_into(schema::measure::table)
+ .values(&insert_measure)
.execute(conn)
- .map_err(resource_conflict_err!(MetricKind, insert_metric_kind))?;
+ .map_err(resource_conflict_err!(Measure, insert_measure))?;
- Self::get_id(conn, insert_metric_kind.uuid)
+ Self::get_id(conn, insert_measure.uuid)
}
pub fn is_system(&self) -> bool {
is_system(self.name.as_ref(), self.slug.as_ref())
}
- pub fn into_json(self, conn: &mut DbConnection) -> Result {
+ pub fn into_json(self, conn: &mut DbConnection) -> Result {
let project = QueryProject::get(conn, self.project_id)?;
Ok(self.into_json_for_project(&project))
}
- pub fn into_json_for_project(self, project: &QueryProject) -> JsonMetricKind {
+ pub fn into_json_for_project(self, project: &QueryProject) -> JsonMeasure {
let Self {
uuid,
project_id,
@@ -108,10 +108,10 @@ impl QueryMetricKind {
assert_parentage(
BencherResource::Project,
project.id,
- BencherResource::MetricKind,
+ BencherResource::Measure,
project_id,
);
- JsonMetricKind {
+ JsonMeasure {
uuid,
project: project.uuid,
name,
@@ -124,9 +124,9 @@ impl QueryMetricKind {
}
#[derive(Debug, diesel::Insertable)]
-#[diesel(table_name = metric_kind_table)]
-pub struct InsertMetricKind {
- pub uuid: MetricKindUuid,
+#[diesel(table_name = measure_table)]
+pub struct InsertMeasure {
+ pub uuid: MeasureUuid,
pub project_id: ProjectId,
pub name: NonEmpty,
pub slug: Slug,
@@ -135,17 +135,17 @@ pub struct InsertMetricKind {
pub modified: DateTime,
}
-impl InsertMetricKind {
+impl InsertMeasure {
pub fn from_json(
conn: &mut DbConnection,
project_id: ProjectId,
- metric_kind: JsonNewMetricKind,
+ measure: JsonNewMeasure,
) -> Result {
- let JsonNewMetricKind { name, slug, units } = metric_kind;
- let slug = ok_slug!(conn, project_id, &name, slug, metric_kind, QueryMetricKind)?;
+ let JsonNewMeasure { name, slug, units } = measure;
+ let slug = ok_slug!(conn, project_id, &name, slug, measure, QueryMeasure)?;
let timestamp = DateTime::now();
Ok(Self {
- uuid: MetricKindUuid::new(),
+ uuid: MeasureUuid::new(),
project_id,
name,
slug,
@@ -156,34 +156,34 @@ impl InsertMetricKind {
}
pub fn latency(conn: &mut DbConnection, project_id: ProjectId) -> Result {
- Self::from_json(conn, project_id, JsonNewMetricKind::latency())
+ Self::from_json(conn, project_id, JsonNewMeasure::latency())
}
pub fn throughput(conn: &mut DbConnection, project_id: ProjectId) -> Result {
- Self::from_json(conn, project_id, JsonNewMetricKind::throughput())
+ Self::from_json(conn, project_id, JsonNewMeasure::throughput())
}
pub fn instructions(conn: &mut DbConnection, project_id: ProjectId) -> Result {
- Self::from_json(conn, project_id, JsonNewMetricKind::instructions())
+ Self::from_json(conn, project_id, JsonNewMeasure::instructions())
}
pub fn l1_accesses(conn: &mut DbConnection, project_id: ProjectId) -> Result {
- Self::from_json(conn, project_id, JsonNewMetricKind::l1_accesses())
+ Self::from_json(conn, project_id, JsonNewMeasure::l1_accesses())
}
pub fn l2_accesses(conn: &mut DbConnection, project_id: ProjectId) -> Result {
- Self::from_json(conn, project_id, JsonNewMetricKind::l2_accesses())
+ Self::from_json(conn, project_id, JsonNewMeasure::l2_accesses())
}
pub fn ram_accesses(conn: &mut DbConnection, project_id: ProjectId) -> Result {
- Self::from_json(conn, project_id, JsonNewMetricKind::ram_accesses())
+ Self::from_json(conn, project_id, JsonNewMeasure::ram_accesses())
}
pub fn estimated_cycles(
conn: &mut DbConnection,
project_id: ProjectId,
) -> Result {
- Self::from_json(conn, project_id, JsonNewMetricKind::estimated_cycles())
+ Self::from_json(conn, project_id, JsonNewMeasure::estimated_cycles())
}
pub fn is_system(&self) -> bool {
@@ -214,17 +214,17 @@ fn is_system(name: &str, slug: &str) -> bool {
}
#[derive(Debug, Clone, diesel::AsChangeset)]
-#[diesel(table_name = metric_kind_table)]
-pub struct UpdateMetricKind {
+#[diesel(table_name = measure_table)]
+pub struct UpdateMeasure {
pub name: Option,
pub slug: Option,
pub units: Option,
pub modified: DateTime,
}
-impl From for UpdateMetricKind {
- fn from(update: JsonUpdateMetricKind) -> Self {
- let JsonUpdateMetricKind { name, slug, units } = update;
+impl From for UpdateMeasure {
+ fn from(update: JsonUpdateMeasure) -> Self {
+ let JsonUpdateMeasure { name, slug, units } = update;
Self {
name,
slug,
diff --git a/services/api/src/model/project/metric.rs b/services/api/src/model/project/metric.rs
index de3746b12..660cc05a8 100644
--- a/services/api/src/model/project/metric.rs
+++ b/services/api/src/model/project/metric.rs
@@ -9,7 +9,7 @@ use crate::{
};
use super::{
- metric_kind::{MetricKindId, QueryMetricKind},
+ measure::{MeasureId, QueryMeasure},
perf::{PerfId, QueryPerf},
};
@@ -20,12 +20,12 @@ crate::util::typed_id::typed_id!(MetricId);
)]
#[diesel(table_name = metric_table)]
#[diesel(belongs_to(QueryPerf, foreign_key = perf_id))]
-#[diesel(belongs_to(QueryMetricKind, foreign_key = metric_kind_id))]
+#[diesel(belongs_to(QueryMeasure, foreign_key = measure_id))]
pub struct QueryMetric {
pub id: MetricId,
pub uuid: MetricUuid,
pub perf_id: PerfId,
- pub metric_kind_id: MetricKindId,
+ pub measure_id: MeasureId,
pub value: f64,
pub lower_value: Option,
pub upper_value: Option,
@@ -97,14 +97,14 @@ impl QueryMetric {
pub struct InsertMetric {
pub uuid: MetricUuid,
pub perf_id: PerfId,
- pub metric_kind_id: MetricKindId,
+ pub measure_id: MeasureId,
pub value: f64,
pub lower_value: Option,
pub upper_value: Option,
}
impl InsertMetric {
- pub fn from_json(perf_id: PerfId, metric_kind_id: MetricKindId, metric: JsonMetric) -> Self {
+ pub fn from_json(perf_id: PerfId, measure_id: MeasureId, metric: JsonMetric) -> Self {
let JsonMetric {
value,
lower_value,
@@ -113,7 +113,7 @@ impl InsertMetric {
Self {
uuid: MetricUuid::new(),
perf_id,
- metric_kind_id,
+ measure_id,
value: value.into(),
lower_value: lower_value.map(Into::into),
upper_value: upper_value.map(Into::into),
diff --git a/services/api/src/model/project/mod.rs b/services/api/src/model/project/mod.rs
index 65e7bcb9b..42ab0f5d0 100644
--- a/services/api/src/model/project/mod.rs
+++ b/services/api/src/model/project/mod.rs
@@ -25,8 +25,8 @@ use super::{organization::OrganizationId, user::auth::BEARER_TOKEN_FORMAT};
pub mod benchmark;
pub mod branch;
pub mod branch_version;
+pub mod measure;
pub mod metric;
-pub mod metric_kind;
pub mod perf;
pub mod project_role;
pub mod report;
diff --git a/services/api/src/model/project/report/mod.rs b/services/api/src/model/project/report/mod.rs
index a256ba860..0418ffd30 100644
--- a/services/api/src/model/project/report/mod.rs
+++ b/services/api/src/model/project/report/mod.rs
@@ -18,8 +18,8 @@ use crate::{
project::{
benchmark::QueryBenchmark,
branch::{BranchId, QueryBranch},
+ measure::QueryMeasure,
metric::QueryMetric,
- metric_kind::QueryMetricKind,
testbed::{QueryTestbed, TestbedId},
threshold::statistic::QueryStatistic,
threshold::{alert::QueryAlert, boundary::QueryBoundary, QueryThreshold},
@@ -95,11 +95,11 @@ impl QueryReport {
type ResultsQuery = (
Iteration,
- QueryMetricKind,
- Option<(QueryThreshold, QueryStatistic)>,
QueryBenchmark,
+ QueryMeasure,
QueryMetric,
Option,
+ Option<(QueryThreshold, QueryStatistic)>,
);
fn get_report_results(
@@ -112,7 +112,7 @@ fn get_report_results(
.filter(schema::perf::report_id.eq(report_id))
.inner_join(schema::benchmark::table)
.inner_join(schema::metric::table
- .inner_join(schema::metric_kind::table)
+ .inner_join(schema::measure::table)
// There may or may not be a boundary for any given metric
.left_join(schema::boundary::table
.inner_join(schema::threshold::table)
@@ -120,17 +120,29 @@ fn get_report_results(
)
)
// It is important to order by the iteration first in order to make sure they are grouped together below
- // Then ordering by metric kind and finally benchmark name makes sure that the benchmarks are in the same order for each iteration
- .order((schema::perf::iteration, schema::metric_kind::name, schema::benchmark::name))
+ // Then ordering by measure and finally benchmark name makes sure that the benchmarks are in the same order for each iteration
+ .order((schema::perf::iteration, schema::measure::name, schema::benchmark::name))
.select((
schema::perf::iteration,
- QueryMetricKind::as_select(),
+ QueryBenchmark::as_select(),
+ QueryMeasure::as_select(),
+ QueryMetric::as_select(),
+ (
+ schema::boundary::id,
+ schema::boundary::uuid,
+ schema::boundary::threshold_id,
+ schema::boundary::statistic_id,
+ schema::boundary::metric_id,
+ schema::boundary::baseline,
+ schema::boundary::lower_limit,
+ schema::boundary::upper_limit,
+ ).nullable(),
(
(
schema::threshold::id,
schema::threshold::uuid,
schema::threshold::project_id,
- schema::threshold::metric_kind_id,
+ schema::threshold::measure_id,
schema::threshold::branch_id,
schema::threshold::testbed_id,
schema::threshold::statistic_id,
@@ -150,18 +162,6 @@ fn get_report_results(
schema::statistic::created,
)
).nullable(),
- QueryBenchmark::as_select(),
- QueryMetric::as_select(),
- (
- schema::boundary::id,
- schema::boundary::uuid,
- schema::boundary::threshold_id,
- schema::boundary::statistic_id,
- schema::boundary::metric_id,
- schema::boundary::baseline,
- schema::boundary::lower_limit,
- schema::boundary::upper_limit,
- ).nullable(),
))
.load::(conn)
.map(|results| into_report_results_json(log, project, results))
@@ -179,11 +179,11 @@ fn into_report_results_json(
let mut report_result: Option = None;
for (
iteration,
- query_metric_kind,
- threshold_statistic,
query_benchmark,
+ query_measure,
query_metric,
query_boundary,
+ threshold_statistic,
) in results
{
// If onto a new iteration, then add the result to the report iteration list.
@@ -199,17 +199,17 @@ fn into_report_results_json(
}
prev_iteration = Some(iteration);
- // If there is a current report result, make sure that the metric kind is the same.
+ // If there is a current report result, make sure that the measure is the same.
// Otherwise, add it to the report iteration list.
if let Some(result) = report_result.take() {
- if query_metric_kind.uuid == result.metric_kind.uuid {
+ if query_measure.uuid == result.measure.uuid {
report_result = Some(result);
} else {
slog::trace!(
log,
- "Metric Kind {} => {}",
- result.metric_kind.uuid,
- query_metric_kind.uuid,
+ "Measure {} => {}",
+ result.measure.uuid,
+ query_measure.uuid,
);
report_iteration.push(result);
}
@@ -224,14 +224,16 @@ fn into_report_results_json(
if let Some(result) = report_result.as_mut() {
result.benchmarks.push(benchmark_metric);
} else {
- let metric_kind = query_metric_kind.into_json_for_project(project);
+ let measure = query_measure.into_json_for_project(project);
let threshold = if let Some((threshold, statistic)) = threshold_statistic {
Some(threshold.into_threshold_statistic_json_for_project(project, statistic))
} else {
None
};
report_result = Some(JsonReportResult {
- metric_kind,
+ // TODO: deprecated alias — remove `metric_kind` once API clients have migrated to `measure`
+ metric_kind: Some(measure.clone()),
+ measure,
threshold,
benchmarks: vec![benchmark_metric],
});
diff --git a/services/api/src/model/project/report/results/detector/data.rs b/services/api/src/model/project/report/results/detector/data.rs
index 86fd1970d..cb8b03cc4 100644
--- a/services/api/src/model/project/report/results/detector/data.rs
+++ b/services/api/src/model/project/report/results/detector/data.rs
@@ -8,7 +8,7 @@ use crate::{
context::DbConnection,
error::not_found_error,
model::project::{
- benchmark::BenchmarkId, branch::BranchId, metric_kind::MetricKindId, testbed::TestbedId,
+ benchmark::BenchmarkId, branch::BranchId, measure::MeasureId, testbed::TestbedId,
},
schema,
};
@@ -18,14 +18,13 @@ use super::threshold::MetricsStatistic;
pub fn metrics_data(
log: &Logger,
conn: &mut DbConnection,
- metric_kind_id: MetricKindId,
branch_id: BranchId,
testbed_id: TestbedId,
benchmark_id: BenchmarkId,
+ measure_id: MeasureId,
statistic: &MetricsStatistic,
) -> Result {
let mut query = schema::metric::table
- .filter(schema::metric::metric_kind_id.eq(metric_kind_id))
.inner_join(
schema::perf::table
.inner_join(
@@ -40,6 +39,7 @@ pub fn metrics_data(
.filter(schema::branch::id.eq(branch_id))
.filter(schema::testbed::id.eq(testbed_id))
.filter(schema::benchmark::id.eq(benchmark_id))
+ .filter(schema::metric::measure_id.eq(measure_id))
.into_boxed();
if let Some(window) = statistic.window {
diff --git a/services/api/src/model/project/report/results/detector/mod.rs b/services/api/src/model/project/report/results/detector/mod.rs
index 58077d4ec..5c8a9716c 100644
--- a/services/api/src/model/project/report/results/detector/mod.rs
+++ b/services/api/src/model/project/report/results/detector/mod.rs
@@ -10,8 +10,8 @@ use crate::{
model::project::{
benchmark::BenchmarkId,
branch::BranchId,
+ measure::MeasureId,
metric::QueryMetric,
- metric_kind::MetricKindId,
testbed::TestbedId,
threshold::{alert::InsertAlert, boundary::InsertBoundary},
},
@@ -26,25 +26,25 @@ use threshold::MetricsThreshold;
#[derive(Debug, Clone)]
pub struct Detector {
- pub metric_kind_id: MetricKindId,
pub branch_id: BranchId,
pub testbed_id: TestbedId,
+ pub measure_id: MeasureId,
pub threshold: MetricsThreshold,
}
impl Detector {
pub fn new(
conn: &mut DbConnection,
- metric_kind_id: MetricKindId,
branch_id: BranchId,
testbed_id: TestbedId,
+ measure_id: MeasureId,
) -> Option {
- // Check to see if there is a threshold for the branch/testbed/metric kind grouping.
+ // Check to see if there is a threshold for the branch/testbed/measure grouping.
// If not, then there will be nothing to detect.
- MetricsThreshold::new(conn, metric_kind_id, branch_id, testbed_id).map(|threshold| Self {
- metric_kind_id,
+ MetricsThreshold::new(conn, branch_id, testbed_id, measure_id).map(|threshold| Self {
branch_id,
testbed_id,
+ measure_id,
threshold,
})
}
@@ -60,10 +60,10 @@ impl Detector {
let metrics_data = metrics_data(
log,
conn,
- self.metric_kind_id,
self.branch_id,
self.testbed_id,
benchmark_id,
+ self.measure_id,
&self.threshold.statistic,
)?;
diff --git a/services/api/src/model/project/report/results/detector/threshold.rs b/services/api/src/model/project/report/results/detector/threshold.rs
index 011784019..e3240c3f9 100644
--- a/services/api/src/model/project/report/results/detector/threshold.rs
+++ b/services/api/src/model/project/report/results/detector/threshold.rs
@@ -8,7 +8,7 @@ use crate::{
context::DbConnection,
model::project::{
branch::BranchId,
- metric_kind::MetricKindId,
+ measure::MeasureId,
testbed::TestbedId,
threshold::{
statistic::{QueryStatistic, StatisticId},
@@ -38,9 +38,9 @@ pub struct MetricsStatistic {
impl MetricsThreshold {
pub fn new(
conn: &mut DbConnection,
- metric_kind_id: MetricKindId,
branch_id: BranchId,
testbed_id: TestbedId,
+ measure_id: MeasureId,
) -> Option {
schema::statistic::table
.inner_join(
@@ -48,9 +48,9 @@ impl MetricsThreshold {
.nullable()
.eq(schema::threshold::statistic_id)),
)
- .filter(schema::threshold::metric_kind_id.eq(metric_kind_id))
.filter(schema::threshold::branch_id.eq(branch_id))
.filter(schema::threshold::testbed_id.eq(testbed_id))
+ .filter(schema::threshold::measure_id.eq(measure_id))
.select((schema::threshold::id, QueryStatistic::as_select()))
.first::<(ThresholdId, QueryStatistic)>(conn)
.map(|(threshold_id, query_statistic)| {
diff --git a/services/api/src/model/project/report/results/mod.rs b/services/api/src/model/project/report/results/mod.rs
index 04aab7849..57fed7a8d 100644
--- a/services/api/src/model/project/report/results/mod.rs
+++ b/services/api/src/model/project/report/results/mod.rs
@@ -1,7 +1,7 @@
use std::collections::HashMap;
use bencher_adapter::{
- results::{adapter_metrics::AdapterMetrics, MetricKind},
+ results::{adapter_metrics::AdapterMetrics, Measure},
AdapterResults, AdapterResultsArray, Settings as AdapterSettings,
};
use bencher_json::{
@@ -22,8 +22,8 @@ use crate::{
model::project::{
benchmark::{BenchmarkId, QueryBenchmark},
branch::BranchId,
+ measure::{MeasureId, QueryMeasure},
metric::{InsertMetric, QueryMetric},
- metric_kind::{MetricKindId, QueryMetricKind},
perf::{InsertPerf, QueryPerf},
testbed::TestbedId,
ProjectId,
@@ -37,14 +37,14 @@ use detector::Detector;
use super::ReportId;
-/// `ReportResults` is used to add benchmarks, perf, metric kinds, metrics, and alerts.
+/// `ReportResults` processes a report's results: creating benchmarks, perfs, measures, and metrics, and running threshold detection to generate alerts.
pub struct ReportResults {
pub project_id: ProjectId,
pub branch_id: BranchId,
pub testbed_id: TestbedId,
pub report_id: ReportId,
- pub metric_kind_cache: HashMap,
- pub detector_cache: HashMap>,
+ pub measure_cache: HashMap,
+ pub detector_cache: HashMap>,
}
impl ReportResults {
@@ -59,7 +59,7 @@ impl ReportResults {
branch_id,
testbed_id,
report_id,
- metric_kind_cache: HashMap::new(),
+ measure_cache: HashMap::new(),
detector_cache: HashMap::new(),
}
}
@@ -149,10 +149,10 @@ impl ReportResults {
.map_err(resource_conflict_err!(Perf, insert_perf))?;
let perf_id = QueryPerf::get_id(conn, insert_perf.uuid)?;
- for (metric_kind_key, metric) in metrics.inner {
- let metric_kind_id = self.metric_kind_id(conn, metric_kind_key)?;
+ for (measure_key, metric) in metrics.inner {
+ let measure_id = self.measure_id(conn, measure_key)?;
- let insert_metric = InsertMetric::from_json(perf_id, metric_kind_id, metric);
+ let insert_metric = InsertMetric::from_json(perf_id, measure_id, metric);
diesel::insert_into(schema::metric::table)
.values(&insert_metric)
.execute(conn)
@@ -166,7 +166,7 @@ impl ReportResults {
// Ignored benchmarks do not get checked against the threshold even if one exists
if !ignore_benchmark {
- if let Some(detector) = self.detector(conn, metric_kind_id) {
+ if let Some(detector) = self.detector(conn, measure_id) {
let query_metric = QueryMetric::from_uuid(conn, insert_metric.uuid).map_err(|e| {
issue_error(
StatusCode::NOT_FOUND,
@@ -191,36 +191,26 @@ impl ReportResults {
QueryBenchmark::get_or_create(conn, self.project_id, benchmark_name).map_err(Into::into)
}
- fn metric_kind_id(
+ fn measure_id(
&mut self,
conn: &mut DbConnection,
- metric_kind_key: MetricKind,
- ) -> Result {
- Ok(
- if let Some(id) = self.metric_kind_cache.get(&metric_kind_key) {
- *id
- } else {
- let metric_kind_id =
- QueryMetricKind::get_or_create(conn, self.project_id, &metric_kind_key)?;
-
- self.metric_kind_cache
- .insert(metric_kind_key, metric_kind_id);
-
- metric_kind_id
- },
- )
+ measure_key: Measure,
+ ) -> Result {
+ Ok(if let Some(id) = self.measure_cache.get(&measure_key) {
+ *id
+ } else {
+ let measure_id = QueryMeasure::get_or_create(conn, self.project_id, &measure_key)?;
+ self.measure_cache.insert(measure_key, measure_id);
+ measure_id
+ })
}
- fn detector(
- &mut self,
- conn: &mut DbConnection,
- metric_kind_id: MetricKindId,
- ) -> Option {
- if let Some(detector) = self.detector_cache.get(&metric_kind_id) {
+ fn detector(&mut self, conn: &mut DbConnection, measure_id: MeasureId) -> Option {
+ if let Some(detector) = self.detector_cache.get(&measure_id) {
detector.clone()
} else {
- let detector = Detector::new(conn, metric_kind_id, self.branch_id, self.testbed_id);
- self.detector_cache.insert(metric_kind_id, detector.clone());
+ let detector = Detector::new(conn, self.branch_id, self.testbed_id, measure_id);
+ self.detector_cache.insert(measure_id, detector.clone());
detector
}
}
diff --git a/services/api/src/model/project/threshold/mod.rs b/services/api/src/model/project/threshold/mod.rs
index 6bb56c486..d65b7582d 100644
--- a/services/api/src/model/project/threshold/mod.rs
+++ b/services/api/src/model/project/threshold/mod.rs
@@ -9,7 +9,7 @@ use http::StatusCode;
use self::statistic::{InsertStatistic, QueryStatistic, StatisticId};
use super::{
branch::{BranchId, QueryBranch},
- metric_kind::{MetricKindId, QueryMetricKind},
+ measure::{MeasureId, QueryMeasure},
testbed::{QueryTestbed, TestbedId},
ProjectId, QueryProject,
};
@@ -36,9 +36,9 @@ pub struct QueryThreshold {
pub id: ThresholdId,
pub uuid: ThresholdUuid,
pub project_id: ProjectId,
- pub metric_kind_id: MetricKindId,
pub branch_id: BranchId,
pub testbed_id: TestbedId,
+ pub measure_id: MeasureId,
pub statistic_id: Option,
pub created: DateTime,
pub modified: DateTime,
@@ -81,9 +81,9 @@ impl QueryThreshold {
let Self {
uuid,
project_id,
- metric_kind_id,
branch_id,
testbed_id,
+ measure_id,
statistic_id,
created,
modified,
@@ -103,12 +103,15 @@ impl QueryThreshold {
sentry::capture_error(&err);
return Err(err);
};
+ let measure = QueryMeasure::get(conn, measure_id)?.into_json(conn)?;
Ok(JsonThreshold {
uuid,
project: QueryProject::get_uuid(conn, project_id)?,
- metric_kind: QueryMetricKind::get(conn, metric_kind_id)?.into_json(conn)?,
branch: QueryBranch::get(conn, branch_id)?.into_json(conn)?,
testbed: QueryTestbed::get(conn, testbed_id)?.into_json(conn)?,
+ // TODO: deprecated alias — remove `metric_kind` once API clients have migrated to `measure`
+ metric_kind: Some(measure.clone()),
+ measure,
statistic,
created,
modified,
@@ -169,9 +172,9 @@ impl QueryThreshold {
pub struct InsertThreshold {
pub uuid: ThresholdUuid,
pub project_id: ProjectId,
- pub metric_kind_id: MetricKindId,
pub branch_id: BranchId,
pub testbed_id: TestbedId,
+ pub measure_id: MeasureId,
pub statistic_id: Option,
pub created: DateTime,
pub modified: DateTime,
@@ -180,17 +183,17 @@ pub struct InsertThreshold {
impl InsertThreshold {
pub fn new(
project_id: ProjectId,
- metric_kind_id: MetricKindId,
branch_id: BranchId,
testbed_id: TestbedId,
+ measure_id: MeasureId,
) -> Self {
let timestamp = DateTime::now();
Self {
uuid: ThresholdUuid::new(),
project_id,
- metric_kind_id,
branch_id,
testbed_id,
+ measure_id,
statistic_id: None,
created: timestamp,
modified: timestamp,
@@ -200,14 +203,13 @@ impl InsertThreshold {
pub fn insert_from_json(
conn: &mut DbConnection,
project_id: ProjectId,
- metric_kind_id: MetricKindId,
branch_id: BranchId,
testbed_id: TestbedId,
+ measure_id: MeasureId,
json_statistic: JsonNewStatistic,
) -> Result {
// Create the new threshold
- let insert_threshold =
- InsertThreshold::new(project_id, metric_kind_id, branch_id, testbed_id);
+ let insert_threshold = InsertThreshold::new(project_id, branch_id, testbed_id, measure_id);
diesel::insert_into(schema::threshold::table)
.values(&insert_threshold)
.execute(conn)
@@ -241,16 +243,16 @@ impl InsertThreshold {
pub fn lower_boundary(
conn: &mut DbConnection,
project_id: ProjectId,
- metric_kind_id: MetricKindId,
branch_id: BranchId,
testbed_id: TestbedId,
+ measure_id: MeasureId,
) -> Result {
Self::insert_from_json(
conn,
project_id,
- metric_kind_id,
branch_id,
testbed_id,
+ measure_id,
JsonNewStatistic::lower_boundary(),
)
}
@@ -258,16 +260,16 @@ impl InsertThreshold {
pub fn upper_boundary(
conn: &mut DbConnection,
project_id: ProjectId,
- metric_kind_id: MetricKindId,
branch_id: BranchId,
testbed_id: TestbedId,
+ measure_id: MeasureId,
) -> Result {
Self::insert_from_json(
conn,
project_id,
- metric_kind_id,
branch_id,
testbed_id,
+ measure_id,
JsonNewStatistic::upper_boundary(),
)
}
diff --git a/services/api/src/schema.rs b/services/api/src/schema.rs
index 02e755ae9..8e18ffae0 100644
--- a/services/api/src/schema.rs
+++ b/services/api/src/schema.rs
@@ -57,19 +57,7 @@ diesel::table! {
}
diesel::table! {
- metric (id) {
- id -> Integer,
- uuid -> Text,
- perf_id -> Integer,
- metric_kind_id -> Integer,
- value -> Double,
- lower_value -> Nullable,
- upper_value -> Nullable,
- }
-}
-
-diesel::table! {
- metric_kind (id) {
+ measure (id) {
id -> Integer,
uuid -> Text,
project_id -> Integer,
@@ -81,6 +69,18 @@ diesel::table! {
}
}
+diesel::table! {
+ metric (id) {
+ id -> Integer,
+ uuid -> Text,
+ perf_id -> Integer,
+ measure_id -> Integer,
+ value -> Double,
+ lower_value -> Nullable,
+ upper_value -> Nullable,
+ }
+}
+
diesel::table! {
organization (id) {
id -> Integer,
@@ -207,9 +207,9 @@ diesel::table! {
id -> Integer,
uuid -> Text,
project_id -> Integer,
- metric_kind_id -> Integer,
branch_id -> Integer,
testbed_id -> Integer,
+ measure_id -> Integer,
statistic_id -> Nullable,
created -> BigInt,
modified -> BigInt,
@@ -260,9 +260,9 @@ diesel::joinable!(boundary -> threshold (threshold_id));
diesel::joinable!(branch -> project (project_id));
diesel::joinable!(branch_version -> branch (branch_id));
diesel::joinable!(branch_version -> version (version_id));
-diesel::joinable!(metric -> metric_kind (metric_kind_id));
+diesel::joinable!(measure -> project (project_id));
+diesel::joinable!(metric -> measure (measure_id));
diesel::joinable!(metric -> perf (perf_id));
-diesel::joinable!(metric_kind -> project (project_id));
diesel::joinable!(organization_role -> organization (organization_id));
diesel::joinable!(organization_role -> user (user_id));
diesel::joinable!(perf -> benchmark (benchmark_id));
@@ -277,7 +277,7 @@ diesel::joinable!(report -> user (user_id));
diesel::joinable!(report -> version (version_id));
diesel::joinable!(testbed -> project (project_id));
diesel::joinable!(threshold -> branch (branch_id));
-diesel::joinable!(threshold -> metric_kind (metric_kind_id));
+diesel::joinable!(threshold -> measure (measure_id));
diesel::joinable!(threshold -> project (project_id));
diesel::joinable!(threshold -> testbed (testbed_id));
diesel::joinable!(token -> user (user_id));
@@ -289,8 +289,8 @@ diesel::allow_tables_to_appear_in_same_query!(
boundary,
branch,
branch_version,
+ measure,
metric,
- metric_kind,
organization,
organization_role,
perf,
diff --git a/services/cli/src/bencher/sub/mod.rs b/services/cli/src/bencher/sub/mod.rs
index ac5a8b204..ca1950ccc 100644
--- a/services/cli/src/bencher/sub/mod.rs
+++ b/services/cli/src/bencher/sub/mod.rs
@@ -20,7 +20,7 @@ pub use mock::MockError;
use organization::{member::Member, organization::Organization};
pub use project::run::{runner::output::Output, RunError};
use project::{
- alert::Alert, benchmark::Benchmark, branch::Branch, metric_kind::MetricKind, perf::Perf,
+ alert::Alert, benchmark::Benchmark, branch::Branch, measure::Measure, perf::Perf,
project::Project, report::Report, run::Run, statistic::Statistic, testbed::Testbed,
threshold::Threshold,
};
@@ -38,12 +38,12 @@ pub enum Sub {
Plan(organization::plan::Plan),
Project(Project),
Run(Box),
- Perf(Perf),
Report(Report),
- MetricKind(MetricKind),
+ Perf(Perf),
Branch(Branch),
Testbed(Testbed),
Benchmark(Benchmark),
+ Measure(Measure),
Threshold(Threshold),
Statistic(Statistic),
Alert(Alert),
@@ -67,12 +67,12 @@ impl TryFrom for Sub {
CliSub::Plan(plan) => Self::Plan(plan.try_into()?),
CliSub::Project(project) => Self::Project(project.try_into()?),
CliSub::Run(run) => Self::Run(Box::new((*run).try_into()?)),
- CliSub::Perf(perf) => Self::Perf(perf.try_into()?),
CliSub::Report(report) => Self::Report(report.try_into()?),
- CliSub::MetricKind(metric_kind) => Self::MetricKind(metric_kind.try_into()?),
+ CliSub::Perf(perf) => Self::Perf(perf.try_into()?),
CliSub::Branch(branch) => Self::Branch(branch.try_into()?),
CliSub::Testbed(testbed) => Self::Testbed(testbed.try_into()?),
CliSub::Benchmark(benchmark) => Self::Benchmark(benchmark.try_into()?),
+ CliSub::Measure(measure) => Self::Measure(measure.try_into()?),
CliSub::Threshold(threshold) => Self::Threshold(threshold.try_into()?),
CliSub::Statistic(statistic) => Self::Statistic(statistic.try_into()?),
CliSub::Alert(alert) => Self::Alert(alert.try_into()?),
@@ -97,12 +97,12 @@ impl SubCmd for Sub {
Self::Plan(plan) => plan.exec().await,
Self::Project(project) => project.exec().await,
Self::Run(run) => run.exec().await,
- Self::Perf(perf) => perf.exec().await,
Self::Report(report) => report.exec().await,
- Self::MetricKind(metric_kind) => metric_kind.exec().await,
+ Self::Perf(perf) => perf.exec().await,
Self::Branch(branch) => branch.exec().await,
Self::Testbed(testbed) => testbed.exec().await,
Self::Benchmark(benchmark) => benchmark.exec().await,
+ Self::Measure(measure) => measure.exec().await,
Self::Threshold(threshold) => threshold.exec().await,
Self::Statistic(statistic) => statistic.exec().await,
Self::Alert(alert) => alert.exec().await,
diff --git a/services/cli/src/bencher/sub/project/metric_kind/create.rs b/services/cli/src/bencher/sub/project/measure/create.rs
similarity index 74%
rename from services/cli/src/bencher/sub/project/metric_kind/create.rs
rename to services/cli/src/bencher/sub/project/measure/create.rs
index c157d4135..6b4120a81 100644
--- a/services/cli/src/bencher/sub/project/metric_kind/create.rs
+++ b/services/cli/src/bencher/sub/project/measure/create.rs
@@ -1,12 +1,12 @@
use std::convert::TryFrom;
use async_trait::async_trait;
-use bencher_client::types::JsonNewMetricKind;
-use bencher_json::{JsonMetricKind, NonEmpty, ResourceId, Slug};
+use bencher_client::types::JsonNewMeasure;
+use bencher_json::{JsonMeasure, NonEmpty, ResourceId, Slug};
use crate::{
bencher::{backend::Backend, sub::SubCmd},
- parser::project::metric_kind::CliMetricKindCreate,
+ parser::project::measure::CliMeasureCreate,
CliError,
};
@@ -19,11 +19,11 @@ pub struct Create {
pub backend: Backend,
}
-impl TryFrom for Create {
+impl TryFrom for Create {
type Error = CliError;
- fn try_from(create: CliMetricKindCreate) -> Result {
- let CliMetricKindCreate {
+ fn try_from(create: CliMeasureCreate) -> Result {
+ let CliMeasureCreate {
project,
name,
slug,
@@ -40,7 +40,7 @@ impl TryFrom for Create {
}
}
-impl From for JsonNewMetricKind {
+impl From for JsonNewMeasure {
fn from(create: Create) -> Self {
let Create {
name, slug, units, ..
@@ -56,12 +56,12 @@ impl From for JsonNewMetricKind {
#[async_trait]
impl SubCmd for Create {
async fn exec(&self) -> Result<(), CliError> {
- let _json: JsonMetricKind = self
+ let _json: JsonMeasure = self
.backend
.send_with(
|client| async move {
client
- .proj_metric_kind_post()
+ .proj_measure_post()
.project(self.project.clone())
.body(self.clone())
.send()
diff --git a/services/cli/src/bencher/sub/project/metric_kind/delete.rs b/services/cli/src/bencher/sub/project/measure/delete.rs
similarity index 69%
rename from services/cli/src/bencher/sub/project/metric_kind/delete.rs
rename to services/cli/src/bencher/sub/project/measure/delete.rs
index 602e3d337..2cbd15ebd 100644
--- a/services/cli/src/bencher/sub/project/metric_kind/delete.rs
+++ b/services/cli/src/bencher/sub/project/measure/delete.rs
@@ -5,29 +5,29 @@ use bencher_json::{JsonEmpty, ResourceId};
use crate::{
bencher::{backend::Backend, sub::SubCmd},
- parser::project::metric_kind::CliMetricKindDelete,
+ parser::project::measure::CliMeasureDelete,
CliError,
};
#[derive(Debug)]
pub struct Delete {
pub project: ResourceId,
- pub metric_kind: ResourceId,
+ pub measure: ResourceId,
pub backend: Backend,
}
-impl TryFrom for Delete {
+impl TryFrom for Delete {
type Error = CliError;
- fn try_from(delete: CliMetricKindDelete) -> Result {
- let CliMetricKindDelete {
+ fn try_from(delete: CliMeasureDelete) -> Result {
+ let CliMeasureDelete {
project,
- metric_kind,
+ measure,
backend,
} = delete;
Ok(Self {
project,
- metric_kind,
+ measure,
backend: backend.try_into()?,
})
}
@@ -41,9 +41,9 @@ impl SubCmd for Delete {
.send_with(
|client| async move {
client
- .proj_metric_kind_delete()
+ .proj_measure_delete()
.project(self.project.clone())
- .metric_kind(self.metric_kind.clone())
+ .measure(self.measure.clone())
.send()
.await
},
diff --git a/services/cli/src/bencher/sub/project/metric_kind/list.rs b/services/cli/src/bencher/sub/project/measure/list.rs
similarity index 74%
rename from services/cli/src/bencher/sub/project/metric_kind/list.rs
rename to services/cli/src/bencher/sub/project/measure/list.rs
index d9823f012..6f45ea68b 100644
--- a/services/cli/src/bencher/sub/project/metric_kind/list.rs
+++ b/services/cli/src/bencher/sub/project/measure/list.rs
@@ -1,13 +1,13 @@
use std::convert::TryFrom;
use async_trait::async_trait;
-use bencher_client::types::{JsonDirection, ProjMetricKindsSort};
-use bencher_json::{JsonMetricKinds, NonEmpty, ResourceId};
+use bencher_client::types::{JsonDirection, ProjMeasuresSort};
+use bencher_json::{JsonMeasures, NonEmpty, ResourceId};
use crate::{
bencher::{backend::Backend, sub::SubCmd},
parser::{
- project::metric_kind::{CliMetricKindList, CliMetricKindsSort},
+ project::measure::{CliMeasureList, CliMeasuresSort},
CliPagination,
},
CliError,
@@ -23,17 +23,17 @@ pub struct List {
#[derive(Debug)]
pub struct Pagination {
- pub sort: Option<ProjMetricKindsSort>,
+ pub sort: Option<ProjMeasuresSort>,
pub direction: Option<JsonDirection>,
pub per_page: Option<u8>,
pub page: Option<u8>,
}
-impl TryFrom<CliMetricKindList> for List {
+impl TryFrom<CliMeasureList> for List {
type Error = CliError;
- fn try_from(list: CliMetricKindList) -> Result<Self, Self::Error> {
- let CliMetricKindList {
+ fn try_from(list: CliMeasureList) -> Result<Self, Self::Error> {
+ let CliMeasureList {
project,
name,
pagination,
@@ -48,8 +48,8 @@ impl TryFrom<CliMetricKindList> for List {
}
}
-impl From<CliPagination<CliMetricKindsSort>> for Pagination {
- fn from(pagination: CliPagination<CliMetricKindsSort>) -> Self {
+impl From<CliPagination<CliMeasuresSort>> for Pagination {
+ fn from(pagination: CliPagination<CliMeasuresSort>) -> Self {
let CliPagination {
sort,
direction,
@@ -58,7 +58,7 @@ impl From<CliPagination<CliMetricKindsSort>> for Pagination {
} = pagination;
Self {
sort: sort.map(|sort| match sort {
- CliMetricKindsSort::Name => ProjMetricKindsSort::Name,
+ CliMeasuresSort::Name => ProjMeasuresSort::Name,
}),
direction: direction.map(Into::into),
page,
@@ -70,11 +70,11 @@ impl From<CliPagination<CliMetricKindsSort>> for Pagination {
#[async_trait]
impl SubCmd for List {
async fn exec(&self) -> Result<(), CliError> {
- let _json: JsonMetricKinds = self
+ let _json: JsonMeasures = self
.backend
.send_with(
|client| async move {
- let mut client = client.proj_metric_kinds_get().project(self.project.clone());
+ let mut client = client.proj_measures_get().project(self.project.clone());
if let Some(name) = self.name.clone() {
client = client.name(name);
}
diff --git a/services/cli/src/bencher/sub/project/measure/mod.rs b/services/cli/src/bencher/sub/project/measure/mod.rs
new file mode 100644
index 000000000..eec4c8522
--- /dev/null
+++ b/services/cli/src/bencher/sub/project/measure/mod.rs
@@ -0,0 +1,45 @@
+use async_trait::async_trait;
+
+use crate::{bencher::sub::SubCmd, parser::project::measure::CliMeasure, CliError};
+
+mod create;
+mod delete;
+mod list;
+mod update;
+mod view;
+
+#[derive(Debug)]
+pub enum Measure {
+ List(list::List),
+ Create(create::Create),
+ View(view::View),
+ Update(update::Update),
+ Delete(delete::Delete),
+}
+
+impl TryFrom<CliMeasure> for Measure {
+ type Error = CliError;
+
+ fn try_from(measure: CliMeasure) -> Result<Self, Self::Error> {
+ Ok(match measure {
+ CliMeasure::List(list) => Self::List(list.try_into()?),
+ CliMeasure::Create(create) => Self::Create(create.try_into()?),
+ CliMeasure::View(view) => Self::View(view.try_into()?),
+ CliMeasure::Update(update) => Self::Update(update.try_into()?),
+ CliMeasure::Delete(delete) => Self::Delete(delete.try_into()?),
+ })
+ }
+}
+
+#[async_trait]
+impl SubCmd for Measure {
+ async fn exec(&self) -> Result<(), CliError> {
+ match self {
+ Self::List(list) => list.exec().await,
+ Self::Create(create) => create.exec().await,
+ Self::View(view) => view.exec().await,
+ Self::Update(update) => update.exec().await,
+ Self::Delete(delete) => delete.exec().await,
+ }
+ }
+}
diff --git a/services/cli/src/bencher/sub/project/metric_kind/update.rs b/services/cli/src/bencher/sub/project/measure/update.rs
similarity index 69%
rename from services/cli/src/bencher/sub/project/metric_kind/update.rs
rename to services/cli/src/bencher/sub/project/measure/update.rs
index 50aea8134..c5c814c2d 100644
--- a/services/cli/src/bencher/sub/project/metric_kind/update.rs
+++ b/services/cli/src/bencher/sub/project/measure/update.rs
@@ -1,32 +1,32 @@
use std::convert::TryFrom;
use async_trait::async_trait;
-use bencher_client::types::JsonUpdateMetricKind;
-use bencher_json::{JsonMetricKind, NonEmpty, ResourceId, Slug};
+use bencher_client::types::JsonUpdateMeasure;
+use bencher_json::{JsonMeasure, NonEmpty, ResourceId, Slug};
use crate::{
bencher::{backend::Backend, sub::SubCmd},
- parser::project::metric_kind::CliMetricKindUpdate,
+ parser::project::measure::CliMeasureUpdate,
CliError,
};
#[derive(Debug, Clone)]
pub struct Update {
pub project: ResourceId,
- pub metric_kind: ResourceId,
+ pub measure: ResourceId,
pub name: Option<NonEmpty>,
pub slug: Option<Slug>,
pub units: Option<NonEmpty>,
pub backend: Backend,
}
-impl TryFrom<CliMetricKindUpdate> for Update {
+impl TryFrom<CliMeasureUpdate> for Update {
type Error = CliError;
- fn try_from(create: CliMetricKindUpdate) -> Result<Self, Self::Error> {
- let CliMetricKindUpdate {
+ fn try_from(create: CliMeasureUpdate) -> Result<Self, Self::Error> {
+ let CliMeasureUpdate {
project,
- metric_kind,
+ measure,
name,
slug,
units,
@@ -34,7 +34,7 @@ impl TryFrom<CliMetricKindUpdate> for Update {
} = create;
Ok(Self {
project,
- metric_kind,
+ measure,
name,
slug,
units,
@@ -43,7 +43,7 @@ impl TryFrom<CliMetricKindUpdate> for Update {
}
}
-impl From<Update> for JsonUpdateMetricKind {
+impl From<Update> for JsonUpdateMeasure {
fn from(update: Update) -> Self {
let Update {
name, slug, units, ..
@@ -59,14 +59,14 @@ impl From<Update> for JsonUpdateMetricKind {
#[async_trait]
impl SubCmd for Update {
async fn exec(&self) -> Result<(), CliError> {
- let _json: JsonMetricKind = self
+ let _json: JsonMeasure = self
.backend
.send_with(
|client| async move {
client
- .proj_metric_kind_patch()
+ .proj_measure_patch()
.project(self.project.clone())
- .metric_kind(self.metric_kind.clone())
+ .measure(self.measure.clone())
.body(self.clone())
.send()
.await
diff --git a/services/cli/src/bencher/sub/project/metric_kind/view.rs b/services/cli/src/bencher/sub/project/measure/view.rs
similarity index 63%
rename from services/cli/src/bencher/sub/project/metric_kind/view.rs
rename to services/cli/src/bencher/sub/project/measure/view.rs
index be5ef6866..90489bb2c 100644
--- a/services/cli/src/bencher/sub/project/metric_kind/view.rs
+++ b/services/cli/src/bencher/sub/project/measure/view.rs
@@ -1,33 +1,33 @@
use std::convert::TryFrom;
use async_trait::async_trait;
-use bencher_json::{JsonMetricKind, ResourceId};
+use bencher_json::{JsonMeasure, ResourceId};
use crate::{
bencher::{backend::Backend, sub::SubCmd},
- parser::project::metric_kind::CliMetricKindView,
+ parser::project::measure::CliMeasureView,
CliError,
};
#[derive(Debug)]
pub struct View {
pub project: ResourceId,
- pub metric_kind: ResourceId,
+ pub measure: ResourceId,
pub backend: Backend,
}
-impl TryFrom<CliMetricKindView> for View {
+impl TryFrom