Commit 81dc87a (1 parent: 94aa0f0)
Showing 18 changed files with 247 additions and 279 deletions.
@@ -0,0 +1,136 @@
use std::collections::HashMap;
use std::fmt;
use std::sync::Arc;
use std::time::Duration;

use graph::data::query::QueryResults;
use graph::prelude::{DeploymentHash, GraphQLMetrics as GraphQLMetricsTrait, MetricsRegistry};
use graph::prometheus::{Gauge, Histogram, HistogramVec};

pub struct GraphQLMetrics {
    query_execution_time: Box<HistogramVec>,
    query_parsing_time: Box<HistogramVec>,
    query_validation_time: Box<HistogramVec>,
    query_result_size: Box<Histogram>,
    query_result_size_max: Box<Gauge>,
}

impl fmt::Debug for GraphQLMetrics {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "GraphQLMetrics {{ }}")
    }
}

impl GraphQLMetricsTrait for GraphQLMetrics {
    fn observe_query_execution(&self, duration: Duration, results: &QueryResults) {
        // Label the observation with the deployment hash, falling back to
        // "notfound"/"unknown" when no deployment can be attributed.
        let id = results
            .deployment_hash()
            .map(|h| h.as_str())
            .unwrap_or_else(|| {
                if results.not_found() {
                    "notfound"
                } else {
                    "unknown"
                }
            });
        let status = if results.has_errors() {
            "failed"
        } else {
            "success"
        };
        self.query_execution_time
            .with_label_values(&[id, status])
            .observe(duration.as_secs_f64());
    }

    fn observe_query_parsing(&self, duration: Duration, results: &QueryResults) {
        let id = results
            .deployment_hash()
            .map(|h| h.as_str())
            .unwrap_or_else(|| {
                if results.not_found() {
                    "notfound"
                } else {
                    "unknown"
                }
            });
        self.query_parsing_time
            .with_label_values(&[id])
            .observe(duration.as_secs_f64());
    }
}

impl GraphQLMetrics {
    pub fn new(registry: Arc<dyn MetricsRegistry>) -> Self {
        let query_execution_time = registry
            .new_histogram_vec(
                "query_execution_time",
                "Execution time for successful GraphQL queries",
                vec![String::from("deployment"), String::from("status")],
                vec![0.1, 0.5, 1.0, 10.0, 100.0],
            )
            .expect("failed to create `query_execution_time` histogram");
        let query_parsing_time = registry
            .new_histogram_vec(
                "query_parsing_time",
                "Parsing time for GraphQL queries",
                vec![String::from("deployment")],
                vec![0.1, 0.5, 1.0, 10.0, 100.0],
            )
            .expect("failed to create `query_parsing_time` histogram");

        let query_validation_time = registry
            .new_histogram_vec(
                "query_validation_time",
                "Validation time for GraphQL queries",
                vec![String::from("deployment")],
                vec![0.1, 0.5, 1.0, 10.0, 100.0],
            )
            .expect("failed to create `query_validation_time` histogram");

        // Bucket boundaries are powers of two from 2^10 to 2^31, in CacheWeight
        let bins = (10..32).map(|n| 2u64.pow(n) as f64).collect::<Vec<_>>();
        let query_result_size = registry
            .new_histogram(
                "query_result_size",
                "the size of the result of successful GraphQL queries (in CacheWeight)",
                bins,
            )
            .unwrap();

        let query_result_size_max = registry
            .new_gauge(
                "query_result_max",
                "the maximum size of a query result (in CacheWeight)",
                HashMap::new(),
            )
            .unwrap();

        Self {
            query_execution_time,
            query_parsing_time,
            query_validation_time,
            query_result_size,
            query_result_size_max,
        }
    }

    // Tests need to construct one of these, but normal code doesn't
    #[cfg(debug_assertions)]
    pub fn make(registry: Arc<dyn MetricsRegistry>) -> Self {
        Self::new(registry)
    }

    pub fn observe_query_validation(&self, duration: Duration, id: &DeploymentHash) {
        self.query_validation_time
            .with_label_values(&[id.as_str()])
            .observe(duration.as_secs_f64());
    }

    pub fn observe_query_result_size(&self, size: usize) {
        let size = size as f64;
        self.query_result_size.observe(size);
        if self.query_result_size_max.get() < size {
            self.query_result_size_max.set(size);
        }
    }
}
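
For orientation, a minimal usage sketch (placed in the same module as the code above), assuming an existing Arc<dyn MetricsRegistry> handle; obtaining one from a concrete registry implementation is outside this diff. The helper name record_sample and the sample values are hypothetical, while the GraphQLMetrics methods are the ones added in this file.

use std::sync::Arc;
use std::time::Duration;

use graph::prelude::{DeploymentHash, MetricsRegistry};

// Hypothetical helper; `registry` is assumed to come from whatever concrete
// MetricsRegistry implementation the node is configured with.
fn record_sample(registry: Arc<dyn MetricsRegistry>, id: &DeploymentHash) {
    let metrics = GraphQLMetrics::new(registry);

    // Record a 25 ms validation phase for the given deployment ...
    metrics.observe_query_validation(Duration::from_millis(25), id);
    // ... and the CacheWeight of a hypothetical 4 kB result, which also
    // bumps the `query_result_max` gauge if this is a new maximum.
    metrics.observe_query_result_size(4096);
}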