diff --git a/Cargo.toml b/Cargo.toml
index 25378cb5..a3e05211 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -83,3 +83,5 @@ duration-str = "0.7.1"
 fake = { version = "2.9.2", features = ["derive"] }
 rand = "0.8.5"
 bigdecimal = "0.4.5"
+strum = "0.26.3"
+strum_macros = "0.26.3"
diff --git a/swagger.yml b/swagger.yml
index 1aa830b8..f4760f5b 100644
--- a/swagger.yml
+++ b/swagger.yml
@@ -565,6 +565,7 @@ paths:
                     properties:
                       name:
                         type: string
+                        enum: [chain, governance, parameters, pos, rewards, transactions]
                       timestamp:
                         type: number
 
diff --git a/webserver/Cargo.toml b/webserver/Cargo.toml
index ae6764d6..e0acd047 100644
--- a/webserver/Cargo.toml
+++ b/webserver/Cargo.toml
@@ -47,6 +47,8 @@ namada_parameters.workspace = true
 deadpool-redis = "0.13.0"
 bigdecimal.workspace = true
 shared.workspace = true
+strum.workspace = true
+strum_macros.workspace = true
 
 [build-dependencies]
 vergen = { version = "8.0.0", features = ["build", "git", "gitcl"] }
diff --git a/webserver/src/dto/crawler_state.rs b/webserver/src/dto/crawler_state.rs
index 094d592e..cee4a552 100644
--- a/webserver/src/dto/crawler_state.rs
+++ b/webserver/src/dto/crawler_state.rs
@@ -1,8 +1,12 @@
 use serde::{Deserialize, Serialize};
+use strum::{Display, VariantArray};
 use validator::Validate;
 
-#[derive(Clone, Debug, Serialize, Deserialize)]
+#[derive(
+    Clone, Debug, Serialize, Deserialize, Display, VariantArray, PartialEq,
+)]
 #[serde(rename_all = "camelCase")]
+#[strum(serialize_all = "camelCase")]
 pub enum CrawlerNameDto {
     Chain,
     Governance,
diff --git a/webserver/src/handler/crawler_state.rs b/webserver/src/handler/crawler_state.rs
index df5d39c3..eecbf5bf 100644
--- a/webserver/src/handler/crawler_state.rs
+++ b/webserver/src/handler/crawler_state.rs
@@ -2,8 +2,9 @@ use axum::extract::State;
 use axum::http::HeaderMap;
 use axum::Json;
 use axum_extra::extract::Query;
+use strum::VariantArray;
 
-use crate::dto::crawler_state::CrawlerStateQueryParams;
+use crate::dto::crawler_state::{CrawlerNameDto, CrawlerStateQueryParams};
 use crate::error::api::ApiError;
 use crate::response::crawler_state::CrawlersTimestamps;
 use crate::state::common::CommonState;
@@ -17,8 +18,34 @@ pub async fn get_crawlers_timestamps(
 
     let timestamps = state
         .crawler_state_service
-        .get_timestamps(crawler_names)
+        .get_timestamps(crawler_names.clone())
         .await?;
 
-    Ok(Json(timestamps))
+    let variants = if crawler_names.is_empty() {
+        CrawlerNameDto::VARIANTS.to_vec()
+    } else {
+        CrawlerNameDto::VARIANTS
+            .iter()
+            .filter(|variant| crawler_names.contains(variant))
+            .cloned()
+            .collect::<Vec<_>>()
+    };
+
+    let timestamps_with_defaults = variants
+        .into_iter()
+        .map(|variant| {
+            timestamps
+                .iter()
+                .find(|timestamp| timestamp.name == variant.to_string())
+                .map_or_else(
+                    || CrawlersTimestamps {
+                        name: variant.to_string(),
+                        timestamp: 0,
+                    },
+                    |ct| ct.clone(),
+                )
+        })
+        .collect::<Vec<_>>();
+
+    Ok(Json(timestamps_with_defaults))
 }
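
Note on the strum usage above: the sketch below is a minimal standalone illustration, not part of the patch, assuming strum = "0.26" and strum_macros = "0.26" as added in the Cargo.toml hunks; the enum merely mirrors CrawlerNameDto. It shows the two pieces the handler relies on: the Display derive with #[strum(serialize_all = "camelCase")] produces the lowercase names listed in the swagger enum, and the VariantArray trait exposes a static VARIANTS slice, which the handler filters by the requested names and uses to backfill a timestamp of 0 for any crawler the service returned no row for.

// Standalone sketch; the enum mirrors CrawlerNameDto from
// webserver/src/dto/crawler_state.rs for illustration only.
use strum::VariantArray; // trait providing the associated `VARIANTS` slice

#[derive(Debug, PartialEq, strum_macros::Display, strum_macros::VariantArray)]
#[strum(serialize_all = "camelCase")]
enum CrawlerNameDto {
    Chain,
    Governance,
    Parameters,
    Pos,
    Rewards,
    Transactions,
}

fn main() {
    // Display uses the camelCase serialization, matching the swagger enum values.
    assert_eq!(CrawlerNameDto::Pos.to_string(), "pos");
    assert_eq!(CrawlerNameDto::Transactions.to_string(), "transactions");

    // VARIANTS is a compile-time list of every variant; the handler iterates it so the
    // response always contains one entry per (requested) crawler, defaulting to 0.
    assert_eq!(CrawlerNameDto::VARIANTS.len(), 6);
    assert!(CrawlerNameDto::VARIANTS.contains(&CrawlerNameDto::Chain));
}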