feat: return timestamp 0 when no values in the db
mateuszjasiuk committed Jul 15, 2024
1 parent 158866b commit efc4d09
Showing 5 changed files with 40 additions and 4 deletions.
2 changes: 2 additions & 0 deletions Cargo.toml

@@ -83,3 +83,5 @@ duration-str = "0.7.1"
 fake = { version = "2.9.2", features = ["derive"] }
 rand = "0.8.5"
 bigdecimal = "0.4.5"
+strum = "0.26.3"
+strum_macros = "0.26.3"
1 change: 1 addition & 0 deletions swagger.yml

@@ -565,6 +565,7 @@ paths:
           properties:
             name:
              type: string
+             enum: [chain, governance, parameters, pos, rewards, transactions]
            timestamp:
              type: number
2 changes: 2 additions & 0 deletions webserver/Cargo.toml

@@ -47,6 +47,8 @@ namada_parameters.workspace = true
 deadpool-redis = "0.13.0"
 bigdecimal.workspace = true
 shared.workspace = true
+strum.workspace = true
+strum_macros.workspace = true
 
 [build-dependencies]
 vergen = { version = "8.0.0", features = ["build", "git", "gitcl"] }
6 changes: 5 additions & 1 deletion webserver/src/dto/crawler_state.rs

@@ -1,8 +1,12 @@
 use serde::{Deserialize, Serialize};
+use strum::{Display, VariantArray};
 use validator::Validate;
 
-#[derive(Clone, Debug, Serialize, Deserialize)]
+#[derive(
+    Clone, Debug, Serialize, Deserialize, Display, VariantArray, PartialEq,
+)]
 #[serde(rename_all = "camelCase")]
+#[strum(serialize_all = "camelCase")]
 pub enum CrawlerNameDto {
     Chain,
     Governance,
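For context, a minimal standalone sketch of what the new strum derives provide (a hypothetical trimmed copy of the enum: the remaining variants are assumed to mirror the swagger enum above, and the serde derives are dropped to keep the snippet dependency-free). `VariantArray` exposes every variant as a const slice, and `Display` with `serialize_all = "camelCase"` renders each variant as the same camelCase string the API reports.

use strum::{Display, VariantArray};

// Trimmed-down copy of the enum, for illustration only.
#[derive(Clone, Debug, Display, VariantArray, PartialEq)]
#[strum(serialize_all = "camelCase")]
enum CrawlerNameDto {
    Chain,
    Governance,
    Parameters,
    Pos,
    Rewards,
    Transactions,
}

fn main() {
    // VariantArray: a const slice containing every variant.
    assert_eq!(CrawlerNameDto::VARIANTS.len(), 6);
    // Display + serialize_all = "camelCase": variants render as camelCase strings.
    assert_eq!(CrawlerNameDto::Chain.to_string(), "chain");
    assert_eq!(CrawlerNameDto::Governance.to_string(), "governance");
}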
33 changes: 30 additions & 3 deletions webserver/src/handler/crawler_state.rs

@@ -2,8 +2,9 @@ use axum::extract::State;
 use axum::http::HeaderMap;
 use axum::Json;
 use axum_extra::extract::Query;
+use strum::VariantArray;
 
-use crate::dto::crawler_state::CrawlerStateQueryParams;
+use crate::dto::crawler_state::{CrawlerNameDto, CrawlerStateQueryParams};
 use crate::error::api::ApiError;
 use crate::response::crawler_state::CrawlersTimestamps;
 use crate::state::common::CommonState;
@@ -17,8 +18,34 @@ pub async fn get_crawlers_timestamps(
 
     let timestamps = state
         .crawler_state_service
-        .get_timestamps(crawler_names)
+        .get_timestamps(crawler_names.clone())
         .await?;
 
-    Ok(Json(timestamps))
+    let variants = if crawler_names.is_empty() {
+        CrawlerNameDto::VARIANTS.to_vec()
+    } else {
+        CrawlerNameDto::VARIANTS
+            .iter()
+            .filter(|variant| crawler_names.contains(variant))
+            .cloned()
+            .collect::<Vec<_>>()
+    };
+
+    let timestamps_with_defaults = variants
+        .into_iter()
+        .map(|variant| {
+            timestamps
+                .iter()
+                .find(|timestamp| timestamp.name == variant.to_string())
+                .map_or_else(
+                    || CrawlersTimestamps {
+                        name: variant.to_string(),
+                        timestamp: 0,
+                    },
+                    |ct| ct.clone(),
+                )
+        })
+        .collect::<Vec<_>>();
+
+    Ok(Json(timestamps_with_defaults))
 }
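The handler change reduces to a simple merge: take every requested crawler (or every known crawler when the query is empty), keep the stored timestamp when the database has one, and fall back to 0 otherwise. A minimal standalone sketch of that merge, with `CrawlersTimestamps` redeclared here under an assumed `i64` timestamp field (the real struct lives in webserver/src/response/crawler_state.rs):

// Stand-in for the real response type; field types are assumed.
#[derive(Clone, Debug, PartialEq)]
struct CrawlersTimestamps {
    name: String,
    timestamp: i64,
}

// For every requested crawler name, return the stored timestamp if one
// exists, or a zero timestamp otherwise.
fn with_defaults(
    requested: &[&str],
    stored: &[CrawlersTimestamps],
) -> Vec<CrawlersTimestamps> {
    requested
        .iter()
        .map(|name| {
            stored
                .iter()
                .find(|ct| ct.name == *name)
                .cloned()
                .unwrap_or_else(|| CrawlersTimestamps {
                    name: name.to_string(),
                    timestamp: 0,
                })
        })
        .collect()
}

fn main() {
    let stored = vec![CrawlersTimestamps {
        name: "chain".to_string(),
        timestamp: 1_721_000_000,
    }];
    let merged = with_defaults(&["chain", "rewards"], &stored);
    // "chain" keeps its stored timestamp; "rewards" falls back to 0.
    assert_eq!(merged[0].timestamp, 1_721_000_000);
    assert_eq!(merged[1].timestamp, 0);
}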
