diff --git a/clearing-house-app/Cargo.lock b/clearing-house-app/Cargo.lock
index e9395ea..afafe41 100644
--- a/clearing-house-app/Cargo.lock
+++ b/clearing-house-app/Cargo.lock
@@ -1333,6 +1333,8 @@ dependencies = [
  "serde_derive",
  "serde_json",
  "sha2 0.10.7",
+ "tracing",
+ "tracing-subscriber",
 ]
 
 [[package]]
diff --git a/clearing-house-app/logging-service/Cargo.toml b/clearing-house-app/logging-service/Cargo.toml
index 0e9ebf9..64733e4 100644
--- a/clearing-house-app/logging-service/Cargo.toml
+++ b/clearing-house-app/logging-service/Cargo.toml
@@ -29,3 +29,5 @@ sha2 = "0.10.7"
 generic-array = "0.14.7"
 openssl = "0.10.56"
 config = { version = "0.13.3", default-features = false, features = ["toml"] }
+tracing = "0.1.37"
+tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
diff --git a/clearing-house-app/logging-service/config.toml b/clearing-house-app/logging-service/config.toml
index 5e89afe..fe0d348 100644
--- a/clearing-house-app/logging-service/config.toml
+++ b/clearing-house-app/logging-service/config.toml
@@ -1,3 +1,4 @@
+log_level = "DEBUG" # TRACE, DEBUG, INFO, WARN, ERROR
 document_database_url= "mongodb://localhost:27017"
 process_database_url= "mongodb://localhost:27017"
 keyring_database_url= "mongodb://localhost:27017"
diff --git a/clearing-house-app/logging-service/src/config.rs b/clearing-house-app/logging-service/src/config.rs
new file mode 100644
index 0000000..6657a21
--- /dev/null
+++ b/clearing-house-app/logging-service/src/config.rs
@@ -0,0 +1,69 @@
+#[derive(Debug, serde::Deserialize)]
+pub(crate) struct CHConfig {
+    pub(crate) process_database_url: String,
+    pub(crate) keyring_database_url: String,
+    pub(crate) document_database_url: String,
+    pub(crate) clear_db: bool,
+    #[serde(default)]
+    pub(crate) log_level: Option<LogLevel>,
+}
+
+#[derive(Debug, serde::Deserialize)]
+#[serde(rename_all = "UPPERCASE")]
+pub(crate) enum LogLevel {
+    Trace,
+    Debug,
+    Info,
+    Warn,
+    Error,
+}
+
+impl Into<tracing::Level> for LogLevel {
+    fn into(self) -> tracing::Level {
+        match self {
+            LogLevel::Trace => tracing::Level::TRACE,
+            LogLevel::Debug => tracing::Level::DEBUG,
+            LogLevel::Info => tracing::Level::INFO,
+            LogLevel::Warn => tracing::Level::WARN,
+            LogLevel::Error => tracing::Level::ERROR,
+        }
+    }
+}
+
+impl ToString for LogLevel {
+    fn to_string(&self) -> String {
+        match self {
+            LogLevel::Trace => String::from("TRACE"),
+            LogLevel::Debug => String::from("DEBUG"),
+            LogLevel::Info => String::from("INFO"),
+            LogLevel::Warn => String::from("WARN"),
+            LogLevel::Error => String::from("ERROR"),
+        }
+    }
+}
+
+pub(crate) fn read_config() -> CHConfig {
+    let conf = config::Config::builder()
+        .add_source(config::File::with_name("config.toml"))
+        .add_source(config::Environment::with_prefix("CH_APP_"))
+        .build()
+        .expect("Failure to read configuration! Exiting...");
+
+    let conf: CHConfig = conf.try_deserialize().expect("Failure to read configuration! Exiting...");
Exiting..."); + tracing::trace!(config = ?conf, "Config read"); + + conf +} + +pub(crate) fn configure_logging(log_level: Option) { + if std::env::var("RUST_LOG").is_err() { + if let Some(level) = log_level { + std::env::set_var("RUST_LOG", level.to_string()); + } + } + + // setup logging + tracing_subscriber::fmt() + .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) + .init(); +} \ No newline at end of file diff --git a/clearing-house-app/logging-service/src/db/key_store.rs b/clearing-house-app/logging-service/src/db/key_store.rs index 231f635..1abbbb8 100644 --- a/clearing-house-app/logging-service/src/db/key_store.rs +++ b/clearing-house-app/logging-service/src/db/key_store.rs @@ -27,19 +27,19 @@ impl KeyStore { /// Only one master key may exist in the database. pub async fn store_master_key(&self, key: MasterKey) -> anyhow::Result{ - debug!("Storing new master key..."); + tracing::debug!("Storing new master key..."); let coll = self.database.collection::(MONGO_COLL_MASTER_KEY); - debug!("... but first check if there's already one."); + tracing::debug!("... but first check if there's already one."); let result= coll.find(None, None).await .expect("Error retrieving the master keys") .try_collect().await.unwrap_or_else(|_| vec![]); if result.len() > 1{ - error!("Master Key table corrupted!"); + tracing::error!("Master Key table corrupted!"); exit(1); } if result.len() == 1{ - error!("Master key already exists!"); + tracing::error!("Master key already exists!"); Ok(false) } else{ @@ -50,7 +50,7 @@ impl KeyStore { Ok(true) }, Err(e) => { - error!("master key could not be stored: {:?}", &e); + tracing::error!("master key could not be stored: {:?}", &e); panic!("master key could not be stored") } } @@ -65,14 +65,14 @@ impl KeyStore { .try_collect().await.unwrap_or_else(|_| vec![]); if result.len() > 1{ - error!("Master Key table corrupted!"); + tracing::error!("Master Key table corrupted!"); exit(1); } if result.len() == 1{ Ok(result[0].clone()) } else { - error!("Master Key missing!"); + tracing::error!("Master Key missing!"); exit(1); } } @@ -82,11 +82,11 @@ impl KeyStore { let coll = self.database.collection::(MONGO_COLL_DOC_TYPES); match coll.insert_one(doc_type.clone(), None).await { Ok(_r) => { - debug!("added new document type: {}", &_r.inserted_id); + tracing::debug!("added new document type: {}", &_r.inserted_id); Ok(()) }, Err(e) => { - error!("failed to log document type {}", &doc_type.id); + tracing::error!("failed to log document type {}", &doc_type.id); Err(Error::from(e)) } } @@ -111,7 +111,7 @@ impl KeyStore { match result { Some(_r) => Ok(true), None => { - debug!("document type with id {} and pid {:?} does not exist!", &dt_id, &pid); + tracing::debug!("document type with id {} and pid {:?} does not exist!", &dt_id, &pid); Ok(false) } } @@ -126,11 +126,11 @@ impl KeyStore { pub async fn get_document_type(&self, dt_id: &String) -> Result> { let coll = self.database.collection::(MONGO_COLL_DOC_TYPES); - debug!("get_document_type for dt_id: '{}'", dt_id); + tracing::debug!("get_document_type for dt_id: '{}'", dt_id); match coll.find_one(Some(doc! { MONGO_ID: dt_id}), None).await{ Ok(result) => Ok(result), Err(e) => { - error!("error while getting document type with id {}!", dt_id); + tracing::error!("error while getting document type with id {}!", dt_id); Err(Error::from(e)) } } @@ -141,15 +141,15 @@ impl KeyStore { match coll.replace_one(doc! 
             Ok(r) => {
                 if r.matched_count != 1 || r.modified_count != 1{
-                    warn!("while replacing doc type {} matched '{}' dts and modified '{}'", id, r.matched_count, r.modified_count);
+                    tracing::warn!("while replacing doc type {} matched '{}' dts and modified '{}'", id, r.matched_count, r.modified_count);
                 }
                 else{
-                    debug!("while replacing doc type {} matched '{}' dts and modified '{}'", id, r.matched_count, r.modified_count);
+                    tracing::debug!("while replacing doc type {} matched '{}' dts and modified '{}'", id, r.matched_count, r.modified_count);
                 }
                 Ok(true)
             },
             Err(e) => {
-                error!("error while updating document type with id {}: {:#?}", id, e);
+                tracing::error!("error while updating document type with id {}: {:#?}", id, e);
                 Ok(false)
             }
         }
diff --git a/clearing-house-app/logging-service/src/main.rs b/clearing-house-app/logging-service/src/main.rs
index c3fe97e..4a3a356 100644
--- a/clearing-house-app/logging-service/src/main.rs
+++ b/clearing-house-app/logging-service/src/main.rs
@@ -1,11 +1,12 @@
 #![forbid(unsafe_code)]
 #[macro_use]
-extern crate rocket;
+extern crate tracing;
 
 use std::path::Path;
 
 use core_lib::util::{add_service_config};
 use rocket::fairing::AdHoc;
+use tracing::subscriber;
 use core_lib::constants::ENV_LOGGING_SERVICE_ID;
 use db::config::doc_store::DatastoreConfigurator;
 use db::config::keyring_store::KeyringDbConfigurator;
@@ -17,6 +18,7 @@ mod model;
 mod services;
 mod crypto;
 mod ports;
+mod config;
 
 pub fn add_signing_key() -> AdHoc {
     AdHoc::try_on_ignite("Adding Signing Key", |rocket| async {
@@ -24,35 +26,17 @@ pub fn add_signing_key() -> AdHoc {
         if Path::new(&private_key_path).exists() {
             Ok(rocket.manage(private_key_path))
         } else {
-            error!("Signing key not found! Aborting startup! Please configure signing_key!");
+            tracing::error!("Signing key not found! Aborting startup! Please configure signing_key!");
             return Err(rocket);
         }
     })
 }
 
-#[derive(Debug, serde::Deserialize)]
-struct CHConfig {
-    process_database_url: String,
-    keyring_database_url: String,
-    document_database_url: String,
-    clear_db: bool,
-}
-
 #[rocket::main]
 async fn main() -> Result<(), rocket::Error> {
     // Read configuration
-    let conf = config::Config::builder()
-        .add_source(config::File::with_name("config.toml"))
-        .add_source(config::Environment::with_prefix("CH_APP_"))
-        .build()
-        .expect("Failure to read configuration! Exiting...");
-
-    // setup logging
-    // TODO: Setup tracing_subscriber
-
-    let conf: CHConfig = conf.try_deserialize().expect("Failure to read configuration! Exiting...");
Exiting..."); - println!("Config: {:?}", conf); - + let conf = config::read_config(); + config::configure_logging(conf.log_level); let process_store = ProcessStoreConfigurator::init_process_store(String::from(conf.process_database_url), conf.clear_db) diff --git a/clearing-house-app/logging-service/src/ports/doc_type_api.rs b/clearing-house-app/logging-service/src/ports/doc_type_api.rs index 4f4d5bb..9b0bcb8 100644 --- a/clearing-house-app/logging-service/src/ports/doc_type_api.rs +++ b/clearing-house-app/logging-service/src/ports/doc_type_api.rs @@ -7,7 +7,7 @@ use rocket::serde::json::{json,Json}; use crate::services::keyring_service::KeyringService; use crate::model::doc_type::DocumentType; -#[post("/", format = "json", data = "")] +#[rocket::post("/", format = "json", data = "")] async fn create_doc_type(key_api: &State, doc_type: Json) -> ApiResponse { match key_api.inner().create_doc_type(doc_type.into_inner()).await{ Ok(dt) => ApiResponse::SuccessCreate(json!(dt)), @@ -18,7 +18,7 @@ async fn create_doc_type(key_api: &State, doc_type: Json", format = "json", data = "")] +#[rocket::post("/", format = "json", data = "")] async fn update_doc_type(key_api: &State, id: String, doc_type: Json) -> ApiResponse { match key_api.inner().update_doc_type(id, doc_type.into_inner()).await{ Ok(id) => ApiResponse::SuccessOk(json!(id)), @@ -29,12 +29,12 @@ async fn update_doc_type(key_api: &State, id: String, doc_type: } } -#[delete("/", format = "json")] +#[rocket::delete("/", format = "json")] async fn delete_default_doc_type(key_api: &State, id: String) -> ApiResponse{ delete_doc_type(key_api, id, DEFAULT_PROCESS_ID.to_string()).await } -#[delete("//", format = "json")] +#[rocket::delete("//", format = "json")] async fn delete_doc_type(key_api: &State, id: String, pid: String) -> ApiResponse{ match key_api.inner().delete_doc_type(id, pid).await{ Ok(id) => ApiResponse::SuccessOk(json!(id)), @@ -45,12 +45,12 @@ async fn delete_doc_type(key_api: &State, id: String, pid: Strin } } -#[get("/", format = "json")] +#[rocket::get("/", format = "json")] async fn get_default_doc_type(key_api: &State, id: String) -> ApiResponse { get_doc_type(key_api, id, DEFAULT_PROCESS_ID.to_string()).await } -#[get("//", format = "json")] +#[rocket::get("//", format = "json")] async fn get_doc_type(key_api: &State, id: String, pid: String) -> ApiResponse { match key_api.inner().get_doc_type(id, pid).await{ Ok(dt) => { @@ -66,7 +66,7 @@ async fn get_doc_type(key_api: &State, id: String, pid: String) } } -#[get("/", format = "json")] +#[rocket::get("/", format = "json")] async fn get_doc_types(key_api: &State) -> ApiResponse { match key_api.inner().get_doc_types().await{ Ok(dt) => ApiResponse::SuccessOk(json!(dt)), @@ -80,7 +80,7 @@ async fn get_doc_types(key_api: &State) -> ApiResponse { pub fn mount_api() -> AdHoc { AdHoc::on_ignite("Mounting Document Type API", |rocket| async { rocket - .mount(ROCKET_DOC_TYPE_API, routes![create_doc_type, + .mount(ROCKET_DOC_TYPE_API, rocket::routes![create_doc_type, update_doc_type, delete_default_doc_type, delete_doc_type, get_default_doc_type, get_doc_type , get_doc_types]) }) diff --git a/clearing-house-app/logging-service/src/ports/logging_api.rs b/clearing-house-app/logging-service/src/ports/logging_api.rs index 8ac0bac..8151caa 100644 --- a/clearing-house-app/logging-service/src/ports/logging_api.rs +++ b/clearing-house-app/logging-service/src/ports/logging_api.rs @@ -13,7 +13,7 @@ use crate::model::ids::request::ClearingHouseMessage; use 
 use crate::services::logging_service::LoggingService;
 
-#[post("/<pid>", format = "json", data = "<message>")]
+#[rocket::post("/<pid>", format = "json", data = "<message>")]
 async fn log(
     ch_claims: ChClaims,
     logging_api: &State<LoggingService>,
@@ -30,7 +30,7 @@ async fn log(
     }
 }
 
-#[post("/<pid>", format = "json", data = "<message>")]
+#[rocket::post("/<pid>", format = "json", data = "<message>")]
 async fn create_process(
     ch_claims: ChClaims,
     logging_api: &State<LoggingService>,
@@ -46,17 +46,17 @@ async fn create_process(
     }
 }
 
-#[post("/<_pid>", format = "json", rank = 50)]
+#[rocket::post("/<_pid>", format = "json", rank = 50)]
 async fn unauth(_pid: Option<String>) -> ApiResponse {
     ApiResponse::Unauthorized(String::from("Token not valid!"))
 }
 
-#[post("/<_pid>/<_id>", format = "json", rank = 50)]
+#[rocket::post("/<_pid>/<_id>", format = "json", rank = 50)]
 async fn unauth_id(_pid: Option<String>, _id: Option<String>) -> ApiResponse {
     ApiResponse::Unauthorized(String::from("Token not valid!"))
 }
 
-#[post("/<pid>?<page>&<size>&<sort>&<date_to>&<date_from>", format = "json", data = "<message>")]
+#[rocket::post("/<pid>?<page>&<size>&<sort>&<date_to>&<date_from>", format = "json", data = "<message>")]
 async fn query_pid(
     ch_claims: ChClaims,
     logging_api: &State<LoggingService>,
@@ -77,7 +77,7 @@ async fn query_pid(
     }
 }
 
-#[post("/<pid>/<id>", format = "json", data = "<message>")]
+#[rocket::post("/<pid>/<id>", format = "json", data = "<message>")]
 async fn query_id(
     ch_claims: ChClaims,
     logging_api: &State<LoggingService>,
@@ -94,7 +94,7 @@ async fn query_id(
     }
 }
 
-#[get("/.well-known/jwks.json", format = "json")]
+#[rocket::get("/.well-known/jwks.json", format = "json")]
 async fn get_public_sign_key(key_path: &State<String>) -> ApiResponse {
     match get_jwks(key_path.as_str()) {
         Some(jwks) => ApiResponse::SuccessOk(json!(jwks)),
@@ -105,10 +105,10 @@
 pub fn mount_api() -> AdHoc {
     AdHoc::on_ignite("Mounting Clearing House API", |rocket| async {
         rocket
-            .mount(format!("{}{}", ROCKET_CLEARING_HOUSE_BASE_API, ROCKET_LOG_API).as_str(), routes![log, unauth])
-            .mount(format!("{}", ROCKET_PROCESS_API).as_str(), routes![create_process, unauth])
+            .mount(format!("{}{}", ROCKET_CLEARING_HOUSE_BASE_API, ROCKET_LOG_API).as_str(), rocket::routes![log, unauth])
+            .mount(format!("{}", ROCKET_PROCESS_API).as_str(), rocket::routes![create_process, unauth])
             .mount(format!("{}{}", ROCKET_CLEARING_HOUSE_BASE_API, ROCKET_QUERY_API).as_str(),
-                   routes![query_id, query_pid, unauth, unauth_id])
-            .mount(format!("{}", ROCKET_PK_API).as_str(), routes![get_public_sign_key])
+                   rocket::routes![query_id, query_pid, unauth, unauth_id])
+            .mount(format!("{}", ROCKET_PK_API).as_str(), rocket::routes![get_public_sign_key])
     })
 }
\ No newline at end of file
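
Note on the logging setup introduced above: configure_logging() only falls back to the log_level from config.toml when RUST_LOG is unset, so an explicit RUST_LOG filter still takes precedence. A minimal usage sketch (the binary name is assumed here for illustration):

    RUST_LOG=info ./logging-service   # env filter wins over config.toml's log_level = "DEBUG"
    ./logging-service                 # with RUST_LOG unset, log_level from config.toml is applied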