style: reformat doc comments and reorganize imports
yinho999 committed Jan 25, 2025
1 parent 04a1c4c commit a75bd59
Showing 18 changed files with 137 additions and 92 deletions.
7 changes: 4 additions & 3 deletions src/app.rs
@@ -143,10 +143,11 @@ pub trait Hooks: Send {
Ok(false)
}

/// Loads the configuration settings for the application based on the given environment.
/// Loads the configuration settings for the application based on the given
/// environment.
///
/// This function is responsible for retrieving the configuration for the application
/// based on the current environment.
/// This function is responsible for retrieving the configuration for the
/// application based on the current environment.
async fn load_config(env: &Environment) -> Result<Config> {
env.load()
}
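
For context on the hook reformatted above, a minimal sketch of what the default behaviour resolves to; the `Environment::Development` variant name is assumed here and is not part of this diff:

```rust
use loco_rs::{config::Config, environment::Environment, Result};

// Mirrors what the default `Hooks::load_config` body does: load and
// deserialize the configuration file for the given environment
// (e.g. config/development.yaml).
fn load_development_config() -> Result<Config> {
    let env = Environment::Development; // assumed variant name
    env.load()
}
```
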
56 changes: 34 additions & 22 deletions src/bgworker/mod.rs
@@ -346,13 +346,16 @@ impl Queue {
}
}

/// Cancels jobs based on the given job name for the configured queue provider.
/// Cancels jobs based on the given job name for the configured queue
/// provider.
///
/// # Errors
/// - If no queue provider is configured, it will return an error indicating the lack of configuration.
/// - If the Redis provider is selected, it will return an error stating that cancellation is not supported.
/// - Any error in the underlying provider's cancellation logic will propagate from the respective function.
///
/// - If no queue provider is configured, it will return an error indicating
/// the lack of configuration.
/// - If the Redis provider is selected, it will return an error stating
/// that cancellation is not supported.
/// - Any error in the underlying provider's cancellation logic will
/// propagate from the respective function.
pub async fn cancel_jobs(&self, job_name: &str) -> Result<()> {
tracing::debug!(job_name = ?job_name, "cancel jobs");
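
As a usage sketch for the cancellation API documented above; the `AppContext` field name `queue_provider` and the worker name are assumptions, not part of this commit:

```rust
use loco_rs::{app::AppContext, Result};

/// Cancel all queued jobs registered under a given worker name.
/// Per the doc comment above, this errors when no provider is configured
/// or when the provider (e.g. Redis) does not support cancellation.
async fn cancel_report_jobs(ctx: &AppContext) -> Result<()> {
    if let Some(queue) = &ctx.queue_provider {
        queue.cancel_jobs("ReportWorker").await?;
    }
    Ok(())
}
```
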

Expand All @@ -378,13 +381,16 @@ impl Queue {
}
}

/// Clears jobs older than a specified number of days for the configured queue provider.
/// Clears jobs older than a specified number of days for the configured
/// queue provider.
///
/// # Errors
/// - If no queue provider is configured, it will return an error indicating the lack of configuration.
/// - If the Redis provider is selected, it will return an error stating that clearing jobs is not supported.
/// - Any error in the underlying provider's job clearing logic will propagate from the respective function.
///
/// - If no queue provider is configured, it will return an error indicating
/// the lack of configuration.
/// - If the Redis provider is selected, it will return an error stating
/// that clearing jobs is not supported.
/// - Any error in the underlying provider's job clearing logic will
/// propagate from the respective function.
pub async fn clear_jobs_older_than(
&self,
age_days: i64,
@@ -419,9 +425,12 @@ impl Queue {
/// Clears jobs based on their status for the configured queue provider.
///
/// # Errors
/// - If no queue provider is configured, it will return an error indicating the lack of configuration.
/// - If the Redis provider is selected, it will return an error stating that clearing jobs is not supported.
/// - Any error in the underlying provider's job clearing logic will propagate from the respective function.
/// - If no queue provider is configured, it will return an error indicating
/// the lack of configuration.
/// - If the Redis provider is selected, it will return an error stating
/// that clearing jobs is not supported.
/// - Any error in the underlying provider's job clearing logic will
/// propagate from the respective function.
pub async fn clear_by_status(&self, status: Vec<JobStatus>) -> Result<()> {
tracing::debug!(status = ?status, "clear jobs by status");
match self {
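
A housekeeping sketch for the clearing methods documented above; the module path `loco_rs::bgworker::JobStatus` and the `queue_provider` field name are inferred from the file layout in this diff, not confirmed by it:

```rust
use loco_rs::{app::AppContext, bgworker::JobStatus, Result};

/// Drop all cancelled jobs from the queue. `JobStatus::Cancelled` appears in
/// this diff; `clear_jobs_older_than` would be invoked the same way, but its
/// full parameter list is truncated in this hunk, so it is not shown here.
async fn drop_cancelled_jobs(ctx: &AppContext) -> Result<()> {
    if let Some(queue) = &ctx.queue_provider {
        queue.clear_by_status(vec![JobStatus::Cancelled]).await?;
    }
    Ok(())
}
```
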
Expand All @@ -446,12 +455,13 @@ impl Queue {

/// Dumps the list of jobs to a YAML file at the specified path.
///
/// This function retrieves jobs from the queue, optionally filtered by their status, and
/// writes the job data to a YAML file.
/// This function retrieves jobs from the queue, optionally filtered by
/// their status, and writes the job data to a YAML file.
///
/// # Errors
/// - If the specified path cannot be created, an error will be returned.
/// - If the job retrieval or YAML serialization fails, an error will be returned.
/// - If the job retrieval or YAML serialization fails, an error will be
/// returned.
/// - If there is an issue creating the dump file, an error will be returned
pub async fn dump(
&self,
@@ -482,14 +492,16 @@ impl Queue {

/// Imports jobs from a YAML file into the configured queue provider.
///
/// This function reads job data from a YAML file located at the specified `path` and imports
/// the jobs into the queue.
/// This function reads job data from a YAML file located at the specified
/// `path` and imports the jobs into the queue.
///
/// # Errors
/// - If there is an issue opening or reading the YAML file, an error will be returned.
/// - If the queue provider is Redis or none, an error will be returned indicating the lack of support.
/// - If any issues occur while enqueuing the jobs, the function will return an error.
///
/// - If there is an issue opening or reading the YAML file, an error will
/// be returned.
/// - If the queue provider is Redis or none, an error will be returned
/// indicating the lack of support.
/// - If any issues occur while enqueuing the jobs, the function will return
/// an error.
pub async fn import(&self, path: &Path) -> Result<()> {
tracing::debug!(path = %path.display(), "import jobs");
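
To complement the `dump`/`import` docs above, a sketch of re-importing a previously dumped YAML file. Only `import` is called because its full signature is visible in this hunk, while `dump`'s parameter list is truncated; the file path and the `queue_provider` field name are illustrative:

```rust
use std::path::Path;

use loco_rs::{app::AppContext, Result};

/// Re-enqueue jobs from a YAML dump. Per the docs above, this fails for the
/// Redis provider or when no provider is configured.
async fn restore_jobs(ctx: &AppContext, dump_file: &Path) -> Result<()> {
    if let Some(queue) = &ctx.queue_provider {
        queue.import(dump_file).await?;
    }
    Ok(())
}
```
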

34 changes: 18 additions & 16 deletions src/bgworker/pg.rs
@@ -301,9 +301,9 @@ async fn fail_job(pool: &PgPool, id: &JobId, error: &crate::Error) -> Result<()>

/// Cancels jobs in the `pg_loco_queue` table by their name.
///
/// This function updates the status of all jobs with the given `name` and a status of
/// [`JobStatus::Queued`] to [`JobStatus::Cancelled`]. The update also sets the `updated_at` timestamp to the
/// current time.
/// This function updates the status of all jobs with the given `name` and a
/// status of [`JobStatus::Queued`] to [`JobStatus::Cancelled`]. The update also
/// sets the `updated_at` timestamp to the current time.
///
/// # Errors
///
@@ -334,9 +334,9 @@ pub async fn clear(pool: &PgPool) -> Result<()> {

/// Deletes jobs from the `pg_loco_queue` table based on their status.
///
/// This function removes all jobs with a status that matches any of the statuses provided
/// in the `status` argument. The statuses are checked against the `status` column in the
/// database, and any matching rows are deleted.
/// This function removes all jobs with a status that matches any of the
/// statuses provided in the `status` argument. The statuses are checked against
/// the `status` column in the database, and any matching rows are deleted.
///
/// # Errors
///
@@ -354,11 +354,12 @@ pub async fn clear_by_status(pool: &PgPool, status: Vec<JobStatus>) -> Result<()
Ok(())
}

/// Deletes jobs from the `pg_loco_queue` table that are older than a specified number of days.
/// Deletes jobs from the `pg_loco_queue` table that are older than a specified
/// number of days.
///
/// This function removes jobs that have a `created_at` timestamp older than the provided
/// number of days. Additionally, if a `status` is provided, only jobs with a status matching
/// one of the provided values will be deleted.
/// This function removes jobs that have a `created_at` timestamp older than the
/// provided number of days. Additionally, if a `status` is provided, only jobs
/// with a status matching one of the provided values will be deleted.
///
/// # Errors
///
@@ -441,13 +442,14 @@ pub async fn get_jobs(

/// Converts a row from the database into a [`Job`] object.
///
/// This function takes a row from the `Postgres` database and manually extracts the necessary
/// fields to populate a [`Job`] object.
/// This function takes a row from the `Postgres` database and manually extracts
/// the necessary fields to populate a [`Job`] object.
///
/// **Note:** This function manually extracts values from the database row instead of using
/// the `FromRow` trait, which would require enabling the 'macros' feature in the dependencies.
/// The decision to avoid `FromRow` is made to keep the build smaller and faster, as the 'macros'
/// feature is unnecessary in the current dependency tree.
/// **Note:** This function manually extracts values from the database row
/// instead of using the `FromRow` trait, which would require enabling the
/// 'macros' feature in the dependencies. The decision to avoid `FromRow` is
/// made to keep the build smaller and faster, as the 'macros' feature is
/// unnecessary in the current dependency tree.
fn to_job(row: &PgRow) -> Result<Job> {
Ok(Job {
id: row.get("id"),
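
As a side note to the `FromRow` remark above, a standalone sqlx sketch contrasting manual extraction with the derive the comment deliberately avoids; the struct and column names are made up for illustration:

```rust
use sqlx::{postgres::PgRow, Row};

struct JobSummary {
    id: String,
    name: String,
}

// Manual extraction, in the spirit of `to_job`: works without sqlx's
// `macros` feature.
fn summary_from_row(row: &PgRow) -> Result<JobSummary, sqlx::Error> {
    Ok(JobSummary {
        id: row.try_get("id")?,
        name: row.try_get("name")?,
    })
}

// The rejected alternative: `#[derive(sqlx::FromRow)]` on `JobSummary`
// removes the boilerplate but requires enabling the `macros` feature.
```
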
34 changes: 18 additions & 16 deletions src/bgworker/sqlt.rs
@@ -354,9 +354,9 @@ async fn fail_job(pool: &SqlitePool, id: &JobId, error: &crate::Error) -> Result

/// Cancels jobs in the `sqlt_loco_queue` table by their name.
///
/// This function updates the status of all jobs with the given `name` and a status of
/// [`JobStatus::Queued`] to [`JobStatus::Cancelled`]. The update also sets the `updated_at` timestamp to the
/// current time.
/// This function updates the status of all jobs with the given `name` and a
/// status of [`JobStatus::Queued`] to [`JobStatus::Cancelled`]. The update also
/// sets the `updated_at` timestamp to the current time.
///
/// # Errors
///
@@ -395,9 +395,9 @@ pub async fn clear(pool: &SqlitePool) -> Result<()> {

/// Deletes jobs from the `sqlt_loco_queue` table based on their status.
///
/// This function removes all jobs with a status that matches any of the statuses provided
/// in the `status` argument. The statuses are checked against the `status` column in the
/// database, and any matching rows are deleted.
/// This function removes all jobs with a status that matches any of the
/// statuses provided in the `status` argument. The statuses are checked against
/// the `status` column in the database, and any matching rows are deleted.
///
/// # Errors
///
@@ -418,11 +418,12 @@ pub async fn clear_by_status(pool: &SqlitePool, status: Vec<JobStatus>) -> Resul
Ok(())
}

/// Deletes jobs from the `sqlt_loco_queue` table that are older than a specified number of days.
/// Deletes jobs from the `sqlt_loco_queue` table that are older than a
/// specified number of days.
///
/// This function removes jobs that have a `created_at` timestamp older than the provided
/// number of days. Additionally, if a `status` is provided, only jobs with a status matching
/// one of the provided values will be deleted.
/// This function removes jobs that have a `created_at` timestamp older than the
/// provided number of days. Additionally, if a `status` is provided, only jobs
/// with a status matching one of the provided values will be deleted.
///
/// # Errors
///
@@ -529,13 +530,14 @@ pub async fn get_jobs(

/// Converts a row from the database into a [`Job`] object.
///
/// This function takes a row from the `SQLite` database and manually extracts the necessary
/// fields to populate a [`Job`] object.
/// This function takes a row from the `SQLite` database and manually extracts
/// the necessary fields to populate a [`Job`] object.
///
/// **Note:** This function manually extracts values from the database row instead of using
/// the `FromRow` trait, which would require enabling the 'macros' feature in the dependencies.
/// The decision to avoid `FromRow` is made to keep the build smaller and faster, as the 'macros'
/// feature is unnecessary in the current dependency tree.
/// **Note:** This function manually extracts values from the database row
/// instead of using the `FromRow` trait, which would require enabling the
/// 'macros' feature in the dependencies. The decision to avoid `FromRow` is
/// made to keep the build smaller and faster, as the 'macros' feature is
/// unnecessary in the current dependency tree.
fn to_job(row: &SqliteRow) -> Result<Job> {
Ok(Job {
id: row.get("id"),
3 changes: 1 addition & 2 deletions src/boot.rs
@@ -15,8 +15,7 @@ use crate::{
app::{AppContext, Hooks},
banner::print_banner,
bgworker, cache,
config::Config,
config::{self, WorkerMode},
config::{self, Config, WorkerMode},
controller::ListRoutes,
environment::Environment,
errors::Error,
23 changes: 15 additions & 8 deletions src/cli.rs
@@ -30,7 +30,6 @@ cfg_if::cfg_if! {
feature = "with-db"
))]
use std::process::exit;

use std::{
collections::BTreeMap,
path::{Path, PathBuf},
@@ -49,11 +48,10 @@ use crate::{
create_app, create_context, list_endpoints, list_middlewares, run_scheduler, run_task,
start, RunDbCommand, ServeParams, StartMode,
},
config::Config,
config::{Config, WorkerMode},
environment::{resolve_from_env, Environment, DEFAULT_ENVIRONMENT},
logger, task, Error,
};
use crate::config::WorkerMode;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
@@ -333,7 +331,8 @@ After running the migration, follow these steps to complete the process:
kind: DeploymentKind,
},

/// Override templates and allows you to take control of them. You can always go back when deleting the local template.
/// Override templates and allows you to take control of them. You can
/// always go back when deleting the local template.
#[command(after_help = format!("{}
- Override a Specific File:
* cargo loco generate override scaffold/api/controller.t
@@ -350,7 +349,8 @@ After running the migration, follow these steps to complete the process:
/// The path to a specific template or directory to copy.
template_path: Option<String>,

/// Show available templates to copy under the specified directory without actually coping them.
/// Show available templates to copy under the specified directory
/// without actually coping them.
#[arg(long, action)]
info: bool,
},
@@ -434,16 +434,23 @@ impl ComponentArg {
.static_assets
.clone()
.map(|a| a.fallback);

#[cfg(feature = "with-db")]
let postgres = config.database.uri.contains("postgres://");
#[cfg(not(feature = "with-db"))]
let postgres = false;
#[cfg(feature = "with-db")]
let sqlite = config.database.uri.contains("sqlite://");
#[cfg(not(feature = "with-db"))]
let sqlite = false;
Ok(Component::Deployment {
kind,
asset_folder: copy_asset_folder.clone(),
fallback_file: fallback_file.clone(),
host: config.server.host.clone(),
port: config.server.port,
background_queue: config.workers.mode == WorkerMode::BackgroundQueue,
postgres: config.database.uri.contains("postgres://"),
sqlite: config.database.uri.contains("sqlite://"),
postgres,
sqlite,
})
}
Self::Override {
3 changes: 2 additions & 1 deletion src/controller/extractor/validate.rs
@@ -1,8 +1,9 @@
use crate::Error;
use axum::extract::{Form, FromRequest, Json, Request};
use serde::de::DeserializeOwned;
use validator::Validate;

use crate::Error;

#[derive(Debug, Clone, Copy, Default)]
pub struct JsonValidateWithMessage<T>(pub T);

3 changes: 2 additions & 1 deletion src/controller/format.rs
@@ -354,7 +354,8 @@ impl RenderBuilder {
self.redirect_with_header_key(header::LOCATION, to)
}

/// Finalizes the HTTP response and redirects to a specified location using a dynamic header key.
/// Finalizes the HTTP response and redirects to a specified location using
/// a dynamic header key.
///
/// # Errors
///
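
For orientation, a hedged handler sketch using the redirect helpers documented above; `format::render()` is assumed to be the `RenderBuilder` constructor, and the handler name and target path are illustrative:

```rust
use loco_rs::prelude::*;

/// Redirect the caller to the dashboard after a successful action, using the
/// builder whose `redirect` helpers are reformatted in this hunk.
async fn after_login(State(_ctx): State<AppContext>) -> Result<Response> {
    format::render().redirect("/dashboard")
}
```
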
3 changes: 2 additions & 1 deletion src/controller/middleware/auth.rs
@@ -102,7 +102,8 @@
}
}

/// extract a [JWT] token from request parts, using a non-mutable reference to the [Parts]
/// extract a [JWT] token from request parts, using a non-mutable reference to
/// the [Parts]
///
/// # Errors
/// Return an error when JWT token not configured or when the token is not valid
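
Downstream of the extraction helper documented above, typical controller usage looks like this, following the extractor pattern from Loco's docs; the handler name is illustrative and the `claims.pid` field is assumed from that pattern:

```rust
use loco_rs::prelude::*;

/// The `auth::JWT` extractor drives the token extraction documented above:
/// the request is rejected when the JWT is missing, unconfigured, or invalid.
async fn current(auth: auth::JWT, State(_ctx): State<AppContext>) -> Result<Response> {
    // `claims.pid` is the user identifier carried in the token.
    format::text(&auth.claims.pid)
}
```
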
3 changes: 2 additions & 1 deletion src/env_vars.rs
@@ -21,7 +21,8 @@ pub fn get(key: &str) -> Result<String, std::env::VarError> {
}

#[allow(dead_code)]
/// Retrieves the value of the given environment variable, or returns a default value if the variable is not set.
/// Retrieves the value of the given environment variable, or returns a default
/// value if the variable is not set.
pub fn get_or_default(key: &str, default: &str) -> String {
get(key).unwrap_or_else(|_| default.to_string())
}
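
A trivial usage sketch for the helper above; `env_vars` is an internal module, so this only shows the in-crate call, and the variable name is hypothetical:

```rust
// Inside the crate: fall back to "5150" when LOCO_BIND_PORT (hypothetical
// variable name) is not set in the environment.
fn bind_port() -> String {
    crate::env_vars::get_or_default("LOCO_BIND_PORT", "5150")
}
```
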
8 changes: 5 additions & 3 deletions src/environment.rs
@@ -12,11 +12,13 @@
//! let config = environment.load().expect("failed to load environment");
//! }
//! ```
use super::config::Config;
use crate::{env_vars, Result};
use std::{path::Path, str::FromStr};

use serde::{Deserialize, Serialize};
use serde_variant::to_variant_name;
use std::{path::Path, str::FromStr};

use super::config::Config;
use crate::{env_vars, Result};

pub const DEFAULT_ENVIRONMENT: &str = "development";
pub const LOCO_ENV: &str = "LOCO_ENV";
4 changes: 2 additions & 2 deletions src/hash.rs
@@ -1,10 +1,11 @@
use crate::{Error, Result};
use argon2::{
password_hash::SaltString, Argon2, Params, PasswordHash, PasswordHasher, PasswordVerifier,
Version,
};
use rand::{distributions::Alphanumeric, thread_rng, Rng};

use crate::{Error, Result};

/// Hashes a plain text password and returns the hashed result.
///
/// # Errors
@@ -67,7 +68,6 @@ pub fn verify_password(pass: &str, hashed_password: &str) -> bool {
/// let rand_str = hash::random_string(10);
/// assert_eq!(rand_str.len(), 10);
/// assert_ne!(rand_str, hash::random_string(10));
///
/// ```
pub fn random_string(length: usize) -> String {
thread_rng()
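
Finally, a round-trip sketch for the password helpers in this file; `hash_password` is assumed to be the hashing counterpart of `verify_password`, since its signature is not visible in this hunk:

```rust
use loco_rs::hash;

// Generate a random secret, hash it, and verify it against the hash.
fn password_roundtrip() -> loco_rs::Result<()> {
    let plain = hash::random_string(12);
    let hashed = hash::hash_password(&plain)?; // assumed helper name
    assert!(hash::verify_password(&plain, &hashed));
    Ok(())
}
```
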
