Skip to content

Commit

Permalink
chore: run cargo fmt
Browse files Browse the repository at this point in the history
  • Loading branch information
abonander committed Aug 16, 2024
1 parent 6599f1c commit 63349de
Show file tree
Hide file tree
Showing 8 changed files with 44 additions and 37 deletions.
8 changes: 6 additions & 2 deletions sqlx-postgres/src/arguments.rs
Original file line number Diff line number Diff line change
Expand Up @@ -274,6 +274,10 @@ impl DerefMut for PgArgumentBuffer {
}

/// Checks that a value's encoded size fits in an `i32`, as required by the
/// PostgreSQL binary wire protocol (value lengths are sent as signed 32-bit ints).
///
/// Returns the size as `i32` on success, or a human-readable error message if
/// `size` exceeds `i32::MAX` and would therefore overflow the length field.
pub(crate) fn value_size_int4_checked(size: usize) -> Result<i32, String> {
    // `try_from` fails exactly when `size > i32::MAX`; surface that as a
    // descriptive error rather than silently truncating with an `as` cast.
    i32::try_from(size).map_err(|_| {
        format!(
            "value size would overflow in the binary protocol encoding: {size} > {}",
            i32::MAX
        )
    })
}
31 changes: 15 additions & 16 deletions sqlx-postgres/src/connection/describe.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use crate::error::Error;
use crate::ext::ustr::UStr;
use crate::message::{ParameterDescription, RowDescription};
use crate::query_as::query_as;
use crate::query_scalar::{query_scalar};
use crate::query_scalar::query_scalar;
use crate::statement::PgStatementMetadata;
use crate::type_info::{PgArrayOf, PgCustomType, PgType, PgTypeKind};
use crate::types::Json;
Expand All @@ -11,8 +11,8 @@ use crate::HashMap;
use crate::{PgColumn, PgConnection, PgTypeInfo};
use futures_core::future::BoxFuture;
use smallvec::SmallVec;
use std::sync::Arc;
use sqlx_core::query_builder::QueryBuilder;
use std::sync::Arc;

/// Describes the type of the `pg_type.typtype` column
///
Expand Down Expand Up @@ -426,7 +426,7 @@ WHERE rngtypid = $1
if meta.columns.len() * 3 > 65535 {
tracing::debug!(
?stmt_id,
num_columns=meta.columns.len(),
num_columns = meta.columns.len(),
"number of columns in query is too large to pull nullability for"
);
}
Expand All @@ -436,19 +436,18 @@ WHERE rngtypid = $1
// This will include columns that don't have a `relation_id` (are not from a table);
// assuming those are a minority of columns, it's less code to _not_ work around it
// and just let Postgres return `NULL`.
let mut nullable_query = QueryBuilder::new(
"SELECT NOT pg_attribute.attnotnull FROM ( "
);

nullable_query.push_values(
meta.columns.iter().zip(0i32..),
|mut tuple, (column, i)| {
// ({i}::int4, {column.relation_id}::int4, {column.relation_attribute_no}::int2)
tuple.push_bind(i).push_unseparated("::int4");
tuple.push_bind(column.relation_id).push_unseparated("::int4");
tuple.push_bind(column.relation_attribute_no).push_bind_unseparated("::int2");
},
);
let mut nullable_query = QueryBuilder::new("SELECT NOT pg_attribute.attnotnull FROM ( ");

nullable_query.push_values(meta.columns.iter().zip(0i32..), |mut tuple, (column, i)| {
// ({i}::int4, {column.relation_id}::int4, {column.relation_attribute_no}::int2)
tuple.push_bind(i).push_unseparated("::int4");
tuple
.push_bind(column.relation_id)
.push_unseparated("::int4");
tuple
.push_bind(column.relation_attribute_no)
.push_bind_unseparated("::int2");
});

nullable_query.push(
") as col(idx, table_id, col_idx) \
Expand Down
2 changes: 1 addition & 1 deletion sqlx-postgres/src/types/bit_vec.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use crate::arguments::value_size_int4_checked;
use crate::{
decode::Decode,
encode::{Encode, IsNull},
Expand All @@ -8,7 +9,6 @@ use crate::{
use bit_vec::BitVec;
use sqlx_core::bytes::Buf;
use std::{io, mem};
use crate::arguments::value_size_int4_checked;

impl Type<Postgres> for BitVec {
fn type_info() -> PgTypeInfo {
Expand Down
4 changes: 3 additions & 1 deletion sqlx-postgres/src/types/chrono/date.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,9 @@ impl Encode<'_, Postgres> for NaiveDate {
let days: i32 = (*self - postgres_epoch_date())
.num_days()
.try_into()
.map_err(|_| format!("value {self:?} would overflow binary encoding for Postgres DATE"))?;
.map_err(|_| {
format!("value {self:?} would overflow binary encoding for Postgres DATE")
})?;

Encode::<Postgres>::encode(days, buf)
}
Expand Down
2 changes: 1 addition & 1 deletion sqlx-postgres/src/types/cube.rs
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ impl PgCube {
bytes.len()
),
)
.into());
.into());
}

match (header.is_point, header.dimensions) {
Expand Down
20 changes: 10 additions & 10 deletions sqlx-postgres/src/types/rust_decimal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -193,15 +193,9 @@ mod tests {
fn zero() {
let zero: Decimal = "0".parse().unwrap();

assert_eq!(
PgNumeric::from(&zero),
PgNumeric::ZERO,
);
assert_eq!(PgNumeric::from(&zero), PgNumeric::ZERO,);

assert_eq!(
Decimal::try_from(&PgNumeric::ZERO).unwrap(),
Decimal::ZERO
);
assert_eq!(Decimal::try_from(&PgNumeric::ZERO).unwrap(), Decimal::ZERO);
}

#[test]
Expand Down Expand Up @@ -384,7 +378,10 @@ mod tests {
let actual_decimal = Decimal::try_from(expected_numeric).unwrap();
assert_eq!(actual_decimal, Decimal::MAX);
// Value split by 10,000's to match the expected digits[]
assert_eq!(actual_decimal.mantissa(), 7_9228_1625_1426_4337_5935_4395_0335);
assert_eq!(
actual_decimal.mantissa(),
7_9228_1625_1426_4337_5935_4395_0335
);
assert_eq!(actual_decimal.scale(), 0);
}

Expand All @@ -406,7 +403,10 @@ mod tests {

let actual_decimal = Decimal::try_from(expected_numeric).unwrap();
assert_eq!(actual_decimal, max_value_max_scale);
assert_eq!(actual_decimal.mantissa(), 79_228_162_514_264_337_593_543_950_335);
assert_eq!(
actual_decimal.mantissa(),
79_228_162_514_264_337_593_543_950_335
);
assert_eq!(actual_decimal.scale(), 28);
}

Expand Down
7 changes: 3 additions & 4 deletions sqlx-postgres/src/types/time/date.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,9 @@ impl PgHasArrayType for Date {
impl Encode<'_, Postgres> for Date {
fn encode_by_ref(&self, buf: &mut PgArgumentBuffer) -> Result<IsNull, BoxDynError> {
// DATE is encoded as number of days since epoch (2000-01-01)
let days: i32 = (*self - PG_EPOCH)
.whole_days()
.try_into()
.map_err(|_| format!("value {self:?} would overflow binary encoding for Postgres DATE"))?;
let days: i32 = (*self - PG_EPOCH).whole_days().try_into().map_err(|_| {
format!("value {self:?} would overflow binary encoding for Postgres DATE")
})?;
Encode::<Postgres>::encode(days, buf)
}

Expand Down
7 changes: 5 additions & 2 deletions sqlx-postgres/src/types/time/datetime.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,12 @@ impl PgHasArrayType for OffsetDateTime {
impl Encode<'_, Postgres> for PrimitiveDateTime {
fn encode_by_ref(&self, buf: &mut PgArgumentBuffer) -> Result<IsNull, BoxDynError> {
// TIMESTAMP is encoded as the microseconds since the epoch
let micros: i64 = (*self - PG_EPOCH.midnight()).whole_microseconds()
let micros: i64 = (*self - PG_EPOCH.midnight())
.whole_microseconds()
.try_into()
.map_err(|_| format!("value {self:?} would overflow binary encoding for Postgres TIME"))?;
.map_err(|_| {
format!("value {self:?} would overflow binary encoding for Postgres TIME")
})?;
Encode::<Postgres>::encode(micros, buf)
}

Expand Down

0 comments on commit 63349de

Please sign in to comment.