fix: allow keys of different types to make batching logic consistent with single queries (#5059)

* fix: allow combinations of keys of different types to make batching logic consistent with single queries

* fix: split the tests per type, exclude MongoDb in decimal and fix mongo id

* chore: use consistent naming for the test

* chore: minor PR review changes

* chore: formatting adjustments
jacek-prisma authored Dec 2, 2024
1 parent b5a4b8f commit d6d59dd
Showing 4 changed files with 283 additions and 0 deletions.
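
In brief: when the engine batches several findUnique queries, it has to compare the user-supplied key values against the values that come back in the response (see the compare_values cases in handler.rs below), and that comparison previously broke down when the two sides used different scalar representations of the same key, e.g. Int vs. Float, BigInt, or a Decimal serialized as a string. The commit adds tests asserting that batched and non-batched results are identical for Float, BigInt, Decimal and DateTime ids, and extends compare_values to normalize numeric values before comparing. The sketch below is a minimal, self-contained illustration of that normalization idea; it assumes only the bigdecimal crate, and MixedKey, as_decimal and keys_match are hypothetical names, not types from the engine.

// Hypothetical, standalone sketch (bigdecimal crate only) of what "keys of
// different types" means here: the user supplies `Id: 1` as an Int, while the
// response carries the same key as a Float, BigInt or Decimal, the latter
// often serialized as a string. Normalizing both sides to BigDecimal makes
// 1, 1.0 and "1.0" compare equal. The engine's real logic is
// RequestHandler::compare_values, shown in the handler.rs hunks below.
use std::str::FromStr;

use bigdecimal::BigDecimal;

enum MixedKey {
    Int(i64),
    Float(BigDecimal),
    Text(String), // e.g. a Decimal id rendered as "1.0" in a response
}

fn as_decimal(key: &MixedKey) -> Option<BigDecimal> {
    match key {
        MixedKey::Int(i) => Some(BigDecimal::from(*i)),
        MixedKey::Float(d) => Some(d.clone()),
        MixedKey::Text(s) => BigDecimal::from_str(s).ok(),
    }
}

fn keys_match(a: &MixedKey, b: &MixedKey) -> bool {
    match (as_decimal(a), as_decimal(b)) {
        // BigDecimal equality is by numeric value, so 1 == 1.0 == 1.00.
        (Some(a), Some(b)) => a == b,
        _ => false,
    }
}

fn main() {
    let user_input = MixedKey::Int(1);
    assert!(keys_match(&user_input, &MixedKey::Float(BigDecimal::from_str("1.0").unwrap())));
    assert!(keys_match(&user_input, &MixedKey::Text("1".to_owned())));
    assert!(!keys_match(&user_input, &MixedKey::Text("2".to_owned())));
}
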
@@ -1,3 +1,4 @@
mod select_different_key_types;
mod select_one_compound;
mod select_one_singular;
mod transactional_batch;
@@ -0,0 +1,254 @@
use indoc::indoc;
use query_engine_tests::*;

#[test_suite(schema(schema))]
mod float_in_schema {
    fn schema() -> String {
        let schema = indoc! {
            r#"
            model FloatEntity {
                #id(Id, Float, @id)
                Text String
            }
            "#
        };

        schema.to_owned()
    }

    #[connector_test]
    async fn batch_of_two_distinct(runner: Runner) -> TestResult<()> {
        create_test_data(&runner).await?;
        assert_consistent_with_batch(
            &runner,
            r#"query { findUniqueFloatEntity(where: { Id: 1 }){ Text }}"#,
            r#"query { findUniqueFloatEntity(where: { Id: 2 }){ Text }}"#,
        )
        .await
    }

    #[connector_test]
    async fn batch_of_two_repeated(runner: Runner) -> TestResult<()> {
        create_test_data(&runner).await?;
        assert_consistent_with_batch(
            &runner,
            r#"query { findUniqueFloatEntity(where: { Id: 1 }){ Text }}"#,
            r#"query { findUniqueFloatEntity(where: { Id: 1 }){ Text }}"#,
        )
        .await
    }

    async fn create_test_data(runner: &Runner) -> TestResult<()> {
        for mutation in [
            r#"mutation entity {
                createOneFloatEntity(data: { Id: 1, Text: "A" }) {
                    Text
                }
            }"#,
            r#"mutation entity {
                createOneFloatEntity(data: { Id: 2, Text: "B" }) {
                    Text
                }
            }"#,
        ] {
            runner.query(mutation).await?.assert_success();
        }
        Ok(())
    }
}

#[test_suite(schema(schema))]
mod bigint_in_schema {
    fn schema() -> String {
        let schema = indoc! {
            r#"
            model BigIntEntity {
                #id(Id, BigInt, @id)
                Text String
            }
            "#
        };

        schema.to_owned()
    }

    #[connector_test]
    async fn batch_of_two_distinct(runner: Runner) -> TestResult<()> {
        create_test_data(&runner).await?;
        assert_consistent_with_batch(
            &runner,
            r#"query { findUniqueBigIntEntity(where: { Id: 1 }){ Text }}"#,
            r#"query { findUniqueBigIntEntity(where: { Id: 2 }){ Text }}"#,
        )
        .await
    }

    #[connector_test]
    async fn batch_of_two_repeated(runner: Runner) -> TestResult<()> {
        create_test_data(&runner).await?;
        assert_consistent_with_batch(
            &runner,
            r#"query { findUniqueBigIntEntity(where: { Id: 1 }){ Text }}"#,
            r#"query { findUniqueBigIntEntity(where: { Id: 1 }){ Text }}"#,
        )
        .await
    }

    async fn create_test_data(runner: &Runner) -> TestResult<()> {
        for mutation in [
            r#"mutation entity {
                createOneBigIntEntity(data: { Id: 1, Text: "A" }) {
                    Text
                }
            }"#,
            r#"mutation entity {
                createOneBigIntEntity(data: { Id: 2, Text: "B" }) {
                    Text
                }
            }"#,
        ] {
            runner.query(mutation).await?.assert_success();
        }
        Ok(())
    }
}

#[test_suite(schema(schema), exclude(MongoDb))]
mod decimal_in_schema {
    fn schema() -> String {
        let schema = indoc! {
            r#"
            model DecimalEntity {
                #id(Id, Decimal, @id)
                Text String
            }
            "#
        };

        schema.to_owned()
    }

    #[connector_test]
    async fn batch_of_two_distinct(runner: Runner) -> TestResult<()> {
        create_test_data(&runner).await?;
        assert_consistent_with_batch(
            &runner,
            r#"query { findUniqueDecimalEntity(where: { Id: 1 }){ Text }}"#,
            r#"query { findUniqueDecimalEntity(where: { Id: 2 }){ Text }}"#,
        )
        .await
    }

    #[connector_test]
    async fn batch_of_two_repeated(runner: Runner) -> TestResult<()> {
        create_test_data(&runner).await?;
        assert_consistent_with_batch(
            &runner,
            r#"query { findUniqueDecimalEntity(where: { Id: 1 }){ Text }}"#,
            r#"query { findUniqueDecimalEntity(where: { Id: 1 }){ Text }}"#,
        )
        .await
    }

    async fn create_test_data(runner: &Runner) -> TestResult<()> {
        for mutation in [
            r#"mutation entity {
                createOneDecimalEntity(data: { Id: 1, Text: "A" }) {
                    Text
                }
            }"#,
            r#"mutation entity {
                createOneDecimalEntity(data: { Id: 2, Text: "B" }) {
                    Text
                }
            }"#,
        ] {
            runner.query(mutation).await?.assert_success();
        }
        Ok(())
    }
}

#[test_suite(schema(schema))]
mod datetime_in_schema {
    fn schema() -> String {
        let schema = indoc! {
            r#"
            model DateTimeEntity {
                #id(Id, DateTime, @id)
                Text String
            }
            "#
        };

        schema.to_owned()
    }

    #[connector_test]
    async fn batch_of_two_distinct(runner: Runner) -> TestResult<()> {
        create_test_data(&runner).await?;
        assert_consistent_with_batch(
            &runner,
            r#"query { findUniqueDateTimeEntity(where: { Id: "2020-01-01T00:00:00Z" }){ Text }}"#,
            r#"query { findUniqueDateTimeEntity(where: { Id: "2020-01-02T00:00:00Z" }){ Text }}"#,
        )
        .await
    }

    #[connector_test]
    async fn batch_of_two_repeated(runner: Runner) -> TestResult<()> {
        create_test_data(&runner).await?;
        assert_consistent_with_batch(
            &runner,
            r#"query { findUniqueDateTimeEntity(where: { Id: "2020-01-01T00:00:00Z" }){ Text }}"#,
            r#"query { findUniqueDateTimeEntity(where: { Id: "2020-01-01T00:00:00Z" }){ Text }}"#,
        )
        .await
    }

    async fn create_test_data(runner: &Runner) -> TestResult<()> {
        for mutation in [
            r#"mutation entity {
                createOneDateTimeEntity(data: { Id: "2020-01-01T00:00:00Z", Text: "A" }) {
                    Text
                }
            }"#,
            r#"mutation entity {
                createOneDateTimeEntity(data: { Id: "2020-01-02T00:00:00Z", Text: "B" }) {
                    Text
                }
            }"#,
        ] {
            runner.query(mutation).await?.assert_success();
        }
        Ok(())
    }
}

async fn assert_consistent_with_batch(runner: &Runner, query_a: &str, query_b: &str) -> TestResult<()> {
    // These pairs of queries are run both batched and non-batched to verify that the
    // batching logic returns the same results as the non-batched logic.
    // This is valuable because the batching logic, unlike regular queries,
    // relies on a comparison operation implemented in our code, which is sensitive
    // to differences in the types of values.

    let batch_result = runner
        .batch(vec![query_a.to_owned(), query_b.to_owned()], false, None)
        .await?;
    batch_result.assert_success();

    let (single_a, single_b) = futures::try_join!(runner.query(query_a), runner.query(query_b))?;

    let batch = batch_result.into_data();
    assert_eq!(batch.len(), 2, "{batch:?}");
    assert_eq!(
        batch,
        single_a
            .into_data()
            .into_iter()
            .chain(single_b.into_data())
            .collect::<Vec<_>>()
    );

    Ok(())
}
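
One note on the helper above: batch_result.assert_success() is checked before into_data() because, per the into_data addition in the next file, an error response flattens to an empty Vec, so asserting success first surfaces the actual batch error rather than a length mismatch. The snippet below is a toy, self-contained restatement of the equality contract the helper asserts, using plain serde_json values; batch, single_a and single_b are made-up stand-ins for QueryResult::into_data() output.

// Toy illustration of the helper's equality contract; the values here are
// hypothetical stand-ins, only serde_json is assumed as a dependency.
use serde_json::{json, Value};

fn main() {
    let batch: Vec<Value> = vec![json!({"Text": "A"}), json!({"Text": "B"})];
    let single_a: Vec<Value> = vec![json!({"Text": "A"})];
    let single_b: Vec<Value> = vec![json!({"Text": "B"})];

    // Same chain-and-collect shape as in `assert_consistent_with_batch`:
    // the batched answer must equal the single answers concatenated in order.
    let combined: Vec<Value> = single_a.into_iter().chain(single_b).collect();
    assert_eq!(batch, combined);
}
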
@@ -138,6 +138,14 @@ impl QueryResult {
        serde_json::to_string_pretty(&self.response).unwrap()
    }

    pub fn into_data(self) -> Vec<serde_json::Value> {
        match self.response {
            Response::Single(res) => vec![res.data],
            Response::Multi(res) => res.batch_result.into_iter().map(|res| res.data).collect(),
            Response::Error(_) => vec![],
        }
    }

    /// Transform a JSON protocol response to a GraphQL protocol response, by removing the type
    /// tags.
    pub(crate) fn detag(&mut self) {
20 changes: 20 additions & 0 deletions query-engine/request-handlers/src/handler.rs
@@ -255,6 +255,8 @@ impl<'a> RequestHandler<'a> {
    /// Here are the cases covered:
    /// - DateTime/String: User-input: DateTime / Response: String
    /// - Int/BigInt: User-input: Int / Response: BigInt
    /// - Int/Float: User-input: Int / Response: Float
    /// - Int/Decimal: User-input: Int / Response: String
    /// - (JSON protocol only) Custom types (eg: { "$type": "BigInt", value: "1" }): User-input: Scalar / Response: Object
    /// - (JSON protocol only) String/Enum: User-input: String / Response: Enum
    ///
@@ -267,6 +269,14 @@
                .map(|t1| &t1 == t2)
                .unwrap_or_else(|_| t1 == stringify_datetime(t2).as_str())
            }
            (
                ArgumentValue::Scalar(PrismaValue::Int(i1) | PrismaValue::BigInt(i1)),
                ArgumentValue::Scalar(PrismaValue::Float(i2)),
            )
            | (
                ArgumentValue::Scalar(PrismaValue::Float(i2)),
                ArgumentValue::Scalar(PrismaValue::Int(i1) | PrismaValue::BigInt(i1)),
            ) => BigDecimal::from(*i1) == *i2,
            (ArgumentValue::Scalar(PrismaValue::Int(i1)), ArgumentValue::Scalar(PrismaValue::BigInt(i2)))
            | (ArgumentValue::Scalar(PrismaValue::BigInt(i2)), ArgumentValue::Scalar(PrismaValue::Int(i1))) => {
                *i1 == *i2
@@ -279,6 +289,16 @@
                Some(t1) => Self::compare_values(t1, t2),
                None => left == right,
            },
            (
                ArgumentValue::Scalar(PrismaValue::Int(s1) | PrismaValue::BigInt(s1)),
                ArgumentValue::Scalar(PrismaValue::String(s2)),
            )
            | (
                ArgumentValue::Scalar(PrismaValue::String(s2)),
                ArgumentValue::Scalar(PrismaValue::Int(s1) | PrismaValue::BigInt(s1)),
            ) => BigDecimal::from_str(s2)
                .map(|s2| s2 == BigDecimal::from(*s1))
                .unwrap_or(false),
            (ArgumentValue::Scalar(PrismaValue::Float(s1)), ArgumentValue::Scalar(PrismaValue::String(s2)))
            | (ArgumentValue::Scalar(PrismaValue::String(s2)), ArgumentValue::Scalar(PrismaValue::Float(s1))) => {
                BigDecimal::from_str(s2).map(|s2| s2 == *s1).unwrap_or(false)
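
To connect the new match arms back to the tests: below is a hedged sketch of how such a type-insensitive comparison is presumably applied when matching compacted-batch response rows to the queries that requested them. It assumes only the bigdecimal crate; Row, values_match and assign_rows are illustrative names, not engine types.

// Hypothetical sketch: given the keys from a compacted batch of findUnique
// queries, pick the response row that denotes the same value even though the
// row's id is serialized differently (e.g. a Decimal id "1.0").
use std::str::FromStr;

use bigdecimal::BigDecimal;

struct Row {
    id: String,
    text: &'static str,
}

fn values_match(user_input: i64, response_id: &str) -> bool {
    // Same shape as the new String/Int arm above: parse the string side,
    // then compare by numeric value as BigDecimal.
    BigDecimal::from_str(response_id)
        .map(|parsed| parsed == BigDecimal::from(user_input))
        .unwrap_or(false)
}

// For each user-supplied key, find the row whose id matches it numerically.
fn assign_rows<'a>(inputs: &[i64], rows: &'a [Row]) -> Vec<Option<&'a Row>> {
    inputs
        .iter()
        .map(|input| rows.iter().find(|row| values_match(*input, &row.id)))
        .collect()
}

fn main() {
    let rows = [
        Row { id: "1.0".to_owned(), text: "A" },
        Row { id: "2.0".to_owned(), text: "B" },
    ];
    assert_eq!(assign_rows(&[1, 2], &rows)[1].map(|row| row.text), Some("B"));
    assert_eq!(assign_rows(&[3], &rows)[0].map(|row| row.text), None);
}
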
