diff --git a/Cargo.lock b/Cargo.lock index ba74051db6e..a01513791b6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2445,6 +2445,7 @@ dependencies = [ "temporal-versioning", "thiserror", "time", + "tokio", "type-system", "url", "utoipa", @@ -7158,6 +7159,7 @@ dependencies = [ "console_error_panic_hook", "email_address", "error-stack 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "futures", "graph-test-data", "iso8601-duration", "postgres-types", diff --git a/apps/hash-graph/libs/api/src/rest/data_type.rs b/apps/hash-graph/libs/api/src/rest/data_type.rs index 12af2c932a7..7d7cc81a84c 100644 --- a/apps/hash-graph/libs/api/src/rest/data_type.rs +++ b/apps/hash-graph/libs/api/src/rest/data_type.rs @@ -37,7 +37,6 @@ use graph::{ subgraph::identifier::DataTypeVertexId, }; use graph_types::{ - knowledge::ValueWithMetadata, ontology::{ DataTypeId, DataTypeMetadata, DataTypeWithMetadata, OntologyTemporalMetadata, OntologyTypeClassificationMetadata, OntologyTypeMetadata, OntologyTypeReference, @@ -110,8 +109,6 @@ use crate::rest::{ Conversions, Operator, Variable, - - ValueWithMetadata, ) ), tags( diff --git a/apps/hash-graph/libs/api/src/rest/entity.rs b/apps/hash-graph/libs/api/src/rest/entity.rs index 7c69069c4f4..03867bda290 100644 --- a/apps/hash-graph/libs/api/src/rest/entity.rs +++ b/apps/hash-graph/libs/api/src/rest/entity.rs @@ -44,10 +44,14 @@ use graph_types::{ SourceProvenance, SourceType, }, link::LinkData, - ArrayMetadata, Confidence, EntityTypeIdDiff, ObjectMetadata, Property, PropertyDiff, - PropertyMetadata, PropertyMetadataObject, PropertyObject, PropertyPatchOperation, - PropertyPath, PropertyPathElement, PropertyProvenance, PropertyWithMetadata, - PropertyWithMetadataObject, ValueMetadata, + property::{ + ArrayMetadata, ObjectMetadata, Property, PropertyDiff, PropertyMetadata, + PropertyMetadataObject, PropertyObject, PropertyPatchOperation, PropertyPath, + PropertyPathElement, PropertyProvenance, PropertyWithMetadata, + 
PropertyWithMetadataArray, PropertyWithMetadataObject, PropertyWithMetadataValue, + ValueMetadata, + }, + Confidence, EntityTypeIdDiff, }, owned_by_id::OwnedById, Embedding, @@ -88,6 +92,8 @@ use crate::rest::{ schemas( CreateEntityRequest, PropertyWithMetadata, + PropertyWithMetadataValue, + PropertyWithMetadataArray, PropertyWithMetadataObject, ValidateEntityParams, CountEntitiesParams, diff --git a/apps/hash-graph/libs/graph/src/snapshot/entity/batch.rs b/apps/hash-graph/libs/graph/src/snapshot/entity/batch.rs index 5e32faab3e1..ae6d608f30d 100644 --- a/apps/hash-graph/libs/graph/src/snapshot/entity/batch.rs +++ b/apps/hash-graph/libs/graph/src/snapshot/entity/batch.rs @@ -1,16 +1,13 @@ -use std::collections::HashMap; - use async_trait::async_trait; use authorization::{backend::ZanzibarBackend, schema::EntityRelationAndSubject, AuthorizationApi}; use error_stack::{Report, ResultExt}; -use futures::TryStreamExt; -use graph_types::{ - knowledge::entity::{Entity, EntityUuid}, - ontology::EntityTypeId, +use graph_types::knowledge::{ + entity::{Entity, EntityUuid}, + property::{visitor::EntityVisitor, PropertyWithMetadataObject}, }; use tokio_postgres::GenericClient; -use type_system::schema::ClosedEntityType; -use validation::{Validate, ValidateEntityComponents}; +use type_system::schema::EntityTypeProvider; +use validation::{EntityPreprocessor, Validate, ValidateEntityComponents}; use crate::{ snapshot::WriteBatch, @@ -240,6 +237,7 @@ where Ok(()) } + #[expect(clippy::too_many_lines)] async fn commit( postgres_client: &mut PostgresStore, validation: bool, @@ -283,36 +281,58 @@ where .await .change_context(InsertionError)?; - let schemas = postgres_client - .read_closed_schemas(&Filter::All(Vec::new()), None) - .await - .change_context(InsertionError)? 
- .try_collect::>() - .await - .change_context(InsertionError)?; - - let validator_provider = StoreProvider::<_, A> { + let validator_provider = StoreProvider { store: postgres_client, cache: StoreCache::default(), authorization: None, }; - for entity in entities { - let schema = entity - .metadata - .entity_type_ids - .iter() - .map(|id| { - schemas - .get(&EntityTypeId::from_url(id)) - .ok_or(InsertionError) - .cloned() - }) - .collect::>()?; + let mut edition_ids_updates = Vec::new(); + let mut properties_updates = Vec::new(); + let mut metadata_updates = Vec::new(); + + for mut entity in entities { + let validation_components = + if entity.metadata.record_id.entity_id.draft_id.is_some() { + ValidateEntityComponents::draft() + } else { + ValidateEntityComponents::full() + }; + let mut property_with_metadata = PropertyWithMetadataObject::from_parts( + entity.properties.clone(), + Some(entity.metadata.properties.clone()), + ) + .change_context(InsertionError)?; + + let entity_type = validator_provider + .provide_closed_type(&entity.metadata.entity_type_ids) + .await + .change_context(InsertionError)?; + + EntityPreprocessor { + components: validation_components, + } + .visit_object( + &entity_type, + &mut property_with_metadata, + &validator_provider, + ) + .await + .change_context(InsertionError)?; + + let (properties, metadata) = property_with_metadata.into_parts(); + let mut changed = false; + + // We avoid updating the entity if the properties and metadata are the same + if entity.properties != properties || entity.metadata.properties != metadata { + changed = true; + entity.properties = properties; + entity.metadata.properties = metadata; + } entity .validate( - &schema, + &entity_type, if entity.metadata.record_id.entity_id.draft_id.is_some() { ValidateEntityComponents::draft() } else { @@ -322,7 +342,34 @@ where ) .await .change_context(InsertionError)?; + + if changed { + edition_ids_updates.push(entity.metadata.record_id.edition_id); + 
properties_updates.push(entity.properties); + metadata_updates.push(entity.metadata.properties); + } } + + postgres_client + .as_client() + .client() + .query( + " + UPDATE entity_editions + SET + properties = data_table.properties, + property_metadata = data_table.property_metadata + FROM ( + SELECT unnest($1::uuid[]) as edition_id, + unnest($2::jsonb[]) as properties, + unnest($3::jsonb[]) as property_metadata + ) as data_table + WHERE entity_editions.entity_edition_id = data_table.edition_id; + ", + &[&edition_ids_updates, &properties_updates, &metadata_updates], + ) + .await + .change_context(InsertionError)?; } Ok(()) diff --git a/apps/hash-graph/libs/graph/src/snapshot/entity/record.rs b/apps/hash-graph/libs/graph/src/snapshot/entity/record.rs index 75bc151d3c2..106154f69e9 100644 --- a/apps/hash-graph/libs/graph/src/snapshot/entity/record.rs +++ b/apps/hash-graph/libs/graph/src/snapshot/entity/record.rs @@ -3,7 +3,7 @@ use graph_types::{ knowledge::{ entity::{EntityId, EntityMetadata, EntityUuid}, link::LinkData, - PropertyObject, + property::PropertyObject, }, Embedding, }; diff --git a/apps/hash-graph/libs/graph/src/store/knowledge.rs b/apps/hash-graph/libs/graph/src/store/knowledge.rs index 9ea8c788dc0..a32e80e21dd 100644 --- a/apps/hash-graph/libs/graph/src/store/knowledge.rs +++ b/apps/hash-graph/libs/graph/src/store/knowledge.rs @@ -10,8 +10,10 @@ use graph_types::{ knowledge::{ entity::{Entity, EntityEmbedding, EntityId, EntityUuid, ProvidedEntityEditionProvenance}, link::LinkData, - Confidence, EntityTypeIdDiff, PropertyDiff, PropertyPatchOperation, PropertyPath, - PropertyWithMetadataObject, + property::{ + PropertyDiff, PropertyPatchOperation, PropertyPath, PropertyWithMetadataObject, + }, + Confidence, EntityTypeIdDiff, }, owned_by_id::OwnedById, }; diff --git a/apps/hash-graph/libs/graph/src/store/postgres/knowledge/entity/mod.rs b/apps/hash-graph/libs/graph/src/store/postgres/knowledge/entity/mod.rs index 6258d5eaaf7..39b694a03cf 100644 --- 
a/apps/hash-graph/libs/graph/src/store/postgres/knowledge/entity/mod.rs +++ b/apps/hash-graph/libs/graph/src/store/postgres/knowledge/entity/mod.rs @@ -1,6 +1,5 @@ mod query; mod read; - use alloc::borrow::Cow; use core::iter::once; use std::collections::{HashMap, HashSet}; @@ -24,28 +23,30 @@ use graph_types::{ EntityMetadata, EntityProvenance, EntityRecordId, EntityTemporalMetadata, EntityUuid, InferredEntityProvenance, }, - Confidence, PropertyMetadataObject, PropertyObject, PropertyPath, - PropertyWithMetadataObject, + property::{ + visitor::EntityVisitor, Property, PropertyMetadata, PropertyMetadataObject, + PropertyObject, PropertyPath, PropertyWithMetadata, + }, + Confidence, }, ontology::EntityTypeId, owned_by_id::OwnedById, Embedding, }; use hash_status::StatusCode; -use postgres_types::{Json, ToSql}; +use postgres_types::ToSql; use temporal_versioning::{ ClosedTemporalBound, DecisionTime, LeftClosedTemporalInterval, LimitedTemporalBound, OpenTemporalBound, RightBoundedTemporalInterval, TemporalBound, TemporalTagged, Timestamp, TransactionTime, }; use tokio_postgres::{error::SqlState, GenericClient, Row}; -use type_system::{schema::ClosedEntityType, url::VersionedUrl}; +use type_system::{schema::EntityTypeProvider, url::VersionedUrl}; use uuid::Uuid; -use validation::{Validate, ValidateEntityComponents}; +use validation::{EntityPreprocessor, Validate, ValidateEntityComponents}; use crate::{ knowledge::EntityQueryPath, - ontology::EntityTypeQueryPath, store::{ crud::{QueryResult, Read, ReadPaginated, Sorting}, error::{DeletionError, EntityDoesNotExist, RaceConditionOnUpdate}, @@ -68,7 +69,7 @@ use crate::{ }, TraversalContext, }, - query::{Filter, FilterExpression, Parameter, ParameterList}, + query::{Filter, FilterExpression, Parameter}, validation::StoreProvider, AsClient, EntityStore, InsertionError, PostgresStore, QueryError, StoreCache, SubgraphRecord, UpdateError, @@ -496,8 +497,40 @@ where let mut entity_has_right_entity_rows = Vec::new(); let mut 
entities = Vec::with_capacity(params.len()); + // TODO: There are expected to be duplicates but we currently don't have a way to identify + // multi-type entity types. We need a way to speed this up. + let mut validation_params = Vec::with_capacity(params.len()); + + let validator_provider = StoreProvider { + store: self, + cache: StoreCache::default(), + authorization: Some((actor_id, Consistency::FullyConsistent)), + }; + + for mut params in params { + let entity_type = validator_provider + .provide_closed_type(¶ms.entity_type_ids) + .await + .change_context(InsertionError)?; + + let validation_components = if params.draft { + ValidateEntityComponents { + num_items: false, + required_properties: false, + ..ValidateEntityComponents::full() + } + } else { + ValidateEntityComponents::full() + }; + EntityPreprocessor { + components: validation_components, + } + .visit_object(&entity_type, &mut params.properties, &validator_provider) + .await + .attach(StatusCode::InvalidArgument) + .change_context(InsertionError)?; + validation_params.push((entity_type, validation_components)); - for params in params { let (properties, property_metadata) = params.properties.into_parts(); let decision_time = params @@ -640,6 +673,8 @@ where .attach_printable("At least one relationship must be provided")); } } + // We move out the cache, so we can re-use `&mut self` later. 
+ let store_cache = validator_provider.cache; let (instantiate_permissions, zookie) = self .authorization_api @@ -756,37 +791,18 @@ where .await .change_context(InsertionError)?; - let validation_params = entities - .iter() - .map(|entity| { - Ok(ValidateEntityParams { - entity_types: EntityValidationType::Id(Cow::Borrowed( - &entity.metadata.entity_type_ids, - )), - properties: Cow::Owned(PropertyWithMetadataObject::from_parts( - entity.properties.clone(), - Some(entity.metadata.properties.clone()), - )?), - link_data: entity.link_data.as_ref().map(Cow::Borrowed), - components: if entity.metadata.record_id.entity_id.draft_id.is_some() { - ValidateEntityComponents { - num_items: false, - required_properties: false, - ..ValidateEntityComponents::full() - } - } else { - ValidateEntityComponents::full() - }, - }) - }) - .collect::, _>>() - .change_context(InsertionError)?; + let validator_provider = StoreProvider { + store: &transaction, + cache: store_cache, + authorization: Some((actor_id, Consistency::FullyConsistent)), + }; - transaction - .validate_entities(actor_id, Consistency::FullyConsistent, validation_params) - .await - .change_context(InsertionError) - .attach(StatusCode::InvalidArgument)?; + for (entity, (schema, components)) in entities.iter().zip(validation_params) { + entity + .validate(&schema, components, &validator_provider) + .await + .change_context(InsertionError)?; + } let commit_result = transaction.commit().await.change_context(InsertionError); if let Err(mut error) = commit_result { @@ -838,66 +854,19 @@ where let validator_provider = StoreProvider { store: self, cache: StoreCache::default(), - authorization: Some(( - &self.authorization_api, - actor_id, - Consistency::FullyConsistent, - )), + authorization: Some((actor_id, Consistency::FullyConsistent)), }; - for params in params { + for mut params in params { let schema = match params.entity_types { EntityValidationType::ClosedSchema(schema) => schema, EntityValidationType::Schema(schemas) 
=> Cow::Owned(schemas.into_iter().collect()), - EntityValidationType::Id(entity_type_url) => { - let ontology_type_ids = entity_type_url - .as_ref() - .iter() - .map(EntityTypeId::from_url) - .collect::>(); - - if !self - .authorization_api - .check_entity_types_permission( - actor_id, - EntityTypePermission::View, - ontology_type_ids.iter().copied(), - consistency, - ) + EntityValidationType::Id(entity_type_urls) => Cow::Owned( + validator_provider + .provide_closed_type(entity_type_urls.as_ref()) .await - .change_context(ValidateEntityError)? - .0 - .into_iter() - .all(|(_, permission)| permission) - { - bail!( - Report::new(ValidateEntityError).attach(StatusCode::PermissionDenied) - ); - } - - let closed_schema = self - .read_closed_schemas( - &Filter::In( - FilterExpression::Path(EntityTypeQueryPath::OntologyId), - ParameterList::EntityTypeIds(&ontology_type_ids), - ), - Some( - &QueryTemporalAxesUnresolved::DecisionTime { - pinned: PinnedTemporalAxisUnresolved::new(None), - variable: VariableTemporalAxisUnresolved::new(None, None), - } - .resolve(), - ), - ) - .await - .change_context(ValidateEntityError)? 
- .map_ok(|(_, raw_type)| raw_type) - .try_collect::() - .await - .change_context(ValidateEntityError)?; - - Cow::Owned(closed_schema) - } + .change_context(ValidateEntityError)?, + ), }; if schema.schemas.is_empty() { @@ -909,11 +878,17 @@ where } }; - if let Err(error) = params - .properties - .validate(&schema, params.components, &validator_provider) - .await - { + let pre_process_result = EntityPreprocessor { + components: params.components, + } + .visit_object( + schema.as_ref(), + params.properties.to_mut(), + &validator_provider, + ) + .await + .change_context(validation::EntityValidationError::InvalidProperties); + if let Err(error) = pre_process_result { if let Err(ref mut report) = status { report.extend_one(error); } else { @@ -1182,7 +1157,7 @@ where *locked_row.transaction_time.start(); let ClosedTemporalBound::Inclusive(locked_decision_time) = *locked_row.decision_time.start(); - let mut previous_entity = Read::::read_one( + let previous_entity = Read::::read_one( &transaction, &Filter::Equal( Some(FilterExpression::Path(EntityQueryPath::EditionId)), @@ -1204,14 +1179,6 @@ where .attach(params.entity_id) .change_context(UpdateError)?; - let previous_properties = previous_entity.properties.clone(); - let previous_property_metadata = previous_entity.metadata.properties.clone(); - previous_entity - .patch(params.properties) - .change_context(UpdateError)?; - let properties = previous_entity.properties; - let property_metadata = previous_entity.metadata.properties; - let mut first_non_draft_created_at_decision_time = previous_entity .metadata .provenance @@ -1234,15 +1201,13 @@ where let (entity_type_ids, entity_types_updated) = if params.entity_type_ids.is_empty() { (previous_entity.metadata.entity_type_ids, false) } else { - let previous_entity_types = previous_entity + let added_types = previous_entity .metadata .entity_type_ids - .iter() - .collect::>(); - let new_entity_types = params.entity_type_ids.iter().collect::>(); - - let added_types = 
new_entity_types.difference(&previous_entity_types); - let removed_types = previous_entity_types.difference(&new_entity_types); + .difference(¶ms.entity_type_ids); + let removed_types = params + .entity_type_ids + .difference(&previous_entity.metadata.entity_type_ids); let mut has_changed = false; for entity_type_id in added_types.chain(removed_types) { @@ -1267,6 +1232,54 @@ where (params.entity_type_ids, has_changed) }; + let previous_properties = previous_entity.properties.clone(); + let previous_property_metadata = previous_entity.metadata.properties.clone(); + + let mut properties_with_metadata = PropertyWithMetadata::from_parts( + Property::Object(previous_entity.properties), + Some(PropertyMetadata::Object { + value: previous_entity.metadata.properties.value, + metadata: previous_entity.metadata.properties.metadata, + }), + ) + .change_context(UpdateError)?; + properties_with_metadata + .patch(params.properties) + .change_context(UpdateError)?; + + let validator_provider = StoreProvider { + store: &transaction, + cache: StoreCache::default(), + authorization: Some((actor_id, Consistency::FullyConsistent)), + }; + let entity_type = validator_provider + .provide_closed_type(&entity_type_ids) + .await + .change_context(UpdateError)?; + + let validation_components = if draft { + ValidateEntityComponents::draft() + } else { + ValidateEntityComponents::full() + }; + + let (properties, property_metadata) = + if let PropertyWithMetadata::Object(mut object) = properties_with_metadata { + EntityPreprocessor { + components: validation_components, + } + .visit_object(&entity_type, &mut object, &validator_provider) + .await + .attach(StatusCode::InvalidArgument) + .change_context(UpdateError)?; + let (properties, property_metadata) = object.into_parts(); + (properties, property_metadata) + } else { + unreachable!("patching should not change the property type"); + }; + // We move out the cache, so we can re-use `&mut self` later. 
+ let store_cache = validator_provider.cache; + #[expect(clippy::needless_collect, reason = "Will be used later")] let diff = previous_properties .diff(&properties, &mut PropertyPath::default()) @@ -1302,7 +1315,7 @@ where archived_by_id: None, provided: params.provenance, }; - let (edition_id, closed_schema) = transaction + let edition_id = transaction .insert_entity_edition( archived, &entity_type_ids, @@ -1413,35 +1426,6 @@ where } }; - let validation_components = if draft { - ValidateEntityComponents::draft() - } else { - ValidateEntityComponents::full() - }; - - transaction - .validate_entity( - actor_id, - Consistency::FullyConsistent, - ValidateEntityParams { - entity_types: EntityValidationType::ClosedSchema(Cow::Borrowed(&closed_schema)), - properties: Cow::Owned( - PropertyWithMetadataObject::from_parts( - properties.clone(), - Some(property_metadata.clone()), - ) - .change_context(UpdateError)?, - ), - link_data: link_data.as_ref().map(Cow::Borrowed), - components: validation_components, - }, - ) - .await - .change_context(UpdateError) - .attach(StatusCode::InvalidArgument)?; - - transaction.commit().await.change_context(UpdateError)?; - let entity_metadata = EntityMetadata { record_id: EntityRecordId { entity_id: params.entity_id, @@ -1461,17 +1445,31 @@ where properties: property_metadata, archived, }; - let entity = Entity { + let entities = [Entity { properties, link_data, metadata: entity_metadata.clone(), + }]; + + let validator_provider = StoreProvider { + store: &transaction, + cache: store_cache, + authorization: Some((actor_id, Consistency::FullyConsistent)), }; + entities[0] + .validate(&entity_type, validation_components, &validator_provider) + .await + .change_context(UpdateError)?; + + transaction.commit().await.change_context(UpdateError)?; + if let Some(temporal_client) = &self.temporal_client { temporal_client - .start_update_entity_embeddings_workflow(actor_id, &[entity.clone()]) + .start_update_entity_embeddings_workflow(actor_id, 
&entities) .await .change_context(UpdateError)?; } + let [entity] = entities; Ok(entity) } @@ -1619,7 +1617,7 @@ where confidence: Option, provenance: &EntityEditionProvenance, metadata: &PropertyMetadataObject, - ) -> Result<(EntityEditionId, ClosedEntityType), InsertionError> { + ) -> Result { let edition_id: EntityEditionId = self .as_client() .query_one( @@ -1658,20 +1656,7 @@ where .await .change_context(InsertionError)?; - let entity_type = self - .as_client() - .query_raw( - "SELECT closed_schema FROM entity_types WHERE ontology_id = ANY ($1::UUID[]);", - &[&entity_type_ontology_ids], - ) - .await - .change_context(InsertionError)? - .and_then(|row| async move { Ok(row.get::<_, Json>(0).0) }) - .try_collect::() - .await - .change_context(InsertionError)?; - - Ok((edition_id, entity_type)) + Ok(edition_id) } #[tracing::instrument(level = "trace", skip(self))] diff --git a/apps/hash-graph/libs/graph/src/store/postgres/query/rows.rs b/apps/hash-graph/libs/graph/src/store/postgres/query/rows.rs index 4508bb050b9..c6468a641c4 100644 --- a/apps/hash-graph/libs/graph/src/store/postgres/query/rows.rs +++ b/apps/hash-graph/libs/graph/src/store/postgres/query/rows.rs @@ -4,7 +4,8 @@ use graph_types::{ entity::{ DraftId, EntityEditionId, EntityEditionProvenance, EntityUuid, InferredEntityProvenance, }, - Confidence, PropertyMetadataObject, PropertyObject, PropertyProvenance, + property::{PropertyMetadataObject, PropertyObject, PropertyProvenance}, + Confidence, }, ontology::{DataTypeId, EntityTypeId, OntologyEditionProvenance, PropertyTypeId}, owned_by_id::OwnedById, diff --git a/apps/hash-graph/libs/graph/src/store/validation.rs b/apps/hash-graph/libs/graph/src/store/validation.rs index 45f1f09cd44..9076f89f303 100644 --- a/apps/hash-graph/libs/graph/src/store/validation.rs +++ b/apps/hash-graph/libs/graph/src/store/validation.rs @@ -21,10 +21,13 @@ use graph_types::{ use tokio::sync::RwLock; use tokio_postgres::GenericClient; use type_system::{ - 
schema::{ClosedEntityType, DataType, PropertyType}, + schema::{ + ClosedEntityType, DataType, DataTypeProvider, EntityTypeProvider, OntologyTypeProvider, + PropertyType, PropertyTypeProvider, + }, url::{BaseUrl, VersionedUrl}, }; -use validation::{DataTypeProvider, EntityProvider, EntityTypeProvider, OntologyTypeProvider}; +use validation::EntityProvider; use crate::{ store::{crud::Read, query::Filter, AsClient, PostgresStore, QueryError}, @@ -121,20 +124,21 @@ pub struct StoreCache { } #[derive(Debug)] -pub struct StoreProvider<'a, S, A> { +pub struct StoreProvider<'a, S> { pub store: &'a S, pub cache: StoreCache, - pub authorization: Option<(&'a A, AccountId, Consistency<'static>)>, + pub authorization: Option<(AccountId, Consistency<'static>)>, } -impl StoreProvider<'_, S, A> +impl StoreProvider<'_, PostgresStore> where - S: Read, + C: AsClient, A: AuthorizationApi, { async fn authorize_data_type(&self, type_id: DataTypeId) -> Result<(), Report> { - if let Some((authorization_api, actor_id, consistency)) = self.authorization { - authorization_api + if let Some((actor_id, consistency)) = self.authorization { + self.store + .authorization_api .check_data_type_permission( actor_id, DataTypePermission::View, @@ -151,9 +155,9 @@ where } } -impl OntologyTypeProvider for StoreProvider<'_, S, A> +impl OntologyTypeProvider for StoreProvider<'_, PostgresStore> where - S: Read, + C: AsClient, A: AuthorizationApi, { #[expect(refining_impl_trait)] @@ -175,7 +179,7 @@ where let schema = self .store .read_one( - &Filter::for_versioned_url(type_id), + &Filter::::for_versioned_url(type_id), Some( &QueryTemporalAxesUnresolved::DecisionTime { pinned: PinnedTemporalAxisUnresolved::new(None), @@ -194,7 +198,7 @@ where } } -impl DataTypeProvider for StoreProvider<'_, PostgresStore, A> +impl DataTypeProvider for StoreProvider<'_, PostgresStore> where C: AsClient, A: AuthorizationApi, @@ -227,7 +231,7 @@ where } #[expect(refining_impl_trait)] - async fn has_children(&self, data_type: 
DataTypeId) -> Result> { + async fn has_children(&self, data_type: &VersionedUrl) -> Result> { let client = self.store.as_client().client(); Ok(client @@ -238,7 +242,7 @@ where WHERE target_data_type_ontology_id = $1 ); ", - &[&data_type], + &[&DataTypeId::from_url(data_type)], ) .await .change_context(QueryError)? @@ -246,17 +250,18 @@ where } } -impl StoreProvider<'_, S, A> +impl StoreProvider<'_, PostgresStore> where - S: Read, + C: AsClient, A: AuthorizationApi, { async fn authorize_property_type( &self, type_id: PropertyTypeId, ) -> Result<(), Report> { - if let Some((authorization_api, actor_id, consistency)) = self.authorization { - authorization_api + if let Some((actor_id, consistency)) = self.authorization { + self.store + .authorization_api .check_property_type_permission( actor_id, PropertyTypePermission::View, @@ -273,9 +278,9 @@ where } } -impl OntologyTypeProvider for StoreProvider<'_, S, A> +impl OntologyTypeProvider for StoreProvider<'_, PostgresStore> where - S: Read, + C: AsClient, A: AuthorizationApi, { #[expect(refining_impl_trait)] @@ -297,7 +302,7 @@ where let schema = self .store .read_one( - &Filter::for_versioned_url(type_id), + &Filter::::for_versioned_url(type_id), Some( &QueryTemporalAxesUnresolved::DecisionTime { pinned: PinnedTemporalAxisUnresolved::new(None), @@ -320,14 +325,22 @@ where } } -impl StoreProvider<'_, PostgresStore, A> +impl PropertyTypeProvider for StoreProvider<'_, PostgresStore> +where + C: AsClient, + A: AuthorizationApi, +{ +} + +impl StoreProvider<'_, PostgresStore> where C: AsClient, A: AuthorizationApi, { async fn authorize_entity_type(&self, type_id: EntityTypeId) -> Result<(), Report> { - if let Some((authorization_api, actor_id, consistency)) = self.authorization { - authorization_api + if let Some((actor_id, consistency)) = self.authorization { + self.store + .authorization_api .check_entity_type_permission( actor_id, EntityTypePermission::View, @@ -384,7 +397,7 @@ where } } -impl OntologyTypeProvider for 
StoreProvider<'_, PostgresStore, A> +impl OntologyTypeProvider for StoreProvider<'_, PostgresStore> where C: AsClient, A: AuthorizationApi, @@ -418,7 +431,7 @@ where } } -impl EntityTypeProvider for StoreProvider<'_, PostgresStore, A> +impl EntityTypeProvider for StoreProvider<'_, PostgresStore> where C: AsClient, A: AuthorizationApi, @@ -451,9 +464,9 @@ where } } -impl EntityProvider for StoreProvider<'_, S, A> +impl EntityProvider for StoreProvider<'_, PostgresStore> where - S: Read, + C: AsClient, A: AuthorizationApi, { #[expect(refining_impl_trait)] @@ -461,8 +474,9 @@ where if let Some(cached) = self.cache.entities.get(&entity_id).await { return cached; } - if let Some((authorization_api, actor_id, consistency)) = self.authorization { - authorization_api + if let Some((actor_id, consistency)) = self.authorization { + self.store + .authorization_api .check_entity_permission(actor_id, EntityPermission::View, entity_id, consistency) .await .change_context(QueryError)? diff --git a/apps/hash-graph/openapi/openapi.json b/apps/hash-graph/openapi/openapi.json index 34eec24d598..e99629a351b 100644 --- a/apps/hash-graph/openapi/openapi.json +++ b/apps/hash-graph/openapi/openapi.json @@ -7313,40 +7313,31 @@ "PropertyWithMetadata": { "oneOf": [ { - "type": "object", - "title": "PropertyWithMetadataArray", - "properties": { - "metadata": { - "$ref": "#/components/schemas/ArrayMetadata" - }, - "value": { - "type": "array", - "items": { - "$ref": "#/components/schemas/PropertyWithMetadata" - } - } - } + "$ref": "#/components/schemas/PropertyWithMetadataArray" }, { - "type": "object", - "title": "PropertyWithMetadataObject", - "properties": { - "metadata": { - "$ref": "#/components/schemas/ObjectMetadata" - }, - "value": { - "type": "object", - "additionalProperties": { - "$ref": "#/components/schemas/PropertyWithMetadata" - } - } - } + "$ref": "#/components/schemas/PropertyWithMetadataObject" }, { - "$ref": "#/components/schemas/ValueWithMetadata" + "$ref": 
"#/components/schemas/PropertyWithMetadataValue" } ] }, + "PropertyWithMetadataArray": { + "type": "object", + "properties": { + "metadata": { + "$ref": "#/components/schemas/ArrayMetadata" + }, + "value": { + "type": "array", + "items": { + "$ref": "#/components/schemas/PropertyWithMetadata" + } + } + }, + "additionalProperties": false + }, "PropertyWithMetadataObject": { "type": "object", "properties": { @@ -7362,6 +7353,20 @@ }, "additionalProperties": false }, + "PropertyWithMetadataValue": { + "type": "object", + "required": [ + "value", + "metadata" + ], + "properties": { + "metadata": { + "$ref": "#/components/schemas/ValueMetadata" + }, + "value": {} + }, + "additionalProperties": false + }, "ProvidedEntityEditionProvenance": { "type": "object", "properties": { @@ -8142,20 +8147,6 @@ }, "additionalProperties": false }, - "ValueWithMetadata": { - "type": "object", - "required": [ - "value", - "metadata" - ], - "properties": { - "metadata": { - "$ref": "#/components/schemas/ValueMetadata" - }, - "value": {} - }, - "additionalProperties": false - }, "Variable": { "type": "string", "enum": [ diff --git a/apps/hash-graph/tests/ambiguous.http b/apps/hash-graph/tests/ambiguous.http index b40437aca2b..f8df3ee7a7c 100644 --- a/apps/hash-graph/tests/ambiguous.http +++ b/apps/hash-graph/tests/ambiguous.http @@ -192,7 +192,7 @@ X-Authenticated-User-Actor-Id: {{account_id}} > {% client.test("status", function() { - client.assert(response.status === 400, "Response status is not 200"); + client.assert(response.status === 400, "Response status is not 400"); }); %} diff --git a/libs/@blockprotocol/type-system/rust/Cargo.toml b/libs/@blockprotocol/type-system/rust/Cargo.toml index 9c9a6414c42..9391f3c12bf 100644 --- a/libs/@blockprotocol/type-system/rust/Cargo.toml +++ b/libs/@blockprotocol/type-system/rust/Cargo.toml @@ -30,6 +30,7 @@ uuid = { workspace = true, public = true, features = ["std"] } # Private workspace dependencies codec = { workspace = true, features = 
["serde"] } +futures = { workspace = true } # Private third-party dependencies regex = { workspace = true, features = ["std"] } diff --git a/libs/@blockprotocol/type-system/rust/src/schema/array/mod.rs b/libs/@blockprotocol/type-system/rust/src/schema/array/mod.rs index c4bef39d594..d6f40ea64ed 100644 --- a/libs/@blockprotocol/type-system/rust/src/schema/array/mod.rs +++ b/libs/@blockprotocol/type-system/rust/src/schema/array/mod.rs @@ -2,6 +2,8 @@ mod raw; use serde::{Deserialize, Serialize, Serializer}; +use crate::schema::{OneOfSchema, PropertyType, PropertyValues}; + #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] #[serde(from = "raw::ArraySchema")] pub struct ArraySchema { @@ -30,3 +32,37 @@ pub enum ValueOrArray { Value(T), Array(ArraySchema), } + +pub trait PropertyArraySchema { + fn possibilities(&self) -> &[PropertyValues]; + fn min_items(&self) -> Option; + fn max_items(&self) -> Option; +} + +impl PropertyArraySchema for ArraySchema> { + fn possibilities(&self) -> &[PropertyValues] { + &self.items.possibilities + } + + fn min_items(&self) -> Option { + self.min_items + } + + fn max_items(&self) -> Option { + self.max_items + } +} + +impl PropertyArraySchema for ArraySchema<&PropertyType> { + fn possibilities(&self) -> &[PropertyValues] { + &self.items.one_of + } + + fn min_items(&self) -> Option { + self.min_items + } + + fn max_items(&self) -> Option { + self.max_items + } +} diff --git a/libs/@blockprotocol/type-system/rust/src/schema/mod.rs b/libs/@blockprotocol/type-system/rust/src/schema/mod.rs index 930d99df927..c5ca2e3ce38 100644 --- a/libs/@blockprotocol/type-system/rust/src/schema/mod.rs +++ b/libs/@blockprotocol/type-system/rust/src/schema/mod.rs @@ -13,8 +13,13 @@ mod array; mod object; mod one_of; +use core::{borrow::Borrow, future::Future}; + +use error_stack::{Context, Report}; +use futures::{stream, StreamExt, TryStreamExt}; + pub use self::{ - array::{ArraySchema, ValueOrArray}, + array::{ArraySchema, PropertyArraySchema, 
ValueOrArray}, data_type::{ ClosedDataType, ClosedDataTypeMetadata, ConversionDefinition, ConversionExpression, ConversionValue, Conversions, DataType, DataTypeLabel, DataTypeReference, @@ -25,10 +30,57 @@ pub use self::{ ClosedEntityType, ClosedEntityTypeSchemaData, EntityType, EntityTypeReference, EntityTypeValidationError, EntityTypeValidator, }, - object::{ObjectSchema, ObjectSchemaValidationError, ObjectSchemaValidator}, + object::{ + ObjectSchema, ObjectSchemaValidationError, ObjectSchemaValidator, PropertyObjectSchema, + }, one_of::{OneOfSchema, OneOfSchemaValidationError, OneOfSchemaValidator}, property_type::{ PropertyType, PropertyTypeReference, PropertyTypeValidationError, PropertyTypeValidator, - PropertyValues, + PropertyValueSchema, PropertyValues, }, }; +use crate::url::{BaseUrl, VersionedUrl}; + +pub trait OntologyTypeProvider { + fn provide_type( + &self, + type_id: &VersionedUrl, + ) -> impl Future + Send, Report>> + Send; +} + +pub trait DataTypeProvider: OntologyTypeProvider { + fn is_parent_of( + &self, + child: &VersionedUrl, + parent: &BaseUrl, + ) -> impl Future>> + Send; + fn has_children( + &self, + data_type: &VersionedUrl, + ) -> impl Future>> + Send; +} + +pub trait PropertyTypeProvider: OntologyTypeProvider {} + +pub trait EntityTypeProvider: OntologyTypeProvider { + fn is_parent_of( + &self, + child: &VersionedUrl, + parent: &BaseUrl, + ) -> impl Future>> + Send; + + fn provide_closed_type<'a, I>( + &self, + type_ids: I, + ) -> impl Future>> + Send + where + Self: Sync, + I: IntoIterator + Send, + { + stream::iter(type_ids) + .then(|entity_type_url| async { + Ok(self.provide_type(entity_type_url).await?.borrow().clone()) + }) + .try_collect::() + } +} diff --git a/libs/@blockprotocol/type-system/rust/src/schema/object/mod.rs b/libs/@blockprotocol/type-system/rust/src/schema/object/mod.rs index f8fd62f1516..5e2822f92dd 100644 --- a/libs/@blockprotocol/type-system/rust/src/schema/object/mod.rs +++ 
b/libs/@blockprotocol/type-system/rust/src/schema/object/mod.rs @@ -6,7 +6,10 @@ use std::collections::{HashMap, HashSet}; use serde::{Deserialize, Serialize, Serializer}; pub use self::validation::{ObjectSchemaValidationError, ObjectSchemaValidator}; -use crate::url::BaseUrl; +use crate::{ + schema::{ClosedEntityType, EntityType, PropertyTypeReference, ValueOrArray}, + url::BaseUrl, +}; #[derive(Debug, Clone, PartialEq, Eq, Deserialize)] #[serde(from = "raw::ObjectSchema")] @@ -28,6 +31,48 @@ where } } +pub trait PropertyObjectSchema { + type Value; + fn properties(&self) -> &HashMap; + fn required(&self) -> &HashSet; +} + +impl PropertyObjectSchema for ObjectSchema { + type Value = T; + + fn properties(&self) -> &HashMap { + &self.properties + } + + fn required(&self) -> &HashSet { + &self.required + } +} + +impl PropertyObjectSchema for EntityType { + type Value = ValueOrArray; + + fn properties(&self) -> &HashMap { + &self.properties + } + + fn required(&self) -> &HashSet { + &self.required + } +} + +impl PropertyObjectSchema for ClosedEntityType { + type Value = ValueOrArray; + + fn properties(&self) -> &HashMap { + &self.properties + } + + fn required(&self) -> &HashSet { + &self.required + } +} + #[cfg(test)] mod tests { use core::str::FromStr; diff --git a/libs/@blockprotocol/type-system/rust/src/schema/property_type/mod.rs b/libs/@blockprotocol/type-system/rust/src/schema/property_type/mod.rs index c24644e1b03..295936ba9ee 100644 --- a/libs/@blockprotocol/type-system/rust/src/schema/property_type/mod.rs +++ b/libs/@blockprotocol/type-system/rust/src/schema/property_type/mod.rs @@ -102,6 +102,22 @@ impl PropertyValues { } } +pub trait PropertyValueSchema { + fn possibilities(&self) -> &[PropertyValues]; +} + +impl PropertyValueSchema for &PropertyType { + fn possibilities(&self) -> &[PropertyValues] { + &self.one_of + } +} + +impl PropertyValueSchema for OneOfSchema { + fn possibilities(&self) -> &[PropertyValues] { + &self.possibilities + } +} + 
#[cfg(test)] mod tests { use core::str::FromStr; diff --git a/libs/@local/hash-graph-types/rust/Cargo.toml b/libs/@local/hash-graph-types/rust/Cargo.toml index 25df5c7a368..6df017217da 100644 --- a/libs/@local/hash-graph-types/rust/Cargo.toml +++ b/libs/@local/hash-graph-types/rust/Cargo.toml @@ -33,6 +33,7 @@ uuid = { workspace = true, default-features = false, features = ["serde", "v5"] [dev-dependencies] graph-test-data = { workspace = true } +tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } [features] postgres = ["dep:postgres-types", "temporal-versioning/postgres"] diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/entity/mod.rs b/libs/@local/hash-graph-types/rust/src/knowledge/entity/mod.rs index 991b7df481b..ceeb53c78f0 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/entity/mod.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/entity/mod.rs @@ -25,9 +25,11 @@ pub use self::{ use crate::{ knowledge::{ link::LinkData, - property::{PatchError, PropertyMetadataObject}, - Confidence, Property, PropertyMetadata, PropertyObject, PropertyPatchOperation, - PropertyWithMetadata, + property::{ + PatchError, Property, PropertyMetadata, PropertyMetadataObject, PropertyObject, + PropertyPatchOperation, PropertyWithMetadata, + }, + Confidence, }, owned_by_id::OwnedById, Embedding, @@ -378,7 +380,9 @@ mod tests { use type_system::url::BaseUrl; - use crate::knowledge::{Property, PropertyDiff, PropertyPath, PropertyPathElement}; + use crate::knowledge::property::{ + Property, PropertyDiff, PropertyPath, PropertyPathElement, + }; macro_rules! 
property { ($($json:tt)+) => { diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/link.rs b/libs/@local/hash-graph-types/rust/src/knowledge/link.rs index 8250e688856..8b5011fcc5d 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/link.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/link.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Serialize}; -use crate::knowledge::{entity::EntityId, Confidence, PropertyProvenance}; +use crate::knowledge::{entity::EntityId, property::PropertyProvenance, Confidence}; /// The associated information for 'Link' entities #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/mod.rs b/libs/@local/hash-graph-types/rust/src/knowledge/mod.rs index ee4cbf22968..5420d8f3c95 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/mod.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/mod.rs @@ -1,16 +1,7 @@ pub mod entity; pub mod link; +pub mod property; -pub use self::{ - confidence::Confidence, - entity::EntityTypeIdDiff, - property::{ - ArrayMetadata, ObjectMetadata, PatchError, Property, PropertyDiff, PropertyMetadata, - PropertyMetadataObject, PropertyObject, PropertyPatchOperation, PropertyPath, - PropertyPathElement, PropertyProvenance, PropertyWithMetadata, PropertyWithMetadataObject, - ValueMetadata, ValueWithMetadata, - }, -}; +pub use self::{confidence::Confidence, entity::EntityTypeIdDiff}; mod confidence; -mod property; diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/array.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/array.rs new file mode 100644 index 00000000000..44f329034cf --- /dev/null +++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/array.rs @@ -0,0 +1,20 @@ +use serde::{Deserialize, Serialize}; + +use crate::knowledge::property::{ArrayMetadata, PropertyWithMetadata}; + +#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] +#[cfg_attr(feature = "utoipa", 
derive(utoipa::ToSchema))] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct PropertyWithMetadataArray { + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub value: Vec, + #[serde(default, skip_serializing_if = "ArrayMetadata::is_empty")] + pub metadata: ArrayMetadata, +} + +impl PropertyWithMetadataArray { + #[must_use] + pub fn is_empty(&self) -> bool { + self.value.is_empty() && self.metadata.is_empty() + } +} diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/diff.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/diff.rs index ceacb884d59..b214389b6df 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/property/diff.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/diff.rs @@ -2,7 +2,7 @@ use alloc::borrow::Cow; use serde::Serialize; -use crate::knowledge::{Property, PropertyPath}; +use crate::knowledge::property::{Property, PropertyPath}; #[derive(Debug, Clone, PartialEq, Eq, Serialize)] #[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))] diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/array.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/array.rs index 422b5562e06..22d73d42ff3 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/array.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/array.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Serialize}; -use crate::knowledge::{Confidence, PropertyProvenance}; +use crate::knowledge::{property::PropertyProvenance, Confidence}; #[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] #[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))] diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/mod.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/mod.rs index 502cba069ef..e9f0a5cec31 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/mod.rs 
+++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/mod.rs @@ -2,7 +2,7 @@ pub use self::{ array::ArrayMetadata, object::{ObjectMetadata, PropertyMetadataObject}, provenance::PropertyProvenance, - value::{ValueMetadata, ValueWithMetadata}, + value::{PropertyWithMetadataValue, ValueMetadata}, }; mod array; diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/object.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/object.rs index b8a1ee855d5..1ba03e0195c 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/object.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/object.rs @@ -9,7 +9,10 @@ use postgres_types::{FromSql, IsNull, Json, ToSql, Type}; use serde::{Deserialize, Serialize}; use type_system::url::BaseUrl; -use crate::knowledge::{Confidence, PropertyMetadata, PropertyProvenance}; +use crate::knowledge::{ + property::{PropertyMetadata, PropertyProvenance}, + Confidence, +}; #[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] #[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))] diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/value.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/value.rs index 02f7f5d4d64..849782234bb 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/value.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/metadata/value.rs @@ -1,9 +1,9 @@ use serde::{Deserialize, Serialize}; use type_system::url::VersionedUrl; -use crate::knowledge::{Confidence, PropertyProvenance}; +use crate::knowledge::{property::PropertyProvenance, Confidence}; -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] #[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct ValueMetadata { @@ -20,7 +20,7 
@@ pub struct ValueMetadata { #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))] #[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct ValueWithMetadata { +pub struct PropertyWithMetadataValue { pub value: serde_json::Value, pub metadata: ValueMetadata, } diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/mod.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/mod.rs index 346b1a45354..300e01af0ed 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/property/mod.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/mod.rs @@ -1,3 +1,18 @@ +pub mod visitor; + +pub use self::{ + array::PropertyWithMetadataArray, + diff::PropertyDiff, + metadata::{ + ArrayMetadata, ObjectMetadata, PropertyMetadata, PropertyMetadataObject, + PropertyProvenance, PropertyWithMetadataValue, ValueMetadata, + }, + object::{PropertyObject, PropertyWithMetadataObject}, + patch::{PatchError, PropertyPatchOperation}, + path::{PropertyPath, PropertyPathElement}, +}; + +mod array; mod diff; mod metadata; mod object; @@ -8,7 +23,7 @@ use alloc::borrow::Cow; use core::{cmp::Ordering, fmt, iter, mem}; use std::{collections::HashMap, io}; -use error_stack::Report; +use error_stack::{Report, ResultExt}; use serde::{Deserialize, Serialize}; use serde_json::Value as JsonValue; use type_system::{ @@ -16,17 +31,6 @@ use type_system::{ url::{BaseUrl, VersionedUrl}, }; -pub use self::{ - diff::PropertyDiff, - metadata::{ - ArrayMetadata, ObjectMetadata, PropertyMetadata, PropertyMetadataObject, - PropertyProvenance, ValueMetadata, ValueWithMetadata, - }, - object::{PropertyObject, PropertyWithMetadataObject}, - patch::{PatchError, PropertyPatchOperation}, - path::{PropertyPath, PropertyPathElement}, -}; - #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))] #[serde(untagged)] @@ -40,21 +44,9 @@ pub enum Property { 
#[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))] #[serde(untagged, deny_unknown_fields)] pub enum PropertyWithMetadata { - #[cfg_attr(feature = "utoipa", schema(title = "PropertyWithMetadataArray"))] - Array { - #[serde(default, skip_serializing_if = "Vec::is_empty")] - value: Vec, - #[serde(default, skip_serializing_if = "ArrayMetadata::is_empty")] - metadata: ArrayMetadata, - }, - #[cfg_attr(feature = "utoipa", schema(title = "PropertyWithMetadataObject"))] - Object { - #[serde(default, skip_serializing_if = "HashMap::is_empty")] - value: HashMap, - #[serde(default, skip_serializing_if = "ObjectMetadata::is_empty")] - metadata: ObjectMetadata, - }, - Value(ValueWithMetadata), + Array(PropertyWithMetadataArray), + Object(PropertyWithMetadataObject), + Value(PropertyWithMetadataValue), } #[derive(Debug, thiserror::Error)] @@ -94,6 +86,32 @@ impl PropertyWithMetadata { } } + /// Modify the properties and confidence values of the entity. + /// + /// # Errors + /// + /// Returns an error if the patch operation failed + pub fn patch( + &mut self, + operations: impl IntoIterator, + ) -> Result<(), Report> { + for operation in operations { + match operation { + PropertyPatchOperation::Add { path, property } => { + self.add(path, property).change_context(PatchError)?; + } + PropertyPatchOperation::Remove { path } => { + self.remove(&path).change_context(PatchError)?; + } + PropertyPatchOperation::Replace { path, property } => { + self.replace(&path, property).change_context(PatchError)?; + } + } + } + + Ok(()) + } + fn get_mut( &mut self, path: &[PropertyPathElement<'_>], @@ -101,40 +119,31 @@ impl PropertyWithMetadata { let mut value = self; for path_element in path { match (value, path_element) { - ( - Self::Array { - value: elements, .. 
- }, - PropertyPathElement::Index(index), - ) => { - let len = elements.len(); - value = elements + (Self::Array(array), PropertyPathElement::Index(index)) => { + let len = array.value.len(); + value = array + .value .get_mut(*index) .ok_or(PropertyPathError::IndexOutOfBounds { index: *index, len })?; } - (Self::Array { .. }, PropertyPathElement::Property(key)) => { + (Self::Array(_), PropertyPathElement::Property(key)) => { return Err(Report::new(PropertyPathError::UnexpectedKey { key: key.clone().into_owned(), })); } - ( - Self::Object { - value: properties, .. - }, - PropertyPathElement::Property(key), - ) => { - value = properties.get_mut(key.as_ref()).ok_or_else(|| { + (Self::Object(object), PropertyPathElement::Property(key)) => { + value = object.value.get_mut(key.as_ref()).ok_or_else(|| { PropertyPathError::InvalidKey { key: key.clone().into_owned(), } })?; } - (Self::Object { .. }, PropertyPathElement::Index(index)) => { + (Self::Object(_), PropertyPathElement::Index(index)) => { return Err(Report::new(PropertyPathError::UnexpectedIndex { index: *index, })); } - (Self::Value { .. }, _) => { + (Self::Value(_), _) => { return Err(Report::new(PropertyPathError::UnexpectedValue)); } } @@ -162,40 +171,30 @@ impl PropertyWithMetadata { let parent = self.get_mut(path.as_ref())?; match (parent, last) { - ( - Self::Array { - value: elements, .. - }, - PropertyPathElement::Index(index), - ) => { - if index <= elements.len() { - elements.insert(index, value); + (Self::Array(array), PropertyPathElement::Index(index)) => { + if index <= array.value.len() { + array.value.insert(index, value); Ok(()) } else { Err(Report::new(PropertyPathError::IndexOutOfBounds { index, - len: elements.len(), + len: array.value.len(), })) } } - (Self::Array { .. 
}, PropertyPathElement::Property(key)) => { + (Self::Array(_), PropertyPathElement::Property(key)) => { Err(Report::new(PropertyPathError::UnexpectedKey { key: key.clone().into_owned(), })) } - ( - Self::Object { - value: properties, .. - }, - PropertyPathElement::Property(key), - ) => { - properties.insert(key.into_owned(), value); + (Self::Object(object), PropertyPathElement::Property(key)) => { + object.value.insert(key.into_owned(), value); Ok(()) } - (Self::Object { .. }, PropertyPathElement::Index(index)) => { + (Self::Object(_), PropertyPathElement::Index(index)) => { Err(Report::new(PropertyPathError::UnexpectedIndex { index })) } - (Self::Value { .. }, _) => Err(Report::new(PropertyPathError::UnexpectedValue)), + (Self::Value(_), _) => Err(Report::new(PropertyPathError::UnexpectedValue)), } } @@ -228,34 +227,24 @@ impl PropertyWithMetadata { }; let parent = self.get_mut(path)?; match (parent, last) { - ( - Self::Array { - value: elements, .. - }, - PropertyPathElement::Index(index), - ) => { - if *index <= elements.len() { - elements.remove(*index); + (Self::Array(array), PropertyPathElement::Index(index)) => { + if *index <= array.value.len() { + array.value.remove(*index); Ok(()) } else { Err(Report::new(PropertyPathError::IndexOutOfBounds { index: *index, - len: elements.len(), + len: array.value.len(), })) } } - (Self::Array { .. }, PropertyPathElement::Property(key)) => { + (Self::Array(_), PropertyPathElement::Property(key)) => { Err(Report::new(PropertyPathError::UnexpectedKey { key: key.clone().into_owned(), })) } - ( - Self::Object { - value: properties, .. - }, - PropertyPathElement::Property(key), - ) => { - properties.remove(key); + (Self::Object(object), PropertyPathElement::Property(key)) => { + object.value.remove(key); Ok(()) } (Self::Object { .. 
}, PropertyPathElement::Index(index)) => { @@ -283,7 +272,7 @@ impl PropertyWithMetadata { value: metadata_elements, metadata, }), - ) => Ok(Self::Array { + ) => Ok(Self::Array(PropertyWithMetadataArray { value: metadata_elements .into_iter() .map(Some) @@ -292,21 +281,21 @@ impl PropertyWithMetadata { .map(|(metadata, property)| Self::from_parts(property, metadata)) .collect::>()?, metadata, - }), - (Property::Array(properties), None) => Ok(Self::Array { + })), + (Property::Array(properties), None) => Ok(Self::Array(PropertyWithMetadataArray { value: properties .into_iter() .map(|property| Self::from_parts(property, None)) .collect::>()?, metadata: ArrayMetadata::default(), - }), + })), ( Property::Object(properties), Some(PropertyMetadata::Object { value: mut metadata_elements, metadata, }), - ) => Ok(Self::Object { + ) => Ok(Self::Object(PropertyWithMetadataObject { value: properties .into_iter() .map(|(key, property)| { @@ -318,8 +307,8 @@ impl PropertyWithMetadata { }) .collect::>()?, metadata, - }), - (Property::Object(properties), None) => Ok(Self::Object { + })), + (Property::Object(properties), None) => Ok(Self::Object(PropertyWithMetadataObject { value: properties .into_iter() .map(|(key, property)| { @@ -327,11 +316,11 @@ impl PropertyWithMetadata { }) .collect::>()?, metadata: ObjectMetadata::default(), - }), + })), (Property::Value(value), Some(PropertyMetadata::Value { metadata })) => { - Ok(Self::Value(ValueWithMetadata { value, metadata })) + Ok(Self::Value(PropertyWithMetadataValue { value, metadata })) } - (Property::Value(value), None) => Ok(Self::Value(ValueWithMetadata { + (Property::Value(value), None) => Ok(Self::Value(PropertyWithMetadataValue { value, metadata: ValueMetadata { provenance: PropertyProvenance::default(), @@ -345,19 +334,20 @@ impl PropertyWithMetadata { pub fn into_parts(self) -> (Property, PropertyMetadata) { match self { - Self::Array { value, metadata } => { + Self::Array(array) => { let (properties, metadata_elements) = - 
value.into_iter().map(Self::into_parts).unzip(); + array.value.into_iter().map(Self::into_parts).unzip(); ( Property::Array(properties), PropertyMetadata::Array { value: metadata_elements, - metadata, + metadata: array.metadata, }, ) } - Self::Object { value, metadata } => { - let (properties, metadata_properties) = value + Self::Object(object) => { + let (properties, metadata_properties) = object + .value .into_iter() .map(|(base_url, property_with_metadata)| { let (property, metadata) = property_with_metadata.into_parts(); @@ -368,7 +358,7 @@ impl PropertyWithMetadata { Property::Object(PropertyObject::new(properties)), PropertyMetadata::Object { value: metadata_properties, - metadata, + metadata: object.metadata, }, ) } diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/object.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/object.rs index 52278243861..aa657ab3d5d 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/property/object.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/object.rs @@ -11,9 +11,9 @@ use serde::{Deserialize, Serialize}; use serde_json::Value as JsonValue; use type_system::url::BaseUrl; -use crate::knowledge::{ - property::PropertyPathError, ObjectMetadata, Property, PropertyDiff, PropertyMetadataObject, - PropertyPath, PropertyPathElement, PropertyWithMetadata, +use crate::knowledge::property::{ + ObjectMetadata, Property, PropertyDiff, PropertyMetadataObject, PropertyPath, + PropertyPathElement, PropertyPathError, PropertyWithMetadata, }; #[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)] diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/patch.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/patch.rs index de299eda297..c828b5a8c92 100644 --- a/libs/@local/hash-graph-types/rust/src/knowledge/property/patch.rs +++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/patch.rs @@ -1,7 +1,7 @@ use serde::Deserialize; use 
thiserror::Error; -use crate::knowledge::{PropertyPath, PropertyWithMetadata}; +use crate::knowledge::property::{PropertyPath, PropertyWithMetadata}; #[derive(Debug, Deserialize)] #[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))] diff --git a/libs/@local/hash-graph-types/rust/src/knowledge/property/visitor.rs b/libs/@local/hash-graph-types/rust/src/knowledge/property/visitor.rs new file mode 100644 index 00000000000..0e8b94278ae --- /dev/null +++ b/libs/@local/hash-graph-types/rust/src/knowledge/property/visitor.rs @@ -0,0 +1,589 @@ +use core::{borrow::Borrow, future::Future}; + +use error_stack::{bail, Report, ResultExt}; +use serde_json::Value as JsonValue; +use type_system::{ + schema::{ + ArraySchema, DataType, DataTypeProvider, DataTypeReference, JsonSchemaValueType, + OntologyTypeProvider, PropertyObjectSchema, PropertyType, PropertyTypeProvider, + PropertyTypeReference, PropertyValueSchema, PropertyValues, ValueOrArray, + }, + url::{BaseUrl, VersionedUrl}, +}; + +use crate::knowledge::property::{ + PropertyWithMetadata, PropertyWithMetadataArray, PropertyWithMetadataObject, + PropertyWithMetadataValue, ValueMetadata, +}; + +#[derive(Debug, thiserror::Error)] +pub enum TraversalError { + #[error("the validator was unable to read the data type `{}`", id.url)] + DataTypeRetrieval { id: DataTypeReference }, + #[error("the validator was unable to read the property type `{}`", id.url)] + PropertyTypeRetrieval { id: PropertyTypeReference }, + + #[error("the property `{key}` was specified, but not in the schema")] + UnexpectedProperty { key: BaseUrl }, + #[error( + "the value provided does not match the property type schema, expected `{expected}`, got \ + `{actual}`" + )] + InvalidType { + actual: JsonSchemaValueType, + expected: JsonSchemaValueType, + }, + #[error("a value was expected, but the property provided was of type `{actual}`")] + ExpectedValue { actual: JsonSchemaValueType }, + #[error("The property provided is ambiguous")] + AmbiguousProperty { 
actual: PropertyWithMetadata }, + #[error("The data type ID was not specified and is ambiguous.")] + AmbiguousDataType, + + #[error( + "the value provided does not match the data type in the metadata, expected `{expected}` \ + or a child of it, got `{actual}`" + )] + InvalidDataType { + actual: VersionedUrl, + expected: VersionedUrl, + }, + #[error("the value provided does not match the constraints of the data type")] + ConstraintUnfulfilled, + #[error("the property `{key}` was required, but not specified")] + MissingRequiredProperty { key: BaseUrl }, + #[error( + "the number of items in the array is too small, expected at least {min}, but found \ + {actual}" + )] + TooFewItems { actual: usize, min: usize }, + #[error( + "the number of items in the array is too large, expected at most {max}, but found {actual}" + )] + TooManyItems { actual: usize, max: usize }, +} + +// TODO: Allow usage of other error types +pub trait EntityVisitor: Sized + Send + Sync { + /// Visits a leaf value. + /// + /// By default, this does nothing. + #[expect(unused_variables, reason = "No-op implementation")] + fn visit_value
<P>
( + &mut self, + schema: &DataType, + value: &mut JsonValue, + metadata: &mut ValueMetadata, + type_provider: &P, + ) -> impl Future>> + Send + where + P: DataTypeProvider + Sync, + { + async { Ok(()) } + } + + /// Visits a property. + /// + /// By default, this forwards to [`walk_property`]. + fn visit_property
<P>
( + &mut self, + schema: &PropertyType, + property: &mut PropertyWithMetadata, + type_provider: &P, + ) -> impl Future>> + Send + where + P: DataTypeProvider + PropertyTypeProvider + Sync, + { + walk_property(self, schema, property, type_provider) + } + + /// Visits an array property. + /// + /// By default, this forwards to [`walk_array`]. + fn visit_array( + &mut self, + schema: &ArraySchema, + array: &mut PropertyWithMetadataArray, + type_provider: &P, + ) -> impl Future>> + Send + where + T: PropertyValueSchema + Sync, + P: DataTypeProvider + PropertyTypeProvider + Sync, + { + walk_array(self, schema, array, type_provider) + } + + /// Visits an object property. + /// + /// By default, this forwards to [`walk_object`]. + fn visit_object( + &mut self, + schema: &T, + object: &mut PropertyWithMetadataObject, + type_provider: &P, + ) -> impl Future>> + Send + where + T: PropertyObjectSchema> + Sync, + P: DataTypeProvider + PropertyTypeProvider + Sync, + { + walk_object(self, schema, object, type_provider) + } + + /// Visits a property value using the [`PropertyValues`] from a one-of schema. + /// + /// By default, this forwards to [`walk_one_of_property_value`]. + fn visit_one_of_property
<P>
( + &mut self, + schema: &[PropertyValues], + property: &mut PropertyWithMetadataValue, + type_provider: &P, + ) -> impl Future>> + Send + where + P: DataTypeProvider + Sync, + { + walk_one_of_property_value(self, schema, property, type_provider) + } + + /// Visits an array property using the [`PropertyValues`] from a one-of schema. + /// + /// By default, this forwards to [`walk_one_of_array`]. + fn visit_one_of_array
<P>
( + &mut self, + schema: &[PropertyValues], + array: &mut PropertyWithMetadataArray, + type_provider: &P, + ) -> impl Future>> + Send + where + P: DataTypeProvider + PropertyTypeProvider + Sync, + { + walk_one_of_array(self, schema, array, type_provider) + } + + /// Visits an object property using the [`PropertyValues`] from a one-of schema. + /// + /// By default, this forwards to [`walk_one_of_object`]. + fn visit_one_of_object
<P>
( + &mut self, + schema: &[PropertyValues], + object: &mut PropertyWithMetadataObject, + type_provider: &P, + ) -> impl Future>> + Send + where + P: DataTypeProvider + PropertyTypeProvider + Sync, + { + walk_one_of_object(self, schema, object, type_provider) + } +} + +macro_rules! extend_report { + ($status:ident, $error:expr $(,)?) => { + if let Err(ref mut report) = $status { + report.extend_one(error_stack::report!($error)) + } else { + $status = Err(error_stack::report!($error)) + } + }; +} + +/// Walks through a property using the provided schema. +/// +/// Depending on the property, [`EntityVisitor::visit_one_of_property`], +/// [`EntityVisitor::visit_one_of_array`], or [`EntityVisitor::visit_one_of_object`] is called. +/// +/// # Errors +/// +/// Any error that can be returned by the visitor methods. +pub async fn walk_property( + visitor: &mut V, + schema: &PropertyType, + property: &mut PropertyWithMetadata, + type_provider: &P, +) -> Result<(), Report> +where + V: EntityVisitor, + P: DataTypeProvider + PropertyTypeProvider + Sync, +{ + match property { + PropertyWithMetadata::Value(value) => { + visitor + .visit_one_of_property(&schema.one_of, value, type_provider) + .await + } + PropertyWithMetadata::Array(array) => { + visitor + .visit_one_of_array(&schema.one_of, array, type_provider) + .await + } + PropertyWithMetadata::Object(object) => { + visitor + .visit_one_of_object(&schema.one_of, object, type_provider) + .await + } + } +} + +/// Walks through an array property using the provided schema. +/// +/// Depending on the property, [`EntityVisitor::visit_one_of_property`], +/// [`EntityVisitor::visit_one_of_array`], or [`EntityVisitor::visit_one_of_object`] is called. +/// +/// # Errors +/// +/// Any error that can be returned by the visitor methods. 
+pub async fn walk_array( + visitor: &mut V, + schema: &ArraySchema, + array: &mut PropertyWithMetadataArray, + type_provider: &P, +) -> Result<(), Report> +where + V: EntityVisitor, + S: PropertyValueSchema + Sync, + P: DataTypeProvider + PropertyTypeProvider + Sync, +{ + let mut status = Ok::<_, Report>(()); + for property in &mut array.value { + match property { + PropertyWithMetadata::Value(value) => { + if let Err(error) = visitor + .visit_one_of_property(schema.items.possibilities(), value, type_provider) + .await + { + extend_report!(status, error); + } + } + PropertyWithMetadata::Array(array) => { + if let Err(error) = visitor + .visit_one_of_array(schema.items.possibilities(), array, type_provider) + .await + { + extend_report!(status, error); + } + } + PropertyWithMetadata::Object(object) => { + if let Err(error) = visitor + .visit_one_of_object(schema.items.possibilities(), object, type_provider) + .await + { + extend_report!(status, error); + } + } + } + } + + status +} + +/// Walks through a property object using the provided schema. +/// +/// For each url/property pair in the `properties` map, the property type is retrieved from `schema` +/// and the `visitor` is called to further traverse the property object. The `type_provider` is used +/// to resolve the property types specified in the `schema`. +/// +/// Depending on the property, [`EntityVisitor::visit_property`] or [`EntityVisitor::visit_array`] +/// is called. +/// +/// # Errors +/// +/// - [`UnexpectedProperty`] if a property is specified that is not in the schema. +/// - [`PropertyTypeRetrieval`] if a property type could not be retrieved from the `type_provider`. +/// - [`InvalidType`] if the schema expects an array, but a value or object is provided. +/// - Any error that can be returned by the visitor methods. 
+/// +/// [`UnexpectedProperty`]: TraversalError::UnexpectedProperty +/// [`PropertyTypeRetrieval`]: TraversalError::PropertyTypeRetrieval +/// [`InvalidType`]: TraversalError::InvalidType +pub async fn walk_object( + visitor: &mut V, + schema: &S, + object: &mut PropertyWithMetadataObject, + type_provider: &P, +) -> Result<(), Report> +where + V: EntityVisitor, + S: PropertyObjectSchema> + Sync, + P: DataTypeProvider + PropertyTypeProvider + Sync, +{ + let mut status = Ok::<_, Report>(()); + + for (base_url, property) in &mut object.value { + let Some(property_type_reference) = schema.properties().get(base_url) else { + extend_report!( + status, + TraversalError::UnexpectedProperty { + key: base_url.clone() + } + ); + continue; + }; + + match property_type_reference { + ValueOrArray::Value(property_type_reference) => { + let property_type =

>::provide_type( + type_provider, + &property_type_reference.url, + ) + .await + .change_context_lazy(|| TraversalError::PropertyTypeRetrieval { + id: property_type_reference.clone(), + })?; + visitor + .visit_property(property_type.borrow(), property, type_provider) + .await?; + } + ValueOrArray::Array(array_schema) => match property { + PropertyWithMetadata::Array(array) => { + let property_type =

>::provide_type( + type_provider, + &array_schema.items.url, + ) + .await + .change_context_lazy(|| { + TraversalError::PropertyTypeRetrieval { + id: array_schema.items.clone(), + } + })?; + let result = visitor + .visit_array( + &ArraySchema { + items: property_type.borrow(), + min_items: array_schema.min_items, + max_items: array_schema.max_items, + }, + array, + type_provider, + ) + .await; + if let Err(error) = result { + extend_report!(status, error); + } + } + PropertyWithMetadata::Object { .. } | PropertyWithMetadata::Value(_) => { + bail!(TraversalError::InvalidType { + actual: property.json_type(), + expected: JsonSchemaValueType::Array, + }) + } + }, + }; + } + status +} + +/// Walks through a property value using the provided schema list. +/// +/// # Errors +/// +/// - [`ExpectedValue`] if an array or object is provided. +/// - [`DataTypeRetrieval`] if a data type could not be retrieved from the `type_provider`. +/// - [`AmbiguousProperty`] if more than one schema is passed. +/// - Any error that can be returned by the visitor methods. 
+/// +/// [`ExpectedValue`]: TraversalError::ExpectedValue +/// [`DataTypeRetrieval`]: TraversalError::DataTypeRetrieval +/// [`AmbiguousProperty`]: TraversalError::AmbiguousProperty +pub async fn walk_one_of_property_value( + visitor: &mut V, + schema: &[PropertyValues], + property: &mut PropertyWithMetadataValue, + type_provider: &P, +) -> Result<(), Report> +where + V: EntityVisitor, + P: DataTypeProvider + Sync, +{ + let mut status: Result<(), Report> = Ok(()); + let mut passed: usize = 0; + + for schema in schema { + match schema { + PropertyValues::DataTypeReference(data_type_ref) => { + let data_type = type_provider + .provide_type(&data_type_ref.url) + .await + .change_context_lazy(|| TraversalError::DataTypeRetrieval { + id: data_type_ref.clone(), + })?; + if let Err(error) = visitor + .visit_value( + data_type.borrow(), + &mut property.value, + &mut property.metadata, + type_provider, + ) + .await + { + extend_report!(status, error); + } else { + passed += 1; + } + } + PropertyValues::ArrayOfPropertyValues(_) => { + extend_report!( + status, + TraversalError::ExpectedValue { + actual: JsonSchemaValueType::Array, + } + ); + } + PropertyValues::PropertyTypeObject(_) => { + extend_report!( + status, + TraversalError::ExpectedValue { + actual: JsonSchemaValueType::Object, + } + ); + } + } + } + + match passed { + 0 => status, + 1 => Ok(()), + _ => { + extend_report!( + status, + TraversalError::AmbiguousProperty { + actual: PropertyWithMetadata::Value(property.clone()), + } + ); + status + } + } +} + +/// Walks through an array property using the provided schema list. +/// +/// # Errors +/// +/// - [`ExpectedValue`] if a value or object is provided. +/// - [`AmbiguousProperty`] if more than one schema is passed. +/// - Any error that can be returned by the visitor methods. 
+/// +/// [`ExpectedValue`]: TraversalError::ExpectedValue +/// [`AmbiguousProperty`]: TraversalError::AmbiguousProperty +pub async fn walk_one_of_array( + visitor: &mut V, + schema: &[PropertyValues], + array: &mut PropertyWithMetadataArray, + type_provider: &P, +) -> Result<(), Report> +where + V: EntityVisitor, + P: DataTypeProvider + PropertyTypeProvider + Sync, +{ + let mut status: Result<(), Report> = Ok(()); + let mut passed: usize = 0; + + for schema in schema { + match schema { + PropertyValues::DataTypeReference(_) => { + extend_report!( + status, + TraversalError::ExpectedValue { + actual: JsonSchemaValueType::Array, + } + ); + } + PropertyValues::ArrayOfPropertyValues(array_schema) => { + if let Err(error) = + Box::pin(visitor.visit_array(array_schema, array, type_provider)).await + { + extend_report!(status, error); + } else { + passed += 1; + } + } + PropertyValues::PropertyTypeObject(_) => { + extend_report!( + status, + TraversalError::ExpectedValue { + actual: JsonSchemaValueType::Object, + } + ); + } + } + } + + match passed { + 0 => status, + 1 => Ok(()), + _ => { + extend_report!( + status, + TraversalError::AmbiguousProperty { + actual: PropertyWithMetadata::Array(array.clone()), + } + ); + status + } + } +} + +/// Walks through an object property using the provided schema list. +/// +/// # Errors +/// +/// - [`ExpectedValue`] if a value or array is provided. +/// - [`AmbiguousProperty`] if more than one schema is passed. +/// - Any error that can be returned by the visitor methods. 
+/// +/// [`ExpectedValue`]: TraversalError::ExpectedValue +/// [`AmbiguousProperty`]: TraversalError::AmbiguousProperty +pub async fn walk_one_of_object( + visitor: &mut V, + schema: &[PropertyValues], + object: &mut PropertyWithMetadataObject, + type_provider: &P, +) -> Result<(), Report> +where + V: EntityVisitor, + P: DataTypeProvider + PropertyTypeProvider + Sync, +{ + let mut status: Result<(), Report> = Ok(()); + let mut passed: usize = 0; + + for schema in schema { + match schema { + PropertyValues::DataTypeReference(_) => { + extend_report!( + status, + TraversalError::ExpectedValue { + actual: JsonSchemaValueType::Array, + } + ); + } + PropertyValues::ArrayOfPropertyValues(_) => { + extend_report!( + status, + TraversalError::ExpectedValue { + actual: JsonSchemaValueType::Object, + } + ); + } + PropertyValues::PropertyTypeObject(object_schema) => { + if let Err(error) = + Box::pin(visitor.visit_object(object_schema, object, type_provider)).await + { + extend_report!(status, error); + } else { + passed += 1; + } + } + } + } + + match passed { + 0 => status, + 1 => Ok(()), + _ => { + extend_report!( + status, + TraversalError::AmbiguousProperty { + actual: PropertyWithMetadata::Object(object.clone()), + } + ); + status + } + } +} diff --git a/libs/@local/hash-validation/src/data_type.rs b/libs/@local/hash-validation/src/data_type.rs deleted file mode 100644 index 6f86f43c0f9..00000000000 --- a/libs/@local/hash-validation/src/data_type.rs +++ /dev/null @@ -1,1650 +0,0 @@ -use core::borrow::Borrow; - -use error_stack::{Report, ResultExt}; -use graph_types::{ - knowledge::{Property, ValueWithMetadata}, - ontology::DataTypeId, -}; -use regex::Regex; -use serde_json::{Number as JsonNumber, Value as JsonValue}; -use thiserror::Error; -use type_system::{ - schema::{DataType, DataTypeReference, JsonSchemaValueType}, - url::VersionedUrl, -}; - -use crate::{ - error::{Actual, Expected}, - DataTypeProvider, Schema, Validate, ValidateEntityComponents, -}; - 
-macro_rules! extend_report { - ($status:ident, $error:expr $(,)?) => { - if let Err(ref mut report) = $status { - report.extend_one(error_stack::report!($error)) - } else { - $status = Err(error_stack::report!($error)) - } - }; -} - -#[derive(Debug, Error)] -pub enum DataTypeConstraint { - #[error("the provided value is not equal to the expected value")] - Const { - actual: Property, - expected: JsonValue, - }, - #[error("the provided value is not one of the expected values")] - Enum { - actual: Property, - expected: JsonValue, - }, - #[error( - "the provided value is not greater than or equal to the minimum value, got `{actual}`, \ - expected `{expected}`" - )] - Minimum { - actual: JsonNumber, - expected: JsonNumber, - }, - #[error( - "the provided value is not less than or equal to the maximum value, got `{actual}`, \ - expected `{expected}`" - )] - Maximum { - actual: JsonNumber, - expected: JsonNumber, - }, - #[error( - "the provided value is not greater than the minimum value, got `{actual}`, expected \ - `{expected}`" - )] - ExclusiveMinimum { - actual: JsonNumber, - expected: JsonNumber, - }, - #[error( - "the provided value is not less than the maximum value, got `{actual}`, expected \ - `{expected}`" - )] - ExclusiveMaximum { - actual: JsonNumber, - expected: JsonNumber, - }, - #[error( - "the provided value is not a multiple of the expected value, got `{actual}`, expected \ - `{expected}`" - )] - MultipleOf { - actual: JsonNumber, - expected: JsonNumber, - }, - #[error( - "the provided value is shorter than the minimum length, got `{actual}`, expected a string \ - of at least length `{expected}`" - )] - MinLength { actual: String, expected: usize }, - #[error( - "the provided value is longer than the maximum length, got `{actual}`, expected a string \ - of at most length `{expected}`" - )] - MaxLength { actual: String, expected: usize }, - #[error("the provided pattern could not be compiled, got `{pattern}`")] - InvalidPattern { pattern: String }, - 
#[error("the provided value does not match the expected pattern `{pattern}`, got `{actual}`")] - Pattern { actual: String, pattern: Regex }, - #[error("the provided value `{actual}` does not match the expected format `{format}`")] - Format { - actual: String, - format: &'static str, - }, - #[error("unknown constraint: `{key}`")] - UnknownConstraint { key: String }, - #[error("unknown format: `{key}`")] - UnknownFormat { key: String }, -} - -#[derive(Debug, Error)] -pub enum DataValidationError { - #[error("the validator was unable to read the data type: `{id}`")] - DataTypeRetrieval { id: VersionedUrl }, - #[error( - "the value provided does not match the data type, expected `{expected}`, got `{actual}`" - )] - InvalidType { - actual: JsonSchemaValueType, - expected: JsonSchemaValueType, - }, - #[error( - "the value provided does not match the data type in the metadata, expected `{expected}` \ - or a child of it, got `{actual}`" - )] - InvalidDataType { - actual: VersionedUrl, - expected: VersionedUrl, - }, - #[error("the value provided does not match the constraints of the data type")] - ConstraintUnfulfilled, - #[error( - "The expected data type has potential child data type so a data type needs to be \ - specified. Expected `{expected}`" - )] - AmbiguousDataType { expected: VersionedUrl }, -} - -impl

Schema for DataType -where - P: DataTypeProvider + Sync, -{ - type Error = DataValidationError; - - async fn validate_value<'a>( - &'a self, - value: &'a ValueWithMetadata, - _: ValidateEntityComponents, - provider: &'a P, - ) -> Result<(), Report> { - let mut status: Result<(), Report> = Ok(()); - - if let Some(data_type_url) = &value.metadata.data_type_id { - if self.id != *data_type_url { - let is_compatible = provider - .is_parent_of(data_type_url, &self.id.base_url) - .await - .change_context_lazy(|| DataValidationError::DataTypeRetrieval { - id: self.id.clone(), - })?; - - if !is_compatible { - extend_report!( - status, - DataValidationError::InvalidDataType { - actual: data_type_url.clone(), - expected: self.id.clone(), - } - ); - } - - if let Err(err) = provider - .provide_type(data_type_url) - .await - .change_context_lazy(|| DataValidationError::DataTypeRetrieval { - id: data_type_url.clone(), - })? - .borrow() - .validate_constraints(&value.value) - .change_context(DataValidationError::ConstraintUnfulfilled) - { - extend_report!(status, err); - } - } - } else if provider - .has_children(DataTypeId::from_url(&self.id)) - .await - .change_context_lazy(|| DataValidationError::DataTypeRetrieval { - id: self.id.clone(), - })? - { - extend_report!( - status, - DataValidationError::AmbiguousDataType { - expected: self.id.clone(), - } - ); - } - - if let Err(err) = self - .validate_constraints(&value.value) - .change_context(DataValidationError::ConstraintUnfulfilled) - { - extend_report!(status, err); - } - - status - } -} - -impl

Validate for ValueWithMetadata -where - P: DataTypeProvider + Sync, -{ - type Error = DataValidationError; - - async fn validate( - &self, - schema: &DataType, - components: ValidateEntityComponents, - context: &P, - ) -> Result<(), Report> { - schema.validate_value(self, components, context).await - } -} - -impl

Schema for DataTypeReference -where - P: DataTypeProvider + Sync, -{ - type Error = DataValidationError; - - async fn validate_value<'a>( - &'a self, - value: &'a ValueWithMetadata, - components: ValidateEntityComponents, - provider: &'a P, - ) -> Result<(), Report> { - let data_type = provider - .provide_type(&self.url) - .await - .change_context_lazy(|| DataValidationError::DataTypeRetrieval { - id: self.url.clone(), - })?; - data_type - .borrow() - .validate_value(value, components, provider) - .await - .attach_lazy(|| Expected::DataType(data_type.borrow().clone())) - .attach_lazy(|| Actual::Json(value.value.clone())) - } -} - -impl

Validate for ValueWithMetadata -where - P: DataTypeProvider + Sync, -{ - type Error = DataValidationError; - - async fn validate( - &self, - schema: &DataTypeReference, - components: ValidateEntityComponents, - context: &P, - ) -> Result<(), Report> { - schema.validate_value(self, components, context).await - } -} - -#[cfg(test)] -mod tests { - use serde_json::json; - use uuid::Uuid; - - use crate::{tests::validate_data, ValidateEntityComponents}; - - #[tokio::test] - async fn null() { - validate_data( - json!(null), - graph_test_data::data_type::NULL_V1, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn boolean() { - validate_data( - json!(true), - graph_test_data::data_type::BOOLEAN_V1, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn number() { - validate_data( - json!(42), - graph_test_data::data_type::NUMBER_V1, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn integer() { - let integer_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/integer/v/1", - "title": "Integer", - "type": "integer" - })) - .expect("failed to serialize temperature unit type"); - - validate_data(json!(10), &integer_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - validate_data(json!(-10), &integer_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - validate_data(json!(1.0), &integer_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - _ = validate_data( - json!(core::f64::consts::PI), - &integer_type, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - - _ = validate_data( - json!("foo"), - &integer_type, - 
ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn string() { - validate_data( - json!("foo"), - graph_test_data::data_type::TEXT_V1, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn array() { - validate_data( - json!([]), - graph_test_data::data_type::EMPTY_LIST_V1, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_data( - json!(["foo", "bar"]), - graph_test_data::data_type::EMPTY_LIST_V1, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn object() { - validate_data( - json!({ - "foo": "bar", - "baz": "qux" - }), - graph_test_data::data_type::OBJECT_V1, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn temperature_unit() { - let meter_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/temperature-unit/v/1", - "title": "Temperature Unit", - "type": "string", - "enum": ["Celsius", "Fahrenheit", "Kelvin"] - })) - .expect("failed to serialize temperature unit type"); - - validate_data( - json!("Celsius"), - &meter_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("Fahrenheit"), - &meter_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_data(json!("foo"), &meter_type, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn meter() { - let meter_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": 
"https://localhost:4000/@alice/types/data-type/meter/v/1", - "title": "Meter", - "type": "number", - "minimum": 0, - })) - .expect("failed to serialize meter type"); - - validate_data(json!(10), &meter_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - validate_data(json!(0.0), &meter_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - _ = validate_data(json!(-1.0), &meter_type, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn uri() { - let url_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/url/v/1", - "title": "Url", - "type": "string", - "format": "uri", - })) - .expect("failed to serialize uri type"); - - validate_data( - json!("localhost:3000"), - &url_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("https://blockprotocol.org/types/modules/graph/0.3/schema/data-type"), - &url_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_data(json!("10"), &url_type, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn uuid() { - let uuid_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/uuid/v/1", - "title": "UUID", - "type": "string", - "format": "uuid", - })) - .expect("failed to serialize uuid type"); - - validate_data( - json!(Uuid::nil()), - &uuid_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("00000000-0000-0000-0000-000000000000"), - &uuid_type, - ValidateEntityComponents::full(), - ) - .await - 
.expect("validation failed"); - - validate_data( - json!("AC8E0011-84C3-4A7E-872D-1B9F86DB0479"), - &uuid_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("urn:uuid:cc2c0477-2fe7-4eb4-af7b-45bfe7d7bb26"), - &uuid_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("9544f491598e4c238f6bbb8c1f7d05c9"), - &uuid_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_data(json!("10"), &uuid_type, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn email() { - let mail_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/email/v/1", - "title": "E-Mail", - "type": "string", - "format": "email", - })) - .expect("failed to serialize email type"); - - validate_data( - json!("bob@example.com"), - &mail_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("user.name+tag+sorting@example.com"), - &mail_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_data( - json!("job!done"), - &mail_type, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn zip_code_us() { - let zip_code = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/zip-code-us/v/1", - "title": "Zip code (US)", - "type": "string", - "pattern": "^[0-9]{5}(?:-[0-9]{4})?$", - })) - .expect("failed to serialize zip code type"); - - validate_data(json!("12345"), &zip_code, ValidateEntityComponents::full()) - .await - 
.expect("validation failed"); - - validate_data( - json!("12345-6789"), - &zip_code, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_data(json!("1234"), &zip_code, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn ipv4() { - let ipv4_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/ipv4/v/1", - "title": "IPv4", - "type": "string", - "format": "ipv4", - })) - .expect("failed to serialize ipv4 type"); - - validate_data( - json!("127.0.0.1"), - &ipv4_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("0.0.0.0"), - &ipv4_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("255.255.255.255"), - &ipv4_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_data( - json!("255.255.255.256"), - &ipv4_type, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - - _ = validate_data( - json!("localhost"), - &ipv4_type, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn ipv6() { - let ipv6_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/ipv6/v/1", - "title": "IPv6", - "type": "string", - "format": "ipv6", - })) - .expect("failed to serialize ipv6 type"); - - validate_data(json!("::1"), &ipv6_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - validate_data(json!("::"), &ipv6_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - 
validate_data( - json!("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"), - &ipv6_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_data( - json!("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"), - &ipv6_type, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - - _ = validate_data( - json!("localhost"), - &ipv6_type, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn hostname() { - let hostname_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/hostname/v/1", - "title": "Hostname", - "type": "string", - "format": "hostname", - })) - .expect("failed to serialize hostname type"); - - validate_data( - json!("localhost"), - &hostname_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("[::1]"), - &hostname_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("127.0.0.1"), - &hostname_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("example.com"), - &hostname_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("subdomain.example.com"), - &hostname_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_data( - json!("subdomain.example.com."), - &hostname_type, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_data( - json!("localhost:3000"), - &hostname_type, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - - _ = validate_data( - json!("::1"), - &hostname_type, - ValidateEntityComponents::full(), 
- ) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn regex() { - let regex_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/regex/v/1", - "title": "Regex", - "type": "string", - "format": "regex", - })) - .expect("failed to serialize regex type"); - - validate_data(json!("^a*$"), ®ex_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - validate_data(json!("^a+$"), ®ex_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - _ = validate_data(json!("("), ®ex_type, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn short_string() { - let url_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/short-string/v/1", - "title": "Short string", - "type": "string", - "minLength": 1, - "maxLength": 10, - })) - .expect("failed to serialize short string type"); - - validate_data(json!("foo"), &url_type, ValidateEntityComponents::full()) - .await - .expect("validation failed"); - - _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - - _ = validate_data( - json!("foo bar baz"), - &url_type, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - #[expect(clippy::too_many_lines, reason = "Most lines are just test data")] - async fn date_time() { - const VALID_FORMATS: &[&str] = &[ - "2023-12-22T17:48:15Z", // %Y-%M-%DT%h:%m:%sZ - "2023-12-22T17:48:15.0Z", // %Y-%M-%DT%h:%m:%.1sZ - "2023-12-22T17:48:15.08Z", // %Y-%M-%DT%h:%m:%.2sZ - "2023-12-22T17:48:15.083Z", // %Y-%M-%DT%h:%m:%.3sZ - 
"2023-12-22T17:48:15.083212Z", // %Y-%M-%DT%h:%m:%s.%uZ - "2023-12-22T18:48:15.083212+01:00", // %Y-%M-%DT%h:%m:%s.%u%Z:%z - "2023-12-22T18:48:15+01:00", // %Y-%M-%DT%h:%m:%s%Z:%z - "2023-12-22T18:48:15.083+01:00", // %Y-%M-%DT%h:%m:%.3s%Z:%z - "2023-12-23T02:33:15+08:45", // %Y-%M-%DT%h:%m:%s+08:45 - "2023-12-22T17:48:15+00:00", // %Y-%M-%DT%h:%m:%s+00:00 - "2023-12-22T18:48:15.0+01:00", // %Y-%M-%DT%h:%m:%.1s%Z:%z - "2023-12-22T18:48:15.08+01:00", // %Y-%M-%DT%h:%m:%.2s%Z:%z - "2023-12-22T17:48:15.083+00:00", // %Y-%M-%DT%h:%m:%.3s+00:00 - "2023-12-22T17:48:15-00:00", // %Y-%M-%DT%h:%m:%s-00:00 - "2023-12-22T17:48:15.083-00:00", // %Y-%M-%DT%h:%m:%.3s-00:00 - ]; - - const INVALID_FORMATS: &[&str] = &[ - "2023-12-22t17:48:15z", // %Y-%M-%Dt%h:%m:%sz - "2023-12-22t17:48:15.083z", // %Y-%M-%Dt%h:%m:%.3sz - "2023-12-22 18:48:15+01:00", // %Y-%M-%D %h:%m:%s%Z:%z - "2023-12-22 18:48:15.0+01:00", // %Y-%M-%D %h:%m:%.1s%Z:%z - "2023-12-22 18:48:15.08+01:00", // %Y-%M-%D %h:%m:%.2s%Z:%z - "2023-12-22 18:48:15.083+01:00", // %Y-%M-%D %h:%m:%.3s%Z:%z - "2023-12-22 18:48:15.083212+01:00", // %Y-%M-%D %h:%m:%s.%u%Z:%z - "2023-12-22 17:48:15Z", // %Y-%M-%D %h:%m:%sZ - "2023-12-22 17:48:15z", // %Y-%M-%D %h:%m:%sz - "2023-12-22 17:48:15.0Z", // %Y-%M-%D %h:%m:%.1sZ - "2023-12-22 17:48:15.08Z", // %Y-%M-%D %h:%m:%.2sZ - "2023-12-22 17:48:15.083Z", // %Y-%M-%D %h:%m:%.3sZ - "2023-12-22 17:48:15.083212Z", // %Y-%M-%D %h:%m:%s.%uZ - "2023-12-22 17:48:15.083z", // %Y-%M-%D %h:%m:%.3sz - "2023-12-22 17:48:15.083212z", // %Y-%M-%D %h:%m:%s.%uz - "2023-12-22 17:48:15-00:00", // %Y-%M-%D %h:%m:%s-00:00 - "2023-12-22 17:48:15.083-00:00", // %Y-%M-%D %h:%m:%.3s-00:00 - "2023-12-22_17:48:15Z", // %Y-%M-%D_%h:%m:%sZ - "2023-12-22_17:48:15z", // %Y-%M-%D_%h:%m:%sz - "2023-12-22_17:48:15.083Z", // %Y-%M-%D_%h:%m:%.3sZ - "2023-12-22_17:48:15.083212Z", // %Y-%M-%D_%h:%m:%s.%uZ - "2023-12-22_17:48:15.083z", // %Y-%M-%D_%h:%m:%.3sz - "2023-12-22_17:48:15.083212z", // %Y-%M-%D_%h:%m:%s.%uz - 
"2023-12-22T18", // %Y-%M-%DT%h - "2023-12-22T18,8", // %Y-%M-%DT%,1h - "2023-12-22T18.8", // %Y-%M-%DT%.1h - "2023-12-22T18:48", // %Y-%M-%DT%h:%m - "2023-12-22T18:48,2", // %Y-%M-%DT%h:%,1m - "2023-12-22T18:48.2", // %Y-%M-%DT%h:%.1m - "2023-12-22T18:48:15", // %Y-%M-%DT%h:%m:%s - "2023-12-22T18:48:15.0", // %Y-%M-%DT%h:%m:%.1s - "2023-12-22T18:48:15.08", // %Y-%M-%DT%h:%m:%.2s - "2023-12-22T18:48:15,083", // %Y-%M-%DT%h:%m:%,3s - "2023-12-22T18:48:15.083", // %Y-%M-%DT%h:%m:%.3s - "2023-12-22T18:48:15,083212", // %Y-%M-%DT%h:%m:%s,%u - "2023-12-22T18:48:15.083212", // %Y-%M-%DT%h:%m:%s.%u - "2023-12-22T17Z", // %Y-%M-%DT%hZ - "2023-12-22T17,8Z", // %Y-%M-%DT%,1hZ - "2023-12-22T17.8Z", // %Y-%M-%DT%.1hZ - "2023-12-22T17:48Z", // %Y-%M-%DT%h:%mZ - "2023-12-22T17:48,2Z", // %Y-%M-%DT%h:%,1mZ - "2023-12-22T17:48.2Z", // %Y-%M-%DT%h:%.1mZ - "2023-12-22T17:48:15,083Z", // %Y-%M-%DT%h:%m:%,3sZ - "2023-12-22T17:48:15,083212Z", // %Y-%M-%DT%h:%m:%s,%uZ - "2023-12-22T18+01", // %Y-%M-%DT%h%Z - "2023-12-22T18,8+01", // %Y-%M-%DT%,1h%Z - "2023-12-22T18.8+01", // %Y-%M-%DT%.1h%Z - "2023-12-22T18:48+01", // %Y-%M-%DT%h:%m%Z - "2023-12-22T18:48,2+01", // %Y-%M-%DT%h:%,1m%Z - "2023-12-22T18:48.2+01", // %Y-%M-%DT%h:%.1m%Z - "2023-12-22T18:48:15+01", // %Y-%M-%DT%h:%m:%s%Z - "2023-12-22T18:48:15.0+01", // %Y-%M-%DT%h:%m:%.1s%Z - "2023-12-22T18:48:15.08+01", // %Y-%M-%DT%h:%m:%.2s%Z - "2023-12-22T18:48:15,083+01", // %Y-%M-%DT%h:%m:%,3s%Z - "2023-12-22T18:48:15.083+01", // %Y-%M-%DT%h:%m:%.3s%Z - "2023-12-22T18:48:15,083212+01", // %Y-%M-%DT%h:%m:%s,%u%Z - "2023-12-22T18:48:15.083212+01", // %Y-%M-%DT%h:%m:%s.%u%Z - "2023-12-22T18+01:00", // %Y-%M-%DT%h%Z:%z - "2023-12-22T18,8+01:00", // %Y-%M-%DT%,1h%Z:%z - "2023-12-22T18.8+01:00", // %Y-%M-%DT%.1h%Z:%z - "2023-12-22T18:48+01:00", // %Y-%M-%DT%h:%m%Z:%z - "2023-12-22T18:48,2+01:00", // %Y-%M-%DT%h:%,1m%Z:%z - "2023-12-22T18:48.2+01:00", // %Y-%M-%DT%h:%.1m%Z:%z - "2023-12-22T18:48:15,083+01:00", // %Y-%M-%DT%h:%m:%,3s%Z:%z - 
"2023-12-22T18:48:15,083212+01:00", // %Y-%M-%DT%h:%m:%s,%u%Z:%z - "2023-W51-5T18", // %V-W%W-%wT%h - "2023-W51-5T18,8", // %V-W%W-%wT%,1h - "2023-W51-5T18.8", // %V-W%W-%wT%.1h - "2023-W51-5T18:48", // %V-W%W-%wT%h:%m - "2023-W51-5T18:48,2", // %V-W%W-%wT%h:%,1m - "2023-W51-5T18:48.2", // %V-W%W-%wT%h:%.1m - "2023-W51-5T18:48:15", // %V-W%W-%wT%h:%m:%s - "2023-W51-5T18:48:15.0", // %V-W%W-%wT%h:%m:%.1s - "2023-W51-5T18:48:15.08", // %V-W%W-%wT%h:%m:%.2s - "2023-W51-5T18:48:15,083", // %V-W%W-%wT%h:%m:%,3s - "2023-W51-5T18:48:15.083", // %V-W%W-%wT%h:%m:%.3s - "2023-W51-5T18:48:15,083212", // %V-W%W-%wT%h:%m:%s,%u - "2023-W51-5T18:48:15.083212", // %V-W%W-%wT%h:%m:%s.%u - "2023-W51-5T17Z", // %V-W%W-%wT%hZ - "2023-W51-5T17,8Z", // %V-W%W-%wT%,1hZ - "2023-W51-5T17.8Z", // %V-W%W-%wT%.1hZ - "2023-W51-5T17:48Z", // %V-W%W-%wT%h:%mZ - "2023-W51-5T17:48,2Z", // %V-W%W-%wT%h:%,1mZ - "2023-W51-5T17:48.2Z", // %V-W%W-%wT%h:%.1mZ - "2023-W51-5T17:48:15Z", // %V-W%W-%wT%h:%m:%sZ - "2023-W51-5T17:48:15.0Z", // %V-W%W-%wT%h:%m:%.1sZ - "2023-W51-5T17:48:15.08Z", // %V-W%W-%wT%h:%m:%.2sZ - "2023-W51-5T17:48:15,083Z", // %V-W%W-%wT%h:%m:%,3sZ - "2023-W51-5T17:48:15.083Z", // %V-W%W-%wT%h:%m:%.3sZ - "2023-W51-5T17:48:15,083212Z", // %V-W%W-%wT%h:%m:%s,%uZ - "2023-W51-5T17:48:15.083212Z", // %V-W%W-%wT%h:%m:%s.%uZ - "2023-W51-5T18+01", // %V-W%W-%wT%h%Z - "2023-W51-5T18,8+01", // %V-W%W-%wT%,1h%Z - "2023-W51-5T18.8+01", // %V-W%W-%wT%.1h%Z - "2023-W51-5T18:48+01", // %V-W%W-%wT%h:%m%Z - "2023-W51-5T18:48,2+01", // %V-W%W-%wT%h:%,1m%Z - "2023-W51-5T18:48.2+01", // %V-W%W-%wT%h:%.1m%Z - "2023-W51-5T18:48:15+01", // %V-W%W-%wT%h:%m:%s%Z - "2023-W51-5T18:48:15.0+01", // %V-W%W-%wT%h:%m:%.1s%Z - "2023-W51-5T18:48:15.08+01", // %V-W%W-%wT%h:%m:%.2s%Z - "2023-W51-5T18:48:15,083+01", // %V-W%W-%wT%h:%m:%,3s%Z - "2023-W51-5T18:48:15.083+01", // %V-W%W-%wT%h:%m:%.3s%Z - "2023-W51-5T18:48:15,083212+01", // %V-W%W-%wT%h:%m:%s,%u%Z - "2023-W51-5T18:48:15.083212+01", // %V-W%W-%wT%h:%m:%s.%u%Z - 
"2023-W51-5T18+01:00", // %V-W%W-%wT%h%Z:%z - "2023-W51-5T18,8+01:00", // %V-W%W-%wT%,1h%Z:%z - "2023-W51-5T18.8+01:00", // %V-W%W-%wT%.1h%Z:%z - "2023-W51-5T18:48+01:00", // %V-W%W-%wT%h:%m%Z:%z - "2023-W51-5T18:48,2+01:00", // %V-W%W-%wT%h:%,1m%Z:%z - "2023-W51-5T18:48.2+01:00", // %V-W%W-%wT%h:%.1m%Z:%z - "2023-W51-5T18:48:15+01:00", // %V-W%W-%wT%h:%m:%s%Z:%z - "2023-W51-5T18:48:15.0+01:00", // %V-W%W-%wT%h:%m:%.1s%Z:%z - "2023-W51-5T18:48:15.08+01:00", // %V-W%W-%wT%h:%m:%.2s%Z:%z - "2023-W51-5T18:48:15,083+01:00", // %V-W%W-%wT%h:%m:%,3s%Z:%z - "2023-W51-5T18:48:15.083+01:00", // %V-W%W-%wT%h:%m:%.3s%Z:%z - "2023-W51-5T18:48:15,083212+01:00", // %V-W%W-%wT%h:%m:%s,%u%Z:%z - "2023-W51-5T18:48:15.083212+01:00", // %V-W%W-%wT%h:%m:%s.%u%Z:%z - "2023-356T18", // %Y-%OT%h - "2023-356T18,8", // %Y-%OT%,1h - "2023-356T18.8", // %Y-%OT%.1h - "2023-356T18:48", // %Y-%OT%h:%m - "2023-356T18:48,2", // %Y-%OT%h:%,1m - "2023-356T18:48.2", // %Y-%OT%h:%.1m - "2023-356T18:48:15", // %Y-%OT%h:%m:%s - "2023-356T18:48:15.0", // %Y-%OT%h:%m:%.1s - "2023-356T18:48:15.08", // %Y-%OT%h:%m:%.2s - "2023-356T18:48:15,083", // %Y-%OT%h:%m:%,3s - "2023-356T18:48:15.083", // %Y-%OT%h:%m:%.3s - "2023-356T18:48:15,083212", // %Y-%OT%h:%m:%s,%u - "2023-356T18:48:15.083212", // %Y-%OT%h:%m:%s.%u - "2023-356T17Z", // %Y-%OT%hZ - "2023-356T17,8Z", // %Y-%OT%,1hZ - "2023-356T17.8Z", // %Y-%OT%.1hZ - "2023-356T17:48Z", // %Y-%OT%h:%mZ - "2023-356T17:48,2Z", // %Y-%OT%h:%,1mZ - "2023-356T17:48.2Z", // %Y-%OT%h:%.1mZ - "2023-356T17:48:15Z", // %Y-%OT%h:%m:%sZ - "2023-356T17:48:15.0Z", // %Y-%OT%h:%m:%.1sZ - "2023-356T17:48:15.08Z", // %Y-%OT%h:%m:%.2sZ - "2023-356T17:48:15,083Z", // %Y-%OT%h:%m:%,3sZ - "2023-356T17:48:15.083Z", // %Y-%OT%h:%m:%.3sZ - "2023-356T17:48:15,083212Z", // %Y-%OT%h:%m:%s,%uZ - "2023-356T17:48:15.083212Z", // %Y-%OT%h:%m:%s.%uZ - "2023-356T18+01", // %Y-%OT%h%Z - "2023-356T18,8+01", // %Y-%OT%,1h%Z - "2023-356T18.8+01", // %Y-%OT%.1h%Z - "2023-356T18:48+01", // 
%Y-%OT%h:%m%Z - "2023-356T18:48,2+01", // %Y-%OT%h:%,1m%Z - "2023-356T18:48.2+01", // %Y-%OT%h:%.1m%Z - "2023-356T18:48:15+01", // %Y-%OT%h:%m:%s%Z - "2023-356T18:48:15.0+01", // %Y-%OT%h:%m:%.1s%Z - "2023-356T18:48:15.08+01", // %Y-%OT%h:%m:%.2s%Z - "2023-356T18:48:15,083+01", // %Y-%OT%h:%m:%,3s%Z - "2023-356T18:48:15.083+01", // %Y-%OT%h:%m:%.3s%Z - "2023-356T18:48:15,083212+01", // %Y-%OT%h:%m:%s,%u%Z - "2023-356T18:48:15.083212+01", // %Y-%OT%h:%m:%s.%u%Z - "2023-356T18+01:00", // %Y-%OT%h%Z:%z - "2023-356T18,8+01:00", // %Y-%OT%,1h%Z:%z - "2023-356T18.8+01:00", // %Y-%OT%.1h%Z:%z - "2023-356T18:48+01:00", // %Y-%OT%h:%m%Z:%z - "2023-356T18:48,2+01:00", // %Y-%OT%h:%,1m%Z:%z - "2023-356T18:48.2+01:00", // %Y-%OT%h:%.1m%Z:%z - "2023-356T18:48:15+01:00", // %Y-%OT%h:%m:%s%Z:%z - "2023-356T18:48:15.0+01:00", // %Y-%OT%h:%m:%.1s%Z:%z - "2023-356T18:48:15.08+01:00", // %Y-%OT%h:%m:%.2s%Z:%z - "2023-356T18:48:15,083+01:00", // %Y-%OT%h:%m:%,3s%Z:%z - "2023-356T18:48:15.083+01:00", // %Y-%OT%h:%m:%.3s%Z:%z - "2023-356T18:48:15,083212+01:00", // %Y-%OT%h:%m:%s,%u%Z:%z - "2023-356T18:48:15.083212+01:00", // %Y-%OT%h:%m:%s.%u%Z:%z - "20231222T18", // %Y%M%DT%h - "20231222T18,8", // %Y%M%DT%,1h - "20231222T18.8", // %Y%M%DT%.1h - "20231222T1848", // %Y%M%DT%h%m - "20231222T1848,2", // %Y%M%DT%h%,1m - "20231222T1848.2", // %Y%M%DT%h%.1m - "20231222T184815", // %Y%M%DT%h%m%s - "20231222T184815.0", // %Y%M%DT%h%m%.1s - "20231222T184815.08", // %Y%M%DT%h%m%.2s - "20231222T184815,083", // %Y%M%DT%h%m%,3s - "20231222T184815.083", // %Y%M%DT%h%m%.3s - "20231222T184815,083212", // %Y%M%DT%h%m%s,%u - "20231222T184815.083212", // %Y%M%DT%h%m%s.%u - "20231222T17Z", // %Y%M%DT%hZ - "20231222T17,8Z", // %Y%M%DT%,1hZ - "20231222T17.8Z", // %Y%M%DT%.1hZ - "20231222T1748Z", // %Y%M%DT%h%mZ - "20231222T1748,2Z", // %Y%M%DT%h%,1mZ - "20231222T1748.2Z", // %Y%M%DT%h%.1mZ - "20231222T174815Z", // %Y%M%DT%h%m%sZ - "20231222T174815.0Z", // %Y%M%DT%h%m%.1sZ - "20231222T174815.08Z", // 
%Y%M%DT%h%m%.2sZ - "20231222T174815,083Z", // %Y%M%DT%h%m%,3sZ - "20231222T174815.083Z", // %Y%M%DT%h%m%.3sZ - "20231222T174815,083212Z", // %Y%M%DT%h%m%s,%uZ - "20231222T174815.083212Z", // %Y%M%DT%h%m%s.%uZ - "20231222T18+01", // %Y%M%DT%h%Z - "20231222T18,8+01", // %Y%M%DT%,1h%Z - "20231222T18.8+01", // %Y%M%DT%.1h%Z - "20231222T1848+01", // %Y%M%DT%h%m%Z - "20231222T1848,2+01", // %Y%M%DT%h%,1m%Z - "20231222T1848.2+01", // %Y%M%DT%h%.1m%Z - "20231222T184815+01", // %Y%M%DT%h%m%s%Z - "20231222T184815.0+01", // %Y%M%DT%h%m%.1s%Z - "20231222T184815.08+01", // %Y%M%DT%h%m%.2s%Z - "20231222T184815,083+01", // %Y%M%DT%h%m%,3s%Z - "20231222T184815.083+01", // %Y%M%DT%h%m%.3s%Z - "20231222T184815,083212+01", // %Y%M%DT%h%m%s,%u%Z - "20231222T184815.083212+01", // %Y%M%DT%h%m%s.%u%Z - "20231222T18+0100", // %Y%M%DT%h%Z%z - "20231222T18,8+0100", // %Y%M%DT%,1h%Z%z - "20231222T18.8+0100", // %Y%M%DT%.1h%Z%z - "20231222T1848+0100", // %Y%M%DT%h%m%Z%z - "20231222T1848,2+0100", // %Y%M%DT%h%,1m%Z%z - "20231222T1848.2+0100", // %Y%M%DT%h%.1m%Z%z - "20231222T184815+0100", // %Y%M%DT%h%m%s%Z%z - "20231222T184815.0+0100", // %Y%M%DT%h%m%.1s%Z%z - "20231222T184815.08+0100", // %Y%M%DT%h%m%.2s%Z%z - "20231222T184815,083+0100", // %Y%M%DT%h%m%,3s%Z%z - "20231222T184815.083+0100", // %Y%M%DT%h%m%.3s%Z%z - "20231222T184815,083212+0100", // %Y%M%DT%h%m%s,%u%Z%z - "20231222T184815.083212+0100", // %Y%M%DT%h%m%s.%u%Z%z - "2023W515T18", // %VW%W%wT%h - "2023W515T18,8", // %VW%W%wT%,1h - "2023W515T18.8", // %VW%W%wT%.1h - "2023W515T1848", // %VW%W%wT%h%m - "2023W515T1848,2", // %VW%W%wT%h%,1m - "2023W515T1848.2", // %VW%W%wT%h%.1m - "2023W515T184815", // %VW%W%wT%h%m%s - "2023W515T184815.0", // %VW%W%wT%h%m%.1s - "2023W515T184815.08", // %VW%W%wT%h%m%.2s - "2023W515T184815,083", // %VW%W%wT%h%m%,3s - "2023W515T184815.083", // %VW%W%wT%h%m%.3s - "2023W515T184815,083212", // %VW%W%wT%h%m%s,%u - "2023W515T184815.083212", // %VW%W%wT%h%m%s.%u - "2023W515T17Z", // %VW%W%wT%hZ - 
"2023W515T17,8Z", // %VW%W%wT%,1hZ - "2023W515T17.8Z", // %VW%W%wT%.1hZ - "2023W515T1748Z", // %VW%W%wT%h%mZ - "2023W515T1748,2Z", // %VW%W%wT%h%,1mZ - "2023W515T1748.2Z", // %VW%W%wT%h%.1mZ - "2023W515T174815Z", // %VW%W%wT%h%m%sZ - "2023W515T174815.0Z", // %VW%W%wT%h%m%.1sZ - "2023W515T174815.08Z", // %VW%W%wT%h%m%.2sZ - "2023W515T174815,083Z", // %VW%W%wT%h%m%,3sZ - "2023W515T174815.083Z", // %VW%W%wT%h%m%.3sZ - "2023W515T174815,083212Z", // %VW%W%wT%h%m%s,%uZ - "2023W515T174815.083212Z", // %VW%W%wT%h%m%s.%uZ - "2023W515T18+01", // %VW%W%wT%h%Z - "2023W515T18,8+01", // %VW%W%wT%,1h%Z - "2023W515T18.8+01", // %VW%W%wT%.1h%Z - "2023W515T1848+01", // %VW%W%wT%h%m%Z - "2023W515T1848,2+01", // %VW%W%wT%h%,1m%Z - "2023W515T1848.2+01", // %VW%W%wT%h%.1m%Z - "2023W515T184815+01", // %VW%W%wT%h%m%s%Z - "2023W515T184815.0+01", // %VW%W%wT%h%m%.1s%Z - "2023W515T184815.08+01", // %VW%W%wT%h%m%.2s%Z - "2023W515T184815,083+01", // %VW%W%wT%h%m%,3s%Z - "2023W515T184815.083+01", // %VW%W%wT%h%m%.3s%Z - "2023W515T184815,083212+01", // %VW%W%wT%h%m%s,%u%Z - "2023W515T184815.083212+01", // %VW%W%wT%h%m%s.%u%Z - "2023W515T18+0100", // %VW%W%wT%h%Z%z - "2023W515T18,8+0100", // %VW%W%wT%,1h%Z%z - "2023W515T18.8+0100", // %VW%W%wT%.1h%Z%z - "2023W515T1848+0100", // %VW%W%wT%h%m%Z%z - "2023W515T1848,2+0100", // %VW%W%wT%h%,1m%Z%z - "2023W515T1848.2+0100", // %VW%W%wT%h%.1m%Z%z - "2023W515T184815+0100", // %VW%W%wT%h%m%s%Z%z - "2023W515T184815.0+0100", // %VW%W%wT%h%m%.1s%Z%z - "2023W515T184815.08+0100", // %VW%W%wT%h%m%.2s%Z%z - "2023W515T184815,083+0100", // %VW%W%wT%h%m%,3s%Z%z - "2023W515T184815.083+0100", // %VW%W%wT%h%m%.3s%Z%z - "2023W515T184815,083212+0100", // %VW%W%wT%h%m%s,%u%Z%z - "2023W515T184815.083212+0100", // %VW%W%wT%h%m%s.%u%Z%z - "2023356T18", // %Y%OT%h - "2023356T18,8", // %Y%OT%,1h - "2023356T18.8", // %Y%OT%.1h - "2023356T1848", // %Y%OT%h%m - "2023356T1848,2", // %Y%OT%h%,1m - "2023356T1848.2", // %Y%OT%h%.1m - "2023356T184815", // %Y%OT%h%m%s - 
"2023356T184815.0", // %Y%OT%h%m%.1s - "2023356T184815.08", // %Y%OT%h%m%.2s - "2023356T184815,083", // %Y%OT%h%m%,3s - "2023356T184815.083", // %Y%OT%h%m%.3s - "2023356T184815,083212", // %Y%OT%h%m%s,%u - "2023356T184815.083212", // %Y%OT%h%m%s.%u - "2023356T17Z", // %Y%OT%hZ - "2023356T17,8Z", // %Y%OT%,1hZ - "2023356T17.8Z", // %Y%OT%.1hZ - "2023356T1748Z", // %Y%OT%h%mZ - "2023356T1748,2Z", // %Y%OT%h%,1mZ - "2023356T1748.2Z", // %Y%OT%h%.1mZ - "2023356T174815Z", // %Y%OT%h%m%sZ - "2023356T174815.0Z", // %Y%OT%h%m%.1sZ - "2023356T174815.08Z", // %Y%OT%h%m%.2sZ - "2023356T174815,083Z", // %Y%OT%h%m%,3sZ - "2023356T174815.083Z", // %Y%OT%h%m%.3sZ - "2023356T174815,083212Z", // %Y%OT%h%m%s,%uZ - "2023356T174815.083212Z", // %Y%OT%h%m%s.%uZ - "2023356T18+01", // %Y%OT%h%Z - "2023356T18,8+01", // %Y%OT%,1h%Z - "2023356T18.8+01", // %Y%OT%.1h%Z - "2023356T1848+01", // %Y%OT%h%m%Z - "2023356T1848,2+01", // %Y%OT%h%,1m%Z - "2023356T1848.2+01", // %Y%OT%h%.1m%Z - "2023356T184815+01", // %Y%OT%h%m%s%Z - "2023356T184815.0+01", // %Y%OT%h%m%.1s%Z - "2023356T184815.08+01", // %Y%OT%h%m%.2s%Z - "2023356T184815,083+01", // %Y%OT%h%m%,3s%Z - "2023356T184815.083+01", // %Y%OT%h%m%.3s%Z - "2023356T184815,083212+01", // %Y%OT%h%m%s,%u%Z - "2023356T184815.083212+01", // %Y%OT%h%m%s.%u%Z - "2023356T18+0100", // %Y%OT%h%Z%z - "2023356T18,8+0100", // %Y%OT%,1h%Z%z - "2023356T18.8+0100", // %Y%OT%.1h%Z%z - "2023356T1848+0100", // %Y%OT%h%m%Z%z - "2023356T1848,2+0100", // %Y%OT%h%,1m%Z%z - "2023356T1848.2+0100", // %Y%OT%h%.1m%Z%z - "2023356T184815+0100", // %Y%OT%h%m%s%Z%z - "2023356T184815.0+0100", // %Y%OT%h%m%.1s%Z%z - "2023356T184815.08+0100", // %Y%OT%h%m%.2s%Z%z - "2023356T184815,083+0100", // %Y%OT%h%m%,3s%Z%z - "2023356T184815.083+0100", // %Y%OT%h%m%.3s%Z%z - "2023356T184815,083212+0100", // %Y%OT%h%m%s,%u%Z%z - "2023356T184815.083212+0100", // %Y%OT%h%m%s.%u%Z%z - "2023-12-23T01:48:15+08", // %Y-%M-%DT%h:%m:%s+08 - "2023-12-22T05-12", // %Y-%M-%DT%h-12 - 
"2023-12-22T05-12:00", // %Y-%M-%DT%h-12:00 - "2023-12-22T05:48-12", // %Y-%M-%DT%h:%m-12 - "2023-12-22T05:48-12:00", // %Y-%M-%DT%h:%m-12:00 - "2023-12-22 18:48", // %Y-%M-%D %h:%m - "2023-12-22 18:48:15", // %Y-%M-%D %h:%m:%s - "2023-12-22 18:48:15.0", // %Y-%M-%D %h:%m:%.1s - "2023-12-22 18:48:15.08", // %Y-%M-%D %h:%m:%.2s - "2023-12-22 18:48:15.083", // %Y-%M-%D %h:%m:%.3s - "2023-12-22 17:48Z", // %Y-%M-%D %h:%mZ - "2023-12-22 18:48+01:00", // %Y-%M-%D %h:%m%Z:%z - "2023-12-22T18:48+0100", // %Y-%M-%DT%h:%m%Z%z - "2023-12-22T18:48:15+0100", // %Y-%M-%DT%h:%m:%s%Z%z - "2023-12-22T18:48:15.0+0100", // %Y-%M-%DT%h:%m:%.1s%Z%z - "2023-12-22T18:48:15.08+0100", // %Y-%M-%DT%h:%m:%.2s%Z%z - "2023-12-22T18:48:15.083+0100", // %Y-%M-%DT%h:%m:%.3s%Z%z - "2023-12-22 18:48+0100", // %Y-%M-%D %h:%m%Z%z - "2023-12-22 18:48:15+0100", // %Y-%M-%D %h:%m:%s%Z%z - "2023-12-22 18:48:15.0+0100", // %Y-%M-%D %h:%m:%.1s%Z%z - "2023-12-22 18:48:15.08+0100", // %Y-%M-%D %h:%m:%.2s%Z%z - "2023-12-22 18:48:15.083+0100", // %Y-%M-%D %h:%m:%.3s%Z%z - "2023-12-23T02:33:15+0845", // %Y-%M-%DT%h:%m:%s+0845 - "2023-12-22T17:48:15+0000", // %Y-%M-%DT%h:%m:%s+0000 - "2023-12-22T17:48:15.083+0000", // %Y-%M-%DT%h:%m:%.3s+0000 - ]; - - let url_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/date-time/v/1", - "title": "Date Time", - "type": "string", - "format": "date-time", - })) - .expect("failed to serialize date time type"); - - let mut failed_formats = Vec::new(); - for format in VALID_FORMATS { - if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) - .await - .is_err() - { - failed_formats.push(format); - } - } - assert!( - failed_formats.is_empty(), - "failed to validate formats: {failed_formats:#?}" - ); - - _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) - .await - .expect_err("validation 
succeeded"); - - let mut passed_formats = Vec::new(); - for format in INVALID_FORMATS { - if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) - .await - .is_ok() - { - passed_formats.push(format); - } - } - assert!( - passed_formats.is_empty(), - "passed invalid formats: {passed_formats:#?}" - ); - } - - #[tokio::test] - async fn date() { - const VALID_FORMATS: &[&str] = &[ - "2023-12-22", // %Y-%M-%D - ]; - - const INVALID_FORMATS: &[&str] = &[ - "20", // %C - "202", // %X - "2023", // %Y - "2023-12", // %Y-%M - "2023-356", // %Y-%O - "2023-W51", // %V-W%W - "2023-W51-5", // %V-W%W-%w - "20231222", // %Y%M%D - "2023356", // %Y%O - "2023W51", // %VW%W - "2023W515", // %VW%W%w - "--12-22", // --%M-%D - "12-22", // %M-%D - ]; - - let url_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/date/v/1", - "title": "Date", - "type": "string", - "format": "date", - })) - .expect("failed to serialize date type"); - - let mut failed_formats = Vec::new(); - for format in VALID_FORMATS { - if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) - .await - .is_err() - { - failed_formats.push(format); - } - } - assert!( - failed_formats.is_empty(), - "failed to validate formats: {failed_formats:#?}" - ); - - _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - - let mut passed_formats = Vec::new(); - for format in INVALID_FORMATS { - if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) - .await - .is_ok() - { - passed_formats.push(format); - } - } - assert!( - passed_formats.is_empty(), - "passed invalid formats: {passed_formats:#?}" - ); - } - - #[tokio::test] - #[expect(clippy::too_many_lines, reason = "Most lines are just test data")] - async fn time() { - const VALID_FORMATS: &[&str] = &[ - 
"14:26:28+01:00", // %h:%m:%s%Z:%z - "14:26:28.9+01:00", // %h:%m:%.1s%Z:%z - "14:26:28.95+01:00", // %h:%m:%.2s%Z:%z - "14:26:28.950+01:00", // %h:%m:%.3s%Z:%z - "14:26:28.950086+01:00", // %h:%m:%s.%u%Z:%z - "13:26:28Z", // %h:%m:%sZ - "13:26:28.9Z", // %h:%m:%.1sZ - "13:26:28.95Z", // %h:%m:%.2sZ - "13:26:28.950Z", // %h:%m:%.3sZ - "13:26:28.950086Z", // %h:%m:%s.%uZ - "13:26:28+00:00", // %h:%m:%s+00:00 - "13:26:28.9+00:00", // %h:%m:%.1s+00:00 - "13:26:28.950+00:00", // %h:%m:%.3s+00:00 - "13:26:28.950086+00:00", // %h:%m:%s.%u+00:00 - "13:26:28-00:00", // %h:%m:%s-00:00 - "13:26:28.9-00:00", // %h:%m:%.1s-00:00 - "13:26:28.950-00:00", // %h:%m:%.3s-00:00 - "13:26:28.950086-00:00", // %h:%m:%s.%u-00:00 - ]; - - const INVALID_FORMATS: &[&str] = &[ - "14", // %h - "14,4", // %,1h - "14.4", // %.1h - "14:26", // %h:%m - "14:26,4", // %h:%,1m - "14:26.4", // %h:%.1m - "14:26:28", // %h:%m:%s - "14:26:28.9", // %h:%m:%.1s - "14:26:28.95", // %h:%m:%.2s - "14:26:28,950", // %h:%m:%,3s - "14:26:28.950", // %h:%m:%.3s - "14:26:28,950086", // %h:%m:%s,%u - "14:26:28.950086", // %h:%m:%s.%u - "13Z", // %hZ - "13,4Z", // %,1hZ - "13.4Z", // %.1hZ - "13:26Z", // %h:%mZ - "13:26,4Z", // %h:%,1mZ - "13:26.4Z", // %h:%.1mZ - "13:26:28,950Z", // %h:%m:%,3sZ - "13:26:28,950086Z", // %h:%m:%s,%uZ - "14+01", // %h%Z - "14,4+01", // %,1h%Z - "14.4+01", // %.1h%Z - "14:26+01", // %h:%m%Z - "14:26,4+01", // %h:%,1m%Z - "14:26.4+01", // %h:%.1m%Z - "14:26:28+01", // %h:%m:%s%Z - "14:26:28.9+01", // %h:%m:%.1s%Z - "14:26:28.95+01", // %h:%m:%.2s%Z - "14:26:28,950+01", // %h:%m:%,3s%Z - "14:26:28.950+01", // %h:%m:%.3s%Z - "14:26:28,950086+01", // %h:%m:%s,%u%Z - "14:26:28.950086+01", // %h:%m:%s.%u%Z - "14+01:00", // %h%Z:%z - "14,4+01:00", // %,1h%Z:%z - "14.4+01:00", // %.1h%Z:%z - "14:26+01:00", // %h:%m%Z:%z - "14:26,4+01:00", // %h:%,1m%Z:%z - "14:26.4+01:00", // %h:%.1m%Z:%z - "14:26:28,950+01:00", // %h:%m:%,3s%Z:%z - "14:26:28,950086+01:00", // %h:%m:%s,%u%Z:%z - "T14", // 
T%h - "T14,4", // T%,1h - "T14.4", // T%.1h - "T14:26", // T%h:%m - "T14:26,4", // T%h:%,1m - "T14:26.4", // T%h:%.1m - "T14:26:28", // T%h:%m:%s - "T14:26:28.9", // T%h:%m:%.1s - "T14:26:28.95", // T%h:%m:%.2s - "T14:26:28,950", // T%h:%m:%,3s - "T14:26:28.950", // T%h:%m:%.3s - "T14:26:28,950086", // T%h:%m:%s,%u - "T14:26:28.950086", // T%h:%m:%s.%u - "T13Z", // T%hZ - "T13,4Z", // T%,1hZ - "T13.4Z", // T%.1hZ - "T13:26Z", // T%h:%mZ - "T13:26,4Z", // T%h:%,1mZ - "T13:26.4Z", // T%h:%.1mZ - "T13:26:28Z", // T%h:%m:%sZ - "T13:26:28.9Z", // T%h:%m:%.1sZ - "T13:26:28.95Z", // T%h:%m:%.2sZ - "T13:26:28,950Z", // T%h:%m:%,3sZ - "T13:26:28.950Z", // T%h:%m:%.3sZ - "T13:26:28,950086Z", // T%h:%m:%s,%uZ - "T13:26:28.950086Z", // T%h:%m:%s.%uZ - "T14+01", // T%h%Z - "T14,4+01", // T%,1h%Z - "T14.4+01", // T%.1h%Z - "T14:26+01", // T%h:%m%Z - "T14:26,4+01", // T%h:%,1m%Z - "T14:26.4+01", // T%h:%.1m%Z - "T14:26:28+01", // T%h:%m:%s%Z - "T14:26:28.9+01", // T%h:%m:%.1s%Z - "T14:26:28.95+01", // T%h:%m:%.2s%Z - "T14:26:28,950+01", // T%h:%m:%,3s%Z - "T14:26:28.950+01", // T%h:%m:%.3s%Z - "T14:26:28,950086+01", // T%h:%m:%s,%u%Z - "T14:26:28.950086+01", // T%h:%m:%s.%u%Z - "T14+01:00", // T%h%Z:%z - "T14,4+01:00", // T%,1h%Z:%z - "T14.4+01:00", // T%.1h%Z:%z - "T14:26+01:00", // T%h:%m%Z:%z - "T14:26,4+01:00", // T%h:%,1m%Z:%z - "T14:26.4+01:00", // T%h:%.1m%Z:%z - "T14:26:28+01:00", // T%h:%m:%s%Z:%z - "T14:26:28.9+01:00", // T%h:%m:%.1s%Z:%z - "T14:26:28.95+01:00", // T%h:%m:%.2s%Z:%z - "T14:26:28,950+01:00", // T%h:%m:%,3s%Z:%z - "T14:26:28.950+01:00", // T%h:%m:%.3s%Z:%z - "T14:26:28,950086+01:00", // T%h:%m:%s,%u%Z:%z - "T14:26:28.950086+01:00", // T%h:%m:%s.%u%Z:%z - "1426", // %h%m - "1426,4", // %h%,1m - "1426.4", // %h%.1m - "142628", // %h%m%s - "142628.9", // %h%m%.1s - "142628.95", // %h%m%.2s - "142628,950", // %h%m%,3s - "142628.950", // %h%m%.3s - "142628,950086", // %h%m%s,%u - "142628.950086", // %h%m%s.%u - "1326Z", // %h%mZ - "1326,4Z", // %h%,1mZ - 
"1326.4Z", // %h%.1mZ - "132628Z", // %h%m%sZ - "132628.9Z", // %h%m%.1sZ - "132628.95Z", // %h%m%.2sZ - "132628,950Z", // %h%m%,3sZ - "132628.950Z", // %h%m%.3sZ - "132628,950086Z", // %h%m%s,%uZ - "132628.950086Z", // %h%m%s.%uZ - "1426+01", // %h%m%Z - "1426,4+01", // %h%,1m%Z - "1426.4+01", // %h%.1m%Z - "142628+01", // %h%m%s%Z - "142628.9+01", // %h%m%.1s%Z - "142628.95+01", // %h%m%.2s%Z - "142628,950+01", // %h%m%,3s%Z - "142628.950+01", // %h%m%.3s%Z - "142628,950086+01", // %h%m%s,%u%Z - "142628.950086+01", // %h%m%s.%u%Z - "14+0100", // %h%Z%z - "14,4+0100", // %,1h%Z%z - "14.4+0100", // %.1h%Z%z - "1426+0100", // %h%m%Z%z - "1426,4+0100", // %h%,1m%Z%z - "1426.4+0100", // %h%.1m%Z%z - "142628+0100", // %h%m%s%Z%z - "142628.9+0100", // %h%m%.1s%Z%z - "142628.95+0100", // %h%m%.2s%Z%z - "142628,950+0100", // %h%m%,3s%Z%z - "142628.950+0100", // %h%m%.3s%Z%z - "142628,950086+0100", // %h%m%s,%u%Z%z - "142628.950086+0100", // %h%m%s.%u%Z%z - "T1426", // T%h%m - "T1426,4", // T%h%,1m - "T1426.4", // T%h%.1m - "T142628", // T%h%m%s - "T142628.9", // T%h%m%.1s - "T142628.95", // T%h%m%.2s - "T142628,950", // T%h%m%,3s - "T142628.950", // T%h%m%.3s - "T142628,950086", // T%h%m%s,%u - "T142628.950086", // T%h%m%s.%u - "T1326Z", // T%h%mZ - "T1326,4Z", // T%h%,1mZ - "T1326.4Z", // T%h%.1mZ - "T132628Z", // T%h%m%sZ - "T132628.9Z", // T%h%m%.1sZ - "T132628.95Z", // T%h%m%.2sZ - "T132628,950Z", // T%h%m%,3sZ - "T132628.950Z", // T%h%m%.3sZ - "T132628,950086Z", // T%h%m%s,%uZ - "T132628.950086Z", // T%h%m%s.%uZ - "T1426+01", // T%h%m%Z - "T1426,4+01", // T%h%,1m%Z - "T1426.4+01", // T%h%.1m%Z - "T142628+01", // T%h%m%s%Z - "T142628.9+01", // T%h%m%.1s%Z - "T142628.95+01", // T%h%m%.2s%Z - "T142628,950+01", // T%h%m%,3s%Z - "T142628.950+01", // T%h%m%.3s%Z - "T142628,950086+01", // T%h%m%s,%u%Z - "T142628.950086+01", // T%h%m%s.%u%Z - "T14+0100", // T%h%Z%z - "T14,4+0100", // T%,1h%Z%z - "T14.4+0100", // T%.1h%Z%z - "T1426+0100", // T%h%m%Z%z - "T1426,4+0100", // 
T%h%,1m%Z%z - "T1426.4+0100", // T%h%.1m%Z%z - "T142628+0100", // T%h%m%s%Z%z - "T142628.9+0100", // T%h%m%.1s%Z%z - "T142628.95+0100", // T%h%m%.2s%Z%z - "T142628,950+0100", // T%h%m%,3s%Z%z - "T142628.950+0100", // T%h%m%.3s%Z%z - "T142628,950086+0100", // T%h%m%s,%u%Z%z - "T142628.950086+0100", // T%h%m%s.%u%Z%z - ]; - - let url_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/time/v/1", - "title": "Time", - "type": "string", - "format": "time", - })) - .expect("failed to serialize time type"); - - let mut failed_formats = Vec::new(); - for format in VALID_FORMATS { - if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) - .await - .is_err() - { - failed_formats.push(format); - } - } - assert!( - failed_formats.is_empty(), - "failed to validate formats: {failed_formats:#?}" - ); - - _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - - let mut passed_formats = Vec::new(); - for format in INVALID_FORMATS { - if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) - .await - .is_ok() - { - passed_formats.push(format); - } - } - assert!( - passed_formats.is_empty(), - "passed invalid formats: {passed_formats:#?}" - ); - } - - #[tokio::test] - async fn duration() { - // TODO: Allow durations which are allowed in ISO8601 - const VALID_FORMATS: &[&str] = &[ - "P1Y", - // "P1,5Y", - "P1.5Y", - "P1M", - "P1W", - "P1D", - "PT1H", - // "P1H", - "PT1M", - "PT1S", - // "P1S", - // "PT1,5S", - "PT1.5S", - "P1Y1M", - "P1Y1D", - "P1Y1M1D", - "P1Y1M1DT1H1M1S", - "P1DT1H", - "P1MT1M", - "P1DT1M", - "P1.5W", - // "P1,5W", - "P1DT1.000S", - "P1DT1.00000S", - "P1DT1H1M1.1S", - // "P1H1M1.1S", - ]; - const INVALID_FORMATS: &[&str] = &[ - "1W1M1S", - "1S1M1H1W", - "1 W", - "1.5W", - "1 D 1 W", - "1.5 S 1.5 M", - "1H 15 M", 
- ]; - - let url_type = serde_json::to_string(&json!({ - "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", - "kind": "dataType", - "$id": "https://localhost:4000/@alice/types/data-type/duration/v/1", - "title": "Duration", - "type": "string", - "format": "duration", - })) - .expect("failed to serialize duration type"); - - let mut failed_formats = Vec::new(); - for format in VALID_FORMATS { - if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) - .await - .is_err() - { - failed_formats.push(format); - } - } - assert!( - failed_formats.is_empty(), - "failed to validate formats: {failed_formats:#?}" - ); - - _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) - .await - .expect_err("validation succeeded"); - - let mut passed_formats = Vec::new(); - for format in INVALID_FORMATS { - if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) - .await - .is_ok() - { - passed_formats.push(format); - } - } - assert!( - passed_formats.is_empty(), - "passed invalid formats: {passed_formats:#?}" - ); - } -} diff --git a/libs/@local/hash-validation/src/entity_type.rs b/libs/@local/hash-validation/src/entity_type.rs index 0bf775b1870..59ed90990a6 100644 --- a/libs/@local/hash-validation/src/entity_type.rs +++ b/libs/@local/hash-validation/src/entity_type.rs @@ -6,19 +6,27 @@ use futures::{stream, StreamExt, TryStreamExt}; use graph_types::knowledge::{ entity::{Entity, EntityId}, link::LinkData, - PropertyPath, PropertyWithMetadataObject, + property::{ + visitor::{ + walk_array, walk_object, walk_one_of_property_value, EntityVisitor, TraversalError, + }, + PropertyPath, PropertyWithMetadataArray, PropertyWithMetadataObject, + PropertyWithMetadataValue, ValueMetadata, + }, }; +use serde_json::Value as JsonValue; use thiserror::Error; use type_system::{ - schema::{ClosedEntityType, DataType, ObjectSchema, PropertyType}, + schema::{ + ArraySchema, ClosedEntityType, DataType, 
DataTypeProvider, DataTypeReference, + EntityTypeProvider, OntologyTypeProvider, PropertyObjectSchema, PropertyType, + PropertyTypeProvider, PropertyTypeReference, PropertyValueSchema, PropertyValues, + ValueOrArray, + }, url::{BaseUrl, OntologyTypeVersion, VersionedUrl}, }; -use crate::{ - error::{Actual, Expected}, - DataTypeProvider, EntityProvider, EntityTypeProvider, OntologyTypeProvider, Schema, Validate, - ValidateEntityComponents, -}; +use crate::{EntityProvider, Schema, Validate, ValidateEntityComponents}; macro_rules! extend_report { ($status:ident, $error:expr $(,)?) => { @@ -52,49 +60,6 @@ pub enum EntityValidationError { InvalidPropertyPath { path: PropertyPath<'static> }, } -impl

Schema for ClosedEntityType -where - P: OntologyTypeProvider + DataTypeProvider + Sync, -{ - type Error = EntityValidationError; - - async fn validate_value<'a>( - &'a self, - value: &'a PropertyWithMetadataObject, - components: ValidateEntityComponents, - provider: &'a P, - ) -> Result<(), Report> { - // TODO: Distinguish between format validation and content validation so it's possible - // to directly use the correct type. - // see https://linear.app/hash/issue/BP-33 - ObjectSchema::<_> { - properties: self.properties.clone(), - required: self.required.clone(), - } - .validate_value(&value.value, components, provider) - .await - .change_context(EntityValidationError::InvalidProperties) - .attach_lazy(|| Expected::EntityType(Box::new(self.clone()))) - .attach_lazy(|| Actual::Properties(value.clone())) - } -} - -impl

Validate for PropertyWithMetadataObject -where - P: OntologyTypeProvider + DataTypeProvider + Sync, -{ - type Error = EntityValidationError; - - async fn validate( - &self, - schema: &ClosedEntityType, - components: ValidateEntityComponents, - context: &P, - ) -> Result<(), Report> { - schema.validate_value(self, components, context).await - } -} - impl

Validate for Option<&LinkData> where P: EntityProvider @@ -167,23 +132,6 @@ where extend_report!(status, EntityValidationError::EmptyEntityTypes); } - match PropertyWithMetadataObject::from_parts( - self.properties.clone(), - Some(self.metadata.properties.clone()), - ) { - Ok(properties) => { - if let Err(error) = properties.validate(schema, components, context).await { - extend_report!(status, error); - } - } - Err(error) => { - extend_report!( - status, - error.change_context(EntityValidationError::InvalidProperties) - ); - } - } - if let Err(error) = self .link_data .as_ref() @@ -318,6 +266,203 @@ where } } +pub struct EntityPreprocessor { + pub components: ValidateEntityComponents, +} + +impl EntityVisitor for EntityPreprocessor { + async fn visit_value

( + &mut self, + schema: &DataType, + value: &mut JsonValue, + metadata: &mut ValueMetadata, + type_provider: &P, + ) -> Result<(), Report> + where + P: DataTypeProvider + Sync, + { + let mut status: Result<(), Report> = Ok(()); + + if let Some(data_type_url) = &metadata.data_type_id { + if schema.id != *data_type_url { + let is_compatible = type_provider + .is_parent_of(data_type_url, &schema.id.base_url) + .await + .change_context_lazy(|| TraversalError::DataTypeRetrieval { + id: DataTypeReference { + url: schema.id.clone(), + }, + })?; + + if !is_compatible { + extend_report!( + status, + TraversalError::InvalidDataType { + actual: data_type_url.clone(), + expected: schema.id.clone(), + } + ); + } + + if let Err(err) = type_provider + .provide_type(data_type_url) + .await + .change_context_lazy(|| TraversalError::DataTypeRetrieval { + id: DataTypeReference { + url: schema.id.clone(), + }, + })? + .borrow() + .validate_constraints(value) + .change_context(TraversalError::ConstraintUnfulfilled) + { + extend_report!(status, err); + } + } + } else { + extend_report!(status, TraversalError::AmbiguousDataType); + } + + if let Err(err) = schema + .validate_constraints(value) + .change_context(TraversalError::ConstraintUnfulfilled) + { + extend_report!(status, err); + } + + status + } + + async fn visit_one_of_property

( + &mut self, + schema: &[PropertyValues], + property: &mut PropertyWithMetadataValue, + type_provider: &P, + ) -> Result<(), Report> + where + P: DataTypeProvider + Sync, + { + let mut status = Ok::<_, Report>(()); + + // We try to infer the data type ID + if property.metadata.data_type_id.is_none() { + let mut possible_data_types = HashSet::new(); + + for values in schema { + if let PropertyValues::DataTypeReference(data_type_ref) = values { + let has_children = type_provider + .has_children(&data_type_ref.url) + .await + .change_context_lazy(|| TraversalError::DataTypeRetrieval { + id: data_type_ref.clone(), + })?; + if has_children { + extend_report!(status, TraversalError::AmbiguousDataType); + possible_data_types.clear(); + break; + } + + let data_type = type_provider + .provide_type(&data_type_ref.url) + .await + .change_context_lazy(|| TraversalError::DataTypeRetrieval { + id: data_type_ref.clone(), + })?; + + if !data_type.borrow().all_of.is_empty() { + extend_report!(status, TraversalError::AmbiguousDataType); + possible_data_types.clear(); + break; + } + + possible_data_types.insert(data_type_ref.url.clone()); + } + } + + // Only if there is really a single valid data type ID, we set it. Note, that this is + // done before the actual validation step. 
+ if possible_data_types.len() == 1 { + property.metadata.data_type_id = possible_data_types.into_iter().next(); + } + } + + if let Err(error) = walk_one_of_property_value(self, schema, property, type_provider).await + { + extend_report!(status, error); + } + + status + } + + async fn visit_array( + &mut self, + schema: &ArraySchema, + array: &mut PropertyWithMetadataArray, + type_provider: &P, + ) -> Result<(), Report> + where + T: PropertyValueSchema + Sync, + P: DataTypeProvider + PropertyTypeProvider + Sync, + { + let mut status = walk_array(self, schema, array, type_provider).await; + if self.components.num_items { + if let Some(min) = schema.min_items { + if array.value.len() < min { + extend_report!( + status, + TraversalError::TooFewItems { + actual: array.value.len(), + min, + }, + ); + } + } + + if let Some(max) = schema.max_items { + if array.value.len() > max { + extend_report!( + status, + TraversalError::TooManyItems { + actual: array.value.len(), + max, + }, + ); + } + } + } + + status + } + + async fn visit_object( + &mut self, + schema: &T, + object: &mut PropertyWithMetadataObject, + type_provider: &P, + ) -> Result<(), Report> + where + T: PropertyObjectSchema> + Sync, + P: DataTypeProvider + PropertyTypeProvider + Sync, + { + let mut status = walk_object(self, schema, object, type_provider).await; + + if self.components.required_properties { + for required_property in schema.required() { + if !object.value.contains_key(required_property) { + extend_report!( + status, + TraversalError::MissingRequiredProperty { + key: required_property.clone(), + } + ); + } + } + } + + status + } +} + #[cfg(test)] mod tests { use crate::{tests::validate_entity, ValidateEntityComponents}; diff --git a/libs/@local/hash-validation/src/error.rs b/libs/@local/hash-validation/src/error.rs index a7a2dc6bcc8..8dbf9a916f7 100644 --- a/libs/@local/hash-validation/src/error.rs +++ b/libs/@local/hash-validation/src/error.rs @@ -1,7 +1,7 @@ use std::collections::HashSet; use 
error_stack::Report; -use graph_types::knowledge::{PropertyWithMetadata, PropertyWithMetadataObject}; +use graph_types::knowledge::property::{PropertyWithMetadata, PropertyWithMetadataObject}; use serde_json::Value as JsonValue; use type_system::{ schema::{ClosedEntityType, DataType, PropertyType}, diff --git a/libs/@local/hash-validation/src/lib.rs b/libs/@local/hash-validation/src/lib.rs index 07688564827..effbf542ae4 100644 --- a/libs/@local/hash-validation/src/lib.rs +++ b/libs/@local/hash-validation/src/lib.rs @@ -2,31 +2,20 @@ pub mod error; -pub use self::{ - data_type::{DataTypeConstraint, DataValidationError}, - entity_type::EntityValidationError, - property_type::PropertyValidationError, -}; +pub use self::entity_type::{EntityPreprocessor, EntityValidationError}; -mod data_type; mod entity_type; mod property; -mod property_type; +mod test_data_type; +mod test_property_type; use core::borrow::Borrow; use error_stack::{Context, Report}; -use graph_types::{ - knowledge::entity::{Entity, EntityId}, - ontology::DataTypeId, -}; +use graph_types::knowledge::entity::{Entity, EntityId}; use serde::Deserialize; -use type_system::{ - schema::{ClosedEntityType, DataType}, - url::{BaseUrl, VersionedUrl}, -}; -trait Schema { +pub trait Schema { type Error: Context; fn validate_value<'a>( @@ -93,33 +82,6 @@ pub trait Validate { ) -> impl Future>> + Send; } -pub trait OntologyTypeProvider { - fn provide_type( - &self, - type_id: &VersionedUrl, - ) -> impl Future + Send, Report>> + Send; -} - -pub trait DataTypeProvider: OntologyTypeProvider { - fn is_parent_of( - &self, - child: &VersionedUrl, - parent: &BaseUrl, - ) -> impl Future>> + Send; - fn has_children( - &self, - data_type: DataTypeId, - ) -> impl Future>> + Send; -} - -pub trait EntityTypeProvider: OntologyTypeProvider { - fn is_parent_of( - &self, - child: &VersionedUrl, - parent: &BaseUrl, - ) -> impl Future>> + Send; -} - pub trait EntityProvider { fn provide_entity( &self, @@ -131,13 +93,20 @@ pub trait 
EntityProvider { mod tests { use std::collections::HashMap; - use graph_types::knowledge::{ - Property, PropertyObject, PropertyProvenance, PropertyWithMetadata, - PropertyWithMetadataObject, ValueMetadata, ValueWithMetadata, + use graph_types::knowledge::property::{ + visitor::{EntityVisitor, TraversalError}, + Property, PropertyMetadata, PropertyObject, PropertyProvenance, PropertyWithMetadata, + PropertyWithMetadataObject, PropertyWithMetadataValue, ValueMetadata, }; use serde_json::Value as JsonValue; use thiserror::Error; - use type_system::schema::{DataType, EntityType, PropertyType}; + use type_system::{ + schema::{ + ClosedEntityType, DataType, DataTypeProvider, EntityType, EntityTypeProvider, + OntologyTypeProvider, PropertyType, PropertyTypeProvider, + }, + url::{BaseUrl, VersionedUrl}, + }; use super::*; use crate::error::install_error_stack_hooks; @@ -253,6 +222,8 @@ mod tests { } } + impl PropertyTypeProvider for Provider {} + impl OntologyTypeProvider for Provider { #[expect(refining_impl_trait)] async fn provide_type( @@ -284,7 +255,7 @@ mod tests { #[expect(refining_impl_trait)] async fn has_children( &self, - _data_type: DataTypeId, + _data_type: &VersionedUrl, ) -> Result> { Ok(false) } @@ -298,7 +269,7 @@ mod tests { property_types: impl IntoIterator + Send, data_types: impl IntoIterator + Send, components: ValidateEntityComponents, - ) -> Result<(), Report> { + ) -> Result> { install_error_stack_hooks(); let provider = Provider::new( @@ -318,22 +289,27 @@ mod tests { serde_json::from_str::(entity_type).expect("failed to parse entity type"), ); - let properties = - serde_json::from_str::(entity).expect("failed to read entity string"); + let mut properties = PropertyWithMetadataObject::from_parts( + serde_json::from_str::(entity).expect("failed to read entity string"), + None, + ) + .expect("failed to create property with metadata"); - PropertyWithMetadataObject::from_parts(properties, None) - .expect("failed to create property with metadata") - 
.validate(&entity_type, components, &provider) - .await + EntityPreprocessor { components } + .visit_object(&entity_type, &mut properties, &provider) + .await?; + + Ok(properties) } pub(crate) async fn validate_property( property: JsonValue, + metadata: Option, property_type: &'static str, property_types: impl IntoIterator + Send, data_types: impl IntoIterator + Send, components: ValidateEntityComponents, - ) -> Result<(), Report> { + ) -> Result> { install_error_stack_hooks(); let property = Property::deserialize(property).expect("failed to deserialize property"); @@ -351,18 +327,19 @@ mod tests { let property_type: PropertyType = serde_json::from_str(property_type).expect("failed to parse property type"); - let property = PropertyWithMetadata::from_parts(property, None) + let mut property = PropertyWithMetadata::from_parts(property, metadata) .expect("failed to create property with metadata"); - property_type - .validate_value(&property, components, &provider) - .await + EntityPreprocessor { components } + .visit_property(&property_type, &mut property, &provider) + .await?; + Ok(property) } pub(crate) async fn validate_data( - value: JsonValue, + mut value: JsonValue, data_type: &str, components: ValidateEntityComponents, - ) -> Result<(), Report> { + ) -> Result> { install_error_stack_hooks(); let provider = Provider::new([], [], [], []); @@ -370,15 +347,15 @@ mod tests { let data_type: DataType = serde_json::from_str(data_type).expect("failed to parse data type"); - ValueWithMetadata { - value, - metadata: ValueMetadata { - data_type_id: None, - provenance: PropertyProvenance::default(), - confidence: None, - }, - } - .validate(&data_type, components, &provider) - .await + let mut metadata = ValueMetadata { + data_type_id: Some(data_type.id.clone()), + provenance: PropertyProvenance::default(), + confidence: None, + }; + + EntityPreprocessor { components } + .visit_value(&data_type, &mut value, &mut metadata, &provider) + .await?; + Ok(PropertyWithMetadataValue 
{ value, metadata }) } } diff --git a/libs/@local/hash-validation/src/property.rs b/libs/@local/hash-validation/src/property.rs index b394f5f5184..aaf2f4fe496 100644 --- a/libs/@local/hash-validation/src/property.rs +++ b/libs/@local/hash-validation/src/property.rs @@ -1,5 +1,5 @@ use error_stack::Report; -use graph_types::knowledge::{PropertyMetadataObject, PropertyObject}; +use graph_types::knowledge::property::{PropertyMetadataObject, PropertyObject}; use crate::{EntityValidationError, Validate, ValidateEntityComponents}; diff --git a/libs/@local/hash-validation/src/property_type.rs b/libs/@local/hash-validation/src/property_type.rs deleted file mode 100644 index 7b9de5faba4..00000000000 --- a/libs/@local/hash-validation/src/property_type.rs +++ /dev/null @@ -1,764 +0,0 @@ -use core::borrow::Borrow; -use std::collections::HashMap; - -use error_stack::{bail, Report, ResultExt}; -use graph_types::knowledge::{PropertyWithMetadata, ValueMetadata, ValueWithMetadata}; -use serde_json::{json, Value as JsonValue}; -use thiserror::Error; -use type_system::{ - schema::{ - ArraySchema, JsonSchemaValueType, ObjectSchema, OneOfSchema, PropertyType, - PropertyTypeReference, PropertyValues, ValueOrArray, - }, - url::{BaseUrl, VersionedUrl}, -}; - -use crate::{ - error::{Actual, Expected}, - DataTypeProvider, DataValidationError, OntologyTypeProvider, Schema, ValidateEntityComponents, -}; - -macro_rules! extend_report { - ($status:ident, $error:expr $(,)?) 
=> { - if let Err(ref mut report) = $status { - report.extend_one(error_stack::report!($error)) - } else { - $status = Err(error_stack::report!($error)) - } - }; -} - -#[derive(Debug, Error)] -pub enum PropertyValidationError { - #[error("the validator was unable to read the property type `{id}`")] - PropertyTypeRetrieval { id: VersionedUrl }, - #[error("data type validation failed for data type with id `{id}`")] - DataTypeValidation { id: VersionedUrl }, - #[error("the property `{key}` was specified, but not in the schema")] - UnexpectedProperty { key: BaseUrl }, - #[error("the value provided does not match the required schema for `{key}`")] - InvalidProperty { key: BaseUrl }, - #[error("the property key `{key}` is not a valid Base URL")] - InvalidPropertyKey { key: String }, - #[error("the property `{key}` was required, but not specified")] - MissingRequiredProperty { key: BaseUrl }, - #[error( - "the number of items in the array is too small, expected at least {min}, but found \ - {actual}" - )] - TooFewItems { actual: usize, min: usize }, - #[error( - "the number of items in the array is too large, expected at most {max}, but found {actual}" - )] - TooManyItems { actual: usize, max: usize }, - #[error( - "the value provided does not match the property type schema, expected `{expected}`, got \ - `{actual}`" - )] - InvalidType { - actual: JsonSchemaValueType, - expected: JsonSchemaValueType, - }, - #[error( - "a value of type `{expected}` was expected, but the property provided was of type \ - `{actual}`" - )] - ExpectedValue { - actual: JsonSchemaValueType, - expected: VersionedUrl, - }, - #[error("The property provided is ambiguous")] - AmbiguousProperty { actual: PropertyWithMetadata }, -} - -impl

Schema for PropertyType -where - P: OntologyTypeProvider + DataTypeProvider + Sync, -{ - type Error = PropertyValidationError; - - async fn validate_value<'a>( - &'a self, - value: &'a PropertyWithMetadata, - components: ValidateEntityComponents, - provider: &'a P, - ) -> Result<(), Report> { - // TODO: Distinguish between format validation and content validation so it's possible - // to directly use the correct type. - // see https://linear.app/hash/issue/BP-33 - OneOfSchema { - possibilities: self.one_of.clone(), - } - .validate_value(value, components, provider) - .await - .attach_lazy(|| Expected::PropertyType(self.clone())) - .attach_lazy(|| Actual::Property(value.clone())) - } -} - -impl

Schema for PropertyTypeReference -where - P: OntologyTypeProvider + DataTypeProvider + Sync, -{ - type Error = PropertyValidationError; - - async fn validate_value<'a>( - &'a self, - value: &'a PropertyWithMetadata, - components: ValidateEntityComponents, - provider: &'a P, - ) -> Result<(), Report> { - let property_type = OntologyTypeProvider::::provide_type(provider, &self.url) - .await - .change_context_lazy(|| PropertyValidationError::PropertyTypeRetrieval { - id: self.url.clone(), - })?; - property_type - .borrow() - .validate_value(value, components, provider) - .await - .attach_lazy(|| Expected::PropertyType(property_type.borrow().clone())) - .attach_lazy(|| Actual::Property(value.clone())) - } -} - -impl Schema<[V], P> for ArraySchema -where - V: Sync, - P: Sync, - S: Schema + Sync, -{ - type Error = PropertyValidationError; - - async fn validate_value<'a>( - &'a self, - value: &'a [V], - components: ValidateEntityComponents, - provider: &'a P, - ) -> Result<(), Report> { - let mut status: Result<(), Report> = Ok(()); - - if components.num_items { - if let Some(min) = self.min_items { - if value.len() < min { - extend_report!( - status, - PropertyValidationError::TooFewItems { - actual: value.len(), - min, - }, - ); - } - } - - if let Some(max) = self.max_items { - if value.len() > max { - extend_report!( - status, - PropertyValidationError::TooManyItems { - actual: value.len(), - max, - }, - ); - } - } - } - - for value in value { - if let Err(report) = self.items.validate_value(value, components, provider).await { - extend_report!(status, report); - } - } - - status - } -} - -impl

Schema for OneOfSchema -where - P: OntologyTypeProvider + DataTypeProvider + Sync, -{ - type Error = PropertyValidationError; - - async fn validate_value<'a>( - &'a self, - value: &'a PropertyWithMetadata, - components: ValidateEntityComponents, - provider: &'a P, - ) -> Result<(), Report> { - let mut status: Result<(), Report> = Ok(()); - - let mut passed = 0; - let mut candidates = 0; - for schema in &self.possibilities { - if let Err(error) = schema.validate_value(value, components, provider).await { - // If a data type is ambiguous because of a missing data type ID in the metadata we - // cannot validate it. We must not treat this as a failed validation, because the - // data type might be a child of the expected data type, which we cannot know - // without the data type ID. - // This is only interesting if there are two data types possible, one is a match - // and one is ambiguous. In this case we want to return an error, because the - // ambiguous data type might match expected data type. - if error.frames().any(|frame| { - matches!( - frame.downcast_ref::(), - Some(DataValidationError::AmbiguousDataType { .. }) - ) - }) { - candidates += 1; - } - extend_report!(status, error); - } else { - passed += 1; - } - } - - match (passed, candidates) { - // `OneOfSchema` requires at least one element, so if none passed, it's an error - (0, _) => status, - (1, 0) => Ok(()), - // `oneOf` requires exactly one element to pass, so if more than one passed, it's an - // error - // TODO: Remove this branch when changing to `anyOf` in the schema. 
- // see https://linear.app/hash/issue/BP-105/fix-type-system-to-use-anyof-instead-of-oneof - _ => { - extend_report!( - status, - PropertyValidationError::AmbiguousProperty { - actual: value.clone(), - } - ); - status - } - } - } -} - -impl Schema for ValueOrArray -where - P: Sync, - S: Schema + Sync, -{ - type Error = PropertyValidationError; - - async fn validate_value<'a>( - &'a self, - value: &'a PropertyWithMetadata, - components: ValidateEntityComponents, - provider: &'a P, - ) -> Result<(), Report> { - match (value, self) { - (value, Self::Value(schema)) => { - schema.validate_value(value, components, provider).await - } - (PropertyWithMetadata::Array { value, metadata: _ }, Self::Array(schema)) => { - schema.validate_value(value, components, provider).await - } - (_, Self::Array(_)) => { - bail!(PropertyValidationError::InvalidType { - actual: value.json_type(), - expected: JsonSchemaValueType::Array, - }) - } - } - } -} - -impl

Schema, P> - for ObjectSchema> -where - P: OntologyTypeProvider + DataTypeProvider + Sync, -{ - type Error = PropertyValidationError; - - async fn validate_value<'a>( - &'a self, - value: &'a HashMap, - components: ValidateEntityComponents, - provider: &'a P, - ) -> Result<(), Report> { - let mut status: Result<(), Report> = Ok(()); - - for (key, property) in value { - if let Some(object_schema) = self.properties.get(key) { - if let Err(report) = object_schema - .validate_value(property, components, provider) - .await - { - extend_report!( - status, - report.change_context(PropertyValidationError::InvalidProperty { - key: key.clone(), - }) - ); - } - } else { - extend_report!( - status, - PropertyValidationError::UnexpectedProperty { key: key.clone() } - ); - } - } - - if components.required_properties { - for required_property in &self.required { - if !value.contains_key(required_property) { - extend_report!( - status, - PropertyValidationError::MissingRequiredProperty { - key: required_property.clone(), - } - ); - } - } - } - - status - } -} - -impl

Schema for PropertyValues -where - P: OntologyTypeProvider + DataTypeProvider + Sync, -{ - type Error = PropertyValidationError; - - fn validate_value<'a>( - &'a self, - value: &'a PropertyWithMetadata, - components: ValidateEntityComponents, - provider: &'a P, - ) -> impl Future>> + Send + 'a { - Box::pin(async move { - match self { - Self::DataTypeReference(reference) => match value { - PropertyWithMetadata::Value(property) => reference - .validate_value(property, components, provider) - .await - .change_context(PropertyValidationError::DataTypeValidation { - id: reference.url.clone(), - }), - PropertyWithMetadata::Array { value, metadata } => reference - .validate_value( - &ValueWithMetadata { - value: JsonValue::Array( - value - .iter() - .map(|value| json!(value.clone().into_parts().0)) - .collect(), - ), - metadata: ValueMetadata { - provenance: metadata.provenance.clone(), - confidence: metadata.confidence, - data_type_id: None, - }, - }, - components, - provider, - ) - .await - .change_context(PropertyValidationError::DataTypeValidation { - id: reference.url.clone(), - }), - PropertyWithMetadata::Object { value, metadata } => reference - .validate_value( - &ValueWithMetadata { - value: JsonValue::Object( - value - .iter() - .map(|(key, value)| { - (key.to_string(), json!(value.clone().into_parts().0)) - }) - .collect(), - ), - metadata: ValueMetadata { - provenance: metadata.provenance.clone(), - confidence: metadata.confidence, - data_type_id: None, - }, - }, - components, - provider, - ) - .await - .change_context(PropertyValidationError::DataTypeValidation { - id: reference.url.clone(), - }), - }, - Self::ArrayOfPropertyValues(schema) => match value { - PropertyWithMetadata::Value { .. 
} => { - Err(Report::new(PropertyValidationError::InvalidType { - actual: value.json_type(), - expected: JsonSchemaValueType::Array, - })) - } - PropertyWithMetadata::Array { value, metadata: _ } => { - schema.validate_value(value, components, provider).await - } - PropertyWithMetadata::Object { .. } => { - Err(Report::new(PropertyValidationError::InvalidType { - actual: JsonSchemaValueType::Object, - expected: JsonSchemaValueType::Array, - })) - } - }, - Self::PropertyTypeObject(schema) => match value { - PropertyWithMetadata::Value { .. } => { - Err(Report::new(PropertyValidationError::InvalidType { - actual: value.json_type(), - expected: JsonSchemaValueType::Object, - })) - } - PropertyWithMetadata::Array { .. } => { - Err(Report::new(PropertyValidationError::InvalidType { - actual: JsonSchemaValueType::Array, - expected: JsonSchemaValueType::Object, - })) - } - PropertyWithMetadata::Object { value, metadata: _ } => { - schema.validate_value(value, components, provider).await - } - }, - } - }) - } -} - -#[cfg(test)] -mod tests { - - use serde_json::json; - - use crate::{tests::validate_property, ValidateEntityComponents}; - - #[tokio::test] - async fn address_line_1() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("123 Fake Street"), - graph_test_data::property_type::ADDRESS_LINE_1_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn age() { - let property_types = []; - let data_types = [graph_test_data::data_type::NUMBER_V1]; - - validate_property( - json!(42), - graph_test_data::property_type::AGE_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn blurb() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("blurb"), - 
graph_test_data::property_type::BLURB_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn city() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("Bielefeld"), - graph_test_data::property_type::CITY_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn contact_information() { - let property_types = [ - graph_test_data::property_type::EMAIL_V1, - graph_test_data::property_type::PHONE_NUMBER_V1, - ]; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json! ({ - "https://blockprotocol.org/@alice/types/property-type/email/": "alice@example", - "https://blockprotocol.org/@alice/types/property-type/phone-number/": "+0123456789", - }), - graph_test_data::property_type::CONTACT_INFORMATION_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn contrived_information() { - let property_types = []; - let data_types = [graph_test_data::data_type::NUMBER_V1]; - - validate_property( - json!([12, 34, 56, 78]), - graph_test_data::property_type::CONTRIVED_PROPERTY_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_property( - json!(12_34_56_78), - graph_test_data::property_type::CONTRIVED_PROPERTY_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_property( - json!([10, 20, 30, 40, 50]), - graph_test_data::property_type::CONTRIVED_PROPERTY_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - } - - #[tokio::test] - async fn email() { - let property_types = []; - let 
data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("alice@example.com"), - graph_test_data::property_type::EMAIL_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn favorite_film() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("Teletubbies"), - graph_test_data::property_type::FAVORITE_FILM_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn favorite_quote() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("hold my beer"), - graph_test_data::property_type::FAVORITE_QUOTE_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn favorite_song() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("Never gonna give you up"), - graph_test_data::property_type::FAVORITE_SONG_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn favorite_hobby() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("Programming in Rust"), - graph_test_data::property_type::HOBBY_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn numbers() { - let property_types = []; - let data_types = [graph_test_data::data_type::NUMBER_V1]; - - validate_property( - json!([1, 2, 3, 4, 5]), - graph_test_data::property_type::NUMBERS_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - 
.expect("validation failed"); - } - - #[tokio::test] - async fn phone_number() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("+0123456789"), - graph_test_data::property_type::PHONE_NUMBER_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn postcode() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("12345"), - graph_test_data::property_type::POSTCODE_NUMBER_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn published_on() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("2021-01-01T00:00:00Z"), - graph_test_data::property_type::PUBLISHED_ON_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn text() { - let property_types = []; - let data_types = [graph_test_data::data_type::TEXT_V1]; - - validate_property( - json!("lorem ipsum"), - graph_test_data::property_type::TEXT_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } - - #[tokio::test] - async fn user_id() { - let property_types = []; - let data_types = [ - graph_test_data::data_type::TEXT_V1, - graph_test_data::data_type::NUMBER_V1, - ]; - - validate_property( - json!("1"), - graph_test_data::property_type::USER_ID_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - _ = validate_property( - json!(1), - graph_test_data::property_type::USER_ID_V1, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect_err("validation succeeded"); - - 
validate_property( - json!("1"), - graph_test_data::property_type::USER_ID_V2, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - - validate_property( - json!(1), - graph_test_data::property_type::USER_ID_V2, - property_types, - data_types, - ValidateEntityComponents::full(), - ) - .await - .expect("validation failed"); - } -} diff --git a/libs/@local/hash-validation/src/test_data_type.rs b/libs/@local/hash-validation/src/test_data_type.rs new file mode 100644 index 00000000000..8af4f780b1d --- /dev/null +++ b/libs/@local/hash-validation/src/test_data_type.rs @@ -0,0 +1,1383 @@ +#![cfg(test)] + +use serde_json::json; +use uuid::Uuid; + +use crate::{tests::validate_data, ValidateEntityComponents}; + +#[tokio::test] +async fn null() { + validate_data( + json!(null), + graph_test_data::data_type::NULL_V1, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn boolean() { + validate_data( + json!(true), + graph_test_data::data_type::BOOLEAN_V1, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn number() { + validate_data( + json!(42), + graph_test_data::data_type::NUMBER_V1, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn integer() { + let integer_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/integer/v/1", + "title": "Integer", + "type": "integer" + })) + .expect("failed to serialize temperature unit type"); + + validate_data(json!(10), &integer_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + validate_data(json!(-10), &integer_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + validate_data(json!(1.0), 
&integer_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + _ = validate_data( + json!(core::f64::consts::PI), + &integer_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); + + _ = validate_data( + json!("foo"), + &integer_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn string() { + validate_data( + json!("foo"), + graph_test_data::data_type::TEXT_V1, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn array() { + validate_data( + json!([]), + graph_test_data::data_type::EMPTY_LIST_V1, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_data( + json!(["foo", "bar"]), + graph_test_data::data_type::EMPTY_LIST_V1, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn object() { + validate_data( + json!({ + "foo": "bar", + "baz": "qux" + }), + graph_test_data::data_type::OBJECT_V1, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn temperature_unit() { + let meter_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/temperature-unit/v/1", + "title": "Temperature Unit", + "type": "string", + "enum": ["Celsius", "Fahrenheit", "Kelvin"] + })) + .expect("failed to serialize temperature unit type"); + + validate_data( + json!("Celsius"), + &meter_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("Fahrenheit"), + &meter_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_data(json!("foo"), &meter_type, ValidateEntityComponents::full()) + 
.await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn meter() { + let meter_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/meter/v/1", + "title": "Meter", + "type": "number", + "minimum": 0, + })) + .expect("failed to serialize meter type"); + + validate_data(json!(10), &meter_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + validate_data(json!(0.0), &meter_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + _ = validate_data(json!(-1.0), &meter_type, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn uri() { + let url_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/url/v/1", + "title": "Url", + "type": "string", + "format": "uri", + })) + .expect("failed to serialize uri type"); + + validate_data( + json!("localhost:3000"), + &url_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("https://blockprotocol.org/types/modules/graph/0.3/schema/data-type"), + &url_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_data(json!("10"), &url_type, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn uuid() { + let uuid_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/uuid/v/1", + "title": "UUID", + "type": "string", + "format": "uuid", + })) + .expect("failed to serialize uuid type"); + + validate_data( + 
json!(Uuid::nil()), + &uuid_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("00000000-0000-0000-0000-000000000000"), + &uuid_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("AC8E0011-84C3-4A7E-872D-1B9F86DB0479"), + &uuid_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("urn:uuid:cc2c0477-2fe7-4eb4-af7b-45bfe7d7bb26"), + &uuid_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("9544f491598e4c238f6bbb8c1f7d05c9"), + &uuid_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_data(json!("10"), &uuid_type, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn email() { + let mail_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/email/v/1", + "title": "E-Mail", + "type": "string", + "format": "email", + })) + .expect("failed to serialize email type"); + + validate_data( + json!("bob@example.com"), + &mail_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("user.name+tag+sorting@example.com"), + &mail_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_data( + json!("job!done"), + &mail_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn zip_code_us() { + let zip_code = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/zip-code-us/v/1", 
+ "title": "Zip code (US)", + "type": "string", + "pattern": "^[0-9]{5}(?:-[0-9]{4})?$", + })) + .expect("failed to serialize zip code type"); + + validate_data(json!("12345"), &zip_code, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + validate_data( + json!("12345-6789"), + &zip_code, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_data(json!("1234"), &zip_code, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn ipv4() { + let ipv4_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/ipv4/v/1", + "title": "IPv4", + "type": "string", + "format": "ipv4", + })) + .expect("failed to serialize ipv4 type"); + + validate_data( + json!("127.0.0.1"), + &ipv4_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("0.0.0.0"), + &ipv4_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("255.255.255.255"), + &ipv4_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_data( + json!("255.255.255.256"), + &ipv4_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); + + _ = validate_data( + json!("localhost"), + &ipv4_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn ipv6() { + let ipv6_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/ipv6/v/1", + "title": "IPv6", + "type": "string", + "format": "ipv6", + })) + .expect("failed to serialize ipv6 type"); + + 
validate_data(json!("::1"), &ipv6_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + validate_data(json!("::"), &ipv6_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + validate_data( + json!("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"), + &ipv6_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_data( + json!("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff"), + &ipv6_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); + + _ = validate_data( + json!("localhost"), + &ipv6_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn hostname() { + let hostname_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/hostname/v/1", + "title": "Hostname", + "type": "string", + "format": "hostname", + })) + .expect("failed to serialize hostname type"); + + validate_data( + json!("localhost"), + &hostname_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("[::1]"), + &hostname_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("127.0.0.1"), + &hostname_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("example.com"), + &hostname_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("subdomain.example.com"), + &hostname_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_data( + json!("subdomain.example.com."), + &hostname_type, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = 
validate_data( + json!("localhost:3000"), + &hostname_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); + + _ = validate_data( + json!("::1"), + &hostname_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn regex() { + let regex_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/regex/v/1", + "title": "Regex", + "type": "string", + "format": "regex", + })) + .expect("failed to serialize regex type"); + + validate_data(json!("^a*$"), &regex_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + validate_data(json!("^a+$"), &regex_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + _ = validate_data(json!("("), &regex_type, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn short_string() { + let url_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/short-string/v/1", + "title": "Short string", + "type": "string", + "minLength": 1, + "maxLength": 10, + })) + .expect("failed to serialize short string type"); + + validate_data(json!("foo"), &url_type, ValidateEntityComponents::full()) + .await + .expect("validation failed"); + + _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); + + _ = validate_data( + json!("foo bar baz"), + &url_type, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +#[expect(clippy::too_many_lines, reason = "Most lines are just test data")] +async fn date_time() { + const VALID_FORMATS: &[&str] = &[
+ "2023-12-22T17:48:15Z", // %Y-%M-%DT%h:%m:%sZ + "2023-12-22T17:48:15.0Z", // %Y-%M-%DT%h:%m:%.1sZ + "2023-12-22T17:48:15.08Z", // %Y-%M-%DT%h:%m:%.2sZ + "2023-12-22T17:48:15.083Z", // %Y-%M-%DT%h:%m:%.3sZ + "2023-12-22T17:48:15.083212Z", // %Y-%M-%DT%h:%m:%s.%uZ + "2023-12-22T18:48:15.083212+01:00", // %Y-%M-%DT%h:%m:%s.%u%Z:%z + "2023-12-22T18:48:15+01:00", // %Y-%M-%DT%h:%m:%s%Z:%z + "2023-12-22T18:48:15.083+01:00", // %Y-%M-%DT%h:%m:%.3s%Z:%z + "2023-12-23T02:33:15+08:45", // %Y-%M-%DT%h:%m:%s+08:45 + "2023-12-22T17:48:15+00:00", // %Y-%M-%DT%h:%m:%s+00:00 + "2023-12-22T18:48:15.0+01:00", // %Y-%M-%DT%h:%m:%.1s%Z:%z + "2023-12-22T18:48:15.08+01:00", // %Y-%M-%DT%h:%m:%.2s%Z:%z + "2023-12-22T17:48:15.083+00:00", // %Y-%M-%DT%h:%m:%.3s+00:00 + "2023-12-22T17:48:15-00:00", // %Y-%M-%DT%h:%m:%s-00:00 + "2023-12-22T17:48:15.083-00:00", // %Y-%M-%DT%h:%m:%.3s-00:00 + ]; + + const INVALID_FORMATS: &[&str] = &[ + "2023-12-22t17:48:15z", // %Y-%M-%Dt%h:%m:%sz + "2023-12-22t17:48:15.083z", // %Y-%M-%Dt%h:%m:%.3sz + "2023-12-22 18:48:15+01:00", // %Y-%M-%D %h:%m:%s%Z:%z + "2023-12-22 18:48:15.0+01:00", // %Y-%M-%D %h:%m:%.1s%Z:%z + "2023-12-22 18:48:15.08+01:00", // %Y-%M-%D %h:%m:%.2s%Z:%z + "2023-12-22 18:48:15.083+01:00", // %Y-%M-%D %h:%m:%.3s%Z:%z + "2023-12-22 18:48:15.083212+01:00", // %Y-%M-%D %h:%m:%s.%u%Z:%z + "2023-12-22 17:48:15Z", // %Y-%M-%D %h:%m:%sZ + "2023-12-22 17:48:15z", // %Y-%M-%D %h:%m:%sz + "2023-12-22 17:48:15.0Z", // %Y-%M-%D %h:%m:%.1sZ + "2023-12-22 17:48:15.08Z", // %Y-%M-%D %h:%m:%.2sZ + "2023-12-22 17:48:15.083Z", // %Y-%M-%D %h:%m:%.3sZ + "2023-12-22 17:48:15.083212Z", // %Y-%M-%D %h:%m:%s.%uZ + "2023-12-22 17:48:15.083z", // %Y-%M-%D %h:%m:%.3sz + "2023-12-22 17:48:15.083212z", // %Y-%M-%D %h:%m:%s.%uz + "2023-12-22 17:48:15-00:00", // %Y-%M-%D %h:%m:%s-00:00 + "2023-12-22 17:48:15.083-00:00", // %Y-%M-%D %h:%m:%.3s-00:00 + "2023-12-22_17:48:15Z", // %Y-%M-%D_%h:%m:%sZ + "2023-12-22_17:48:15z", // %Y-%M-%D_%h:%m:%sz + 
"2023-12-22_17:48:15.083Z", // %Y-%M-%D_%h:%m:%.3sZ + "2023-12-22_17:48:15.083212Z", // %Y-%M-%D_%h:%m:%s.%uZ + "2023-12-22_17:48:15.083z", // %Y-%M-%D_%h:%m:%.3sz + "2023-12-22_17:48:15.083212z", // %Y-%M-%D_%h:%m:%s.%uz + "2023-12-22T18", // %Y-%M-%DT%h + "2023-12-22T18,8", // %Y-%M-%DT%,1h + "2023-12-22T18.8", // %Y-%M-%DT%.1h + "2023-12-22T18:48", // %Y-%M-%DT%h:%m + "2023-12-22T18:48,2", // %Y-%M-%DT%h:%,1m + "2023-12-22T18:48.2", // %Y-%M-%DT%h:%.1m + "2023-12-22T18:48:15", // %Y-%M-%DT%h:%m:%s + "2023-12-22T18:48:15.0", // %Y-%M-%DT%h:%m:%.1s + "2023-12-22T18:48:15.08", // %Y-%M-%DT%h:%m:%.2s + "2023-12-22T18:48:15,083", // %Y-%M-%DT%h:%m:%,3s + "2023-12-22T18:48:15.083", // %Y-%M-%DT%h:%m:%.3s + "2023-12-22T18:48:15,083212", // %Y-%M-%DT%h:%m:%s,%u + "2023-12-22T18:48:15.083212", // %Y-%M-%DT%h:%m:%s.%u + "2023-12-22T17Z", // %Y-%M-%DT%hZ + "2023-12-22T17,8Z", // %Y-%M-%DT%,1hZ + "2023-12-22T17.8Z", // %Y-%M-%DT%.1hZ + "2023-12-22T17:48Z", // %Y-%M-%DT%h:%mZ + "2023-12-22T17:48,2Z", // %Y-%M-%DT%h:%,1mZ + "2023-12-22T17:48.2Z", // %Y-%M-%DT%h:%.1mZ + "2023-12-22T17:48:15,083Z", // %Y-%M-%DT%h:%m:%,3sZ + "2023-12-22T17:48:15,083212Z", // %Y-%M-%DT%h:%m:%s,%uZ + "2023-12-22T18+01", // %Y-%M-%DT%h%Z + "2023-12-22T18,8+01", // %Y-%M-%DT%,1h%Z + "2023-12-22T18.8+01", // %Y-%M-%DT%.1h%Z + "2023-12-22T18:48+01", // %Y-%M-%DT%h:%m%Z + "2023-12-22T18:48,2+01", // %Y-%M-%DT%h:%,1m%Z + "2023-12-22T18:48.2+01", // %Y-%M-%DT%h:%.1m%Z + "2023-12-22T18:48:15+01", // %Y-%M-%DT%h:%m:%s%Z + "2023-12-22T18:48:15.0+01", // %Y-%M-%DT%h:%m:%.1s%Z + "2023-12-22T18:48:15.08+01", // %Y-%M-%DT%h:%m:%.2s%Z + "2023-12-22T18:48:15,083+01", // %Y-%M-%DT%h:%m:%,3s%Z + "2023-12-22T18:48:15.083+01", // %Y-%M-%DT%h:%m:%.3s%Z + "2023-12-22T18:48:15,083212+01", // %Y-%M-%DT%h:%m:%s,%u%Z + "2023-12-22T18:48:15.083212+01", // %Y-%M-%DT%h:%m:%s.%u%Z + "2023-12-22T18+01:00", // %Y-%M-%DT%h%Z:%z + "2023-12-22T18,8+01:00", // %Y-%M-%DT%,1h%Z:%z + "2023-12-22T18.8+01:00", // %Y-%M-%DT%.1h%Z:%z + 
"2023-12-22T18:48+01:00", // %Y-%M-%DT%h:%m%Z:%z + "2023-12-22T18:48,2+01:00", // %Y-%M-%DT%h:%,1m%Z:%z + "2023-12-22T18:48.2+01:00", // %Y-%M-%DT%h:%.1m%Z:%z + "2023-12-22T18:48:15,083+01:00", // %Y-%M-%DT%h:%m:%,3s%Z:%z + "2023-12-22T18:48:15,083212+01:00", // %Y-%M-%DT%h:%m:%s,%u%Z:%z + "2023-W51-5T18", // %V-W%W-%wT%h + "2023-W51-5T18,8", // %V-W%W-%wT%,1h + "2023-W51-5T18.8", // %V-W%W-%wT%.1h + "2023-W51-5T18:48", // %V-W%W-%wT%h:%m + "2023-W51-5T18:48,2", // %V-W%W-%wT%h:%,1m + "2023-W51-5T18:48.2", // %V-W%W-%wT%h:%.1m + "2023-W51-5T18:48:15", // %V-W%W-%wT%h:%m:%s + "2023-W51-5T18:48:15.0", // %V-W%W-%wT%h:%m:%.1s + "2023-W51-5T18:48:15.08", // %V-W%W-%wT%h:%m:%.2s + "2023-W51-5T18:48:15,083", // %V-W%W-%wT%h:%m:%,3s + "2023-W51-5T18:48:15.083", // %V-W%W-%wT%h:%m:%.3s + "2023-W51-5T18:48:15,083212", // %V-W%W-%wT%h:%m:%s,%u + "2023-W51-5T18:48:15.083212", // %V-W%W-%wT%h:%m:%s.%u + "2023-W51-5T17Z", // %V-W%W-%wT%hZ + "2023-W51-5T17,8Z", // %V-W%W-%wT%,1hZ + "2023-W51-5T17.8Z", // %V-W%W-%wT%.1hZ + "2023-W51-5T17:48Z", // %V-W%W-%wT%h:%mZ + "2023-W51-5T17:48,2Z", // %V-W%W-%wT%h:%,1mZ + "2023-W51-5T17:48.2Z", // %V-W%W-%wT%h:%.1mZ + "2023-W51-5T17:48:15Z", // %V-W%W-%wT%h:%m:%sZ + "2023-W51-5T17:48:15.0Z", // %V-W%W-%wT%h:%m:%.1sZ + "2023-W51-5T17:48:15.08Z", // %V-W%W-%wT%h:%m:%.2sZ + "2023-W51-5T17:48:15,083Z", // %V-W%W-%wT%h:%m:%,3sZ + "2023-W51-5T17:48:15.083Z", // %V-W%W-%wT%h:%m:%.3sZ + "2023-W51-5T17:48:15,083212Z", // %V-W%W-%wT%h:%m:%s,%uZ + "2023-W51-5T17:48:15.083212Z", // %V-W%W-%wT%h:%m:%s.%uZ + "2023-W51-5T18+01", // %V-W%W-%wT%h%Z + "2023-W51-5T18,8+01", // %V-W%W-%wT%,1h%Z + "2023-W51-5T18.8+01", // %V-W%W-%wT%.1h%Z + "2023-W51-5T18:48+01", // %V-W%W-%wT%h:%m%Z + "2023-W51-5T18:48,2+01", // %V-W%W-%wT%h:%,1m%Z + "2023-W51-5T18:48.2+01", // %V-W%W-%wT%h:%.1m%Z + "2023-W51-5T18:48:15+01", // %V-W%W-%wT%h:%m:%s%Z + "2023-W51-5T18:48:15.0+01", // %V-W%W-%wT%h:%m:%.1s%Z + "2023-W51-5T18:48:15.08+01", // %V-W%W-%wT%h:%m:%.2s%Z + 
"2023-W51-5T18:48:15,083+01", // %V-W%W-%wT%h:%m:%,3s%Z + "2023-W51-5T18:48:15.083+01", // %V-W%W-%wT%h:%m:%.3s%Z + "2023-W51-5T18:48:15,083212+01", // %V-W%W-%wT%h:%m:%s,%u%Z + "2023-W51-5T18:48:15.083212+01", // %V-W%W-%wT%h:%m:%s.%u%Z + "2023-W51-5T18+01:00", // %V-W%W-%wT%h%Z:%z + "2023-W51-5T18,8+01:00", // %V-W%W-%wT%,1h%Z:%z + "2023-W51-5T18.8+01:00", // %V-W%W-%wT%.1h%Z:%z + "2023-W51-5T18:48+01:00", // %V-W%W-%wT%h:%m%Z:%z + "2023-W51-5T18:48,2+01:00", // %V-W%W-%wT%h:%,1m%Z:%z + "2023-W51-5T18:48.2+01:00", // %V-W%W-%wT%h:%.1m%Z:%z + "2023-W51-5T18:48:15+01:00", // %V-W%W-%wT%h:%m:%s%Z:%z + "2023-W51-5T18:48:15.0+01:00", // %V-W%W-%wT%h:%m:%.1s%Z:%z + "2023-W51-5T18:48:15.08+01:00", // %V-W%W-%wT%h:%m:%.2s%Z:%z + "2023-W51-5T18:48:15,083+01:00", // %V-W%W-%wT%h:%m:%,3s%Z:%z + "2023-W51-5T18:48:15.083+01:00", // %V-W%W-%wT%h:%m:%.3s%Z:%z + "2023-W51-5T18:48:15,083212+01:00", // %V-W%W-%wT%h:%m:%s,%u%Z:%z + "2023-W51-5T18:48:15.083212+01:00", // %V-W%W-%wT%h:%m:%s.%u%Z:%z + "2023-356T18", // %Y-%OT%h + "2023-356T18,8", // %Y-%OT%,1h + "2023-356T18.8", // %Y-%OT%.1h + "2023-356T18:48", // %Y-%OT%h:%m + "2023-356T18:48,2", // %Y-%OT%h:%,1m + "2023-356T18:48.2", // %Y-%OT%h:%.1m + "2023-356T18:48:15", // %Y-%OT%h:%m:%s + "2023-356T18:48:15.0", // %Y-%OT%h:%m:%.1s + "2023-356T18:48:15.08", // %Y-%OT%h:%m:%.2s + "2023-356T18:48:15,083", // %Y-%OT%h:%m:%,3s + "2023-356T18:48:15.083", // %Y-%OT%h:%m:%.3s + "2023-356T18:48:15,083212", // %Y-%OT%h:%m:%s,%u + "2023-356T18:48:15.083212", // %Y-%OT%h:%m:%s.%u + "2023-356T17Z", // %Y-%OT%hZ + "2023-356T17,8Z", // %Y-%OT%,1hZ + "2023-356T17.8Z", // %Y-%OT%.1hZ + "2023-356T17:48Z", // %Y-%OT%h:%mZ + "2023-356T17:48,2Z", // %Y-%OT%h:%,1mZ + "2023-356T17:48.2Z", // %Y-%OT%h:%.1mZ + "2023-356T17:48:15Z", // %Y-%OT%h:%m:%sZ + "2023-356T17:48:15.0Z", // %Y-%OT%h:%m:%.1sZ + "2023-356T17:48:15.08Z", // %Y-%OT%h:%m:%.2sZ + "2023-356T17:48:15,083Z", // %Y-%OT%h:%m:%,3sZ + "2023-356T17:48:15.083Z", // %Y-%OT%h:%m:%.3sZ + 
"2023-356T17:48:15,083212Z", // %Y-%OT%h:%m:%s,%uZ + "2023-356T17:48:15.083212Z", // %Y-%OT%h:%m:%s.%uZ + "2023-356T18+01", // %Y-%OT%h%Z + "2023-356T18,8+01", // %Y-%OT%,1h%Z + "2023-356T18.8+01", // %Y-%OT%.1h%Z + "2023-356T18:48+01", // %Y-%OT%h:%m%Z + "2023-356T18:48,2+01", // %Y-%OT%h:%,1m%Z + "2023-356T18:48.2+01", // %Y-%OT%h:%.1m%Z + "2023-356T18:48:15+01", // %Y-%OT%h:%m:%s%Z + "2023-356T18:48:15.0+01", // %Y-%OT%h:%m:%.1s%Z + "2023-356T18:48:15.08+01", // %Y-%OT%h:%m:%.2s%Z + "2023-356T18:48:15,083+01", // %Y-%OT%h:%m:%,3s%Z + "2023-356T18:48:15.083+01", // %Y-%OT%h:%m:%.3s%Z + "2023-356T18:48:15,083212+01", // %Y-%OT%h:%m:%s,%u%Z + "2023-356T18:48:15.083212+01", // %Y-%OT%h:%m:%s.%u%Z + "2023-356T18+01:00", // %Y-%OT%h%Z:%z + "2023-356T18,8+01:00", // %Y-%OT%,1h%Z:%z + "2023-356T18.8+01:00", // %Y-%OT%.1h%Z:%z + "2023-356T18:48+01:00", // %Y-%OT%h:%m%Z:%z + "2023-356T18:48,2+01:00", // %Y-%OT%h:%,1m%Z:%z + "2023-356T18:48.2+01:00", // %Y-%OT%h:%.1m%Z:%z + "2023-356T18:48:15+01:00", // %Y-%OT%h:%m:%s%Z:%z + "2023-356T18:48:15.0+01:00", // %Y-%OT%h:%m:%.1s%Z:%z + "2023-356T18:48:15.08+01:00", // %Y-%OT%h:%m:%.2s%Z:%z + "2023-356T18:48:15,083+01:00", // %Y-%OT%h:%m:%,3s%Z:%z + "2023-356T18:48:15.083+01:00", // %Y-%OT%h:%m:%.3s%Z:%z + "2023-356T18:48:15,083212+01:00", // %Y-%OT%h:%m:%s,%u%Z:%z + "2023-356T18:48:15.083212+01:00", // %Y-%OT%h:%m:%s.%u%Z:%z + "20231222T18", // %Y%M%DT%h + "20231222T18,8", // %Y%M%DT%,1h + "20231222T18.8", // %Y%M%DT%.1h + "20231222T1848", // %Y%M%DT%h%m + "20231222T1848,2", // %Y%M%DT%h%,1m + "20231222T1848.2", // %Y%M%DT%h%.1m + "20231222T184815", // %Y%M%DT%h%m%s + "20231222T184815.0", // %Y%M%DT%h%m%.1s + "20231222T184815.08", // %Y%M%DT%h%m%.2s + "20231222T184815,083", // %Y%M%DT%h%m%,3s + "20231222T184815.083", // %Y%M%DT%h%m%.3s + "20231222T184815,083212", // %Y%M%DT%h%m%s,%u + "20231222T184815.083212", // %Y%M%DT%h%m%s.%u + "20231222T17Z", // %Y%M%DT%hZ + "20231222T17,8Z", // %Y%M%DT%,1hZ + "20231222T17.8Z", // 
%Y%M%DT%.1hZ + "20231222T1748Z", // %Y%M%DT%h%mZ + "20231222T1748,2Z", // %Y%M%DT%h%,1mZ + "20231222T1748.2Z", // %Y%M%DT%h%.1mZ + "20231222T174815Z", // %Y%M%DT%h%m%sZ + "20231222T174815.0Z", // %Y%M%DT%h%m%.1sZ + "20231222T174815.08Z", // %Y%M%DT%h%m%.2sZ + "20231222T174815,083Z", // %Y%M%DT%h%m%,3sZ + "20231222T174815.083Z", // %Y%M%DT%h%m%.3sZ + "20231222T174815,083212Z", // %Y%M%DT%h%m%s,%uZ + "20231222T174815.083212Z", // %Y%M%DT%h%m%s.%uZ + "20231222T18+01", // %Y%M%DT%h%Z + "20231222T18,8+01", // %Y%M%DT%,1h%Z + "20231222T18.8+01", // %Y%M%DT%.1h%Z + "20231222T1848+01", // %Y%M%DT%h%m%Z + "20231222T1848,2+01", // %Y%M%DT%h%,1m%Z + "20231222T1848.2+01", // %Y%M%DT%h%.1m%Z + "20231222T184815+01", // %Y%M%DT%h%m%s%Z + "20231222T184815.0+01", // %Y%M%DT%h%m%.1s%Z + "20231222T184815.08+01", // %Y%M%DT%h%m%.2s%Z + "20231222T184815,083+01", // %Y%M%DT%h%m%,3s%Z + "20231222T184815.083+01", // %Y%M%DT%h%m%.3s%Z + "20231222T184815,083212+01", // %Y%M%DT%h%m%s,%u%Z + "20231222T184815.083212+01", // %Y%M%DT%h%m%s.%u%Z + "20231222T18+0100", // %Y%M%DT%h%Z%z + "20231222T18,8+0100", // %Y%M%DT%,1h%Z%z + "20231222T18.8+0100", // %Y%M%DT%.1h%Z%z + "20231222T1848+0100", // %Y%M%DT%h%m%Z%z + "20231222T1848,2+0100", // %Y%M%DT%h%,1m%Z%z + "20231222T1848.2+0100", // %Y%M%DT%h%.1m%Z%z + "20231222T184815+0100", // %Y%M%DT%h%m%s%Z%z + "20231222T184815.0+0100", // %Y%M%DT%h%m%.1s%Z%z + "20231222T184815.08+0100", // %Y%M%DT%h%m%.2s%Z%z + "20231222T184815,083+0100", // %Y%M%DT%h%m%,3s%Z%z + "20231222T184815.083+0100", // %Y%M%DT%h%m%.3s%Z%z + "20231222T184815,083212+0100", // %Y%M%DT%h%m%s,%u%Z%z + "20231222T184815.083212+0100", // %Y%M%DT%h%m%s.%u%Z%z + "2023W515T18", // %VW%W%wT%h + "2023W515T18,8", // %VW%W%wT%,1h + "2023W515T18.8", // %VW%W%wT%.1h + "2023W515T1848", // %VW%W%wT%h%m + "2023W515T1848,2", // %VW%W%wT%h%,1m + "2023W515T1848.2", // %VW%W%wT%h%.1m + "2023W515T184815", // %VW%W%wT%h%m%s + "2023W515T184815.0", // %VW%W%wT%h%m%.1s + "2023W515T184815.08", // 
%VW%W%wT%h%m%.2s + "2023W515T184815,083", // %VW%W%wT%h%m%,3s + "2023W515T184815.083", // %VW%W%wT%h%m%.3s + "2023W515T184815,083212", // %VW%W%wT%h%m%s,%u + "2023W515T184815.083212", // %VW%W%wT%h%m%s.%u + "2023W515T17Z", // %VW%W%wT%hZ + "2023W515T17,8Z", // %VW%W%wT%,1hZ + "2023W515T17.8Z", // %VW%W%wT%.1hZ + "2023W515T1748Z", // %VW%W%wT%h%mZ + "2023W515T1748,2Z", // %VW%W%wT%h%,1mZ + "2023W515T1748.2Z", // %VW%W%wT%h%.1mZ + "2023W515T174815Z", // %VW%W%wT%h%m%sZ + "2023W515T174815.0Z", // %VW%W%wT%h%m%.1sZ + "2023W515T174815.08Z", // %VW%W%wT%h%m%.2sZ + "2023W515T174815,083Z", // %VW%W%wT%h%m%,3sZ + "2023W515T174815.083Z", // %VW%W%wT%h%m%.3sZ + "2023W515T174815,083212Z", // %VW%W%wT%h%m%s,%uZ + "2023W515T174815.083212Z", // %VW%W%wT%h%m%s.%uZ + "2023W515T18+01", // %VW%W%wT%h%Z + "2023W515T18,8+01", // %VW%W%wT%,1h%Z + "2023W515T18.8+01", // %VW%W%wT%.1h%Z + "2023W515T1848+01", // %VW%W%wT%h%m%Z + "2023W515T1848,2+01", // %VW%W%wT%h%,1m%Z + "2023W515T1848.2+01", // %VW%W%wT%h%.1m%Z + "2023W515T184815+01", // %VW%W%wT%h%m%s%Z + "2023W515T184815.0+01", // %VW%W%wT%h%m%.1s%Z + "2023W515T184815.08+01", // %VW%W%wT%h%m%.2s%Z + "2023W515T184815,083+01", // %VW%W%wT%h%m%,3s%Z + "2023W515T184815.083+01", // %VW%W%wT%h%m%.3s%Z + "2023W515T184815,083212+01", // %VW%W%wT%h%m%s,%u%Z + "2023W515T184815.083212+01", // %VW%W%wT%h%m%s.%u%Z + "2023W515T18+0100", // %VW%W%wT%h%Z%z + "2023W515T18,8+0100", // %VW%W%wT%,1h%Z%z + "2023W515T18.8+0100", // %VW%W%wT%.1h%Z%z + "2023W515T1848+0100", // %VW%W%wT%h%m%Z%z + "2023W515T1848,2+0100", // %VW%W%wT%h%,1m%Z%z + "2023W515T1848.2+0100", // %VW%W%wT%h%.1m%Z%z + "2023W515T184815+0100", // %VW%W%wT%h%m%s%Z%z + "2023W515T184815.0+0100", // %VW%W%wT%h%m%.1s%Z%z + "2023W515T184815.08+0100", // %VW%W%wT%h%m%.2s%Z%z + "2023W515T184815,083+0100", // %VW%W%wT%h%m%,3s%Z%z + "2023W515T184815.083+0100", // %VW%W%wT%h%m%.3s%Z%z + "2023W515T184815,083212+0100", // %VW%W%wT%h%m%s,%u%Z%z + "2023W515T184815.083212+0100", // %VW%W%wT%h%m%s.%u%Z%z + 
"2023356T18", // %Y%OT%h + "2023356T18,8", // %Y%OT%,1h + "2023356T18.8", // %Y%OT%.1h + "2023356T1848", // %Y%OT%h%m + "2023356T1848,2", // %Y%OT%h%,1m + "2023356T1848.2", // %Y%OT%h%.1m + "2023356T184815", // %Y%OT%h%m%s + "2023356T184815.0", // %Y%OT%h%m%.1s + "2023356T184815.08", // %Y%OT%h%m%.2s + "2023356T184815,083", // %Y%OT%h%m%,3s + "2023356T184815.083", // %Y%OT%h%m%.3s + "2023356T184815,083212", // %Y%OT%h%m%s,%u + "2023356T184815.083212", // %Y%OT%h%m%s.%u + "2023356T17Z", // %Y%OT%hZ + "2023356T17,8Z", // %Y%OT%,1hZ + "2023356T17.8Z", // %Y%OT%.1hZ + "2023356T1748Z", // %Y%OT%h%mZ + "2023356T1748,2Z", // %Y%OT%h%,1mZ + "2023356T1748.2Z", // %Y%OT%h%.1mZ + "2023356T174815Z", // %Y%OT%h%m%sZ + "2023356T174815.0Z", // %Y%OT%h%m%.1sZ + "2023356T174815.08Z", // %Y%OT%h%m%.2sZ + "2023356T174815,083Z", // %Y%OT%h%m%,3sZ + "2023356T174815.083Z", // %Y%OT%h%m%.3sZ + "2023356T174815,083212Z", // %Y%OT%h%m%s,%uZ + "2023356T174815.083212Z", // %Y%OT%h%m%s.%uZ + "2023356T18+01", // %Y%OT%h%Z + "2023356T18,8+01", // %Y%OT%,1h%Z + "2023356T18.8+01", // %Y%OT%.1h%Z + "2023356T1848+01", // %Y%OT%h%m%Z + "2023356T1848,2+01", // %Y%OT%h%,1m%Z + "2023356T1848.2+01", // %Y%OT%h%.1m%Z + "2023356T184815+01", // %Y%OT%h%m%s%Z + "2023356T184815.0+01", // %Y%OT%h%m%.1s%Z + "2023356T184815.08+01", // %Y%OT%h%m%.2s%Z + "2023356T184815,083+01", // %Y%OT%h%m%,3s%Z + "2023356T184815.083+01", // %Y%OT%h%m%.3s%Z + "2023356T184815,083212+01", // %Y%OT%h%m%s,%u%Z + "2023356T184815.083212+01", // %Y%OT%h%m%s.%u%Z + "2023356T18+0100", // %Y%OT%h%Z%z + "2023356T18,8+0100", // %Y%OT%,1h%Z%z + "2023356T18.8+0100", // %Y%OT%.1h%Z%z + "2023356T1848+0100", // %Y%OT%h%m%Z%z + "2023356T1848,2+0100", // %Y%OT%h%,1m%Z%z + "2023356T1848.2+0100", // %Y%OT%h%.1m%Z%z + "2023356T184815+0100", // %Y%OT%h%m%s%Z%z + "2023356T184815.0+0100", // %Y%OT%h%m%.1s%Z%z + "2023356T184815.08+0100", // %Y%OT%h%m%.2s%Z%z + "2023356T184815,083+0100", // %Y%OT%h%m%,3s%Z%z + "2023356T184815.083+0100", // 
%Y%OT%h%m%.3s%Z%z + "2023356T184815,083212+0100", // %Y%OT%h%m%s,%u%Z%z + "2023356T184815.083212+0100", // %Y%OT%h%m%s.%u%Z%z + "2023-12-23T01:48:15+08", // %Y-%M-%DT%h:%m:%s+08 + "2023-12-22T05-12", // %Y-%M-%DT%h-12 + "2023-12-22T05-12:00", // %Y-%M-%DT%h-12:00 + "2023-12-22T05:48-12", // %Y-%M-%DT%h:%m-12 + "2023-12-22T05:48-12:00", // %Y-%M-%DT%h:%m-12:00 + "2023-12-22 18:48", // %Y-%M-%D %h:%m + "2023-12-22 18:48:15", // %Y-%M-%D %h:%m:%s + "2023-12-22 18:48:15.0", // %Y-%M-%D %h:%m:%.1s + "2023-12-22 18:48:15.08", // %Y-%M-%D %h:%m:%.2s + "2023-12-22 18:48:15.083", // %Y-%M-%D %h:%m:%.3s + "2023-12-22 17:48Z", // %Y-%M-%D %h:%mZ + "2023-12-22 18:48+01:00", // %Y-%M-%D %h:%m%Z:%z + "2023-12-22T18:48+0100", // %Y-%M-%DT%h:%m%Z%z + "2023-12-22T18:48:15+0100", // %Y-%M-%DT%h:%m:%s%Z%z + "2023-12-22T18:48:15.0+0100", // %Y-%M-%DT%h:%m:%.1s%Z%z + "2023-12-22T18:48:15.08+0100", // %Y-%M-%DT%h:%m:%.2s%Z%z + "2023-12-22T18:48:15.083+0100", // %Y-%M-%DT%h:%m:%.3s%Z%z + "2023-12-22 18:48+0100", // %Y-%M-%D %h:%m%Z%z + "2023-12-22 18:48:15+0100", // %Y-%M-%D %h:%m:%s%Z%z + "2023-12-22 18:48:15.0+0100", // %Y-%M-%D %h:%m:%.1s%Z%z + "2023-12-22 18:48:15.08+0100", // %Y-%M-%D %h:%m:%.2s%Z%z + "2023-12-22 18:48:15.083+0100", // %Y-%M-%D %h:%m:%.3s%Z%z + "2023-12-23T02:33:15+0845", // %Y-%M-%DT%h:%m:%s+0845 + "2023-12-22T17:48:15+0000", // %Y-%M-%DT%h:%m:%s+0000 + "2023-12-22T17:48:15.083+0000", // %Y-%M-%DT%h:%m:%.3s+0000 + ]; + + let url_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/date-time/v/1", + "title": "Date Time", + "type": "string", + "format": "date-time", + })) + .expect("failed to serialize date time type"); + + let mut failed_formats = Vec::new(); + for format in VALID_FORMATS { + if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) + .await + .is_err() + { + failed_formats.push(format); 
+ } + } + assert!( + failed_formats.is_empty(), + "failed to validate formats: {failed_formats:#?}" + ); + + _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); + + let mut passed_formats = Vec::new(); + for format in INVALID_FORMATS { + if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) + .await + .is_ok() + { + passed_formats.push(format); + } + } + assert!( + passed_formats.is_empty(), + "passed invalid formats: {passed_formats:#?}" + ); +} + +#[tokio::test] +async fn date() { + const VALID_FORMATS: &[&str] = &[ + "2023-12-22", // %Y-%M-%D + ]; + + const INVALID_FORMATS: &[&str] = &[ + "20", // %C + "202", // %X + "2023", // %Y + "2023-12", // %Y-%M + "2023-356", // %Y-%O + "2023-W51", // %V-W%W + "2023-W51-5", // %V-W%W-%w + "20231222", // %Y%M%D + "2023356", // %Y%O + "2023W51", // %VW%W + "2023W515", // %VW%W%w + "--12-22", // --%M-%D + "12-22", // %M-%D + ]; + + let url_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/date/v/1", + "title": "Date", + "type": "string", + "format": "date", + })) + .expect("failed to serialize date type"); + + let mut failed_formats = Vec::new(); + for format in VALID_FORMATS { + if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) + .await + .is_err() + { + failed_formats.push(format); + } + } + assert!( + failed_formats.is_empty(), + "failed to validate formats: {failed_formats:#?}" + ); + + _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); + + let mut passed_formats = Vec::new(); + for format in INVALID_FORMATS { + if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) + .await + .is_ok() + { + passed_formats.push(format); + } + } + assert!( + 
passed_formats.is_empty(), + "passed invalid formats: {passed_formats:#?}" + ); +} + +#[tokio::test] +#[expect(clippy::too_many_lines, reason = "Most lines are just test data")] +async fn time() { + const VALID_FORMATS: &[&str] = &[ + "14:26:28+01:00", // %h:%m:%s%Z:%z + "14:26:28.9+01:00", // %h:%m:%.1s%Z:%z + "14:26:28.95+01:00", // %h:%m:%.2s%Z:%z + "14:26:28.950+01:00", // %h:%m:%.3s%Z:%z + "14:26:28.950086+01:00", // %h:%m:%s.%u%Z:%z + "13:26:28Z", // %h:%m:%sZ + "13:26:28.9Z", // %h:%m:%.1sZ + "13:26:28.95Z", // %h:%m:%.2sZ + "13:26:28.950Z", // %h:%m:%.3sZ + "13:26:28.950086Z", // %h:%m:%s.%uZ + "13:26:28+00:00", // %h:%m:%s+00:00 + "13:26:28.9+00:00", // %h:%m:%.1s+00:00 + "13:26:28.950+00:00", // %h:%m:%.3s+00:00 + "13:26:28.950086+00:00", // %h:%m:%s.%u+00:00 + "13:26:28-00:00", // %h:%m:%s-00:00 + "13:26:28.9-00:00", // %h:%m:%.1s-00:00 + "13:26:28.950-00:00", // %h:%m:%.3s-00:00 + "13:26:28.950086-00:00", // %h:%m:%s.%u-00:00 + ]; + + const INVALID_FORMATS: &[&str] = &[ + "14", // %h + "14,4", // %,1h + "14.4", // %.1h + "14:26", // %h:%m + "14:26,4", // %h:%,1m + "14:26.4", // %h:%.1m + "14:26:28", // %h:%m:%s + "14:26:28.9", // %h:%m:%.1s + "14:26:28.95", // %h:%m:%.2s + "14:26:28,950", // %h:%m:%,3s + "14:26:28.950", // %h:%m:%.3s + "14:26:28,950086", // %h:%m:%s,%u + "14:26:28.950086", // %h:%m:%s.%u + "13Z", // %hZ + "13,4Z", // %,1hZ + "13.4Z", // %.1hZ + "13:26Z", // %h:%mZ + "13:26,4Z", // %h:%,1mZ + "13:26.4Z", // %h:%.1mZ + "13:26:28,950Z", // %h:%m:%,3sZ + "13:26:28,950086Z", // %h:%m:%s,%uZ + "14+01", // %h%Z + "14,4+01", // %,1h%Z + "14.4+01", // %.1h%Z + "14:26+01", // %h:%m%Z + "14:26,4+01", // %h:%,1m%Z + "14:26.4+01", // %h:%.1m%Z + "14:26:28+01", // %h:%m:%s%Z + "14:26:28.9+01", // %h:%m:%.1s%Z + "14:26:28.95+01", // %h:%m:%.2s%Z + "14:26:28,950+01", // %h:%m:%,3s%Z + "14:26:28.950+01", // %h:%m:%.3s%Z + "14:26:28,950086+01", // %h:%m:%s,%u%Z + "14:26:28.950086+01", // %h:%m:%s.%u%Z + "14+01:00", // %h%Z:%z + "14,4+01:00", // %,1h%Z:%z 
+ "14.4+01:00", // %.1h%Z:%z + "14:26+01:00", // %h:%m%Z:%z + "14:26,4+01:00", // %h:%,1m%Z:%z + "14:26.4+01:00", // %h:%.1m%Z:%z + "14:26:28,950+01:00", // %h:%m:%,3s%Z:%z + "14:26:28,950086+01:00", // %h:%m:%s,%u%Z:%z + "T14", // T%h + "T14,4", // T%,1h + "T14.4", // T%.1h + "T14:26", // T%h:%m + "T14:26,4", // T%h:%,1m + "T14:26.4", // T%h:%.1m + "T14:26:28", // T%h:%m:%s + "T14:26:28.9", // T%h:%m:%.1s + "T14:26:28.95", // T%h:%m:%.2s + "T14:26:28,950", // T%h:%m:%,3s + "T14:26:28.950", // T%h:%m:%.3s + "T14:26:28,950086", // T%h:%m:%s,%u + "T14:26:28.950086", // T%h:%m:%s.%u + "T13Z", // T%hZ + "T13,4Z", // T%,1hZ + "T13.4Z", // T%.1hZ + "T13:26Z", // T%h:%mZ + "T13:26,4Z", // T%h:%,1mZ + "T13:26.4Z", // T%h:%.1mZ + "T13:26:28Z", // T%h:%m:%sZ + "T13:26:28.9Z", // T%h:%m:%.1sZ + "T13:26:28.95Z", // T%h:%m:%.2sZ + "T13:26:28,950Z", // T%h:%m:%,3sZ + "T13:26:28.950Z", // T%h:%m:%.3sZ + "T13:26:28,950086Z", // T%h:%m:%s,%uZ + "T13:26:28.950086Z", // T%h:%m:%s.%uZ + "T14+01", // T%h%Z + "T14,4+01", // T%,1h%Z + "T14.4+01", // T%.1h%Z + "T14:26+01", // T%h:%m%Z + "T14:26,4+01", // T%h:%,1m%Z + "T14:26.4+01", // T%h:%.1m%Z + "T14:26:28+01", // T%h:%m:%s%Z + "T14:26:28.9+01", // T%h:%m:%.1s%Z + "T14:26:28.95+01", // T%h:%m:%.2s%Z + "T14:26:28,950+01", // T%h:%m:%,3s%Z + "T14:26:28.950+01", // T%h:%m:%.3s%Z + "T14:26:28,950086+01", // T%h:%m:%s,%u%Z + "T14:26:28.950086+01", // T%h:%m:%s.%u%Z + "T14+01:00", // T%h%Z:%z + "T14,4+01:00", // T%,1h%Z:%z + "T14.4+01:00", // T%.1h%Z:%z + "T14:26+01:00", // T%h:%m%Z:%z + "T14:26,4+01:00", // T%h:%,1m%Z:%z + "T14:26.4+01:00", // T%h:%.1m%Z:%z + "T14:26:28+01:00", // T%h:%m:%s%Z:%z + "T14:26:28.9+01:00", // T%h:%m:%.1s%Z:%z + "T14:26:28.95+01:00", // T%h:%m:%.2s%Z:%z + "T14:26:28,950+01:00", // T%h:%m:%,3s%Z:%z + "T14:26:28.950+01:00", // T%h:%m:%.3s%Z:%z + "T14:26:28,950086+01:00", // T%h:%m:%s,%u%Z:%z + "T14:26:28.950086+01:00", // T%h:%m:%s.%u%Z:%z + "1426", // %h%m + "1426,4", // %h%,1m + "1426.4", // %h%.1m + "142628", // 
%h%m%s + "142628.9", // %h%m%.1s + "142628.95", // %h%m%.2s + "142628,950", // %h%m%,3s + "142628.950", // %h%m%.3s + "142628,950086", // %h%m%s,%u + "142628.950086", // %h%m%s.%u + "1326Z", // %h%mZ + "1326,4Z", // %h%,1mZ + "1326.4Z", // %h%.1mZ + "132628Z", // %h%m%sZ + "132628.9Z", // %h%m%.1sZ + "132628.95Z", // %h%m%.2sZ + "132628,950Z", // %h%m%,3sZ + "132628.950Z", // %h%m%.3sZ + "132628,950086Z", // %h%m%s,%uZ + "132628.950086Z", // %h%m%s.%uZ + "1426+01", // %h%m%Z + "1426,4+01", // %h%,1m%Z + "1426.4+01", // %h%.1m%Z + "142628+01", // %h%m%s%Z + "142628.9+01", // %h%m%.1s%Z + "142628.95+01", // %h%m%.2s%Z + "142628,950+01", // %h%m%,3s%Z + "142628.950+01", // %h%m%.3s%Z + "142628,950086+01", // %h%m%s,%u%Z + "142628.950086+01", // %h%m%s.%u%Z + "14+0100", // %h%Z%z + "14,4+0100", // %,1h%Z%z + "14.4+0100", // %.1h%Z%z + "1426+0100", // %h%m%Z%z + "1426,4+0100", // %h%,1m%Z%z + "1426.4+0100", // %h%.1m%Z%z + "142628+0100", // %h%m%s%Z%z + "142628.9+0100", // %h%m%.1s%Z%z + "142628.95+0100", // %h%m%.2s%Z%z + "142628,950+0100", // %h%m%,3s%Z%z + "142628.950+0100", // %h%m%.3s%Z%z + "142628,950086+0100", // %h%m%s,%u%Z%z + "142628.950086+0100", // %h%m%s.%u%Z%z + "T1426", // T%h%m + "T1426,4", // T%h%,1m + "T1426.4", // T%h%.1m + "T142628", // T%h%m%s + "T142628.9", // T%h%m%.1s + "T142628.95", // T%h%m%.2s + "T142628,950", // T%h%m%,3s + "T142628.950", // T%h%m%.3s + "T142628,950086", // T%h%m%s,%u + "T142628.950086", // T%h%m%s.%u + "T1326Z", // T%h%mZ + "T1326,4Z", // T%h%,1mZ + "T1326.4Z", // T%h%.1mZ + "T132628Z", // T%h%m%sZ + "T132628.9Z", // T%h%m%.1sZ + "T132628.95Z", // T%h%m%.2sZ + "T132628,950Z", // T%h%m%,3sZ + "T132628.950Z", // T%h%m%.3sZ + "T132628,950086Z", // T%h%m%s,%uZ + "T132628.950086Z", // T%h%m%s.%uZ + "T1426+01", // T%h%m%Z + "T1426,4+01", // T%h%,1m%Z + "T1426.4+01", // T%h%.1m%Z + "T142628+01", // T%h%m%s%Z + "T142628.9+01", // T%h%m%.1s%Z + "T142628.95+01", // T%h%m%.2s%Z + "T142628,950+01", // T%h%m%,3s%Z + "T142628.950+01", // 
T%h%m%.3s%Z + "T142628,950086+01", // T%h%m%s,%u%Z + "T142628.950086+01", // T%h%m%s.%u%Z + "T14+0100", // T%h%Z%z + "T14,4+0100", // T%,1h%Z%z + "T14.4+0100", // T%.1h%Z%z + "T1426+0100", // T%h%m%Z%z + "T1426,4+0100", // T%h%,1m%Z%z + "T1426.4+0100", // T%h%.1m%Z%z + "T142628+0100", // T%h%m%s%Z%z + "T142628.9+0100", // T%h%m%.1s%Z%z + "T142628.95+0100", // T%h%m%.2s%Z%z + "T142628,950+0100", // T%h%m%,3s%Z%z + "T142628.950+0100", // T%h%m%.3s%Z%z + "T142628,950086+0100", // T%h%m%s,%u%Z%z + "T142628.950086+0100", // T%h%m%s.%u%Z%z + ]; + + let url_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/time/v/1", + "title": "Time", + "type": "string", + "format": "time", + })) + .expect("failed to serialize time type"); + + let mut failed_formats = Vec::new(); + for format in VALID_FORMATS { + if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) + .await + .is_err() + { + failed_formats.push(format); + } + } + assert!( + failed_formats.is_empty(), + "failed to validate formats: {failed_formats:#?}" + ); + + _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); + + let mut passed_formats = Vec::new(); + for format in INVALID_FORMATS { + if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) + .await + .is_ok() + { + passed_formats.push(format); + } + } + assert!( + passed_formats.is_empty(), + "passed invalid formats: {passed_formats:#?}" + ); +} + +#[tokio::test] +async fn duration() { + // TODO: Allow durations which are allowed in ISO8601 + const VALID_FORMATS: &[&str] = &[ + "P1Y", + // "P1,5Y", + "P1.5Y", + "P1M", + "P1W", + "P1D", + "PT1H", + // "P1H", + "PT1M", + "PT1S", + // "P1S", + // "PT1,5S", + "PT1.5S", + "P1Y1M", + "P1Y1D", + "P1Y1M1D", + "P1Y1M1DT1H1M1S", + "P1DT1H", + "P1MT1M", + "P1DT1M", + 
"P1.5W", + // "P1,5W", + "P1DT1.000S", + "P1DT1.00000S", + "P1DT1H1M1.1S", + // "P1H1M1.1S", + ]; + const INVALID_FORMATS: &[&str] = &[ + "1W1M1S", + "1S1M1H1W", + "1 W", + "1.5W", + "1 D 1 W", + "1.5 S 1.5 M", + "1H 15 M", + ]; + + let url_type = serde_json::to_string(&json!({ + "$schema": "https://blockprotocol.org/types/modules/graph/0.3/schema/data-type", + "kind": "dataType", + "$id": "https://localhost:4000/@alice/types/data-type/duration/v/1", + "title": "Duration", + "type": "string", + "format": "duration", + })) + .expect("failed to serialize duration type"); + + let mut failed_formats = Vec::new(); + for format in VALID_FORMATS { + if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) + .await + .is_err() + { + failed_formats.push(format); + } + } + assert!( + failed_formats.is_empty(), + "failed to validate formats: {failed_formats:#?}" + ); + + _ = validate_data(json!(""), &url_type, ValidateEntityComponents::full()) + .await + .expect_err("validation succeeded"); + + let mut passed_formats = Vec::new(); + for format in INVALID_FORMATS { + if validate_data(json!(format), &url_type, ValidateEntityComponents::full()) + .await + .is_ok() + { + passed_formats.push(format); + } + } + assert!( + passed_formats.is_empty(), + "passed invalid formats: {passed_formats:#?}" + ); +} diff --git a/libs/@local/hash-validation/src/test_property_type.rs b/libs/@local/hash-validation/src/test_property_type.rs new file mode 100644 index 00000000000..328de7a429d --- /dev/null +++ b/libs/@local/hash-validation/src/test_property_type.rs @@ -0,0 +1,384 @@ +#![cfg(test)] + +use core::str::FromStr; + +use graph_types::knowledge::property::{PropertyMetadata, PropertyProvenance, ValueMetadata}; +use serde_json::json; +use type_system::url::VersionedUrl; + +use crate::{tests::validate_property, ValidateEntityComponents}; + +#[tokio::test] +async fn address_line_1() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + 
validate_property( + json!("123 Fake Street"), + None, + graph_test_data::property_type::ADDRESS_LINE_1_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn age() { + let property_types = []; + let data_types = [graph_test_data::data_type::NUMBER_V1]; + + validate_property( + json!(42), + None, + graph_test_data::property_type::AGE_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn blurb() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("blurb"), + None, + graph_test_data::property_type::BLURB_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn city() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("Bielefeld"), + None, + graph_test_data::property_type::CITY_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn contact_information() { + let property_types = [ + graph_test_data::property_type::EMAIL_V1, + graph_test_data::property_type::PHONE_NUMBER_V1, + ]; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json! 
({ + "https://blockprotocol.org/@alice/types/property-type/email/": "alice@example", + "https://blockprotocol.org/@alice/types/property-type/phone-number/": "+0123456789", + }), + None, + graph_test_data::property_type::CONTACT_INFORMATION_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn contrived_information() { + let property_types = []; + let data_types = [graph_test_data::data_type::NUMBER_V1]; + + validate_property( + json!([12, 34, 56, 78]), + None, + graph_test_data::property_type::CONTRIVED_PROPERTY_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_property( + json!(12_34_56_78), + None, + graph_test_data::property_type::CONTRIVED_PROPERTY_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_property( + json!([10, 20, 30, 40, 50]), + None, + graph_test_data::property_type::CONTRIVED_PROPERTY_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); +} + +#[tokio::test] +async fn email() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("alice@example.com"), + None, + graph_test_data::property_type::EMAIL_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn favorite_film() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("Teletubbies"), + None, + graph_test_data::property_type::FAVORITE_FILM_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn favorite_quote() { + let property_types = []; + let data_types = 
[graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("hold my beer"), + None, + graph_test_data::property_type::FAVORITE_QUOTE_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn favorite_song() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("Never gonna give you up"), + None, + graph_test_data::property_type::FAVORITE_SONG_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn favorite_hobby() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("Programming in Rust"), + None, + graph_test_data::property_type::HOBBY_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn numbers() { + let property_types = []; + let data_types = [graph_test_data::data_type::NUMBER_V1]; + + validate_property( + json!([1, 2, 3, 4, 5]), + None, + graph_test_data::property_type::NUMBERS_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn phone_number() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("+0123456789"), + None, + graph_test_data::property_type::PHONE_NUMBER_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn postcode() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("12345"), + None, + graph_test_data::property_type::POSTCODE_NUMBER_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + 
.expect("validation failed"); +} + +#[tokio::test] +async fn published_on() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("2021-01-01T00:00:00Z"), + None, + graph_test_data::property_type::PUBLISHED_ON_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn text() { + let property_types = []; + let data_types = [graph_test_data::data_type::TEXT_V1]; + + validate_property( + json!("lorem ipsum"), + None, + graph_test_data::property_type::TEXT_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} + +#[tokio::test] +async fn user_id() { + let property_types = []; + let data_types = [ + graph_test_data::data_type::TEXT_V1, + graph_test_data::data_type::NUMBER_V1, + ]; + + validate_property( + json!("1"), + None, + graph_test_data::property_type::USER_ID_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + _ = validate_property( + json!(1), + None, + graph_test_data::property_type::USER_ID_V1, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect_err("validation succeeded"); + + validate_property( + json!("1"), + Some(PropertyMetadata::Value { + metadata: ValueMetadata { + provenance: PropertyProvenance::default(), + confidence: None, + data_type_id: Some( + VersionedUrl::from_str( + "https://blockprotocol.org/@blockprotocol/types/data-type/text/v/1", + ) + .expect("invalid data type ID"), + ), + }, + }), + graph_test_data::property_type::USER_ID_V2, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); + + validate_property( + json!(1), + Some(PropertyMetadata::Value { + metadata: ValueMetadata { + provenance: PropertyProvenance::default(), + confidence: None, + data_type_id: Some( 
+ VersionedUrl::from_str( + "https://blockprotocol.org/@blockprotocol/types/data-type/number/v/1", + ) + .expect("invalid data type ID"), + ), + }, + }), + graph_test_data::property_type::USER_ID_V2, + property_types, + data_types, + ValidateEntityComponents::full(), + ) + .await + .expect("validation failed"); +} diff --git a/tests/hash-graph-benches/read_scaling/knowledge/complete/entity.rs b/tests/hash-graph-benches/read_scaling/knowledge/complete/entity.rs index beda705141d..31052b508d9 100644 --- a/tests/hash-graph-benches/read_scaling/knowledge/complete/entity.rs +++ b/tests/hash-graph-benches/read_scaling/knowledge/complete/entity.rs @@ -25,7 +25,7 @@ use graph_types::{ knowledge::{ entity::{Entity, ProvidedEntityEditionProvenance}, link::LinkData, - PropertyObject, PropertyProvenance, PropertyWithMetadataObject, + property::{PropertyObject, PropertyProvenance, PropertyWithMetadataObject}, }, owned_by_id::OwnedById, }; diff --git a/tests/hash-graph-benches/read_scaling/knowledge/linkless/entity.rs b/tests/hash-graph-benches/read_scaling/knowledge/linkless/entity.rs index c941e24ee72..a8823154bec 100644 --- a/tests/hash-graph-benches/read_scaling/knowledge/linkless/entity.rs +++ b/tests/hash-graph-benches/read_scaling/knowledge/linkless/entity.rs @@ -20,7 +20,7 @@ use graph_types::{ account::AccountId, knowledge::{ entity::{Entity, ProvidedEntityEditionProvenance}, - PropertyObject, PropertyWithMetadataObject, + property::{PropertyObject, PropertyWithMetadataObject}, }, owned_by_id::OwnedById, }; diff --git a/tests/hash-graph-benches/representative_read/seed.rs b/tests/hash-graph-benches/representative_read/seed.rs index 09eceaae2db..a43b379c1b8 100644 --- a/tests/hash-graph-benches/representative_read/seed.rs +++ b/tests/hash-graph-benches/representative_read/seed.rs @@ -13,7 +13,7 @@ use graph_types::{ knowledge::{ entity::{EntityUuid, ProvidedEntityEditionProvenance}, link::LinkData, - PropertyObject, PropertyProvenance, PropertyWithMetadataObject, + 
property::{PropertyObject, PropertyProvenance, PropertyWithMetadataObject}, }, owned_by_id::OwnedById, }; diff --git a/tests/hash-graph-integration/postgres/data_type.rs b/tests/hash-graph-integration/postgres/data_type.rs index cb77f25e0cf..37c1ad0bdec 100644 --- a/tests/hash-graph-integration/postgres/data_type.rs +++ b/tests/hash-graph-integration/postgres/data_type.rs @@ -15,8 +15,11 @@ use graph::{ }; use graph_types::{ knowledge::{ - entity::ProvidedEntityEditionProvenance, ObjectMetadata, PropertyProvenance, - PropertyWithMetadata, PropertyWithMetadataObject, ValueMetadata, ValueWithMetadata, + entity::ProvidedEntityEditionProvenance, + property::{ + ObjectMetadata, PropertyProvenance, PropertyWithMetadata, PropertyWithMetadataObject, + PropertyWithMetadataValue, ValueMetadata, + }, }, ontology::{ DataTypeId, DataTypeWithMetadata, OntologyTypeClassificationMetadata, @@ -292,7 +295,7 @@ async fn inheritance() { "http://localhost:3000/@alice/types/property-type/length/".to_owned(), ) .expect("couldn't construct Base URL"), - PropertyWithMetadata::Value(ValueWithMetadata { + PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!(5), metadata: ValueMetadata { provenance: PropertyProvenance::default(), @@ -332,7 +335,7 @@ async fn inheritance() { "http://localhost:3000/@alice/types/property-type/length/".to_owned(), ) .expect("couldn't construct Base URL"), - PropertyWithMetadata::Value(ValueWithMetadata { + PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!(10), metadata: ValueMetadata { provenance: PropertyProvenance::default(), @@ -371,7 +374,7 @@ async fn inheritance() { "http://localhost:3000/@alice/types/property-type/length/".to_owned(), ) .expect("couldn't construct Base URL"), - PropertyWithMetadata::Value(ValueWithMetadata { + PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!(10), metadata: ValueMetadata { provenance: PropertyProvenance::default(), diff --git 
a/tests/hash-graph-integration/postgres/drafts.rs b/tests/hash-graph-integration/postgres/drafts.rs index fada2978d78..c43bd87f462 100644 --- a/tests/hash-graph-integration/postgres/drafts.rs +++ b/tests/hash-graph-integration/postgres/drafts.rs @@ -9,8 +9,10 @@ use graph_test_data::{data_type, entity, entity_type, property_type}; use graph_types::{ knowledge::{ entity::{EntityId, ProvidedEntityEditionProvenance}, - Property, PropertyObject, PropertyPatchOperation, PropertyPath, PropertyWithMetadata, - PropertyWithMetadataObject, + property::{ + Property, PropertyObject, PropertyPatchOperation, PropertyPath, PropertyWithMetadata, + PropertyWithMetadataObject, + }, }, owned_by_id::OwnedById, }; diff --git a/tests/hash-graph-integration/postgres/entity.rs b/tests/hash-graph-integration/postgres/entity.rs index cb8fb14837b..3b2db49dd6f 100644 --- a/tests/hash-graph-integration/postgres/entity.rs +++ b/tests/hash-graph-integration/postgres/entity.rs @@ -15,8 +15,11 @@ use graph::{ use graph_test_data::{data_type, entity, entity_type, property_type}; use graph_types::{ knowledge::{ - entity::ProvidedEntityEditionProvenance, Property, PropertyObject, PropertyPatchOperation, - PropertyPath, PropertyWithMetadata, PropertyWithMetadataObject, + entity::ProvidedEntityEditionProvenance, + property::{ + Property, PropertyObject, PropertyPatchOperation, PropertyPath, PropertyWithMetadata, + PropertyWithMetadataObject, + }, }, owned_by_id::OwnedById, }; diff --git a/tests/hash-graph-integration/postgres/interconnected_graph.rs b/tests/hash-graph-integration/postgres/interconnected_graph.rs index 0a3792ec647..873c7ebffca 100644 --- a/tests/hash-graph-integration/postgres/interconnected_graph.rs +++ b/tests/hash-graph-integration/postgres/interconnected_graph.rs @@ -7,7 +7,7 @@ use graph_types::{ knowledge::{ entity::{EntityId, EntityUuid, ProvidedEntityEditionProvenance}, link::LinkData, - PropertyObject, PropertyProvenance, PropertyWithMetadataObject, + property::{PropertyObject, 
PropertyProvenance, PropertyWithMetadataObject}, }, owned_by_id::OwnedById, }; diff --git a/tests/hash-graph-integration/postgres/links.rs b/tests/hash-graph-integration/postgres/links.rs index cae55a96147..966dba28311 100644 --- a/tests/hash-graph-integration/postgres/links.rs +++ b/tests/hash-graph-integration/postgres/links.rs @@ -22,8 +22,9 @@ use graph::{ use graph_test_data::{data_type, entity, entity_type, property_type}; use graph_types::{ knowledge::{ - entity::ProvidedEntityEditionProvenance, link::LinkData, PropertyObject, - PropertyProvenance, PropertyWithMetadataObject, + entity::ProvidedEntityEditionProvenance, + link::LinkData, + property::{PropertyObject, PropertyProvenance, PropertyWithMetadataObject}, }, owned_by_id::OwnedById, }; diff --git a/tests/hash-graph-integration/postgres/multi_type.rs b/tests/hash-graph-integration/postgres/multi_type.rs index 7efdc0727a2..8f1fa359a72 100644 --- a/tests/hash-graph-integration/postgres/multi_type.rs +++ b/tests/hash-graph-integration/postgres/multi_type.rs @@ -16,7 +16,7 @@ use graph_test_data::{data_type, entity, entity_type, property_type}; use graph_types::{ knowledge::{ entity::{Entity, ProvidedEntityEditionProvenance}, - PropertyObject, PropertyWithMetadataObject, + property::{PropertyObject, PropertyWithMetadataObject}, }, owned_by_id::OwnedById, }; diff --git a/tests/hash-graph-integration/postgres/partial_updates.rs b/tests/hash-graph-integration/postgres/partial_updates.rs index 2386db0543d..bbe7ca070dc 100644 --- a/tests/hash-graph-integration/postgres/partial_updates.rs +++ b/tests/hash-graph-integration/postgres/partial_updates.rs @@ -15,9 +15,12 @@ use graph::{ use graph_test_data::{data_type, entity, entity_type, property_type}; use graph_types::{ knowledge::{ - entity::ProvidedEntityEditionProvenance, PropertyObject, PropertyPatchOperation, - PropertyPathElement, PropertyProvenance, PropertyWithMetadata, PropertyWithMetadataObject, - ValueMetadata, ValueWithMetadata, + 
entity::ProvidedEntityEditionProvenance, + property::{ + PropertyObject, PropertyPatchOperation, PropertyPathElement, PropertyProvenance, + PropertyWithMetadata, PropertyWithMetadataObject, PropertyWithMetadataValue, + ValueMetadata, + }, }, owned_by_id::OwnedById, }; @@ -111,7 +114,7 @@ async fn properties_add() { properties: vec![ PropertyPatchOperation::Add { path: once(PropertyPathElement::from(age_property_type_id())).collect(), - property: PropertyWithMetadata::Value(ValueWithMetadata { + property: PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!(30), metadata: ValueMetadata { confidence: None, @@ -122,7 +125,7 @@ async fn properties_add() { }, PropertyPatchOperation::Add { path: once(PropertyPathElement::from(name_property_type_id())).collect(), - property: PropertyWithMetadata::Value(ValueWithMetadata { + property: PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!("Alice Allison"), metadata: ValueMetadata { confidence: None, @@ -277,7 +280,7 @@ async fn properties_replace() { entity_type_ids: HashSet::new(), properties: vec![PropertyPatchOperation::Replace { path: once(PropertyPathElement::from(name_property_type_id())).collect(), - property: PropertyWithMetadata::Value(ValueWithMetadata { + property: PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!("Bob"), metadata: ValueMetadata { confidence: None, diff --git a/tests/hash-graph-integration/postgres/property_metadata.rs b/tests/hash-graph-integration/postgres/property_metadata.rs index acbbde5049f..8d1ec16fbbb 100644 --- a/tests/hash-graph-integration/postgres/property_metadata.rs +++ b/tests/hash-graph-integration/postgres/property_metadata.rs @@ -11,9 +11,13 @@ use graph_test_data::{data_type, entity, entity_type, property_type}; use graph_types::{ knowledge::{ entity::{Location, ProvidedEntityEditionProvenance, SourceProvenance, SourceType}, - Confidence, ObjectMetadata, PropertyMetadata, PropertyMetadataObject, PropertyObject, - 
PropertyPatchOperation, PropertyPath, PropertyPathElement, PropertyProvenance, - PropertyWithMetadata, PropertyWithMetadataObject, ValueMetadata, ValueWithMetadata, + property::{ + ObjectMetadata, PropertyMetadata, PropertyMetadataObject, PropertyObject, + PropertyPatchOperation, PropertyPath, PropertyPathElement, PropertyProvenance, + PropertyWithMetadata, PropertyWithMetadataObject, PropertyWithMetadataValue, + ValueMetadata, + }, + Confidence, }, owned_by_id::OwnedById, }; @@ -70,6 +74,14 @@ fn film_property_type_id() -> BaseUrl { BaseUrl::new("https://blockprotocol.org/@alice/types/property-type/favorite-film/".to_owned()) .expect("couldn't construct Base URL") } +fn text_data_type_id() -> VersionedUrl { + VersionedUrl::from_str("https://blockprotocol.org/@blockprotocol/types/data-type/text/v/1") + .expect("couldn't construct data type id") +} +fn number_data_type_id() -> VersionedUrl { + VersionedUrl::from_str("https://blockprotocol.org/@blockprotocol/types/data-type/number/v/1") + .expect("couldn't construct data type id") +} fn property_provenance_a() -> PropertyProvenance { PropertyProvenance { @@ -179,7 +191,25 @@ async fn initial_metadata() { .expect("could not create entity"); assert_eq!(entity.metadata.confidence, Confidence::new(0.5)); - assert_eq!(entity.metadata.properties, entity_property_metadata); + assert_eq!( + entity.metadata.properties, + PropertyMetadataObject { + value: HashMap::from([( + name_property_type_id(), + PropertyMetadata::Value { + metadata: ValueMetadata { + provenance: property_provenance_a(), + confidence: Confidence::new(0.5), + data_type_id: Some(text_data_type_id()), + }, + }, + )]), + metadata: ObjectMetadata { + provenance: PropertyProvenance::default(), + confidence: Confidence::new(0.8), + }, + } + ); let name_property_metadata = ValueMetadata { provenance: property_provenance_a(), @@ -196,7 +226,7 @@ async fn initial_metadata() { name_property_type_id(), ))) .collect(), - property: 
PropertyWithMetadata::Value(ValueWithMetadata { + property: PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!("Bob"), metadata: name_property_metadata.clone(), }), @@ -218,7 +248,11 @@ async fn initial_metadata() { value: HashMap::from([( name_property_type_id(), PropertyMetadata::Value { - metadata: name_property_metadata + metadata: ValueMetadata { + provenance: property_provenance_a(), + confidence: Confidence::new(0.6), + data_type_id: Some(text_data_type_id()), + } } )]), metadata: ObjectMetadata { @@ -288,7 +322,7 @@ async fn no_initial_metadata() { metadata: ValueMetadata { provenance: PropertyProvenance::default(), confidence: None, - data_type_id: None, + data_type_id: Some(text_data_type_id()) }, }, )]), @@ -342,7 +376,7 @@ async fn no_initial_metadata() { metadata: ValueMetadata { provenance: PropertyProvenance::default(), confidence: None, - data_type_id: None, + data_type_id: Some(text_data_type_id()) }, }, )]), @@ -357,7 +391,7 @@ async fn no_initial_metadata() { entity_id: entity.metadata.record_id.entity_id, properties: vec![PropertyPatchOperation::Replace { path: once(PropertyPathElement::from(name_property_type_id())).collect(), - property: PropertyWithMetadata::Value(ValueWithMetadata { + property: PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!("Alice"), metadata: ValueMetadata { confidence: Confidence::new(0.5), @@ -387,7 +421,7 @@ async fn no_initial_metadata() { metadata: ValueMetadata { provenance: PropertyProvenance::default(), confidence: Confidence::new(0.5), - data_type_id: None, + data_type_id: Some(text_data_type_id()) }, }, )]), @@ -422,7 +456,7 @@ async fn no_initial_metadata() { metadata: ValueMetadata { provenance: PropertyProvenance::default(), confidence: Confidence::new(0.5), - data_type_id: None, + data_type_id: Some(text_data_type_id()) }, }, )]), @@ -471,7 +505,7 @@ async fn properties_add() { entity_type_ids: HashSet::new(), properties: vec![PropertyPatchOperation::Add { path: 
path.clone(), - property: PropertyWithMetadata::Value(ValueWithMetadata { + property: PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!(30), metadata: ValueMetadata { confidence: Confidence::new(0.5), @@ -499,7 +533,7 @@ async fn properties_add() { metadata: ValueMetadata { provenance: PropertyProvenance::default(), confidence: None, - data_type_id: None, + data_type_id: Some(text_data_type_id()) }, }, ), @@ -509,7 +543,7 @@ async fn properties_add() { metadata: ValueMetadata { provenance: PropertyProvenance::default(), confidence: Confidence::new(0.5), - data_type_id: None, + data_type_id: Some(number_data_type_id()) }, }, ) @@ -566,17 +600,17 @@ async fn properties_remove() { PropertyPatchOperation::Add { path: once(PropertyPathElement::from(interests_property_type_id())) .collect(), - property: PropertyWithMetadata::Object { + property: PropertyWithMetadata::Object(PropertyWithMetadataObject { value: HashMap::new(), metadata: ObjectMetadata { confidence: Confidence::new(0.4), provenance: property_provenance_a(), }, - }, + }), }, PropertyPatchOperation::Add { path: film_path.clone(), - property: PropertyWithMetadata::Value(ValueWithMetadata { + property: PropertyWithMetadata::Value(PropertyWithMetadataValue { value: json!("Fight Club"), metadata: ValueMetadata { confidence: Confidence::new(0.5), @@ -605,7 +639,7 @@ async fn properties_remove() { metadata: ValueMetadata { provenance: PropertyProvenance::default(), confidence: None, - data_type_id: None, + data_type_id: Some(text_data_type_id()) }, }, ), @@ -618,7 +652,7 @@ async fn properties_remove() { metadata: ValueMetadata { provenance: property_provenance_b(), confidence: Confidence::new(0.5), - data_type_id: None, + data_type_id: Some(text_data_type_id()) }, }, )]), @@ -661,7 +695,7 @@ async fn properties_remove() { metadata: ValueMetadata { provenance: PropertyProvenance::default(), confidence: None, - data_type_id: None, + data_type_id: Some(text_data_type_id()) }, }, )]), diff --git 
a/tests/hash-graph-integration/postgres/sorting.rs b/tests/hash-graph-integration/postgres/sorting.rs index 0d6a0b4d0e0..7c3047ecd67 100644 --- a/tests/hash-graph-integration/postgres/sorting.rs +++ b/tests/hash-graph-integration/postgres/sorting.rs @@ -17,7 +17,7 @@ use graph_test_data::{data_type, entity, entity_type, property_type}; use graph_types::{ knowledge::{ entity::{EntityUuid, ProvidedEntityEditionProvenance}, - PropertyObject, PropertyWithMetadataObject, + property::{PropertyObject, PropertyWithMetadataObject}, }, owned_by_id::OwnedById, };