diff --git a/Cargo.lock b/Cargo.lock index 60c7ba8..5d27fe7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -938,6 +938,7 @@ dependencies = [ "grc20-core", "proc-macro2", "quote", + "stringcase", "syn 2.0.96", "testcontainers", "tokio", @@ -949,11 +950,14 @@ dependencies = [ name = "grc20-sdk" version = "0.1.0" dependencies = [ + "async-stream", "chrono", "futures", "grc20-core", "serde", "serde_json", + "tokio", + "tracing-subscriber", "web3-utils", ] @@ -2669,6 +2673,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "stringcase" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72abeda133c49d7bddece6c154728f83eec8172380c80ab7096da9487e20d27c" + [[package]] name = "strsim" version = "0.11.1" diff --git a/api/src/schema/account.rs b/api/src/schema/account.rs index 4692415..7d84d75 100644 --- a/api/src/schema/account.rs +++ b/api/src/schema/account.rs @@ -33,7 +33,7 @@ impl Account { impl Account { /// Account ID fn id(&self) -> &str { - &self.entity.id + self.entity.id() } /// Ethereum address of the account diff --git a/api/src/schema/entity.rs b/api/src/schema/entity.rs index 45f0831..49ff161 100644 --- a/api/src/schema/entity.rs +++ b/api/src/schema/entity.rs @@ -1,4 +1,5 @@ use futures::TryStreamExt; +use grc20_sdk::models::property; use juniper::{graphql_object, Executor, FieldResult, ScalarValue}; use grc20_core::{ @@ -19,17 +20,24 @@ use super::{AttributeFilter, EntityRelationFilter, EntityVersion}; #[derive(Debug)] pub struct Entity { - node: EntityNode, - space_id: String, - space_version: Option, + pub node: EntityNode, + pub space_id: String, + pub space_version: Option, + pub strict: bool, } impl Entity { - pub fn new(node: EntityNode, space_id: String, space_version: Option) -> Self { + pub fn new( + node: EntityNode, + space_id: String, + space_version: Option, + strict: bool, + ) -> Self { Self { node, space_id, space_version, + strict, } } @@ -38,6 +46,7 @@ impl Entity { id: impl Into, space_id: impl Into, space_version: Option, + strict: bool, ) -> FieldResult> { let id = id.into(); let space_id = space_id.into(); @@ -45,7 +54,7 @@ impl Entity { Ok(entity_node::find_one(neo4j, id) .send() .await? 
- .map(|node| Entity::new(node, space_id, space_version))) + .map(|node| Entity::new(node, space_id, space_version, strict))) } } @@ -54,66 +63,84 @@ impl Entity { /// Entity object impl Entity { /// Entity ID - fn id(&self) -> &str { + pub fn id(&self) -> &str { &self.node.id } + /// The space ID of the entity (note: the same entity can exist in multiple spaces) + pub fn space_id(&self) -> &str { + &self.space_id + } + + pub fn created_at(&self) -> String { + self.node.system_properties.created_at.to_rfc3339() + } + + pub fn created_at_block(&self) -> &str { + &self.node.system_properties.created_at_block + } + + pub fn updated_at(&self) -> String { + self.node.system_properties.updated_at.to_rfc3339() + } + + pub fn updated_at_block(&self) -> &str { + &self.node.system_properties.updated_at_block + } + /// Entity name (if available) - async fn name<'a, S: ScalarValue>( + pub async fn name<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, - ) -> Option { - triple::find_one( + ) -> FieldResult> { + Ok(property::get_triple( &executor.context().0, system_ids::NAME_ATTRIBUTE, &self.node.id, &self.space_id, self.space_version.clone(), + self.strict, ) - .send() - .await - .expect("Failed to find name") - .map(|triple| triple.value.value) + .await? + .map(|triple| triple.value.value)) } /// Entity description (if available) - async fn description<'a, S: ScalarValue>( + pub async fn description<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, - ) -> Option { - triple::find_one( + ) -> FieldResult> { + Ok(property::get_triple( &executor.context().0, system_ids::DESCRIPTION_ATTRIBUTE, &self.node.id, &self.space_id, self.space_version.clone(), + self.strict, ) - .send() - .await - .expect("Failed to find name") - .map(|triple| triple.value.value) + .await? + .map(|triple| triple.value.value)) } /// Entity cover (if available) - async fn cover<'a, S: ScalarValue>( + pub async fn cover<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, - ) -> Option { - triple::find_one( + ) -> FieldResult> { + Ok(property::get_triple( &executor.context().0, system_ids::COVER_ATTRIBUTE, &self.node.id, &self.space_id, self.space_version.clone(), + self.strict, ) - .send() - .await - .expect("Failed to find name") - .map(|triple| triple.value.value) + .await? + .map(|triple| triple.value.value)) } /// Entity blocks (if available) - async fn blocks<'a, S: ScalarValue>( + pub async fn blocks<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, ) -> FieldResult> { @@ -136,13 +163,20 @@ impl Entity { )) .send() .await? - .map_ok(|node| Entity::new(node, self.space_id.clone(), self.space_version.clone())) + .map_ok(|node| { + Entity::new( + node, + self.space_id.clone(), + self.space_version.clone(), + self.strict, + ) + }) .try_collect::>() .await?) } /// Types of the entity (which are entities themselves) - async fn types<'a, S: ScalarValue>( + pub async fn types<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, ) -> FieldResult> { @@ -165,38 +199,24 @@ impl Entity { )) .send() .await? - .map_ok(|node| Entity::new(node, self.space_id.clone(), self.space_version.clone())) + .map_ok(|node| { + Entity::new( + node, + self.space_id.clone(), + self.space_version.clone(), + self.strict, + ) + }) .try_collect::>() .await?) 
} - /// The space ID of the entity (note: the same entity can exist in multiple spaces) - fn space_id(&self) -> &str { - &self.space_id - } - - fn created_at(&self) -> String { - self.node.system_properties.created_at.to_rfc3339() - } - - fn created_at_block(&self) -> &str { - &self.node.system_properties.created_at_block - } - - fn updated_at(&self) -> String { - self.node.system_properties.updated_at.to_rfc3339() - } - - fn updated_at_block(&self) -> &str { - &self.node.system_properties.updated_at_block - } - // TODO: Add entity attributes filtering /// Attributes of the entity - async fn attributes( + pub async fn attributes( &self, - _filter: Option, executor: &'_ Executor<'_, '_, KnowledgeGraph, S>, + _filter: Option, ) -> FieldResult> { let mut query = triple::find_many(&executor.context().0) .entity_id(prop_filter::value(&self.node.id)) @@ -215,10 +235,10 @@ impl Entity { } /// Relations outgoing from the entity - async fn relations<'a, S: ScalarValue>( + pub async fn relations<'a, S: ScalarValue>( &'a self, - r#where: Option, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + r#where: Option, ) -> FieldResult> { let mut base_query = self.node.get_outbound_relations( &executor.context().0, @@ -234,7 +254,12 @@ impl Entity { .send() .await? .map_ok(|relation| { - Relation::new(relation, self.space_id.clone(), self.space_version.clone()) + Relation::new( + relation, + self.space_id.clone(), + self.space_version.clone(), + self.strict, + ) }) .try_collect::>() .await?) @@ -242,7 +267,7 @@ impl Entity { // TODO: Add version filtering (e.g.: time range, edit author) /// Versions of the entity, ordered chronologically - async fn versions<'a, S: ScalarValue>( + pub async fn versions<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, ) -> FieldResult> { diff --git a/api/src/schema/entity_filter.rs b/api/src/schema/entity_filter.rs index d9696f1..d45a631 100644 --- a/api/src/schema/entity_filter.rs +++ b/api/src/schema/entity_filter.rs @@ -2,7 +2,7 @@ use juniper::GraphQLInputObject; use grc20_core::{ mapping::{ - entity_node::{self}, + self, query_utils::{edge_filter::EdgeFilter, prop_filter, PropFilter}, relation_node, }, @@ -65,8 +65,8 @@ impl EntityFilter { filter } - fn types_filter(&self) -> entity_node::EntityRelationFilter { - let mut filter = entity_node::EntityRelationFilter::default(); + fn types_filter(&self) -> mapping::EntityRelationFilter { + let mut filter = mapping::EntityRelationFilter::default(); // if let Some(types) = &self.types { // filter = filter.to_id(EdgeFilter::default().to_id(prop_filter::value_in(types.clone()))); @@ -97,10 +97,10 @@ impl EntityFilter { } } -impl From for entity_node::EntityFilter { +impl From for mapping::EntityFilter { fn from(filter: EntityFilter) -> Self { // TODO: Add types filter - entity_node::EntityFilter::default() + mapping::EntityFilter::default() .id(filter.id_filter()) .relations(filter.types_filter()) .attributes( diff --git a/api/src/schema/mod.rs b/api/src/schema/mod.rs index 550975f..aefd558 100644 --- a/api/src/schema/mod.rs +++ b/api/src/schema/mod.rs @@ -5,12 +5,15 @@ pub mod entity; pub mod entity_filter; pub mod entity_order_by; pub mod entity_version; +pub mod property; pub mod query; pub mod relation; pub mod relation_filter; +pub mod schema_type; pub mod space; pub mod space_filter; pub mod triple; +pub mod triple_filter; pub use account::Account; pub use account_filter::AccountFilter; @@ -18,9 +21,11 @@ pub use attribute_filter::EntityAttributeFilter; pub use entity::Entity; pub use 
entity_filter::{AttributeFilter, EntityFilter, EntityRelationFilter}; pub use entity_version::EntityVersion; +pub use property::Property; pub use query::RootQuery; pub use relation::Relation; pub use relation_filter::RelationFilter; +pub use schema_type::SchemaType; pub use space::Space; pub use space_filter::SpaceFilter; pub use triple::Triple; diff --git a/api/src/schema/property.rs b/api/src/schema/property.rs new file mode 100644 index 0000000..4c14336 --- /dev/null +++ b/api/src/schema/property.rs @@ -0,0 +1,224 @@ +use futures::TryStreamExt; +use grc20_core::{mapping::EntityNode, system_ids}; +use grc20_sdk::models::property; +use juniper::{graphql_object, Executor, FieldResult, ScalarValue}; + +use crate::context::KnowledgeGraph; + +use super::{AttributeFilter, Entity, EntityRelationFilter, EntityVersion, Relation, Triple}; + +#[derive(Debug)] +pub struct Property { + entity: Entity, +} + +impl Property { + pub fn new( + node: EntityNode, + space_id: String, + space_version: Option, + strict: bool, + ) -> Self { + Self { + entity: Entity::new(node, space_id, space_version, strict), + } + } +} + +#[graphql_object] +#[graphql(context = KnowledgeGraph, scalar = S: ScalarValue)] +impl Property { + /// Entity ID + fn id(&self) -> &str { + self.entity.id() + } + + /// The space ID of the entity (note: the same entity can exist in multiple spaces) + fn space_id(&self) -> &str { + self.entity.space_id() + } + + fn created_at(&self) -> String { + self.entity.created_at() + } + + fn created_at_block(&self) -> &str { + self.entity.created_at_block() + } + + fn updated_at(&self) -> String { + self.entity.updated_at() + } + + fn updated_at_block(&self) -> &str { + self.entity.updated_at_block() + } + + /// Entity name (if available) + async fn name<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + #[graphql(default = true)] _strict: bool, + ) -> FieldResult> { + self.entity.name(executor).await + } + + /// Entity description (if available) + async fn description<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + self.entity.description(executor).await + } + + /// Entity cover (if available) + async fn cover<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + self.entity.cover(executor).await + } + + /// Entity blocks (if available) + async fn blocks<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + self.entity.blocks(executor).await + } + + /// Types of the entity (which are entities themselves) + async fn types<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + self.entity.types(executor).await + } + + // TODO: Add entity attributes filtering + /// Attributes of the entity + async fn attributes( + &self, + executor: &'_ Executor<'_, '_, KnowledgeGraph, S>, + filter: Option, + ) -> FieldResult> { + self.entity.attributes(executor, filter).await + } + + /// Relations outgoing from the entity + async fn relations<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + r#where: Option, + ) -> FieldResult> { + self.entity.relations(executor, r#where).await + } + + // TODO: Add version filtering (e.g.: time range, edit author) + /// Versions of the entity, ordered chronologically + async fn versions<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> 
FieldResult> { + self.entity.versions(executor).await + } + + /// Value type of the property + async fn value_type<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + // let value_type = self + // .entity + // .node + // .get_outbound_relations( + // &executor.context().0, + // self.space_id(), + // self.entity.space_version.clone(), + // ) + // .relation_type(prop_filter::value(system_ids::VALUE_TYPE_ATTRIBUTE)) + // .limit(1) + // .send() + // .await?; + tracing::info!("Fetching value type for property {}", self.entity.id()); + + let value_type = property::get_outbound_relations( + &executor.context().0, + system_ids::VALUE_TYPE_ATTRIBUTE, + self.entity.id(), + self.space_id(), + self.entity.space_version.clone(), + Some(1), + None, + self.entity.strict, + ) + .await? + .try_collect::>() + .await?; + + if let Some(value_type) = value_type.first() { + Ok(Entity::load( + &executor.context().0, + &value_type.to, + self.space_id().to_string(), + self.entity.space_version.clone(), + self.entity.strict, + ) + .await?) + } else { + Ok(None) + } + } + + /// Value type of the property + async fn relation_value_type<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + // let rel_value_type = self + // .entity + // .node + // .get_outbound_relations( + // &executor.context().0, + // self.space_id(), + // self.entity.space_version.clone(), + // ) + // .relation_type(prop_filter::value(system_ids::RELATION_VALUE_RELATIONSHIP_TYPE)) + // .limit(1) + // .send() + // .await?; + tracing::info!( + "Fetching relation value type for property {}", + self.entity.id() + ); + + let rel_value_type = property::get_outbound_relations( + &executor.context().0, + system_ids::RELATION_VALUE_RELATIONSHIP_TYPE, + self.entity.id(), + self.space_id(), + self.entity.space_version.clone(), + Some(1), + None, + self.entity.strict, + ) + .await? + .try_collect::>() + .await?; + + // pin_mut!(rel_value_type); + + if let Some(value_type) = rel_value_type.first() { + Ok(Entity::load( + &executor.context().0, + &value_type.to, + self.space_id().to_string(), + self.entity.space_version.clone(), + self.entity.strict, + ) + .await?) + } else { + Ok(None) + } + } +} diff --git a/api/src/schema/query.rs b/api/src/schema/query.rs index 2e3bc9e..0e133e1 100644 --- a/api/src/schema/query.rs +++ b/api/src/schema/query.rs @@ -4,19 +4,19 @@ use juniper::{graphql_object, Executor, FieldResult, ScalarValue}; use grc20_core::{ indexer_ids, mapping::{ - self, entity_node, + self, entity_node, prop_filter, query_utils::{Query, QueryStream}, relation_node, }, }; -use grc20_sdk::models::{account, space}; +use grc20_sdk::models::{account, property, space}; use crate::{ context::KnowledgeGraph, schema::{Account, AccountFilter, Entity, Relation, RelationFilter, Space, SpaceFilter}, }; -use super::{entity_order_by::OrderDirection, EntityFilter}; +use super::{entity_order_by::OrderDirection, EntityFilter, Triple}; #[derive(Clone)] pub struct RootQuery; @@ -29,8 +29,9 @@ impl RootQuery { &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, id: String, + version: Option, ) -> FieldResult> { - Space::load(&executor.context().0, id).await + Ok(Space::load(&executor.context().0, id, version).await?) 
     }
 
     /// Returns multiple spaces according to the provided filter
@@ -38,14 +39,15 @@ impl RootQuery {
     async fn spaces<'a, S: ScalarValue>(
         &'a self,
         executor: &'a Executor<'_, '_, KnowledgeGraph, S>,
-        where_: Option<SpaceFilter>,
-        first: Option<i32>,
-        skip: Option<i32>,
+        r#where: Option<SpaceFilter>,
+        version: Option<String>,
+        #[graphql(default = 100)] first: i32,
+        #[graphql(default = 0)] skip: i32,
     ) -> FieldResult<Vec<Space>> {
         let mut query = space::find_many(&executor.context().0, indexer_ids::INDEXER_SPACE_ID);
 
         // Apply filters if provided
-        if let Some(where_) = &where_ {
+        if let Some(where_) = &r#where {
             // Network filter
             if let Some(network_filter) = where_.network_filter() {
                 query = query.network(network_filter);
@@ -84,21 +86,16 @@ impl RootQuery {
             }
         }
 
-        if let Some(first) = first {
-            if first > 1000 {
-                return Err("Cannot query more than 1000 spaces at once".into());
-            }
-            query = query.limit(first as usize);
-        }
-
-        if let Some(skip) = skip {
-            query = query.skip(skip as usize);
+        if first > 1000 {
+            return Err("Cannot query more than 1000 spaces at once".into());
         }
 
         Ok(query
+            .limit(first as usize)
+            .skip(skip as usize)
             .send()
             .await?
-            .map_ok(Space::new)
+            .map_ok(|entity| Space::new(entity, version.clone()))
             .try_collect::<Vec<_>>()
             .await?)
     }
@@ -132,8 +129,8 @@ impl RootQuery {
         &'a self,
         executor: &'a Executor<'_, '_, KnowledgeGraph, S>,
         where_: Option<AccountFilter>,
-        first: Option<i32>,
-        skip: Option<i32>,
+        #[graphql(default = 100)] first: i32,
+        #[graphql(default = 0)] skip: i32,
     ) -> FieldResult<Vec<Account>> {
         let mut query = account::find_many(&executor.context().0, indexer_ids::INDEXER_SPACE_ID);
 
@@ -145,18 +142,13 @@ impl RootQuery {
             }
         }
 
-        if let Some(first) = first {
-            if first > 1000 {
-                return Err("Cannot query more than 1000 accounts at once".into());
-            }
-            query = query.limit(first as usize);
-        }
-
-        if let Some(skip) = skip {
-            query = query.skip(skip as usize);
+        if first > 1000 {
+            return Err("Cannot query more than 1000 accounts at once".into());
         }
 
         Ok(query
+            .limit(first as usize)
+            .skip(skip as usize)
             .send()
             .await?
             .map_ok(Account::new)
@@ -170,6 +162,7 @@ impl RootQuery {
         id: String,
         space_id: String,
         version_id: Option<String>,
+        #[graphql(default = true)] strict: bool,
     ) -> FieldResult<Option<Entity>> {
         let version_index = if let Some(version_id) = version_id {
             mapping::get_version_index(&executor.context().0, version_id).await?
@@ -177,7 +170,7 @@ impl RootQuery { None }; - Entity::load(&executor.context().0, id, space_id, version_index).await + Entity::load(&executor.context().0, id, space_id, version_index, strict).await } #[allow(clippy::too_many_arguments)] @@ -189,15 +182,16 @@ impl RootQuery { order_by: Option, order_direction: Option, r#where: Option, - first: Option, - skip: Option, + #[graphql(default = 100)] first: i32, + #[graphql(default = 0)] skip: i32, + #[graphql(default = true)] strict: bool, ) -> FieldResult> { let mut query = entity_node::find_many(&executor.context().0); let entity_filter = if let Some(r#where) = r#where { - entity_node::EntityFilter::from(r#where).space_id(&space_id) + mapping::EntityFilter::from(r#where).space_id(prop_filter::value(&space_id)) } else { - entity_node::EntityFilter::default().space_id(&space_id) + mapping::EntityFilter::default().space_id(prop_filter::value(&space_id)) }; query = query.with_filter(entity_filter); @@ -211,21 +205,16 @@ impl RootQuery { _ => {} } - if let Some(first) = first { - if first > 1000 { - return Err("Cannot query more than 1000 entities at once".into()); - } - query = query.limit(first as usize); - } - - if let Some(skip) = skip { - query = query.skip(skip as usize); + if first > 1000 { + return Err("Cannot query more than 1000 relations at once".into()); } Ok(query + .limit(first as usize) + .skip(skip as usize) .send() .await? - .map_ok(|entity| Entity::new(entity, space_id.clone(), None)) + .map_ok(|entity| Entity::new(entity, space_id.clone(), None, strict)) .try_collect::>() .await?) } @@ -237,6 +226,7 @@ impl RootQuery { id: String, space_id: String, version_id: Option, + #[graphql(default = true)] strict: bool, ) -> FieldResult> { let version_index = if let Some(version_id) = version_id { mapping::get_version_index(&executor.context().0, version_id).await? @@ -244,7 +234,7 @@ impl RootQuery { None }; - Relation::load(&executor.context().0, id, space_id, version_index).await + Relation::load(&executor.context().0, id, space_id, version_index, strict).await } // TODO: Add order_by and order_direction @@ -257,8 +247,9 @@ impl RootQuery { _order_by: Option, _order_direction: Option, r#where: Option, - first: Option, - skip: Option, + #[graphql(default = 100)] first: i32, + #[graphql(default = 0)] skip: i32, + #[graphql(default = true)] strict: bool, ) -> FieldResult> { let mut query = relation_node::find_many(&executor.context().0); @@ -266,22 +257,46 @@ impl RootQuery { query = r#where.apply_filter(query); } - if let Some(first) = first { - if first > 1000 { - return Err("Cannot query more than 1000 relations at once".into()); - } - query = query.limit(first as usize); - } - - if let Some(skip) = skip { - query = query.skip(skip as usize); + if first > 1000 { + return Err("Cannot query more than 1000 relations at once".into()); } Ok(query + .limit(first as usize) + .skip(skip as usize) .send() .await? - .map_ok(|relation| Relation::new(relation, space_id.clone(), None)) + .map_ok(|relation| Relation::new(relation, space_id.clone(), None, strict)) .try_collect::>() .await?) 
} + + /// Returns a single triple identified by its entity ID, attribute ID, space ID and + /// optional version ID + async fn triple<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + entity_id: String, + attribute_id: String, + space_id: String, + version_id: Option, + #[graphql(default = true)] strict: bool, + ) -> FieldResult> { + let version_index = if let Some(version_id) = version_id { + mapping::get_version_index(&executor.context().0, version_id).await? + } else { + None + }; + + Ok(property::get_triple( + &executor.context().0, + &attribute_id, + &entity_id, + &space_id, + version_index.clone(), + strict, + ) + .await? + .map(|triple| Triple::new(triple, space_id, version_index))) + } } diff --git a/api/src/schema/relation.rs b/api/src/schema/relation.rs index 3fa5a96..0cd35bb 100644 --- a/api/src/schema/relation.rs +++ b/api/src/schema/relation.rs @@ -14,14 +14,21 @@ pub struct Relation { node: RelationNode, space_id: String, space_version: Option, + strict: bool, } impl Relation { - pub fn new(node: RelationNode, space_id: String, space_version: Option) -> Self { + pub fn new( + node: RelationNode, + space_id: String, + space_version: Option, + strict: bool, + ) -> Self { Self { node, space_id, space_version, + strict, } } @@ -30,6 +37,7 @@ impl Relation { id: impl Into, space_id: impl Into, space_version: Option, + strict: bool, ) -> FieldResult> { let id = id.into(); let space_id = space_id.into(); @@ -38,7 +46,7 @@ impl Relation { relation_node::find_one(neo4j, id, space_id.clone(), space_version.clone()) .send() .await? - .map(|node| Relation::new(node, space_id, space_version)), + .map(|node| Relation::new(node, space_id, space_version, strict)), ) } } @@ -62,6 +70,7 @@ impl Relation { &self.node.id, self.space_id.clone(), self.space_version.clone(), + self.strict, ) .await? .expect("Relation entity not found")) @@ -77,6 +86,7 @@ impl Relation { &self.node.relation_type, self.space_id.clone(), self.space_version.clone(), + self.strict, ) .await? .expect("Relation type entity not found")) @@ -92,6 +102,7 @@ impl Relation { &self.node.from, self.space_id.clone(), self.space_version.clone(), + self.strict, ) .await? .expect("Relation from entity not found")) @@ -107,6 +118,7 @@ impl Relation { &self.node.to, self.space_id.clone(), self.space_version.clone(), + self.strict, ) .await? 
.expect("Relation to entity not found")) diff --git a/api/src/schema/schema_type.rs b/api/src/schema/schema_type.rs new file mode 100644 index 0000000..578ae59 --- /dev/null +++ b/api/src/schema/schema_type.rs @@ -0,0 +1,176 @@ +use futures::TryStreamExt; +use grc20_core::{ + mapping::{entity_node, prop_filter, query_utils::QueryStream, EntityNode}, + system_ids, +}; +use grc20_sdk::models::property; +use juniper::{graphql_object, Executor, FieldResult, ScalarValue}; + +use crate::context::KnowledgeGraph; + +use super::{ + AttributeFilter, Entity, EntityRelationFilter, EntityVersion, Property, Relation, Triple, +}; + +#[derive(Debug)] +pub struct SchemaType { + entity: Entity, +} + +impl SchemaType { + pub fn new( + node: EntityNode, + space_id: String, + space_version: Option, + strict: bool, + ) -> Self { + Self { + entity: Entity::new(node, space_id, space_version, strict), + } + } +} + +#[graphql_object] +#[graphql(context = KnowledgeGraph, scalar = S: ScalarValue)] +/// SchemaType object +impl SchemaType { + /// Entity ID + fn id(&self) -> &str { + self.entity.id() + } + + /// The space ID of the entity (note: the same entity can exist in multiple spaces) + fn space_id(&self) -> &str { + self.entity.space_id() + } + + fn created_at(&self) -> String { + self.entity.created_at() + } + + fn created_at_block(&self) -> &str { + self.entity.created_at_block() + } + + fn updated_at(&self) -> String { + self.entity.updated_at() + } + + fn updated_at_block(&self) -> &str { + self.entity.updated_at_block() + } + + /// Entity name (if available) + async fn name<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + #[graphql(default = true)] _strict: bool, + ) -> FieldResult> { + self.entity.name(executor).await + } + + /// Entity description (if available) + async fn description<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + self.entity.description(executor).await + } + + /// Entity cover (if available) + async fn cover<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + self.entity.cover(executor).await + } + + /// Entity blocks (if available) + async fn blocks<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + self.entity.blocks(executor).await + } + + /// Types of the entity (which are entities themselves) + async fn types<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + self.entity.types(executor).await + } + + // TODO: Add entity attributes filtering + /// Attributes of the entity + async fn attributes( + &self, + executor: &'_ Executor<'_, '_, KnowledgeGraph, S>, + filter: Option, + ) -> FieldResult> { + self.entity.attributes(executor, filter).await + } + + /// Relations outgoing from the entity + async fn relations<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + r#where: Option, + ) -> FieldResult> { + self.entity.relations(executor, r#where).await + } + + // TODO: Add version filtering (e.g.: time range, edit author) + /// Versions of the entity, ordered chronologically + async fn versions<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + ) -> FieldResult> { + self.entity.versions(executor).await + } + + /// Properties of the Type + async fn properties<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + 
#[graphql(default = 100)] first: i32, + #[graphql(default = 0)] skip: i32, + ) -> FieldResult> { + tracing::info!("Fetching properties for type {}", self.entity.id()); + + let properties = property::get_outbound_relations( + &executor.context().0, + system_ids::PROPERTIES, + self.entity.id(), + self.space_id(), + self.entity.space_version.clone(), + Some(first as usize), + Some(skip as usize), + self.entity.strict, + ) + .await? + .try_collect::>() + .await?; + + if properties.is_empty() { + Ok(Vec::new()) + } else { + Ok(entity_node::find_many(&executor.context().0) + .id(prop_filter::value_in( + properties.into_iter().map(|rel| rel.to).collect(), + )) + .send() + .await? + .map_ok(|node| { + Property::new( + node, + self.space_id().to_string(), + self.entity.space_version.clone(), + self.entity.strict, + ) + }) + .try_collect::>() + .await?) + } + } +} diff --git a/api/src/schema/space.rs b/api/src/schema/space.rs index ad17881..c729686 100644 --- a/api/src/schema/space.rs +++ b/api/src/schema/space.rs @@ -1,36 +1,42 @@ -use futures::TryStreamExt; +use futures::{StreamExt, TryStreamExt}; use juniper::{graphql_object, Executor, FieldResult, GraphQLEnum, ScalarValue}; use grc20_core::{ + error::DatabaseError, indexer_ids, mapping::{ + self, entity_node, prop_filter, query_utils::{Query, QueryStream}, - Entity, }, neo4rs, }; -use grc20_sdk::models::{space, Space as SdkSpace}; +use grc20_sdk::models::{self, space, Space as SdkSpace}; use crate::context::KnowledgeGraph; -use super::Account; +use super::{entity_order_by::OrderDirection, Account, Entity, EntityFilter, SchemaType}; pub struct Space { - entity: Entity, + entity: mapping::Entity, + version: Option, } impl Space { - pub fn new(entity: Entity) -> Self { - Self { entity } + pub fn new(entity: mapping::Entity, version: Option) -> Self { + Self { entity, version } } - pub async fn load(neo4j: &neo4rs::Graph, id: impl Into) -> FieldResult> { + pub async fn load( + neo4j: &neo4rs::Graph, + id: impl Into, + version: Option, + ) -> Result, DatabaseError> { let id = id.into(); Ok(space::find_one(neo4j, &id, indexer_ids::INDEXER_SPACE_ID) .send() .await? - .map(Space::new)) + .map(|entity| Space::new(entity, version))) } } @@ -78,7 +84,7 @@ impl From<&SpaceGovernanceType> for grc20_sdk::models::space::SpaceGovernanceTyp impl Space { /// Space ID fn id(&self) -> &str { - &self.entity.id + self.entity.id() } /// Network of the space @@ -139,23 +145,18 @@ impl Space { async fn members<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, - first: Option, - skip: Option, + #[graphql(default = 100)] first: i32, + #[graphql(default = 0)] skip: i32, ) -> FieldResult> { - let mut query = SdkSpace::members(&executor.context().0, &self.entity.id); + let query = models::space::members(&executor.context().0, self.entity.id()); - if let Some(first) = first { - if first > 1000 { - return Err("Cannot query more than 1000 members at once".into()); - } - query = query.limit(first as usize); - } - - if let Some(skip) = skip { - query = query.skip(skip as usize); + if first > 1000 { + return Err("Cannot query more than 1000 relations at once".into()); } Ok(query + .limit(first as usize) + .skip(skip as usize) .send() .await? 
.map_ok(Account::new) @@ -167,23 +168,18 @@ impl Space { async fn editors<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, - first: Option, - skip: Option, + #[graphql(default = 100)] first: i32, + #[graphql(default = 0)] skip: i32, ) -> FieldResult> { - let mut query = SdkSpace::editors(&executor.context().0, &self.entity.id); + let query = models::space::editors(&executor.context().0, self.entity.id()); - if let Some(first) = first { - if first > 1000 { - return Err("Cannot query more than 1000 editors at once".into()); - } - query = query.limit(first as usize); - } - - if let Some(skip) = skip { - query = query.skip(skip as usize); + if first > 1000 { + return Err("Cannot query more than 1000 relations at once".into()); } Ok(query + .limit(first as usize) + .skip(skip as usize) .send() .await? .map_ok(Account::new) @@ -195,26 +191,22 @@ impl Space { async fn parent_spaces<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, - first: Option, - skip: Option, + #[graphql(default = 100)] first: i32, + #[graphql(default = 0)] skip: i32, ) -> FieldResult> { - let mut query = SdkSpace::parent_spaces(&executor.context().0, &self.entity.id); - - if let Some(first) = first { - if first > 1000 { - return Err("Cannot query more than 1000 parent spaces at once".into()); - } - query = query.limit(first as usize); - } + let query = models::space::parent_spaces(&executor.context().0, self.entity.id()); - if let Some(skip) = skip { - query = query.skip(skip as usize); + if first > 1000 { + return Err("Cannot query more than 1000 relations at once".into()); } Ok(query + .limit(first as usize) + .skip(skip as usize) .send() .await? - .map_ok(Space::new) + .and_then(|(space_id, _)| Space::load(&executor.context().0, space_id, None)) + .filter_map(|space| async move { space.transpose() }) .try_collect::>() .await?) } @@ -223,26 +215,88 @@ impl Space { async fn subspaces<'a, S: ScalarValue>( &'a self, executor: &'a Executor<'_, '_, KnowledgeGraph, S>, - first: Option, - skip: Option, + #[graphql(default = 100)] first: i32, + #[graphql(default = 0)] skip: i32, ) -> FieldResult> { - let mut query = SdkSpace::subspaces(&executor.context().0, &self.entity.id); + let query = models::space::subspaces(&executor.context().0, self.entity.id()); + + if first > 1000 { + return Err("Cannot query more than 1000 relations at once".into()); + } + + Ok(query + .limit(first as usize) + .skip(skip as usize) + .send() + .await? + .and_then(|(space_id, _)| Space::load(&executor.context().0, space_id, None)) + .filter_map(|space| async move { space.transpose() }) + .try_collect::>() + .await?) + } + + async fn types<'a, S: ScalarValue>( + &'a self, + executor: &'a Executor<'_, '_, KnowledgeGraph, S>, + #[graphql(default = 100)] first: i32, + #[graphql(default = 0)] skip: i32, + #[graphql(default = true)] strict: bool, + ) -> FieldResult> { + let types = models::space::types(&executor.context().0, self.entity.id()) + .strict(strict) + .limit(first as usize) + .skip(skip as usize) + .send() + .await?; + + Ok(types + .map_ok(|node| SchemaType::new(node, self.entity.id().to_string(), None, strict)) + .try_collect() + .await?) 
+    }
-        if let Some(first) = first {
-            if first > 1000 {
-                return Err("Cannot query more than 1000 subspaces at once".into());
+    #[allow(clippy::too_many_arguments)]
+    async fn entities<'a, S: ScalarValue>(
+        &'a self,
+        executor: &'a Executor<'_, '_, KnowledgeGraph, S>,
+        order_by: Option<String>,
+        order_direction: Option<OrderDirection>,
+        r#where: Option<EntityFilter>,
+        #[graphql(default = 100)] first: i32,
+        #[graphql(default = 0)] skip: i32,
+        #[graphql(default = true)] strict: bool,
+    ) -> FieldResult<Vec<Entity>> {
+        let mut query = entity_node::find_many(&executor.context().0);
+
+        let entity_filter = if let Some(r#where) = r#where {
+            mapping::EntityFilter::from(r#where).space_id(prop_filter::value(self.id()))
+        } else {
+            mapping::EntityFilter::default().space_id(prop_filter::value(self.id()))
+        };
+        query = query.with_filter(entity_filter);
+
+        match (order_by, order_direction) {
+            (Some(order_by), Some(OrderDirection::Asc) | None) => {
+                query.order_by_mut(mapping::order_by::asc(order_by));
+            }
+            (Some(order_by), Some(OrderDirection::Desc)) => {
+                query.order_by_mut(mapping::order_by::desc(order_by));
             }
-            query = query.limit(first as usize);
+            _ => {}
         }
 
-        if let Some(skip) = skip {
-            query = query.skip(skip as usize);
+        if first > 1000 {
+            return Err("Cannot query more than 1000 entities at once".into());
         }
 
         Ok(query
+            .limit(first as usize)
+            .skip(skip as usize)
             .send()
             .await?
-            .map_ok(Space::new)
+            .map_ok(|entity| {
+                Entity::new(entity, self.id().to_owned(), self.version.clone(), strict)
+            })
             .try_collect::<Vec<_>>()
             .await?)
     }
diff --git a/api/src/schema/triple_filter.rs b/api/src/schema/triple_filter.rs
new file mode 100644
index 0000000..11599fb
--- /dev/null
+++ b/api/src/schema/triple_filter.rs
@@ -0,0 +1,29 @@
+use juniper::GraphQLInputObject;
+
+#[derive(Debug, GraphQLInputObject)]
+pub struct TripleFilter {
+    pub entity_id: Option<String>,
+    pub entity_id_not: Option<String>,
+    pub entity_id_in: Option<Vec<String>>,
+    pub entity_id_not_in: Option<Vec<String>>,
+
+    pub attribute_id: Option<String>,
+    pub attribute_id_not: Option<String>,
+    pub attribute_id_in: Option<Vec<String>>,
+    pub attribute_id_not_in: Option<Vec<String>>,
+
+    pub space_id: Option<String>,
+    pub space_id_not: Option<String>,
+    pub space_id_in: Option<Vec<String>>,
+    pub space_id_not_in: Option<Vec<String>>,
+
+    pub value: Option<String>,
+    pub value_not: Option<String>,
+    pub value_in: Option<Vec<String>>,
+    pub value_not_in: Option<Vec<String>>,
+
+    pub value_type: Option<String>,
+    pub value_type_not: Option<String>,
+    pub value_type_in: Option<Vec<String>>,
+    pub value_type_not_in: Option<Vec<String>>,
+}
diff --git a/grc20-core/src/aggregation/aggregation_direction.rs b/grc20-core/src/aggregation/aggregation_direction.rs
new file mode 100644
index 0000000..4b1df8a
--- /dev/null
+++ b/grc20-core/src/aggregation/aggregation_direction.rs
@@ -0,0 +1,5 @@
+pub enum AggregationDirection {
+    Up,
+    Down,
+    Bidirectional,
+}
diff --git a/grc20-core/src/aggregation/error.rs b/grc20-core/src/aggregation/error.rs
new file mode 100644
index 0000000..2858979
--- /dev/null
+++ b/grc20-core/src/aggregation/error.rs
@@ -0,0 +1,7 @@
+use crate::error::DatabaseError;
+
+#[derive(Debug, thiserror::Error)]
+pub enum AggregationError {
+    #[error("Database error: {0}")]
+    DatabaseError(#[from] DatabaseError),
+}
diff --git a/grc20-core/src/aggregation/mod.rs b/grc20-core/src/aggregation/mod.rs
new file mode 100644
index 0000000..c2f5c85
--- /dev/null
+++ b/grc20-core/src/aggregation/mod.rs
@@ -0,0 +1,5 @@
+pub mod aggregation_direction;
+pub mod error;
+// pub mod pluralism;
+
+pub use error::AggregationError;
diff --git a/grc20-core/src/aggregation/pluralism.rs b/grc20-core/src/aggregation/pluralism.rs
new file mode 100644
index 0000000..1d0aaa4
--- /dev/null
+++ b/grc20-core/src/aggregation/pluralism.rs
@@ -0,0 +1,39 @@
+use crate::mapping::{triple, Query, Triple};
+
+use super::{error::AggregationError, space_hierarchy};
+
+pub async fn get_triple(
+    neo4j: &neo4rs::Graph,
+    attribute_id: String,
+    entity_id: String,
+    space_id: String,
+    space_version: Option<String>,
+    strict: bool,
+) -> Result<Option<Triple>, AggregationError> {
+    // Get all spaces to query (just the given space if strict, or all parent spaces if not)
+    let mut spaces_to_query = vec![(space_id.clone(), 0)];
+    if !strict {
+        let parent_spaces = space_hierarchy::all_parent_spaces(neo4j, &space_id).await?;
+        spaces_to_query.extend(parent_spaces);
+    }
+
+    spaces_to_query.sort_by_key(|(_, depth)| *depth);
+
+    for (space_id, _) in spaces_to_query {
+        let maybe_triple = triple::find_one(
+            neo4j,
+            &attribute_id,
+            &entity_id,
+            space_id,
+            space_version.clone(),
+        )
+        .send()
+        .await?;
+
+        if maybe_triple.is_some() {
+            return Ok(maybe_triple);
+        }
+    }
+
+    Ok(None)
+}
diff --git a/grc20-core/src/aggregations.rs b/grc20-core/src/aggregations.rs
deleted file mode 100644
index 3407d3b..0000000
--- a/grc20-core/src/aggregations.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-use crate::{error::DatabaseError, models::space};
-
-#[derive(Debug, thiserror::Error)]
-pub enum AggregationError {
-    #[error("Database error: {0}")]
-    DatabaseError(#[from] DatabaseError)
-}
-
-/// Represents a type aggregation, i.e. a list of spaces that should be included in
-/// the aggregation for that type when querying the graph from the perspective of the
-/// initial space.
-#[derive(Debug, Clone)]
-pub struct TypeAggregation {
-    pub id: String,
-    pub initial_space: String,
-    pub spaces: Vec<String>,
-}
-
-/// Given a space id, returns a list of all spaces that should be included in the aggregation
-pub async fn hierarchy_aggregation(
-    neo4j: &neo4rs::Graph,
-    space_id: &str,
-) -> Result<Vec<TypeAggregation>, AggregationError> {
-    // First, all types visible in the space
-    let types = space::types(space_id, neo4j).await?;
-
-    todo!()
-}
\ No newline at end of file
diff --git a/grc20-core/src/ids/system_ids.rs b/grc20-core/src/ids/system_ids.rs
index f8dede6..ddbce65 100644
--- a/grc20-core/src/ids/system_ids.rs
+++ b/grc20-core/src/ids/system_ids.rs
@@ -267,3 +267,6 @@ pub const VALUES_ATTRIBUTE: &str = "3c5k2MpF9PRYAZ925qTKNi";
 pub const VISION_ATTRIBUTE: &str = "AAMDNTaJtS2i4aWp59zEAk";
 
 pub const ROOT_SPACE_ID: &str = "25omwWh6HYgeRQKCaSpVpa";
+
+// Aggregation direction attribute
+pub const AGGREGATION_DIRECTION: &str = "6zd9BPJNdUpcKenuK7LjCh";
diff --git a/grc20-core/src/lib.rs b/grc20-core/src/lib.rs
index 3e804bc..535f2d3 100644
--- a/grc20-core/src/lib.rs
+++ b/grc20-core/src/lib.rs
@@ -1,3 +1,4 @@
+pub mod aggregation;
 pub mod block;
 pub mod error;
 pub mod graph_uri;
diff --git a/grc20-core/src/mapping/attribute_node.rs b/grc20-core/src/mapping/attribute_node.rs
index 5a05422..a5f1da0 100644
--- a/grc20-core/src/mapping/attribute_node.rs
+++ b/grc20-core/src/mapping/attribute_node.rs
@@ -4,7 +4,7 @@ use chrono::{DateTime, Utc};
 use neo4rs::BoltType;
 use serde::Deserialize;
 
-use super::{Triple, Value};
+use super::{Triple, TriplesConversionError, Value};
 
 /// Neo4j model of an entity Attribute
 #[derive(Clone, Debug, Deserialize, PartialEq)]
@@ -92,7 +92,7 @@ where
 }
 
 impl TryFrom<AttributeNode> for String {
-    type Error = String;
+    type Error = TriplesConversionError;
 
     fn try_from(attr: AttributeNode) -> Result<Self, Self::Error> {
         attr.value.try_into()
@@ -100,7 +100,7 @@ impl TryFrom<AttributeNode> for i64 {
-    type Error = String;
+    type Error = TriplesConversionError;
 
     fn try_from(attr:
AttributeNode) -> Result { attr.value.try_into() @@ -108,7 +108,7 @@ impl TryFrom for i64 { } impl TryFrom for u64 { - type Error = String; + type Error = TriplesConversionError; fn try_from(attr: AttributeNode) -> Result { attr.value.try_into() @@ -116,7 +116,7 @@ impl TryFrom for u64 { } impl TryFrom for f64 { - type Error = String; + type Error = TriplesConversionError; fn try_from(attr: AttributeNode) -> Result { attr.value.try_into() @@ -124,7 +124,7 @@ impl TryFrom for f64 { } impl TryFrom for bool { - type Error = String; + type Error = TriplesConversionError; fn try_from(attr: AttributeNode) -> Result { attr.value.try_into() @@ -132,7 +132,7 @@ impl TryFrom for bool { } impl TryFrom for DateTime { - type Error = String; + type Error = TriplesConversionError; fn try_from(attr: AttributeNode) -> Result { attr.value.try_into() diff --git a/grc20-core/src/mapping/attributes.rs b/grc20-core/src/mapping/attributes.rs index 0f73daf..3e71790 100644 --- a/grc20-core/src/mapping/attributes.rs +++ b/grc20-core/src/mapping/attributes.rs @@ -30,33 +30,28 @@ impl Attributes { pub fn pop(&mut self, attribute_id: &str) -> Result where - T: TryFrom, + T: TryFrom, { self.0 .remove(attribute_id) .ok_or_else(|| TriplesConversionError::MissingAttribute(attribute_id.to_string()))? .value .try_into() - .map_err(TriplesConversionError::InvalidValue) } pub fn pop_opt(&mut self, attribute_id: &str) -> Result, TriplesConversionError> where - T: TryFrom, + T: TryFrom, { self.0 .remove(attribute_id) - .map(|attr| { - attr.value - .try_into() - .map_err(TriplesConversionError::InvalidValue) - }) + .map(|attr| attr.value.try_into()) .transpose() } pub fn get(&self, attribute_id: &str) -> Result where - T: TryFrom, + T: TryFrom, { self.0 .get(attribute_id) @@ -64,21 +59,15 @@ impl Attributes { .value .clone() .try_into() - .map_err(TriplesConversionError::InvalidValue) } pub fn get_opt(&self, attribute_id: &str) -> Result, TriplesConversionError> where - T: TryFrom, + T: TryFrom, { self.0 .get(attribute_id) - .map(|attr| { - attr.value - .clone() - .try_into() - .map_err(TriplesConversionError::InvalidValue) - }) + .map(|attr| attr.value.clone().try_into()) .transpose() } diff --git a/grc20-core/src/mapping/entity.rs b/grc20-core/src/mapping/entity.rs index 8b02d55..4beac73 100644 --- a/grc20-core/src/mapping/entity.rs +++ b/grc20-core/src/mapping/entity.rs @@ -4,19 +4,19 @@ use crate::{block::BlockMetadata, error::DatabaseError, ids, mapping::AttributeN use super::{ attributes::{self, FromAttributes, IntoAttributes}, - entity_node::EntityFilter, + entity_node::SystemProperties, order_by::FieldOrderBy, prop_filter, query_utils::{ query_part, AttributeFilter, PropFilter, Query, QueryPart, QueryStream, VersionFilter, }, - relation_node, RelationNode, + relation, relation_node, EntityFilter, EntityNode, RelationFilter, RelationNode, }; /// High level model encapsulating an entity and its attributes. 
#[derive(Clone, Debug, PartialEq)] pub struct Entity { - pub id: String, + pub(crate) node: EntityNode, pub attributes: T, pub types: Vec, } @@ -24,12 +24,23 @@ pub struct Entity { impl Entity { pub fn new(id: impl Into, attributes: T) -> Self { Entity { - id: id.into(), + node: EntityNode { + id: id.into(), + system_properties: SystemProperties::default(), + }, attributes, types: vec![], } } + pub fn id(&self) -> &str { + &self.node.id + } + + pub fn system_properties(&self) -> &SystemProperties { + &self.node.system_properties + } + pub fn with_type(mut self, r#type: impl Into) -> Self { self.types.push(r#type.into()); self @@ -40,6 +51,36 @@ impl Entity { self } + pub fn get_outbound_relations( + &self, + neo4j: &neo4rs::Graph, + space_id: impl Into, + space_version: Option, + ) -> relation::FindManyQuery { + relation::FindManyQuery::new(neo4j) + .filter( + RelationFilter::default() + .from_(EntityFilter::default().id(prop_filter::value(&self.node.id))), + ) + .space_id(prop_filter::value(space_id.into())) + .version(space_version) + } + + pub fn get_inbound_relations( + &self, + neo4j: &neo4rs::Graph, + space_id: impl Into, + space_version: Option, + ) -> relation::FindManyQuery { + relation::FindManyQuery::new(neo4j) + .filter( + RelationFilter::default() + .to_(EntityFilter::default().id(prop_filter::value(&self.node.id))), + ) + .space_id(prop_filter::value(space_id.into())) + .version(space_version) + } + pub fn insert( self, neo4j: &neo4rs::Graph, @@ -111,7 +152,7 @@ impl Query<()> for InsertOneQuery { attributes::insert_one( &self.neo4j, &self.block, - &self.entity.id, + &self.entity.node.id, &self.space_id, &self.space_version, self.entity.attributes, @@ -129,11 +170,11 @@ impl Query<()> for InsertOneQuery { ids::create_id_from_unique_string(format!( "{}:{}:{}:{}", self.space_id, - self.entity.id, + self.entity.node.id, system_ids::TYPES_ATTRIBUTE, t, )), - &self.entity.id, + &self.entity.node.id, t, system_ids::TYPES_ATTRIBUTE, "0", @@ -186,9 +227,9 @@ impl Query>> for FindOneQuery { .await?; let types = relation_node::find_many(&self.neo4j) - .space_id(PropFilter::default().value(self.space_id.clone())) - .from_id(PropFilter::default().value(self.entity_id.clone())) - .relation_type(PropFilter::default().value(system_ids::TYPES_ATTRIBUTE)) + .space_id(prop_filter::value(self.space_id.clone())) + .from_id(prop_filter::value(self.entity_id.clone())) + .relation_type(prop_filter::value(system_ids::TYPES_ATTRIBUTE)) .send() .await? 
.try_collect::>() @@ -277,7 +318,11 @@ impl FindManyQuery { .match_clause("(e:Entity)") .limit(self.limit); - query_part.merge_mut(self.filter.into_query_part("e")); + query_part.merge_mut( + self.filter + .space_id(self.space_id.clone()) + .into_query_part("e"), + ); if let Some(order_by) = self.order_by { query_part.merge_mut(order_by.into_query_part("e")); @@ -287,14 +332,14 @@ impl FindManyQuery { query_part = query_part.skip(skip); } - query_part.with_clause("e", { + query_part.with_clause("DISTINCT e", { QueryPart::default() .match_clause("(e) -[r:ATTRIBUTE]-> (n:Attribute)") .merge(self.space_id.into_query_part("r", "space_id")) .merge(self.version.into_query_part("r")) .with_clause( "e, collect(n{.*}) AS attrs", - query_part::return_query("e{.id, attributes: attrs}"), + query_part::return_query("e{.*, attributes: attrs}"), ) }) } @@ -316,7 +361,8 @@ impl QueryStream> for FindManyQuery { #[derive(Debug, serde::Deserialize)] struct RowResult { - id: String, + #[serde(flatten)] + node: EntityNode, attributes: Vec, } @@ -325,10 +371,14 @@ impl QueryStream> for FindManyQuery { .await? .into_stream_as::() .map_err(DatabaseError::from) - .map(|attrs| { - attrs.and_then(|attrs| { - T::from_attributes(attrs.attributes.into()) - .map(|data| Entity::new(attrs.id, data)) + .map(|row_result| { + row_result.and_then(|row| { + T::from_attributes(row.attributes.into()) + .map(|data| Entity { + node: row.node, + attributes: data, + types: vec![], + }) .map_err(DatabaseError::from) }) }); @@ -490,7 +540,7 @@ mod tests { .expect("Failed to get next entity") .expect("Entity not found"); - assert_eq!(found_entity.id, entity.id); + assert_eq!(found_entity.node.id, entity.node.id); assert_eq!(found_entity.attributes, entity.attributes); } } diff --git a/grc20-core/src/mapping/entity_node.rs b/grc20-core/src/mapping/entity_node.rs index 5ee8243..0ddc0e9 100644 --- a/grc20-core/src/mapping/entity_node.rs +++ b/grc20-core/src/mapping/entity_node.rs @@ -3,19 +3,19 @@ use futures::{stream::TryStreamExt, Stream}; use serde::{Deserialize, Serialize}; -use crate::{block::BlockMetadata, error::DatabaseError, indexer_ids, system_ids}; +use crate::{block::BlockMetadata, error::DatabaseError, indexer_ids}; use super::{ attributes, entity_version, query_utils::{ - edge_filter::EdgeFilter, order_by::FieldOrderBy, prop_filter, AttributeFilter, PropFilter, - Query, QueryPart, QueryStream, + order_by::FieldOrderBy, prop_filter, AttributeFilter, PropFilter, Query, QueryPart, + QueryStream, }, - relation_node, triple, AttributeNode, Triple, + relation_node, triple, AttributeNode, EntityFilter, Triple, }; /// Neo4j model of an Entity -#[derive(Debug, Deserialize, PartialEq)] +#[derive(Clone, Debug, Deserialize, PartialEq)] pub struct EntityNode { pub id: String, @@ -137,7 +137,7 @@ pub fn find_many(neo4j: &neo4rs::Graph) -> FindManyQuery { FindManyQuery::new(neo4j) } -#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, Hash)] pub struct SystemProperties { #[serde(rename = "82nP7aFmHJLbaPFszj2nbx")] // CREATED_AT_TIMESTAMP pub created_at: DateTime, @@ -271,7 +271,7 @@ impl FindManyQuery { fn into_query_part(self) -> QueryPart { let mut query_part = QueryPart::default() .match_clause("(e:Entity)") - .return_clause("e") + .return_clause("DISTINCT e") .limit(self.limit); query_part.merge_mut(self.filter.into_query_part("e")); @@ -296,7 +296,11 @@ impl QueryStream for FindManyQuery { let query = if cfg!(debug_assertions) || cfg!(test) { let query_part = 
self.into_query_part(); - tracing::info!("entity_node::FindManyQuery:\n{}", query_part.query()); + tracing::info!( + "entity_node::FindManyQuery:\n{}\nparams:{:?}", + query_part.query(), + query_part.params + ); query_part.build() } else { self.into_query_part().build() @@ -316,165 +320,6 @@ impl QueryStream for FindManyQuery { } } -#[derive(Clone, Debug, Default)] -pub struct EntityFilter { - pub(crate) id: Option>, - pub(crate) attributes: Vec, - pub(crate) relations: Option, - pub(crate) space_id: Option>, -} - -impl EntityFilter { - pub fn id(mut self, id: PropFilter) -> Self { - self.id = Some(id); - self - } - - pub fn attribute(mut self, attribute: AttributeFilter) -> Self { - self.attributes.push(attribute); - self - } - - pub fn attribute_mut(&mut self, attribute: AttributeFilter) { - self.attributes.push(attribute); - } - - pub fn attributes(mut self, attributes: impl IntoIterator) -> Self { - self.attributes.extend(attributes); - self - } - - pub fn attributes_mut(&mut self, attributes: impl IntoIterator) { - self.attributes.extend(attributes); - } - - pub fn relations(mut self, relations: impl Into) -> Self { - self.relations = Some(relations.into()); - self - } - - /// Applies a global space_id to all sub-filters (i.e.: attribute and relation filters). - /// If a space_id is already set in a sub-filter, it will be overwritten. - pub fn space_id(mut self, space_id: impl Into) -> Self { - let space_id = space_id.into(); - self.space_id = Some(prop_filter::value(space_id.clone())); - - for attribute in &mut self.attributes { - attribute.space_id_mut(prop_filter::value(&space_id)); - } - - if let Some(relations) = self.relations { - self.relations = Some(relations.with_space_id(space_id)); - } - - self - } - - pub(crate) fn into_query_part(self, node_var: impl Into) -> QueryPart { - let node_var = node_var.into(); - let mut query_part = QueryPart::default(); - - if let Some(id) = self.id { - query_part.merge_mut(id.into_query_part(&node_var, "id")); - } - - if self.attributes.is_empty() { - if let Some(space_id) = self.space_id { - query_part = query_part - .match_clause(format!("({node_var}) -[attribute:ATTRIBUTE]- (:Attribute)",)) - .merge(space_id.into_query_part("attribute", "space_id")); - } - } else { - for attribute in self.attributes { - query_part.merge_mut(attribute.into_query_part(&node_var)); - } - } - - if let Some(relations) = self.relations { - query_part.merge_mut(relations.into_query_part(node_var)); - } - - query_part - } -} - -/// Filter used to: -/// - Filter the relations outgoing from the entity -/// - Filter an entity by its outgoing relations -#[derive(Clone, Debug, Default)] -pub struct EntityRelationFilter { - relation_type: Option, - to_id: Option, - space_version: Option, -} - -impl EntityRelationFilter { - pub fn relation_type(mut self, relation_type: EdgeFilter) -> Self { - self.relation_type = Some(relation_type); - self - } - - pub fn to_id(mut self, to_id: EdgeFilter) -> Self { - self.to_id = Some(to_id); - self - } - - pub fn version(mut self, version: impl Into) -> Self { - self.space_version = Some(version.into()); - self - } - - pub fn is_empty(&self) -> bool { - self.relation_type.is_none() && self.to_id.is_none() - } - - /// Applies a global space_id to all sub-filters (i.e.: relation_type and to_id filters). - /// If a space_id is already set in a sub-filter, it will be overwritten. 
- pub fn with_space_id(mut self, space_id: impl Into) -> Self { - let space_id = space_id.into(); - self.relation_type = self - .relation_type - .map(|filter| filter.space_id(prop_filter::value(&space_id))); - - self.to_id = self - .to_id - .map(|filter| filter.space_id(prop_filter::value(&space_id))); - - self - } - - pub(crate) fn into_query_part(self, node_var: impl Into) -> QueryPart { - let node_var = node_var.into(); - let rel_node_var = format!("r_{node_var}"); - let mut query_part = QueryPart::default(); - - if !self.is_empty() { - query_part = query_part.match_clause(format!( - "({node_var}) <-[:`{FROM_ENTITY}`]- ({rel_node_var})", - FROM_ENTITY = system_ids::RELATION_FROM_ATTRIBUTE - )); - - if let Some(relation_type) = self.relation_type { - query_part.merge_mut(relation_type.into_query_part( - &rel_node_var, - system_ids::RELATION_TYPE_ATTRIBUTE, - self.space_version.clone(), - )); - } - - if let Some(to_id) = self.to_id { - query_part.merge_mut(to_id.into_query_part( - &rel_node_var, - system_ids::RELATION_TO_ATTRIBUTE, - self.space_version, - )); - } - } - - query_part - } -} - pub struct DeleteOneQuery { neo4j: neo4rs::Graph, block: BlockMetadata, diff --git a/grc20-core/src/mapping/entity_queries.rs b/grc20-core/src/mapping/entity_queries.rs new file mode 100644 index 0000000..5245a2b --- /dev/null +++ b/grc20-core/src/mapping/entity_queries.rs @@ -0,0 +1,172 @@ +use crate::system_ids; + +use super::{ + query_utils::{EdgeFilter, QueryPart}, + AttributeFilter, PropFilter, +}; + +#[derive(Clone, Debug, Default)] +pub struct EntityFilter { + pub(crate) id: Option>, + pub(crate) attributes: Vec, + pub(crate) relations: Option, + pub(crate) space_id: Option>, +} + +impl EntityFilter { + pub fn id(mut self, id: PropFilter) -> Self { + self.id = Some(id); + self + } + + pub fn attribute(mut self, attribute: AttributeFilter) -> Self { + self.attributes.push(attribute); + self + } + + pub fn attribute_mut(&mut self, attribute: AttributeFilter) { + self.attributes.push(attribute); + } + + pub fn attributes(mut self, attributes: impl IntoIterator) -> Self { + self.attributes.extend(attributes); + self + } + + pub fn attributes_mut(&mut self, attributes: impl IntoIterator) { + self.attributes.extend(attributes); + } + + pub fn relations(mut self, relations: impl Into) -> Self { + self.relations = Some(relations.into()); + self + } + + /// Applies a global space_id to all sub-filters (i.e.: attribute and relation filters). + /// If a space_id is already set in a sub-filter, it will be overwritten. 
+ pub fn space_id(mut self, space_id: PropFilter) -> Self { + self.space_id = Some(space_id.clone()); + self + } + + pub(crate) fn into_query_part(self, node_var: impl Into) -> QueryPart { + let node_var = node_var.into(); + let mut query_part = QueryPart::default(); + + if let Some(id) = self.id { + query_part.merge_mut(id.into_query_part(&node_var, "id")); + } + + if self.attributes.is_empty() { + if let Some(space_id) = &self.space_id { + query_part = query_part + .match_clause(format!("({node_var}) -[attribute:ATTRIBUTE]- (:Attribute)",)) + .merge(space_id.clone().into_query_part("attribute", "space_id")); + } + } else { + for mut attribute in self.attributes { + if let Some(space_id) = &self.space_id { + attribute = attribute.space_id(space_id.clone()); + } + query_part.merge_mut(attribute.into_query_part(&node_var)); + } + } + + if let Some(mut relations) = self.relations { + if let Some(space_id) = &self.space_id { + relations = relations.space_id(space_id.clone()); + } + query_part.merge_mut(relations.into_query_part(node_var)); + } + + query_part + } +} + +/// Filter used to: +/// - Filter the relations outgoing from the entity +/// - Filter an entity by its outgoing relations +#[derive(Clone, Debug, Default)] +pub struct EntityRelationFilter { + relation_type: Option, + to_id: Option, + space_id: Option>, + space_version: Option, +} + +impl EntityRelationFilter { + pub fn relation_type(mut self, relation_type: EdgeFilter) -> Self { + self.relation_type = Some(relation_type); + self + } + + pub fn to_id(mut self, to_id: EdgeFilter) -> Self { + self.to_id = Some(to_id); + self + } + + pub fn space_id(mut self, space_id: PropFilter) -> Self { + self.space_id = Some(space_id); + self + } + + pub fn version(mut self, version: impl Into) -> Self { + self.space_version = Some(version.into()); + self + } + + pub fn is_empty(&self) -> bool { + self.relation_type.is_none() && self.to_id.is_none() + } + + /// Applies a global space_id to all sub-filters (i.e.: relation_type and to_id filters). + /// If a space_id is already set in a sub-filter, it will be overwritten. 
+ pub fn with_space_id(mut self, space_id: PropFilter) -> Self { + self.relation_type = self + .relation_type + .map(|filter| filter.space_id(space_id.clone())); + + self.to_id = self.to_id.map(|filter| filter.space_id(space_id)); + + self + } + + pub(crate) fn into_query_part(self, node_var: impl Into) -> QueryPart { + let node_var = node_var.into(); + let rel_node_var = format!("r_{node_var}"); + let mut query_part = QueryPart::default(); + + if !self.is_empty() { + query_part = query_part.match_clause(format!( + "({node_var}) <-[:`{FROM_ENTITY}`]- ({rel_node_var})", + FROM_ENTITY = system_ids::RELATION_FROM_ATTRIBUTE + )); + + if let Some(mut relation_type) = self.relation_type { + if let Some(space_id) = &self.space_id { + relation_type = relation_type.space_id(space_id.clone()); + } + + query_part.merge_mut(relation_type.into_query_part( + &rel_node_var, + system_ids::RELATION_TYPE_ATTRIBUTE, + self.space_version.clone(), + )); + } + + if let Some(mut to_id) = self.to_id { + if let Some(space_id) = self.space_id { + to_id = to_id.space_id(space_id); + } + + query_part.merge_mut(to_id.into_query_part( + &rel_node_var, + system_ids::RELATION_TO_ATTRIBUTE, + self.space_version, + )); + } + } + + query_part + } +} diff --git a/grc20-core/src/mapping/mod.rs b/grc20-core/src/mapping/mod.rs index 359b5df..846e4cf 100644 --- a/grc20-core/src/mapping/mod.rs +++ b/grc20-core/src/mapping/mod.rs @@ -2,25 +2,27 @@ pub mod attribute_node; pub mod attributes; pub mod entity; pub mod entity_node; +pub mod entity_queries; pub mod entity_version; -// pub mod entity_queries; pub mod error; pub mod query_utils; pub mod relation; pub mod relation_node; -// pub mod relation_queries; +pub mod relation_queries; pub mod triple; pub mod value; pub use attribute_node::AttributeNode; pub use attributes::{Attributes, FromAttributes, IntoAttributes}; pub use entity::Entity; -pub use entity_node::{EntityFilter, EntityNode}; +pub use entity_node::EntityNode; +pub use entity_queries::{EntityFilter, EntityRelationFilter}; pub use entity_version::EntityVersion; pub use error::TriplesConversionError; -pub use query_utils::{order_by, prop_filter, AttributeFilter, PropFilter, Query}; +pub use query_utils::{order_by, prop_filter, AttributeFilter, PropFilter, Query, QueryStream}; pub use relation::Relation; pub use relation_node::RelationNode; +pub use relation_queries::RelationFilter; pub use triple::Triple; pub use value::{Options, Value, ValueType}; diff --git a/grc20-core/src/mapping/query_utils/mod.rs b/grc20-core/src/mapping/query_utils/mod.rs index 2747646..8249814 100644 --- a/grc20-core/src/mapping/query_utils/mod.rs +++ b/grc20-core/src/mapping/query_utils/mod.rs @@ -17,11 +17,11 @@ pub use query_part::QueryPart; pub use types_filter::TypesFilter; pub use version_filter::VersionFilter; -pub trait Query { +pub trait Query: Sized { fn send(self) -> impl std::future::Future>; } -pub trait QueryStream { +pub trait QueryStream: Sized { fn send( self, ) -> impl std::future::Future< diff --git a/grc20-core/src/mapping/query_utils/query_part.rs b/grc20-core/src/mapping/query_utils/query_part.rs index 39d8df3..45c2770 100644 --- a/grc20-core/src/mapping/query_utils/query_part.rs +++ b/grc20-core/src/mapping/query_utils/query_part.rs @@ -171,18 +171,18 @@ impl QueryPart { query.push('\n'); } - if let Some((clause, other)) = &self.with_clauses { - query.push_str(&format!("WITH {clause}\n")); - query.push_str(&other.query()); - query.push('\n'); + if let Some(skip) = self.skip { + query.push_str(&format!("SKIP {}\n", skip)); 
} if let Some(limit) = self.limit { query.push_str(&format!("LIMIT {}\n", limit)); } - if let Some(skip) = self.skip { - query.push_str(&format!("SKIP {}\n", skip)); + if let Some((clause, other)) = &self.with_clauses { + query.push_str(&format!("WITH {clause}\n")); + query.push_str(&other.query()); + query.push('\n'); } if !self.return_clauses.is_empty() { diff --git a/grc20-core/src/mapping/query_utils/types_filter.rs b/grc20-core/src/mapping/query_utils/types_filter.rs index fc3ce86..b7ac533 100644 --- a/grc20-core/src/mapping/query_utils/types_filter.rs +++ b/grc20-core/src/mapping/query_utils/types_filter.rs @@ -1,4 +1,4 @@ -use crate::{mapping::entity_node::EntityRelationFilter, system_ids}; +use crate::{mapping::EntityRelationFilter, system_ids}; use super::{prop_filter, EdgeFilter}; diff --git a/grc20-core/src/mapping/relation.rs b/grc20-core/src/mapping/relation.rs index cf43c29..a887e39 100644 --- a/grc20-core/src/mapping/relation.rs +++ b/grc20-core/src/mapping/relation.rs @@ -1,13 +1,22 @@ -use crate::{block::BlockMetadata, error::DatabaseError}; +use futures::{Stream, StreamExt, TryStreamExt}; + +use crate::{ + block::BlockMetadata, + error::DatabaseError, + mapping::{AttributeNode, EntityNode}, + system_ids, +}; use super::{ attributes::{self, IntoAttributes}, - entity_node, - query_utils::Query, - relation_node, RelationNode, Value, + entity_node, prop_filter, + query_utils::{query_part, Query, QueryPart, VersionFilter}, + relation_node, Entity, FromAttributes, PropFilter, QueryStream, RelationFilter, RelationNode, + Value, }; /// High level model encapsulating a relation and its attributes. +#[derive(Clone, Debug, PartialEq)] pub struct Relation { relation: RelationNode, @@ -53,6 +62,19 @@ impl Relation { } } +pub fn find_one( + neo4j: &neo4rs::Graph, + id: impl Into, + space_id: impl Into, + version: Option, +) -> FindOneQuery { + FindOneQuery::new(neo4j, id.into(), space_id.into(), version) +} + +pub fn find_many(neo4j: &neo4rs::Graph) -> FindManyQuery { + FindManyQuery::new(neo4j) +} + pub fn delete_one( neo4j: &neo4rs::Graph, block: &BlockMetadata, @@ -69,6 +91,324 @@ pub fn delete_one( ) } +pub struct FindOneQuery { + neo4j: neo4rs::Graph, + id: String, + space_id: String, + version: VersionFilter, +} + +impl FindOneQuery { + fn new(neo4j: &neo4rs::Graph, id: String, space_id: String, version: Option) -> Self { + Self { + neo4j: neo4j.clone(), + id, + space_id, + version: VersionFilter::new(version), + } + } + + fn into_query_part(self) -> QueryPart { + QueryPart::default() + .match_clause("(e:Entity:Relation {id: $id})") + .match_clause(format!( + "(e) -[r_from:`{}` {{space_id: $space_id}}]-> (from:Entity)", + system_ids::RELATION_FROM_ATTRIBUTE + )) + .match_clause(format!( + "(e) -[r_to:`{}` {{space_id: $space_id}}]-> (to:Entity)", + system_ids::RELATION_TO_ATTRIBUTE + )) + .match_clause(format!( + "(e) -[r_rt:`{}` {{space_id: $space_id}}]-> (rt:Entity)", + system_ids::RELATION_TYPE_ATTRIBUTE + )) + .match_clause(format!( + r#"(e) -[r_index:ATTRIBUTE {{space_id: $space_id}}]-> (index:Attribute {{id: "{}"}})"#, + system_ids::RELATION_INDEX + )) + .merge(self.version.clone().into_query_part("r_from")) + .merge(self.version.clone().into_query_part("r_to")) + .merge(self.version.clone().into_query_part("r_rt")) + .merge(self.version.clone().into_query_part("r_index")) + .order_by_clause("index.value") + .with_clause("e, from, to, rt, index", { + QueryPart::default() + .match_clause("(e) -[r:ATTRIBUTE]-> (n:Attribute)") + 
.merge(prop_filter::value::(self.space_id.clone()).into_query_part("r", "space_id")) + .merge(self.version.into_query_part("r")) + .with_clause( + "e, from, to, rt, index, collect(n{.*}) AS attrs", + query_part::return_query("e{.*, from: from.id, to: to.id, relation_type: rt.id, index: index, attributes: attrs}") + ) + }) + .params("id", self.id) + .params("space_id", self.space_id) + } +} + +impl Query>> for FindOneQuery { + async fn send(self) -> Result>, DatabaseError> { + let neo4j = self.neo4j.clone(); + let query = self.into_query_part().build(); + + #[derive(Debug, serde::Deserialize)] + struct RowResult { + #[serde(flatten)] + node: RelationNode, + attributes: Vec, + } + + neo4j + .execute(query) + .await? + .next() + .await? + .map(|row| { + let row = row.to::()?; + Result::<_, DatabaseError>::Ok(Relation { + relation: row.node, + attributes: T::from_attributes(row.attributes.into())?, + types: vec![], + }) + }) + .transpose() + } +} + +pub struct FindManyQuery { + neo4j: neo4rs::Graph, + id: Option>, + filter: RelationFilter, + + space_id: Option>, + version: VersionFilter, + + limit: usize, + skip: Option, +} + +impl FindManyQuery { + pub fn new(neo4j: &neo4rs::Graph) -> Self { + Self { + neo4j: neo4j.clone(), + id: None, + filter: RelationFilter::default(), + space_id: None, + version: VersionFilter::default(), + limit: 100, + skip: None, + } + } + + pub fn id(mut self, id: PropFilter) -> Self { + self.id = Some(id); + self + } + + pub fn filter(mut self, filter: RelationFilter) -> Self { + self.filter = filter; + self + } + + pub fn space_id(mut self, space_id: PropFilter) -> Self { + self.space_id = Some(space_id); + self + } + + pub fn version(mut self, space_version: Option) -> Self { + if let Some(space_version) = space_version { + self.version.version_mut(space_version); + } + self + } + + pub fn limit(mut self, limit: usize) -> Self { + self.limit = limit; + self + } + + pub fn skip(mut self, skip: usize) -> Self { + self.skip = Some(skip); + self + } + + fn into_query_part(self) -> QueryPart { + let mut query_part = QueryPart::default() + .match_clause("(e:Entity:Relation)") + .merge(self.filter.into_query_part("e")) + .order_by_clause("index.value") + .limit(self.limit); + + query_part = query_part + .merge(self.version.clone().into_query_part("r_from")) + .merge(self.version.clone().into_query_part("r_to")) + .merge(self.version.clone().into_query_part("r_rt")) + .merge(self.version.clone().into_query_part("r_index")); + + if let Some(space_id) = &self.space_id { + query_part = query_part + .merge(space_id.clone().into_query_part("r_from", "space_id")) + .merge(space_id.clone().into_query_part("r_to", "space_id")) + .merge(space_id.clone().into_query_part("r_rt", "space_id")) + .merge(space_id.clone().into_query_part("r_index", "space_id")); + } + + if let Some(skip) = self.skip { + query_part = query_part.skip(skip); + } + + query_part + .with_clause("e, from, to, rt, index", { + let mut query_part = QueryPart::default() + .match_clause("(e) -[r:ATTRIBUTE]-> (n:Attribute)") + .merge(self.version.clone().into_query_part("r")); + + if let Some(space_id) = &self.space_id { + query_part.merge_mut(space_id.clone().into_query_part("r", "space_id")); + } + query_part + .with_clause( + "e, from, to, rt, index, collect(n{.*}) AS attrs", + query_part::return_query("e{.*, from: from.id, to: to.id, relation_type: rt.id, index: index, attributes: attrs}") + ) + }) + } + + pub fn select_to(self) -> FindManyToQuery { + let mut query_part = QueryPart::default() + 
.match_clause("(e:Entity:Relation)") + .merge(self.filter.into_query_part("e")) + .order_by_clause("index.value") + .limit(self.limit); + + query_part = query_part + .merge(self.version.clone().into_query_part("r_from")) + .merge(self.version.clone().into_query_part("r_to")) + .merge(self.version.clone().into_query_part("r_rt")) + .merge(self.version.clone().into_query_part("r_index")); + + if let Some(space_id) = &self.space_id { + query_part = query_part + .merge(space_id.clone().into_query_part("r_from", "space_id")) + .merge(space_id.clone().into_query_part("r_to", "space_id")) + .merge(space_id.clone().into_query_part("r_rt", "space_id")) + .merge(space_id.clone().into_query_part("r_index", "space_id")); + } + + if let Some(skip) = self.skip { + query_part = query_part.skip(skip); + } + + query_part = query_part.with_clause("DISTINCT to", { + let mut query_part = QueryPart::default() + .match_clause("(to) -[r:ATTRIBUTE]-> (n:Attribute)") + .merge(self.version.clone().into_query_part("r")); + + if let Some(space_id) = &self.space_id { + query_part.merge_mut(space_id.clone().into_query_part("r", "space_id")); + } + query_part.with_clause( + "to, collect(n{.*}) AS attrs", + query_part::return_query("to{.*, attributes: attrs}"), + ) + }); + + FindManyToQuery { + neo4j: self.neo4j.clone(), + query_part, + } + } +} + +impl QueryStream> for FindManyQuery { + async fn send( + self, + ) -> Result, DatabaseError>>, DatabaseError> { + let neo4j = self.neo4j.clone(); + + let query = if cfg!(debug_assertions) || cfg!(test) { + let query_part = self.into_query_part(); + tracing::info!("relation_node::FindManyQuery:\n{}", query_part); + query_part.build() + } else { + self.into_query_part().build() + }; + + #[derive(Debug, serde::Deserialize)] + struct RowResult { + #[serde(flatten)] + node: RelationNode, + attributes: Vec, + } + + let stream = neo4j + .execute(query) + .await? + .into_stream_as::() + .map_err(DatabaseError::from) + .map(|row_result| { + row_result.and_then(|row| { + T::from_attributes(row.attributes.into()) + .map(|attributes| Relation { + relation: row.node, + attributes, + types: vec![], + }) + .map_err(DatabaseError::from) + }) + }); + + Ok(stream) + } +} + +pub struct FindManyToQuery { + neo4j: neo4rs::Graph, + query_part: QueryPart, +} + +impl QueryStream> for FindManyToQuery { + async fn send( + self, + ) -> Result, DatabaseError>>, DatabaseError> { + let neo4j = self.neo4j.clone(); + + let query = if cfg!(debug_assertions) || cfg!(test) { + tracing::info!("relation::FindManyToQuery:\n{}", self.query_part); + self.query_part.build() + } else { + self.query_part.build() + }; + + #[derive(Debug, serde::Deserialize)] + struct RowResult { + #[serde(flatten)] + node: EntityNode, + attributes: Vec, + } + + let stream = neo4j + .execute(query) + .await? 
+ .into_stream_as::() + .map_err(DatabaseError::from) + .map(|row_result| { + row_result.and_then(|row| { + T::from_attributes(row.attributes.into()) + .map(|data| Entity { + node: row.node, + attributes: data, + types: vec![], + }) + .map_err(DatabaseError::from) + }) + }); + + Ok(stream) + } +} + pub struct DeleteOneQuery { neo4j: neo4rs::Graph, block: BlockMetadata, @@ -173,3 +513,169 @@ impl Query<()> for InsertOneQuery { .await } } + +#[cfg(test)] +mod tests { + use crate::mapping::{self, triple, EntityFilter, Triple}; + + use super::*; + + use futures::pin_mut; + use testcontainers::{ + core::{IntoContainerPort, WaitFor}, + runners::AsyncRunner, + GenericImage, ImageExt, + }; + + const BOLT_PORT: u16 = 7687; + const HTTP_PORT: u16 = 7474; + + #[derive(Clone, Debug, PartialEq)] + struct Foo { + name: String, + bar: u64, + } + + impl mapping::IntoAttributes for Foo { + fn into_attributes(self) -> Result { + Ok(mapping::Attributes::default() + .attribute(("name", self.name)) + .attribute(("bar", self.bar))) + } + } + + impl mapping::FromAttributes for Foo { + fn from_attributes( + mut attributes: mapping::Attributes, + ) -> Result { + Ok(Self { + name: attributes.pop("name")?, + bar: attributes.pop("bar")?, + }) + } + } + + #[tokio::test] + async fn test_insert_find_one_relation() { + // Setup a local Neo 4J container for testing. NOTE: docker service must be running. + let container = GenericImage::new("neo4j", "2025.01.0-community") + .with_wait_for(WaitFor::Duration { + length: std::time::Duration::from_secs(5), + }) + .with_exposed_port(BOLT_PORT.tcp()) + .with_exposed_port(HTTP_PORT.tcp()) + .with_env_var("NEO4J_AUTH", "none") + .start() + .await + .expect("Failed to start Neo 4J container"); + + let port = container.get_host_port_ipv4(BOLT_PORT).await.unwrap(); + let host = container.get_host().await.unwrap().to_string(); + + let neo4j = neo4rs::Graph::new(format!("neo4j://{host}:{port}"), "user", "password") + .await + .unwrap(); + + let foo = Foo { + name: "Alice".into(), + bar: 42, + }; + + triple::insert_many(&neo4j, &BlockMetadata::default(), "ROOT", "0") + .triples(vec![ + Triple::new("from_id", "name", "FooFrom"), + Triple::new("to_id", "name", "FooTo"), + Triple::new("relation_type", "name", "FooRelation"), + Triple::new(system_ids::TYPES_ATTRIBUTE, "name", "Types"), + ]) + .send() + .await + .expect("Failed to insert triples"); + + let relation = Relation::new("rel_abc", "from_id", "to_id", "relation_type", 0u64, foo); + + relation + .clone() + .insert(&neo4j, &BlockMetadata::default(), "ROOT", "0") + .send() + .await + .expect("Failed to insert relation"); + + let found_relation = FindOneQuery::new(&neo4j, "rel_abc".into(), "ROOT".into(), None) + .send() + .await + .expect("Failed to find relation") + .expect("Relation not found"); + + assert_eq!(found_relation, relation); + } + + #[tokio::test] + async fn test_insert_find_many_relations() { + // Setup a local Neo 4J container for testing. NOTE: docker service must be running. 
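+        // Once the database is up, this test inserts a single relation and checks that
+        // FindManyQuery, filtered by relation type and space id, streams it back.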
+ let container = GenericImage::new("neo4j", "2025.01.0-community") + .with_wait_for(WaitFor::Duration { + length: std::time::Duration::from_secs(5), + }) + .with_exposed_port(BOLT_PORT.tcp()) + .with_exposed_port(HTTP_PORT.tcp()) + .with_env_var("NEO4J_AUTH", "none") + .start() + .await + .expect("Failed to start Neo 4J container"); + + let port = container.get_host_port_ipv4(BOLT_PORT).await.unwrap(); + let host = container.get_host().await.unwrap().to_string(); + + let neo4j = neo4rs::Graph::new(format!("neo4j://{host}:{port}"), "user", "password") + .await + .unwrap(); + + let foo = Foo { + name: "Alice".into(), + bar: 42, + }; + + triple::insert_many(&neo4j, &BlockMetadata::default(), "ROOT", "0") + .triples(vec![ + Triple::new("from_id", "name", "FooFrom"), + Triple::new("to_id", "name", "FooTo"), + Triple::new("relation_type", "name", "FooRelation"), + Triple::new(system_ids::TYPES_ATTRIBUTE, "name", "Types"), + ]) + .send() + .await + .expect("Failed to insert triples"); + + let relation = Relation::new("rel_abc", "from_id", "to_id", "relation_type", 0u64, foo); + + relation + .clone() + .insert(&neo4j, &BlockMetadata::default(), "ROOT", "0") + .send() + .await + .expect("Failed to insert relation"); + + let stream = FindManyQuery::new(&neo4j) + .space_id(prop_filter::value::("ROOT")) + .filter( + RelationFilter::default() + .relation_type(EntityFilter::default().id(prop_filter::value("relation_type"))), + ) + .limit(1) + .send() + .await + .expect("Failed to find relations"); + + pin_mut!(stream); + + let found_relation: Relation = stream + .next() + .await + .expect("Failed to get next relation") + .expect("Relation not found"); + + assert_eq!(found_relation.relation.id, relation.relation.id); + assert_eq!(found_relation.attributes, relation.attributes); + } +} diff --git a/grc20-core/src/mapping/relation_node.rs b/grc20-core/src/mapping/relation_node.rs index 0470b0e..7b62bcb 100644 --- a/grc20-core/src/mapping/relation_node.rs +++ b/grc20-core/src/mapping/relation_node.rs @@ -7,9 +7,10 @@ use serde::Deserialize; use crate::{block::BlockMetadata, error::DatabaseError, indexer_ids, pb, system_ids}; use super::{ - attributes, entity_node, + attributes, + entity_node::{self, SystemProperties}, query_utils::{PropFilter, Query, QueryPart, QueryStream, VersionFilter}, - triple, AttributeNode, Attributes, Triple, Value, + triple, AttributeNode, Attributes, EntityFilter, Triple, Value, }; #[derive(Clone, Debug, Deserialize, PartialEq)] @@ -20,6 +21,10 @@ pub struct RelationNode { pub to: String, pub relation_type: String, pub index: AttributeNode, + + /// System properties + #[serde(flatten)] + pub system_properties: SystemProperties, } impl RelationNode { @@ -36,6 +41,7 @@ impl RelationNode { to: to.into(), relation_type: relation_type.into(), index: AttributeNode::new(system_ids::RELATION_INDEX, index), + system_properties: SystemProperties::default(), } } @@ -132,6 +138,7 @@ impl From for RelationNode { to: relation.to_entity, relation_type: relation.r#type, index: AttributeNode::new(system_ids::RELATION_INDEX, relation.index), + system_properties: SystemProperties::default(), } } } @@ -635,7 +642,7 @@ impl FindOneQuery { .merge(self.space_version.clone().into_query_part("r_to")) .merge(self.space_version.clone().into_query_part("r_rt")) .merge(self.space_version.into_query_part("r_index")) - .return_clause("e{.id, from: from.id, to: to.id, relation_type: rt.id, index: index}") + .return_clause("e{.*, from: from.id, to: to.id, relation_type: rt.id, index: index}") 
.order_by_clause("index.value") .params("id", self.id) .params("space_id", self.space_id) @@ -664,8 +671,8 @@ pub struct FindManyQuery { to_id: Option>, relation_type: Option>, - from_: Option, - to_: Option, + from_: Option, + to_: Option, space_id: Option>, space_version: VersionFilter, @@ -711,12 +718,12 @@ impl FindManyQuery { self } - pub fn from_(mut self, from_: entity_node::EntityFilter) -> Self { + pub fn from_(mut self, from_: EntityFilter) -> Self { self.from_ = Some(from_); self } - pub fn to_(mut self, to_: entity_node::EntityFilter) -> Self { + pub fn to_(mut self, to_: EntityFilter) -> Self { self.to_ = Some(to_); self } @@ -763,7 +770,7 @@ impl FindManyQuery { system_ids::RELATION_INDEX )) .merge(self.space_version.clone().into_query_part("r_index")) - .return_clause("e{.id, from: from.id, to: to.id, relation_type: rt.id, index: index}") + .return_clause("e{.*, from: from.id, to: to.id, relation_type: rt.id, index: index}") .order_by_clause("index.value") .limit(self.limit); @@ -869,12 +876,20 @@ mod tests { .await .unwrap(); + let block = &BlockMetadata::default(); + neo4j.run(neo4rs::query(&format!( r#" CREATE (alice:Entity {{id: "alice"}}) CREATE (bob:Entity {{id: "bob"}}) CREATE (knows:Entity {{id: "knows"}}) CREATE (r:Entity:Relation {{id: "abc"}}) + SET r += {{ + `{CREATED_AT}`: datetime($block_timestamp), + `{CREATED_AT_BLOCK}`: $block_number, + `{UPDATED_AT}`: datetime($block_timestamp), + `{UPDATED_AT_BLOCK}`: $block_number + }} CREATE (r) -[:`{FROM_ENTITY}` {{space_id: "ROOT", min_version: "0"}}]-> (alice) CREATE (r) -[:`{TO_ENTITY}` {{space_id: "ROOT", min_version: "0"}}]-> (bob) CREATE (r) -[:`{RELATION_TYPE}` {{space_id: "ROOT", min_version: "0"}}]-> (knows) @@ -884,7 +899,14 @@ mod tests { TO_ENTITY = system_ids::RELATION_TO_ATTRIBUTE, RELATION_TYPE = system_ids::RELATION_TYPE_ATTRIBUTE, INDEX = system_ids::RELATION_INDEX, - ))) + CREATED_AT = indexer_ids::CREATED_AT_TIMESTAMP, + CREATED_AT_BLOCK = indexer_ids::CREATED_AT_BLOCK, + UPDATED_AT = indexer_ids::UPDATED_AT_TIMESTAMP, + UPDATED_AT_BLOCK = indexer_ids::UPDATED_AT_BLOCK, + )) + .param("block_timestamp", block.timestamp.to_rfc3339()) + .param("block_number", block.block_number.to_string()) + ) .await .expect("Failed to insert data"); @@ -969,6 +991,8 @@ mod tests { .await .unwrap(); + let block = &BlockMetadata::default(); + neo4j.run(neo4rs::query(&format!( r#" CREATE (alice:Entity {{id: "alice"}}) @@ -976,11 +1000,23 @@ mod tests { CREATE (charlie:Entity {{id: "charlie"}}) CREATE (knows:Entity {{id: "knows"}}) CREATE (r1:Entity:Relation {{id: "abc"}}) + SET r1 += {{ + `{CREATED_AT}`: datetime($block_timestamp), + `{CREATED_AT_BLOCK}`: $block_number, + `{UPDATED_AT}`: datetime($block_timestamp), + `{UPDATED_AT_BLOCK}`: $block_number + }} CREATE (r1) -[:`{FROM_ENTITY}` {{space_id: "ROOT", min_version: "0"}}]-> (alice) CREATE (r1) -[:`{TO_ENTITY}` {{space_id: "ROOT", min_version: "0"}}]-> (bob) CREATE (r1) -[:`{RELATION_TYPE}` {{space_id: "ROOT", min_version: "0"}}]-> (knows) CREATE (r1) -[:ATTRIBUTE {{space_id: "ROOT", min_version: "0"}}]-> (:Attribute {{id: "{INDEX}", value: "0", value_type: "TEXT"}}) CREATE (r2:Entity:Relation {{id: "dev"}}) + SET r2 += {{ + `{CREATED_AT}`: datetime($block_timestamp), + `{CREATED_AT_BLOCK}`: $block_number, + `{UPDATED_AT}`: datetime($block_timestamp), + `{UPDATED_AT_BLOCK}`: $block_number + }} CREATE (r2) -[:`{FROM_ENTITY}` {{space_id: "ROOT", min_version: "0"}}]-> (alice) CREATE (r2) -[:`{TO_ENTITY}` {{space_id: "ROOT", min_version: "0"}}]-> (charlie) CREATE (r2) 
-[:`{RELATION_TYPE}` {{space_id: "ROOT", min_version: "0"}}]-> (knows) @@ -990,7 +1026,14 @@ mod tests { TO_ENTITY = system_ids::RELATION_TO_ATTRIBUTE, RELATION_TYPE = system_ids::RELATION_TYPE_ATTRIBUTE, INDEX = system_ids::RELATION_INDEX, - ))) + CREATED_AT = indexer_ids::CREATED_AT_TIMESTAMP, + CREATED_AT_BLOCK = indexer_ids::CREATED_AT_BLOCK, + UPDATED_AT = indexer_ids::UPDATED_AT_TIMESTAMP, + UPDATED_AT_BLOCK = indexer_ids::UPDATED_AT_BLOCK, + )) + .param("block_timestamp", block.timestamp.to_rfc3339()) + .param("block_number", block.block_number.to_string()) + ) .await .expect("Failed to insert data"); diff --git a/grc20-core/src/mapping/relation_queries.rs b/grc20-core/src/mapping/relation_queries.rs new file mode 100644 index 0000000..4fc4857 --- /dev/null +++ b/grc20-core/src/mapping/relation_queries.rs @@ -0,0 +1,71 @@ +use crate::system_ids; + +use super::{query_utils::QueryPart, EntityFilter, PropFilter}; + +#[derive(Clone, Debug, Default)] +pub struct RelationFilter { + pub(crate) id: Option>, + pub(crate) relation_type: Option, + pub(crate) from_: Option, + pub(crate) to_: Option, +} + +impl RelationFilter { + pub fn id(mut self, id: PropFilter) -> Self { + self.id = Some(id); + self + } + + pub fn relation_type(mut self, relation_type: EntityFilter) -> Self { + self.relation_type = Some(relation_type); + self + } + + pub fn from_(mut self, from_: EntityFilter) -> Self { + self.from_ = Some(from_); + self + } + + pub fn to_(mut self, to_: EntityFilter) -> Self { + self.to_ = Some(to_); + self + } + + pub fn into_query_part(self, node_var: &str) -> QueryPart { + let mut query_part = QueryPart::default() + .match_clause(format!( + "({node_var}) -[r_from:`{}`]-> (from:Entity)", + system_ids::RELATION_FROM_ATTRIBUTE + )) + .match_clause(format!( + "({node_var}) -[r_to:`{}`]-> (to:Entity)", + system_ids::RELATION_TO_ATTRIBUTE + )) + .match_clause(format!( + "({node_var}) -[r_rt:`{}`]-> (rt:Entity)", + system_ids::RELATION_TYPE_ATTRIBUTE + )) + .match_clause(format!( + r#"({node_var}) -[r_index:ATTRIBUTE]-> (index:Attribute {{id: "{}"}})"#, + system_ids::RELATION_INDEX + )); + + if let Some(id_filter) = self.id { + query_part.merge_mut(id_filter.into_query_part(node_var, "id")); + }; + + if let Some(relation_type) = self.relation_type { + query_part = query_part.merge(relation_type.into_query_part("rt")); + } + + if let Some(from_filter) = self.from_ { + query_part = query_part.merge(from_filter.into_query_part("from")); + } + + if let Some(to_filter) = self.to_ { + query_part = query_part.merge(to_filter.into_query_part("to")); + } + + query_part + } +} diff --git a/grc20-core/src/mapping/value.rs b/grc20-core/src/mapping/value.rs index 73ce273..78a10c9 100644 --- a/grc20-core/src/mapping/value.rs +++ b/grc20-core/src/mapping/value.rs @@ -6,6 +6,8 @@ use serde::Deserialize; use crate::pb; +use super::TriplesConversionError; + #[derive(Clone, Debug, Default, Deserialize, PartialEq)] pub struct Value { pub value: String, @@ -186,7 +188,7 @@ impl From> for Value { } impl TryFrom for String { - type Error = String; + type Error = TriplesConversionError; fn try_from(value: Value) -> Result { Ok(value.value) @@ -194,55 +196,68 @@ impl TryFrom for String { } impl TryFrom for i64 { - type Error = String; + type Error = TriplesConversionError; fn try_from(value: Value) -> Result { - value - .value - .parse() - .map_err(|_| format!("Failed to parse i64 value: {}", value.value)) + value.value.parse().map_err(|_| { + TriplesConversionError::InvalidValue(format!( + "Failed to parse i64 value: 
{}", + value.value + )) + }) } } impl TryFrom for u64 { - type Error = String; + type Error = TriplesConversionError; fn try_from(value: Value) -> Result { - value - .value - .parse() - .map_err(|_| format!("Failed to parse u64 value: {}", value.value)) + value.value.parse().map_err(|_| { + TriplesConversionError::InvalidValue(format!( + "Failed to parse u64 value: {}", + value.value + )) + }) } } impl TryFrom for f64 { - type Error = String; + type Error = TriplesConversionError; fn try_from(value: Value) -> Result { - value - .value - .parse() - .map_err(|_| format!("Failed to parse f64 value: {}", value.value)) + value.value.parse().map_err(|_| { + TriplesConversionError::InvalidValue(format!( + "Failed to parse f64 value: {}", + value.value + )) + }) } } impl TryFrom for bool { - type Error = String; + type Error = TriplesConversionError; fn try_from(value: Value) -> Result { - value - .value - .parse() - .map_err(|_| format!("Failed to parse bool value: {}", value.value)) + value.value.parse().map_err(|_| { + TriplesConversionError::InvalidValue(format!( + "Failed to parse bool value: {}", + value.value + )) + }) } } impl TryFrom for DateTime { - type Error = String; + type Error = TriplesConversionError; fn try_from(value: Value) -> Result { Ok(DateTime::parse_from_rfc3339(&value.value) - .map_err(|e| format!("Failed to parse DateTime value: {}", e))? + .map_err(|e| { + TriplesConversionError::InvalidValue(format!( + "Failed to parse DateTime value: {}", + e + )) + })? .with_timezone(&Utc)) } } diff --git a/grc20-core/src/pb/geo.rs b/grc20-core/src/pb/geo.rs index baf48be..e1545bd 100644 --- a/grc20-core/src/pb/geo.rs +++ b/grc20-core/src/pb/geo.rs @@ -99,9 +99,9 @@ pub struct GeoPersonalSpaceAdminPluginsCreated { /// An editor has editing and voting permissions in a DAO-based space. Editors join a space /// one of two ways: /// 1. They submit a request to join the space as an editor which goes to a vote. The editors -/// in the space vote on whether to accept the new editor. +/// in the space vote on whether to accept the new editor. /// 2. They are added as a set of initial editors when first creating the space. This allows -/// space deployers to bootstrap a set of editors on space creation. +/// space deployers to bootstrap a set of editors on space creation. #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct InitialEditorAdded { diff --git a/grc20-macros/Cargo.toml b/grc20-macros/Cargo.toml index 85d029e..3261ee2 100644 --- a/grc20-macros/Cargo.toml +++ b/grc20-macros/Cargo.toml @@ -12,6 +12,7 @@ quote = "1.0" syn = { version = "2.0", features = ["full", "extra-traits"] } darling = "0.20" tracing = "0.1.41" +stringcase = "0.4.0" [dev-dependencies] futures = "0.3.31" diff --git a/grc20-macros/src/entity.rs b/grc20-macros/src/entity.rs index 9f341dc..86ac39e 100644 --- a/grc20-macros/src/entity.rs +++ b/grc20-macros/src/entity.rs @@ -247,7 +247,7 @@ pub(crate) fn generate_query_impls(opts: &EntityOpts) -> TokenStream2 { let find_one_fn = quote! { /// Find a person by its id pub fn find_one( - neo4j: &neo4rs::Graph, + neo4j: &grc20_core::neo4rs::Graph, id: impl Into, space_id: impl Into, ) -> FindOneQuery { @@ -257,7 +257,7 @@ pub(crate) fn generate_query_impls(opts: &EntityOpts) -> TokenStream2 { let find_many_fn = quote! 
{ /// Find multiple persons with filters - pub fn find_many(neo4j: &neo4rs::Graph, space_id: impl Into) -> FindManyQuery { + pub fn find_many(neo4j: &grc20_core::neo4rs::Graph, space_id: impl Into) -> FindManyQuery { FindManyQuery::new(neo4j.clone(), space_id.into()) } }; @@ -265,14 +265,14 @@ pub(crate) fn generate_query_impls(opts: &EntityOpts) -> TokenStream2 { let find_one_query_struct = quote! { /// Query to find a single person pub struct FindOneQuery { - neo4j: neo4rs::Graph, + neo4j: grc20_core::neo4rs::Graph, id: String, space_id: String, version: Option, } impl FindOneQuery { - fn new(neo4j: neo4rs::Graph, id: String, space_id: String) -> Self { + fn new(neo4j: grc20_core::neo4rs::Graph, id: String, space_id: String) -> Self { Self { neo4j, id, @@ -285,6 +285,11 @@ pub(crate) fn generate_query_impls(opts: &EntityOpts) -> TokenStream2 { self.version = Some(version.into()); self } + + pub fn version_opt(mut self, version: Option) -> Self { + self.version = version; + self + } } impl grc20_core::mapping::query_utils::Query>> for FindOneQuery { @@ -412,7 +417,8 @@ pub(crate) fn generate_query_impls(opts: &EntityOpts) -> TokenStream2 { let find_many_query_struct = quote! { /// Query to find multiple persons with filters pub struct FindManyQuery { - neo4j: neo4rs::Graph, + neo4j: grc20_core::neo4rs::Graph, + id: Option>, #(#find_many_fields)* space_id: String, version: Option, @@ -421,13 +427,14 @@ pub(crate) fn generate_query_impls(opts: &EntityOpts) -> TokenStream2 { } impl FindManyQuery { - fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { + fn new(neo4j: grc20_core::neo4rs::Graph, space_id: String) -> Self { let mut query = Self { neo4j, - space_id, + id: None, #( #field_names: None, )* + space_id, version: None, limit: 100, skip: None, @@ -436,6 +443,11 @@ pub(crate) fn generate_query_impls(opts: &EntityOpts) -> TokenStream2 { query } + pub fn id(mut self, id: grc20_core::mapping::query_utils::PropFilter) -> Self { + self.id = Some(id); + self + } + #(#find_many_methods)* pub fn version(mut self, version: impl Into) -> Self { @@ -443,6 +455,11 @@ pub(crate) fn generate_query_impls(opts: &EntityOpts) -> TokenStream2 { self } + pub fn version_opt(mut self, version: Option) -> Self { + self.version = version; + self + } + /// Limit the number of results pub fn limit(mut self, limit: usize) -> Self { self.limit = limit; diff --git a/grc20-macros/src/lib.rs b/grc20-macros/src/lib.rs index dc5ef87..3a35ebe 100644 --- a/grc20-macros/src/lib.rs +++ b/grc20-macros/src/lib.rs @@ -52,6 +52,9 @@ pub fn entity(_args: TokenStream, input: TokenStream) -> TokenStream { } } + // let struct_name = input.ident.clone(); + // let snake_case_name = syn::Ident::new(&stringcase::snake_case(&input.ident.to_string()), input.ident.span()); + let _impl_builder = entity::generate_builder_impl(&opts); quote! 
{ @@ -60,7 +63,13 @@ pub fn entity(_args: TokenStream, input: TokenStream) -> TokenStream { #impl_from_attributes #impl_into_attributes + + // pub mod #snake_case_name { + // use super::*; + // #impl_query + // } #impl_query + // #impl_builder } .into() diff --git a/grc20-macros/tests/entity.rs b/grc20-macros/tests/entity.rs index ea0fd72..1cf3d26 100644 --- a/grc20-macros/tests/entity.rs +++ b/grc20-macros/tests/entity.rs @@ -1,5 +1,5 @@ use grc20_core::{ - mapping::{triple, Attributes, FromAttributes, IntoAttributes, PropFilter}, + mapping::{triple, Attributes, FromAttributes, IntoAttributes, PropFilter, Query, QueryStream}, neo4rs, system_ids, }; @@ -155,7 +155,7 @@ async fn test_find_one() { .expect("Failed to find entity") .expect("Entity not found"); - assert_eq!(found_entity.id, "abc"); + assert_eq!(found_entity.id(), "abc"); assert_eq!(found_entity.attributes.name, person.name); assert_eq!(found_entity.attributes.nickname, person.nickname); assert_eq!(found_entity.attributes.age, person.age); @@ -247,7 +247,7 @@ async fn test_find_many() { .expect("Failed to get next entity") .expect("Entity not found"); - assert_eq!(found_entity.id, "abc"); + assert_eq!(found_entity.id(), "abc"); assert_eq!(found_entity.attributes.name, person.name); assert_eq!(found_entity.attributes.nickname, person.nickname); assert_eq!(found_entity.attributes.age, person.age); diff --git a/grc20-sdk/Cargo.toml b/grc20-sdk/Cargo.toml index fed9c43..4914e2d 100644 --- a/grc20-sdk/Cargo.toml +++ b/grc20-sdk/Cargo.toml @@ -4,9 +4,14 @@ version = "0.1.0" edition = "2021" [dependencies] +async-stream = "0.3.6" chrono = { version = "0.4.40", features = ["serde"] } futures = "0.3.31" grc20-core = { version = "0.1.0", path = "../grc20-core" } serde = { version = "1.0.219", features = ["derive"] } serde_json = "1.0.140" web3-utils = { version = "0.1.0", path = "../web3-utils" } + +[dev-dependencies] +tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } +tokio = { version = "1.0", features = ["full"] } diff --git a/grc20-sdk/examples/space_hierarchy.rs b/grc20-sdk/examples/space_hierarchy.rs new file mode 100644 index 0000000..bfa2869 --- /dev/null +++ b/grc20-sdk/examples/space_hierarchy.rs @@ -0,0 +1,33 @@ +use futures::{pin_mut, StreamExt}; +use grc20_core::{mapping::query_utils::QueryStream, neo4rs}; +use grc20_sdk::models::space; + +#[tokio::main] +async fn main() -> Result<(), Box> { + // Get space ID from command line args + let space_id = std::env::args() + .nth(1) + .expect("Please provide a space ID as the first argument"); + + // Initialize Neo4j connection + let neo4j = neo4rs::Graph::new("bolt://localhost:7687", "neo4j", "password").await?; + + // Create and execute subspaces query using Space helper + let query = space::subspaces(&neo4j, &space_id) + .max_depth(Some(10)) // Get all subspaces at any depth + .limit(100); // Limit to 100 results + + let stream = query.send().await?; + pin_mut!(stream); + + // Print each subspace ID as we receive it + println!("Found subspaces:"); + while let Some(result) = stream.next().await { + match result { + Ok((subspace_id, _)) => println!(" {}", subspace_id), + Err(e) => eprintln!("Error getting subspace: {}", e), + } + } + + Ok(()) +} diff --git a/grc20-sdk/examples/space_schema.rs b/grc20-sdk/examples/space_schema.rs new file mode 100644 index 0000000..004e007 --- /dev/null +++ b/grc20-sdk/examples/space_schema.rs @@ -0,0 +1,121 @@ +use futures::{pin_mut, StreamExt, TryStreamExt}; +use grc20_core::{mapping::query_utils::QueryStream, neo4rs, 
system_ids}; +use grc20_sdk::models::{property, space}; +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; + +#[tokio::main] +async fn main() -> Result<(), Box> { + tracing_subscriber::registry() + .with( + tracing_subscriber::EnvFilter::try_from_default_env() + .unwrap_or_else(|_| "stdout=info".into()), + ) + .with(tracing_subscriber::fmt::layer()) + .init(); + + // Get space ID from command line args + let space_id = std::env::args() + .nth(1) + .expect("Please provide a space ID as the first argument"); + + // Initialize Neo4j connection + let neo4j = neo4rs::Graph::new("bolt://localhost:7687", "neo4j", "password").await?; + + let types = space::types(&neo4j, &space_id).strict(false).send().await?; + + pin_mut!(types); + + while let Some(type_) = types.next().await { + match type_ { + Ok(type_) => { + let name = property::get_triple( + &neo4j, + system_ids::NAME_ATTRIBUTE, + &type_.id, + &space_id, + None, + false, + ) + .await? + .map(|triple| triple.value.value); + + println!( + "Type: {} ({})", + name.unwrap_or("null".to_string()), + type_.id + ); + + let properties = property::get_outbound_relations( + &neo4j, + system_ids::PROPERTIES, + type_.id, + &space_id, + None, + None, + None, + false, + ) + .await?; + + pin_mut!(properties); + + while let Some(property) = properties.next().await { + match property { + Ok(property) => { + let name = property::get_triple( + &neo4j, + system_ids::NAME_ATTRIBUTE, + &property.to, + &space_id, + None, + false, + ) + .await? + .map(|triple| triple.value.value); + + let value_type = property::get_outbound_relations( + &neo4j, + system_ids::VALUE_TYPE_ATTRIBUTE, + &property.to, + &space_id, + None, + Some(1), + None, + false, + ) + .await? + .try_collect::>() + .await?; + + let value_type_name = if let Some(value_type) = value_type.first() { + property::get_triple( + &neo4j, + system_ids::NAME_ATTRIBUTE, + &value_type.to, + &space_id, + None, + false, + ) + .await? 
+ .map(|triple| triple.value.value) + } else { + None + }; + + println!( + " Property: {}: {} ({})", + name.unwrap_or("null".to_string()), + value_type_name.unwrap_or("null".to_string()), + property.to + ); + } + Err(e) => eprintln!("Error: {:?}", e), + } + } + } + Err(e) => eprintln!("Error: {:?}", e), + } + } + + Ok(()) +} diff --git a/grc20-sdk/src/models/account.rs b/grc20-sdk/src/models/account.rs index c7413e5..f97b84f 100644 --- a/grc20-sdk/src/models/account.rs +++ b/grc20-sdk/src/models/account.rs @@ -1,6 +1,6 @@ use web3_utils::checksum_address; -use grc20_core::{ids, mapping::Entity, neo4rs, system_ids}; +use grc20_core::{ids, mapping::Entity, system_ids}; #[derive(Clone, PartialEq)] #[grc20_core::entity] @@ -10,20 +10,18 @@ pub struct Account { pub address: String, } -impl Account { - pub fn gen_id(address: &str) -> String { - ids::create_id_from_unique_string(checksum_address(address)) - } +pub fn new_id(address: &str) -> String { + ids::create_id_from_unique_string(checksum_address(address)) +} - pub fn new(address: String) -> Entity { - let checksummed_address = checksum_address(&address); +pub fn new(address: String) -> Entity { + let checksummed_address = checksum_address(&address); - Entity::new( - Self::gen_id(&checksummed_address), - Self { - address: checksummed_address, - }, - ) - .with_type(system_ids::ACCOUNT_TYPE) - } + Entity::new( + new_id(&checksummed_address), + Account { + address: checksummed_address, + }, + ) + .with_type(system_ids::ACCOUNT_TYPE) } diff --git a/grc20-sdk/src/models/base_entity.rs b/grc20-sdk/src/models/base_entity.rs new file mode 100644 index 0000000..37903b1 --- /dev/null +++ b/grc20-sdk/src/models/base_entity.rs @@ -0,0 +1,64 @@ +use futures::TryStreamExt; +use grc20_core::{ + entity::Entity, + error::DatabaseError, + mapping::{prop_filter, EntityFilter, RelationFilter}, + neo4rs, relation, system_ids, +}; + +#[grc20_core::entity] +pub struct BaseEntity { + #[grc20(attribute = system_ids::NAME_ATTRIBUTE)] + name: Option, + + #[grc20(attribute = system_ids::DESCRIPTION_ATTRIBUTE)] + description: Option, +} + +pub async fn blocks( + neo4j: &neo4rs::Graph, + entity_id: impl Into, + space_id: impl Into, + version: Option, + _strict: bool, +) -> Result>, DatabaseError> { + // TODO: Implement aggregation + relation::FindManyQuery::new(neo4j) + .filter( + RelationFilter::default() + .from_(EntityFilter::default().id(prop_filter::value(entity_id.into()))) + .relation_type(EntityFilter::default().id(prop_filter::value(system_ids::BLOCKS))), + ) + .space_id(prop_filter::value(space_id.into())) + .version(version) + .select_to() + .send() + .await? + .try_collect::>() + .await +} + +pub async fn types( + neo4j: &neo4rs::Graph, + entity_id: impl Into, + space_id: impl Into, + version: Option, + _strict: bool, +) -> Result>, DatabaseError> { + // TODO: Implement aggregation + relation::FindManyQuery::new(neo4j) + .filter( + RelationFilter::default() + .from_(EntityFilter::default().id(prop_filter::value(entity_id.into()))) + .relation_type( + EntityFilter::default().id(prop_filter::value(system_ids::TYPES_ATTRIBUTE)), + ), + ) + .space_id(prop_filter::value(space_id.into())) + .version(version) + .select_to() + .send() + .await? 
+ .try_collect::>() + .await +} diff --git a/grc20-sdk/src/models/edit.rs b/grc20-sdk/src/models/edit.rs index 35da69a..49a8bdc 100644 --- a/grc20-sdk/src/models/edit.rs +++ b/grc20-sdk/src/models/edit.rs @@ -1,7 +1,7 @@ use grc20_core::{ ids, indexer_ids, mapping::{Entity, Relation}, - neo4rs, system_ids, + system_ids, }; #[grc20_core::entity] diff --git a/grc20-sdk/src/models/mod.rs b/grc20-sdk/src/models/mod.rs index db4bc1c..b0e45e7 100644 --- a/grc20-sdk/src/models/mod.rs +++ b/grc20-sdk/src/models/mod.rs @@ -1,17 +1,21 @@ pub mod account; +pub mod base_entity; pub mod cursor; pub mod edit; pub mod editor; pub mod member; +pub mod property; pub mod proposal; pub mod space; pub mod vote; pub use account::Account; +pub use base_entity::BaseEntity; pub use cursor::Cursor; pub use edit::Edit; pub use editor::SpaceEditor; pub use member::SpaceMember; +pub use property::Property; pub use proposal::{ AddEditorProposal, AddMemberProposal, AddSubspaceProposal, EditProposal, Proposal, ProposalCreator, Proposals, RemoveEditorProposal, RemoveMemberProposal, RemoveSubspaceProposal, diff --git a/grc20-sdk/src/models/property.rs b/grc20-sdk/src/models/property.rs new file mode 100644 index 0000000..4a13532 --- /dev/null +++ b/grc20-sdk/src/models/property.rs @@ -0,0 +1,341 @@ +use futures::{Stream, TryStreamExt}; +use grc20_core::{ + entity::Entity, + error::DatabaseError, + mapping::{ + prop_filter, relation_node, triple, Query, QueryStream, TriplesConversionError, Value, + }, + neo4rs, system_ids, +}; + +use crate::models::space::ParentSpacesQuery; + +use super::{base_entity, space::SubspacesQuery, BaseEntity}; + +#[grc20_core::entity] +#[grc20(schema_type = system_ids::ATTRIBUTE)] +pub struct Property { + #[grc20(attribute = system_ids::AGGREGATION_DIRECTION)] + pub aggregation_direction: Option, + + #[grc20(attribute = system_ids::NAME_ATTRIBUTE)] + pub name: Option, + + #[grc20(attribute = system_ids::DESCRIPTION_ATTRIBUTE)] + pub description: Option, + + #[grc20(attribute = system_ids::COVER_ATTRIBUTE)] + pub cover: Option, +} + +pub async fn value_type( + neo4j: &neo4rs::Graph, + property_id: impl Into, + space_id: impl Into, + space_version: Option, + strict: bool, +) -> Result>, DatabaseError> { + let property_id = property_id.into(); + let space_id = space_id.into(); + + let value_type_rel = get_outbound_relations( + neo4j, + system_ids::VALUE_TYPE_ATTRIBUTE, + &property_id, + &space_id, + space_version, + Some(1), + None, + strict, + ) + .await? + .try_collect::>() + .await?; + + if let Some(value_type_rel) = value_type_rel.first() { + base_entity::find_one(neo4j, &value_type_rel.to, &space_id) + .send() + .await + } else { + Ok(None) + } +} + +pub async fn relation_value_type( + neo4j: &neo4rs::Graph, + property_id: impl Into, + space_id: impl Into, + space_version: Option, + strict: bool, +) -> Result>, DatabaseError> { + let property_id = property_id.into(); + let space_id = space_id.into(); + + let value_type_rel = get_outbound_relations( + neo4j, + system_ids::RELATION_VALUE_RELATIONSHIP_TYPE, + &property_id, + &space_id, + space_version, + Some(1), + None, + strict, + ) + .await? 
+ .try_collect::>() + .await?; + + if let Some(value_type_rel) = value_type_rel.first() { + base_entity::find_one(neo4j, &value_type_rel.to, &space_id) + .send() + .await + } else { + Ok(None) + } +} + +#[derive(Clone, Debug)] +pub enum AggregationDirection { + Up, + Down, + Bidirectional, +} + +impl From for Value { + fn from(direction: AggregationDirection) -> Self { + match direction { + AggregationDirection::Up => Value::text("Up"), + AggregationDirection::Down => Value::text("Down"), + AggregationDirection::Bidirectional => Value::text("Bidirectional"), + } + } +} + +impl TryFrom for AggregationDirection { + type Error = TriplesConversionError; + + fn try_from(value: Value) -> Result { + match value.value.as_str() { + "Up" => Ok(AggregationDirection::Up), + "Down" => Ok(AggregationDirection::Down), + "Bidirectional" => Ok(AggregationDirection::Bidirectional), + _ => Err(TriplesConversionError::InvalidValue(format!( + "Invalid aggregation direction: {}", + value.value + ))), + } + } +} + +async fn attribute_aggregation_direction( + neo4j: &neo4rs::Graph, + space_id: &str, + attribute_id: &str, +) -> Result, DatabaseError> { + // Hardcoded for now as the aggregation direction triples are not yet present + // in the knowledge graph + // Might be able to change this to actual queries later + match attribute_id { + // This is the "base case", unclear if it could be replaced with a query even + // if present in the knowledge graph + system_ids::AGGREGATION_DIRECTION => return Ok(Some(AggregationDirection::Down)), + + // These are hardcoded for now since they are not yet present in the knowledge graph + system_ids::NAME_ATTRIBUTE => return Ok(Some(AggregationDirection::Down)), + system_ids::DESCRIPTION_ATTRIBUTE => return Ok(Some(AggregationDirection::Down)), + system_ids::PROPERTIES => return Ok(Some(AggregationDirection::Down)), + system_ids::RELATION_VALUE_RELATIONSHIP_TYPE => { + return Ok(Some(AggregationDirection::Down)) + } + system_ids::VALUE_TYPE_ATTRIBUTE => return Ok(Some(AggregationDirection::Down)), + _ => (), + } + + // Get all spaces to query (just the given space if strict, or all parent spaces if not) + let mut spaces_to_query = vec![(space_id.to_string(), 0)]; + + let parent_spaces = ParentSpacesQuery::new(neo4j.clone(), space_id.to_string()) + .max_depth(None) + .send() + .await? + .try_collect::>() + .await?; + + spaces_to_query.extend(parent_spaces); + + // Note: This may not be necessary since the parent spaces are collected using DFS + // (i.e. 
the parent spaces *should* be sorted by depth) + spaces_to_query.sort_by_key(|(_, depth)| *depth); + + for (space_id, _) in spaces_to_query { + let maybe_triple = triple::find_one( + neo4j, + system_ids::AGGREGATION_DIRECTION, + attribute_id, + space_id, + None, + ) + .send() + .await?; + + if let Some(triple) = maybe_triple { + let direction = AggregationDirection::try_from(triple.value)?; + return Ok(Some(direction)); + } + } + + Ok(None) +} + +// TODO: Find a better place for this function +pub async fn get_triple( + neo4j: &neo4rs::Graph, + property_id: impl Into, + entity_id: impl Into, + space_id: impl Into, + space_version: Option, + strict: bool, +) -> Result, DatabaseError> { + let space_id = space_id.into(); + let entity_id = entity_id.into(); + let property_id = property_id.into(); + + let mut spaces = spaces_for_property(neo4j, &property_id, &space_id, strict).await?; + + spaces.sort_by_key(|(_, depth)| *depth); + + for (space_id, _) in spaces { + let maybe_triple = triple::find_one( + neo4j, + &property_id, + &entity_id, + &space_id, + space_version.clone(), + ) + .send() + .await?; + + if let Some(triple) = maybe_triple { + return Ok(Some(triple)); + } + } + + Ok(None) +} + +#[allow(clippy::too_many_arguments)] +pub async fn get_outbound_relations( + neo4j: &neo4rs::Graph, + property_id: impl Into, + entity_id: impl Into, + space_id: impl Into, + space_version: Option, + limit: Option, + skip: Option, + strict: bool, +) -> Result>, DatabaseError> { + let neo4j = neo4j.clone(); + let space_id = space_id.into(); + let entity_id = entity_id.into(); + let property_id = property_id.into(); + + let spaces = spaces_for_property(&neo4j, &property_id, &space_id, strict) + .await? + .into_iter() + .map(|(space_id, _)| space_id) + .collect::>(); + // spaces.sort_by_key(|(_, depth)| *depth); + + // TODO: Optimization: We can accept limit/skip parameters here and pass them to the query. + // By counting the number of results we can determine if we need to continue to the next space + // or if we have enough results already. + // let stream = try_stream! { + // for (space_id, _) in spaces { + // let relations_stream = relation_node::FindManyQuery::new(&neo4j) + // .from_id(prop_filter::value(entity_id.clone())) + // .space_id(prop_filter::value(space_id.clone())) + // .relation_type(prop_filter::value(property_id.clone())) + // .version(space_version.clone()) + // .send() + // .await?; + + // pin_mut!(relations_stream); + + // while let Some(relation) = relations_stream.try_next().await? { + // yield relation; + // } + // } + // }; + + relation_node::FindManyQuery::new(&neo4j) + .from_id(prop_filter::value(entity_id.clone())) + .space_id(prop_filter::value_in(spaces)) + .relation_type(prop_filter::value(property_id.clone())) + .version(space_version.clone()) + .limit(limit.unwrap_or(100)) + .skip(skip.unwrap_or(0)) + .send() + .await +} + +/// Returns the spaces from which the property is inherited +async fn spaces_for_property( + neo4j: &neo4rs::Graph, + property_id: impl Into, + space_id: impl Into, + strict: bool, +) -> Result, DatabaseError> { + let space_id = space_id.into(); + let property_id = property_id.into(); + + let mut spaces = vec![(space_id.clone(), 0)]; + + if strict { + return Ok(spaces); + } + + match attribute_aggregation_direction(neo4j, &space_id, &property_id).await? { + Some(AggregationDirection::Up) => { + let subspaces = SubspacesQuery::new(neo4j.clone(), space_id.clone()) + .max_depth(None) + .send() + .await? 
+ .try_collect::>() + .await?; + + spaces.extend(subspaces); + Ok(spaces) + } + Some(AggregationDirection::Down) => { + let parent_spaces = ParentSpacesQuery::new(neo4j.clone(), space_id.clone()) + .max_depth(None) + .send() + .await? + .try_collect::>() + .await?; + + spaces.extend(parent_spaces); + Ok(spaces) + } + Some(AggregationDirection::Bidirectional) => { + let subspaces = SubspacesQuery::new(neo4j.clone(), space_id.clone()) + .max_depth(None) + .send() + .await? + .try_collect::>() + .await?; + + let parent_spaces = ParentSpacesQuery::new(neo4j.clone(), space_id.clone()) + .max_depth(None) + .send() + .await? + .try_collect::>() + .await?; + + spaces.extend(subspaces); + spaces.extend(parent_spaces); + Ok(spaces) + } + None => Ok(spaces), + } +} diff --git a/grc20-sdk/src/models/proposal.rs b/grc20-sdk/src/models/proposal.rs index 0f57737..6b25106 100644 --- a/grc20-sdk/src/models/proposal.rs +++ b/grc20-sdk/src/models/proposal.rs @@ -12,7 +12,7 @@ use grc20_core::{ attributes::{FromAttributes, IntoAttributes}, entity, query_utils::{AttributeFilter, PropFilter, QueryStream}, - Entity, Relation, Value, + Entity, Relation, TriplesConversionError, Value, }, neo4rs, pb, }; @@ -165,7 +165,7 @@ impl From for Value { } impl TryFrom for ProposalStatus { - type Error = String; + type Error = TriplesConversionError; fn try_from(value: Value) -> Result { match value.value.as_str() { @@ -174,7 +174,10 @@ impl TryFrom for ProposalStatus { "Rejected" => Ok(Self::Rejected), "Canceled" => Ok(Self::Canceled), "Executed" => Ok(Self::Executed), - _ => Err(format!("Invalid proposal status: {}", value.value)), + _ => Err(TriplesConversionError::InvalidValue(format!( + "Invalid proposal status: {}", + value.value + ))), } } } diff --git a/grc20-sdk/src/models/space.rs b/grc20-sdk/src/models/space.rs deleted file mode 100644 index e456179..0000000 --- a/grc20-sdk/src/models/space.rs +++ /dev/null @@ -1,697 +0,0 @@ -use futures::{pin_mut, Stream, StreamExt}; -use serde::{Deserialize, Serialize}; -use web3_utils::checksum_address; - -use crate::models::Account; -use grc20_core::{ - block::BlockMetadata, - error::DatabaseError, - ids, indexer_ids, - mapping::{ - entity, - entity_node::{self, EntityFilter}, - prop_filter, - query_utils::{AttributeFilter, PropFilter, Query, QueryStream, TypesFilter}, - relation, relation_node, Entity, EntityNode, Relation, Value, - }, - neo4rs, network_ids, system_ids, -}; - -#[derive(Clone)] -#[grc20_core::entity] -#[grc20(schema_type = system_ids::SPACE_TYPE)] -pub struct Space { - #[grc20(attribute = system_ids::NETWORK_ATTRIBUTE)] - pub network: String, - - #[grc20(attribute = indexer_ids::SPACE_GOVERNANCE_TYPE)] - pub governance_type: SpaceGovernanceType, - - /// The address of the space's DAO contract. - #[grc20(attribute = indexer_ids::SPACE_DAO_ADDRESS)] - pub dao_contract_address: String, - - /// The address of the space plugin contract. - #[grc20(attribute = indexer_ids::SPACE_PLUGIN_ADDRESS)] - pub space_plugin_address: Option, - - /// The address of the voting plugin contract. - #[grc20(attribute = indexer_ids::SPACE_VOTING_PLUGIN_ADDRESS)] - pub voting_plugin_address: Option, - - /// The address of the member access plugin contract. - #[grc20(attribute = indexer_ids::SPACE_MEMBER_PLUGIN_ADDRESS)] - pub member_access_plugin: Option, - - /// The address of the personal space admin plugin contract. 
- #[grc20(attribute = indexer_ids::SPACE_PERSONAL_PLUGIN_ADDRESS)] - pub personal_space_admin_plugin: Option, -} - -impl Space { - pub fn gen_id(network: &str, address: &str) -> String { - ids::create_id_from_unique_string(format!("{network}:{}", checksum_address(address))) - } - - pub fn builder(id: &str, dao_contract_address: &str) -> SpaceBuilder { - SpaceBuilder::new(id, dao_contract_address) - } - - /// Find a space by its DAO contract address. - pub async fn find_by_dao_address( - neo4j: &neo4rs::Graph, - dao_contract_address: &str, - ) -> Result>, DatabaseError> { - entity::find_one( - neo4j, - Space::gen_id(network_ids::GEO, dao_contract_address), - indexer_ids::INDEXER_SPACE_ID, - None, - ) - .send() - .await - } - - pub async fn find_entity_by_dao_address( - neo4j: &neo4rs::Graph, - dao_contract_address: &str, - ) -> Result, DatabaseError> { - entity_node::find_one(neo4j, Space::gen_id(network_ids::GEO, dao_contract_address)) - .send() - .await - } - - /// Find a space by its space plugin address. - pub async fn find_by_space_plugin_address( - neo4j: &neo4rs::Graph, - space_plugin_address: &str, - ) -> Result>, DatabaseError> { - let stream = entity::find_many(neo4j, indexer_ids::INDEXER_SPACE_ID, None) - .attribute( - AttributeFilter::new(indexer_ids::SPACE_PLUGIN_ADDRESS) - .value(PropFilter::default().value(checksum_address(space_plugin_address))), - ) - .limit(1) - .send() - .await?; - - pin_mut!(stream); - - stream.next().await.transpose() - } - - pub async fn find_entity_by_space_plugin_address( - neo4j: &neo4rs::Graph, - space_plugin_address: &str, - ) -> Result, DatabaseError> { - let stream = entity_node::find_many(neo4j) - .attribute( - AttributeFilter::new(indexer_ids::SPACE_PLUGIN_ADDRESS) - .space_id(prop_filter::value(indexer_ids::INDEXER_SPACE_ID)) - .value(prop_filter::value(checksum_address(space_plugin_address))), - ) - .limit(1) - .send() - .await?; - - pin_mut!(stream); - - stream.next().await.transpose() - } - - /// Find a space by its voting plugin address. - pub async fn find_by_voting_plugin_address( - neo4j: &neo4rs::Graph, - voting_plugin_address: &str, - ) -> Result>, DatabaseError> { - let stream = entity::find_many(neo4j, indexer_ids::INDEXER_SPACE_ID, None) - .attribute( - AttributeFilter::new(indexer_ids::SPACE_VOTING_PLUGIN_ADDRESS) - .value(PropFilter::default().value(checksum_address(voting_plugin_address))), - ) - .limit(1) - .send() - .await?; - - pin_mut!(stream); - - stream.next().await.transpose() - } - - pub async fn find_entity_by_voting_plugin_address( - neo4j: &neo4rs::Graph, - voting_plugin_address: &str, - ) -> Result, DatabaseError> { - let stream = entity_node::find_many(neo4j) - .attribute( - AttributeFilter::new(indexer_ids::SPACE_VOTING_PLUGIN_ADDRESS) - .space_id(prop_filter::value(indexer_ids::INDEXER_SPACE_ID)) - .value(prop_filter::value(checksum_address(voting_plugin_address))), - ) - .limit(1) - .send() - .await?; - - pin_mut!(stream); - - stream.next().await.transpose() - } - - /// Find a space by its member access plugin address. 
- pub async fn find_by_member_access_plugin( - neo4j: &neo4rs::Graph, - member_access_plugin: &str, - ) -> Result>, DatabaseError> { - let stream = entity::find_many(neo4j, indexer_ids::INDEXER_SPACE_ID, None) - .attribute( - AttributeFilter::new(indexer_ids::SPACE_MEMBER_PLUGIN_ADDRESS) - .value(PropFilter::default().value(checksum_address(member_access_plugin))), - ) - .limit(1) - .send() - .await?; - - pin_mut!(stream); - - stream.next().await.transpose() - } - - /// Find a space by its personal space admin plugin address. - pub async fn find_by_personal_plugin_address( - neo4j: &neo4rs::Graph, - personal_space_admin_plugin: &str, - ) -> Result>, DatabaseError> { - let stream = entity::find_many(neo4j, indexer_ids::INDEXER_SPACE_ID, None) - .attribute( - AttributeFilter::new(indexer_ids::SPACE_PERSONAL_PLUGIN_ADDRESS).value( - PropFilter::default().value(checksum_address(personal_space_admin_plugin)), - ), - ) - .limit(1) - .send() - .await?; - - pin_mut!(stream); - - stream.next().await.transpose() - } - - /// Find all members of a space - pub fn members(neo4j: &neo4rs::Graph, space_id: &str) -> SpaceMembersQuery { - SpaceMembersQuery::new(neo4j.clone(), space_id.to_string()) - } - - /// Find all editors of a space - pub fn editors(neo4j: &neo4rs::Graph, space_id: &str) -> SpaceEditorsQuery { - SpaceEditorsQuery::new(neo4j.clone(), space_id.to_string()) - } - - /// Find all parent spaces of a given space - pub fn parent_spaces(neo4j: &neo4rs::Graph, space_id: &str) -> ParentSpacesQuery { - ParentSpacesQuery::new(neo4j.clone(), space_id.to_string()) - } - - /// Find all subspaces of a given space - pub fn subspaces(neo4j: &neo4rs::Graph, space_id: &str) -> SubspacesQuery { - SubspacesQuery::new(neo4j.clone(), space_id.to_string()) - } - - /// Find all types defined in a space - pub fn types(neo4j: &neo4rs::Graph, space_id: &str) -> SpaceTypesQuery { - SpaceTypesQuery::new(neo4j.clone(), space_id.to_string()) - } -} - -/// Query to find all types defined in a space -pub struct SpaceTypesQuery { - neo4j: neo4rs::Graph, - space_id: String, - limit: usize, - skip: Option, -} - -impl SpaceTypesQuery { - fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { - Self { - neo4j, - space_id, - limit: 100, - skip: None, - } - } - - /// Limit the number of results - pub fn limit(mut self, limit: usize) -> Self { - self.limit = limit; - self - } - - /// Skip a number of results - pub fn skip(mut self, skip: usize) -> Self { - self.skip = Some(skip); - self - } -} - -impl QueryStream for SpaceTypesQuery { - async fn send( - self, - ) -> Result>, DatabaseError> { - // Find all entities that have a TYPES relation to the Type entity - let mut stream = entity_node::find_many(&self.neo4j) - .with_filter( - EntityFilter::default() - .relations(TypesFilter::default().r#type(system_ids::SCHEMA_TYPE)) - .space_id(self.space_id), - ) - .limit(self.limit); - - if let Some(skip) = self.skip { - stream = stream.skip(skip); - } - - stream.send().await - } -} - -/// Query to find all members of a space -pub struct SpaceMembersQuery { - neo4j: neo4rs::Graph, - space_id: String, - limit: usize, - skip: Option, -} - -impl SpaceMembersQuery { - fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { - Self { - neo4j, - space_id, - limit: 100, - skip: None, - } - } - - /// Limit the number of results - pub fn limit(mut self, limit: usize) -> Self { - self.limit = limit; - self - } - - /// Skip a number of results - pub fn skip(mut self, skip: usize) -> Self { - self.skip = Some(skip); - self - } -} - -impl 
QueryStream> for SpaceMembersQuery { - async fn send( - self, - ) -> Result, DatabaseError>>, DatabaseError> { - // Find all member relations for the space - let relations_stream = relation_node::find_many(&self.neo4j) - .relation_type(PropFilter::default().value(indexer_ids::MEMBER_RELATION)) - .to_id(PropFilter::default().value(self.space_id.clone())) - .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) - .limit(self.limit) - .send() - .await?; - - // Convert the stream of relations to a stream of accounts - let neo4j = self.neo4j.clone(); - let account_stream = relations_stream - .map(move |relation_result| { - let neo4j = neo4j.clone(); - async move { - let relation = relation_result?; - entity::find_one(&neo4j, &relation.from, indexer_ids::INDEXER_SPACE_ID, None) - .send() - .await? - .ok_or_else(|| { - DatabaseError::NotFound(format!( - "Account with ID {} not found", - relation.from - )) - }) - } - }) - .buffered(10); // Process up to 10 accounts concurrently - - Ok(account_stream) - } -} - -/// Query to find all editors of a space -pub struct SpaceEditorsQuery { - neo4j: neo4rs::Graph, - space_id: String, - limit: usize, - skip: Option, -} - -impl SpaceEditorsQuery { - fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { - Self { - neo4j, - space_id, - limit: 100, - skip: None, - } - } - - /// Limit the number of results - pub fn limit(mut self, limit: usize) -> Self { - self.limit = limit; - self - } - - /// Skip a number of results - pub fn skip(mut self, skip: usize) -> Self { - self.skip = Some(skip); - self - } -} - -impl QueryStream> for SpaceEditorsQuery { - async fn send( - self, - ) -> Result, DatabaseError>>, DatabaseError> { - // Find all editor relations for the space - let relations_stream = relation_node::find_many(&self.neo4j) - .relation_type(PropFilter::default().value(indexer_ids::EDITOR_RELATION)) - .to_id(PropFilter::default().value(self.space_id.clone())) - .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) - .limit(self.limit) - .send() - .await?; - - // Convert the stream of relations to a stream of accounts - let neo4j = self.neo4j.clone(); - let account_stream = relations_stream - .map(move |relation_result| { - let neo4j = neo4j.clone(); - async move { - let relation = relation_result?; - entity::find_one(&neo4j, &relation.from, indexer_ids::INDEXER_SPACE_ID, None) - .send() - .await? 
- .ok_or_else(|| { - DatabaseError::NotFound(format!( - "Account with ID {} not found", - relation.from - )) - }) - } - }) - .buffered(10); // Process up to 10 accounts concurrently - - Ok(account_stream) - } -} - -/// Query to find all parent spaces of a given space -pub struct ParentSpacesQuery { - neo4j: neo4rs::Graph, - space_id: String, - limit: usize, - skip: Option, -} - -impl ParentSpacesQuery { - fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { - Self { - neo4j, - space_id, - limit: 100, - skip: None, - } - } - - /// Limit the number of results - pub fn limit(mut self, limit: usize) -> Self { - self.limit = limit; - self - } - - /// Skip a number of results - pub fn skip(mut self, skip: usize) -> Self { - self.skip = Some(skip); - self - } -} - -impl QueryStream> for ParentSpacesQuery { - async fn send( - self, - ) -> Result, DatabaseError>>, DatabaseError> { - // Find all parent space relations for the space - let relations_stream = relation_node::find_many(&self.neo4j) - .relation_type(PropFilter::default().value(indexer_ids::PARENT_SPACE)) - .from_id(PropFilter::default().value(self.space_id.clone())) - .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) - .limit(self.limit) - .send() - .await?; - - // Convert the stream of relations to a stream of spaces - let neo4j = self.neo4j.clone(); - let space_stream = relations_stream - .map(move |relation_result| { - let neo4j = neo4j.clone(); - async move { - let relation = relation_result?; - entity::find_one(&neo4j, &relation.to, indexer_ids::INDEXER_SPACE_ID, None) - .send() - .await? - .ok_or_else(|| { - DatabaseError::NotFound(format!( - "Space with ID {} not found", - relation.to - )) - }) - } - }) - .buffered(10); // Process up to 10 spaces concurrently - - Ok(space_stream) - } -} - -/// Query to find all subspaces of a given space -pub struct SubspacesQuery { - neo4j: neo4rs::Graph, - space_id: String, - limit: usize, - skip: Option, -} - -impl SubspacesQuery { - fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { - Self { - neo4j, - space_id, - limit: 100, - skip: None, - } - } - - /// Limit the number of results - pub fn limit(mut self, limit: usize) -> Self { - self.limit = limit; - self - } - - /// Skip a number of results - pub fn skip(mut self, skip: usize) -> Self { - self.skip = Some(skip); - self - } -} - -impl QueryStream> for SubspacesQuery { - async fn send( - self, - ) -> Result, DatabaseError>>, DatabaseError> { - // Find all parent space relations where this space is the parent - let relations_stream = relation_node::find_many(&self.neo4j) - .relation_type(PropFilter::default().value(indexer_ids::PARENT_SPACE)) - .to_id(PropFilter::default().value(self.space_id.clone())) - .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) - .limit(self.limit) - .send() - .await?; - - // Convert the stream of relations to a stream of spaces - let neo4j = self.neo4j.clone(); - let space_stream = relations_stream - .map(move |relation_result| { - let neo4j = neo4j.clone(); - async move { - let relation = relation_result?; - entity::find_one(&neo4j, &relation.from, indexer_ids::INDEXER_SPACE_ID, None) - .send() - .await? 
- .ok_or_else(|| { - DatabaseError::NotFound(format!( - "Space with ID {} not found", - relation.from - )) - }) - } - }) - .buffered(10); // Process up to 10 spaces concurrently - - Ok(space_stream) - } -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub enum SpaceGovernanceType { - #[default] - Public, - Personal, -} - -impl From for Value { - fn from(governance_type: SpaceGovernanceType) -> Self { - match governance_type { - SpaceGovernanceType::Public => Value::text("Public".to_string()), - SpaceGovernanceType::Personal => Value::text("Personal".to_string()), - } - } -} - -impl TryFrom for SpaceGovernanceType { - type Error = String; - - fn try_from(value: Value) -> Result { - match value.value.as_str() { - "Public" => Ok(SpaceGovernanceType::Public), - "Personal" => Ok(SpaceGovernanceType::Personal), - _ => Err(format!( - "Invalid SpaceGovernanceType value: {}", - value.value - )), - } - } -} - -pub struct SpaceBuilder { - id: String, - network: String, - governance_type: SpaceGovernanceType, - dao_contract_address: String, - space_plugin_address: Option, - voting_plugin_address: Option, - member_access_plugin: Option, - personal_space_admin_plugin: Option, -} - -impl SpaceBuilder { - pub fn new(id: &str, dao_contract_address: &str) -> Self { - Self { - id: id.to_string(), - network: network_ids::GEO.to_string(), - governance_type: SpaceGovernanceType::Public, - dao_contract_address: checksum_address(dao_contract_address), - space_plugin_address: None, - voting_plugin_address: None, - member_access_plugin: None, - personal_space_admin_plugin: None, - } - } - - pub fn network(mut self, network: String) -> Self { - self.network = network; - self - } - - pub fn governance_type(mut self, governance_type: SpaceGovernanceType) -> Self { - self.governance_type = governance_type; - self - } - - pub fn dao_contract_address(mut self, dao_contract_address: &str) -> Self { - self.dao_contract_address = checksum_address(dao_contract_address); - self - } - - pub fn space_plugin_address(mut self, space_plugin_address: &str) -> Self { - self.space_plugin_address = Some(checksum_address(space_plugin_address)); - self - } - - pub fn voting_plugin_address(mut self, voting_plugin_address: &str) -> Self { - self.voting_plugin_address = Some(checksum_address(voting_plugin_address)); - self - } - - pub fn member_access_plugin(mut self, member_access_plugin: &str) -> Self { - self.member_access_plugin = Some(checksum_address(member_access_plugin)); - self - } - - pub fn personal_space_admin_plugin(mut self, personal_space_admin_plugin: &str) -> Self { - self.personal_space_admin_plugin = Some(checksum_address(personal_space_admin_plugin)); - self - } - - pub fn build(self) -> Entity { - Entity::new( - &self.id, - Space { - network: self.network, - governance_type: self.governance_type, - dao_contract_address: self.dao_contract_address, - space_plugin_address: self.space_plugin_address, - voting_plugin_address: self.voting_plugin_address, - member_access_plugin: self.member_access_plugin, - personal_space_admin_plugin: self.personal_space_admin_plugin, - }, - ) - .with_type(system_ids::SPACE_TYPE) - } -} - -/// Parent space relation (for subspaces). 
-/// Space > PARENT_SPACE > Space -#[derive(Clone)] -#[grc20_core::relation] -#[grc20(relation_type = indexer_ids::PARENT_SPACE)] -pub struct ParentSpace; - -impl ParentSpace { - pub fn generate_id(space_id: &str, parent_space_id: &str) -> String { - ids::create_id_from_unique_string(format!("PARENT_SPACE:{space_id}:{parent_space_id}")) - } - - pub fn new(space_id: &str, parent_space_id: &str) -> Relation { - Relation::new( - Self::generate_id(space_id, parent_space_id), - space_id, - parent_space_id, - indexer_ids::PARENT_SPACE, - "0", - Self, - ) - } - - /// Delete a relation between a space and its parent space. - pub async fn remove( - neo4j: &neo4rs::Graph, - block: &BlockMetadata, - space_id: &str, - parent_space_id: &str, - ) -> Result<(), DatabaseError> { - relation::delete_one( - neo4j, - block, - ParentSpace::generate_id(space_id, parent_space_id), - indexer_ids::INDEXER_SPACE_ID, - "0", - ) - .send() - .await - } -} diff --git a/grc20-sdk/src/models/space/mod.rs b/grc20-sdk/src/models/space/mod.rs new file mode 100644 index 0000000..896d6d1 --- /dev/null +++ b/grc20-sdk/src/models/space/mod.rs @@ -0,0 +1,14 @@ +pub mod parent_spaces_query; +pub mod space_editors_query; +pub mod space_model; +// pub mod space_hierarchy; +pub mod space_members_query; +pub mod space_types_query; +pub mod subspaces_query; + +pub use parent_spaces_query::ParentSpacesQuery; +pub use space_editors_query::SpaceEditorsQuery; +pub use space_members_query::SpaceMembersQuery; +pub use space_model::*; +pub use space_types_query::SpaceTypesQuery; +pub use subspaces_query::SubspacesQuery; diff --git a/grc20-sdk/src/models/space/parent_spaces_query.rs b/grc20-sdk/src/models/space/parent_spaces_query.rs new file mode 100644 index 0000000..50e8ec7 --- /dev/null +++ b/grc20-sdk/src/models/space/parent_spaces_query.rs @@ -0,0 +1,158 @@ +use std::collections::HashSet; + +use async_stream::stream; +use futures::{pin_mut, Stream, StreamExt}; + +use grc20_core::{ + error::DatabaseError, + indexer_ids, + mapping::{query_utils::QueryStream, relation_node, PropFilter}, + neo4rs, +}; + +/// Query to find all parent spaces of a given space +pub struct ParentSpacesQuery { + neo4j: neo4rs::Graph, + space_id: String, + limit: usize, + skip: Option, + max_depth: Option, +} + +impl ParentSpacesQuery { + pub(crate) fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { + Self { + neo4j, + space_id, + limit: 100, + skip: None, + max_depth: Some(1), + } + } + + /// Limit the number of results + pub fn limit(mut self, limit: usize) -> Self { + self.limit = limit; + self + } + + /// Skip a number of results + pub fn skip(mut self, skip: usize) -> Self { + self.skip = Some(skip); + self + } + + /// Limit the depth of the search + pub fn max_depth(mut self, max_depth: Option) -> Self { + self.max_depth = max_depth; + self + } +} + +// impl QueryStream> for ParentSpacesQuery { +// async fn send( +// self, +// ) -> Result, DatabaseError>>, DatabaseError> { +// // Find all parent space relations for the space +// let relations_stream = relation_node::find_many(&self.neo4j) +// .relation_type(PropFilter::default().value(indexer_ids::PARENT_SPACE)) +// .from_id(PropFilter::default().value(self.space_id.clone())) +// .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) +// .limit(self.limit) +// .send() +// .await?; + +// // Convert the stream of relations to a stream of spaces +// let neo4j = self.neo4j.clone(); +// let space_stream = relations_stream +// .map(move |relation_result| { +// let neo4j = neo4j.clone(); +// 
async move { +// let relation = relation_result?; +// entity::find_one(&neo4j, &relation.to, indexer_ids::INDEXER_SPACE_ID, None) +// .send() +// .await? +// .ok_or_else(|| { +// DatabaseError::NotFound(format!( +// "Space with ID {} not found", +// relation.to +// )) +// }) +// } +// }) +// .buffered(10); // Process up to 10 spaces concurrently + +// Ok(space_stream) +// } +// } + +impl QueryStream<(String, usize)> for ParentSpacesQuery { + async fn send( + self, + ) -> Result<impl Stream<Item = Result<(String, usize), DatabaseError>>, DatabaseError> { + let mut visited = HashSet::new(); + let mut queue = vec![(self.space_id.clone(), 0)]; // (space_id, depth) + + // Add initial space to visited set + visited.insert(self.space_id.to_string()); + + // Create and return the stream + let stream = stream! { + // Process queue until empty + while let Some((current_space, depth)) = queue.pop() { + // Check if we've reached max depth + if let Some(max_depth) = self.max_depth { + if depth >= max_depth { + continue; + } + } + + // Get immediate parent_spaces + let parent_spaces = immediate_parent_spaces(&self.neo4j, &current_space, self.limit).await?; + pin_mut!(parent_spaces); + + // Process each parent_space + while let Some(parent_space_result) = parent_spaces.next().await { + match parent_space_result { + Ok(parent_space) => { + // Skip if already visited (handles cycles) + if !visited.insert(parent_space.clone()) { + continue; + } + + // Yield the parent_space ID + yield Ok((parent_space.clone(), depth)); + + // Add to queue for further processing + queue.push((parent_space, depth + 1)); + }, + Err(e) => yield Err(e), + } + } + } + }; + + Ok(stream.skip(self.skip.unwrap_or(0)).take(self.limit)) + } +} + +async fn immediate_parent_spaces( + neo4j: &neo4rs::Graph, + space_id: &str, + limit: usize, +) -> Result<impl Stream<Item = Result<String, DatabaseError>>, DatabaseError> { + // Find all PARENT_SPACE relations pointing from this space to its parents (the parents are the "to" ends) + let relations_stream = relation_node::find_many(neo4j) + .relation_type(PropFilter::default().value(indexer_ids::PARENT_SPACE)) + .from_id(PropFilter::default().value(space_id)) + .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) + .limit(limit) + .send() + .await?; + + // Convert the stream of relations to a stream of spaces + let space_stream = + relations_stream.map(move |relation_result| relation_result.map(|relation| relation.to)); + + Ok(space_stream) +} diff --git a/grc20-sdk/src/models/space/space_editors_query.rs b/grc20-sdk/src/models/space/space_editors_query.rs new file mode 100644 index 0000000..64c849d --- /dev/null +++ b/grc20-sdk/src/models/space/space_editors_query.rs @@ -0,0 +1,78 @@ +use futures::{Stream, StreamExt}; + +use grc20_core::{ + error::DatabaseError, + indexer_ids, + mapping::{entity, query_utils::QueryStream, relation_node, Entity, PropFilter, Query}, + neo4rs, +}; + +use crate::models::Account; + +/// Query to find all editors of a space +pub struct SpaceEditorsQuery { + neo4j: neo4rs::Graph, + space_id: String, + limit: usize, + skip: Option<usize>, +} + +impl SpaceEditorsQuery { + pub(crate) fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { + Self { + neo4j, + space_id, + limit: 100, + skip: None, + } + } + + /// Limit the number of results + pub fn limit(mut self, limit: usize) -> Self { + self.limit = limit; + self + } + + /// Skip a number of results + pub fn skip(mut self, skip: usize) -> Self { + self.skip = Some(skip); + self + } +} + +impl QueryStream<Entity<Account>> for SpaceEditorsQuery { + async fn send( + self, + ) -> Result<impl Stream<Item = Result<Entity<Account>, DatabaseError>>, DatabaseError> { + // Find all editor relations for the space + let
relations_stream = relation_node::find_many(&self.neo4j) + .relation_type(PropFilter::default().value(indexer_ids::EDITOR_RELATION)) + .to_id(PropFilter::default().value(self.space_id.clone())) + .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) + .limit(self.limit) + .send() + .await?; + + // Convert the stream of relations to a stream of accounts + let neo4j = self.neo4j.clone(); + let account_stream = relations_stream + .map(move |relation_result| { + let neo4j = neo4j.clone(); + async move { + let relation = relation_result?; + entity::find_one(&neo4j, &relation.from, indexer_ids::INDEXER_SPACE_ID, None) + .send() + .await? + .ok_or_else(|| { + DatabaseError::NotFound(format!( + "Account with ID {} not found", + relation.from + )) + }) + } + }) + .buffered(10); // Process up to 10 accounts concurrently + + Ok(account_stream) + } +} diff --git a/grc20-sdk/src/models/space/space_members_query.rs b/grc20-sdk/src/models/space/space_members_query.rs new file mode 100644 index 0000000..d974211 --- /dev/null +++ b/grc20-sdk/src/models/space/space_members_query.rs @@ -0,0 +1,78 @@ +use futures::{Stream, StreamExt}; + +use grc20_core::{ + error::DatabaseError, + indexer_ids, + mapping::{entity, query_utils::QueryStream, relation_node, Entity, PropFilter, Query}, + neo4rs, +}; + +use crate::models::Account; + +/// Query to find all members of a space +pub struct SpaceMembersQuery { + neo4j: neo4rs::Graph, + space_id: String, + limit: usize, + skip: Option, +} + +impl SpaceMembersQuery { + pub(crate) fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { + Self { + neo4j, + space_id, + limit: 100, + skip: None, + } + } + + /// Limit the number of results + pub fn limit(mut self, limit: usize) -> Self { + self.limit = limit; + self + } + + /// Skip a number of results + pub fn skip(mut self, skip: usize) -> Self { + self.skip = Some(skip); + self + } +} + +impl QueryStream> for SpaceMembersQuery { + async fn send( + self, + ) -> Result, DatabaseError>>, DatabaseError> { + // Find all member relations for the space + let relations_stream = relation_node::find_many(&self.neo4j) + .relation_type(PropFilter::default().value(indexer_ids::MEMBER_RELATION)) + .to_id(PropFilter::default().value(self.space_id.clone())) + .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) + .limit(self.limit) + .send() + .await?; + + // Convert the stream of relations to a stream of accounts + let neo4j = self.neo4j.clone(); + let account_stream = relations_stream + .map(move |relation_result| { + let neo4j = neo4j.clone(); + async move { + let relation = relation_result?; + entity::find_one(&neo4j, &relation.from, indexer_ids::INDEXER_SPACE_ID, None) + .send() + .await? 
+ .ok_or_else(|| { + DatabaseError::NotFound(format!( + "Account with ID {} not found", + relation.from + )) + }) + } + }) + .buffered(10); // Process up to 10 accounts concurrently + + Ok(account_stream) + } +} diff --git a/grc20-sdk/src/models/space/space_model.rs b/grc20-sdk/src/models/space/space_model.rs new file mode 100644 index 0000000..e112b7d --- /dev/null +++ b/grc20-sdk/src/models/space/space_model.rs @@ -0,0 +1,372 @@ +use futures::{pin_mut, StreamExt}; +use serde::{Deserialize, Serialize}; +use web3_utils::checksum_address; + +use grc20_core::{ + block::BlockMetadata, + error::DatabaseError, + ids, indexer_ids, + mapping::{ + entity, entity_node, prop_filter, + query_utils::{AttributeFilter, PropFilter, Query, QueryStream}, + relation, Entity, EntityNode, Relation, TriplesConversionError, Value, + }, + neo4rs, network_ids, system_ids, +}; + +use super::{ + ParentSpacesQuery, SpaceEditorsQuery, SpaceMembersQuery, SpaceTypesQuery, SubspacesQuery, +}; + +#[derive(Clone)] +#[grc20_core::entity] +#[grc20(schema_type = system_ids::SPACE_TYPE)] +pub struct Space { + #[grc20(attribute = system_ids::NETWORK_ATTRIBUTE)] + pub network: String, + + #[grc20(attribute = indexer_ids::SPACE_GOVERNANCE_TYPE)] + pub governance_type: SpaceGovernanceType, + + /// The address of the space's DAO contract. + #[grc20(attribute = indexer_ids::SPACE_DAO_ADDRESS)] + pub dao_contract_address: String, + + /// The address of the space plugin contract. + #[grc20(attribute = indexer_ids::SPACE_PLUGIN_ADDRESS)] + pub space_plugin_address: Option, + + /// The address of the voting plugin contract. + #[grc20(attribute = indexer_ids::SPACE_VOTING_PLUGIN_ADDRESS)] + pub voting_plugin_address: Option, + + /// The address of the member access plugin contract. + #[grc20(attribute = indexer_ids::SPACE_MEMBER_PLUGIN_ADDRESS)] + pub member_access_plugin: Option, + + /// The address of the personal space admin plugin contract. + #[grc20(attribute = indexer_ids::SPACE_PERSONAL_PLUGIN_ADDRESS)] + pub personal_space_admin_plugin: Option, +} + +/// Generates a unique ID for a space based on its network and DAO contract address. +pub fn new_id(network: &str, address: &str) -> String { + ids::create_id_from_unique_string(format!("{network}:{}", checksum_address(address))) +} + +pub fn builder(id: &str, dao_contract_address: &str) -> SpaceBuilder { + SpaceBuilder::new(id, dao_contract_address) +} + +/// Find a space by its DAO contract address. +pub async fn find_by_dao_address( + neo4j: &neo4rs::Graph, + dao_contract_address: &str, +) -> Result>, DatabaseError> { + entity::find_one( + neo4j, + new_id(network_ids::GEO, dao_contract_address), + indexer_ids::INDEXER_SPACE_ID, + None, + ) + .send() + .await +} + +pub async fn find_entity_by_dao_address( + neo4j: &neo4rs::Graph, + dao_contract_address: &str, +) -> Result, DatabaseError> { + entity_node::find_one(neo4j, new_id(network_ids::GEO, dao_contract_address)) + .send() + .await +} + +/// Find a space by its space plugin address. 
+pub async fn find_by_space_plugin_address( + neo4j: &neo4rs::Graph, + space_plugin_address: &str, +) -> Result>, DatabaseError> { + let stream = entity::find_many(neo4j, indexer_ids::INDEXER_SPACE_ID, None) + .attribute( + AttributeFilter::new(indexer_ids::SPACE_PLUGIN_ADDRESS) + .value(PropFilter::default().value(checksum_address(space_plugin_address))), + ) + .limit(1) + .send() + .await?; + + pin_mut!(stream); + + stream.next().await.transpose() +} + +pub async fn find_entity_by_space_plugin_address( + neo4j: &neo4rs::Graph, + space_plugin_address: &str, +) -> Result, DatabaseError> { + let stream = entity_node::find_many(neo4j) + .attribute( + AttributeFilter::new(indexer_ids::SPACE_PLUGIN_ADDRESS) + .space_id(prop_filter::value(indexer_ids::INDEXER_SPACE_ID)) + .value(prop_filter::value(checksum_address(space_plugin_address))), + ) + .limit(1) + .send() + .await?; + + pin_mut!(stream); + + stream.next().await.transpose() +} + +/// Find a space by its voting plugin address. +pub async fn find_by_voting_plugin_address( + neo4j: &neo4rs::Graph, + voting_plugin_address: &str, +) -> Result>, DatabaseError> { + let stream = entity::find_many(neo4j, indexer_ids::INDEXER_SPACE_ID, None) + .attribute( + AttributeFilter::new(indexer_ids::SPACE_VOTING_PLUGIN_ADDRESS) + .value(PropFilter::default().value(checksum_address(voting_plugin_address))), + ) + .limit(1) + .send() + .await?; + + pin_mut!(stream); + + stream.next().await.transpose() +} + +pub async fn find_entity_by_voting_plugin_address( + neo4j: &neo4rs::Graph, + voting_plugin_address: &str, +) -> Result, DatabaseError> { + let stream = entity_node::find_many(neo4j) + .attribute( + AttributeFilter::new(indexer_ids::SPACE_VOTING_PLUGIN_ADDRESS) + .space_id(prop_filter::value(indexer_ids::INDEXER_SPACE_ID)) + .value(prop_filter::value(checksum_address(voting_plugin_address))), + ) + .limit(1) + .send() + .await?; + + pin_mut!(stream); + + stream.next().await.transpose() +} + +/// Find a space by its member access plugin address. +pub async fn find_by_member_access_plugin( + neo4j: &neo4rs::Graph, + member_access_plugin: &str, +) -> Result>, DatabaseError> { + let stream = entity::find_many(neo4j, indexer_ids::INDEXER_SPACE_ID, None) + .attribute( + AttributeFilter::new(indexer_ids::SPACE_MEMBER_PLUGIN_ADDRESS) + .value(PropFilter::default().value(checksum_address(member_access_plugin))), + ) + .limit(1) + .send() + .await?; + + pin_mut!(stream); + + stream.next().await.transpose() +} + +/// Find a space by its personal space admin plugin address. 
+pub async fn find_by_personal_plugin_address( + neo4j: &neo4rs::Graph, + personal_space_admin_plugin: &str, +) -> Result>, DatabaseError> { + let stream = entity::find_many(neo4j, indexer_ids::INDEXER_SPACE_ID, None) + .attribute( + AttributeFilter::new(indexer_ids::SPACE_PERSONAL_PLUGIN_ADDRESS) + .value(PropFilter::default().value(checksum_address(personal_space_admin_plugin))), + ) + .limit(1) + .send() + .await?; + + pin_mut!(stream); + + stream.next().await.transpose() +} + +/// Find all members of a space +pub fn members(neo4j: &neo4rs::Graph, space_id: &str) -> SpaceMembersQuery { + SpaceMembersQuery::new(neo4j.clone(), space_id.to_string()) +} + +/// Find all editors of a space +pub fn editors(neo4j: &neo4rs::Graph, space_id: &str) -> SpaceEditorsQuery { + SpaceEditorsQuery::new(neo4j.clone(), space_id.to_string()) +} + +/// Find all parent spaces of a given space +pub fn parent_spaces(neo4j: &neo4rs::Graph, space_id: &str) -> ParentSpacesQuery { + ParentSpacesQuery::new(neo4j.clone(), space_id.to_string()) +} + +/// Find all subspaces of a given space +pub fn subspaces(neo4j: &neo4rs::Graph, space_id: &str) -> SubspacesQuery { + SubspacesQuery::new(neo4j.clone(), space_id.to_string()) +} + +/// Find all types defined in a space +pub fn types(neo4j: &neo4rs::Graph, space_id: &str) -> SpaceTypesQuery { + SpaceTypesQuery::new(neo4j.clone(), space_id.to_string()) +} + +#[derive(Clone, Debug, Default, Deserialize, Serialize)] +pub enum SpaceGovernanceType { + #[default] + Public, + Personal, +} + +impl From for Value { + fn from(governance_type: SpaceGovernanceType) -> Self { + match governance_type { + SpaceGovernanceType::Public => Value::text("Public".to_string()), + SpaceGovernanceType::Personal => Value::text("Personal".to_string()), + } + } +} + +impl TryFrom for SpaceGovernanceType { + type Error = TriplesConversionError; + + fn try_from(value: Value) -> Result { + match value.value.as_str() { + "Public" => Ok(SpaceGovernanceType::Public), + "Personal" => Ok(SpaceGovernanceType::Personal), + _ => Err(TriplesConversionError::InvalidValue(format!( + "Invalid SpaceGovernanceType value: {}", + value.value + ))), + } + } +} + +pub struct SpaceBuilder { + id: String, + network: String, + governance_type: SpaceGovernanceType, + dao_contract_address: String, + space_plugin_address: Option, + voting_plugin_address: Option, + member_access_plugin: Option, + personal_space_admin_plugin: Option, +} + +impl SpaceBuilder { + pub fn new(id: &str, dao_contract_address: &str) -> Self { + Self { + id: id.to_string(), + network: network_ids::GEO.to_string(), + governance_type: SpaceGovernanceType::Public, + dao_contract_address: checksum_address(dao_contract_address), + space_plugin_address: None, + voting_plugin_address: None, + member_access_plugin: None, + personal_space_admin_plugin: None, + } + } + + pub fn network(mut self, network: String) -> Self { + self.network = network; + self + } + + pub fn governance_type(mut self, governance_type: SpaceGovernanceType) -> Self { + self.governance_type = governance_type; + self + } + + pub fn dao_contract_address(mut self, dao_contract_address: &str) -> Self { + self.dao_contract_address = checksum_address(dao_contract_address); + self + } + + pub fn space_plugin_address(mut self, space_plugin_address: &str) -> Self { + self.space_plugin_address = Some(checksum_address(space_plugin_address)); + self + } + + pub fn voting_plugin_address(mut self, voting_plugin_address: &str) -> Self { + self.voting_plugin_address = 
Some(checksum_address(voting_plugin_address)); + self + } + + pub fn member_access_plugin(mut self, member_access_plugin: &str) -> Self { + self.member_access_plugin = Some(checksum_address(member_access_plugin)); + self + } + + pub fn personal_space_admin_plugin(mut self, personal_space_admin_plugin: &str) -> Self { + self.personal_space_admin_plugin = Some(checksum_address(personal_space_admin_plugin)); + self + } + + pub fn build(self) -> Entity<Space> { + Entity::new( + &self.id, + Space { + network: self.network, + governance_type: self.governance_type, + dao_contract_address: self.dao_contract_address, + space_plugin_address: self.space_plugin_address, + voting_plugin_address: self.voting_plugin_address, + member_access_plugin: self.member_access_plugin, + personal_space_admin_plugin: self.personal_space_admin_plugin, + }, + ) + .with_type(system_ids::SPACE_TYPE) + } +} + +/// Parent space relation (for subspaces). +/// Space > PARENT_SPACE > Space +#[derive(Clone)] +#[grc20_core::relation] +#[grc20(relation_type = indexer_ids::PARENT_SPACE)] +pub struct ParentSpace; + +impl ParentSpace { + pub fn generate_id(space_id: &str, parent_space_id: &str) -> String { + ids::create_id_from_unique_string(format!("PARENT_SPACE:{space_id}:{parent_space_id}")) + } + + pub fn new(space_id: &str, parent_space_id: &str) -> Relation<Self> { + Relation::new( + Self::generate_id(space_id, parent_space_id), + space_id, + parent_space_id, + indexer_ids::PARENT_SPACE, + "0", + Self, + ) + } + + /// Delete a relation between a space and its parent space. + pub async fn remove( + neo4j: &neo4rs::Graph, + block: &BlockMetadata, + space_id: &str, + parent_space_id: &str, + ) -> Result<(), DatabaseError> { + relation::delete_one( + neo4j, + block, + ParentSpace::generate_id(space_id, parent_space_id), + indexer_ids::INDEXER_SPACE_ID, + "0", + ) + .send() + .await + } +} diff --git a/grc20-sdk/src/models/space/space_types_query.rs b/grc20-sdk/src/models/space/space_types_query.rs new file mode 100644 index 0000000..4352104 --- /dev/null +++ b/grc20-sdk/src/models/space/space_types_query.rs @@ -0,0 +1,88 @@ +use futures::{Stream, TryStreamExt}; + +use grc20_core::{ + error::DatabaseError, + mapping::{ + entity_node, + query_utils::{QueryStream, TypesFilter}, + EntityFilter, EntityNode, PropFilter, + }, + neo4rs, system_ids, +}; + +use super::ParentSpacesQuery; + +/// Query to find all types defined in a space +pub struct SpaceTypesQuery { + neo4j: neo4rs::Graph, + space_id: String, + limit: usize, + skip: Option<usize>, + strict: bool, +} + +impl SpaceTypesQuery { + pub(crate) fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { + Self { + neo4j, + space_id, + limit: 100, + skip: None, + strict: true, + } + } + + /// Limit the number of results + pub fn limit(mut self, limit: usize) -> Self { + self.limit = limit; + self + } + + /// Skip a number of results + pub fn skip(mut self, skip: usize) -> Self { + self.skip = Some(skip); + self + } + + /// Set whether to query only the given space (strict) or to also include types defined in its parent spaces + pub fn strict(mut self, strict: bool) -> Self { + self.strict = strict; + self + } +} + +impl QueryStream<EntityNode> for SpaceTypesQuery { + async fn send( + self, + ) -> Result<impl Stream<Item = Result<EntityNode, DatabaseError>>, DatabaseError> { + let mut spaces = vec![self.space_id.clone()]; + + if !self.strict { + let parent_spaces: Vec<String> = + ParentSpacesQuery::new(self.neo4j.clone(), self.space_id.clone()) + .max_depth(None) + .send() + .await?
+ .map_ok(|(space, _)| space) + .try_collect() + .await?; + + spaces.extend(parent_spaces); + } + + // Find all entities that have a TYPES relation to the Type entity + let mut query = entity_node::find_many(&self.neo4j) + .with_filter( + EntityFilter::default() + .relations(TypesFilter::default().r#type(system_ids::SCHEMA_TYPE)) + .space_id(PropFilter::default().value_in(spaces)), + ) + .limit(self.limit); + + if let Some(skip) = self.skip { + query = query.skip(skip); + } + + query.send().await + } +} diff --git a/grc20-sdk/src/models/space/subspaces_query.rs b/grc20-sdk/src/models/space/subspaces_query.rs new file mode 100644 index 0000000..5a01738 --- /dev/null +++ b/grc20-sdk/src/models/space/subspaces_query.rs @@ -0,0 +1,158 @@ +use std::collections::HashSet; + +use async_stream::stream; +use futures::{pin_mut, Stream, StreamExt}; + +use grc20_core::{ + error::DatabaseError, + indexer_ids, + mapping::{query_utils::QueryStream, relation_node, PropFilter}, + neo4rs, +}; + +/// Query to find all subspaces of a given space +pub struct SubspacesQuery { + neo4j: neo4rs::Graph, + space_id: String, + limit: usize, + skip: Option<usize>, + max_depth: Option<usize>, +} + +impl SubspacesQuery { + pub(crate) fn new(neo4j: neo4rs::Graph, space_id: String) -> Self { + Self { + neo4j, + space_id, + limit: 100, + skip: None, + max_depth: Some(1), + } + } + + /// Limit the number of results + pub fn limit(mut self, limit: usize) -> Self { + self.limit = limit; + self + } + + /// Skip a number of results + pub fn skip(mut self, skip: usize) -> Self { + self.skip = Some(skip); + self + } + + /// Limit the depth of the search + pub fn max_depth(mut self, max_depth: Option<usize>) -> Self { + self.max_depth = max_depth; + self + } +} + +// impl QueryStream<Entity<Space>> for SubspacesQuery { +// async fn send( +// self, +// ) -> Result<impl Stream<Item = Result<Entity<Space>, DatabaseError>>, DatabaseError> { +// // Find all parent space relations where this space is the parent +// let relations_stream = relation_node::find_many(&self.neo4j) +// .relation_type(PropFilter::default().value(indexer_ids::PARENT_SPACE)) +// .from_id(PropFilter::default().value(self.space_id.clone())) +// .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) +// .limit(self.limit) +// .send() +// .await?; + +// // Convert the stream of relations to a stream of spaces +// let neo4j = self.neo4j.clone(); +// let space_stream = relations_stream +// .map(move |relation_result| { +// let neo4j = neo4j.clone(); +// async move { +// let relation = relation_result?; +// entity::find_one(&neo4j, &relation.to, indexer_ids::INDEXER_SPACE_ID, None) +// .send() +// .await? +// .ok_or_else(|| { +// DatabaseError::NotFound(format!( +// "Space with ID {} not found", +// relation.to +// )) +// }) +// } +// }) +// .buffered(10); // Process up to 10 spaces concurrently + +// Ok(space_stream) +// } +// } + +impl QueryStream<(String, usize)> for SubspacesQuery { + async fn send( + self, + ) -> Result<impl Stream<Item = Result<(String, usize), DatabaseError>>, DatabaseError> { + let mut visited = HashSet::new(); + let mut queue = vec![(self.space_id.clone(), 0)]; // (space_id, depth) + + // Add initial space to visited set + visited.insert(self.space_id.to_string()); + + // Create and return the stream + let stream = stream!
{ + // Process queue until empty + while let Some((current_space, depth)) = queue.pop() { + // Check if we've reached max depth + if let Some(max_depth) = self.max_depth { + if depth >= max_depth { + continue; + } + } + + // Get immediate subspaces + let subspaces = immediate_subspaces(&self.neo4j, &current_space, self.limit).await?; + pin_mut!(subspaces); + + // Process each subspace + while let Some(subspace_result) = subspaces.next().await { + match subspace_result { + Ok(subspace) => { + // Skip if already visited (handles cycles) + if !visited.insert(subspace.clone()) { + continue; + } + + // Yield the subspace ID + yield Ok((subspace.clone(), depth)); + + // Add to queue for further processing + queue.push((subspace, depth + 1)); + }, + Err(e) => yield Err(e), + } + } + } + }; + + Ok(stream.skip(self.skip.unwrap_or(0)).take(self.limit)) + } +} + +async fn immediate_subspaces( + neo4j: &neo4rs::Graph, + space_id: &str, + limit: usize, +) -> Result<impl Stream<Item = Result<String, DatabaseError>>, DatabaseError> { + // Find all parent space relations where this space is the parent + let relations_stream = relation_node::find_many(neo4j) + .relation_type(PropFilter::default().value(indexer_ids::PARENT_SPACE)) + .to_id(PropFilter::default().value(space_id)) + .space_id(PropFilter::default().value(indexer_ids::INDEXER_SPACE_ID)) + .limit(limit) + .send() + .await?; + + // Convert the stream of relations to a stream of spaces + let space_stream = + relations_stream.map(move |relation_result| relation_result.map(|relation| relation.from)); + + Ok(space_stream) +} diff --git a/grc20-sdk/src/models/vote.rs b/grc20-sdk/src/models/vote.rs index 5979ca9..2e665e0 100644 --- a/grc20-sdk/src/models/vote.rs +++ b/grc20-sdk/src/models/vote.rs @@ -2,7 +2,7 @@ use grc20_core::{ ids, indexer_ids, - mapping::{self, Relation}, + mapping::{self, Relation, TriplesConversionError}, }; /// A vote cast by a user on a proposal.
@@ -62,13 +62,16 @@ impl From for mapping::Value { } impl TryFrom for VoteType { - type Error = String; + type Error = TriplesConversionError; fn try_from(value: mapping::Value) -> Result { match (value.value_type, value.value.as_str()) { (mapping::ValueType::Text, "ACCEPT") => Ok(Self::Accept), (mapping::ValueType::Text, "REJECT") => Ok(Self::Reject), - (value_type, _) => Err(format!("Invalid vote type value_type: {:?}", value_type)), + (value_type, _) => Err(TriplesConversionError::InvalidValue(format!( + "Invalid vote type value_type: {:?}", + value_type + ))), } } } diff --git a/sink/src/events/edit_published.rs b/sink/src/events/edit_published.rs index f7c514e..85341de 100644 --- a/sink/src/events/edit_published.rs +++ b/sink/src/events/edit_published.rs @@ -9,7 +9,7 @@ use grc20_core::{ use grc20_sdk::models::{ self, edit::{Edits, ProposedEdit}, - Proposal, Space, + space, Proposal, }; use ipfs::deserialize; use web3_utils::checksum_address; @@ -76,7 +76,7 @@ impl EventHandler { ) -> Result, HandlerError> { // TODO: (optimization) Check if need to fetch entire space let space = if let Some(space) = - Space::find_by_dao_address(&self.neo4j, &edit_published.dao_address) + space::find_by_dao_address(&self.neo4j, &edit_published.dao_address) .await .map_err(|e| { HandlerError::Other( @@ -115,7 +115,7 @@ impl EventHandler { name: edit.name, content_uri: edit_published.content_uri.clone(), proposal_id: edit.id, - space_id: space.id.to_string(), + space_id: space.id().to_string(), space_plugin_address: space .attributes .space_plugin_address @@ -129,7 +129,7 @@ impl EventHandler { let import = deserialize::(&bytes)?; stream::iter(import.edits) .map(|edit_uri| { - let space_id = space.id.clone(); + let space_id = space.id().to_string(); let space_plugin_address = space .attributes .space_plugin_address @@ -244,7 +244,7 @@ impl EventHandler { space_id: &str, proposal_id: &str, ) -> Result<(), DatabaseError> { - let edit_id = edit.id.clone(); + let edit_id = edit.id().to_string(); // Insert edit edit.insert(&self.neo4j, block, indexer_ids::INDEXER_SPACE_ID, "0") diff --git a/sink/src/events/editor_added.rs b/sink/src/events/editor_added.rs index 0d11057..9361cbb 100644 --- a/sink/src/events/editor_added.rs +++ b/sink/src/events/editor_added.rs @@ -1,5 +1,5 @@ use grc20_core::{block::BlockMetadata, indexer_ids, mapping::query_utils::Query, pb::geo}; -use grc20_sdk::models::{Account, Space, SpaceEditor}; +use grc20_sdk::models::{account, space, SpaceEditor}; use web3_utils::checksum_address; @@ -12,11 +12,11 @@ impl EventHandler { block: &BlockMetadata, ) -> Result<(), HandlerError> { if let Some(space) = - Space::find_entity_by_dao_address(&self.neo4j, &editor_added.dao_address).await? + space::find_entity_by_dao_address(&self.neo4j, &editor_added.dao_address).await? 
{ // Create editor account and space editor relation - let editor = Account::new(editor_added.editor_address.clone()); - let editor_relation = SpaceEditor::new(&editor.id, &space.id); + let editor = account::new(editor_added.editor_address.clone()); + let editor_relation = SpaceEditor::new(editor.id(), &space.id); // Insert editor account editor diff --git a/sink/src/events/editor_removed.rs b/sink/src/events/editor_removed.rs index 57ee071..79676c0 100644 --- a/sink/src/events/editor_removed.rs +++ b/sink/src/events/editor_removed.rs @@ -1,5 +1,5 @@ use grc20_core::{block::BlockMetadata, pb::geo}; -use grc20_sdk::models::{Account, Space, SpaceEditor}; +use grc20_sdk::models::{account, space, SpaceEditor}; use super::{handler::HandlerError, EventHandler}; @@ -10,13 +10,13 @@ impl EventHandler { block: &BlockMetadata, ) -> Result<(), HandlerError> { let space = - Space::find_entity_by_dao_address(&self.neo4j, &editor_removed.dao_address).await?; + space::find_entity_by_dao_address(&self.neo4j, &editor_removed.dao_address).await?; if let Some(space) = space { SpaceEditor::remove( &self.neo4j, block, - &Account::gen_id(&editor_removed.editor_address), + &account::new_id(&editor_removed.editor_address), &space.id, ) .await?; diff --git a/sink/src/events/initial_editors_added.rs b/sink/src/events/initial_editors_added.rs index d120bb4..d626d0d 100644 --- a/sink/src/events/initial_editors_added.rs +++ b/sink/src/events/initial_editors_added.rs @@ -1,6 +1,6 @@ use futures::{stream, StreamExt, TryStreamExt}; use grc20_core::{block::BlockMetadata, indexer_ids, mapping::query_utils::Query, pb::geo}; -use grc20_sdk::models::{Account, Space, SpaceEditor}; +use grc20_sdk::models::{account, space, SpaceEditor}; use super::{handler::HandlerError, EventHandler}; @@ -11,7 +11,7 @@ impl EventHandler { block: &BlockMetadata, ) -> Result<(), HandlerError> { let space = - Space::find_entity_by_dao_address(&self.neo4j, &initial_editor_added.dao_address) + space::find_entity_by_dao_address(&self.neo4j, &initial_editor_added.dao_address) .await?; if let Some(space) = &space { @@ -19,8 +19,8 @@ impl EventHandler { .map(Result::<_, HandlerError>::Ok) .try_for_each(|editor_address| async move { // Create editor account and relation - let editor = Account::new(editor_address.clone()); - let editor_rel = SpaceEditor::new(&editor.id, &space.id); + let editor = account::new(editor_address.clone()); + let editor_rel = SpaceEditor::new(editor.id(), &space.id); // Insert editor account editor diff --git a/sink/src/events/member_added.rs b/sink/src/events/member_added.rs index afed6af..43d1e6f 100644 --- a/sink/src/events/member_added.rs +++ b/sink/src/events/member_added.rs @@ -1,5 +1,5 @@ use grc20_core::{block::BlockMetadata, indexer_ids, mapping::query_utils::Query, pb::geo}; -use grc20_sdk::models::{Account, Space, SpaceMember}; +use grc20_sdk::models::{account, space, SpaceMember}; use super::{handler::HandlerError, EventHandler}; @@ -10,18 +10,18 @@ impl EventHandler { block: &BlockMetadata, ) -> Result<(), HandlerError> { // match try_join!( - // Space::find_by_voting_plugin_address( + // space::find_by_voting_plugin_address( // &self.neo4j, // &member_added.main_voting_plugin_address // ), - // Space::find_by_personal_plugin_address( + // space::find_by_personal_plugin_address( // &self.neo4j, // &member_added.main_voting_plugin_address // ) // )? 
{ // // Space found // (Some(space), _) | (None, Some(space)) => { - // let member = Account::new(member_added.member_address.clone()); + // let member = account::new(member_added.member_address.clone()); // let member_rel = SpaceMember::new(&member.id, &space.id); // // Add geo account @@ -47,10 +47,10 @@ impl EventHandler { // }; if let Some(space) = - Space::find_entity_by_dao_address(&self.neo4j, &member_added.dao_address).await? + space::find_entity_by_dao_address(&self.neo4j, &member_added.dao_address).await? { - let member = Account::new(member_added.member_address.clone()); - let member_rel = SpaceMember::new(&member.id, &space.id); + let member = account::new(member_added.member_address.clone()); + let member_rel = SpaceMember::new(member.id(), &space.id); // Add geo account member diff --git a/sink/src/events/member_removed.rs b/sink/src/events/member_removed.rs index a6533fe..8d731ea 100644 --- a/sink/src/events/member_removed.rs +++ b/sink/src/events/member_removed.rs @@ -1,5 +1,5 @@ use grc20_core::{block::BlockMetadata, pb::geo}; -use grc20_sdk::models::{Account, Space, SpaceMember}; +use grc20_sdk::models::{account, space, SpaceMember}; use super::{handler::HandlerError, EventHandler}; @@ -10,13 +10,13 @@ impl EventHandler { block: &BlockMetadata, ) -> Result<(), HandlerError> { let space = - Space::find_entity_by_dao_address(&self.neo4j, &member_removed.dao_address).await?; + space::find_entity_by_dao_address(&self.neo4j, &member_removed.dao_address).await?; if let Some(space) = space { SpaceMember::remove( &self.neo4j, block, - &Account::gen_id(&member_removed.member_address), + &account::new_id(&member_removed.member_address), &space.id, ) .await?; diff --git a/sink/src/events/proposal_created.rs b/sink/src/events/proposal_created.rs index 6e5262c..7c0dec5 100644 --- a/sink/src/events/proposal_created.rs +++ b/sink/src/events/proposal_created.rs @@ -7,10 +7,10 @@ use grc20_core::{ pb::geo, }; use grc20_sdk::models::{ + account, proposal::{ProposalStatus, ProposedAccount, ProposedSubspace}, - Account, AddEditorProposal, AddMemberProposal, AddSubspaceProposal, EditProposal, Proposal, + space, AddEditorProposal, AddMemberProposal, AddSubspaceProposal, EditProposal, Proposal, ProposalCreator, Proposals, RemoveEditorProposal, RemoveMemberProposal, RemoveSubspaceProposal, - Space, }; use web3_utils::checksum_address; @@ -22,9 +22,9 @@ impl EventHandler { add_member_proposal: &geo::AddMemberProposalCreated, block: &BlockMetadata, ) -> Result<(), HandlerError> { - let space_id = Space::gen_id(network_ids::GEO, &add_member_proposal.dao_address); - let creator_id = Account::gen_id(&add_member_proposal.creator); - let proposed_account_id = Account::gen_id(&add_member_proposal.member); + let space_id = space::new_id(network_ids::GEO, &add_member_proposal.dao_address); + let creator_id = account::new_id(&add_member_proposal.creator); + let proposed_account_id = account::new_id(&add_member_proposal.member); // Create proposal let proposal = AddMemberProposal::new(Proposal { @@ -52,9 +52,9 @@ impl EventHandler { remove_member_proposal: &geo::RemoveMemberProposalCreated, block: &BlockMetadata, ) -> Result<(), HandlerError> { - let space_id = Space::gen_id(network_ids::GEO, &remove_member_proposal.dao_address); - let creator_id = Account::gen_id(&remove_member_proposal.creator); - let proposed_account_id = Account::gen_id(&remove_member_proposal.member); + let space_id = space::new_id(network_ids::GEO, &remove_member_proposal.dao_address); + let creator_id = 
account::new_id(&remove_member_proposal.creator); + let proposed_account_id = account::new_id(&remove_member_proposal.member); // Create proposal let proposal = RemoveMemberProposal::new(Proposal { @@ -82,9 +82,9 @@ impl EventHandler { add_editor_proposal: &geo::AddEditorProposalCreated, block: &BlockMetadata, ) -> Result<(), HandlerError> { - let space_id = Space::gen_id(network_ids::GEO, &add_editor_proposal.dao_address); - let creator_id = Account::gen_id(&add_editor_proposal.creator); - let proposed_account_id = Account::gen_id(&add_editor_proposal.editor); + let space_id = space::new_id(network_ids::GEO, &add_editor_proposal.dao_address); + let creator_id = account::new_id(&add_editor_proposal.creator); + let proposed_account_id = account::new_id(&add_editor_proposal.editor); // Create proposal let proposal = AddEditorProposal::new(Proposal { @@ -112,9 +112,9 @@ impl EventHandler { remove_editor_proposal: &geo::RemoveEditorProposalCreated, block: &BlockMetadata, ) -> Result<(), HandlerError> { - let space_id = Space::gen_id(network_ids::GEO, &remove_editor_proposal.dao_address); - let creator_id = Account::gen_id(&remove_editor_proposal.creator); - let proposed_account_id = Account::gen_id(&remove_editor_proposal.editor); + let space_id = space::new_id(network_ids::GEO, &remove_editor_proposal.dao_address); + let creator_id = account::new_id(&remove_editor_proposal.creator); + let proposed_account_id = account::new_id(&remove_editor_proposal.editor); // Create proposal let proposal = RemoveEditorProposal::new(Proposal { @@ -142,9 +142,9 @@ impl EventHandler { add_subspace_proposal: &geo::AddSubspaceProposalCreated, block: &BlockMetadata, ) -> Result<(), HandlerError> { - let space_id = Space::gen_id(network_ids::GEO, &add_subspace_proposal.dao_address); - let creator_id = Account::gen_id(&add_subspace_proposal.creator); - let proposed_subspace_id = Space::gen_id(network_ids::GEO, &add_subspace_proposal.subspace); + let space_id = space::new_id(network_ids::GEO, &add_subspace_proposal.dao_address); + let creator_id = account::new_id(&add_subspace_proposal.creator); + let proposed_subspace_id = space::new_id(network_ids::GEO, &add_subspace_proposal.subspace); // Create proposal let proposal = AddSubspaceProposal::new(Proposal { @@ -172,10 +172,10 @@ impl EventHandler { remove_subspace_proposal: &geo::RemoveSubspaceProposalCreated, block: &BlockMetadata, ) -> Result<(), HandlerError> { - let space_id = Space::gen_id(network_ids::GEO, &remove_subspace_proposal.dao_address); - let creator_id = Account::gen_id(&remove_subspace_proposal.creator); + let space_id = space::new_id(network_ids::GEO, &remove_subspace_proposal.dao_address); + let creator_id = account::new_id(&remove_subspace_proposal.creator); let proposed_subspace_id = - Space::gen_id(network_ids::GEO, &remove_subspace_proposal.subspace); + space::new_id(network_ids::GEO, &remove_subspace_proposal.subspace); // Create proposal let proposal = RemoveSubspaceProposal::new(Proposal { @@ -203,8 +203,8 @@ impl EventHandler { publish_edit_proposal: &geo::PublishEditProposalCreated, block: &BlockMetadata, ) -> Result<(), HandlerError> { - let space_id = Space::gen_id(network_ids::GEO, &publish_edit_proposal.dao_address); - let creator_id = Account::gen_id(&publish_edit_proposal.creator); + let space_id = space::new_id(network_ids::GEO, &publish_edit_proposal.dao_address); + let creator_id = account::new_id(&publish_edit_proposal.creator); let proposal = EditProposal::new( Proposal { @@ -217,7 +217,7 @@ impl EventHandler { 
publish_edit_proposal.content_uri.clone(), ); - let proposal_id = proposal.id.clone(); + let proposal_id = proposal.id().to_string(); // Insert Proposal proposal @@ -250,7 +250,7 @@ impl EventHandler { creator_id: &str, proposed_account_id: &str, ) -> Result<(), DatabaseError> { - let proposal_id = proposal.id.clone(); + let proposal_id = proposal.id().to_string(); // Insert Proposal proposal @@ -285,7 +285,7 @@ impl EventHandler { creator_id: &str, proposed_subspace_id: &str, ) -> Result<(), DatabaseError> { - let proposal_id = proposal.id.clone(); + let proposal_id = proposal.id().to_string(); // Insert Proposal proposal diff --git a/sink/src/events/space_created.rs b/sink/src/events/space_created.rs index 7696335..4e62dd3 100644 --- a/sink/src/events/space_created.rs +++ b/sink/src/events/space_created.rs @@ -5,7 +5,7 @@ use grc20_core::{ network_ids, pb::{self, geo}, }; -use grc20_sdk::models::{Account, Space, SpaceGovernanceType}; +use grc20_sdk::models::{account, space, SpaceGovernanceType}; use web3_utils::checksum_address; @@ -41,13 +41,13 @@ impl EventHandler { block.block_number, block.timestamp, checksum_address(&space_created.space_address), - Space::gen_id( + space::new_id( &import.previous_network, &import.previous_contract_address, ) ); - Some(Space::gen_id( + Some(space::new_id( &import.previous_network, &import.previous_contract_address, )) @@ -62,7 +62,7 @@ impl EventHandler { }; let space_id = maybe_existing_space_id - .unwrap_or_else(|| Space::gen_id(network_ids::GEO, &space_created.dao_address)); + .unwrap_or_else(|| space::new_id(network_ids::GEO, &space_created.dao_address)); tracing::info!( "Block #{} ({}): Creating space {}", @@ -71,7 +71,7 @@ impl EventHandler { space_id ); - Space::builder(&space_id, &space_created.dao_address) + space::builder(&space_id, &space_created.dao_address) .network(network_ids::GEO.to_string()) .space_plugin_address(&space_created.space_address) .build() @@ -88,7 +88,7 @@ impl EventHandler { block: &BlockMetadata, ) -> Result<(), HandlerError> { let space = - Space::find_entity_by_dao_address(&self.neo4j, &personal_space_created.dao_address) + space::find_entity_by_dao_address(&self.neo4j, &personal_space_created.dao_address) .await?; if let Some(space) = &space { @@ -112,7 +112,7 @@ impl EventHandler { .await?; // Add initial editors to the personal space - let editor = Account::new(personal_space_created.initial_editor.clone()); + let editor = account::new(personal_space_created.initial_editor.clone()); editor .insert(&self.neo4j, block, indexer_ids::INDEXER_SPACE_ID, "0") @@ -124,7 +124,7 @@ impl EventHandler { block.block_number, block.timestamp, space.id, - Account::gen_id(&personal_space_created.initial_editor), + account::new_id(&personal_space_created.initial_editor), ); } else { tracing::warn!( @@ -144,7 +144,7 @@ impl EventHandler { block: &BlockMetadata, ) -> Result<(), HandlerError> { let space = - Space::find_entity_by_dao_address(&self.neo4j, &governance_plugin_created.dao_address) + space::find_entity_by_dao_address(&self.neo4j, &governance_plugin_created.dao_address) .await?; if let Some(space) = space { diff --git a/sink/src/events/subspace_added.rs b/sink/src/events/subspace_added.rs index 5a23c84..d0806fd 100644 --- a/sink/src/events/subspace_added.rs +++ b/sink/src/events/subspace_added.rs @@ -1,6 +1,6 @@ use futures::join; use grc20_core::{block::BlockMetadata, indexer_ids, mapping::query_utils::Query, pb::geo}; -use grc20_sdk::models::{space::ParentSpace, Space}; +use grc20_sdk::models::{space, space::ParentSpace}; 
use web3_utils::checksum_address; use super::{handler::HandlerError, EventHandler}; @@ -12,8 +12,8 @@ impl EventHandler { block: &BlockMetadata, ) -> Result<(), HandlerError> { match join!( - Space::find_entity_by_space_plugin_address(&self.neo4j, &subspace_added.plugin_address), - Space::find_entity_by_dao_address(&self.neo4j, &subspace_added.subspace) + space::find_entity_by_space_plugin_address(&self.neo4j, &subspace_added.plugin_address), + space::find_entity_by_dao_address(&self.neo4j, &subspace_added.subspace) ) { (Ok(Some(parent_space)), Ok(Some(subspace))) => { tracing::info!( diff --git a/sink/src/events/subspace_removed.rs b/sink/src/events/subspace_removed.rs index 4f46c45..48b4655 100644 --- a/sink/src/events/subspace_removed.rs +++ b/sink/src/events/subspace_removed.rs @@ -1,5 +1,5 @@ use grc20_core::{block::BlockMetadata, network_ids, pb::geo}; -use grc20_sdk::models::{space::ParentSpace, Space}; +use grc20_sdk::models::{space, space::ParentSpace}; use super::{handler::HandlerError, EventHandler}; @@ -9,13 +9,13 @@ impl EventHandler { subspace_removed: &geo::SubspaceRemoved, block: &BlockMetadata, ) -> Result<(), HandlerError> { - let space = Space::find_entity_by_space_plugin_address( + let space = space::find_entity_by_space_plugin_address( &self.neo4j, &subspace_removed.plugin_address, ) .await?; - let subspace_id = Space::gen_id(network_ids::GEO, &subspace_removed.subspace); + let subspace_id = space::new_id(network_ids::GEO, &subspace_removed.subspace); if let Some(space) = space { ParentSpace::remove(&self.neo4j, block, &subspace_id, &space.id).await?; diff --git a/sink/src/events/vote_cast.rs b/sink/src/events/vote_cast.rs index b137c63..07ecf8b 100644 --- a/sink/src/events/vote_cast.rs +++ b/sink/src/events/vote_cast.rs @@ -4,7 +4,7 @@ use grc20_core::{ mapping::{entity_node, query_utils::Query}, pb::geo, }; -use grc20_sdk::models::{Account, Proposal, VoteCast}; +use grc20_sdk::models::{account, Proposal, VoteCast}; use super::{handler::HandlerError, EventHandler}; @@ -16,8 +16,8 @@ impl EventHandler { ) -> Result<(), HandlerError> { // // TODO: (optimization) Merge the two queries into one OR query // match join!( - // Space::find_by_voting_plugin_address(&self.neo4j, &vote.plugin_address), - // Space::find_by_member_access_plugin(&self.neo4j, &vote.plugin_address) + // space::find_by_voting_plugin_address(&self.neo4j, &vote.plugin_address), + // space::find_by_member_access_plugin(&self.neo4j, &vote.plugin_address) // ) { // // Space found // (Ok(Some(_space)), Ok(_)) | (Ok(None), Ok(Some(_space))) => { @@ -97,7 +97,7 @@ impl EventHandler { .send() .await?; - let maybe_account = entity_node::find_one(&self.neo4j, Account::gen_id(&vote.voter)) + let maybe_account = entity_node::find_one(&self.neo4j, account::new_id(&vote.voter)) .send() .await?;
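Usage sketch for the reworked hierarchy queries: ParentSpacesQuery and SubspacesQuery now yield (space_id, depth) tuples instead of resolved space entities, so callers resolve entities themselves. This is a minimal consumer, assuming the module paths shown in this diff (grc20_sdk::models::space, grc20_core::mapping::query_utils::QueryStream) and the helper name collect_parent_space_ids, which is hypothetical.

use futures::TryStreamExt;
use grc20_core::{error::DatabaseError, mapping::query_utils::QueryStream, neo4rs};
use grc20_sdk::models::space;

// Collect the IDs of all transitive parent spaces of `space_id`.
async fn collect_parent_space_ids(
    neo4j: &neo4rs::Graph,
    space_id: &str,
) -> Result<Vec<String>, DatabaseError> {
    space::parent_spaces(neo4j, space_id)
        .max_depth(None) // walk the PARENT_SPACE hierarchy without a depth cap
        .send()
        .await?
        .map_ok(|(parent_id, _depth)| parent_id)
        .try_collect()
        .await
}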
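A similar sketch for the new strict flag on SpaceTypesQuery: with strict(false), the query also returns types defined in the space's transitive parent spaces. Paths and the helper name list_visible_types are assumptions taken from this diff rather than a confirmed public API.

use futures::TryStreamExt;
use grc20_core::{error::DatabaseError, mapping::{query_utils::QueryStream, EntityNode}, neo4rs};
use grc20_sdk::models::space;

// List type entities visible from `space_id`, including ones inherited from parent spaces.
async fn list_visible_types(
    neo4j: &neo4rs::Graph,
    space_id: &str,
) -> Result<Vec<EntityNode>, DatabaseError> {
    space::types(neo4j, space_id)
        .strict(false) // include types from parent spaces, mirroring the non-strict path in SpaceTypesQuery::send
        .limit(50)
        .send()
        .await?
        .try_collect()
        .await
}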
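For reference, a hypothetical call-site migration matching the sink changes above: the former Space::gen_id and Space::find_by_dao_address associated functions become free functions in the space module (names as used in this diff; lookup_space itself is illustrative).

use grc20_core::{error::DatabaseError, network_ids, neo4rs};
use grc20_sdk::models::space;

// Resolve a space by its DAO address using the new module-level API.
async fn lookup_space(neo4j: &neo4rs::Graph, dao_address: &str) -> Result<(), DatabaseError> {
    let id = space::new_id(network_ids::GEO, dao_address); // was Space::gen_id(...)
    let found = space::find_by_dao_address(neo4j, dao_address).await?; // was Space::find_by_dao_address(...)
    println!("space {id}: indexed = {}", found.is_some());
    Ok(())
}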