From 2ca19116ce3bd07ba438e2dca421af6b86449149 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=AD=90=EF=B8=8FNINIKA=E2=AD=90=EF=B8=8F?= Date: Thu, 5 Dec 2024 13:23:42 +0300 Subject: [PATCH 1/2] refactor(queries): Remove even more singular queries (#5261) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(queries): allow query projections to work on metadata values via dynamic keys * refactor(queries): replace uses of FindAccountMetadata with projected FindAccounts * feat(queries): add a prototype of trigger action, exposing trigger metadata to projections * refactor(queries): replace uses of FindTriggerMetadata with projected FindTriggers * feat(queries): add projections and predicates for numeric and store `AssetValue`s * refactor(queries): replace uses of `FindAssetQuantityById` and `FindAssetMetadata` with projected `FindAssets` * refactor(queries): remove now unused singular queries --------- Signed-off-by: ⭐️NINIKA⭐️ --- crates/iroha/tests/asset.rs | 15 +- crates/iroha/tests/multisig.rs | 70 ++- crates/iroha/tests/queries/metadata.rs | 89 ++++ crates/iroha/tests/queries/mod.rs | 1 + crates/iroha/tests/queries/smart_contract.rs | 10 +- crates/iroha/tests/triggers/time_trigger.rs | 20 +- crates/iroha/tests/upgrade.rs | 38 +- crates/iroha_cli/src/main.rs | 9 +- crates/iroha_core/src/query/cursor.rs | 44 +- crates/iroha_core/src/query/store.rs | 62 +-- .../src/smartcontracts/isi/account.rs | 16 - .../src/smartcontracts/isi/asset.rs | 54 +- .../src/smartcontracts/isi/domain.rs | 33 -- .../iroha_core/src/smartcontracts/isi/mod.rs | 4 +- .../src/smartcontracts/isi/query.rs | 101 ---- .../src/smartcontracts/isi/triggers/mod.rs | 22 - crates/iroha_core/src/smartcontracts/wasm.rs | 5 - crates/iroha_core/src/sumeragi/main_loop.rs | 2 +- crates/iroha_data_model/src/lib.rs | 6 - .../src/query/dsl/compound_predicate.rs | 1 + crates/iroha_data_model/src/query/dsl/mod.rs | 31 +- .../src/query/dsl/predicates.rs | 122 +++-- .../src/query/dsl/type_descriptions.rs | 476 ++++++++++++++++-- crates/iroha_data_model/src/query/mod.rs | 165 ++---- crates/iroha_data_model/src/visit.rs | 20 +- .../src/default/isi/multisig/transaction.rs | 44 +- crates/iroha_schema_gen/src/lib.rs | 20 +- crates/iroha_smart_contract/src/lib.rs | 40 +- .../src/complex_isi.rs | 12 +- docs/source/references/schema.json | 357 +++++++------ .../src/lib.rs | 11 +- wasm/samples/mint_rose_trigger/src/lib.rs | 5 +- 32 files changed, 1110 insertions(+), 795 deletions(-) create mode 100644 crates/iroha/tests/queries/metadata.rs diff --git a/crates/iroha/tests/asset.rs b/crates/iroha/tests/asset.rs index 47db4c2550c..5743f7398c6 100644 --- a/crates/iroha/tests/asset.rs +++ b/crates/iroha/tests/asset.rs @@ -284,7 +284,10 @@ fn find_rate_and_make_exchange_isi_should_succeed() { let assert_balance = |asset_id: AssetId, expected: Numeric| { let got = test_client - .query_single(FindAssetQuantityById::new(asset_id)) + .query(FindAssets) + .filter_with(|asset| asset.id.eq(asset_id)) + .select_with(|asset| asset.value.numeric) + .execute_single() .expect("query should succeed"); assert_eq!(got, expected); }; @@ -293,7 +296,10 @@ fn find_rate_and_make_exchange_isi_should_succeed() { assert_balance(buyer_eth.clone(), numeric!(200)); let rate: u32 = test_client - .query_single(FindAssetQuantityById::new(rate)) + .query(FindAssets) + .filter_with(|asset| asset.id.eq(rate)) + .select_with(|asset| asset.value.numeric) + .execute_single() .expect("query should succeed") .try_into() .expect("numeric should be u32 
originally"); @@ -306,7 +312,10 @@ fn find_rate_and_make_exchange_isi_should_succeed() { let assert_purged = |asset_id: AssetId| { let _err = test_client - .query_single(FindAssetQuantityById::new(asset_id)) + .query(FindAssets) + .filter_with(|asset| asset.id.eq(asset_id)) + .select_with(|asset| asset.value.numeric) + .execute_single() .expect_err("query should fail, as zero assets are purged from accounts"); }; let seller_eth: AssetId = format!("eth#crypto#{}", &seller_id) diff --git a/crates/iroha/tests/multisig.rs b/crates/iroha/tests/multisig.rs index 0e8f36e17d2..59a1eb6cb9f 100644 --- a/crates/iroha/tests/multisig.rs +++ b/crates/iroha/tests/multisig.rs @@ -249,10 +249,10 @@ fn multisig_base(suite: TestSuite) -> Result<()> { // Check that the multisig transaction has not yet executed let _err = test_client - .query_single(FindAccountMetadata::new( - transaction_target.clone(), - key.clone(), - )) + .query(FindAccounts) + .filter_with(|account| account.id.eq(transaction_target.clone())) + .select_with(|account| account.metadata.key(key.clone())) + .execute_single() .expect_err("instructions shouldn't execute without enough approvals"); // The last approve to proceed to validate and execute the instructions @@ -268,7 +268,11 @@ fn multisig_base(suite: TestSuite) -> Result<()> { } // Check if the multisig transaction has executed - let res = test_client.query_single(FindAccountMetadata::new(transaction_target, key.clone())); + let res = test_client + .query(FindAccounts) + .filter_with(|account| account.id.eq(transaction_target.clone())) + .select_with(|account| account.metadata.key(key.clone())) + .execute_single(); match (&transaction_ttl_ms_opt, &unauthorized_target_opt) { (None, None) => { res.unwrap(); @@ -279,12 +283,17 @@ fn multisig_base(suite: TestSuite) -> Result<()> { } // Check if the transaction entry is deleted - let res = test_client.query_single(FindAccountMetadata::new( - multisig_account_id, - format!("multisig/proposals/{instructions_hash}") - .parse() - .unwrap(), - )); + let res = test_client + .query(FindAccounts) + .filter_with(|account| account.id.eq(multisig_account_id)) + .select_with(|account| { + account.metadata.key( + format!("multisig/proposals/{instructions_hash}") + .parse() + .unwrap(), + ) + }) + .execute_single(); match (&transaction_ttl_ms_opt, &unauthorized_target_opt) { (None, Some(_)) => { // In case failing validation, the entry can exit only by expiring @@ -407,10 +416,14 @@ fn multisig_recursion_base(suite: TestSuite) -> Result<()> { let proposal_value_at = |msa: AccountId, mst_hash: HashOf>| { test_client - .query_single(FindAccountMetadata::new( - msa.clone(), - format!("multisig/proposals/{mst_hash}").parse().unwrap(), - )) + .query(FindAccounts) + .filter_with(|account| account.id.eq(msa.clone())) + .select_with(|account| { + account + .metadata + .key(format!("multisig/proposals/{mst_hash}").parse().unwrap()) + }) + .execute_single() .expect("should be initialized by the root proposal") .try_into_any::() .unwrap() @@ -457,10 +470,10 @@ fn multisig_recursion_base(suite: TestSuite) -> Result<()> { // Check that the multisig transaction has not yet executed let _err = test_client - .query_single(FindAccountMetadata::new( - transaction_target.clone(), - key.clone(), - )) + .query(FindAccounts) + .filter_with(|account| account.id.eq(transaction_target.clone())) + .select_with(|account| account.metadata.key(key.clone())) + .execute_single() .expect_err("instructions shouldn't execute without enough approvals"); // The last approve to proceed to 
validate and execute the instructions @@ -476,7 +489,11 @@ fn multisig_recursion_base(suite: TestSuite) -> Result<()> { } // Check if the multisig transaction has executed - let res = test_client.query_single(FindAccountMetadata::new(transaction_target, key.clone())); + let res = test_client + .query(FindAccounts) + .filter_with(|account| account.id.eq(transaction_target)) + .select_with(|account| account.metadata.key(key.clone())) + .execute_single(); match (&transaction_ttl_ms_opt, &unauthorized_target_opt) { (None, None) => { res.unwrap(); @@ -493,10 +510,15 @@ fn multisig_recursion_base(suite: TestSuite) -> Result<()> { (msa_12345, approval_hash_to_012345), (msa_012345, instructions_hash), ] { - let res = test_client.query_single(FindAccountMetadata::new( - msa, - format!("multisig/proposals/{mst_hash}").parse().unwrap(), - )); + let res = test_client + .query(FindAccounts) + .filter_with(|account| account.id.eq(msa)) + .select_with(|account| { + account + .metadata + .key(format!("multisig/proposals/{mst_hash}").parse().unwrap()) + }) + .execute_single(); match (&transaction_ttl_ms_opt, &unauthorized_target_opt) { (None, Some(_)) => { // In case the root proposal is failing validation, the relevant entries can exit only by expiring diff --git a/crates/iroha/tests/queries/metadata.rs b/crates/iroha/tests/queries/metadata.rs new file mode 100644 index 00000000000..e1fb8ff1711 --- /dev/null +++ b/crates/iroha/tests/queries/metadata.rs @@ -0,0 +1,89 @@ +use std::{collections::BTreeMap, str::FromStr}; + +use iroha::{client::QueryError, data_model::prelude::*}; +use iroha_data_model::query::{ + builder::SingleQueryError, + error::{FindError, QueryExecutionFail}, +}; +use iroha_test_network::*; +use iroha_test_samples::{ALICE_ID, BOB_ID}; +use serde_json::json; + +#[test] +fn find_accounts_with_asset() { + let (network, _rt) = NetworkBuilder::new().start_blocking().unwrap(); + let test_client = network.client(); + + let key = Name::from_str("key").unwrap(); + let another_key = Name::from_str("another_key").unwrap(); + + test_client + .submit_blocking(SetKeyValue::account( + BOB_ID.clone(), + key.clone(), + json!({"funny": "value"}), + )) + .unwrap(); + test_client + .submit_blocking(SetKeyValue::account( + BOB_ID.clone(), + another_key.clone(), + "value", + )) + .unwrap(); + + // we have the following configuration: + // key another_key + // ALICE "value" - + // BOB {"funny": "value"} "value" + + // check that bulk retrieval works as expected + let key_values = test_client + .query(FindAccounts) + .filter_with(|account| account.id.eq(ALICE_ID.clone()) | account.id.eq(BOB_ID.clone())) + .select_with(|account| (account.id, account.metadata.key(key.clone()))) + .execute_all() + .unwrap() + .into_iter() + .collect::>(); + + assert_eq!(key_values.len(), 2); + assert_eq!(key_values[&ALICE_ID], "value".into()); + assert_eq!(key_values[&BOB_ID], json!({"funny": "value"}).into()); + + // check that missing metadata key produces an error + let alice_no_key_err = test_client + .query(FindAccounts) + .filter_with(|account| account.id.eq(ALICE_ID.clone())) + .select_with(|account| account.metadata.key(another_key.clone())) + .execute_single() + .unwrap_err(); + + let SingleQueryError::QueryError(QueryError::Validation(ValidationFail::QueryFailed( + QueryExecutionFail::Find(FindError::MetadataKey(returned_key)), + ))) = alice_no_key_err + else { + panic!("Got unexpected query error on missing metadata key {alice_no_key_err:?}",); + }; + assert_eq!(returned_key, another_key); + + // check single key 
retrieval + let another_key_value = test_client + .query(FindAccounts) + .filter_with(|account| account.id.eq(BOB_ID.clone())) + .select_with(|account| account.metadata.key(another_key.clone())) + .execute_single() + .unwrap(); + assert_eq!(another_key_value, "value".into()); + + // check predicates on non-existing metadata (they should just evaluate to false) + let accounts = test_client + .query(FindAccounts) + .filter_with(|account| account.metadata.key(another_key.clone()).eq("value".into())) + .select_with(|account| account.id) + .execute_all() + .unwrap(); + + assert_eq!(accounts.len(), 1); + assert_eq!(accounts[0], BOB_ID.clone()); +} diff --git a/crates/iroha/tests/queries/mod.rs b/crates/iroha/tests/queries/mod.rs index 5b57fab8e44..cb1407e95ea 100644 --- a/crates/iroha/tests/queries/mod.rs +++ b/crates/iroha/tests/queries/mod.rs @@ -9,6 +9,7 @@ use iroha_test_network::*; mod account; mod asset; +mod metadata; mod query_errors; mod role; mod smart_contract; diff --git a/crates/iroha/tests/queries/smart_contract.rs b/crates/iroha/tests/queries/smart_contract.rs index a07b9060461..7e07f71e7bf 100644 --- a/crates/iroha/tests/queries/smart_contract.rs +++ b/crates/iroha/tests/queries/smart_contract.rs @@ -17,10 +17,12 @@ fn live_query_is_dropped_after_smart_contract_end() -> Result<()> { ); client.submit_transaction_blocking(&transaction)?; - let metadata_value: Json = client.query_single(FindAccountMetadata::new( - client.account.clone(), - "cursor".parse().unwrap(), - ))?; + let metadata_value = client + .query(FindAccounts) + .filter_with(|account| account.id.eq(client.account.clone())) + .select_with(|account| account.metadata.key("cursor".parse().unwrap())) + .execute_single()?; + let asset_cursor = metadata_value.try_into_any()?; // here we are breaking the abstraction preventing us from using a cursor we pulled from the metadata diff --git a/crates/iroha/tests/triggers/time_trigger.rs b/crates/iroha/tests/triggers/time_trigger.rs index 03c7418b740..ffeed037863 100644 --- a/crates/iroha/tests/triggers/time_trigger.rs +++ b/crates/iroha/tests/triggers/time_trigger.rs @@ -40,7 +40,11 @@ fn mint_asset_after_3_sec() -> Result<()> { let account_id = ALICE_ID.clone(); let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone()); - let init_quantity = test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?; + let init_quantity = test_client + .query(FindAssets) + .filter_with(|asset| asset.id.eq(asset_id.clone())) + .select_with(|asset| asset.value.numeric) + .execute_single()?; let start_time = curr_time(); assert!( @@ -62,16 +66,22 @@ fn mint_asset_after_3_sec() -> Result<()> { // Schedule start is in the future so trigger isn't executed after creating a new block test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?; - let after_registration_quantity = - test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?; + let after_registration_quantity = test_client + .query(FindAssets) + .filter_with(|asset| asset.id.eq(asset_id.clone())) + .select_with(|asset| asset.value.numeric) + .execute_single()?; assert_eq!(init_quantity, after_registration_quantity); // Sleep long enough that trigger start is in the past std::thread::sleep(network.pipeline_time()); test_client.submit_blocking(Log::new(Level::DEBUG, "Just to create block".to_string()))?; - let after_wait_quantity = - test_client.query_single(FindAssetQuantityById::new(asset_id.clone()))?; + let after_wait_quantity = test_client + .query(FindAssets) + 
.filter_with(|asset| asset.id.eq(asset_id.clone())) + .select_with(|asset| asset.value.numeric) + .execute_single()?; // Schedule is in the past now so trigger is executed assert_eq!( init_quantity.checked_add(1u32.into()).unwrap(), diff --git a/crates/iroha/tests/upgrade.rs b/crates/iroha/tests/upgrade.rs index c6f2c04f995..3b25c3548ea 100644 --- a/crates/iroha/tests/upgrade.rs +++ b/crates/iroha/tests/upgrade.rs @@ -1,4 +1,4 @@ -use executor_custom_data_model::permissions::CanControlDomainLives; +use executor_custom_data_model::{complex_isi::NumericQuery, permissions::CanControlDomainLives}; use eyre::Result; use futures_util::TryStreamExt as _; use iroha::{ @@ -216,7 +216,11 @@ fn executor_custom_instructions_simple() -> Result<()> { // Check that bob has 1 rose assert_eq!( - client.query_single(FindAssetQuantityById::new(bob_rose.clone()))?, + client + .query(FindAssets) + .filter_with(|asset| asset.id.eq(bob_rose.clone())) + .select_with(|asset| asset.value.numeric) + .execute_single()?, Numeric::from(1u32) ); @@ -229,7 +233,11 @@ fn executor_custom_instructions_simple() -> Result<()> { // Check that bob has 2 roses assert_eq!( - client.query_single(FindAssetQuantityById::new(bob_rose.clone()))?, + client + .query(FindAssets) + .filter_with(|asset| asset.id.eq(bob_rose)) + .select_with(|asset| asset.value.numeric) + .execute_single()?, Numeric::from(2u32) ); @@ -258,16 +266,20 @@ fn executor_custom_instructions_complex() -> Result<()> { // Check that bob has 6 roses assert_eq!( - client.query_single(FindAssetQuantityById::new(bob_rose.clone()))?, + client + .query(FindAssets) + .filter_with(|asset| asset.id.eq(bob_rose.clone())) + .select_with(|asset| asset.value.numeric) + .execute_single()?, Numeric::from(6u32) ); // If bob has more then 5 roses, then burn 1 rose let burn_bob_rose_if_more_then_5 = || -> Result<()> { let condition = Greater::new( - EvaluatesTo::new_unchecked(Expression::Query( - FindAssetQuantityById::new(bob_rose.clone()).into(), - )), + EvaluatesTo::new_unchecked(Expression::Query(NumericQuery::FindAssetQuantityById( + bob_rose.clone(), + ))), Numeric::from(5u32), ); let then = Burn::asset_numeric(Numeric::from(1u32), bob_rose.clone()); @@ -281,7 +293,11 @@ fn executor_custom_instructions_complex() -> Result<()> { // Check that bob has 5 roses assert_eq!( - client.query_single(FindAssetQuantityById::new(bob_rose.clone()))?, + client + .query(FindAssets) + .filter_with(|asset| asset.id.eq(bob_rose.clone())) + .select_with(|asset| asset.value.numeric) + .execute_single()?, Numeric::from(5u32) ); @@ -289,7 +305,11 @@ fn executor_custom_instructions_complex() -> Result<()> { // Check that bob has 5 roses assert_eq!( - client.query_single(FindAssetQuantityById::new(bob_rose.clone()))?, + client + .query(FindAssets) + .filter_with(|asset| asset.id.eq(bob_rose.clone())) + .select_with(|asset| asset.value.numeric) + .execute_single()?, Numeric::from(5u32) ); diff --git a/crates/iroha_cli/src/main.rs b/crates/iroha_cli/src/main.rs index 2270d10e52b..52ef99b42bd 100644 --- a/crates/iroha_cli/src/main.rs +++ b/crates/iroha_cli/src/main.rs @@ -1059,10 +1059,13 @@ mod asset { fn run(self, context: &mut dyn RunContext) -> Result<()> { let Self { id: asset_id, key } = self; let client = context.client_from_config(); - let find_key_value = FindAssetMetadata::new(asset_id, key); let asset = client - .query_single(find_key_value) + .query(FindAssets) + .filter_with(|asset| asset.id.eq(asset_id)) + .select_with(|asset| asset.value.store.key(key)) + .execute_single() .wrap_err("Failed 
to get key-value")?; + context.print_data(&asset)?; Ok(()) } @@ -1228,7 +1231,7 @@ mod json { // for efficiency reasons iroha encodes query results in a columnar format, // so we need to transpose the batch to get the format that is more natural for humans let mut batches = vec![Vec::new(); accumulated_batch.len()]; - for batch in accumulated_batch.into_iter() { + for batch in accumulated_batch { // downcast to json and extract the actual array // dynamic typing is just easier to use here than introducing a bunch of new types only for iroha_cli let batch = serde_json::to_value(batch)?; diff --git a/crates/iroha_core/src/query/cursor.rs b/crates/iroha_core/src/query/cursor.rs index fad98fcb255..fa0276d49bc 100644 --- a/crates/iroha_core/src/query/cursor.rs +++ b/crates/iroha_core/src/query/cursor.rs @@ -6,35 +6,15 @@ use iroha_data_model::{ prelude::SelectorTuple, query::{ dsl::{EvaluateSelector, HasProjection, SelectorMarker}, + error::QueryExecutionFail, QueryOutputBatchBox, QueryOutputBatchBoxTuple, }, }; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; - -/// An error with cursor processing. -#[derive( - Debug, - displaydoc::Display, - thiserror::Error, - Copy, - Clone, - Serialize, - Deserialize, - Encode, - Decode, -)] -pub enum Error { - /// The server's cursor does not match the provided cursor. - Mismatch, - /// There aren't enough items to proceed. - Done, -} fn evaluate_selector_tuple( batch: Vec, selector: &SelectorTuple, -) -> QueryOutputBatchBoxTuple +) -> Result where T: HasProjection + 'static, T::Projection: EvaluateSelector, @@ -46,22 +26,22 @@ where while let Some(item) = iter.next() { if iter.peek().is_none() { // do not clone the last item - batch_tuple.push(item.project(batch.into_iter())); - return QueryOutputBatchBoxTuple { tuple: batch_tuple }; + batch_tuple.push(item.project(batch.into_iter())?); + return Ok(QueryOutputBatchBoxTuple { tuple: batch_tuple }); } - batch_tuple.push(item.project_clone(batch.iter())); + batch_tuple.push(item.project_clone(batch.iter())?); } // this should only happen for empty selectors - QueryOutputBatchBoxTuple { tuple: batch_tuple } + Ok(QueryOutputBatchBoxTuple { tuple: batch_tuple }) } trait BatchedTrait { fn next_batch( &mut self, cursor: u64, - ) -> Result<(QueryOutputBatchBoxTuple, Option), Error>; + ) -> Result<(QueryOutputBatchBoxTuple, Option), QueryExecutionFail>; fn remaining(&self) -> u64; } @@ -86,15 +66,15 @@ where fn next_batch( &mut self, cursor: u64, - ) -> Result<(QueryOutputBatchBoxTuple, Option), Error> { + ) -> Result<(QueryOutputBatchBoxTuple, Option), QueryExecutionFail> { let Some(server_cursor) = self.cursor else { // the server is done with the iterator - return Err(Error::Done); + return Err(QueryExecutionFail::CursorDone); }; if cursor != server_cursor { // the cursor doesn't match - return Err(Error::Mismatch); + return Err(QueryExecutionFail::CursorMismatch); } let expected_batch_size: usize = self @@ -117,7 +97,7 @@ where .collect(); // evaluate the requested projections - let batch = evaluate_selector_tuple(batch, &self.selector); + let batch = evaluate_selector_tuple(batch, &self.selector)?; // did we get enough elements to continue? 
if current_batch_size >= expected_batch_size { @@ -186,7 +166,7 @@ impl ErasedQueryIterator { pub fn next_batch( &mut self, cursor: u64, - ) -> Result<(QueryOutputBatchBoxTuple, Option), Error> { + ) -> Result<(QueryOutputBatchBoxTuple, Option), QueryExecutionFail> { self.inner.next_batch(cursor) } diff --git a/crates/iroha_core/src/query/store.rs b/crates/iroha_core/src/query/store.rs index 6694ce4a333..98078ecac0b 100644 --- a/crates/iroha_core/src/query/store.rs +++ b/crates/iroha_core/src/query/store.rs @@ -18,53 +18,9 @@ use iroha_data_model::{ }; use iroha_futures::supervisor::{Child, OnShutdown, ShutdownSignal}; use iroha_logger::{trace, warn}; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; use tokio::task::JoinHandle; -use super::cursor::{ErasedQueryIterator, Error as CursorError}; - -/// Query service error. -#[derive( - Debug, - thiserror::Error, - displaydoc::Display, - Copy, - Clone, - Serialize, - Deserialize, - Encode, - Decode, -)] -pub enum Error { - /// Query not found in the live query store. - NotFound, - /// Cursor error. - #[error(transparent)] - Cursor(#[from] CursorError), - /// Fetch size is too big. - FetchSizeTooBig, - /// Reached the limit of parallel queries. Either wait for previous queries to complete, or increase the limit in the config. - CapacityLimit, -} - -#[allow(clippy::fallible_impl_from)] -impl From for QueryExecutionFail { - fn from(error: Error) -> Self { - match error { - Error::NotFound => QueryExecutionFail::NotFound, - Error::Cursor(error) => match error { - CursorError::Mismatch => QueryExecutionFail::CursorMismatch, - CursorError::Done => QueryExecutionFail::CursorDone, - }, - Error::FetchSizeTooBig => QueryExecutionFail::FetchSizeTooBig, - Error::CapacityLimit => QueryExecutionFail::CapacityLimit, - } - } -} - -/// Result type for [`LiveQueryStore`] methods. -pub type Result = std::result::Result; +use super::cursor::ErasedQueryIterator; type LiveQuery = ErasedQueryIterator; @@ -186,7 +142,7 @@ impl LiveQueryStore { query_id: QueryId, live_query: ErasedQueryIterator, authority: AccountId, - ) -> Result<()> { + ) -> Result<(), QueryExecutionFail> { trace!(%query_id, "Inserting new query"); self.check_capacity(&authority)?; self.insert(query_id, live_query, authority); @@ -199,13 +155,13 @@ impl LiveQueryStore { &self, query_id: QueryId, cursor: NonZeroU64, - ) -> Result<(QueryOutputBatchBoxTuple, u64, Option)> { + ) -> Result<(QueryOutputBatchBoxTuple, u64, Option), QueryExecutionFail> { trace!(%query_id, "Advancing existing query"); let QueryInfo { mut live_query, authority, .. 
- } = self.remove(&query_id).ok_or(Error::NotFound)?; + } = self.remove(&query_id).ok_or(QueryExecutionFail::NotFound)?; let (next_batch, next_cursor) = live_query.next_batch(cursor.get())?; let remaining = live_query.remaining(); if next_cursor.is_some() { @@ -214,13 +170,13 @@ impl LiveQueryStore { Ok((next_batch, remaining, next_cursor)) } - fn check_capacity(&self, authority: &AccountId) -> Result<()> { + fn check_capacity(&self, authority: &AccountId) -> Result<(), QueryExecutionFail> { if self.queries.len() >= self.capacity.get() { warn!( max_queries = self.capacity, "Reached maximum allowed number of queries in LiveQueryStore" ); - return Err(Error::CapacityLimit); + return Err(QueryExecutionFail::CapacityLimit); } if let Some(value) = self.queries_per_user.get(authority) { if *value >= self.capacity_per_user.get() { @@ -229,7 +185,7 @@ impl LiveQueryStore { %authority, "Account reached maximum allowed number of queries in LiveQueryStore" ); - return Err(Error::CapacityLimit); + return Err(QueryExecutionFail::CapacityLimit); } } Ok(()) @@ -253,7 +209,7 @@ impl LiveQueryStoreHandle { &self, mut live_query: ErasedQueryIterator, authority: &AccountId, - ) -> Result { + ) -> Result { let query_id = uuid::Uuid::new_v4().to_string(); let curr_cursor = 0; @@ -284,7 +240,7 @@ impl LiveQueryStoreHandle { pub fn handle_iter_continue( &self, ForwardCursor { query, cursor }: ForwardCursor, - ) -> Result { + ) -> Result { let (batch, remaining, next_cursor) = self.store.get_query_next_batch(query.clone(), cursor)?; diff --git a/crates/iroha_core/src/smartcontracts/isi/account.rs b/crates/iroha_core/src/smartcontracts/isi/account.rs index f410a2124d2..f4ffccff76a 100644 --- a/crates/iroha_core/src/smartcontracts/isi/account.rs +++ b/crates/iroha_core/src/smartcontracts/isi/account.rs @@ -5,7 +5,6 @@ use iroha_data_model::{prelude::*, query::error::FindError}; use iroha_telemetry::metrics; use super::prelude::*; -use crate::ValidSingularQuery; impl Registrable for iroha_data_model::account::NewAccount { type Target = Account; @@ -444,7 +443,6 @@ pub mod query { permission::Permission, query::{dsl::CompoundPredicate, error::QueryExecutionFail as Error}, }; - use iroha_primitives::json::Json; use super::*; use crate::{smartcontracts::ValidQuery, state::StateReadOnly}; @@ -497,20 +495,6 @@ pub mod query { } } - impl ValidSingularQuery for FindAccountMetadata { - #[metrics(+"find_account_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { - let id = &self.id; - let key = &self.key; - iroha_logger::trace!(%id, %key); - state_ro - .world() - .map_account(id, |account| account.metadata.get(key).cloned())? 
- .ok_or_else(|| FindError::MetadataKey(key.clone()).into()) - .map(Into::into) - } - } - impl ValidQuery for FindAccountsWithAsset { #[metrics(+"find_accounts_with_asset")] fn execute( diff --git a/crates/iroha_core/src/smartcontracts/isi/asset.rs b/crates/iroha_core/src/smartcontracts/isi/asset.rs index 841f0620def..3382f4cd8d0 100644 --- a/crates/iroha_core/src/smartcontracts/isi/asset.rs +++ b/crates/iroha_core/src/smartcontracts/isi/asset.rs @@ -424,10 +424,9 @@ pub mod isi { pub mod query { use eyre::Result; use iroha_data_model::{ - asset::{Asset, AssetDefinition, AssetValue}, + asset::{Asset, AssetDefinition}, query::{dsl::CompoundPredicate, error::QueryExecutionFail as Error}, }; - use iroha_primitives::json::Json; use super::*; use crate::{smartcontracts::ValidQuery, state::StateReadOnly}; @@ -460,55 +459,4 @@ pub mod query { .cloned()) } } - - impl ValidSingularQuery for FindAssetQuantityById { - #[metrics(+"find_asset_quantity_by_id")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { - let id = &self.id; - iroha_logger::trace!(%id); - let value = state_ro - .world() - .asset(id) - .map_err(|asset_err| { - if let Err(definition_err) = state_ro.world().asset_definition(&id.definition) { - Error::Find(definition_err) - } else { - asset_err - } - })? - .value; - - match value { - AssetValue::Store(_) => Err(Error::Conversion( - "Can't get quantity for strore asset".to_string(), - )), - AssetValue::Numeric(numeric) => Ok(numeric), - } - } - } - - impl ValidSingularQuery for FindAssetMetadata { - #[metrics(+"find_asset_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { - let id = &self.id; - let key = &self.key; - let asset = state_ro.world().asset(id).map_err(|asset_err| { - if let Err(definition_err) = state_ro.world().asset_definition(&id.definition) { - Error::Find(definition_err) - } else { - asset_err - } - })?; - iroha_logger::trace!(%id, %key); - let AssetValue::Store(store) = &asset.value else { - return Err(Error::Conversion("expected store, found other".to_owned())); - }; - - store - .get(key) - .ok_or_else(|| Error::Find(FindError::MetadataKey(key.clone()))) - .cloned() - .map(Into::into) - } - } } diff --git a/crates/iroha_core/src/smartcontracts/isi/domain.rs b/crates/iroha_core/src/smartcontracts/isi/domain.rs index 5f579b195e1..f0da7c71ea0 100644 --- a/crates/iroha_core/src/smartcontracts/isi/domain.rs +++ b/crates/iroha_core/src/smartcontracts/isi/domain.rs @@ -380,12 +380,10 @@ pub mod isi { /// Query module provides [`Query`] Domain related implementations. pub mod query { - use eyre::Result; use iroha_data_model::{ domain::Domain, query::{dsl::CompoundPredicate, error::QueryExecutionFail}, }; - use iroha_primitives::json::Json; use super::*; use crate::{smartcontracts::ValidQuery, state::StateReadOnly}; @@ -404,35 +402,4 @@ pub mod query { .cloned()) } } - - impl ValidSingularQuery for FindDomainMetadata { - #[metrics(+"find_domain_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { - let id = &self.id; - let key = &self.key; - iroha_logger::trace!(%id, %key); - state_ro - .world() - .map_domain(id, |domain| domain.metadata.get(key).cloned())? 
- .ok_or_else(|| FindError::MetadataKey(key.clone()).into()) - .map(Into::into) - } - } - - impl ValidSingularQuery for FindAssetDefinitionMetadata { - #[metrics(+"find_asset_definition_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { - let id = &self.id; - let key = &self.key; - iroha_logger::trace!(%id, %key); - Ok(state_ro - .world() - .asset_definition(id)? - .metadata - .get(key) - .ok_or(FindError::MetadataKey(key.clone())) - .cloned() - .map(Into::into)?) - } - } } diff --git a/crates/iroha_core/src/smartcontracts/isi/mod.rs b/crates/iroha_core/src/smartcontracts/isi/mod.rs index 570f1d64475..b72df9249c8 100644 --- a/crates/iroha_core/src/smartcontracts/isi/mod.rs +++ b/crates/iroha_core/src/smartcontracts/isi/mod.rs @@ -20,7 +20,6 @@ use mv::storage::StorageReadOnly; use super::Execute; use crate::{ - prelude::*, smartcontracts::triggers::set::SetReadOnly, state::{StateReadOnly, StateTransaction, WorldReadOnly}, }; @@ -228,6 +227,7 @@ pub mod prelude { mod tests { use std::sync::Arc; + use iroha_crypto::KeyPair; use iroha_test_samples::{ gen_account_in, ALICE_ID, SAMPLE_GENESIS_ACCOUNT_ID, SAMPLE_GENESIS_ACCOUNT_KEYPAIR, }; @@ -239,7 +239,7 @@ mod tests { kura::Kura, query::store::LiveQueryStore, state::{State, World}, - tx::AcceptTransactionFail, + tx::{AcceptTransactionFail, AcceptedTransaction}, }; fn state_with_test_domains(kura: &Arc) -> Result { diff --git a/crates/iroha_core/src/smartcontracts/isi/query.rs b/crates/iroha_core/src/smartcontracts/isi/query.rs index 72f28da4d72..d2e4929aa2f 100644 --- a/crates/iroha_core/src/smartcontracts/isi/query.rs +++ b/crates/iroha_core/src/smartcontracts/isi/query.rs @@ -236,30 +236,12 @@ impl ValidQueryRequest { match self.0 { QueryRequest::Singular(singular_query) => { let output = match singular_query { - SingularQueryBox::FindAssetQuantityById(q) => { - SingularQueryOutputBox::from(q.execute(state)?) - } SingularQueryBox::FindExecutorDataModel(q) => { SingularQueryOutputBox::from(q.execute(state)?) } SingularQueryBox::FindParameters(q) => { SingularQueryOutputBox::from(q.execute(state)?) } - SingularQueryBox::FindDomainMetadata(q) => { - SingularQueryOutputBox::from(q.execute(state)?) - } - SingularQueryBox::FindAccountMetadata(q) => { - SingularQueryOutputBox::from(q.execute(state)?) - } - SingularQueryBox::FindAssetMetadata(q) => { - SingularQueryOutputBox::from(q.execute(state)?) - } - SingularQueryBox::FindAssetDefinitionMetadata(q) => { - SingularQueryOutputBox::from(q.execute(state)?) - } - SingularQueryBox::FindTriggerMetadata(q) => { - SingularQueryOutputBox::from(q.execute(state)?) 
- } }; Ok(QueryResponse::Singular(output)) @@ -359,7 +341,6 @@ impl ValidQueryRequest { mod tests { use iroha_crypto::{Hash, KeyPair}; use iroha_data_model::{block::BlockHeader, query::dsl::CompoundPredicate}; - use iroha_primitives::json::Json; use iroha_test_samples::{gen_account_in, ALICE_ID, ALICE_KEYPAIR}; use nonzero_ext::nonzero; use tokio::test; @@ -384,36 +365,6 @@ mod tests { World::with([domain], [account], [asset_definition]) } - fn world_with_test_asset_with_metadata() -> World { - let asset_definition_id = "rose#wonderland" - .parse::() - .expect("Valid"); - let domain = Domain::new("wonderland".parse().expect("Valid")).build(&ALICE_ID); - let account = Account::new(ALICE_ID.clone()).build(&ALICE_ID); - let asset_definition = - AssetDefinition::numeric(asset_definition_id.clone()).build(&ALICE_ID); - - let mut store = Metadata::default(); - store.insert("Bytes".parse().expect("Valid"), vec![1_u32, 2_u32, 3_u32]); - let asset_id = AssetId::new(asset_definition_id, account.id().clone()); - let asset = Asset::new(asset_id, AssetValue::Store(store)); - - World::with_assets([domain], [account], [asset_definition], [asset]) - } - - fn world_with_test_account_with_metadata() -> Result { - let mut metadata = Metadata::default(); - metadata.insert("Bytes".parse()?, vec![1_u32, 2_u32, 3_u32]); - - let domain = Domain::new("wonderland".parse()?).build(&ALICE_ID); - let account = Account::new(ALICE_ID.clone()) - .with_metadata(metadata) - .build(&ALICE_ID); - let asset_definition_id = "rose#wonderland".parse().expect("Valid"); - let asset_definition = AssetDefinition::numeric(asset_definition_id).build(&ALICE_ID); - Ok(World::with([domain], [account], [asset_definition])) - } - fn state_with_test_blocks_and_transactions( blocks: u64, valid_tx_per_block: usize, @@ -490,31 +441,6 @@ mod tests { Ok(state) } - #[test] - async fn asset_store() -> Result<()> { - let kura = Kura::blank_kura_for_testing(); - let query_handle = LiveQueryStore::start_test(); - let state = State::new(world_with_test_asset_with_metadata(), kura, query_handle); - - let asset_definition_id = "rose#wonderland".parse()?; - let asset_id = AssetId::new(asset_definition_id, ALICE_ID.clone()); - let bytes = FindAssetMetadata::new(asset_id, "Bytes".parse()?).execute(&state.view())?; - assert_eq!(Json::from(vec![1_u32, 2_u32, 3_u32,]), bytes,); - Ok(()) - } - - #[test] - async fn account_metadata() -> Result<()> { - let kura = Kura::blank_kura_for_testing(); - let query_handle = LiveQueryStore::start_test(); - let state = State::new(world_with_test_account_with_metadata()?, kura, query_handle); - - let bytes = - FindAccountMetadata::new(ALICE_ID.clone(), "Bytes".parse()?).execute(&state.view())?; - assert_eq!(Json::from(vec![1_u32, 2_u32, 3_u32,]), bytes,); - Ok(()) - } - #[test] async fn find_all_blocks() -> Result<()> { let num_blocks = 100; @@ -676,31 +602,4 @@ mod tests { } Ok(()) } - - #[test] - async fn domain_metadata() -> Result<()> { - let kura = Kura::blank_kura_for_testing(); - let state = { - let mut metadata = Metadata::default(); - metadata.insert("Bytes".parse()?, vec![1_u32, 2_u32, 3_u32]); - let domain = Domain::new("wonderland".parse()?) 
- .with_metadata(metadata) - .build(&ALICE_ID); - let account = Account::new(ALICE_ID.clone()).build(&ALICE_ID); - let asset_definition_id = "rose#wonderland".parse()?; - let asset_definition = AssetDefinition::numeric(asset_definition_id).build(&ALICE_ID); - let query_handle = LiveQueryStore::start_test(); - State::new( - World::with([domain], [account], [asset_definition]), - kura, - query_handle, - ) - }; - - let domain_id = "wonderland".parse()?; - let key = "Bytes".parse()?; - let bytes = FindDomainMetadata::new(domain_id, key).execute(&state.view())?; - assert_eq!(Json::from(vec![1_u32, 2_u32, 3_u32,]), bytes,); - Ok(()) - } } diff --git a/crates/iroha_core/src/smartcontracts/isi/triggers/mod.rs b/crates/iroha_core/src/smartcontracts/isi/triggers/mod.rs index ed1c9d70785..c0a4623b5b8 100644 --- a/crates/iroha_core/src/smartcontracts/isi/triggers/mod.rs +++ b/crates/iroha_core/src/smartcontracts/isi/triggers/mod.rs @@ -315,7 +315,6 @@ pub mod query { }, trigger::{Trigger, TriggerId}, }; - use iroha_primitives::json::Json; use super::*; use crate::{ @@ -363,25 +362,4 @@ pub mod query { .filter(move |trigger| filter.applies(trigger))) } } - - impl ValidSingularQuery for FindTriggerMetadata { - #[metrics(+"find_trigger_key_value_by_id_and_key")] - fn execute(&self, state_ro: &impl StateReadOnly) -> Result { - let id = &self.id; - let key = &self.key; - iroha_logger::trace!(%id, %key); - state_ro - .world() - .triggers() - .inspect_by_id(id, |action| { - action - .metadata() - .get(key) - .cloned() - .ok_or_else(|| FindError::MetadataKey(key.clone()).into()) - }) - .ok_or_else(|| Error::Find(FindError::Trigger(id.clone())))? - .map(Into::into) - } - } } diff --git a/crates/iroha_core/src/smartcontracts/wasm.rs b/crates/iroha_core/src/smartcontracts/wasm.rs index f4a6feddeba..76cab440708 100644 --- a/crates/iroha_core/src/smartcontracts/wasm.rs +++ b/crates/iroha_core/src/smartcontracts/wasm.rs @@ -109,11 +109,6 @@ pub mod error { pub enum Error { /// Runtime initialization failure Initialization(#[source] WasmtimeError), - /// Runtime finalization failure. - /// - /// Currently only [`crate::query::store::Error`] might fail in this case. - /// [`From`] is not implemented to force users to explicitly wrap this error. 
- Finalization(#[source] crate::query::store::Error), /// Failed to load module ModuleLoading(#[source] WasmtimeError), /// Module could not be instantiated diff --git a/crates/iroha_core/src/sumeragi/main_loop.rs b/crates/iroha_core/src/sumeragi/main_loop.rs index ac3a05d813a..136bf59daa9 100644 --- a/crates/iroha_core/src/sumeragi/main_loop.rs +++ b/crates/iroha_core/src/sumeragi/main_loop.rs @@ -324,7 +324,7 @@ impl Sumeragi { }) .collect::>() .join("\n"); - panic!("Genesis contains invalid transactions:\n{}", errors); + panic!("Genesis contains invalid transactions:\n{errors}"); } // NOTE: By this time genesis block is executed and list of trusted peers is updated diff --git a/crates/iroha_data_model/src/lib.rs b/crates/iroha_data_model/src/lib.rs index 7d0de719e22..eab5c382d76 100644 --- a/crates/iroha_data_model/src/lib.rs +++ b/crates/iroha_data_model/src/lib.rs @@ -123,15 +123,10 @@ mod seal { // Boxed queries SingularQueryBox, FindAccounts, - FindAccountMetadata, FindAccountsWithAsset, FindAssets, FindAssetsDefinitions, - FindAssetQuantityById, - FindAssetMetadata, - FindAssetDefinitionMetadata, FindDomains, - FindDomainMetadata, FindPeers, FindBlocks, FindBlockHeaders, @@ -140,7 +135,6 @@ mod seal { FindExecutorDataModel, FindActiveTriggerIds, FindTriggers, - FindTriggerMetadata, FindRoles, FindRoleIds, FindRolesByAccountId, diff --git a/crates/iroha_data_model/src/query/dsl/compound_predicate.rs b/crates/iroha_data_model/src/query/dsl/compound_predicate.rs index b2535510611..45d68dc1a83 100644 --- a/crates/iroha_data_model/src/query/dsl/compound_predicate.rs +++ b/crates/iroha_data_model/src/query/dsl/compound_predicate.rs @@ -35,6 +35,7 @@ impl> CompoundPredicate { // aliases for logical operations /// Negate the predicate. #[must_use] + #[expect(clippy::should_implement_trait)] // we do implement the `Not` trait, this is just a shorthand to avoid requiring importing it pub fn not(self) -> Self { !self } diff --git a/crates/iroha_data_model/src/query/dsl/mod.rs b/crates/iroha_data_model/src/query/dsl/mod.rs index 153ee2a3da4..6ee8e7764cc 100644 --- a/crates/iroha_data_model/src/query/dsl/mod.rs +++ b/crates/iroha_data_model/src/query/dsl/mod.rs @@ -87,7 +87,7 @@ pub use self::{ selector_traits::{IntoSelector, IntoSelectorTuple}, selector_tuple::SelectorTuple, }; -use crate::query::QueryOutputBatchBox; +use crate::query::{error::QueryExecutionFail, QueryOutputBatchBox}; /// Trait implemented on all evaluable predicates for type `T`. pub trait EvaluatePredicate { @@ -104,10 +104,24 @@ pub trait HasPredicateAtom { /// Trait implemented on all evaluable selectors for type `T`. pub trait EvaluateSelector { /// Select the field from each of the elements in the input and type-erase the result. Cloning version. - #[expect(single_use_lifetimes)] // FP, this the suggested change is not allowed on stable - fn project_clone<'a>(&self, batch: impl Iterator) -> QueryOutputBatchBox; + /// + /// # Errors + /// + /// Returns an error if the projection fails. + #[expect(single_use_lifetimes)] // FP, the suggested change is not allowed on stable + fn project_clone<'a>( + &self, + batch: impl Iterator, + ) -> Result; /// Select the field from each of the elements in the input and type-erase the result. - fn project(&self, batch: impl Iterator) -> QueryOutputBatchBox; + /// + /// # Errors + /// + /// Returns an error if the projection fails. 
+ fn project( + &self, + batch: impl Iterator, + ) -> Result; } // The IntoSchema derive is only needed for `PredicateMarker` to have `type_name` // the actual value of these types is never encoded @@ -147,7 +161,7 @@ pub trait HasProjection: Projectable { /// A trait allowing to get the prototype for the type. pub trait HasPrototype { /// The prototype type for this type. - type Prototype: Default + Copy; + type Prototype; } /// Describes how to convert a projection on `InputType` to a projection on `OutputType` by wrapping it in a projection. @@ -159,19 +173,22 @@ pub trait ObjectProjector { /// Convert the projection on [`Self::InputType`] to a projection on [`Self::OutputType`]. fn project( + &self, projection: >::Projection, ) -> >::Projection; /// Construct a projection from an atom and convert it to a projection on [`Self::OutputType`]. fn wrap_atom( + &self, atom: >::AtomType, ) -> >::Projection { let input_projection = >::atom(atom); - Self::project(input_projection) + self.project(input_projection) } } /// An [`ObjectProjector`] that does not change the type, serving as a base case for the recursion. +#[derive_where::derive_where(Default, Copy, Clone)] pub struct BaseProjector(PhantomData<(Marker, T)>); impl ObjectProjector for BaseProjector @@ -181,7 +198,7 @@ where type InputType = T; type OutputType = T; - fn project(projection: T::Projection) -> T::Projection { + fn project(&self, projection: T::Projection) -> T::Projection { projection } } diff --git a/crates/iroha_data_model/src/query/dsl/predicates.rs b/crates/iroha_data_model/src/query/dsl/predicates.rs index 3a6d9e23895..c0237c45043 100644 --- a/crates/iroha_data_model/src/query/dsl/predicates.rs +++ b/crates/iroha_data_model/src/query/dsl/predicates.rs @@ -4,6 +4,7 @@ use alloc::{format, string::String, vec::Vec}; use iroha_crypto::{HashOf, PublicKey}; +use iroha_primitives::{json::Json, numeric::Numeric}; use crate::{ account::{Account, AccountId}, @@ -18,14 +19,15 @@ use crate::{ query::{ dsl::{ type_descriptions::{ - AccountIdPrototype, AccountPrototype, AssetDefinitionIdPrototype, + AccountIdPrototype, AccountPrototype, ActionPrototype, AssetDefinitionIdPrototype, AssetDefinitionPrototype, AssetIdPrototype, AssetPrototype, AssetValuePrototype, BlockHeaderHashPrototype, BlockHeaderPrototype, CommittedTransactionPrototype, - DomainIdPrototype, DomainPrototype, MetadataPrototype, NamePrototype, - ParameterPrototype, PeerIdPrototype, PermissionPrototype, PublicKeyPrototype, - RoleIdPrototype, RolePrototype, SignedBlockPrototype, SignedTransactionPrototype, - StringPrototype, TransactionErrorPrototype, TransactionHashPrototype, - TriggerIdPrototype, TriggerPrototype, + DomainIdPrototype, DomainPrototype, JsonPrototype, MetadataPrototype, + NamePrototype, NumericPrototype, ParameterPrototype, PeerIdPrototype, + PermissionPrototype, PublicKeyPrototype, RoleIdPrototype, RolePrototype, + SignedBlockPrototype, SignedTransactionPrototype, StringPrototype, + TransactionErrorPrototype, TransactionHashPrototype, TriggerIdPrototype, + TriggerPrototype, }, CompoundPredicate, ObjectProjector, PredicateMarker, }, @@ -33,7 +35,7 @@ use crate::{ }, role::{Role, RoleId}, transaction::{error::TransactionRejectionReason, SignedTransaction}, - trigger::{Trigger, TriggerId}, + trigger::{action, Trigger, TriggerId}, }; macro_rules! impl_predicate_atom { @@ -96,7 +98,7 @@ macro_rules! impl_predicate_atom { $( $(#[$($variant_attrs)*])* pub fn $constructor_name(self $(, $variant_pat: $variant_ty)?) 
-> CompoundPredicate { - CompoundPredicate::Atom(Projector::wrap_atom( + CompoundPredicate::Atom(self.projector.wrap_atom( $atom_name::$variant_name$(($variant_pat))? )) } @@ -162,45 +164,46 @@ impl super::EvaluatePredicate for StringPredicateAtom { } // It is unfortunate that we have to repeat the prototype methods on String and Name, but I don't think it's possible to remove this duplication -impl StringPrototype +impl StringPrototype where - Projection: ObjectProjector, + Projector: ObjectProjector, { /// Checks if the input is equal to the expected value. - pub fn eq(self, expected: impl Into) -> CompoundPredicate { - CompoundPredicate::Atom(Projection::wrap_atom(StringPredicateAtom::Equals( - expected.into(), - ))) + pub fn eq(self, expected: impl Into) -> CompoundPredicate { + CompoundPredicate::Atom( + self.projector + .wrap_atom(StringPredicateAtom::Equals(expected.into())), + ) } /// Checks if the input contains an expected substring, like [`str::contains()`]. - pub fn contains( - self, - expected: impl Into, - ) -> CompoundPredicate { - CompoundPredicate::Atom(Projection::wrap_atom(StringPredicateAtom::Contains( - expected.into(), - ))) + pub fn contains(self, expected: impl Into) -> CompoundPredicate { + CompoundPredicate::Atom( + self.projector + .wrap_atom(StringPredicateAtom::Contains(expected.into())), + ) } /// Checks if the input starts with an expected substring, like [`str::starts_with()`]. pub fn starts_with( self, expected: impl Into, - ) -> CompoundPredicate { - CompoundPredicate::Atom(Projection::wrap_atom(StringPredicateAtom::StartsWith( - expected.into(), - ))) + ) -> CompoundPredicate { + CompoundPredicate::Atom( + self.projector + .wrap_atom(StringPredicateAtom::StartsWith(expected.into())), + ) } /// Checks if the input ends with an expected substring, like [`str::ends_with()`]. pub fn ends_with( self, expected: impl Into, - ) -> CompoundPredicate { - CompoundPredicate::Atom(Projection::wrap_atom(StringPredicateAtom::EndsWith( - expected.into(), - ))) + ) -> CompoundPredicate { + CompoundPredicate::Atom( + self.projector + .wrap_atom(StringPredicateAtom::EndsWith(expected.into())), + ) } } @@ -210,9 +213,10 @@ where { /// Checks if the input is equal to the expected value. pub fn eq(self, expected: impl Into) -> CompoundPredicate { - CompoundPredicate::Atom(Projection::wrap_atom(StringPredicateAtom::Equals( - expected.into(), - ))) + CompoundPredicate::Atom( + self.projector + .wrap_atom(StringPredicateAtom::Equals(expected.into())), + ) } /// Checks if the input contains an expected substring, like [`str::contains()`]. @@ -220,9 +224,10 @@ where self, expected: impl Into, ) -> CompoundPredicate { - CompoundPredicate::Atom(Projection::wrap_atom(StringPredicateAtom::Contains( - expected.into(), - ))) + CompoundPredicate::Atom( + self.projector + .wrap_atom(StringPredicateAtom::Contains(expected.into())), + ) } /// Checks if the input starts with an expected substring, like [`str::starts_with()`]. @@ -230,9 +235,10 @@ where self, expected: impl Into, ) -> CompoundPredicate { - CompoundPredicate::Atom(Projection::wrap_atom(StringPredicateAtom::StartsWith( - expected.into(), - ))) + CompoundPredicate::Atom( + self.projector + .wrap_atom(StringPredicateAtom::StartsWith(expected.into())), + ) } /// Checks if the input ends with an expected substring, like [`str::ends_with()`]. 
@@ -240,9 +246,10 @@ where self, expected: impl Into, ) -> CompoundPredicate { - CompoundPredicate::Atom(Projection::wrap_atom(StringPredicateAtom::EndsWith( - expected.into(), - ))) + CompoundPredicate::Atom( + self.projector + .wrap_atom(StringPredicateAtom::EndsWith(expected.into())), + ) } } @@ -254,6 +261,13 @@ impl_predicate_atom! { /// Checks if the input is equal to the expected value. Equals(expected: PublicKey) [eq] => input == expected, } + JsonPredicateAtom(input: Json) [JsonPrototype] { + /// Checks if the input is equal to the expected value. + Equals(expected: Json) [eq] => input == expected, + } + NumericPredicateAtom(_input: Numeric) [NumericPrototype] { + // TODO: populate + } // account AccountIdPredicateAtom(input: AccountId) [AccountIdPrototype] { @@ -265,8 +279,11 @@ impl_predicate_atom! { // asset AssetDefinitionPredicateAtom(_input: AssetDefinition) [AssetDefinitionPrototype] {} AssetPredicateAtom(_input: Asset) [AssetPrototype] {} - AssetValuePredicateAtom(_input: AssetValue) [AssetValuePrototype] { - // TODO: populate + AssetValuePredicateAtom(input: AssetValue) [AssetValuePrototype] { + /// Checks if the asset value is numeric + IsNumeric [is_numeric] => matches!(input, AssetValue::Numeric(_)), + /// Checks if the asset value is a store + IsStore [is_store] => matches!(input, AssetValue::Store(_)), } AssetIdPredicateAtom(input: AssetId) [AssetIdPrototype] { /// Checks if the input is equal to the expected value. @@ -324,19 +341,20 @@ impl_predicate_atom! { Equals(expected: TriggerId) [eq] => input == expected, } TriggerPredicateAtom(_input: Trigger) [TriggerPrototype] {} + ActionPredicateAtom(_input: action::Action) [ActionPrototype] {} } pub mod prelude { //! Re-export all predicate boxes for a glob import `(::*)` pub use super::{ - AccountIdPredicateAtom, AccountPredicateAtom, AssetDefinitionIdPredicateAtom, - AssetDefinitionPredicateAtom, AssetIdPredicateAtom, AssetPredicateAtom, - AssetValuePredicateAtom, BlockHeaderHashPredicateAtom, BlockHeaderPredicateAtom, - CommittedTransactionPredicateAtom, DomainIdPredicateAtom, DomainPredicateAtom, - MetadataPredicateAtom, ParameterPredicateAtom, PeerIdPredicateAtom, - PermissionPredicateAtom, PublicKeyPredicateAtom, RoleIdPredicateAtom, RolePredicateAtom, - SignedBlockPredicateAtom, SignedTransactionPredicateAtom, StringPredicateAtom, - TransactionErrorPredicateAtom, TransactionHashPredicateAtom, TriggerIdPredicateAtom, - TriggerPredicateAtom, + AccountIdPredicateAtom, AccountPredicateAtom, ActionPredicateAtom, + AssetDefinitionIdPredicateAtom, AssetDefinitionPredicateAtom, AssetIdPredicateAtom, + AssetPredicateAtom, AssetValuePredicateAtom, BlockHeaderHashPredicateAtom, + BlockHeaderPredicateAtom, CommittedTransactionPredicateAtom, DomainIdPredicateAtom, + DomainPredicateAtom, JsonPredicateAtom, MetadataPredicateAtom, NumericPredicateAtom, + ParameterPredicateAtom, PeerIdPredicateAtom, PermissionPredicateAtom, + PublicKeyPredicateAtom, RoleIdPredicateAtom, RolePredicateAtom, SignedBlockPredicateAtom, + SignedTransactionPredicateAtom, StringPredicateAtom, TransactionErrorPredicateAtom, + TransactionHashPredicateAtom, TriggerIdPredicateAtom, TriggerPredicateAtom, }; } diff --git a/crates/iroha_data_model/src/query/dsl/type_descriptions.rs b/crates/iroha_data_model/src/query/dsl/type_descriptions.rs index 31d0512eb3d..bfb6ab519f7 100644 --- a/crates/iroha_data_model/src/query/dsl/type_descriptions.rs +++ b/crates/iroha_data_model/src/query/dsl/type_descriptions.rs @@ -1,10 +1,15 @@ //! 
This module contains definitions of prototypes and projections for the data model types. See the [module-level documentation](crate::query::dsl) for more information. #[cfg(not(feature = "std"))] -use alloc::{format, string::String, vec::Vec}; +use alloc::{ + format, + string::{String, ToString}, + vec::Vec, +}; use derive_where::derive_where; use iroha_crypto::{HashOf, PublicKey}; +use iroha_primitives::{json::Json, numeric::Numeric}; // used in the macro use crate::query::dsl::{ @@ -21,10 +26,13 @@ use crate::{ parameter::Parameter, peer::PeerId, permission::Permission, - query::{CommittedTransaction, QueryOutputBatchBox}, + query::{ + error::{FindError, QueryExecutionFail}, + CommittedTransaction, QueryOutputBatchBox, + }, role::{Role, RoleId}, transaction::{error::TransactionRejectionReason, SignedTransaction}, - trigger::{Trigger, TriggerId}, + trigger::{action, Trigger, TriggerId}, }; macro_rules! type_descriptions { @@ -57,19 +65,19 @@ macro_rules! type_descriptions { } impl EvaluateSelector<$ty> for $projection_name { - #[expect(single_use_lifetimes)] // FP, this the suggested change is not allowed on stable - fn project_clone<'a>(&self, batch: impl Iterator) -> QueryOutputBatchBox { + #[expect(single_use_lifetimes)] // FP, the suggested change is not allowed on stable + fn project_clone<'a>(&self, batch: impl Iterator) -> Result { match self { - $projection_name::Atom(_) => batch.cloned().collect::>().into(), + $projection_name::Atom(_) => Ok(batch.cloned().collect::>().into()), $( $projection_name::$proj_variant(field) => field.project_clone(batch.map(|item| &item.$field_name)), )* } } - fn project(&self, batch: impl Iterator) -> QueryOutputBatchBox { + fn project(&self, batch: impl Iterator) -> Result { match self { - $projection_name::Atom(_) => batch.collect::>().into(), + $projection_name::Atom(_) => Ok(batch.collect::>().into()), $( $projection_name::$proj_variant(field) => field.project(batch.map(|item| item.$field_name)), )* @@ -84,7 +92,11 @@ macro_rules! type_descriptions { ($($dep_ty_bounds:tt)*) ) => { #[doc = concat!("A projector on [`", stringify!($ty), "`] for its `", stringify!($field_name), "` field.")] - pub struct $projector_name(core::marker::PhantomData<(Marker, Base)>); + #[derive_where::derive_where(Default, Copy, Clone; Base)] + pub struct $projector_name { + base: Base, + phantom: core::marker::PhantomData + } impl ObjectProjector for $projector_name where @@ -96,9 +108,10 @@ macro_rules! type_descriptions { type OutputType = Base::OutputType; fn project( + &self, projection: <$field_ty as HasProjection>::Projection ) -> >::Projection { - Base::project($projection_name::$proj_variant(projection)) + self.base.project($projection_name::$proj_variant(projection)) } } }; @@ -193,14 +206,15 @@ macro_rules! type_descriptions { // prototype struct #[doc = concat!("A prototype for the [`", stringify!($ty), "`] type.")] - #[derive_where::derive_where(Default, Copy, Clone)] + #[derive_where::derive_where(Default, Copy, Clone; Projector)] pub struct $prototype_name { $( // TODO: I think it might make sense to provide field documentation here. How would we do that without copying the docs to the type description macro though? #[doc = concat!("Accessor for the `", stringify!($field_name), "` field.")] pub $field_name: <$field_ty as HasPrototype>::Prototype>, )* - phantom: core::marker::PhantomData<(Marker, Projector)>, + pub(super) projector: Projector, + phantom: core::marker::PhantomData, } impl HasPrototype for $ty @@ -217,7 +231,7 @@ macro_rules! 
type_descriptions { type SelectedType = Projector::InputType; fn into_selector(self) -> >::Projection { - Projector::wrap_atom(()) + self.projector.wrap_atom(()) } } )* @@ -232,7 +246,7 @@ macro_rules! type_descriptions { type_descriptions! { // Type[ProjectionName, PrototypeName]: Dependency1, Dependency2, ... - Account[AccountProjection, AccountPrototype]: AccountId, DomainId, Name, PublicKey, Metadata { + Account[AccountProjection, AccountPrototype]: AccountId, DomainId, Name, PublicKey, Metadata, Json { // field_name(ProjectionVariant, ProjectorName): FieldType id(Id, AccountIdProjector): AccountId, metadata(Metadata, AccountMetadataProjector): Metadata, @@ -243,7 +257,7 @@ type_descriptions! { } // asset - AssetDefinition[AssetDefinitionProjection, AssetDefinitionPrototype]: AssetDefinitionId, DomainId, Name, Metadata { + AssetDefinition[AssetDefinitionProjection, AssetDefinitionPrototype]: AssetDefinitionId, DomainId, Name, Metadata, Json { id(Id, AssetDefinitionIdProjector): AssetDefinitionId, metadata(Metadata, AssetDefinitionMetadataProjector): Metadata, } @@ -251,7 +265,7 @@ type_descriptions! { domain(Domain, AssetDefinitionIdDomainProjector): DomainId, name(Name, AssetDefinitionIdNameProjector): Name, } - Asset[AssetProjection, AssetPrototype]: AssetId, AccountId, DomainId, Name, PublicKey, AssetDefinitionId, AssetValue { + Asset[AssetProjection, AssetPrototype]: AssetId, AccountId, DomainId, Name, PublicKey, AssetDefinitionId, AssetValue, Numeric, Metadata, Json { id(Id, AssetIdProjector): AssetId, value(Value, AssetValueProjector): AssetValue, } @@ -259,7 +273,11 @@ type_descriptions! { account(Account, AssetIdAccountProjector): AccountId, definition(Definition, AssetIdDefinitionProjector): AssetDefinitionId, } - AssetValue[AssetValueProjection, AssetValuePrototype] {} + #[custom_evaluate] + AssetValue[AssetValueProjection, AssetValuePrototype]: Numeric, Metadata, Json { + numeric(Numeric, AssetValueNumericProjector): Numeric, + store(Store, AssetValueStoreProjector): Metadata, + } // block HashOf[BlockHeaderHashProjection, BlockHeaderHashPrototype] {} @@ -285,7 +303,7 @@ type_descriptions! { } // domain - Domain[DomainProjection, DomainPrototype]: DomainId, Name, Metadata { + Domain[DomainProjection, DomainPrototype]: DomainId, Name, Metadata, Json { id(Id, DomainIdProjector): DomainId, metadata(Metadata, DomainMetadataProjector): Metadata, } @@ -318,8 +336,12 @@ type_descriptions! { TriggerId[TriggerIdProjection, TriggerIdPrototype]: Name { name(Name, TriggerIdNameProjector): Name, } - Trigger[TriggerProjection, TriggerPrototype]: TriggerId, Name { + Trigger[TriggerProjection, TriggerPrototype]: TriggerId, Name, action::Action, Metadata, Json { id(Id, TriggerIdProjector): TriggerId, + action(Action, TriggerActionProjector): action::Action, + } + action::Action[ActionProjection, ActionPrototype]: Metadata, Json { + metadata(Metadata, ActionMetadataProjector): Metadata, } // note: even though `NameProjection` and `StringProjection` are distinct types, @@ -328,8 +350,118 @@ type_descriptions! 
{ String[StringProjection, StringPrototype] {} PublicKey[PublicKeyProjection, PublicKeyPrototype] {} - Metadata[MetadataProjection, MetadataPrototype] { - // TODO: we will probably want to have a special-cased metadata projection that allows accessing fields by string keys (because metadata is not statically typed) + Json[JsonProjection, JsonPrototype] {} + Numeric[NumericProjection, NumericPrototype] {} +} + +/// A set of helpers for [`EvaluateSelector`] implementations that are fallible +mod fallible_selector { + use crate::query::{ + dsl::{EvaluateSelector, HasProjection, SelectorMarker}, + error::QueryExecutionFail, + QueryOutputBatchBox, + }; + + trait Collector { + fn collect( + self, + iter: impl Iterator, + ) -> Result; + } + + struct CollectorClone<'proj, T: HasProjection>(&'proj T::Projection); + + impl<'a, T> Collector<&'a T> for CollectorClone<'_, T> + where + T: HasProjection + 'static, + T::Projection: EvaluateSelector, + { + fn collect( + self, + iter: impl Iterator, + ) -> Result { + self.0.project_clone(iter) + } + } + + struct CollectorNoClone<'proj, T: HasProjection>(&'proj T::Projection); + + impl Collector for CollectorNoClone<'_, T> + where + T: HasProjection + 'static, + T::Projection: EvaluateSelector, + { + fn collect( + self, + iter: impl Iterator, + ) -> Result { + self.0.project(iter) + } + } + + fn map_general( + iterator: IterIn, + map: impl Fn(TIn) -> Result, + collector: impl Collector, + ) -> Result + where + IterIn: Iterator, + { + // what we do here is a bit unwieldy + // the `project_clone` method accepts an iterator over references to the items + // however, while iterating over the metadatas we can find out that a key is missing + // in this case we need to fail the whole operation and return an error + // the `project_clone` by itself doesn't provide such a mechanism + // but we can achieve this by storing an error indicator in a variable and checking it after the iteration + let mut error_accumulator = None; + + let iter_out = iterator + // we use map_while to stop on first error + .map_while(|item| { + let res = map(item); + + match res { + Ok(value) => Some(value), + Err(error) => { + error_accumulator.get_or_insert(error); + None + } + } + }); + let result = collector.collect(iter_out); + + // errors on this layer of projection take precedence + if let Some(error) = error_accumulator { + return Err(error); + } + + result + } + + pub fn map( + iterator: IterIn, + map: impl Fn(TIn) -> Result, + proj: &TOut::Projection, + ) -> Result + where + IterIn: Iterator, + TOut: HasProjection + 'static, + TOut::Projection: EvaluateSelector, + { + map_general(iterator, map, CollectorNoClone(proj)) + } + + pub fn map_clone<'a, TIn, TOut, IterIn>( + iterator: IterIn, + map: impl Fn(TIn) -> Result<&'a TOut, QueryExecutionFail>, + proj: &TOut::Projection, + ) -> Result + where + IterIn: Iterator, + TOut: HasProjection + 'static, + TOut::Projection: EvaluateSelector, + { + map_general(iterator, map, CollectorClone(proj)) } } @@ -344,20 +476,23 @@ impl EvaluatePredicate for BlockHeaderProjection { } impl EvaluateSelector for BlockHeaderProjection { - #[expect(single_use_lifetimes)] // FP, this the suggested change is not allowed on stable + #[expect(single_use_lifetimes)] // FP, the suggested change is not allowed on stable fn project_clone<'a>( &self, batch: impl Iterator, - ) -> QueryOutputBatchBox { + ) -> Result { match self { - BlockHeaderProjection::Atom(_) => batch.cloned().collect::>().into(), - BlockHeaderProjection::Hash(hash) => 
hash.project(batch.map(|item| item.hash())), + BlockHeaderProjection::Atom(()) => Ok(batch.copied().collect::>().into()), + BlockHeaderProjection::Hash(hash) => hash.project(batch.map(BlockHeader::hash)), } } - fn project(&self, batch: impl Iterator) -> QueryOutputBatchBox { + fn project( + &self, + batch: impl Iterator, + ) -> Result { match self { - BlockHeaderProjection::Atom(_) => batch.collect::>().into(), + BlockHeaderProjection::Atom(()) => Ok(batch.collect::>().into()), BlockHeaderProjection::Hash(hash) => hash.project(batch.map(|item| item.hash())), } } @@ -373,22 +508,23 @@ impl EvaluatePredicate for SignedBlockProjection { } impl EvaluateSelector for SignedBlockProjection { - #[expect(single_use_lifetimes)] // FP, this the suggested change is not allowed on stable + #[expect(single_use_lifetimes)] // FP, the suggested change is not allowed on stable fn project_clone<'a>( &self, batch: impl Iterator, - ) -> QueryOutputBatchBox { + ) -> Result { match self { - SignedBlockProjection::Atom(_) => batch.cloned().collect::>().into(), - SignedBlockProjection::Header(header) => { - header.project(batch.map(|item| item.header())) - } + SignedBlockProjection::Atom(()) => Ok(batch.cloned().collect::>().into()), + SignedBlockProjection::Header(header) => header.project(batch.map(SignedBlock::header)), } } - fn project(&self, batch: impl Iterator) -> QueryOutputBatchBox { + fn project( + &self, + batch: impl Iterator, + ) -> Result { match self { - SignedBlockProjection::Atom(_) => batch.collect::>().into(), + SignedBlockProjection::Atom(()) => Ok(batch.collect::>().into()), SignedBlockProjection::Header(header) => { header.project(batch.map(|item| item.header())) } @@ -402,30 +538,35 @@ impl EvaluatePredicate for SignedTransactionProjection atom.applies(input), SignedTransactionProjection::Hash(hash) => hash.applies(&input.hash()), SignedTransactionProjection::Authority(authority) => { - authority.applies(&input.authority()) + authority.applies(input.authority()) } } } } impl EvaluateSelector for SignedTransactionProjection { - #[expect(single_use_lifetimes)] // FP, this the suggested change is not allowed on stable + #[expect(single_use_lifetimes)] // FP, the suggested change is not allowed on stable fn project_clone<'a>( &self, batch: impl Iterator, - ) -> QueryOutputBatchBox { + ) -> Result { match self { - SignedTransactionProjection::Atom(_) => batch.cloned().collect::>().into(), - SignedTransactionProjection::Hash(hash) => hash.project(batch.map(|item| item.hash())), + SignedTransactionProjection::Atom(()) => Ok(batch.cloned().collect::>().into()), + SignedTransactionProjection::Hash(hash) => { + hash.project(batch.map(SignedTransaction::hash)) + } SignedTransactionProjection::Authority(authority) => { - authority.project_clone(batch.map(|item| item.authority())) + authority.project_clone(batch.map(SignedTransaction::authority)) } } } - fn project(&self, batch: impl Iterator) -> QueryOutputBatchBox { + fn project( + &self, + batch: impl Iterator, + ) -> Result { match self { - SignedTransactionProjection::Atom(_) => batch.collect::>().into(), + SignedTransactionProjection::Atom(()) => Ok(batch.collect::>().into()), SignedTransactionProjection::Hash(hash) => hash.project(batch.map(|item| item.hash())), SignedTransactionProjection::Authority(authority) => { authority.project(batch.map(|item| item.authority().clone())) @@ -434,7 +575,254 @@ impl EvaluateSelector for SignedTransactionProjection for AssetValueProjection { + fn applies(&self, input: &AssetValue) -> bool { + match self { + 
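+            // Projecting onto a specific variant (`Numeric` or `Store`) applies the inner predicate
+            // only when the asset value actually is that variant; a mismatched variant evaluates to `false`.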
AssetValueProjection::Atom(atom) => atom.applies(input), + AssetValueProjection::Numeric(numeric) => match input { + AssetValue::Numeric(v) => numeric.applies(v), + AssetValue::Store(_) => false, + }, + AssetValueProjection::Store(store) => match input { + AssetValue::Numeric(_) => false, + AssetValue::Store(v) => store.applies(v), + }, + } + } +} + +impl EvaluateSelector for AssetValueProjection { + #[expect(single_use_lifetimes)] + fn project_clone<'a>( + &self, + batch: impl Iterator, + ) -> Result { + match self { + AssetValueProjection::Atom(()) => Ok(batch.cloned().collect::>().into()), + AssetValueProjection::Numeric(proj) => fallible_selector::map_clone( + batch, + |item| match item { + AssetValue::Numeric(v) => Ok(v), + AssetValue::Store(_) => Err(QueryExecutionFail::Conversion( + "Expected numeric value, got store".to_string(), + )), + }, + proj, + ), + AssetValueProjection::Store(proj) => fallible_selector::map_clone( + batch, + |item| match item { + AssetValue::Numeric(_) => Err(QueryExecutionFail::Conversion( + "Expected store value, got numeric".to_string(), + )), + AssetValue::Store(v) => Ok(v), + }, + proj, + ), + } + } + + fn project( + &self, + batch: impl Iterator, + ) -> Result { + match self { + AssetValueProjection::Atom(()) => Ok(batch.collect::>().into()), + AssetValueProjection::Numeric(proj) => fallible_selector::map( + batch, + |item| match item { + AssetValue::Numeric(v) => Ok(v), + AssetValue::Store(_) => Err(QueryExecutionFail::Conversion( + "Expected numeric value, got store".to_string(), + )), + }, + proj, + ), + AssetValueProjection::Store(proj) => fallible_selector::map( + batch, + |item| match item { + AssetValue::Numeric(_) => Err(QueryExecutionFail::Conversion( + "Expected store value, got numeric".to_string(), + )), + AssetValue::Store(v) => Ok(v), + }, + proj, + ), + } + } +} + +// metadata is a special case because we allow projecting on string-typed keys +/// A projection for the [`Metadata`] type. +#[derive_where(Debug, Eq, PartialEq, Copy, Clone; >::AtomType, MetadataKeyProjection)] +// parity-scale-codec and iroha_schema generates correct bounds by themselves +#[derive(parity_scale_codec::Decode, parity_scale_codec::Encode, iroha_schema::IntoSchema)] +// use serde_where macro to generate the correct #[serde(bounds(...))] attribute +#[iroha_macro::serde_where(>::AtomType, MetadataKeyProjection)] +#[derive(serde::Deserialize, serde::Serialize)] +pub enum MetadataProjection +where + Metadata: Projectable, + Json: HasProjection, +{ + /// Finish the projection with an atom. + Atom(>::AtomType), + // unlike other projections, this one needs to store a value (key being projected) + // hence the separate struct (iroha does not allow enums with more than one field) + /// Projection for a key in the metadata. + Key(MetadataKeyProjection), +} + +/// A projection for a key in the [`Metadata`] type. 
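+///
+/// Pairs the metadata key being accessed with the projection that is applied to the `Json`
+/// value stored under that key.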
+#[derive_where(Debug, Eq, PartialEq, Clone; >::Projection)] +// parity-scale-codec and iroha_schema generates correct bounds by themselves +#[derive(parity_scale_codec::Decode, parity_scale_codec::Encode, iroha_schema::IntoSchema)] +// use serde_where macro to generate the correct #[serde(bounds(...))] attribute +#[iroha_macro::serde_where(>::Projection)] +#[derive(serde::Deserialize, serde::Serialize)] +pub struct MetadataKeyProjection +where + Json: HasProjection, +{ + key: Name, + projection: >::Projection, +} + +impl HasProjection for Metadata +where + Metadata: Projectable, + Json: HasProjection, +{ + type Projection = MetadataProjection; + + fn atom(atom: Self::AtomType) -> Self::Projection { + MetadataProjection::Atom(atom) + } +} +impl EvaluatePredicate for MetadataProjection { + fn applies(&self, input: &Metadata) -> bool { + match self { + MetadataProjection::Atom(atom) => atom.applies(input), + MetadataProjection::Key(proj) => input + .get(&proj.key) + .map_or(false, |value| proj.projection.applies(value)), + } + } +} +impl EvaluateSelector for MetadataProjection { + #[expect(single_use_lifetimes)] + fn project_clone<'a>( + &self, + batch: impl Iterator, + ) -> Result { + match self { + MetadataProjection::Atom(()) => Ok(batch.cloned().collect::>().into()), + MetadataProjection::Key(proj) => fallible_selector::map_clone( + batch, + |item| { + item.get(&proj.key).ok_or_else(|| { + QueryExecutionFail::Find(FindError::MetadataKey(proj.key.clone())) + }) + }, + &proj.projection, + ), + } + } + fn project( + &self, + batch: impl Iterator, + ) -> Result { + match self { + MetadataProjection::Atom(()) => Ok(batch.collect::>().into()), + MetadataProjection::Key(proj) => fallible_selector::map( + batch, + |item| { + // using remove here to get a value, not a reference + item.get(&proj.key).cloned().ok_or_else(|| { + QueryExecutionFail::Find(FindError::MetadataKey(proj.key.clone())) + }) + }, + &proj.projection, + ), + } + } +} + +/// A prototype for the [`Metadata`] type. +#[derive_where(Default, Copy, Clone; Projector)] +pub struct MetadataPrototype { + projector: Projector, + phantom: core::marker::PhantomData, +} + +impl HasPrototype for Metadata { + type Prototype = MetadataPrototype; +} +impl IntoSelector for MetadataPrototype +where + Projector: ObjectProjector, + Projector::OutputType: HasProjection, +{ + type SelectingType = Projector::OutputType; + type SelectedType = Projector::InputType; + + fn into_selector(self) -> >::Projection { + self.projector.wrap_atom(()) + } +} + +impl MetadataPrototype +where + Projector: ObjectProjector, +{ + /// Accessor for a key in the metadata. + /// + /// ## Nonexistent keys + /// + /// When a nonexistent key is accessed in a predicate, it will evaluate to `false`. + /// + /// When a nonexistent key is accessed in a selector, the query will fail with a [`FindError::MetadataKey`] error. + pub fn key(self, key: Name) -> JsonPrototype> { + JsonPrototype { + projector: MetadataKeyProjector { + key, + base: self.projector, + phantom: core::marker::PhantomData, + }, + phantom: core::marker::PhantomData, + } + } +} + +/// A projector on [`Metadata`] for one of its keys. 
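+///
+/// Constructed by [`MetadataPrototype::key`]; its `project` wraps the inner `Json` projection
+/// into [`MetadataProjection::Key`] and delegates to the base projector.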
+pub struct MetadataKeyProjector { + key: Name, + base: Base, + phantom: core::marker::PhantomData, +} + +impl ObjectProjector for MetadataKeyProjector +where + Base: ObjectProjector, + Json: Projectable, + Metadata: Projectable, +{ + type InputType = Json; + type OutputType = Base::OutputType; + + fn project( + &self, + projection: >::Projection, + ) -> >::Projection { + self.base + .project(MetadataProjection::Key(MetadataKeyProjection { + key: self.key.clone(), + projection, + })) + } +} + pub mod prelude { //! Re-export all projections for a glob import `(::*)` - pub use super::projections::*; + pub use super::{projections::*, MetadataKeyProjection, MetadataProjection}; } diff --git a/crates/iroha_data_model/src/query/mod.rs b/crates/iroha_data_model/src/query/mod.rs index 4a488943a27..07304888181 100644 --- a/crates/iroha_data_model/src/query/mod.rs +++ b/crates/iroha_data_model/src/query/mod.rs @@ -3,7 +3,14 @@ #![allow(clippy::missing_inline_in_public_items)] #[cfg(not(feature = "std"))] -use alloc::{boxed::Box, format, string::String, vec::Vec}; +use alloc::{ + boxed::Box, + format, + string::String, + vec::{self, Vec}, +}; +#[cfg(feature = "std")] +use std::vec; use derive_more::Constructor; use iroha_crypto::{PublicKey, SignatureOf}; @@ -66,6 +73,7 @@ mod model { use iroha_macro::serde_where; use super::*; + use crate::trigger::action; /// An iterable query bundled with a filter #[serde_where(Q, CompoundPredicate, SelectorTuple)] @@ -125,6 +133,8 @@ mod model { PublicKey(Vec), String(Vec), Metadata(Vec), + Json(Vec), + Numeric(Vec), Name(Vec), DomainId(Vec), Domain(Vec), @@ -146,12 +156,15 @@ mod model { RoleId(Vec), TriggerId(Vec), Trigger(Vec), + Action(Vec), Block(Vec), BlockHeader(Vec), BlockHeaderHash(Vec>), } - #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] + #[derive( + Debug, Clone, PartialEq, Eq, Decode, Encode, Constructor, Deserialize, Serialize, IntoSchema, + )] pub struct QueryOutputBatchBoxTuple { pub tuple: Vec, } @@ -161,15 +174,8 @@ mod model { Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema, FromVariant, )] pub enum SingularQueryBox { - FindAssetQuantityById(FindAssetQuantityById), FindExecutorDataModel(FindExecutorDataModel), FindParameters(FindParameters), - - FindDomainMetadata(FindDomainMetadata), - FindAccountMetadata(FindAccountMetadata), - FindAssetMetadata(FindAssetMetadata), - FindAssetDefinitionMetadata(FindAssetDefinitionMetadata), - FindTriggerMetadata(FindTriggerMetadata), } /// An enum of all possible singular query outputs @@ -177,13 +183,8 @@ mod model { Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema, FromVariant, )] pub enum SingularQueryOutputBox { - Numeric(Numeric), ExecutorDataModel(crate::executor::ExecutorDataModel), - Json(Json), - Trigger(crate::trigger::Trigger), Parameters(Parameters), - Transaction(CommittedTransaction), - BlockHeader(BlockHeader), } /// The results of a single iterable query request. 
@@ -292,6 +293,7 @@ impl QueryOutputBatchBox {
             (Self::PublicKey(v1), Self::PublicKey(v2)) => v1.extend(v2),
             (Self::String(v1), Self::String(v2)) => v1.extend(v2),
             (Self::Metadata(v1), Self::Metadata(v2)) => v1.extend(v2),
+            (Self::Numeric(v1), Self::Numeric(v2)) => v1.extend(v2),
             (Self::Name(v1), Self::Name(v2)) => v1.extend(v2),
             (Self::DomainId(v1), Self::DomainId(v2)) => v1.extend(v2),
             (Self::Domain(v1), Self::Domain(v2)) => v1.extend(v2),
@@ -315,6 +317,7 @@ impl QueryOutputBatchBox {
             (Self::RoleId(v1), Self::RoleId(v2)) => v1.extend(v2),
             (Self::TriggerId(v1), Self::TriggerId(v2)) => v1.extend(v2),
             (Self::Trigger(v1), Self::Trigger(v2)) => v1.extend(v2),
+            (Self::Action(v1), Self::Action(v2)) => v1.extend(v2),
             (Self::Block(v1), Self::Block(v2)) => v1.extend(v2),
             (Self::BlockHeader(v1), Self::BlockHeader(v2)) => v1.extend(v2),
             (Self::BlockHeaderHash(v1), Self::BlockHeaderHash(v2)) => v1.extend(v2),
@@ -329,6 +332,8 @@ impl QueryOutputBatchBox {
             Self::PublicKey(v) => v.len(),
             Self::String(v) => v.len(),
             Self::Metadata(v) => v.len(),
+            Self::Json(v) => v.len(),
+            Self::Numeric(v) => v.len(),
             Self::Name(v) => v.len(),
             Self::DomainId(v) => v.len(),
             Self::Domain(v) => v.len(),
@@ -350,6 +355,7 @@ impl QueryOutputBatchBox {
             Self::RoleId(v) => v.len(),
             Self::TriggerId(v) => v.len(),
             Self::Trigger(v) => v.len(),
+            Self::Action(v) => v.len(),
             Self::Block(v) => v.len(),
             Self::BlockHeader(v) => v.len(),
             Self::BlockHeaderHash(v) => v.len(),
@@ -364,13 +370,15 @@ impl QueryOutputBatchBoxTuple {
     ///
     /// Panics if the types or lengths of the two batch tuples do not match
     pub fn extend(&mut self, other: Self) {
-        if self.tuple.len() != other.tuple.len() {
-            panic!("Cannot extend QueryOutputBatchBoxTuple with different number of elements");
-        }
+        assert_eq!(
+            self.tuple.len(),
+            other.tuple.len(),
+            "Cannot extend QueryOutputBatchBoxTuple with different number of elements"
+        );
 
         self.tuple
             .iter_mut()
-            .zip(other.tuple.into_iter())
+            .zip(other)
             .for_each(|(self_batch, other_batch)| self_batch.extend(other_batch));
     }
 
@@ -380,14 +388,34 @@ impl QueryOutputBatchBoxTuple {
         self.tuple[0].len()
     }
 
+    /// Returns `true` if this batch tuple is empty
+    pub fn is_empty(&self) -> bool {
+        self.tuple[0].len() == 0
+    }
+
     /// Returns an iterator over the batches in this tuple
     pub fn iter(&self) -> impl Iterator<Item = &QueryOutputBatchBox> {
         self.tuple.iter()
     }
+}
+
+impl IntoIterator for QueryOutputBatchBoxTuple {
+    type Item = QueryOutputBatchBox;
+    type IntoIter = QueryOutputBatchBoxIntoIter;
 
-    /// Consumes this batch tuple and returns an iterator over the batches
-    pub fn into_iter(self) -> impl Iterator<Item = QueryOutputBatchBox> {
-        self.tuple.into_iter()
+    fn into_iter(self) -> Self::IntoIter {
+        QueryOutputBatchBoxIntoIter(self.tuple.into_iter())
     }
 }
+
+/// An iterator over the batches in a [`QueryOutputBatchBoxTuple`]
+pub struct QueryOutputBatchBoxIntoIter(vec::IntoIter<QueryOutputBatchBox>);
+
+impl Iterator for QueryOutputBatchBoxIntoIter {
+    type Item = QueryOutputBatchBox;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.0.next()
+    }
+}
@@ -659,13 +687,7 @@ impl_iter_queries! {
 }
 
 impl_singular_queries!
{ - FindAccountMetadata => Json, - FindAssetQuantityById => Numeric, - FindAssetMetadata => Json, - FindAssetDefinitionMetadata => Json, - FindDomainMetadata => Json, FindParameters => crate::parameter::Parameters, - FindTriggerMetadata => Json, FindExecutorDataModel => crate::executor::ExecutorDataModel, } @@ -785,18 +807,6 @@ pub mod account { #[ffi_type] pub struct FindAccounts; - /// [`FindAccountMetadata`] Iroha Query finds an [`MetadataValue`] - /// of the key-value metadata pair in the specified account. - #[derive(Display)] - #[display(fmt = "Find metadata value with `{key}` key in `{id}` account")] - #[ffi_type] - pub struct FindAccountMetadata { - /// `Id` of an account to find. - pub id: AccountId, - /// Key of the specific key-value in the Account's metadata. - pub key: Name, - } - /// [`FindAccountsWithAsset`] Iroha Query gets [`AssetDefinition`]s id as input and /// finds all [`Account`]s storing [`Asset`] with such definition. #[derive(Display)] @@ -812,7 +822,7 @@ pub mod account { /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { - pub use super::{FindAccountMetadata, FindAccounts, FindAccountsWithAsset}; + pub use super::{FindAccounts, FindAccountsWithAsset}; } } @@ -826,8 +836,6 @@ pub mod asset { use derive_more::Display; - use crate::prelude::*; - queries! { /// [`FindAssets`] Iroha Query finds all [`Asset`]s presented in Iroha Peer. #[derive(Copy, Display)] @@ -841,50 +849,10 @@ pub mod asset { #[display(fmt = "Find all asset definitions")] #[ffi_type] pub struct FindAssetsDefinitions; - - /// [`FindAssetQuantityById`] Iroha Query gets [`AssetId`] as input and finds [`Asset::quantity`] - /// value if [`Asset`] is presented in Iroha Peer. - #[derive(Display)] - #[display(fmt = "Find quantity of the `{id}` asset")] - #[repr(transparent)] - // SAFETY: `FindAssetQuantityById` has no trap representation in `AssetId` - #[ffi_type(unsafe {robust})] - pub struct FindAssetQuantityById { - /// `Id` of an [`Asset`] to find quantity of. - pub id: AssetId, - } - - /// [`FindAssetMetadata`] Iroha Query gets [`AssetId`] and key as input and finds [`MetadataValue`] - /// of the key-value pair stored in this asset. - #[derive(Display)] - #[display(fmt = "Find metadata value with `{key}` key in `{id}` asset")] - #[ffi_type] - pub struct FindAssetMetadata { - /// `Id` of an [`Asset`] acting as [`Store`](crate::asset::AssetValue::Store). - pub id: AssetId, - /// The key of the key-value pair stored in the asset. - pub key: Name, - } - - /// [`FindAssetDefinitionMetadata`] Iroha Query gets [`AssetDefinitionId`] and key as input and finds [`MetadataValue`] - /// of the key-value pair stored in this asset definition. - #[derive(Display)] - #[display(fmt = "Find metadata value with `{key}` key in `{id}` asset definition")] - #[ffi_type] - pub struct FindAssetDefinitionMetadata { - /// `Id` of an [`Asset`] acting as [`Store`](crate::asset::AssetValue::Store).. - pub id: AssetDefinitionId, - /// The key of the key-value pair stored in the asset. - pub key: Name, - } - } /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { - pub use super::{ - FindAssetDefinitionMetadata, FindAssetMetadata, FindAssetQuantityById, FindAssets, - FindAssetsDefinitions, - }; + pub use super::{FindAssets, FindAssetsDefinitions}; } } @@ -898,31 +866,17 @@ pub mod domain { use derive_more::Display; - use crate::prelude::*; - queries! 
{ /// [`FindDomains`] Iroha Query finds all [`Domain`]s presented in Iroha [`Peer`]. #[derive(Copy, Display)] #[display(fmt = "Find all domains")] #[ffi_type] pub struct FindDomains; - - /// [`FindDomainMetadata`] Iroha Query finds a [`MetadataValue`] of the key-value metadata pair - /// in the specified domain. - #[derive(Display)] - #[display(fmt = "Find metadata value with key `{key}` in `{id}` domain")] - #[ffi_type] - pub struct FindDomainMetadata { - /// `Id` of an domain to find. - pub id: DomainId, - /// Key of the specific key-value in the domain's metadata. - pub key: Name, - } } /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { - pub use super::{FindDomainMetadata, FindDomains}; + pub use super::FindDomains; } } @@ -983,8 +937,6 @@ pub mod trigger { use derive_more::Display; - use crate::{trigger::TriggerId, Name}; - queries! { /// Find all currently active (as in not disabled and/or expired) /// trigger IDs. @@ -998,22 +950,11 @@ pub mod trigger { #[display(fmt = "Find all triggers")] #[ffi_type] pub struct FindTriggers; - - /// Find Trigger's metadata key-value pairs. - #[derive(Display)] - #[display(fmt = "Find metadata value with `{key}` key in `{id}` trigger")] - #[ffi_type] - pub struct FindTriggerMetadata { - /// The Identification of the trigger to be found. - pub id: TriggerId, - /// The key inside the metadata dictionary to be returned. - pub key: Name, - } } pub mod prelude { //! Prelude Re-exports most commonly used traits, structs and macros from this crate. - pub use super::{FindActiveTriggerIds, FindTriggerMetadata, FindTriggers}; + pub use super::{FindActiveTriggerIds, FindTriggers}; } } diff --git a/crates/iroha_data_model/src/visit.rs b/crates/iroha_data_model/src/visit.rs index bd4f163bd85..1ebd71adc65 100644 --- a/crates/iroha_data_model/src/visit.rs +++ b/crates/iroha_data_model/src/visit.rs @@ -52,14 +52,8 @@ pub trait Visit { visit_custom_instruction(&CustomInstruction), // Visit SingularQueryBox - visit_find_asset_quantity_by_id(&FindAssetQuantityById), visit_find_executor_data_model(&FindExecutorDataModel), visit_find_parameters(&FindParameters), - visit_find_domain_metadata(&FindDomainMetadata), - visit_find_account_metadata(&FindAccountMetadata), - visit_find_asset_metadata(&FindAssetMetadata), - visit_find_asset_definition_metadata(&FindAssetDefinitionMetadata), - visit_find_trigger_metadata(&FindTriggerMetadata), // Visit IterableQueryBox visit_find_domains(&QueryWithFilter), @@ -158,14 +152,8 @@ pub fn visit_singular_query(visitor: &mut V, query: &Singular } singular_query_visitors! { - visit_find_asset_quantity_by_id(FindAssetQuantityById), visit_find_executor_data_model(FindExecutorDataModel), visit_find_parameters(FindParameters), - visit_find_domain_metadata(FindDomainMetadata), - visit_find_account_metadata(FindAccountMetadata), - visit_find_asset_metadata(FindAssetMetadata), - visit_find_asset_definition_metadata(FindAssetDefinitionMetadata), - visit_find_trigger_metadata(FindTriggerMetadata), } } @@ -375,15 +363,9 @@ leaf_visitors! 
{ visit_log(&Log), visit_custom_instruction(&CustomInstruction), - // Singular Quert visitors - visit_find_asset_quantity_by_id(&FindAssetQuantityById), + // Singular Query visitors visit_find_executor_data_model(&FindExecutorDataModel), visit_find_parameters(&FindParameters), - visit_find_domain_metadata(&FindDomainMetadata), - visit_find_account_metadata(&FindAccountMetadata), - visit_find_asset_metadata(&FindAssetMetadata), - visit_find_asset_definition_metadata(&FindAssetDefinitionMetadata), - visit_find_trigger_metadata(&FindTriggerMetadata), // Iterable Query visitors visit_find_domains(&QueryWithFilter), diff --git a/crates/iroha_executor/src/default/isi/multisig/transaction.rs b/crates/iroha_executor/src/default/isi/multisig/transaction.rs index bd93abc356e..47236cb89bb 100644 --- a/crates/iroha_executor/src/default/isi/multisig/transaction.rs +++ b/crates/iroha_executor/src/default/isi/multisig/transaction.rs @@ -6,7 +6,10 @@ use core::num::NonZeroU64; use iroha_smart_contract::data_model::query::error::QueryExecutionFail; use super::*; -use crate::data_model::Level; +use crate::data_model::{ + query::{builder::SingleQueryError, error::FindError}, + Level, +}; impl VisitExecute for MultisigPropose { fn visit(&self, executor: &mut V) { @@ -41,10 +44,10 @@ impl VisitExecute for MultisigPropose { }; if host - .query_single(FindAccountMetadata::new( - multisig_account, - proposal_key(&instructions_hash), - )) + .query(FindAccounts) + .filter_with(|account| account.id.eq(multisig_account)) + .select_with(|account| account.metadata.key(proposal_key(&instructions_hash))) + .execute_single() .is_ok() { deny!(executor, "multisig proposal duplicates") @@ -145,7 +148,19 @@ fn multisig_spec( ) -> Result { executor .host() - .query_single(FindAccountMetadata::new(multisig_account, spec_key()))? + .query(FindAccounts) + .filter_with(|account| account.id.eq(multisig_account.clone())) + .select_with(|account| account.metadata.key(spec_key())) + .execute_single() + .map_err(|e| match e { + SingleQueryError::QueryError(e) => e, + SingleQueryError::ExpectedOneGotNone => ValidationFail::QueryFailed( + QueryExecutionFail::Find(FindError::Account(multisig_account)), + ), + SingleQueryError::ExpectedOneGotMany | SingleQueryError::ExpectedOneOrZeroGotMany => { + unreachable!() + } + })? .try_into_any() .map_err(metadata_conversion_error) } @@ -157,10 +172,19 @@ fn proposal_value( ) -> Result { executor .host() - .query_single(FindAccountMetadata::new( - multisig_account, - proposal_key(&instructions_hash), - ))? + .query(FindAccounts) + .filter_with(|account| account.id.eq(multisig_account.clone())) + .select_with(|account| account.metadata.key(proposal_key(&instructions_hash))) + .execute_single() + .map_err(|e| match e { + SingleQueryError::QueryError(e) => e, + SingleQueryError::ExpectedOneGotNone => ValidationFail::QueryFailed( + QueryExecutionFail::Find(FindError::Account(multisig_account)), + ), + SingleQueryError::ExpectedOneGotMany | SingleQueryError::ExpectedOneOrZeroGotMany => { + unreachable!() + } + })? 
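+        // the proposal is stored as a `Json` metadata value; decode it into the expected type below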
.try_into_any() .map_err(metadata_conversion_error) } diff --git a/crates/iroha_schema_gen/src/lib.rs b/crates/iroha_schema_gen/src/lib.rs index 41a341c4e3c..cf176edf1c7 100644 --- a/crates/iroha_schema_gen/src/lib.rs +++ b/crates/iroha_schema_gen/src/lib.rs @@ -127,6 +127,9 @@ types!( AccountProjection, AccountRoleChanged, Action, + ActionPredicateAtom, + ActionProjection, + ActionProjection, Algorithm, Asset, AssetChanged, @@ -258,18 +261,13 @@ types!( WasmPath, ExecutorUpgrade, FetchSize, - FindAccountMetadata, FindAccounts, FindAccountsWithAsset, FindActiveTriggerIds, - FindAssetDefinitionMetadata, - FindAssetMetadata, - FindAssetQuantityById, FindAssets, FindAssetsDefinitions, FindBlockHeaders, FindBlocks, - FindDomainMetadata, FindDomains, FindError, FindExecutorDataModel, @@ -280,7 +278,6 @@ types!( FindRoles, FindRolesByAccountId, FindTransactions, - FindTriggerMetadata, FindTriggers, ForwardCursor, GenesisWasmAction, @@ -305,6 +302,9 @@ types!( Ipv6Addr, Ipv4Addr, Json, + JsonPredicateAtom, + JsonProjection, + JsonProjection, Level, Log, MathError, @@ -318,6 +318,8 @@ types!( MetadataPredicateAtom, MetadataProjection, MetadataProjection, + MetadataKeyProjection, + MetadataKeyProjection, Mint, Mint, MintBox, @@ -335,6 +337,9 @@ types!( NonZeroU32, NonZeroU64, Numeric, + NumericPredicateAtom, + NumericProjection, + NumericProjection, NumericSpec, Option, Option, @@ -541,6 +546,7 @@ types!( ValidationFail, Vec, Vec, + Vec, Vec, Vec, Vec, @@ -567,6 +573,7 @@ types!( Vec, Vec, Vec, + Vec, Vec, Vec, Vec, @@ -585,6 +592,7 @@ types!( Vec>, Vec, Vec, + Vec, Vec>, Vec>, Vec>, diff --git a/crates/iroha_smart_contract/src/lib.rs b/crates/iroha_smart_contract/src/lib.rs index 99f75872704..87cf5d09d99 100644 --- a/crates/iroha_smart_contract/src/lib.rs +++ b/crates/iroha_smart_contract/src/lib.rs @@ -246,15 +246,18 @@ pub mod prelude { #[cfg(test)] mod tests { + use alloc::vec; use core::{mem::ManuallyDrop, slice}; + use iroha_data_model::query::{ + parameters::QueryParams, QueryOutput, QueryOutputBatchBox, QueryWithFilter, + }; use iroha_smart_contract_utils::encode_with_length_prefix; use parity_scale_codec::DecodeAll; use webassembly_test::webassembly_test; use super::*; - const QUERY_RESULT: Result = Ok(numeric!(1234)); const ISI_RESULT: Result<(), ValidationFail> = Ok(()); fn get_test_instruction() -> InstructionBox { @@ -262,9 +265,20 @@ mod tests { Register::asset(Asset::new(new_asset_id, 1_u32)).into() } - fn get_test_query() -> FindAssetQuantityById { + fn get_test_query() -> QueryWithParams { let asset_id: AssetId = "rose##ed0120CE7FA46C9DCE7EA4B125E2E36BDB63EA33073E7590AC92816AE1E861B7048B03@wonderland".parse().unwrap(); - FindAssetQuantityById::new(asset_id) + + QueryWithParams::new( + QueryBox::FindAssets(QueryWithFilter::new( + FindAssets, + CompoundPredicate::::build(|asset| asset.id.eq(asset_id)), + SelectorTuple::::build(|asset| asset.value.numeric), + )), + QueryParams::default(), + ) + } + fn get_query_result() -> QueryOutputBatchBoxTuple { + QueryOutputBatchBoxTuple::new(vec![QueryOutputBatchBox::Numeric(vec![numeric!(1234)])]) } #[no_mangle] @@ -286,14 +300,14 @@ mod tests { ) -> *const u8 { let bytes = slice::from_raw_parts(ptr, len); let query_request = QueryRequest::decode_all(&mut &*bytes).unwrap(); - let QueryRequest::Singular(query) = query_request else { - panic!("Expected a singular query") + let QueryRequest::Start(query_with_params) = query_request else { + panic!("Expected Start query, but got {:?}", query_request); }; - let query: FindAssetQuantityById = 
query.try_into().expect("Unexpected query type"); - assert_eq!(query, get_test_query()); + assert_eq!(query_with_params, get_test_query()); - let response: Result = - Ok(QueryResponse::Singular(QUERY_RESULT.unwrap().into())); + let response: Result = Ok(QueryResponse::Iterable( + QueryOutput::new(get_query_result(), 0, None), + )); ManuallyDrop::new(encode_with_length_prefix(&response)).as_ptr() } @@ -306,6 +320,12 @@ mod tests { #[webassembly_test] fn execute_query() { let host = Iroha; - assert_eq!(host.query_single(get_test_query()), QUERY_RESULT); + let (output, remaining_items, next_cursor) = host.start_query(get_test_query()).unwrap(); + assert_eq!(output, get_query_result()); + assert_eq!(remaining_items, 0); + assert!( + next_cursor.is_none(), + "Expected no cursor, but got {next_cursor:?}", + ); } } diff --git a/data_model/samples/executor_custom_data_model/src/complex_isi.rs b/data_model/samples/executor_custom_data_model/src/complex_isi.rs index afbb8571c48..393676c29b7 100644 --- a/data_model/samples/executor_custom_data_model/src/complex_isi.rs +++ b/data_model/samples/executor_custom_data_model/src/complex_isi.rs @@ -125,9 +125,9 @@ mod expression { use core::marker::PhantomData; use iroha_data_model::{ - asset::AssetDefinitionId, + asset::{AssetDefinitionId, AssetId}, isi::InstructionBox, - prelude::{FindAssetQuantityById, Numeric}, + prelude::Numeric, }; use iroha_schema::{IntoSchema, TypeId}; use serde::{Deserialize, Serialize}; @@ -152,16 +152,10 @@ mod expression { /// Represents all possible queries returning a numerical result. #[derive(Debug, Clone, Deserialize, Serialize, IntoSchema)] pub enum NumericQuery { - FindAssetQuantityById(FindAssetQuantityById), + FindAssetQuantityById(AssetId), FindTotalAssetQuantityByAssetDefinitionId(AssetDefinitionId), } - impl From for NumericQuery { - fn from(value: FindAssetQuantityById) -> Self { - Self::FindAssetQuantityById(value) - } - } - /// Represents all possible expressions. 
#[derive(Debug, Deserialize, Serialize, IntoSchema)] pub enum Expression { diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json index be7a0cc19d8..a0e9b8e56ad 100644 --- a/docs/source/references/schema.json +++ b/docs/source/references/schema.json @@ -264,6 +264,37 @@ } ] }, + "ActionPredicateAtom": { + "Enum": [] + }, + "ActionProjection": { + "Enum": [ + { + "tag": "Atom", + "discriminant": 0, + "type": "ActionPredicateAtom" + }, + { + "tag": "Metadata", + "discriminant": 1, + "type": "MetadataProjection" + } + ] + }, + "ActionProjection": { + "Enum": [ + { + "tag": "Atom", + "discriminant": 0, + "type": "()" + }, + { + "tag": "Metadata", + "discriminant": 1, + "type": "MetadataProjection" + } + ] + }, "Algorithm": { "Enum": [ { @@ -787,7 +818,16 @@ ] }, "AssetValuePredicateAtom": { - "Enum": [] + "Enum": [ + { + "tag": "IsNumeric", + "discriminant": 0 + }, + { + "tag": "IsStore", + "discriminant": 1 + } + ] }, "AssetValueProjection": { "Enum": [ @@ -795,6 +835,16 @@ "tag": "Atom", "discriminant": 0, "type": "AssetValuePredicateAtom" + }, + { + "tag": "Numeric", + "discriminant": 1, + "type": "NumericProjection" + }, + { + "tag": "Store", + "discriminant": 2, + "type": "MetadataProjection" } ] }, @@ -804,6 +854,16 @@ "tag": "Atom", "discriminant": 0, "type": "()" + }, + { + "tag": "Numeric", + "discriminant": 1, + "type": "NumericProjection" + }, + { + "tag": "Store", + "discriminant": 2, + "type": "MetadataProjection" } ] }, @@ -2143,18 +2203,6 @@ } ] }, - "FindAccountMetadata": { - "Struct": [ - { - "name": "id", - "type": "AccountId" - }, - { - "name": "key", - "type": "Name" - } - ] - }, "FindAccounts": null, "FindAccountsWithAsset": { "Struct": [ @@ -2165,54 +2213,10 @@ ] }, "FindActiveTriggerIds": null, - "FindAssetDefinitionMetadata": { - "Struct": [ - { - "name": "id", - "type": "AssetDefinitionId" - }, - { - "name": "key", - "type": "Name" - } - ] - }, - "FindAssetMetadata": { - "Struct": [ - { - "name": "id", - "type": "AssetId" - }, - { - "name": "key", - "type": "Name" - } - ] - }, - "FindAssetQuantityById": { - "Struct": [ - { - "name": "id", - "type": "AssetId" - } - ] - }, "FindAssets": null, "FindAssetsDefinitions": null, "FindBlockHeaders": null, "FindBlocks": null, - "FindDomainMetadata": { - "Struct": [ - { - "name": "id", - "type": "DomainId" - }, - { - "name": "key", - "type": "Name" - } - ] - }, "FindDomains": null, "FindError": { "Enum": [ @@ -2300,18 +2304,6 @@ ] }, "FindTransactions": null, - "FindTriggerMetadata": { - "Struct": [ - { - "name": "id", - "type": "TriggerId" - }, - { - "name": "key", - "type": "Name" - } - ] - }, "FindTriggers": null, "ForwardCursor": { "Struct": [ @@ -2697,6 +2689,33 @@ "Ipv4Addr": "Array", "Ipv6Addr": "Array", "Json": "String", + "JsonPredicateAtom": { + "Enum": [ + { + "tag": "Equals", + "discriminant": 0, + "type": "Json" + } + ] + }, + "JsonProjection": { + "Enum": [ + { + "tag": "Atom", + "discriminant": 0, + "type": "JsonPredicateAtom" + } + ] + }, + "JsonProjection": { + "Enum": [ + { + "tag": "Atom", + "discriminant": 0, + "type": "()" + } + ] + }, "Level": { "Enum": [ { @@ -2850,6 +2869,30 @@ } ] }, + "MetadataKeyProjection": { + "Struct": [ + { + "name": "key", + "type": "Name" + }, + { + "name": "projection", + "type": "JsonProjection" + } + ] + }, + "MetadataKeyProjection": { + "Struct": [ + { + "name": "key", + "type": "Name" + }, + { + "name": "projection", + "type": "JsonProjection" + } + ] + }, "MetadataPredicateAtom": { "Enum": [] }, @@ -2859,6 +2902,11 @@ "tag": "Atom", 
"discriminant": 0, "type": "MetadataPredicateAtom" + }, + { + "tag": "Key", + "discriminant": 1, + "type": "MetadataKeyProjection" } ] }, @@ -2868,6 +2916,11 @@ "tag": "Atom", "discriminant": 0, "type": "()" + }, + { + "tag": "Key", + "discriminant": 1, + "type": "MetadataKeyProjection" } ] }, @@ -3146,6 +3199,27 @@ } ] }, + "NumericPredicateAtom": { + "Enum": [] + }, + "NumericProjection": { + "Enum": [ + { + "tag": "Atom", + "discriminant": 0, + "type": "NumericPredicateAtom" + } + ] + }, + "NumericProjection": { + "Enum": [ + { + "tag": "Atom", + "discriminant": 0, + "type": "()" + } + ] + }, "NumericSpec": { "Struct": [ { @@ -3652,123 +3726,138 @@ "type": "Vec" }, { - "tag": "Name", + "tag": "Json", "discriminant": 3, + "type": "Vec" + }, + { + "tag": "Numeric", + "discriminant": 4, + "type": "Vec" + }, + { + "tag": "Name", + "discriminant": 5, "type": "Vec" }, { "tag": "DomainId", - "discriminant": 4, + "discriminant": 6, "type": "Vec" }, { "tag": "Domain", - "discriminant": 5, + "discriminant": 7, "type": "Vec" }, { "tag": "AccountId", - "discriminant": 6, + "discriminant": 8, "type": "Vec" }, { "tag": "Account", - "discriminant": 7, + "discriminant": 9, "type": "Vec" }, { "tag": "AssetId", - "discriminant": 8, + "discriminant": 10, "type": "Vec" }, { "tag": "Asset", - "discriminant": 9, + "discriminant": 11, "type": "Vec" }, { "tag": "AssetValue", - "discriminant": 10, + "discriminant": 12, "type": "Vec" }, { "tag": "AssetDefinitionId", - "discriminant": 11, + "discriminant": 13, "type": "Vec" }, { "tag": "AssetDefinition", - "discriminant": 12, + "discriminant": 14, "type": "Vec" }, { "tag": "Role", - "discriminant": 13, + "discriminant": 15, "type": "Vec" }, { "tag": "Parameter", - "discriminant": 14, + "discriminant": 16, "type": "Vec" }, { "tag": "Permission", - "discriminant": 15, + "discriminant": 17, "type": "Vec" }, { "tag": "CommittedTransaction", - "discriminant": 16, + "discriminant": 18, "type": "Vec" }, { "tag": "SignedTransaction", - "discriminant": 17, + "discriminant": 19, "type": "Vec" }, { "tag": "TransactionHash", - "discriminant": 18, + "discriminant": 20, "type": "Vec>" }, { "tag": "TransactionRejectionReason", - "discriminant": 19, + "discriminant": 21, "type": "Vec>" }, { "tag": "Peer", - "discriminant": 20, + "discriminant": 22, "type": "Vec" }, { "tag": "RoleId", - "discriminant": 21, + "discriminant": 23, "type": "Vec" }, { "tag": "TriggerId", - "discriminant": 22, + "discriminant": 24, "type": "Vec" }, { "tag": "Trigger", - "discriminant": 23, + "discriminant": 25, "type": "Vec" }, + { + "tag": "Action", + "discriminant": 26, + "type": "Vec" + }, { "tag": "Block", - "discriminant": 24, + "discriminant": 27, "type": "Vec" }, { "tag": "BlockHeader", - "discriminant": 25, + "discriminant": 28, "type": "Vec" }, { "tag": "BlockHeaderHash", - "discriminant": 26, + "discriminant": 29, "type": "Vec>" } ] @@ -4837,84 +4926,29 @@ }, "SingularQueryBox": { "Enum": [ - { - "tag": "FindAssetQuantityById", - "discriminant": 0, - "type": "FindAssetQuantityById" - }, { "tag": "FindExecutorDataModel", - "discriminant": 1, + "discriminant": 0, "type": "FindExecutorDataModel" }, { "tag": "FindParameters", - "discriminant": 2, + "discriminant": 1, "type": "FindParameters" - }, - { - "tag": "FindDomainMetadata", - "discriminant": 3, - "type": "FindDomainMetadata" - }, - { - "tag": "FindAccountMetadata", - "discriminant": 4, - "type": "FindAccountMetadata" - }, - { - "tag": "FindAssetMetadata", - "discriminant": 5, - "type": "FindAssetMetadata" - }, - { - "tag": 
"FindAssetDefinitionMetadata", - "discriminant": 6, - "type": "FindAssetDefinitionMetadata" - }, - { - "tag": "FindTriggerMetadata", - "discriminant": 7, - "type": "FindTriggerMetadata" } ] }, "SingularQueryOutputBox": { "Enum": [ - { - "tag": "Numeric", - "discriminant": 0, - "type": "Numeric" - }, { "tag": "ExecutorDataModel", - "discriminant": 1, + "discriminant": 0, "type": "ExecutorDataModel" }, - { - "tag": "Json", - "discriminant": 2, - "type": "Json" - }, - { - "tag": "Trigger", - "discriminant": 3, - "type": "Trigger" - }, { "tag": "Parameters", - "discriminant": 4, + "discriminant": 1, "type": "Parameters" - }, - { - "tag": "Transaction", - "discriminant": 5, - "type": "CommittedTransaction" - }, - { - "tag": "BlockHeader", - "discriminant": 6, - "type": "BlockHeader" } ] }, @@ -5615,6 +5649,11 @@ "tag": "Id", "discriminant": 1, "type": "TriggerIdProjection" + }, + { + "tag": "Action", + "discriminant": 2, + "type": "ActionProjection" } ] }, @@ -5629,6 +5668,11 @@ "tag": "Id", "discriminant": 1, "type": "TriggerIdProjection" + }, + { + "tag": "Action", + "discriminant": 2, + "type": "ActionProjection" } ] }, @@ -5785,6 +5829,9 @@ "Vec>": { "Vec": "AccountProjection" }, + "Vec": { + "Vec": "Action" + }, "Vec": { "Vec": "Asset" }, @@ -5884,12 +5931,18 @@ "Vec": { "Vec": "InstructionBox" }, + "Vec": { + "Vec": "Json" + }, "Vec": { "Vec": "Metadata" }, "Vec": { "Vec": "Name" }, + "Vec": { + "Vec": "Numeric" + }, "Vec>": { "Vec": "Option" }, diff --git a/wasm/samples/executor_custom_instructions_complex/src/lib.rs b/wasm/samples/executor_custom_instructions_complex/src/lib.rs index 51de5cfeaea..6fd17d53ec6 100644 --- a/wasm/samples/executor_custom_instructions_complex/src/lib.rs +++ b/wasm/samples/executor_custom_instructions_complex/src/lib.rs @@ -73,7 +73,16 @@ struct Context<'i> { impl executor_custom_data_model::complex_isi::Context for Context<'_> { fn query(&self, q: &NumericQuery) -> Result { let result = match q.clone() { - NumericQuery::FindAssetQuantityById(q) => self.host.query_single(q), + NumericQuery::FindAssetQuantityById(asset_id) => self + .host + .query(FindAssets) + .filter_with(|asset| asset.id.eq(asset_id)) + .select_with(|asset| asset.value.numeric) + .execute_single() + .map_err(|e| match e { + SingleQueryError::QueryError(e) => e, + _ => unreachable!(), + }), NumericQuery::FindTotalAssetQuantityByAssetDefinitionId(asset_definition_id) => { let asset_definition = self .host diff --git a/wasm/samples/mint_rose_trigger/src/lib.rs b/wasm/samples/mint_rose_trigger/src/lib.rs index ef1fe988d29..804d0986cba 100644 --- a/wasm/samples/mint_rose_trigger/src/lib.rs +++ b/wasm/samples/mint_rose_trigger/src/lib.rs @@ -24,7 +24,10 @@ fn main(host: Iroha, context: Context) { .try_into_any::() .map_or_else( |_| { - host.query_single(FindTriggerMetadata::new(context.id, "VAL".parse().unwrap())) + host.query(FindTriggers) + .filter_with(|trigger| trigger.id.eq(context.id)) + .select_with(|trigger| trigger.action.metadata.key("VAL".parse().unwrap())) + .execute_single() .dbg_unwrap() .try_into_any::() }, From f32a775c19bd855d0ad0f1f979615557071b7320 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marin=20Ver=C5=A1i=C4=87?= Date: Fri, 6 Dec 2024 12:52:46 +0100 Subject: [PATCH 2/2] chore: update CHANGELOG.md (#5264) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Marin Veršić --- CHANGELOG.md | 84 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
d721fea5c6c..4174b19d1e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,90 @@ ## [Unreleased] +## [2.0.0-rc.1.0] - 2024-12-06 + +### Added + +- implement query projections (#5242) +- use persistent executor (#5082) +- add listen timeouts to iroha cli (#5241) +- add /peers API endpoint to torii (#5235) +- address agnostic p2p (#5176) +- improve multisig utility and usability (#5027) +- protect `BasicAuth::password` from being printed (#5195) +- sort descending in `FindTransactions` query (#5190) +- introduce block header into every smart contract execution context (#5151) +- dynamic commit time based on view change index (#4957) +- define default permission set (#5075) +- add implementation of Niche for `Option>` (#5094) +- transaction and block predicates (#5025) +- report amount of remaining items in query (#5016) +- bounded discrete time (#4928) +- don't validate transactions inside WASM (#4995) +- add missing mathematical operations to `Numeric` (#4976) +- validate block sync messages (#4965) +- query filters (#4833) + +### Changed + +- simplify peer id parsing (#5228) +- move transaction error out of block payload (#5118) +- rename JsonString to Json (#5154) +- add client entity to smart contracts (#5073) +- leader as transaction ordering service (#4967) +- directly provide payload to WASM entrypoints (#5113) +- make kura drop old blocks from memory (#5103) +- use `ConstVec` for instructions in `Executable` (#5096) +- gossip txs at most once (#5079) +- reduce memory usage of `CommittedTransaction` (#5089) +- make query cursor errors more specific (#5086) +- make `PublicKey` decoding lazy inside WASM (#5048) +- reorganize crates (#4970) +- introduce `FindTriggers` query, remove `FindTriggerById` (#5040) +- dont depend on signatures for update (#5039) +- change parameters format in genesis.json (#5020) +- only send current and previous view change proof (#4929) +- disable sending message when not ready to prevent busy loop (#5032) +- move total asset quantity to asset definition (#5029) +- sign only block's header, not the whole payload (#5000) +- use `HashOf` as the type of the block hash (#4998) +- simplify `/health` and `/api_version` (#4960) +- unnest wasm samples from `client`, exclude it from workspace (#4863) +- rename `configs` to `defaults`, remove `swarm` (#4862) + +### Fixed + +- flatten inner role in json (#5198) +- fix `cargo audit` warnings (#5183) +- add range check to signature index (#5157) +- fix model macro example in docs (#5149) +- close ws properly in blocks/events stream (#5101) +- broken trusted peers check (#5121) +- check that next block has height +1 (#5111) +- fix timestamp of genesis block (#5098) +- fix `iroha_genesis` compilation without `transparent_api` feature (#5056) +- serialize WASM code for snapshots (#5009) +- correctly handle `replace_top_block` (#4870) +- fix cloning of executor (#4955) +- display more error details (#4973) +- use `GET` for blocks stream (#4990) +- improve queue transactions handling (#4947) +- prevent redundant blocksync block messages (#4909) +- prevent deadlock on simultaneous sending large message (#4948) +- remove expired transaction from cache (#4922) +- fix torii url with path (#4903) + +### Removed + +- remove module-based api from client (#5184) +- remove `riffle_iter` (#5181) +- remove unused dependencies (#5173) +- remove `max` prefix from `blocks_in_memory` (#5145) +- remove consensus estimation (#5116) +- remove `event_recommendations` from block (#4932) + +### Security + ## [2.0.0-pre-rc.22.1] - 2024-07-30 
### Fixed