diff --git a/tests/runner-tests/file-data-sources/abis/Contract.abi b/tests/runner-tests/file-data-sources/abis/Contract.abi
index 9d9f56b9263..6f27d071ad8 100644
--- a/tests/runner-tests/file-data-sources/abis/Contract.abi
+++ b/tests/runner-tests/file-data-sources/abis/Contract.abi
@@ -7,6 +7,12 @@
         "internalType": "string",
         "name": "testCommand",
         "type": "string"
+      },
+      {
+        "indexed": false,
+        "internalType": "string",
+        "name": "data",
+        "type": "string"
       }
     ],
     "name": "TestEvent",
diff --git a/tests/runner-tests/file-data-sources/schema.graphql b/tests/runner-tests/file-data-sources/schema.graphql
index c715a79c3ba..4b0112f29c0 100644
--- a/tests/runner-tests/file-data-sources/schema.graphql
+++ b/tests/runner-tests/file-data-sources/schema.graphql
@@ -1,15 +1,10 @@
-type IpfsFile @entity {
+type FileEntity @entity {
   id: ID!
   content: String!
+  foo: Foo @relation
 }
 
-type IpfsFile1 @entity {
+type Foo @entity {
   id: ID!
-  content: String!
+  ipfs: FileEntity @derivedFrom(field: "foo")
 }
-
-type SpawnTestEntity @entity {
-  id: ID!
-  content: String!
-  context: String!
-}
\ No newline at end of file
diff --git a/tests/runner-tests/file-data-sources/src/mapping.ts b/tests/runner-tests/file-data-sources/src/mapping.ts
index e24ccd9074e..0adf0d7ecd4 100644
--- a/tests/runner-tests/file-data-sources/src/mapping.ts
+++ b/tests/runner-tests/file-data-sources/src/mapping.ts
@@ -4,130 +4,148 @@ import {
   BigInt,
   Bytes,
   DataSourceContext,
+  store,
+  log,
 } from "@graphprotocol/graph-ts";
 import { TestEvent } from "../generated/Contract/Contract";
-import { IpfsFile, IpfsFile1, SpawnTestEntity } from "../generated/schema";
-
-// CID of `file-data-sources/abis/Contract.abi` after being processed by graph-cli.
-const KNOWN_CID = "QmQ2REmceVtzawp7yrnxLQXgNNCtFHEnig6fL9aqE1kcWq";
-
-export function handleBlock(block: ethereum.Block): void {
-  let entity = new IpfsFile("onchain");
-  entity.content = "onchain";
-  entity.save();
-
-  // This will create the same data source twice, once at block 0 and another at block 2.
-  // The creation at block 2 should be detected as a duplicate and therefore a noop.
-  if (block.number == BigInt.fromI32(0) || block.number == BigInt.fromI32(2)) {
-    dataSource.create("File", [KNOWN_CID]);
+import { FileEntity, Foo } from "../generated/schema";
+
+const ONCHAIN_FROM_OFFCHAIN = "CREATE_ONCHAIN_DATASOURCE_FROM_OFFCHAIN_HANDLER";
+const CREATE_FILE = "CREATE_FILE";
+// const CREATE_FILE_FROM_HANDLE_FILE = "CREATE_FILE_FROM_HANDLE_FILE";
+const CREATE_UNDEFINED_ENTITY = "CREATE_UNDEFINED_ENTITY";
+const CREATE_CONFLICTING_ENTITY = "CREATE_CONFLICTING_ENTITY";
+const SPAWN_FDS_FROM_OFFCHAIN_HANDLER = "SPAWN_FDS_FROM_OFFCHAIN_HANDLER";
+const ACCESS_AND_UPDATE_OFFCHAIN_ENTITY_IN_ONCHAIN_HANDLER =
+  "ACCESS_AND_UPDATE_OFFCHAIN_ENTITY_IN_ONCHAIN_HANDLER";
+const ACCESS_FILE_ENTITY_THROUGH_DERIVED_FIELD =
+  "ACCESS_FILE_ENTITY_THROUGH_DERIVED_FIELD";
+
+const CREATE_FOO = "CREATE_FOO";
+
+export function handleTestEvent(event: TestEvent): void {
+  if (event.params.testCommand == CREATE_FILE) {
+    dataSource.createWithContext(
+      "File",
+      [event.params.data],
+      new DataSourceContext(),
+    );
   }
 
-  if (block.number == BigInt.fromI32(1)) {
-    let entity = IpfsFile.load("onchain")!;
-    assert(entity.content == "onchain");
-
-    // The test assumes file data sources are processed in the block in which they are created.
-    // So the ds created at block 0 will have been processed.
-    //
-    // Test that onchain data sources cannot read offchain data.
-    assert(IpfsFile.load(KNOWN_CID) == null);
+  if (event.params.testCommand == SPAWN_FDS_FROM_OFFCHAIN_HANDLER) {
+    let comma_separated_hash = event.params.data;
+    let hash1 = comma_separated_hash.split(",")[0];
+    let hash2 = comma_separated_hash.split(",")[1];
+    let context = new DataSourceContext();
+    context.setString("command", SPAWN_FDS_FROM_OFFCHAIN_HANDLER);
+    context.setString("hash", hash2);
 
-    // Test that using an invalid CID will be ignored
-    dataSource.create("File", ["hi, I'm not valid"]);
+    log.info(
+      "Creating file data source from handleTestEvent, command: {}, hash1: {}, hash2: {}",
+      [SPAWN_FDS_FROM_OFFCHAIN_HANDLER, hash1, hash2],
+    );
+    dataSource.createWithContext("File", [hash1], context);
   }
 
-  // This will invoke File1 data source with same CID, which will be used
-  // to test whether same cid is triggered across different data source.
-  if (block.number == BigInt.fromI32(3)) {
-    // Test that onchain data sources cannot read offchain data (again, but this time more likely to hit the DB than the write queue).
-    assert(IpfsFile.load(KNOWN_CID) == null);
-
-    dataSource.create("File1", [KNOWN_CID]);
+  if (event.params.testCommand == ONCHAIN_FROM_OFFCHAIN) {
+    let context = new DataSourceContext();
+    context.setString("command", ONCHAIN_FROM_OFFCHAIN);
+    context.setString("address", "0x0000000000000000000000000000000000000000");
+    dataSource.createWithContext("File", [event.params.data], context);
   }
-}
 
-export function handleTestEvent(event: TestEvent): void {
-  let command = event.params.testCommand;
-
-  if (command == "createFile2") {
-    // Will fail the subgraph when processed due to mismatch in the entity type and 'entities'.
-    dataSource.create("File2", [KNOWN_CID]);
-  } else if (command == "saveConflictingEntity") {
-    // Will fail the subgraph because the same entity has been created in a file data source.
-    let entity = new IpfsFile(KNOWN_CID);
-    entity.content = "empty";
-    entity.save();
-  } else if (command == "createFile1") {
-    // Will fail the subgraph with a conflict between two entities created by offchain data sources.
-    let context = new DataSourceContext();
-    context.setBytes("hash", event.block.hash);
-    dataSource.createWithContext("File1", [KNOWN_CID], context);
-  } else if (command == "spawnOffChainHandlerTest") {
-    // Used to test the spawning of a file data source from another file data source handler.
-    // `SpawnTestHandler` will spawn a file data source that will be handled by `spawnOffChainHandlerTest`,
-    // which creates another file data source `OffChainDataSource`, which will be handled by `handleSpawnedTest`.
+  if (event.params.testCommand == CREATE_UNDEFINED_ENTITY) {
+    log.info("Creating undefined entity", []);
     let context = new DataSourceContext();
-    context.setString("command", command);
-    dataSource.createWithContext("SpawnTestHandler", [KNOWN_CID], context);
-  } else if (command == "spawnOnChainHandlerTest") {
-    // Used to test the failure of spawning of on-chain data source from a file data source handler.
-    // `SpawnTestHandler` will spawn a file data source that will be handled by `spawnTestHandler`,
-    // which creates an `OnChainDataSource`, which should fail since spawning onchain datasources
-    // from offchain handlers is not allowed.
-    let context = new DataSourceContext();
-    context.setString("command", command);
-    dataSource.createWithContext("SpawnTestHandler", [KNOWN_CID], context);
-  } else {
-    assert(false, "Unknown command: " + command);
+    context.setString("command", CREATE_UNDEFINED_ENTITY);
+    dataSource.createWithContext("File", [event.params.data], context);
   }
-}
 
-export function handleFile(data: Bytes): void {
-  // Test that offchain data sources cannot read onchain data.
-  assert(IpfsFile.load("onchain") == null);
+  if (event.params.testCommand == CREATE_CONFLICTING_ENTITY) {
+    log.info("Creating conflicting entity", []);
+    let entity = new FileEntity(event.params.data);
+    entity.content = "content";
+    entity.save();
+  }
 
   if (
-    dataSource.stringParam() != "QmVkvoPGi9jvvuxsHDVJDgzPEzagBaWSZRYoRDzU244HjZ"
+    event.params.testCommand ==
+    ACCESS_AND_UPDATE_OFFCHAIN_ENTITY_IN_ONCHAIN_HANDLER
   ) {
-    // Test that an offchain data source cannot read from another offchain data source.
-    assert(
-      IpfsFile.load("QmVkvoPGi9jvvuxsHDVJDgzPEzagBaWSZRYoRDzU244HjZ") == null
-    );
+    let hash = event.params.data;
+    log.info("Attempting to load offchain entity in onchain handler: {}", [hash]);
+    let entity = FileEntity.load(event.params.data);
+    if (entity == null) {
+      log.info("Entity not found", []);
+    } else {
+      // This branch should never be reached when the entity was created in an
+      // offchain handler: such entities are not accessible in onchain handlers,
+      // so the load above returns null.
+      log.info("Updating entity content", []);
+      entity.content = "updated content";
+      entity.save();
+    }
   }
 
-  let entity = new IpfsFile(dataSource.stringParam());
-  entity.content = data.toString();
-  entity.save();
-
-  // Test that an offchain data source can load its own entities
-  let loaded_entity = IpfsFile.load(dataSource.stringParam())!;
-  assert(loaded_entity.content == entity.content);
-}
+  if (event.params.testCommand == CREATE_FOO) {
+    let entity = new Foo(event.params.data);
+    entity.save();
+    let context = new DataSourceContext();
+    context.setString("command", CREATE_FOO);
+    dataSource.createWithContext("File", [event.params.data], context);
+  }
 
-export function handleFile1(data: Bytes): void {
-  let entity = new IpfsFile1(dataSource.stringParam());
-  entity.content = data.toString();
-  entity.save();
-}
+  if (event.params.testCommand == ACCESS_FILE_ENTITY_THROUGH_DERIVED_FIELD) {
+    let entity = Foo.load(event.params.data);
+    if (entity == null) {
+      log.info("Entity not found", []);
+    } else {
+      log.info("Accessing file entity through derived field", []);
+      let fileEntity = entity.ipfs.load();
 
-// Used to test spawning a file data source from another file data source handler.
-// This function spawns a file data source that will be handled by `handleSpawnedTest`.
-export function spawnTestHandler(data: Bytes): void {
-  let context = new DataSourceContext();
-  context.setString("file", "fromSpawnTestHandler");
-  let command = dataSource.context().getString("command");
-  if (command == "spawnOffChainHandlerTest") {
-    dataSource.createWithContext("OffChainDataSource", [KNOWN_CID], context);
-  } else if (command == "spawnOnChainHandlerTest") {
-    dataSource.createWithContext("OnChainDataSource", [KNOWN_CID], context);
+      assert(fileEntity.length == 0, "Expected no file entities in onchain handler");
+    }
   }
 }
 
-// This is the handler for the data source spawned by `spawnOffChainHandlerTest`.
-export function handleSpawnedTest(data: Bytes): void {
-  let entity = new SpawnTestEntity(dataSource.stringParam());
-  let context = dataSource.context().getString("file");
-  entity.content = data.toString();
-  entity.context = context;
-  entity.save();
+export function handleFile(data: Bytes): void {
+  log.info("handleFile {}", [dataSource.stringParam()]);
+  let context = dataSource.context();
+
+  if (context.isSet("command")) {
+    let contextCommand = context.getString("command");
+
+    if (contextCommand == SPAWN_FDS_FROM_OFFCHAIN_HANDLER) {
+      let hash = context.getString("hash");
+      log.info("Creating file data source from handleFile: {}", [hash]);
+      dataSource.createWithContext("File", [hash], new DataSourceContext());
+    }
+
+    if (contextCommand == ONCHAIN_FROM_OFFCHAIN) {
+      log.info("Creating onchain data source from offchain handler", []);
+      let address = context.getString("address");
+      dataSource.create("OnChainDataSource", [address]);
+    }
+
+    if (contextCommand == CREATE_UNDEFINED_ENTITY) {
+      log.info("Creating undefined entity", []);
+      let entity = new Foo(dataSource.stringParam());
+      entity.save();
+    }
+
+    if (contextCommand == CREATE_FOO) {
+      log.info("Creating FileEntity with relation to Foo", []);
+      let entity = new FileEntity(dataSource.stringParam());
+      entity.foo = dataSource.stringParam();
+      entity.content = data.toString();
+      entity.save();
+    }
+  } else {
+    log.info("Creating FileEntity from handleFile: {}, content: {}", [
+      dataSource.stringParam(),
+      data.toString(),
+    ]);
+
+    let entity = new FileEntity(dataSource.stringParam());
+    entity.content = data.toString();
+    entity.save();
+  }
 }
diff --git a/tests/runner-tests/file-data-sources/subgraph.yaml b/tests/runner-tests/file-data-sources/subgraph.yaml
index c3b251fd1eb..5438b43c9f2 100644
--- a/tests/runner-tests/file-data-sources/subgraph.yaml
+++ b/tests/runner-tests/file-data-sources/subgraph.yaml
@@ -13,14 +13,13 @@ dataSources:
       apiVersion: 0.0.7
       language: wasm/assemblyscript
       entities:
-        - Gravatar
+        - FileEntity
+        - Foo
      abis:
        - name: Contract
          file: ./abis/Contract.abi
-      blockHandlers:
-        - handler: handleBlock
      eventHandlers:
-        - event: TestEvent(string)
+        - event: TestEvent(string,string)
          handler: handleTestEvent
      file: ./src/mapping.ts
 templates:
@@ -38,10 +37,8 @@ templates:
      abis:
        - name: Contract
          file: ./abis/Contract.abi
-      blockHandlers:
-        - handler: handleBlock
      eventHandlers:
-        - event: TestEvent(string)
+        - event: TestEvent(string,string)
          handler: handleTestEvent
      file: ./src/mapping.ts
   - kind: file/ipfs
@@ -51,61 +48,9 @@ templates:
      apiVersion: 0.0.7
      language: wasm/assemblyscript
      entities:
-        - IpfsFile
+        - FileEntity
      abis:
        - name: Contract
          file: ./abis/Contract.abi
      handler: handleFile
      file: ./src/mapping.ts
-  - kind: file/ipfs
-    name: File1
-    mapping:
-      kind: ethereum/events
-      apiVersion: 0.0.7
-      language: wasm/assemblyscript
-      entities:
-        - IpfsFile1
-      abis:
-        - name: Contract
-          file: ./abis/Contract.abi
-      handler: handleFile1
-      file: ./src/mapping.ts
-  - kind: file/ipfs
-    name: File2
-    mapping:
-      kind: ethereum/events
-      apiVersion: 0.0.7
-      language: wasm/assemblyscript
-      entities:
-        - IpfsFile # will trigger an error, should be IpfsFile1
-      abis:
-        - name: Contract
-          file: ./abis/Contract.abi
-      handler: handleFile1
-      file: ./src/mapping.ts
-  - kind: file/ipfs
-    name: SpawnTestHandler
-    mapping:
-      kind: ethereum/events
-      apiVersion: 0.0.7
-      language: wasm/assemblyscript
-      entities:
-        - SpawnTestEntity
-      abis:
-        - name: Contract
-          file: ./abis/Contract.abi
-      handler: spawnTestHandler
-      file: ./src/mapping.ts
-  - kind: file/ipfs
-    name: OffChainDataSource
-    mapping:
-      kind: ethereum/events
-      apiVersion: 0.0.7
-      language: wasm/assemblyscript
-      entities:
-        - SpawnTestEntity
-      abis:
-        - name: Contract
-          file: ./abis/Contract.abi
-      handler: handleSpawnedTest
-      file: ./src/mapping.ts
\ No newline at end of file
diff --git a/tests/src/fixture/ethereum.rs b/tests/src/fixture/ethereum.rs
index 57a5cc85c95..b20672ce563 100644
--- a/tests/src/fixture/ethereum.rs
+++ b/tests/src/fixture/ethereum.rs
@@ -151,6 +151,33 @@ pub fn push_test_log(block: &mut BlockWithTriggers<Chain>, payload: impl Into<String>) {
 }
 
+pub fn push_test_command(
+    block: &mut BlockWithTriggers<Chain>,
+    test_command: impl Into<String>,
+    data: impl Into<String>,
+) {
+    let log = Arc::new(Log {
+        address: Address::zero(),
+        topics: vec![tiny_keccak::keccak256(b"TestEvent(string,string)").into()],
+        data: ethabi::encode(&[
+            ethabi::Token::String(test_command.into()),
+            ethabi::Token::String(data.into()),
+        ])
+        .into(),
+        block_hash: Some(H256::from_slice(block.ptr().hash.as_slice())),
+        block_number: Some(block.ptr().number.into()),
+        transaction_hash: Some(H256::from_low_u64_be(0)),
+        transaction_index: Some(0.into()),
+        log_index: Some(0.into()),
+        transaction_log_index: Some(0.into()),
+        log_type: None,
+        removed: None,
+    });
+    block
+        .trigger_data
+        .push(EthereumTrigger::Log(LogRef::FullLog(log, None)))
+}
+
 pub fn push_test_polling_trigger(block: &mut BlockWithTriggers<Chain>) {
     block.trigger_data.push(EthereumTrigger::Block(
         block.ptr(),
diff --git a/tests/tests/runner_tests.rs b/tests/tests/runner_tests.rs
index b3b3824a478..00673a4326b 100644
--- a/tests/tests/runner_tests.rs
+++ b/tests/tests/runner_tests.rs
@@ -21,7 +21,7 @@ use graph::prelude::{
     hex, CheapClone, DeploymentHash, SubgraphAssignmentProvider, SubgraphName, SubgraphStore,
 };
 use graph_tests::fixture::ethereum::{
-    chain, empty_block, generate_empty_blocks_for_range, genesis, push_test_log,
+    chain, empty_block, generate_empty_blocks_for_range, genesis, push_test_command, push_test_log,
     push_test_polling_trigger,
 };
 
@@ -630,21 +630,53 @@ async fn end_block() -> anyhow::Result<()> {
 #[tokio::test]
 async fn file_data_sources() {
     let RunnerTestRecipe { stores, test_info } =
-        RunnerTestRecipe::new("file_data_sources", "file-data-sources").await;
+        RunnerTestRecipe::new("file-data-sources", "file-data-sources").await;
+
+    let ipfs = IpfsClient::new("http://localhost:5001").unwrap();
+
+    async fn add_content_to_ipfs(ipfs: &IpfsClient, content: &str) -> String {
+        let bytes = content.to_string().into_bytes();
+        let resp = ipfs.add(bytes).await.unwrap();
+        resp.hash
+    }
+
+    let hash_1 = add_content_to_ipfs(&ipfs, "EXAMPLE_1").await;
+    let hash_2 = add_content_to_ipfs(&ipfs, "EXAMPLE_2").await;
+    let hash_3 = add_content_to_ipfs(&ipfs, "EXAMPLE_3").await;
+    let hash_4 = add_content_to_ipfs(&ipfs, "EXAMPLE_4").await;
+
+    // Concatenate hash_2 and hash_3 into a single comma-separated argument.
+    let hash_2_comma_3 = format!("{},{}", hash_2, hash_3);
 
     let blocks = {
         let block_0 = genesis();
-        let block_1 = empty_block(block_0.ptr(), test_ptr(1));
-        let block_2 = empty_block(block_1.ptr(), test_ptr(2));
-        let block_3 = empty_block(block_2.ptr(), test_ptr(3));
+        let mut block_1 = empty_block(block_0.ptr(), test_ptr(1));
+        push_test_command(&mut block_1, "CREATE_FILE", &hash_1);
+        let mut block_2 = empty_block(block_1.ptr(), test_ptr(2));
+        push_test_command(&mut block_2, "CREATE_FILE", &hash_1);
+
+        let mut block_3 = empty_block(block_2.ptr(), test_ptr(3));
+        push_test_command(
+            &mut block_3,
+            "SPAWN_FDS_FROM_OFFCHAIN_HANDLER",
+            &hash_2_comma_3,
+        );
+        let block_4 = empty_block(block_3.ptr(), test_ptr(4));
+        let mut block_5 = empty_block(block_4.ptr(), test_ptr(5));
-        push_test_log(&mut block_5, "spawnOffChainHandlerTest");
-        let block_6 = empty_block(block_5.ptr(), test_ptr(6));
-        let mut block_7 = empty_block(block_6.ptr(), test_ptr(7));
-        push_test_log(&mut block_7, "createFile2");
+        push_test_command(
+            &mut block_5,
+            "CREATE_ONCHAIN_DATASOURCE_FROM_OFFCHAIN_HANDLER",
+            &hash_3,
+        );
+
+        let mut block_6 = empty_block(block_5.ptr(), test_ptr(6));
+
+        push_test_command(&mut block_6, "CREATE_UNDEFINED_ENTITY", &hash_4);
+
         vec![
-            block_0, block_1, block_2, block_3, block_4, block_5, block_6, block_7,
+            block_0, block_1, block_2, block_3, block_4, block_5, block_6,
         ]
     };
 
@@ -665,181 +697,190 @@ async fn file_data_sources() {
     )
     .await;
     let ctx = fixture::setup(&test_info, &stores, &chain, None, None).await;
-    ctx.start_and_sync_to(test_ptr(1)).await;
-
-    // CID of `file-data-sources/abis/Contract.abi` after being processed by graph-cli.
-    let id = "QmQ2REmceVtzawp7yrnxLQXgNNCtFHEnig6fL9aqE1kcWq";
-    let content_bytes = ctx
-        .ipfs
-        .cat_all(id, Some(Duration::from_secs(10)), usize::MAX)
-        .await
-        .unwrap();
-    let content = String::from_utf8(content_bytes.into()).unwrap();
-    let query_res = ctx
-        .query(&format!(r#"{{ ipfsFile(id: "{id}") {{ id, content }} }}"#,))
-        .await
-        .unwrap();
-
-    assert_json_eq!(
-        query_res,
-        Some(object! { ipfsFile: object!{ id: id, content: content.clone() } })
-    );
-
-    // assert whether duplicate data sources are created.
-    ctx.start_and_sync_to(test_ptr(2)).await;
+    {
+        ctx.start_and_sync_to(test_ptr(1)).await;
 
-    let store = ctx.store.cheap_clone();
-    let writable = store
-        .writable(ctx.logger.clone(), ctx.deployment.id, Arc::new(Vec::new()))
-        .await
-        .unwrap();
-    let datasources = writable.load_dynamic_data_sources(vec![]).await.unwrap();
-    assert!(datasources.len() == 1);
+        let content = "EXAMPLE_1";
+        let query_res = ctx
+            .query(&format!(
+                r#"{{ fileEntity(id: "{}") {{ id, content }} }}"#,
+                hash_1.clone()
+            ))
+            .await
+            .unwrap();
 
-    ctx.start_and_sync_to(test_ptr(3)).await;
+        let store = ctx.store.cheap_clone();
+        let writable = store
+            .writable(ctx.logger.clone(), ctx.deployment.id, Arc::new(Vec::new()))
+            .await
+            .unwrap();
+        let datasources = writable.load_dynamic_data_sources(vec![]).await.unwrap();
+        assert!(datasources.len() == 1);
 
-    let query_res = ctx
-        .query(&format!(r#"{{ ipfsFile1(id: "{id}") {{ id, content }} }}"#,))
-        .await
-        .unwrap();
+        assert_json_eq!(
+            query_res,
+            Some(object! { fileEntity: object!{ id: hash_1.clone(), content: content } })
+        );
+    }
 
-    assert_json_eq!(
-        query_res,
-        Some(object! { ipfsFile1: object!{ id: id , content: content.clone() } })
-    );
+    // Should not create a duplicate data source
+    {
+        ctx.start_and_sync_to(test_ptr(2)).await;
 
-    ctx.start_and_sync_to(test_ptr(4)).await;
-    let writable = ctx
-        .store
-        .clone()
-        .writable(ctx.logger.clone(), ctx.deployment.id, Arc::new(Vec::new()))
-        .await
-        .unwrap();
-    let data_sources = writable.load_dynamic_data_sources(vec![]).await.unwrap();
-    assert!(data_sources.len() == 2);
-
-    let mut causality_region = CausalityRegion::ONCHAIN;
-    for data_source in data_sources {
-        assert!(data_source.done_at.is_some());
-        assert!(data_source.causality_region == causality_region.next());
-        causality_region = causality_region.next();
+        let store = ctx.store.cheap_clone();
+        let writable = store
+            .writable(ctx.logger.clone(), ctx.deployment.id, Arc::new(Vec::new()))
+            .await
+            .unwrap();
+        let datasources = writable.load_dynamic_data_sources(vec![]).await.unwrap();
+        assert!(datasources.len() == 1);
     }
 
-    ctx.start_and_sync_to(test_ptr(5)).await;
-    let writable = ctx
-        .store
-        .clone()
-        .writable(ctx.logger.clone(), ctx.deployment.id, Arc::new(Vec::new()))
-        .await
-        .unwrap();
-    let data_sources = writable.load_dynamic_data_sources(vec![]).await.unwrap();
-    assert!(data_sources.len() == 4);
-
-    ctx.start_and_sync_to(test_ptr(6)).await;
-    let query_res = ctx
-        .query(&format!(
-            r#"{{ spawnTestEntity(id: "{id}") {{ id, content, context }} }}"#,
-        ))
-        .await
-        .unwrap();
+    // Create a File data source from another file data source handler of the same type
+    {
+        ctx.start_and_sync_to(test_ptr(4)).await;
 
-    assert_json_eq!(
-        query_res,
-        Some(
-            object! { spawnTestEntity: object!{ id: id , content: content.clone(), context: "fromSpawnTestHandler" } }
-        )
-    );
+        let content = "EXAMPLE_3";
+        let query_res = ctx
+            .query(&format!(
+                r#"{{ fileEntity(id: "{}") {{ id, content }} }}"#,
+                hash_3.clone()
+            ))
+            .await
+            .unwrap();
+        assert_json_eq!(
+            query_res,
+            Some(object! { fileEntity: object!{ id: hash_3.clone(), content: content } })
+        );
+    }
 
-    let stop_block = test_ptr(7);
-    let err = ctx.start_and_sync_to_error(stop_block.clone()).await;
-    let message = "entity type `IpfsFile1` is not on the 'entities' list for data source `File2`. \
-                   Hint: Add `IpfsFile1` to the 'entities' list, which currently is: `IpfsFile`."
-        .to_string();
-    let expected_err = SubgraphError {
-        subgraph_id: ctx.deployment.hash.clone(),
-        message,
-        block_ptr: Some(stop_block),
-        handler: None,
-        deterministic: false,
-    };
-    assert_eq_ignore_backtrace(&err, &expected_err);
+    // Should not allow creating on-chain data source from off-chain data source handler
+    {
+        let err = ctx.start_and_sync_to_error(test_ptr(5)).await;
+        let message =
+            "Attempted to create on-chain data source in offchain data source handler.".to_string();
+        assert!(err.to_string().contains(&message));
+    }
 
-    // Unfail the subgraph to test a conflict between an onchain and offchain entity
+    // Should not allow creating a conflicting entity, i.e. an entity created in an
+    // offchain handler cannot also be created in an onchain handler
     {
+        ctx.rewind(test_ptr(4));
 
-        // Replace block number 7 with one that contains a different event
         let mut blocks = blocks.clone();
-        blocks.pop();
-        let block_7_1_ptr = test_ptr_reorged(7, 1);
-        let mut block_7_1 = empty_block(test_ptr(6), block_7_1_ptr.clone());
-        push_test_log(&mut block_7_1, "saveConflictingEntity");
-        blocks.push(block_7_1);
+        blocks.retain(|block| block.block.number() <= 4);
+
+        let mut block_5 = empty_block(test_ptr(4), test_ptr(5));
+        push_test_command(&mut block_5, "CREATE_CONFLICTING_ENTITY", &hash_1);
+        blocks.push(block_5.clone());
 
         chain.set_block_stream(blocks);
 
-        // Errors in the store pipeline can be observed by using the runner directly.
-        let runner = ctx.runner(block_7_1_ptr.clone()).await;
+        let message = "writing FileEntity entities at block 5 failed: conflicting key value violates exclusion constraint \"file_entity_id_block_range_excl\" Query: insert 1 rows with ids [QmYiiCtcXmSHXN3m2nyqLaTM7zi81KjVdZ9WXkcrCKrkjr@[5, ∞)]";
+
+        let runner = ctx.runner(block_5.ptr()).await;
         let err = runner
             .run()
             .await
             .err()
             .unwrap_or_else(|| panic!("subgraph ran successfully but an error was expected"));
 
-        let message = "writing IpfsFile entities at block 7 failed: \
-                       conflicting key value violates exclusion constraint \"ipfs_file_id_block_range_excl\" \
-                       Query: insert 1 rows \
-                       with ids [QmQ2REmceVtzawp7yrnxLQXgNNCtFHEnig6fL9aqE1kcWq@[7, ∞)]"
-            .to_string();
         assert_eq!(err.to_string(), message);
     }
 
-    // Unfail the subgraph to test a conflict between an onchain and offchain entity
+    // Should not allow accessing entities created in offchain handlers from onchain handlers
     {
-        // Replace block number 7 with one that contains a different event
+        ctx.rewind(test_ptr(4));
+
+        let mut blocks = blocks.clone();
-        blocks.pop();
-        let block_7_2_ptr = test_ptr_reorged(7, 2);
-        let mut block_7_2 = empty_block(test_ptr(6), block_7_2_ptr.clone());
-        push_test_log(&mut block_7_2, "createFile1");
-        blocks.push(block_7_2);
+        blocks.retain(|block| block.block.number() <= 4);
+
+        let mut block_5 = empty_block(test_ptr(4), test_ptr(5));
+        push_test_command(
+            &mut block_5,
+            "ACCESS_AND_UPDATE_OFFCHAIN_ENTITY_IN_ONCHAIN_HANDLER",
+            &hash_1,
+        );
+        blocks.push(block_5.clone());
 
         chain.set_block_stream(blocks);
 
-        // Errors in the store pipeline can be observed by using the runner directly.
-        let err = ctx
-            .runner(block_7_2_ptr.clone())
+        ctx.start_and_sync_to(block_5.ptr()).await;
+
+        let content = "EXAMPLE_1";
+        let query_res = ctx
+            .query(&format!(
+                r#"{{ fileEntity(id: "{}") {{ id, content }} }}"#,
+                hash_1.clone()
+            ))
             .await
-            .run()
+            .unwrap();
+        // The content must be unchanged: the onchain handler could not load the
+        // offchain entity, so its attempted update was a no-op.
+        assert_json_eq!(
+            query_res,
+            Some(object! { fileEntity: object!{ id: hash_1.clone(), content: content } })
+        );
+    }
 
-            .err()
-            .unwrap_or_else(|| panic!("subgraph ran successfully but an error was expected"));
+    // Prevent access to entities created by offchain handlers when using derived loaders in onchain handlers.
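+    // The GraphQL query below still resolves `foo.ipfs`, but the derived-field
+    // loader invoked inside the onchain handler (the
+    // ACCESS_FILE_ENTITY_THROUGH_DERIVED_FIELD branch in mapping.ts) must come
+    // back empty, since the FileEntity was written by an offchain handler.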
+    {
+        ctx.rewind(test_ptr(4));
+
+        let mut blocks = blocks.clone();
+        blocks.retain(|block| block.block.number() <= 4);
+
+        let hash_5 = add_content_to_ipfs(&ipfs, "EXAMPLE_5").await;
+
+        let mut block_5 = empty_block(test_ptr(4), test_ptr(5));
+        push_test_command(&mut block_5, "CREATE_FOO", &hash_5);
+        blocks.push(block_5.clone());
+
+        let mut block_6 = empty_block(block_5.ptr(), test_ptr(6));
+        push_test_command(
+            &mut block_6,
+            "ACCESS_FILE_ENTITY_THROUGH_DERIVED_FIELD",
+            &hash_5,
+        );
+        blocks.push(block_6.clone());
+
+        chain.set_block_stream(blocks);
+
+        ctx.start_and_sync_to(block_5.ptr()).await;
+
+        let query_res = ctx
+            .query(&format!(
+                r#"{{ foo(id: "{}") {{ id, ipfs {{ id, content }} }} }}"#,
+                hash_5.clone(),
+            ))
             .await
-            .err()
-            .unwrap_or_else(|| panic!("subgraph ran successfully but an error was expected"));
+            .unwrap();
+        let content = "EXAMPLE_5";
+        assert_json_eq!(
+            query_res,
+            Some(
+                object! { foo: object!{ id: hash_5.clone(), ipfs: object!{id: hash_5.clone(), content: content}} }
+            )
+        );
 
-        let message = "writing IpfsFile1 entities at block 7 failed: \
-                       conflicting key value violates exclusion constraint \"ipfs_file_1_id_block_range_excl\" \
-                       Query: insert 1 rows \
-                       with ids [QmQ2REmceVtzawp7yrnxLQXgNNCtFHEnig6fL9aqE1kcWq@[7, ∞)]"
-            .to_string();
-        assert_eq!(err.to_string(), message);
+        ctx.start_and_sync_to(block_6.ptr()).await;
     }
 
+    // Should not allow creating an entity that is not declared in the manifest for
+    // the offchain data source
     {
-        ctx.rewind(test_ptr(6));
-        // Replace block number 7 with one that contains a different event
+        ctx.rewind(test_ptr(4));
+
+        let mut blocks = blocks.clone();
-        blocks.pop();
-        let block_7_3_ptr = test_ptr_reorged(7, 1);
-        let mut block_7_3 = empty_block(test_ptr(6), block_7_3_ptr.clone());
-        push_test_log(&mut block_7_3, "spawnOnChainHandlerTest");
-        blocks.push(block_7_3);
+        blocks.retain(|block| block.block.number() <= 4);
+
+        let mut block_5 = empty_block(test_ptr(4), test_ptr(5));
+        push_test_command(&mut block_5, "CREATE_UNDEFINED_ENTITY", &hash_1);
+        blocks.push(block_5.clone());
 
         chain.set_block_stream(blocks);
 
-        // Errors in the store pipeline can be observed by using the runner directly.
-        let err = ctx.start_and_sync_to_error(block_7_3_ptr).await;
-        let message =
-            "Attempted to create on-chain data source in offchain data source handler. This is not yet supported. at block #7 (0000000100000000000000000000000000000000000000000000000000000007)"
-                .to_string();
+        let message = "error while executing at wasm backtrace:\t 0: 0x3490 - <unknown>!generated/schema/Foo#save\t 1: 0x3e1c - <unknown>!src/mapping/handleFile: entity type `Foo` is not on the 'entities' list for data source `File`. Hint: Add `Foo` to the 'entities' list, which currently is: `FileEntity`. in handler `handleFile` at block #5 () at block #5 (0000000000000000000000000000000000000000000000000000000000000005)";
+
+        let err = ctx.start_and_sync_to_error(block_5.ptr()).await;
+
+        assert_eq!(err.to_string(), message);
     }
 }
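
Note for reviewers: a minimal sketch of how a further case could be wired into this harness, reusing the helpers this change introduces (`add_content_to_ipfs`, `push_test_command`); the names `hash_6`, `block_7`, and `EXAMPLE_6` are hypothetical and not part of the diff:

    // Pin new content to IPFS, then emit a TestEvent(string,string) whose
    // `data` parameter carries the CID (hypothetical extension of the test).
    let hash_6 = add_content_to_ipfs(&ipfs, "EXAMPLE_6").await;

    let mut block_7 = empty_block(test_ptr(6), test_ptr(7));
    push_test_command(&mut block_7, "CREATE_FILE", &hash_6);
    blocks.push(block_7.clone());
    chain.set_block_stream(blocks);

    // Syncing past the block runs the offchain handler, after which the
    // FileEntity keyed by the CID becomes queryable.
    ctx.start_and_sync_to(block_7.ptr()).await;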