diff --git a/Avail/avail-turing-starter/.github/scripts/publish-deploy.sh b/Avail/avail-turing-starter/.github/scripts/publish-deploy.sh
new file mode 100644
index 00000000..3c9dc04b
--- /dev/null
+++ b/Avail/avail-turing-starter/.github/scripts/publish-deploy.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+while getopts p:o:e: flag
+do
+  case "${flag}" in
+    e) ENDPOINT=${OPTARG};;
+    p) PROJECTNAME=${OPTARG};;
+    o) ORG=${OPTARG};;
+    *) echo "Usage: $0 [-p projectname] [-o org] [-e endpoint]" && exit 1;;
+  esac
+done
+
+IPFSCID=$(npx subql publish -o -f .)
+
+npx subql deployment:deploy -d --ipfsCID="$IPFSCID" --projectName="${PROJECTNAME}" --org="${ORG%/*}" --endpoint="${ENDPOINT}"
diff --git a/Avail/avail-turing-starter/.github/workflows/cli-deploy.yml b/Avail/avail-turing-starter/.github/workflows/cli-deploy.yml
new file mode 100644
index 00000000..658d2c6c
--- /dev/null
+++ b/Avail/avail-turing-starter/.github/workflows/cli-deploy.yml
@@ -0,0 +1,34 @@
+name: "CLI deploy"
+
+on:
+  workflow_dispatch:
+    inputs:
+      projectName:
+        description: "Project name"
+        required: true
+        type: string
+jobs:
+  deploy:
+    name: CLI Deploy
+    runs-on: ubuntu-latest
+    environment:
+      name: DEPLOYMENT
+    env:
+      SUBQL_ACCESS_TOKEN: ${{ secrets.SUBQL_ACCESS_TOKEN }}
+      ENDPOINT: ${{ secrets.ENDPOINT }}
+    steps:
+      - uses: actions/checkout@v2
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2
+        with:
+          node-version: 16
+      - run: yarn
+      - name: Codegen
+        run: yarn codegen
+      - name: Version
+        run: npx subql --version
+      - name: repo
+        run: echo ${{github.repository}}
+      - name: Publish and Deploy
+        run: |
+          sh .github/scripts/publish-deploy.sh -o ${{github.repository}} -p ${{github.event.inputs.projectName}} -e ${{secrets.ENDPOINT}}
diff --git a/Avail/avail-turing-starter/.github/workflows/pr.yml b/Avail/avail-turing-starter/.github/workflows/pr.yml
new file mode 100644
index 00000000..b428f2d8
--- /dev/null
+++ b/Avail/avail-turing-starter/.github/workflows/pr.yml
@@ -0,0 +1,24 @@
+name: PR
+on:
+  pull_request:
+    paths-ignore:
+      - ".github/workflows/**"
+jobs:
+  pr:
+    name: pr
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Setup Node.js environment
+        uses: actions/setup-node@v2
+        with:
+          node-version: 16
+      - run: yarn
+      - name: Codegen
+        run: yarn codegen
+      - name: Build
+        run: yarn build
+      - name: Install subql-node
+        run: yarn global add @subql/node
+      - name: Run tests with Subquery Node
+        run: subql-node test -f ${{ github.workspace }}
diff --git a/Avail/avail-turing-starter/.gitignore b/Avail/avail-turing-starter/.gitignore
new file mode 100644
index 00000000..9615e3cf
--- /dev/null
+++ b/Avail/avail-turing-starter/.gitignore
@@ -0,0 +1,58 @@
+# These are some examples of commonly ignored file patterns.
+# You should customize this list as applicable to your project.
+# Learn more about .gitignore: +# https://www.atlassian.com/git/tutorials/saving-changes/gitignore + +# Node artifact files +node_modules/ +dist/ + +# lock files +yarn.lock +package-lock.json + +# Compiled Java class files +*.class + +# Compiled Python bytecode +*.py[cod] + +# Log files +*.log + +# Package files +*.jar + +# Generated files +target/ +dist/ +src/types +project.yaml + +# JetBrains IDE +.idea/ + +# Unit test reports +TEST*.xml + +# Generated by MacOS +.DS_Store + +# Generated by Windows +Thumbs.db + +# Applications +*.app +*.exe +*.war + +# Large media files +*.mp4 +*.tiff +*.avi +*.flv +*.mov +*.wmv + +.data +.eslintcache diff --git a/Avail/avail-turing-starter/LICENSE b/Avail/avail-turing-starter/LICENSE new file mode 100644 index 00000000..f168fbe1 --- /dev/null +++ b/Avail/avail-turing-starter/LICENSE @@ -0,0 +1,21 @@ +MIT LICENSE + +Copyright 2020-2024 SubQuery Pte Ltd authors & contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Avail/avail-turing-starter/README.md b/Avail/avail-turing-starter/README.md new file mode 100644 index 00000000..189521e1 --- /dev/null +++ b/Avail/avail-turing-starter/README.md @@ -0,0 +1,151 @@ +# SubQuery - Example Project for Avail Turing + +[SubQuery](https://subquery.network) is a fast, flexible, and reliable open-source data indexer that provides you with custom APIs for your web3 project across all of our supported networks. To learn about how to get started with SubQuery, [visit our docs](https://academy.subquery.network). + +**This SubQuery project indexes all asset transfers using the balances pallet on the Avail Turing Network** + +## Start + +First, install SubQuery CLI globally on your terminal by using NPM `npm install -g @subql/cli` + +You can either clone this GitHub repo, or use the `subql` CLI to bootstrap a clean project in the network of your choosing by running `subql init` and following the prompts. + +Don't forget to install dependencies with `npm install` or `yarn install`! 
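+
+If it helps to see the whole flow in one place, the commands below sketch a typical local setup. This is only a sketch: the project directory name is illustrative, and `yarn dev` is explained under "Run your project" below.
+
+```shell
+# Install the SubQuery CLI once, globally
+npm install -g @subql/cli
+
+# Either clone this repository, or scaffold a fresh project and pick Avail Turing from the prompts
+subql init my-avail-project   # "my-avail-project" is an illustrative name
+cd my-avail-project           # or the cloned avail-turing-starter directory
+
+# Install dependencies, then generate types, build, and start the local stack
+yarn install                  # or: npm install
+yarn dev                      # or: npm run-script dev
+```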
+
+## Editing your SubQuery project
+
+Although this is a working example SubQuery project, you can edit the SubQuery project by changing the following files:
+
+- The project manifest in `project.ts` defines the key project configuration and mapping handler filters
+- The GraphQL Schema (`schema.graphql`) defines the shape of the resulting data that you are using SubQuery to index
+- The Mapping functions in the `src/mappings/` directory are TypeScript functions that handle transformation logic
+
+SubQuery supports various layer-1 blockchain networks and provides [dedicated quick start guides](https://academy.subquery.network/quickstart/quickstart.html) as well as [detailed technical documentation](https://academy.subquery.network/build/introduction.html) for each of them.
+
+## Run your project
+
+_If you get stuck, find out how to get help below._
+
+The simplest way to run your project is by running `yarn dev` or `npm run-script dev`. This does all of the following:
+
+1. `yarn codegen` - Generates TypeScript types from the GraphQL schema definition and saves them in the `/src/types` directory. This must be done after each change to the `schema.graphql` file
+2. `yarn build` - Builds and packages the SubQuery project into the `/dist` directory
+3. `docker-compose pull && docker-compose up` - Runs Docker containers with an indexer, a PostgreSQL database, and a query service. This requires [Docker to be installed](https://docs.docker.com/engine/install) and running locally. The configuration for these containers is set from your `docker-compose.yml`
+
+You can observe the three services start, and once all are running (it may take a few minutes on your first start), please open your browser and head to [http://localhost:3000](http://localhost:3000) - you should see a GraphQL playground with the schemas ready to query. [Read the docs for more information](https://academy.subquery.network/run_publish/run.html) or [explore the possible service configuration for running SubQuery](https://academy.subquery.network/run_publish/references.html).
+
+## Query your project
+
+For this project, you can try to query with the following GraphQL code to get a taste of how it works.
+
+```graphql
+{
+  query {
+    transfers(first: 5, orderBy: BLOCK_NUMBER_DESC) {
+      totalCount
+      nodes {
+        id
+        date
+        blockNumber
+        toId
+        fromId
+        amount
+      }
+    }
+    accounts(first: 5, orderBy: SENT_TRANSFERS_COUNT_DESC) {
+      nodes {
+        id
+        sentTransfers(first: 5, orderBy: BLOCK_NUMBER_DESC) {
+          totalCount
+          nodes {
+            id
+            toId
+            amount
+          }
+        }
+        lastTransferBlock
+      }
+    }
+  }
+}
+```
+
+The result will resemble the following:
+
+```json
+{
+  "data": {
+    "query": {
+      "transfers": {
+        "totalCount": 8,
+        "nodes": [
+          {
+            "id": "5879-2",
+            "date": "2024-03-29T15:18:00.001",
+            "blockNumber": 5879,
+            "toId": "5evhtn73wblaexppdyldxumh6zdga6x77e2phmuhfvmnmdmq",
+            "fromId": "5ekefhvjv1fzl4vcmitybtcwxg9dtzhogrfbbuy5e4rj8kkd",
+            "amount": "10000000000000000000"
+          }
+        ]
+      },
+      "accounts": {
+        "nodes": [
+          {
+            "id": "5h9wh9upu2kgzrcmlmedkhhmxh1plgbefmugplggzfvjkkkw",
+            "sentTransfers": {
+              "totalCount": 4,
+              "nodes": [
+                {
+                  "id": "5701-2",
+                  "toId": "5ekefhvjv1fzl4vcmitybtcwxg9dtzhogrfbbuy5e4rj8kkd",
+                  "amount": "200000000000000000000"
+                },
+                {
+                  "id": "5691-4",
+                  "toId": "5covxteiqwzzsuurpwuuh4iwhkgty5inci99wzpzysbzghwk",
+                  "amount": "100100000000000000000000"
+                },
+                {
+                  "id": "4941-4",
+                  "toId": "5cdgxh8q9dzd3tnattg6qm6f4yr1kbecbgumh2xbebq8jfa5",
+                  "amount": "1000000000000000000000000"
+                },
+                {
+                  "id": "4929-4",
+                  "toId": "5ekefhvjv1fzl4vcmitybtcwxg9dtzhogrfbbuy5e4rj8kkd",
+                  "amount": "100100000000000000000000"
+                }
+              ]
+            },
+            "lastTransferBlock": 5703
+          }
+        ]
+      }
+    }
+  }
+}
+```
+
+You can explore the different possible queries and entities to help you with GraphQL using the documentation drawer on the right.
+
+## Publish your project
+
+SubQuery is open-source, meaning you have the freedom to run it in the following three ways:
+
+- Locally on your own computer (or a cloud provider of your choosing), [view the instructions on how to run SubQuery locally](https://academy.subquery.network/run_publish/run.html)
+- By publishing it to our enterprise-level [Managed Service](https://managedservice.subquery.network), where we'll host your SubQuery project in production-ready services for mission-critical data with zero-downtime blue/green deployments. We even have a generous free tier. [Find out how](https://academy.subquery.network/run_publish/publish.html)
+- [Coming Soon] By publishing it to the decentralised [SubQuery Network](https://subquery.network/network), the most open, performant, reliable, and scalable data service for dApp developers. The SubQuery Network indexes and serves data to the global community in an incentivised and verifiable way
+
+## What Next?
+
+Take a look at some of our advanced features to take your project to the next level!
+
+- [**Multi-chain indexing support**](https://academy.subquery.network/build/multi-chain.html) - SubQuery allows you to index data from across different layer-1 networks into the same database, which allows you to query a single endpoint to get data for all supported networks.
+- [**Dynamic Data Sources**](https://academy.subquery.network/build/dynamicdatasources.html) - When you want to index factory contracts, for example on a DEX or generative NFT project.
+- [**Project Optimisation Advice**](https://academy.subquery.network/build/optimisation.html) - Some common tips on how to tweak your project to maximise performance.
+- [**GraphQL Subscriptions**](https://academy.subquery.network/run_publish/subscription.html) - Build more reactive front-end applications that subscribe to changes in your SubQuery project.
+
+## Need Help?
+ +The fastest way to get support is by [searching our documentation](https://academy.subquery.network), or by [joining our discord](https://discord.com/invite/subquery) and messaging us in the `#technical-support` channel. diff --git a/Avail/avail-turing-starter/docker-compose.yml b/Avail/avail-turing-starter/docker-compose.yml new file mode 100644 index 00000000..57a72876 --- /dev/null +++ b/Avail/avail-turing-starter/docker-compose.yml @@ -0,0 +1,66 @@ +version: "3" + +services: + postgres: + build: + context: . + dockerfile: ./docker/pg-Dockerfile + ports: + - 5432:5432 + volumes: + - .data/postgres:/var/lib/postgresql/data + environment: + POSTGRES_PASSWORD: postgres + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 5s + retries: 5 + + subquery-node: + image: subquerynetwork/subql-node-substrate:latest + depends_on: + "postgres": + condition: service_healthy + restart: unless-stopped + environment: + DB_USER: postgres + DB_PASS: postgres + DB_DATABASE: postgres + DB_HOST: postgres + DB_PORT: 5432 + volumes: + - ./:/app + command: + - ${SUB_COMMAND:-} # set SUB_COMMAND env variable to "test" to run tests + - -f=/app + - --db-schema=app + - --workers=4 + - --batch-size=30 + - --unfinalized-blocks=true + healthcheck: + test: ["CMD", "curl", "-f", "http://subquery-node:3000/ready"] + interval: 3s + timeout: 5s + retries: 10 + + graphql-engine: + image: subquerynetwork/subql-query:latest + ports: + - 3000:3000 + depends_on: + "postgres": + condition: service_healthy + "subquery-node": + condition: service_healthy + restart: always + environment: + DB_USER: postgres + DB_PASS: postgres + DB_DATABASE: postgres + DB_HOST: postgres + DB_PORT: 5432 + command: + - --name=app + - --playground + - --indexer=http://subquery-node:3000 diff --git a/Avail/avail-turing-starter/docker/load-extensions.sh b/Avail/avail-turing-starter/docker/load-extensions.sh new file mode 100644 index 00000000..6d33f863 --- /dev/null +++ b/Avail/avail-turing-starter/docker/load-extensions.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <=3.0.1", + }, + query: { + name: "@subql/query", + version: "*", + }, + }, + schema: { + file: "./schema.graphql", + }, + network: { + /* The genesis hash of the network (hash of block 0) */ + chainId: + "0xd3d2f3a3495dc597434a99d7d449ebad6616db45e4e4f178f31cc6fa14378b70", + /** + * These endpoint(s) should be public non-pruned archive node + * We recommend providing more than one endpoint for improved reliability, performance, and uptime + * Public nodes may be rate limited, which can affect indexing speed + * When developing your project we suggest getting a private API key + * If you use a rate limited endpoint, adjust the --batch-size and --workers parameters + * These settings can be found in your docker-compose.yaml, they will slow indexing but prevent your project being rate limited + */ + endpoint: ["wss://turing-rpc.avail.so/ws"], + chaintypes: { + file: "./types.json", + }, + }, + dataSources: [ + { + kind: SubstrateDatasourceKind.Runtime, + startBlock: 139671, + mapping: { + file: "./dist/index.js", + handlers: [ + { + kind: SubstrateHandlerKind.Event, + handler: "handleEvent", + filter: { + module: "balances", + method: "Transfer", + }, + }, + ], + }, + }, + ], +}; + +// Must set default to the project instance +export default project; diff --git a/Avail/avail-turing-starter/schema.graphql b/Avail/avail-turing-starter/schema.graphql new file mode 100644 index 00000000..d436bb62 --- /dev/null +++ 
b/Avail/avail-turing-starter/schema.graphql
@@ -0,0 +1,51 @@
+# To improve query performance, we strongly suggest adding indexes to any field that you plan to filter or sort by
+# Add the `@index` or `@index(unique: true)` annotation after any non-key field
+# https://academy.subquery.network/build/graphql.html#indexing-by-non-primary-key-field
+
+type Transfer @entity {
+  """
+  id field is always required and must look like this
+  """
+  id: ID!
+  """
+  Amount that is transferred
+  """
+  amount: BigInt!
+  """
+  The block height of the transfer
+  """
+  blockNumber: Int!
+  """
+  The date of the transfer
+  """
+  date: Date!
+  """
+  The account that transfers are made from
+  """
+  from: Account!
+  """
+  The account that transfers are made to
+  """
+  to: Account!
+}
+
+type Account @entity {
+  """
+  id field is always required and must look like this
+  """
+  id: ID!
+  """
+  The public key of this account (same across all Polkadot parachains)
+  """
+  publicKey: String!
+  """
+  The first block on which we see a transfer involving this account
+  """
+  firstTransferBlock: Int
+  """
+  The most recent block on which we see a transfer involving this account
+  """
+  lastTransferBlock: Int
+  sentTransfers: [Transfer] @derivedFrom(field: "from") # These are virtual properties to help us navigate to the correct foreign key of Transfer
+  recievedTransfers: [Transfer] @derivedFrom(field: "to") # These are virtual properties to help us navigate to the correct foreign key of Transfer
+}
diff --git a/Avail/avail-turing-starter/src/index.ts b/Avail/avail-turing-starter/src/index.ts
new file mode 100644
index 00000000..50916157
--- /dev/null
+++ b/Avail/avail-turing-starter/src/index.ts
@@ -0,0 +1,3 @@
+// Exports all handler functions
+export * from "./mappings/mappingHandlers";
+import "@polkadot/api-augment";
diff --git a/Avail/avail-turing-starter/src/mappings/mappingHandlers.ts b/Avail/avail-turing-starter/src/mappings/mappingHandlers.ts
new file mode 100644
index 00000000..40914131
--- /dev/null
+++ b/Avail/avail-turing-starter/src/mappings/mappingHandlers.ts
@@ -0,0 +1,67 @@
+import {
+  SubstrateExtrinsic,
+  SubstrateEvent,
+  SubstrateBlock,
+} from "@subql/types";
+import { Account, Transfer } from "../types";
+import { Balance } from "@polkadot/types/interfaces";
+import { decodeAddress } from "@polkadot/util-crypto";
+
+export async function handleBlock(block: SubstrateBlock): Promise<void> {
+  // Do something with each block handler here
+}
+
+export async function handleCall(extrinsic: SubstrateExtrinsic): Promise<void> {
+  // Do something with a call handler here
+}
+
+export async function handleEvent(event: SubstrateEvent): Promise<void> {
+  logger.info(
+    `New transfer event found at block ${event.block.block.header.number.toString()}`,
+  );
+
+  // Get data from the event
+  // The balances.transfer event has the following payload \[from, to, value\]
+  // logger.info(JSON.stringify(event));
+  const {
+    event: {
+      data: [from, to, amount],
+    },
+  } = event;
+
+  const blockNumber: number = event.block.block.header.number.toNumber();
+
+  const fromAccount = await checkAndGetAccount(from.toString(), blockNumber);
+  const toAccount = await checkAndGetAccount(to.toString(), blockNumber);
+
+  // Create the new transfer entity
+  const transfer = Transfer.create({
+    id: `${event.block.block.header.number.toNumber()}-${event.idx}`,
+    blockNumber,
+    date: event.block.timestamp,
+    fromId: fromAccount.id,
+    toId: toAccount.id,
+    amount: (amount as Balance).toBigInt(),
+  });
+
+  // Record this block as the most recent transfer for both accounts
+  fromAccount.lastTransferBlock = blockNumber;
+  toAccount.lastTransferBlock = blockNumber;
+
+  await Promise.all([fromAccount.save(), toAccount.save(), transfer.save()]);
+}
+
+async function checkAndGetAccount(
+  id: string,
+  blockNumber: number,
+): Promise<Account> {
+  let account = await Account.get(id.toLowerCase());
+  if (!account) {
+    // We couldn't find the account
+    account = Account.create({
+      id: id.toLowerCase(),
+      publicKey: decodeAddress(id).toString().toLowerCase(),
+      firstTransferBlock: blockNumber,
+    });
+  }
+  return account;
+}
diff --git a/Avail/avail-turing-starter/src/test/mappingHandlers.test.ts b/Avail/avail-turing-starter/src/test/mappingHandlers.test.ts
new file mode 100644
index 00000000..d3c8ed58
--- /dev/null
+++ b/Avail/avail-turing-starter/src/test/mappingHandlers.test.ts
@@ -0,0 +1,11 @@
+import { subqlTest } from "@subql/testing";
+
+// See https://academy.subquery.network/build/testing.html
+
+subqlTest(
+  "handleTransfer test", // Test name
+  191, // Block height to test at
+  [], // Dependent entities
+  [], // Expected entities
+  "handleEvent", // Handler name
+);
diff --git a/Avail/avail-turing-starter/tsconfig.json b/Avail/avail-turing-starter/tsconfig.json
new file mode 100644
index 00000000..3837c009
--- /dev/null
+++ b/Avail/avail-turing-starter/tsconfig.json
@@ -0,0 +1,20 @@
+{
+  "compilerOptions": {
+    "emitDecoratorMetadata": true,
+    "experimentalDecorators": true,
+    "esModuleInterop": true,
+    "declaration": true,
+    "importHelpers": true,
+    "resolveJsonModule": true,
+    "module": "commonjs",
+    "outDir": "dist",
+    "rootDir": "src",
+    "target": "es2017",
+    "strict": true
+  },
+  "include": [
+    "src/**/*",
+    "node_modules/@subql/types-core/dist/global.d.ts",
+    "node_modules/@subql/types/dist/global.d.ts"
+  ]
+}
diff --git a/Avail/avail-turing-starter/types.json b/Avail/avail-turing-starter/types.json
new file mode 100644
index 00000000..c5583026
--- /dev/null
+++ b/Avail/avail-turing-starter/types.json
@@ -0,0 +1,205 @@
+{
+  "types": {
+    "AppId": "Compact<u32>",
+    "DataLookupIndexItem": {
+      "appId": "AppId",
+      "start": "Compact<u32>"
+    },
+    "DataLookup": {
+      "size": "Compact<u32>",
+      "index": "Vec<DataLookupIndexItem>"
+    },
+    "KateCommitment": {
+      "rows": "Compact<u32>",
+      "cols": "Compact<u32>",
+      "commitment": "Vec<u8>",
+      "dataRoot": "H256"
+    },
+    "V1HeaderExtension": {
+      "appLookup": "DataLookup",
+      "commitment": "KateCommitment"
+    },
+    "V2HeaderExtension": {
+      "appLookup": "DataLookup",
+      "commitment": "KateCommitment"
+    },
+    "V3HeaderExtension": {
+      "appLookup": "DataLookup",
+      "commitment": "KateCommitment"
+    },
+    "HeaderExtension": {
+      "_enum": {
+        "V1": "V1HeaderExtension",
+        "V2": "V2HeaderExtension",
+        "V3": "V3HeaderExtension"
+      }
+    },
+    "DaHeader": {
+      "parentHash": "Hash",
+      "number": "Compact<u32>",
+      "stateRoot": "Hash",
+      "extrinsicsRoot": "Hash",
+      "digest": "Digest",
+      "extension": "HeaderExtension"
+    },
+    "Header": "DaHeader",
+    "CheckAppIdExtra": {
+      "appId": "AppId"
+    },
+    "CheckAppIdTypes": {},
+    "CheckAppId": {
+      "extra": "CheckAppIdExtra",
+      "types": "CheckAppIdTypes"
+    },
+    "BlockLengthColumns": "Compact<u32>",
+    "BlockLengthRows": "Compact<u32>",
+    "BlockLength": {
+      "max": "PerDispatchClass",
+      "cols": "BlockLengthColumns",
+      "rows": "BlockLengthRows",
+      "chunkSize": "Compact<u32>"
+    },
+    "PerDispatchClass": {
+      "normal": "u32",
+      "operational": "u32",
+      "mandatory": "u32"
+    },
+    "DataProof": {
+      "root": "H256",
+      "proof": "Vec<H256>",
+      "numberOfLeaves": "Compact<u32>",
+      "leaf_index": "Compact<u32>",
+      "leaf": "H256"
+    },
+    "DataProofV2": {
+      "dataRoot": "H256",
+      "blobRoot": "H256",
+      "bridgeRoot": "H256",
+      "proof": "Vec<H256>",
+      "numberOfLeaves": "Compact<u32>",
+      "leafIndex": "Compact<u32>",
+      "leaf": "H256"
+    },
+    "ProofResponse": {
+      "dataProof": "DataProofV2",
+      "message": "Option<Message>"
+    },
+    "Message": {
+      "messageType": "MessageType",
+      "from": "H256",
+      "to": "H256",
+      "originDomain": "u32",
+      "destinationDomain": "u32",
+      "data": "Vec<u8>",
+      "id": "u64"
+    },
+    "MessageType": {
+      "_enum": [
+        "ArbitraryMessage",
+        "FungibleToken"
+      ]
+    },
+    "Cell": {
+      "row": "u32",
+      "col": "u32"
+    }
+  },
+  "rpc": {
+    "kate": {
+      "blockLength": {
+        "description": "Get Block Length",
+        "params": [
+          {
+            "name": "at",
+            "type": "Hash",
+            "isOptional": true
+          }
+        ],
+        "type": "BlockLength"
+      },
+      "queryProof": {
+        "description": "Generate the kate proof for the given `cells`",
+        "params": [
+          {
+            "name": "cells",
+            "type": "Vec<Cell>"
+          },
+          {
+            "name": "at",
+            "type": "Hash",
+            "isOptional": true
+          }
+        ],
+        "type": "Vec<u8>"
+      },
+      "queryDataProof": {
+        "description": "Generate the data proof for the given `transaction_index`",
+        "params": [
+          {
+            "name": "transaction_index",
+            "type": "u32"
+          },
+          {
+            "name": "at",
+            "type": "Hash",
+            "isOptional": true
+          }
+        ],
+        "type": "DataProof"
+      },
+      "queryDataProofV2": {
+        "description": "Generate the data proof for the given `transaction_index`",
+        "params": [
+          {
+            "name": "transaction_index",
+            "type": "u32"
+          },
+          {
+            "name": "at",
+            "type": "Hash",
+            "isOptional": true
+          }
+        ],
+        "type": "ProofResponse"
+      },
+      "queryAppData": {
+        "description": "Fetches app data rows for the given app",
+        "params": [
+          {
+            "name": "app_id",
+            "type": "AppId"
+          },
+          {
+            "name": "at",
+            "type": "Hash",
+            "isOptional": true
+          }
+        ],
+        "type": "Vec<Option<Vec<u8>>>"
+      },
+      "queryRows": {
+        "description": "Query rows based on their indices",
+        "params": [
+          {
+            "name": "rows",
+            "type": "Vec<u32>"
+          },
+          {
+            "name": "at",
+            "type": "Hash",
+            "isOptional": true
+          }
+        ],
+        "type": "Vec<Vec<u8>>"
+      }
+    }
+  },
+  "signedExtensions": {
+    "CheckAppId": {
+      "extrinsic": {
+        "appId": "AppId"
+      },
+      "payload": {}
+    }
+  }
+}
\ No newline at end of file