diff --git a/.github/label-actions.yml b/.github/label-actions.yml index 0226a174c..ec2760e5a 100644 --- a/.github/label-actions.yml +++ b/.github/label-actions.yml @@ -13,6 +13,6 @@ solved: # Close the discussion close: true # Set a close reason - close-reason: 'resolved' + close-reason: "resolved" # Lock the discussion - lock: true \ No newline at end of file + lock: true diff --git a/.github/workflows/contentlayer.yml b/.github/workflows/contentlayer.yml index 3722b6d10..6371b2ead 100644 --- a/.github/workflows/contentlayer.yml +++ b/.github/workflows/contentlayer.yml @@ -1,8 +1,6 @@ name: Contentlayer Markdown Formatting on: - push: - branches: [main] pull_request: branches: [main] @@ -12,14 +10,20 @@ jobs: strategy: matrix: - node-version: [18.x] + node-version: [20] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 - name: use Node.js ${{ matrix.node-version }} uses: actions/setup-node@v3 with: node-version: ${{ matrix.node-version }} - - run: npm -g i yarn - - run: yarn install - - run: yarn contentlayer:build + cache: "pnpm" + - name: Install dependencies + run: pnpm install + - name: Build contentlayer + run: pnpm contentlayer:build diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 6571551b7..906dd536d 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -15,15 +15,19 @@ jobs: deploy-production: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - name: Install Vercel CLI and Yarn - run: npm install --global vercel@latest yarn + - uses: actions/checkout@v4 + - name: Install pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 + - name: Install Vercel CLI and pnpm + run: npm install --global vercel@latest - name: Pull Vercel Environment Information run: vercel pull --yes --environment=production --token=${{secrets.VERCEL_TOKEN }} - - name: Install dependencies via yarn - run: yarn install + - name: Install dependencies via pnpm + run: pnpm install # todo: if not content files changed, skip uploading # todo: only upload the changed files - name: Upload latest content to crowdin diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 12ce0bf7f..b9c8ad1dc 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -8,21 +8,28 @@ jobs: Format: name: Test formatting via prettier/lint... 
runs-on: ubuntu-latest + strategy: + matrix: + node-version: [20] steps: - name: Checkout repository - uses: actions/checkout@v2 - - name: Use Node.js node - uses: actions/setup-node@v2 + uses: actions/checkout@v4 + - name: Install pnpm + uses: pnpm/action-setup@v4 with: - node-version: "18" + version: 9 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + node-version: ${{ matrix.node-version }} + cache: "pnpm" - name: before_install run: echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf && sudo sysctl -p - - - name: installing yarn - run: yarn install + - name: Install dependencies + run: pnpm install - name: prettier:check - run: yarn prettier + run: pnpm prettier - name: lint - run: yarn lint + run: pnpm lint diff --git a/.github/workflows/label-actions.yml b/.github/workflows/label-actions.yml index 64d0a10e3..692cbb2e1 100644 --- a/.github/workflows/label-actions.yml +++ b/.github/workflows/label-actions.yml @@ -1,63 +1,16 @@ name: "Label Actions" -# env: -# GH_PERSONAL_ACCESS_TOKEN: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} - on: - pull_request_target: + issues: + types: [labeled, unlabeled] + pull_request: types: [labeled, unlabeled] - # issues: - # types: [labeled, unlabeled] - # discussion: - # types: [labeled, unlabeled] - -permissions: - contents: read - issues: write - pull-requests: write - discussions: write jobs: - # run the local prettier config on the PR - prettier: - if: contains(github.event.pull_request.labels.*.name, 'prettier') - runs-on: ubuntu-latest - - permissions: - # Give the default GITHUB_TOKEN write permission to commit and push the - # added or changed files to the repository. - contents: write - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - name: Use Node.js node - uses: actions/setup-node@v2 - with: - node-version: "18" - - - name: before_install - run: - echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf - && sudo sysctl -p - - # perform steps to modify as desired - - name: yarn install - run: yarn install - - name: yarn prettier:fix - run: yarn prettier:fix - - - name: Commit changes - uses: stefanzweifel/git-auto-commit-action@v5 - with: - commit_message: "chore: prettier" - - action: + label-actions: runs-on: ubuntu-latest steps: - - uses: dessant/label-actions@v4 + - uses: dessant/label-actions@v3 with: - # github-token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} - config-path: ".github/label-actions.yml" + github-token: ${{ secrets.GITHUB_TOKEN }} + config-path: .github/label-actions.yml diff --git a/.github/workflows/prettier-label-action.yml b/.github/workflows/prettier-label-action.yml new file mode 100644 index 000000000..ac7c8775b --- /dev/null +++ b/.github/workflows/prettier-label-action.yml @@ -0,0 +1,70 @@ +name: Prettier Formatting on Label + +# Explicitly set permissions +permissions: + contents: write + pull-requests: write + +on: + pull_request: + types: [labeled] + +jobs: + prettier-format: + # Only run if the 'prettier' label is added + if: ${{ contains(github.event.pull_request.labels.*.name, 'prettier') }} + runs-on: ubuntu-latest + steps: + # Checkout the PR branch + - uses: actions/checkout@v4 + with: + # Use the full PR head reference + ref: ${{ github.event.pull_request.head.ref }} + # Use the full repository name + repository: ${{ github.event.pull_request.head.repo.full_name }} + token: ${{ secrets.GITHUB_TOKEN }} + fetch-depth: 0 + + # Setup Node.js (required for Prettier) + 
- uses: actions/setup-node@v4 + with: + node-version: "lts/*" + + # Install Prettier + - name: Install Prettier + run: npm install --save-dev --save-exact prettier + + # Run Prettier using the repo's config + - name: Run Prettier + run: npx prettier --write . + + # Check if there are any changes + - name: Check for changes + id: check-changes + run: | + git diff --quiet || echo "changes=true" >> $GITHUB_OUTPUT + + # Commit and push changes if any + - name: Commit changes + if: steps.check-changes.outputs.changes == 'true' + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git add . + git commit -m "[chore] automatic prettier formatting" + git push origin HEAD:${{ github.event.pull_request.head.ref }} + + # Remove the label after formatting + # todo: label removal is flaky, disabled for now + # - name: Remove formatting label + # if: always() + # uses: actions/github-script@v7 + # with: + # github-token: ${{ secrets.GITHUB_TOKEN }} + # script: | + # github.rest.issues.removeLabel({ + # owner: context.repo.owner, + # repo: context.repo.repo, + # issue_number: context.issue.number, + # name: 'prettier' + # }) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 000000000..8b7e30a3e --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,37 @@ +name: Mark Stale Issues and PRs + +on: + schedule: + # Run daily at midnight + - cron: "0 0 * * *" + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + # Issues configuration + stale-issue-message: + "This issue has been automatically marked as stale because it has + not had recent activity. Remove stale label or comment or this will + be closed in 5 days." + stale-issue-label: "no-issue-activity" + days-before-issue-stale: 30 # Mark as stale after 30 days of inactivity + days-before-issue-close: 5 # Close 5 days after being marked stale + + # Pull Request configuration + stale-pr-message: + "This pull request has been automatically marked as stale because it + has not had recent activity. Remove stale label or comment or this + will be closed in 7 days." 
+ stale-pr-label: "no-pr-activity" + days-before-pr-stale: 21 # Mark PRs as stale after 21 days + days-before-pr-close: 7 # Close PRs 7 days after being marked stale + + # Exempt labels and other configurations + exempt-issue-labels: "bug,enhancement,in-progress,not-stale" + exempt-pr-labels: "work-in-progress,review-needed,not-stale" + + # Optional: Dry run to test configuration + dry-run: true diff --git a/.gitignore b/.gitignore index 21fa5828f..ac91c0ca7 100644 --- a/.gitignore +++ b/.gitignore @@ -104,4 +104,14 @@ typings/ package-lock.json # translations are stored in the `i18n` via crowdin -i18n \ No newline at end of file +i18n + + +# code-import +code/node_modules +code/package-lock.json +code/yarn.lock +code/pnpm-lock.yaml + +# vscode configuration +.vscode diff --git a/.husky/.gitignore b/.husky/.gitignore new file mode 100644 index 000000000..31354ec13 --- /dev/null +++ b/.husky/.gitignore @@ -0,0 +1 @@ +_ diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100755 index 000000000..2312dc587 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1 @@ +npx lint-staged diff --git a/.prettierignore b/.prettierignore index 3359ecdec..b134d689d 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1 +1,2 @@ -.github/*.md \ No newline at end of file +.github/*.md +CODEOWNERS \ No newline at end of file diff --git a/CODEOWNERS b/CODEOWNERS index 410750e42..157608479 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1,11 +1,12 @@ -# This is a comment. # Each line is a file pattern followed by one or more owners. # These global owners will be the default owners for everything in # the repo. Unless a later match takes precedence, will be requested for # review when someone opens a pull request. -* @nickfrosty +* @ZYJLiu # This list owns any file in the `/docs` directory in the root of # the repository and any of its subdirectories. # /docs/ @nickfrosty + +/docs/ @ZYJLiu diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3e596079c..bb8c7c0f7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -21,6 +21,9 @@ transparent as possible, whether it's: - publicly displayed via the UI of [solana.com](https://solana.com) (located in a different repo) - content translations are supported via Crowdin +- code blocks must use code-import for file snippets (via filesystem) +- code file should be [tests](https://nodejs.org/api/test.html) and should add + code ranges instead of whole test file ## Style guidelines @@ -45,9 +48,7 @@ In particular: and save the person reviewing your PR some time. We recommend [Grammarly](https://grammarly.com/). In [your Grammarly dictionary](https://account.grammarly.com/customize), you may - wish to add Solana-specific words like `lamport`, `blockhash`, etc. For VScode - users, there is a - [VScode extension for Grammarly](https://marketplace.visualstudio.com/items?itemName=znck.grammarly). + wish to add Solana-specific words like `lamport`, `blockhash`, etc. - Use US English rather than British English. Grammarly will catch this for you. - Use 'onchain' (not on-chain, definitely not smart contract) when referring to onchain apps. This comes from the Solana Foundation style guide, and is @@ -84,8 +85,8 @@ In particular: have `instruction handlers` that process `instructions`. Do not refer to [instruction handlers](https://solana.com/docs/terminology#instruction-handler) as instructions! The reason is simple: an instruction cannot process an - instruction. The `multiple` template in Anchor also calls the functions - `handler`. + instruction. 
The `multiple` template in Anchor also calls these functions + `handler`s. ### Code @@ -153,14 +154,14 @@ If your editor is not configured to auto-format on save using prettier, then you can run the following command to auto-format all files in your local repo/PR: ```shell -yarn prettier:fix +pnpm prettier:fix ``` You can also run the prettier check command to see which files do not follow the prettier formatting guidelines. ```shell -yarn prettier +pnpm prettier ``` #### Rust & Anchor @@ -185,7 +186,7 @@ yarn prettier ask them to run `anchor keys sync`. - Use the - [multiple files template](https://www.anchor-lang.com/docs/release-notes#multiple-files-template) + [multiple files template](https://www.anchor-lang.com/release-notes/0.29.0#multiple-files-template) to organize very large Anchor projects. ### Heading styles @@ -236,8 +237,8 @@ The heading above is invalid since it skips the `h3` heading (`###`) ### Links When linking to other developer content pages (i.e. docs, guides, etc) or -images, the markdown link should be use the absolute path to the file relative -to the repo root. Including the file extension. +images, the markdown link should use the absolute path to the file relative to +the repo root. Include the file extension. For example: `/docs/index.md` (good) vs `https://solana.com/docs` (bad) @@ -273,6 +274,52 @@ For images, you can use the path starting with `/public` like this: > links will be automatically adjusted to function on the website. Including > making the images viewable and removing `.md` file extensions. +### Code Blocks + +In addition to standard markdown "fenced" code blocks (i.e. using triple +backticks), the developer content repo requires the use of code-import for file +snippets. This ensures that code examples are always up-to-date with the actual +source files. + +#### Using code-import + +To use code-import, follow these steps: + +Ensure your code file is a test file located in the appropriate directory within +the repo. Use the following syntax to import code snippets: + +```javascript file="/path/to/your/file.ts#L1-L10,#L15-L20" + +``` + +This will import lines 1-10 and 15-20 from the specified file. + +Always use code ranges instead of importing whole files. This helps keep +examples concise and focused. + +#### Code-import Rules + +- The file path must start with a forward slash (/). +- You can specify multiple line ranges, separated by commas. +- Line ranges should be in ascending order and not overlap. +- Invalid ranges (e.g., #L4-L3) are not allowed. +- Line numbers start at 1, so #L0 is invalid. +- Trailing commas in the range specification are not allowed. + +Example of a valid code-import: + +```javascript file="/code/cookbook/wallets/check-public-key.ts#L1-L2,#L3-L18" + +``` + +Example of an invalid code-import: + +```javascript file=/code/cookbook/wallets/check-public-key.ts#L1-L2,#L3-L19,#L1-L3 + +``` + +This is invalid because the ranges are not in ascending order and overlap. 
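+To make these rules concrete, here is a minimal TypeScript sketch of the kind
+of validation they imply. This is illustrative only; the `parseLineRanges`
+helper is hypothetical and is not the repo's actual code-import
+implementation:
+
+```typescript
+// Parse a range spec like "#L1-L2,#L3-L18" and enforce the rules above.
+function parseLineRanges(spec: string): Array<[number, number]> {
+  if (spec.endsWith(",")) {
+    throw new Error("Trailing commas are not allowed");
+  }
+  const ranges: Array<[number, number]> = [];
+  let previousEnd = 0;
+  for (const part of spec.split(",")) {
+    const match = part.match(/^#L(\d+)-L(\d+)$/);
+    if (!match) throw new Error(`Malformed range: ${part}`);
+    const start = Number(match[1]);
+    const end = Number(match[2]);
+    if (start < 1) throw new Error("Line numbers start at 1, so #L0 is invalid");
+    if (end < start) throw new Error(`Invalid range: ${part}`);
+    if (start <= previousEnd) {
+      throw new Error("Ranges must be in ascending order and must not overlap");
+    }
+    ranges.push([start, end]);
+    previousEnd = end;
+  }
+  return ranges;
+}
+
+// parseLineRanges("#L1-L2,#L3-L18"); // ok
+// parseLineRanges("#L1-L2,#L3-L19,#L1-L3"); // throws: not ascending, overlaps
+```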
+
 ### Table of contents

 When a content page is rendered on solana.com, a table of contents will be
@@ -519,7 +566,7 @@ a list of available components
   content
 - [images](#images) - details about how to include images in a piece of content
 - [code blocks](#code-blocks) - additional functionality on top of standard
-  markdown code blocks
+  markdown code blocks, these support code file import from filesystem
 - [blockquote](#blockquote) - additional functionality on top of the standard
   HTML `blockquote` element
 - [Callout](#callout) - custom component used to render message to the reader
   in
@@ -952,16 +999,16 @@
 git clone https://github.com/solana-foundation/developer-content.git
 cd developer-content
 ```

-2. Install the dependencies via `yarn`:
+2. Install the dependencies via `pnpm`:

 ```shell
-yarn install
+pnpm install
 ```

 3. Run the developer content API locally:

 ```shell
-yarn dev
+pnpm dev
 ```

 > Note: The developer content API normally runs locally on port `3001`
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 137604e09..5f2c1f669 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -30,7 +30,7 @@ operations.
 Push all the English based content to the Crowdin platform to

 ```shell
-yarn crowdin:upload
+pnpm crowdin:upload
 ```

 ### Download the current translations
@@ -38,7 +38,7 @@ yarn crowdin:upload
 You can download the latest translations using the crowdin cli:

 ```shell
-yarn crowdin:download
+pnpm crowdin:download
 ```

 This will store all the translated content files inside the `i18n` directory,
@@ -53,3 +53,35 @@ translated content from Crowdin. It will first upload all the new content in
 the base language (i.e. when a page gets edited or newly created), then
 download all translations for all languages. The `deploy` action will then
 continue to build the content api normally.
+
+### Testing and fixing broken translation files
+
+Crowdin will often return altered and incorrectly formatted content files back
+when performing `crowdin download`. It may be helpful (and faster) to work with
+a single content file at a time by altering the content locally, uploading it
+to Crowdin, then downloading the Crowdin-altered version to see if the changes
+will actually work and not break the site.
+
+To upload a single content file to Crowdin, specify the source path (`-s` flag)
+and the translation path (`-t` flag). Be sure to include the `%locale%`
+wildcard in the translation path:
+
+```shell
+pnpm crowdin upload -s docs/intro/installation.md -t i18n/%locale%/docs/intro/installation.md
+```
+
+To download the Crowdin formatted content, download the entire locale using the
+`-l` flag and passing the locale short code. For example, downloading only the
+German (DE) translated content:
+
+```shell
+pnpm crowdin download -l de
+```
+
+It may be helpful to also run the prettier formatter on the translation files,
+since this is normally performed by a GitHub action on deployment to catch and
+fix many formatting issues that Crowdin causes due to altering content:
+
+```shell
+pnpm prettier:i18n
+```
diff --git a/README.md b/README.md
index 3ec8a498f..14bb5d91a 100644
--- a/README.md
+++ b/README.md
@@ -7,6 +7,22 @@
 These markdown based content posts are gathered from contributors from around
 the Solana ecosystem and displayed on
 [solana.com/developers](https://solana.com/developers) for all to learn from.

+## Setup Instructions
+
+This repository contains only the markdown files for content. The content is
+rendered through a separate UI repository.
+
+1. Clone this repository (developer-content)
+2. Clone the UI repository:
+   [solana-com](https://github.com/solana-foundation/solana-com)
+3. Run both repositories locally to view the rendered content
+
+In the solana-com repo, you must add the following `.env` variable:
+
+```
+NEXT_PUBLIC_DEVELOPER_CONTENT_API_URL="http://localhost:3001"
+```
+
 ## Structure

 There are a few primary types of Solana Developer content within this repo:
diff --git a/code/cookbook/wallets/check-public-key.ts b/code/cookbook/wallets/check-public-key.ts
new file mode 100644
index 000000000..f5b4c8392
--- /dev/null
+++ b/code/cookbook/wallets/check-public-key.ts
@@ -0,0 +1,19 @@
+import { PublicKey } from "@solana/web3.js";
+
+// Note that Keypair.generate() will always give a public key that is valid for users
+
+// Valid public key
+const key = new PublicKey("5oNDL3swdJJF1g9DzJiZ4ynHXgszjAEpUkxVYejchzrY");
+// Lies on the ed25519 curve and is suitable for users
+console.log(PublicKey.isOnCurve(key.toBytes()));
+
+// Valid public key
+const offCurveAddress = new PublicKey(
+  "4BJXYkfvg37zEmBbsacZjeQDpTNx91KppxFJxRqrz48e",
+);
+
+// Not on the ed25519 curve, therefore not suitable for users
+console.log(PublicKey.isOnCurve(offCurveAddress.toBytes()));
+
+// Not a valid public key
+const errorPubkey = new PublicKey("testPubkey");
diff --git a/code/package.json b/code/package.json
new file mode 100644
index 000000000..4429cdec8
--- /dev/null
+++ b/code/package.json
@@ -0,0 +1,15 @@
+{
+  "name": "code",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "@solana/web3.js": "^1.95.2"
+  }
+}
diff --git a/coder.ts b/coder.ts
new file mode 100644
index 000000000..ae53d4693
--- /dev/null
+++ b/coder.ts
@@ -0,0 +1,222 @@
+import { promises as fs } from "node:fs";
+import path from "node:path";
+import os from "node:os";
+import { unified } from "unified";
+import remarkParse from "remark-parse";
+import remarkStringify from "remark-stringify";
+import remarkFrontmatter from "remark-frontmatter";
+import { visit } from "unist-util-visit";
+import ignore, { type Ignore } from "ignore";
+import importCode from "./src/utils/code-import";
+import chokidar from "chokidar";
+
+let debugMode = false;
+
+const debug = (...args: string[]) => {
+  if (debugMode) {
+    console.log("[DEBUG]", ...args);
+  }
+};
+
+const hasCodeComponentWithFileMeta = async (
+  filePath: string,
+): Promise<boolean> => {
+  const content = await fs.readFile(filePath, "utf8");
+  let hasMatch = false;
+
+  const tree = unified().use(remarkParse).use(remarkFrontmatter).parse(content);
+
+  visit(tree, "code", node => {
+    if (node.meta?.includes("file=")) {
+      hasMatch = true;
+      return false; // Stop visiting
+    }
+  });
+
+  return hasMatch;
+};
+
+const getIgnore = async (directory: string): Promise<Ignore> => {
+  const ig = ignore();
+
+  try {
+    const gitignoreContent = await fs.readFile(
+      path.join(directory, ".gitignore"),
+      "utf8",
+    );
+    ig.add(gitignoreContent);
+    // ignore all dotfiles
+    ig.add([".*"]);
+    // ignore CONTRIBUTING.md because it mentions the code component example
+    ig.add("CONTRIBUTING.md");
+  } catch (error) {
+    // If .gitignore doesn't exist, just continue without it
+    if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
+      throw error;
+    }
+  }
+
+  return ig;
+};
+
+const getMarkdownAndMDXFiles = async (
+  directory: string,
+): Promise<string[]> => {
+  const ig = await getIgnore(directory);
+
+  const walkDir = async (dir: string): Promise<string[]> => {
+    const entries = await fs.readdir(dir, { withFileTypes: true });
+    const files = await Promise.all(
+      entries.map(async entry => {
+        const res = path.resolve(dir, entry.name);
+        const relativePath = path.relative(directory, res);
+
+        if (ig.ignores(relativePath) || entry.name === ".gitignore") {
+          debug(`Ignoring file: ${relativePath}`);
+          return [];
+        }
+
+        if (entry.isDirectory()) {
+          return walkDir(res);
+        }
+
+        if (
+          entry.isFile() &&
+          (entry.name.endsWith(".md") || entry.name.endsWith(".mdx"))
+        ) {
+          if (await hasCodeComponentWithFileMeta(res)) {
+            debug(`Found file with code component: ${relativePath}`);
+            return res;
+          }
+          debug(
+            `Skipping file (no code component with file meta): ${relativePath}`,
+          );
+        }
+
+        return [];
+      }),
+    );
+    return files.flat();
+  };
+
+  return walkDir(directory);
+};
+
+const processContent = async (
+  content: string,
+  filePath: string,
+): Promise<string> => {
+  try {
+    const file = await unified()
+      .use(remarkParse)
+      .use(remarkFrontmatter)
+      .use(importCode, {
+        preserveTrailingNewline: false,
+        removeRedundantIndentations: true,
+        rootDir: process.cwd(),
+      })
+      .use(remarkStringify, {
+        bullet: "-",
+        emphasis: "*",
+        fences: true,
+        listItemIndent: "one",
+        rule: "-",
+        ruleSpaces: false,
+        strong: "*",
+        tightDefinitions: true,
+      })
+      .process(content);
+    return String(file);
+  } catch (error) {
+    if ((error as NodeJS.ErrnoException).code === "ENOENT") {
+      throw new Error(
+        `File not found: ${(error as NodeJS.ErrnoException).path}`,
+      );
+    }
+    throw error;
+  }
+};
+
+const processFile = async (filePath: string): Promise<void> => {
+  try {
+    if (!(await hasCodeComponentWithFileMeta(filePath))) {
+      debug(`Skipping ${filePath}: No code component with file meta found.`);
+      return;
+    }
+
+    const originalContent = await fs.readFile(filePath, "utf8");
+    const processedContent = await processContent(originalContent, filePath);
+    if (originalContent !== processedContent) {
+      await fs.writeFile(filePath, processedContent);
+      console.log(`Updated: ${filePath}`);
+    } else {
+      debug(`No changes needed for: ${filePath}`);
+    }
+  } catch (error) {
+    console.error(`Error processing ${filePath}: ${(error as Error).message}`);
+  }
+};
+
+const processInChunks = async <T>(
+  items: T[],
+  processItem: (item: T) => Promise<void>,
+  chunkSize: number,
+): Promise<void> => {
+  for (let i = 0; i < items.length; i += chunkSize) {
+    const chunk = items.slice(i, i + chunkSize);
+    await Promise.all(chunk.map(processItem));
+  }
+};
+
+const watchFiles = async (directory: string): Promise<void> => {
+  const watcher = chokidar.watch(["**/*.md", "**/*.mdx"], {
+    ignored: [
+      "**.**",
+      /(^|[\/\\])\../,
+      "**/node_modules/**",
+      "**/.git/**",
+      ".gitignore",
+    ], // ignore dotfiles, node_modules, .git, and .gitignore
+    persistent: true,
+    cwd: directory,
+  });
+
+  console.log("Watch mode started. Waiting for file changes...");
+
+  watcher
+    .on("add", filePath => processFile(path.join(directory, filePath)))
+    .on("change", filePath => processFile(path.join(directory, filePath)))
+    .on("unlink", filePath => console.log(`File ${filePath} has been removed`));
+};
+
+const main = async (): Promise<void> => {
+  const filePath = process.argv[2];
+  const watchMode =
+    process.argv.includes("--watch") || process.argv.includes("-w");
+  debugMode = process.argv.includes("--debug") || process.argv.includes("-d");
+
+  if (debugMode) {
+    console.log("Debug mode enabled");
+  }
+
+  if (filePath && !watchMode && !debugMode) {
+    // Process single file
+    const absolutePath = path.resolve(process.cwd(), filePath);
+    console.log(`Processing single file: ${absolutePath}`);
+    await processFile(absolutePath);
+  } else if (watchMode) {
+    // Watch mode
+    await watchFiles(process.cwd());
+  } else {
+    // Process all files
+    const files = await getMarkdownAndMDXFiles(process.cwd());
+    const chunkSize = Math.max(1, Math.ceil(files.length / os.cpus().length));
+
+    console.log(`Processing ${files.length} files...`);
+    await processInChunks(files, processFile, chunkSize);
+  }
+
+  if (!watchMode) {
+    console.log("Sync process completed.");
+  }
+};
+
+main().catch(console.error);
diff --git a/content/cookbook/accounts/calculate-rent.md b/content/cookbook/accounts/calculate-rent.md
index 0ab617489..3d2fcdffe 100644
--- a/content/cookbook/accounts/calculate-rent.md
+++ b/content/cookbook/accounts/calculate-rent.md
@@ -2,26 +2,47 @@
 title: How to Calculate Account Creation Cost
 sidebarSortOrder: 2
 description:
-  "Every time you create an account, that creation costs a small amount of SOL.
-  Learn how to calculate how much an account costs at creation."
+  "Every time you create an account, that creation costs an amount of SOL. Learn
+  how to calculate how much an account costs at creation."
 ---

 Keeping accounts alive on Solana incurs a storage cost called rent. For the
 calculation, you need to consider the amount of data you intend to store in the
 account. Rent can be reclaimed in full if the account is closed.
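+
+As a quick sanity check, the Solana CLI performs the same calculation. This is
+a minimal sketch assuming you have the CLI installed; the exact figure depends
+on the cluster's current rent parameters:
+
+```shell
+# rent-exempt minimum for an account holding 1500 bytes of data
+solana rent 1500
+# Rent-exempt minimum: 0.01133088 SOL
+```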
+
+<Tabs groupId="language" items={['web3.js v2', 'web3.js v1']}>
+
+<Tab value="web3.js v2">
+
+```typescript filename="calculate-rent.ts"
+import { createSolanaRpc } from "@solana/web3.js";
+
+const rpc = createSolanaRpc("https://api.devnet.solana.com");
+// 1.5k bytes
+const space = 1500n;
+
+const lamports = await rpc.getMinimumBalanceForRentExemption(space).send();
+console.log("Minimum balance for rent exemption:", lamports);
+```
+
+</Tab>
+
+<Tab value="web3.js v1">
+
 ```typescript
 import { Connection, clusterApiUrl } from "@solana/web3.js";

-(async () => {
-  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
-
-  // length of data in bytes in the account to calculate rent for
-  const dataLength = 1500;
-  const rentExemptionAmount =
-    await connection.getMinimumBalanceForRentExemption(dataLength);
-  console.log({
-    rentExemptionAmount,
-  });
-})();
+const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
+
+// length of data in bytes in the account to calculate rent for
+const dataLength = 1500;
+const rentExemptionAmount =
+  await connection.getMinimumBalanceForRentExemption(dataLength);
+console.log({
+  rentExemptionAmount,
+});
 ```
+
+</Tab>
+
+</Tabs>
diff --git a/content/cookbook/accounts/close-account.md b/content/cookbook/accounts/close-account.md
index 76f289416..a9f03248d 100644
--- a/content/cookbook/accounts/close-account.md
+++ b/content/cookbook/accounts/close-account.md
@@ -37,8 +37,8 @@ fn process_instruction(
     .unwrap();

     **source_account_info.lamports.borrow_mut() = 0;
-    let mut source_data = source_account_info.data.borrow_mut();
-    source_data.fill(0);
+    source_account_info.assign(&system_program::ID);
+    source_account_info.realloc(0, false)?;

     Ok(())
 }
diff --git a/content/cookbook/accounts/create-account.md b/content/cookbook/accounts/create-account.md
index f1a570b64..9ed042bf3 100644
--- a/content/cookbook/accounts/create-account.md
+++ b/content/cookbook/accounts/create-account.md
@@ -7,10 +7,96 @@
 Creating an account requires using the System Program `createAccount`
-instruction. The Solana runtime will grant the owner of an account, access to
-write to its data or transfer lamports. When creating an account, we have to
-preallocate a fixed storage space in bytes (space) and enough lamports to cover
-the rent.
+instruction. The Solana runtime will grant the owner program of an account
+access to write to its data or transfer lamports. When creating an account, we
+have to preallocate a fixed storage space in bytes (space) and enough lamports
+to cover the rent.
+
+<Tabs groupId="language" items={['web3.js v2', 'web3.js v1']}>
+
+<Tab value="web3.js v2">
+
+```typescript filename="create-account.ts"
+import {
+  pipe,
+  createSolanaRpc,
+  appendTransactionMessageInstructions,
+  createSolanaRpcSubscriptions,
+  createTransactionMessage,
+  generateKeyPairSigner,
+  getSignatureFromTransaction,
+  sendAndConfirmTransactionFactory,
+  setTransactionMessageFeePayerSigner,
+  setTransactionMessageLifetimeUsingBlockhash,
+  signTransactionMessageWithSigners,
+} from "@solana/web3.js";
+import { getSetComputeUnitPriceInstruction } from "@solana-program/compute-budget";
+import {
+  getCreateAccountInstruction,
+  SYSTEM_PROGRAM_ADDRESS,
+} from "@solana-program/system";
+
+const rpc = createSolanaRpc("https://api.devnet.solana.com");
+const rpcSubscriptions = createSolanaRpcSubscriptions(
+  "wss://api.devnet.solana.com",
+);
+
+const sendAndConfirmTransaction = sendAndConfirmTransactionFactory({
+  rpc,
+  rpcSubscriptions,
+});
+
+const space = 0n; // any extra space in the account
+const rentLamports = await rpc.getMinimumBalanceForRentExemption(space).send();
+console.log("Minimum balance for rent exemption:", rentLamports);
+
+// todo: load your own signer with SOL
+const signer = await generateKeyPairSigner();
+
+// generate a new keypair and address to create
+const newAccountKeypair = await generateKeyPairSigner();
+console.log("New account address:", newAccountKeypair.address);
+
+const { value: latestBlockhash } = await rpc.getLatestBlockhash().send();
+
+const transactionMessage = pipe(
+  createTransactionMessage({ version: "legacy" }),
+  tx => setTransactionMessageFeePayerSigner(signer, tx),
+  tx => setTransactionMessageLifetimeUsingBlockhash(latestBlockhash, tx),
+  tx =>
+    appendTransactionMessageInstructions(
+      [
+        // add a priority fee
+        getSetComputeUnitPriceInstruction({
+          microLamports: 200_000,
+        }),
+        // create the new account
+        getCreateAccountInstruction({
+          lamports: rentLamports,
+          newAccount: newAccountKeypair,
+          payer: signer,
+          space: space,
+          // "wallet" accounts are owned by the system program
+          programAddress: SYSTEM_PROGRAM_ADDRESS,
+        }),
+      ],
+      tx,
+    ),
+);
+
+const signedTransaction =
+  await signTransactionMessageWithSigners(transactionMessage);
+const signature = getSignatureFromTransaction(signedTransaction);
+
+await sendAndConfirmTransaction(signedTransaction, {
+  commitment: "confirmed",
+});
+console.log("Signature:", signature);
+```
+
+</Tab>
+
+<Tab value="web3.js v1">

 ```typescript filename="create-account.ts"
 import {
@@ -23,40 +109,42 @@ import {
   LAMPORTS_PER_SOL,
 } from "@solana/web3.js";

-(async () => {
-  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
-  const fromPubkey = Keypair.generate();
-
-  // Airdrop SOL for transferring lamports to the created account
-  const airdropSignature = await connection.requestAirdrop(
-    fromPubkey.publicKey,
-    LAMPORTS_PER_SOL,
-  );
-  await connection.confirmTransaction(airdropSignature);
-
-  // amount of space to reserve for the account
-  const space = 0;
-
-  // Seed the created account with lamports for rent exemption
-  const rentExemptionAmount =
-    await connection.getMinimumBalanceForRentExemption(space);
-
-  const newAccountPubkey = Keypair.generate();
-  const createAccountParams = {
-    fromPubkey: fromPubkey.publicKey,
-    newAccountPubkey: newAccountPubkey.publicKey,
-    lamports: rentExemptionAmount,
-    space,
-    programId: SystemProgram.programId,
-  };
-
-  const createAccountTransaction = new Transaction().add(
-    SystemProgram.createAccount(createAccountParams),
-  );
-
-  await sendAndConfirmTransaction(connection, createAccountTransaction, [
-    fromPubkey,
-    newAccountPubkey,
-  ]);
-})();
+const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
+const fromPubkey = Keypair.generate();
+
+// Airdrop SOL for transferring lamports to the created account
+const airdropSignature = await connection.requestAirdrop(
+  fromPubkey.publicKey,
+  LAMPORTS_PER_SOL,
+);
+await connection.confirmTransaction(airdropSignature);
+
+// amount of space to reserve for the account
+const space = 0;
+
+// Seed the created account with lamports for rent exemption
+const rentExemptionAmount =
+  await connection.getMinimumBalanceForRentExemption(space);
+
+const newAccountPubkey = Keypair.generate();
+const createAccountParams = {
+  fromPubkey: fromPubkey.publicKey,
+  newAccountPubkey: newAccountPubkey.publicKey,
+  lamports: rentExemptionAmount,
+  space,
+  programId: SystemProgram.programId,
+};
+
+const createAccountTransaction = new Transaction().add(
+  SystemProgram.createAccount(createAccountParams),
+);
+
+await sendAndConfirmTransaction(connection, createAccountTransaction, [
+  fromPubkey,
+  newAccountPubkey,
+]);
 ```
+
+</Tab>
+
+</Tabs>
diff --git a/content/cookbook/accounts/get-account-balance.md b/content/cookbook/accounts/get-account-balance.md
index 98a077947..8720ef709 100644
--- a/content/cookbook/accounts/get-account-balance.md
+++ b/content/cookbook/accounts/get-account-balance.md
@@ -6,7 +6,32 @@
   that account balance on Solana."
 ---

-```typescript filename="get-account-balance.ts" {13}
+<Tabs groupId="language" items={['web3.js v2', 'web3.js v1']}>
+
+<Tab value="web3.js v2">
+
+```typescript filename="get-account-balance.ts"
+import { address, createSolanaRpc } from "@solana/web3.js";
+
+const rpc = createSolanaRpc("https://api.devnet.solana.com");
+const LAMPORTS_PER_SOL = 1_000_000_000; // 1 billion lamports per SOL
+
+const wallet = address("nicktrLHhYzLmoVbuZQzHUTicd2sfP571orwo9jfc8c");
+const { value: balance } = await rpc.getBalance(wallet).send();
+console.log(`Balance: ${Number(balance) / LAMPORTS_PER_SOL} SOL`);
+```
+
+> As of `v2.0.0`, developers can use the default configurations within the main
+> library (`@solana/web3.js`) or import any of its subpackages where better
+> composition or more granular control over the imports is desired. See
+> [Tree-Shakability](https://github.com/solana-labs/solana-web3.js?tab=readme-ov-file#tree-shakability)
+> for more information.
+
+</Tab>
+
+<Tab value="web3.js v1">
+
+```typescript filename="get-account-balance.ts"
 import {
   clusterApiUrl,
   Connection,
@@ -14,12 +39,13 @@
   LAMPORTS_PER_SOL,
 } from "@solana/web3.js";

-(async () => {
-  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
+const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
+const wallet = new PublicKey("nicktrLHhYzLmoVbuZQzHUTicd2sfP571orwo9jfc8c");

-  let wallet = new PublicKey("G2FAbFQPFa5qKXCetoFZQEvF9BVvCKbvUZvodpVidnoY");
-  console.log(
-    `${(await connection.getBalance(wallet)) / LAMPORTS_PER_SOL} SOL`,
-  );
-})();
+const balance = await connection.getBalance(wallet);
+console.log(`Balance: ${balance / LAMPORTS_PER_SOL} SOL`);
 ```
+
+</Tab>
+
+</Tabs>
diff --git a/content/cookbook/development/load-keypair-from-file.md b/content/cookbook/development/load-keypair-from-file.md
new file mode 100644
index 000000000..6c7083cbc
--- /dev/null
+++ b/content/cookbook/development/load-keypair-from-file.md
@@ -0,0 +1,86 @@
+---
+title: Load a local json file keypair
+sidebarSortOrder: 6
+description: "Learn how to load a keypair from a file."
+---
+
+When running your local project you probably want to use a keypair stored in a
+local JSON file. This can be very useful for all the cookbook examples as well.
+You can grind yourself a keypair using
+`solana-keygen grind --starts-with a23:1` and then load and use it in your
+projects with the `loadKeypairFromFile` function.
+
+```typescript filename="load-keypair-from-file.ts"
+import {
+  airdropFactory,
+  createKeyPairSignerFromBytes,
+  createSolanaRpc,
+  createSolanaRpcSubscriptions,
+  devnet,
+  generateKeyPair,
+  getAddressFromPublicKey,
+  KeyPairSigner,
+  lamports,
+} from "@solana/web3.js";
+import fs from "fs";
+import path from "path";
+import os from "os";
+
+// The new library takes a brand-new approach to Solana key pairs and addresses,
+// which will feel quite different from the classes PublicKey and Keypair from
+// version 1.x. All key operations now use the native Ed25519 implementation in
+// JavaScript's Web Crypto API.
+async function createKeypair() {
+  const newKeypair: CryptoKeyPair = await generateKeyPair();
+  const publicAddress = await getAddressFromPublicKey(newKeypair.publicKey);
+
+  console.log(`Public key: ${publicAddress}`);
+}
+
+export async function loadDefaultKeypair(): Promise<KeyPairSigner<string>> {
+  return await loadKeypairFromFile("~/.config/solana/id.json");
+}
+
+export async function loadDefaultKeypairWithAirdrop(
+  cluster: string,
+): Promise<KeyPairSigner<string>> {
+  const keypair = await loadDefaultKeypair();
+  const rpc = createSolanaRpc(devnet(`https://api.${cluster}.solana.com`));
+  const rpcSubscriptions = createSolanaRpcSubscriptions(
+    devnet(`wss://api.${cluster}.solana.com`),
+  );
+  try {
+    const result = await rpc.getBalance(keypair.address).send();
+
+    console.log(`Balance: ${result.value} lamports`);
+    if (result.value < lamports(500_000n)) {
+      console.log(`Balance low, requesting airdrop`);
+      const airdrop = airdropFactory({ rpc, rpcSubscriptions });
+      await airdrop({
+        commitment: "confirmed",
+        lamports: lamports(1_000_000_000n),
+        recipientAddress: keypair.address,
+      });
+    }
+  } catch (err) {
+    console.error("Error fetching balance:", err);
+  }
+  return keypair;
+}
+
+export async function loadKeypairFromFile(
+  filePath: string,
+): Promise<KeyPairSigner<string>> {
+  // This is here so you can also load the default keypair from the file system.
+  const resolvedPath = path.resolve(
+    filePath.startsWith("~") ? filePath.replace("~", os.homedir()) : filePath,
+  );
+  const loadedKeyBytes = Uint8Array.from(
+    JSON.parse(fs.readFileSync(resolvedPath, "utf8")),
+  );
+  // Here you can also set the second parameter to true in case you need to
+  // extract your private key.
+  const keypairSigner = await createKeyPairSignerFromBytes(loadedKeyBytes);
+  return keypairSigner;
+}
+
+createKeypair();
+```
diff --git a/content/cookbook/development/start-local-validator.md b/content/cookbook/development/start-local-validator.md
index bbd504a69..fbe780e71 100644
--- a/content/cookbook/development/start-local-validator.md
+++ b/content/cookbook/development/start-local-validator.md
@@ -8,7 +8,8 @@
 Testing your program code locally can be a lot more reliable than testing on
 devnet, and can help you test before trying it out on devnet.
You can setup your local-test-validator by installing the -[solana tool suite](/getting-started/installation.md#install-cli) and running +[Solana CLI tool suite](/docs/intro/installation.md) and running the following +command: ```shell solana-test-validator diff --git a/content/cookbook/development/subscribing-events.md b/content/cookbook/development/subscribing-events.md index 2fc52b7f2..8cc158a3d 100644 --- a/content/cookbook/development/subscribing-events.md +++ b/content/cookbook/development/subscribing-events.md @@ -9,7 +9,7 @@ Instead of pinging a typical HTTP endpoint at an interval to get frequent updates, you can instead receive those updates only when they happen. Solana's web3 -[`Connection`](https://solana-labs.github.io/solana-web3.js/classes/Connection.html) +[`Connection`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html) under the hood generates a websocket endpoint and registers a websocket client when you create a new `Connection` instance (see source code [here](https://github.com/solana-labs/solana-web3.js/blob/45923ca00e4cc1ed079d8e55ecbee83e5b4dc174/src/connection.ts#L2100)). @@ -18,9 +18,9 @@ The `Connection` class exposes pub/sub methods - they all start with `on`, like event emitters. When you call these listener methods, it registers a new subscription to the websocket client of that `Connection` instance. The example pub/sub method we use below is -[`onAccountChange`](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#onAccountChange). +[`onAccountChange`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html#onAccountChange). The callback will provide the updated state data through arguments (see -[`AccountChangeCallback`](https://solana-labs.github.io/solana-web3.js/modules.html#AccountChangeCallback) +[`AccountChangeCallback`](https://solana-labs.github.io/solana-web3.js/v1.x/types/AccountChangeCallback.html) as an example). ```typescript filename="subscribe-to-events.ts" @@ -35,7 +35,7 @@ import { clusterApiUrl, Connection, Keypair } from "@solana/web3.js"; // Register a callback to listen to the wallet (ws subscription) connection.onAccountChange( - wallet.publicKey(), + wallet.publicKey, (updatedAccountInfo, context) => console.log("Updated account info: ", updatedAccountInfo), "confirmed", diff --git a/content/cookbook/index.md b/content/cookbook/index.md index 284cbb903..d985bbdf9 100644 --- a/content/cookbook/index.md +++ b/content/cookbook/index.md @@ -1,13 +1,110 @@ --- sidebarSortOrder: 0 title: Solana Cookbook -seoTitle: Code examples for Solana development +seoTitle: Solana Cookbook - Code examples for Solana development description: - "The Solana cookbook is a collection of useful examples and references for - building on Solana" + "The Solana Cookbook is a collection of code snippets, useful examples, and + references for building on Solana." --- -The _Solana Cookbook_ is a developer resource that provides examples and +The Solana Cookbook is a developer resource that provides examples and references for building applications on Solana. Each example and reference will focus on specific aspects of Solana development while providing additional details and usage examples. + +## Development Guides + +Development guides help developers set up and interact with the Solana ecosystem +using various tools and clients. 
+ +| Guide | Client | Description | +| ------------------------------------------------------------------------------------------------------- | ---------- | ------------------------------------------ | +| [How to Start a Local Validator](/content/cookbook/development/start-local-validator.md) | Solana CLI | Set up and run a local Solana validator | +| [Connecting to a Solana Environment](/content/cookbook/development/connect-environment.md) | web3.js | Connect to different Solana networks | +| [Getting Test SOL](/content/cookbook/development/test-sol.md) | web3.js | Obtain SOL tokens for testing | +| [Subscribing to Events](/content/cookbook/development/subscribing-events.md) | web3.js | Listen to Solana program events | +| [Using Mainnet Accounts and Programs](/content/cookbook/development/using-mainnet-accounts-programs.md) | Solana CLI | Work with production accounts and programs | + +## Wallet Management + +Learn how to create, restore, and manage Solana wallets using various tools and +libraries. + +| Guide | Client | Description | +| ---------------------------------------------------------------------------------------------- | -------------- | ----------------------------------- | +| [How to Create a Keypair](/content/cookbook/wallets/create-keypair.md) | web3.js | Generate new Solana keypairs | +| [How to Restore a Keypair](/content/cookbook/wallets/restore-keypair.md) | web3.js | Recover existing keypairs | +| [How to Verify a Keypair](/content/cookbook/wallets/verify-keypair.md) | web3.js | Validate keypair authenticity | +| [How to Validate a Public Key](/content/cookbook/wallets/check-publickey.md) | web3.js | Check public key validity | +| [How to Generate Mnemonics for Keypairs](/content/cookbook/wallets/generate-mnemonic.md) | bip39 | Create seed phrases | +| [How to Restore a Keypair from a Mnemonic](/content/cookbook/wallets/restore-from-mnemonic.md) | web3.js, bip39 | Recover keypairs using seed phrases | +| [How to Generate a Vanity Address](/content/cookbook/wallets/generate-vanity-address.md) | Solana CLI | Create custom addresses | +| [How to Sign and Verify a Message](/content/cookbook/wallets/sign-message.md) | web3.js | Message signing and verification | +| [How to Connect a Wallet with React](/content/cookbook/wallets/connect-wallet-react.md) | React, web3.js | Integrate wallets in React apps | + +## Transaction Operations + +Explore various transaction-related operations on the Solana blockchain. 
+ +| Guide | Client | Description | +| ------------------------------------------------------------------------------------------------ | ------- | ------------------------------ | +| [How to Send SOL](/content/cookbook/transactions/send-sol.md) | web3.js | Transfer SOL between accounts | +| [How to Send Tokens](/content/cookbook/transactions/send-tokens.md) | web3.js | Transfer SPL tokens | +| [How to Calculate Transaction Cost](/content/cookbook/transactions/calculate-cost.md) | web3.js | Estimate transaction fees | +| [How to Add a Memo to a Transaction](/content/cookbook/transactions/add-memo.md) | web3.js | Include memos in transactions | +| [How to Add Priority Fees to a Transaction](/content/cookbook/transactions/add-priority-fees.md) | web3.js | Set transaction priorities | +| [How to Optimize Compute Requested](/content/cookbook/transactions/optimize-compute.md) | web3.js | Improve transaction efficiency | +| [Offline Transactions](/content/cookbook/transactions/offline-transactions.md) | web3.js | Handle offline operations | + +## Account Management + +Learn how to manage Solana accounts effectively. + +| Guide | Client | Description | +| -------------------------------------------------------------------------------------- | ------------- | -------------------------- | +| [How to Create an Account](/content/cookbook/accounts/create-account.md) | web3.js | Create new Solana accounts | +| [How to Calculate Account Creation Cost](/content/cookbook/accounts/calculate-rent.md) | web3.js | Estimate account costs | +| [How to Create a PDA's Account](/content/cookbook/accounts/create-pda-account.md) | web3.js, Rust | Work with PDAs | +| [How to Sign with a PDA's Account](/content/cookbook/accounts/sign-with-pda.md) | Rust | PDA signing operations | +| [How to Close an Account](/content/cookbook/accounts/close-account.md) | Rust | Remove accounts | +| [How to Get Account Balance](/content/cookbook/accounts/get-account-balance.md) | web3.js | Check account balances | + +## Program Development + +Develop Solana programs with these comprehensive guides. + +| Guide | Client | Description | +| -------------------------------------------------------------------------------------------- | ------ | ---------------------------- | +| [How to Transfer SOL in a Solana Program](/content/cookbook/programs/transfer-sol.md) | Rust | Program-based SOL transfers | +| [How to Get Clock in a Program](/content/cookbook/programs/clock.md) | Rust | Access program clock | +| [How to Change Account Size](/content/cookbook/programs/change-account-size.md) | Rust | Modify account sizes | +| [How to Do Cross Program Invocation](/content/cookbook/programs/cross-program-invocation.md) | Rust | CPI operations | +| [How to Create a Program Derived Address](/content/cookbook/programs/create-pda.md) | Rust | Generate PDAs | +| [How to Read Accounts in a Program](/content/cookbook/programs/read-accounts.md) | Rust | Account data access | +| [Reading Multiple Instructions](/content/cookbook/programs/read-multiple-instructions.md) | Rust | Handle multiple instructions | +| [How to Verify Accounts in a Solana Program](/content/cookbook/programs/verify-accounts.md) | Rust | Account verification | + +## Token Operations + +Comprehensive guides for working with tokens on Solana. 
+ +| Guide | Client | Description | +| --------------------------------------------------------------------------------------------------------- | ------------- | ------------------------------ | +| [How to Create a Token](/content/cookbook/tokens/create-mint-account.md) | web3.js | Create new tokens | +| [How to Get a Token Mint](/content/cookbook/tokens/get-token-mint.md) | web3.js | Access token mints | +| [How to Create a Token Account](/content/cookbook/tokens/create-token-account.md) | web3.js | Set up token accounts | +| [How to Get a Token Account](/content/cookbook/tokens/get-token-account.md) | web3.js | Retrieve token accounts | +| [How to Get a Token Account's Balance](/content/cookbook/tokens/get-token-balance.md) | web3.js, rust | Check token balances | +| [How to Mint Tokens](/content/cookbook/tokens/mint-tokens.md) | web3.js | Create new tokens | +| [How to Transfer Tokens](/content/cookbook/tokens/transfer-tokens.md) | web3.js | Move tokens between accounts | +| [How to Burn Tokens](/content/cookbook/tokens/burn-tokens.md) | web3.js | Remove tokens from circulation | +| [How to Close Token Accounts](/content/cookbook/tokens/close-token-accounts.md) | web3.js | Clean up token accounts | +| [How to Set Authority on Token Accounts or Mints](/content/cookbook/tokens/set-update-token-authority.md) | web3.js | Manage token permissions | +| [How to Delegate Token Accounts](/content/cookbook/tokens/approve-token-delegate.md) | web3.js | Set up token delegation | +| [How to Revoke a Token Delegate](/content/cookbook/tokens/revoke-token-delegate.md) | web3.js | Remove delegates | +| [How to Use Wrapped SOL](/content/cookbook/tokens/manage-wrapped-sol.md) | web3.js | Work with wrapped SOL | +| [How to Get All Token Accounts by Authority](/content/cookbook/tokens/get-all-token-accounts.md) | web3.js | List token accounts | +| [How to Create an NFT](/content/cookbook/tokens/create-nft.md) | web3.js | Mint NFTs | +| [How to Fetch the NFT Metadata](/content/cookbook/tokens/fetch-nft-metadata.md) | Javascript | Fetch NFT metadata | +| [How to Get the Owner of an NFT](/content/cookbook/tokens/get-nft-owner.md) | web3.js | Find NFT ownership | +| [How to Get All NFTs from a Wallet](/content/cookbook/tokens/fetch-all-nfts.md) | web3.js | List wallet NFTs | diff --git a/content/cookbook/programs/create-pda.md b/content/cookbook/programs/create-pda.md index a991fd856..c8e1be78b 100644 --- a/content/cookbook/programs/create-pda.md +++ b/content/cookbook/programs/create-pda.md @@ -128,7 +128,7 @@ const PAYER_KEYPAIR = Keypair.generate(); "6eW5nnSosr2LpkUGCdznsjRGDhVb26tLmiM1P8RV1QQp", ); - // Airdop to Payer + // Airdrop to Payer await connection.confirmTransaction( { blockhash: latestBlockHash.blockhash, diff --git a/content/cookbook/tokens/approve-token-delegate.md b/content/cookbook/tokens/approve-token-delegate.md index f2697b028..b64d457b1 100644 --- a/content/cookbook/tokens/approve-token-delegate.md +++ b/content/cookbook/tokens/approve-token-delegate.md @@ -62,7 +62,7 @@ import bs58 from "bs58"; tokenAccountPubkey, // token account randomGuy.publicKey, // delegate alice, // owner of token account - 1e8, // amount, if your deciamls is 8, 10^8 for 1 token + 1e8, // amount, if your decimals is 8, 10^8 for 1 token 8, // decimals ); console.log(`txhash: ${txhash}`); @@ -77,7 +77,7 @@ import bs58 from "bs58"; mintPubkey, // mint randomGuy.publicKey, // delegate alice.publicKey, // owner of token account - 1e8, // amount, if your deciamls is 8, 10^8 for 1 token + 1e8, // amount, if your 
decimals is 8, 10^8 for 1 token 8, // decimals ), ); diff --git a/content/cookbook/tokens/burn-tokens.md b/content/cookbook/tokens/burn-tokens.md index e5e997939..486b297af 100644 --- a/content/cookbook/tokens/burn-tokens.md +++ b/content/cookbook/tokens/burn-tokens.md @@ -52,7 +52,7 @@ import bs58 from "bs58"; tokenAccountPubkey, // token account mintPubkey, // mint alice, // owner - 1e8, // amount, if your deciamls is 8, 10^8 for 1 token + 1e8, // amount, if your decimals is 8, 10^8 for 1 token 8, ); console.log(`txhash: ${txhash}`); @@ -67,7 +67,7 @@ import bs58 from "bs58"; tokenAccountPubkey, // token account mintPubkey, // mint alice.publicKey, // owner of token account - 1e8, // amount, if your deciamls is 8, 10^8 for 1 token + 1e8, // amount, if your decimals is 8, 10^8 for 1 token 8, // decimals ), ); diff --git a/content/cookbook/tokens/create-mint-account.md b/content/cookbook/tokens/create-mint-account.md index fe9e8156e..a65119321 100644 --- a/content/cookbook/tokens/create-mint-account.md +++ b/content/cookbook/tokens/create-mint-account.md @@ -2,6 +2,8 @@ title: How to Create a Token sidebarSortOrder: 1 description: "Learn how to create tokens on Solana." +altRoutes: + - /developers/cookbook/tokens --- Creating tokens is done by creating what is called a "mint account". This mint @@ -46,7 +48,7 @@ import bs58 from "bs58"; // 1) use build-in function let mintPubkey = await createMint( - connection, // conneciton + connection, // connection feePayer, // fee payer alice.publicKey, // mint authority alice.publicKey, // freeze authority (you can use `null` to disable it. when you disable it, you can't turn it on again) diff --git a/content/cookbook/tokens/create-nft.md b/content/cookbook/tokens/create-nft.md new file mode 100644 index 000000000..06f2d5666 --- /dev/null +++ b/content/cookbook/tokens/create-nft.md @@ -0,0 +1,184 @@ +--- +title: How to create an NFT +sidebarSortOrder: 15 +description: "Learn how to create an NFT on Solana, using Arweave and Metaplex." +--- + +To create an NFT you have to: + +1. Upload the image to IPFS like Arweave +2. Upload the JSON metadata to Arweave or similar storage service. +3. Call metaplex to create an account for the NFT + +### Upload to Arweave + +```typescript filename="upload-to-arweave.ts" +import fs from "node:fs"; +import Arweave from "arweave"; + +(async () => { + const arweave = Arweave.init({ + host: "localhost", + port: 1984, + protocol: "http", + timeout: 20000, + logging: false, + }); + + const host = arweave.getConfig().api.host; + const port = arweave.getConfig().api.port; + const protocol = arweave.getConfig().api.protocol; + + // Upload image to Arweave + const data = fs.readFileSync("./code/nfts/upload-arweave/lowres-dog.png"); + + const transaction = await arweave.createTransaction({ + data: data, + }); + + transaction.addTag("Content-Type", "image/png"); + + // Instead of generating a new wallet, you can use an existing one from your file system + // useful in production environments + // const wallet = JSON.parse(fs.readFileSync("./code/nfts/upload-arweave/wallet.json", "utf-8")) + const wallet = await arweave.wallets.generate(); + const address = await arweave.wallets.getAddress(wallet); + console.log("address:, ", address); + + await arweave.api.get(`/mint/${encodeURI(addr)}/10000000000000000`); + await arweave.transactions.sign(transaction, wallet); + + const response = await arweave.transactions.post(transaction); + console.log(response); + + const id = transaction.id; + const imageUrl = id ? 
`${protocol}://${host}:${port}/${id}` : null; + console.log("imageUrl", imageUrl); + + // Upload metadata to Arweave + + const metadata = { + name: "Custom NFT #1", + symbol: "CNFT", + description: "A description about my custom NFT #1", + seller_fee_basis_points: 500, + external_url: "https://www.customnft.com/", + attributes: [ + { + trait_type: "NFT type", + value: "Custom", + }, + ], + collection: { + name: "Test Collection", + family: "Custom NFTs", + }, + properties: { + files: [ + { + uri: imageUrl, + type: "image/png", + }, + ], + category: "image", + maxSupply: 0, + creators: [ + { + address: "CBBUMHRmbVUck99mTCip5sHP16kzGj3QTYB8K3XxwmQx", + share: 100, + }, + ], + }, + image: imageUrl, + }; + + const metadataString = JSON.stringify(metadata); + + const metadataTransaction = await arweave.createTransaction({ + data: metadataString, + }); + + metadataTransaction.addTag("Content-Type", "application/json"); + + await arweave.transactions.sign(metadataTransaction, wallet); + + console.log("metadata txid", metadataTransaction.id); + + const txnResult = await arweave.transactions.post(metadataTransaction); + + console.log(txnResult); +})(); +``` + +### Mint the NFT + +```typescript filename="mint-nft.ts" +import { createUmi } from "@metaplex-foundation/umi-bundle-defaults"; +import { + generateSigner, + percentAmount, + keypairIdentity, +} from "@metaplex-foundation/umi"; +import { clusterApiUrl } from "@solana/web3.js"; +import { + createNft, + fetchDigitalAsset, + mplTokenMetadata, +} from "@metaplex-foundation/mpl-token-metadata"; +import "dotenv/config"; + +(async () => { + try { + console.log("Loading keypair from environment..."); + const privateKey = JSON.parse(process.env.SOLANA_PRIVATE_KEY || "[]"); + if (privateKey.length === 0) { + throw new Error("SOLANA_PRIVATE_KEY is not set in .env file"); + } + + console.log("Creating Umi instance..."); + const umi = createUmi(clusterApiUrl("devnet")); + + const keypair = umi.eddsa.createKeypairFromSecretKey( + new Uint8Array(privateKey), + ); + + // Use keypairIdentity to set the keypair as the signer + const signer = keypairIdentity(keypair); + umi.use(signer); + umi.use(mplTokenMetadata()); + + console.log("Keypair loaded. Public key:", keypair.publicKey); + + console.log("Generating new mint address..."); + const mint = generateSigner(umi); + + console.log("Creating NFT..."); + const { signature } = await createNft(umi, { + mint, + name: "My NFT", + // Replace this with your Arweave metadata URI + uri: "https://ffaaqinzhkt4ukhbohixfliubnvpjgyedi3f2iccrq4efh3s.arweave.net/KUAIIbk6p8oo4XHRcq0U__C2r0mwQaNl0gQow4Qp9yk", + maxSupply: 1, + sellerFeeBasisPoints: percentAmount(0), + creators: [ + { + address: keypair.publicKey, + share: 100, + verified: true, + }, + ], + }).sendAndConfirm(umi); + + console.log("NFT created successfully!"); + console.log("Mint address:", mint.publicKey); + console.log("Transaction signature:", signature); + + console.log("Fetching digital asset..."); + const asset = await fetchDigitalAsset(umi, mint.publicKey); + console.log("Digital Asset:", asset); + } catch (error) { + console.error("Error:", error); + console.error("Stack trace:", error.stack); + } +})(); +``` diff --git a/content/cookbook/tokens/fetch-all-nfts.md b/content/cookbook/tokens/fetch-all-nfts.md new file mode 100644 index 000000000..9c07bf8a7 --- /dev/null +++ b/content/cookbook/tokens/fetch-all-nfts.md @@ -0,0 +1,50 @@ +--- +title: How to get all NFTs from a wallet? 
+sidebarSortOrder: 18
+description:
+  "Learn how to fetch all non-fungible tokens (NFTs) from a wallet on Solana."
+---
+
+```typescript filename="get-nfts-by-wallet.ts"
+import { createUmi } from "@metaplex-foundation/umi-bundle-defaults";
+import { publicKey } from "@metaplex-foundation/umi";
+import { fetchAllDigitalAssetWithTokenByOwner } from "@metaplex-foundation/mpl-token-metadata";
+import { clusterApiUrl } from "@solana/web3.js";
+
+BigInt.prototype.toJSON = function () {
+  return this.toString();
+};
+
+(async () => {
+  try {
+    // Create a UMI instance
+    const umi = createUmi(clusterApiUrl("devnet"));
+
+    // The owner's public key
+    const ownerPublicKey = publicKey(
+      "2R4bHmSBHkHAskerTHE6GE1Fxbn31kaD5gHqpsPySVd7",
+    );
+
+    console.log("Fetching NFTs...");
+    const allNFTs = await fetchAllDigitalAssetWithTokenByOwner(
+      umi,
+      ownerPublicKey,
+    );
+
+    console.log(`Found ${allNFTs.length} NFTs for the owner:`);
+    allNFTs.forEach((nft, index) => {
+      console.log(`\nNFT #${index + 1}:`);
+      console.log("Mint Address:", nft.publicKey);
+      console.log("Name:", nft.metadata.name);
+      console.log("Symbol:", nft.metadata.symbol);
+      console.log("URI:", nft.metadata.uri);
+    });
+
+    // If you need the full NFT data
+    console.log("\nFull NFT data:");
+    console.log(JSON.stringify(allNFTs, null, 2));
+  } catch (error) {
+    console.error("Error:", error);
+  }
+})();
+```
diff --git a/content/cookbook/tokens/fetch-nft-metadata.md b/content/cookbook/tokens/fetch-nft-metadata.md
new file mode 100644
index 000000000..2987d7792
--- /dev/null
+++ b/content/cookbook/tokens/fetch-nft-metadata.md
@@ -0,0 +1,59 @@
+---
+title: How to Fetch the NFT Metadata
+sidebarSortOrder: 16
+description:
+  "Learn how to fetch the metadata of a non-fungible token (NFT) on Solana."
+---
+
+```typescript filename="get-nft-metadata.ts"
+import { createUmi } from "@metaplex-foundation/umi-bundle-defaults";
+import {
+  createSignerFromKeypair,
+  generateSigner,
+  publicKey,
+  signerIdentity,
+} from "@metaplex-foundation/umi";
+import {
+  fetchDigitalAsset,
+  mplTokenMetadata,
+} from "@metaplex-foundation/mpl-token-metadata";
+
+(async () => {
+  try {
+    // Create a UMI instance
+    const umi = createUmi("https://api.mainnet-beta.solana.com");
+
+    // Use the mplTokenMetadata plugin
+    umi.use(mplTokenMetadata());
+
+    // Generate a new keypair (you can replace this with your own keypair if needed)
+    const keypair = generateSigner(umi);
+    umi.use(signerIdentity(createSignerFromKeypair(umi, keypair)));
+
+    // The mint address of the NFT you want to fetch
+    const mintAddress = publicKey(
+      "Ay1U9DWphDgc7hq58Yj1yHabt91zTzvV2YJbAWkPNbaK",
+    );
+
+    console.log("Fetching NFT metadata...");
+    const asset = await fetchDigitalAsset(umi, mintAddress);
+
+    console.log("NFT Metadata:");
+
+    // If you want to access specific metadata fields:
+    console.log("\nName:", asset.metadata.name);
+    console.log("Symbol:", asset.metadata.symbol);
+    console.log("URI:", asset.metadata.uri);
+
+    // Fetch and log the JSON metadata
+    if (asset.metadata.uri) {
+      const response = await fetch(asset.metadata.uri);
+      const jsonMetadata = await response.json();
+      console.log("\nJSON Metadata:");
+      console.log(JSON.stringify(jsonMetadata, null, 2));
+    }
+  } catch (error) {
+    console.error("Error:", error);
+  }
+})();
+```
diff --git a/content/cookbook/tokens/get-nft-owner.md b/content/cookbook/tokens/get-nft-owner.md
new file mode 100644
index 000000000..a8d15d134
--- /dev/null
+++ b/content/cookbook/tokens/get-nft-owner.md
@@ -0,0 +1,35 @@
+---
+title: How to get the owner of an NFT
+sidebarSortOrder: 17
+description:
+  "Learn how to get the owner of a non-fungible token (NFT) on Solana."
+---
+
+If you have the mint key of an NFT, you can find its current owner by
+looking at the largest token account for that mint key.
+
+Remember that NFTs have a supply of 1, and they are indivisible, meaning that
+only one token account will hold that token at any point in time, whilst all
+other token accounts for that mint key will have a balance of 0.
+
+Once the largest token account is identified, we can retrieve its owner.
+
+```typescript filename="get-nft-owner.ts"
+import { Connection, PublicKey } from "@solana/web3.js";
+
+(async () => {
+  const connection = new Connection("https://api.mainnet-beta.solana.com");
+  const tokenMint = "9ARngHhVaCtH5JFieRdSS5Y8cdZk2TMF4tfGSWFB9iSK";
+
+  const largestAccounts = await connection.getTokenLargestAccounts(
+    new PublicKey(tokenMint),
+  );
+  const largestAccountInfo = await connection.getParsedAccountInfo(
+    largestAccounts.value[0].address,
+  );
+  console.log(largestAccountInfo?.value?.data);
+
+  const owner = largestAccountInfo?.value?.data?.parsed.info.owner;
+  console.log("NFT owner:", owner);
+})();
+```
diff --git a/content/cookbook/tokens/manage-wrapped-sol.md b/content/cookbook/tokens/manage-wrapped-sol.md
index acb5fc944..b1dc14fb1 100644
--- a/content/cookbook/tokens/manage-wrapped-sol.md
+++ b/content/cookbook/tokens/manage-wrapped-sol.md
@@ -11,9 +11,9 @@ and creating token accounts specifically on the `NATIVE_MINT` address.
 
 ## Create Token Account
 
-Like
-[Create Token Account](#https://solana.com/developers/cookbook/tokens/create-token-accounts)
-but replace mint with `NATIVE_MINT`
+Like creating
+[SPL token accounts](/content/cookbook/tokens/create-token-account.md) but
+replace the mint with `NATIVE_MINT`
 
 ```js
 import { NATIVE_MINT } from "@solana/spl-token";
diff --git a/content/cookbook/tokens/set-update-token-authority.md b/content/cookbook/tokens/set-update-token-authority.md
index d7ca156e1..032b64358 100644
--- a/content/cookbook/tokens/set-update-token-authority.md
+++ b/content/cookbook/tokens/set-update-token-authority.md
@@ -83,7 +83,7 @@ import bs58 from "bs58";
 {
   let tx = new Transaction().add(
     createSetAuthorityInstruction(
-      mintPubkey, // mint acocunt || token account
+      mintPubkey, // mint account || token account
       alice.publicKey, // current auth
       AuthorityType.MintTokens, // authority type
       feePayer.publicKey, // new auth (you can pass `null` to close it)
diff --git a/content/cookbook/tokens/transfer-tokens.md b/content/cookbook/tokens/transfer-tokens.md
index 87a046660..29e120d34 100644
--- a/content/cookbook/tokens/transfer-tokens.md
+++ b/content/cookbook/tokens/transfer-tokens.md
@@ -60,7 +60,7 @@ import bs58 from "bs58";
     mintPubkey, // mint
     tokenAccountYPubkey, // to (should be a token account)
     alice, // from's owner
-    1e8, // amount, if your deciamls is 8, send 10^8 for 1 token
+    1e8, // amount, if your decimals is 8, send 10^8 for 1 token
     8, // decimals
   );
   console.log(`txhash: ${txhash}`);
@@ -76,7 +76,7 @@ import bs58 from "bs58";
       mintPubkey, // mint
       tokenAccountYPubkey, // to (should be a token account)
       alice.publicKey, // from's owner
-      1e8, // amount, if your deciamls is 8, send 10^8 for 1 token
+      1e8, // amount, if your decimals is 8, send 10^8 for 1 token
       8, // decimals
     ),
   );
diff --git a/content/cookbook/transactions/add-memo.md b/content/cookbook/transactions/add-memo.md
index 93deb235e..490566307 100644
--- a/content/cookbook/transactions/add-memo.md
+++ b/content/cookbook/transactions/add-memo.md
@@ -6,9 +6,126 @@ description: how to add a memo to your transactions on Solana."
 ---
 
-Any transaction can add a message making use of the memo program. Currently the
-programID from the Memo Program has to be added manually
-`MemoSq4gqABAXKb96qnH8TysNcWxMyWCqXgDLGmfcHr`.
+Any transaction can add a message making use of the memo program. In web3.js v1,
+the program ID of the Memo Program has to be added manually:
+`MemoSq4gqABAXKb96qnH8TysNcWxMyWCqXgDLGmfcHr`. In v2, you can use
+`getAddMemoInstruction` instead.
+
+
+
+
+
+```typescript filename="add-memo.ts" {61-72}
+import {
+  airdropFactory,
+  appendTransactionMessageInstructions,
+  createSolanaRpc,
+  createSolanaRpcSubscriptions,
+  createTransactionMessage,
+  devnet,
+  generateKeyPairSigner,
+  getComputeUnitEstimateForTransactionMessageFactory,
+  getSignatureFromTransaction,
+  lamports,
+  pipe,
+  prependTransactionMessageInstructions,
+  sendAndConfirmTransactionFactory,
+  setTransactionMessageFeePayerSigner,
+  setTransactionMessageLifetimeUsingBlockhash,
+  signTransactionMessageWithSigners,
+} from "@solana/web3.js";
+import {
+  getSetComputeUnitLimitInstruction,
+  getSetComputeUnitPriceInstruction,
+} from "@solana-program/compute-budget";
+import { getAddMemoInstruction } from "@solana-program/memo";
+
+async function writeMemo(message: string) {
+  // Create an RPC.
+  const CLUSTER = "devnet";
+  const rpc = createSolanaRpc(devnet(`https://api.${CLUSTER}.solana.com`));
+  const rpcSubscriptions = createSolanaRpcSubscriptions(
+    devnet(`wss://api.${CLUSTER}.solana.com`),
+  );
+
+  // Create an airdrop function.
+  const airdrop = airdropFactory({ rpc, rpcSubscriptions });
+
+  // Create a utility that estimates a transaction message's compute consumption.
+  const getComputeUnitEstimate =
+    getComputeUnitEstimateForTransactionMessageFactory({ rpc });
+
+  // Create a transaction sending function.
+  const sendAndConfirmTransaction = sendAndConfirmTransactionFactory({
+    rpc,
+    rpcSubscriptions,
+  });
+
+  // Create and fund an account.
+  const keypairSigner = await generateKeyPairSigner();
+  console.log("Created an account with address", keypairSigner.address);
+  console.log("Requesting airdrop");
+  await airdrop({
+    commitment: "confirmed",
+    lamports: lamports(1_000_000n),
+    recipientAddress: keypairSigner.address,
+  });
+  console.log("Airdrop confirmed");
+
+  // Create a memo transaction.
+  console.log("Creating a memo transaction");
+  const { value: latestBlockhash } = await rpc.getLatestBlockhash().send();
+  const transactionMessage = pipe(
+    createTransactionMessage({ version: "legacy" }),
+    m => setTransactionMessageFeePayerSigner(keypairSigner, m),
+    m => setTransactionMessageLifetimeUsingBlockhash(latestBlockhash, m),
+    m =>
+      appendTransactionMessageInstructions(
+        [
+          getSetComputeUnitPriceInstruction({ microLamports: 5000n }),
+          getAddMemoInstruction({ memo: message }),
+        ],
+        m,
+      ),
+  );
+
+  // Figure out how many compute units to budget for this transaction
+  // so that you can right-size the compute budget to maximize the
+  // chance that it will be selected for inclusion into a block.
+ console.log("Estimating the compute consumption of the transaction"); + const estimatedComputeUnits = + await getComputeUnitEstimate(transactionMessage); + console.log( + `Transaction is estimated to consume ${estimatedComputeUnits} compute units`, + ); + const budgetedTransactionMessage = prependTransactionMessageInstructions( + [getSetComputeUnitLimitInstruction({ units: estimatedComputeUnits })], + transactionMessage, + ); + + // Sign and send the transaction. + console.log("Signing and sending the transaction"); + const signedTx = await signTransactionMessageWithSigners( + budgetedTransactionMessage, + ); + const signature = getSignatureFromTransaction(signedTx); + console.log( + "Sending transaction https://explorer.solana.com/tx/" + + signature + + "/?cluster=" + + CLUSTER, + ); + await sendAndConfirmTransaction(signedTx, { commitment: "confirmed" }); + console.log("Transaction confirmed"); +} + +writeMemo("Hello, Solana!"); +``` + + + + ```typescript filename="add-memo.ts" {38-46} import { @@ -63,3 +180,7 @@ import { ]); })(); ``` + + + + diff --git a/content/cookbook/transactions/add-priority-fees.md b/content/cookbook/transactions/add-priority-fees.md index b2cd17646..ea02cb42f 100644 --- a/content/cookbook/transactions/add-priority-fees.md +++ b/content/cookbook/transactions/add-priority-fees.md @@ -12,7 +12,7 @@ Units (CU) \* number of instructions, with a max of 1.4M CU. The Base Fee is 5,000 Lamports per signature. A microLamport is 0.000001 Lamports. > You can find a detailed guide here on -> [how to use priority fees](https://solana.com/developers/guides/advanced/how-to-use-priority-fees). +> [how to use priority fees](/content/guides/advanced/how-to-use-priority-fees.md). The total compute budget or Prioritization Fee for a single TX can be changed by adding instructions from the ComputeBudgetProgram. @@ -29,6 +29,128 @@ compute budget. The value provided will replace the default value. Transactions should request the minimum amount of CU required for execution to maximize throughput, or minimize fees. + + + + +```typescript filename="add-priority-fees.ts" {61-72} {37-38} {77-87} +import { + airdropFactory, + appendTransactionMessageInstructions, + createSolanaRpc, + createSolanaRpcSubscriptions, + createTransactionMessage, + devnet, + generateKeyPairSigner, + getComputeUnitEstimateForTransactionMessageFactory, + getSignatureFromTransaction, + lamports, + pipe, + prependTransactionMessageInstructions, + sendAndConfirmTransactionFactory, + setTransactionMessageFeePayerSigner, + setTransactionMessageLifetimeUsingBlockhash, + signTransactionMessageWithSigners, +} from "@solana/web3.js"; +import { + getSetComputeUnitLimitInstruction, + getSetComputeUnitPriceInstruction, +} from "@solana-program/compute-budget"; +import { getAddMemoInstruction } from "@solana-program/memo"; + +async function writeMemoWithPriorityFees(message: string) { + // Create an RPC. + const CLUSTER = "devnet"; + const rpc = createSolanaRpc(devnet(`https://api.${CLUSTER}.solana.com`)); + const rpcSubscriptions = createSolanaRpcSubscriptions( + devnet(`wss://api.${CLUSTER}.solana.com`), + ); + + // Create an airdrop function. + const airdrop = airdropFactory({ rpc, rpcSubscriptions }); + + // Create a utility that estimates a transaction message's compute consumption. + const getComputeUnitEstimate = + getComputeUnitEstimateForTransactionMessageFactory({ rpc }); + + // Create a transaction sending function. 
+  const sendAndConfirmTransaction = sendAndConfirmTransactionFactory({
+    rpc,
+    rpcSubscriptions,
+  });
+
+  // Create and fund an account.
+  const keypairSigner = await generateKeyPairSigner();
+  console.log("Created an account with address", keypairSigner.address);
+  console.log("Requesting airdrop");
+  await airdrop({
+    commitment: "confirmed",
+    lamports: lamports(1_000_000n),
+    recipientAddress: keypairSigner.address,
+  });
+  console.log("Airdrop confirmed");
+
+  // Create a memo transaction.
+  console.log("Creating a memo transaction");
+  const { value: latestBlockhash } = await rpc.getLatestBlockhash().send();
+  const transactionMessage = pipe(
+    createTransactionMessage({ version: "legacy" }),
+    m => setTransactionMessageFeePayerSigner(keypairSigner, m),
+    m => setTransactionMessageLifetimeUsingBlockhash(latestBlockhash, m),
+    m =>
+      appendTransactionMessageInstructions(
+        [
+          getSetComputeUnitPriceInstruction({ microLamports: 5000n }),
+          getAddMemoInstruction({ memo: message }),
+        ],
+        m,
+      ),
+  );
+
+  // Figure out how many compute units to budget for this transaction
+  // so that you can right-size the compute budget to maximize the
+  // chance that it will be selected for inclusion into a block.
+  console.log("Estimating the compute consumption of the transaction");
+  let estimatedComputeUnits = await getComputeUnitEstimate(transactionMessage);
+  // While these estimates are quite accurate, they are not perfect. It's not
+  // possible to know exactly what the transaction will consume when you send
+  // it in the future, since state may change. If you expect the transaction to
+  // consume more compute units than estimated, you can add a buffer:
+  // estimatedComputeUnits += 1000;
+  // estimatedComputeUnits *= 1.1;
+  // You can read more about the issue here: https://github.com/solana-labs/solana-web3.js/tree/master/packages/library#getcomputeunitestimatefortransactionmessagefactoryrpc
+
+  console.log(
+    `Transaction is estimated to consume ${estimatedComputeUnits} compute units`,
+  );
+  const budgetedTransactionMessage = prependTransactionMessageInstructions(
+    [getSetComputeUnitLimitInstruction({ units: estimatedComputeUnits })],
+    transactionMessage,
+  );
+
+  // Sign and send the transaction.
+  console.log("Signing and sending the transaction");
+  const signedTx = await signTransactionMessageWithSigners(
+    budgetedTransactionMessage,
+  );
+  const signature = getSignatureFromTransaction(signedTx);
+  console.log(
+    "Sending transaction https://explorer.solana.com/tx/" +
+      signature +
+      "/?cluster=" +
+      CLUSTER,
+  );
+  await sendAndConfirmTransaction(signedTx, { commitment: "confirmed" });
+  console.log("Transaction confirmed");
+}
+
+writeMemoWithPriorityFees("Hello, priority fees!");
+```
+
+
+
+
+
 ```typescript filename="add-priority-fees.ts" {25-28, 30-33}
 import { BN } from "@coral-xyz/anchor";
 import {
@@ -85,3 +207,7 @@ import {
   console.log(result);
 })();
 ```
+
+
+
+
diff --git a/content/cookbook/transactions/calculate-cost.md b/content/cookbook/transactions/calculate-cost.md
index d38739147..9bf42df2f 100644
--- a/content/cookbook/transactions/calculate-cost.md
+++ b/content/cookbook/transactions/calculate-cost.md
@@ -9,10 +9,154 @@ description:
 
 The number of signatures a transaction requires are used to calculate the
 transaction cost. As long as you are not creating an account, this will be the
 base transaction cost.
 To find out more about costs to create an account, check
-out
-[calculating rent costs](https://solana.com/developers/cookbook/accounts/calculate-rent).
+out [calculating rent costs](/content/cookbook/accounts/calculate-rent.md).
 
-```typescript filename="calculate-cost.ts"
+
+
+
+
+```typescript filename="calculate-cost.ts" {101-118}
+import {
+  airdropFactory,
+  appendTransactionMessageInstructions,
+  compileTransactionMessage,
+  createSolanaRpc,
+  createSolanaRpcSubscriptions,
+  createTransactionMessage,
+  devnet,
+  generateKeyPairSigner,
+  getBase64Decoder,
+  getCompiledTransactionMessageEncoder,
+  getComputeUnitEstimateForTransactionMessageFactory,
+  getSignatureFromTransaction,
+  lamports,
+  pipe,
+  prependTransactionMessageInstructions,
+  sendAndConfirmTransactionFactory,
+  setTransactionMessageFeePayerSigner,
+  setTransactionMessageLifetimeUsingBlockhash,
+  signTransactionMessageWithSigners,
+  type TransactionMessageBytesBase64,
+} from "@solana/web3.js";
+import {
+  getSetComputeUnitLimitInstruction,
+  getSetComputeUnitPriceInstruction,
+} from "@solana-program/compute-budget";
+import { getAddMemoInstruction } from "@solana-program/memo";
+
+async function calculateCost(message: string) {
+  // Create an RPC.
+  const CLUSTER = "devnet";
+  const rpc = createSolanaRpc(devnet(`https://api.${CLUSTER}.solana.com`));
+  const rpcSubscriptions = createSolanaRpcSubscriptions(
+    devnet(`wss://api.${CLUSTER}.solana.com`),
+  );
+
+  // Create a utility that estimates a transaction message's compute consumption.
+  const getComputeUnitEstimate =
+    getComputeUnitEstimateForTransactionMessageFactory({ rpc });
+
+  // Create a transaction sending function.
+  const sendAndConfirmTransaction = sendAndConfirmTransactionFactory({
+    rpc,
+    rpcSubscriptions,
+  });
+
+  // Create an airdrop function.
+  const airdrop = airdropFactory({ rpc, rpcSubscriptions });
+
+  // Create and fund an account.
+  const signer = await generateKeyPairSigner();
+  console.log("Created an account with address", signer.address);
+  console.log("Requesting airdrop");
+  await airdrop({
+    commitment: "confirmed",
+    lamports: lamports(1_000_000n),
+    recipientAddress: signer.address,
+  });
+  console.log("Airdrop confirmed");
+
+  // Create a memo transaction.
+  console.log("Creating a memo transaction");
+  const { value: latestBlockhash } = await rpc.getLatestBlockhash().send();
+  const transactionMessage = pipe(
+    createTransactionMessage({ version: "legacy" }),
+    m => setTransactionMessageFeePayerSigner(signer, m),
+    m => setTransactionMessageLifetimeUsingBlockhash(latestBlockhash, m),
+    m =>
+      appendTransactionMessageInstructions(
+        [
+          getSetComputeUnitPriceInstruction({ microLamports: 5000n }),
+          getAddMemoInstruction({ memo: message }),
+        ],
+        m,
+      ),
+  );
+
+  // Figure out how many compute units to budget for this transaction
+  // so that you can right-size the compute budget to maximize the
+  // chance that it will be selected for inclusion into a block.
+  console.log("Estimating the compute consumption of the transaction");
+  const estimatedComputeUnits =
+    await getComputeUnitEstimate(transactionMessage);
+  console.log(
+    `Transaction is estimated to consume ${estimatedComputeUnits} compute units`,
+  );
+
+  const budgetedTransactionMessage = prependTransactionMessageInstructions(
+    [getSetComputeUnitLimitInstruction({ units: estimatedComputeUnits })],
+    transactionMessage,
+  );
+
+  const base64EncodedMessage = pipe(
+    // Start with the message you want the fee for.
+    budgetedTransactionMessage,
+
+    // Compile it.
+    compileTransactionMessage,
+
+    // Convert the compiled message into a byte array.
+    getCompiledTransactionMessageEncoder().encode,
+
+    // Encode that byte array as a base64 string.
+    getBase64Decoder().decode,
+  ) as TransactionMessageBytesBase64;
+
+  const transactionCost = await rpc
+    .getFeeForMessage(base64EncodedMessage)
+    .send();
+
+  console.log(
+    "Transaction is estimated to cost " + transactionCost.value + " lamports",
+  );
+
+  // Sign and send the transaction.
+  console.log("Signing and sending the transaction");
+  const signedTx = await signTransactionMessageWithSigners(
+    budgetedTransactionMessage,
+  );
+  const signature = getSignatureFromTransaction(signedTx);
+  console.log(
+    "Sending transaction https://explorer.solana.com/tx/" +
+      signature +
+      "/?cluster=" +
+      CLUSTER,
+  );
+  await sendAndConfirmTransaction(signedTx, { commitment: "confirmed" });
+  console.log("Transaction confirmed");
+  // Transaction is estimated to consume 6236 compute units
+  // Transaction is estimated to cost 5032 lamports
+}
+
+calculateCost("Hello, Fees!");
+```
+
+
+
+
+```typescript filename="calculate-cost.ts" {108-111}
 import {
   clusterApiUrl,
   Connection,
@@ -66,3 +210,7 @@ import bs58 from "bs58";
 
   // Estimated SOL transfer cost: 5000 lamports
 })();
 ```
+
+
+
+
diff --git a/content/cookbook/transactions/offline-transactions.md b/content/cookbook/transactions/offline-transactions.md
index 3d95e4ca1..793f66d2c 100644
--- a/content/cookbook/transactions/offline-transactions.md
+++ b/content/cookbook/transactions/offline-transactions.md
@@ -22,7 +22,7 @@ import {
 import * as nacl from "tweetnacl";
 import * as bs58 from "bs58";
 
-// to complete a offline transaction, I will seperate them into four steps
+// to complete an offline transaction, I will separate it into four steps
 // 1. Create Transaction
 // 2. Sign Transaction
 // 3. Recover Transaction
@@ -65,7 +65,7 @@ import * as bs58 from "bs58";
   );
   let aliceSignature = nacl.sign.detached(realDataNeedToSign, alice.secretKey);
 
-  // 3. Recover Tranasction
+  // 3. Recover Transaction
 
   // you can verify signatures before you recovering the transaction
   let verifyFeePayerSignatureResult = nacl.sign.detached.verify(
@@ -83,7 +83,7 @@ import * as bs58 from "bs58";
   console.log(`verify alice signature: ${verifyAliceSignatureResult}`);
 
   // there are two ways you can recover the tx
-  // 3.a Recover Tranasction (use populate then addSignauture)
+  // 3.a Recover Transaction (use populate then addSignature)
   {
     let recoverTx = Transaction.populate(Message.from(realDataNeedToSign));
     recoverTx.addSignature(feePayer.publicKey, Buffer.from(feePayerSignature));
@@ -97,7 +97,7 @@ import * as bs58 from "bs58";
 
   // or
 
-  // 3.b. Recover Tranasction (use populate with signature)
+  // 3.b. Recover Transaction (use populate with signature)
   {
     let recoverTx = Transaction.populate(Message.from(realDataNeedToSign), [
       bs58.encode(feePayerSignature),
diff --git a/content/cookbook/transactions/optimize-compute.md b/content/cookbook/transactions/optimize-compute.md
index 033cab552..969e890db 100644
--- a/content/cookbook/transactions/optimize-compute.md
+++ b/content/cookbook/transactions/optimize-compute.md
@@ -8,10 +8,10 @@ the transaction is both processed in a timely manner as well as to avoid paying
 too much in priority fees.
 
 For more information about requesting optimal compute,
-[check out the full guide](https://solana.com/developers/guides/advanced/how-to-request-optimal-compute).
+[check out the full guide](/content/guides/advanced/how-to-request-optimal-compute.md).
 
 You can also find more information about
-[using priority fees](https://solana.com/developers/guides/advanced/how-to-use-priority-fees)
-in this detailed guide.
+[using priority fees](/content/guides/advanced/how-to-use-priority-fees.md) in
+this detailed guide.
 
 ```typescript filename="optimize-compute.ts"
 // import { ... } from "@solana/web3.js"
diff --git a/content/cookbook/transactions/send-sol.md b/content/cookbook/transactions/send-sol.md
index 5cfe36ee0..c79c8ae79 100644
--- a/content/cookbook/transactions/send-sol.md
+++ b/content/cookbook/transactions/send-sol.md
@@ -8,6 +8,127 @@ description:
 
 To send SOL, you will need to interact with the [SystemProgram][1].
 
+
+
+
+
+```typescript filename="send-sol.ts" {70-74}
+import {
+  address,
+  airdropFactory,
+  appendTransactionMessageInstructions,
+  createSolanaRpc,
+  createSolanaRpcSubscriptions,
+  createTransactionMessage,
+  devnet,
+  generateKeyPairSigner,
+  getComputeUnitEstimateForTransactionMessageFactory,
+  getSignatureFromTransaction,
+  lamports,
+  pipe,
+  prependTransactionMessageInstructions,
+  sendAndConfirmTransactionFactory,
+  setTransactionMessageFeePayerSigner,
+  setTransactionMessageLifetimeUsingBlockhash,
+  signTransactionMessageWithSigners,
+} from "@solana/web3.js";
+import {
+  getSetComputeUnitLimitInstruction,
+  getSetComputeUnitPriceInstruction,
+} from "@solana-program/compute-budget";
+import { getTransferSolInstruction } from "@solana-program/system";
+
+async function transferSol() {
+  // Create an RPC. Use localnet with solana-test-validator for easier airdrops.
+  const CLUSTER = "devnet";
+  const rpc = createSolanaRpc(devnet(`https://api.${CLUSTER}.solana.com`));
+  const rpcSubscriptions = createSolanaRpcSubscriptions(
+    devnet(`wss://api.${CLUSTER}.solana.com`),
+  );
+
+  // Create an airdrop function.
+  const airdrop = airdropFactory({ rpc, rpcSubscriptions });
+
+  // Create a utility that estimates a transaction message's compute consumption.
+  const getComputeUnitEstimate =
+    getComputeUnitEstimateForTransactionMessageFactory({ rpc });
+
+  // Create a transaction sending function.
+  const sendAndConfirmTransaction = sendAndConfirmTransactionFactory({
+    rpc,
+    rpcSubscriptions,
+  });
+
+  // Create and fund an account.
+  const keypairSigner = await generateKeyPairSigner();
+  console.log("Created an account with address", keypairSigner.address);
+  console.log("Requesting airdrop");
+  await airdrop({
+    commitment: "confirmed",
+    lamports: lamports(1_000_000_000n),
+    recipientAddress: keypairSigner.address,
+  });
+  console.log("Airdrop confirmed");
+
+  // Create a transfer transaction.
+  console.log("Creating a transfer transaction");
+  const { value: latestBlockhash } = await rpc.getLatestBlockhash().send();
+  const transactionMessage = pipe(
+    createTransactionMessage({ version: "legacy" }),
+    m => setTransactionMessageFeePayerSigner(keypairSigner, m),
+    m => setTransactionMessageLifetimeUsingBlockhash(latestBlockhash, m),
+    m =>
+      appendTransactionMessageInstructions(
+        [
+          getSetComputeUnitPriceInstruction({ microLamports: 5000n }),
+          getTransferSolInstruction({
+            source: keypairSigner,
+            destination: address("web3Qm5PuFapMJqe6PWRWfRBarkeqE2ZC8Eew3zwHH2"),
+            amount: lamports(1_000_000n),
+          }),
+        ],
+        m,
+      ),
+  );
+
+  // Figure out how many compute units to budget for this transaction
+  // so that you can right-size the compute budget to maximize the
+  // chance that it will be selected for inclusion into a block.
+  console.log("Estimating the compute consumption of the transaction");
+  const estimatedComputeUnits =
+    await getComputeUnitEstimate(transactionMessage);
+  console.log(
+    `Transaction is estimated to consume ${estimatedComputeUnits} compute units`,
+  );
+  const budgetedTransactionMessage = prependTransactionMessageInstructions(
+    [getSetComputeUnitLimitInstruction({ units: estimatedComputeUnits })],
+    transactionMessage,
+  );
+
+  // Sign and send the transaction.
+  console.log("Signing and sending the transaction");
+  const signedTx = await signTransactionMessageWithSigners(
+    budgetedTransactionMessage,
+  );
+  const signature = getSignatureFromTransaction(signedTx);
+  console.log(
+    "Sending transaction https://explorer.solana.com/tx/" +
+      signature +
+      "/?cluster=" +
+      CLUSTER,
+  );
+  await sendAndConfirmTransaction(signedTx, { commitment: "confirmed" });
+  console.log("Transaction confirmed");
+}
+
+transferSol();
+```
+
+
+
+
+
 ```typescript filename="send-sol.ts" {28-38}
 import {
   Connection,
@@ -50,4 +171,8 @@ import {
 })();
 ```
 
+
+
+
+
 [1]: https://docs.solanalabs.com/runtime/programs#system-program
diff --git a/content/cookbook/wallets/check-publickey.md b/content/cookbook/wallets/check-publickey.md
index a16eb4ac5..3aa440c73 100644
--- a/content/cookbook/wallets/check-publickey.md
+++ b/content/cookbook/wallets/check-publickey.md
@@ -11,7 +11,36 @@ have a private key associated with them. You can check this by looking to see
 if the public key lies on the ed25519 curve. Only public keys that lie on the
 curve can be controlled by users with wallets.
-```javascript file="check-public-key.ts"
+
+
+
+
+```typescript
+import { isAddress } from "@solana/web3.js";
+
+// Note that generateKeyPair() will always give a public key that is valid for users
+
+// Valid public key
+const key = "5oNDL3swdJJF1g9DzJiZ4ynHXgszjAEpUkxVYejchzrY";
+
+// Lies on the ed25519 curve and is suitable for users
+console.log("Valid Address: ", isAddress(key));
+
+// Valid public key, but it lies off the ed25519 curve (for example, a PDA),
+// so it is not suitable for users. Note that `isAddress` only validates the
+// address format; it does not check whether the address is on the curve.
+const offCurveAddress = "4BJXYkfvg37zEmBbsacZjeQDpTNx91KppxFJxRqrz48e";
+console.log("Valid Off Curve Address: ", isAddress(offCurveAddress));
+
+// Not a valid public key
+const errorPubkey = "testPubkey";
+console.log("Invalid Address: ", isAddress(errorPubkey));
+```
+
+
+
+```typescript
 import { PublicKey } from "@solana/web3.js";
 
 // Note that Keypair.generate() will always give a public key that is valid for users
@@ -31,4 +60,8 @@ console.log(PublicKey.isOnCurve(offCurveAddress.toBytes()));
 
 // Not a valid public key
 const errorPubkey = new PublicKey("testPubkey");
+console.log(PublicKey.isOnCurve(errorPubkey.toBytes()));
 ```
+
+
+
diff --git a/content/cookbook/wallets/connect-wallet-react.md b/content/cookbook/wallets/connect-wallet-react.md
index d1d73e2d4..2ddde85c0 100644
--- a/content/cookbook/wallets/connect-wallet-react.md
+++ b/content/cookbook/wallets/connect-wallet-react.md
@@ -7,12 +7,19 @@ description:
 ---
 
 Solana's [wallet-adapter](https://github.com/anza-xyz/wallet-adapter) library
-make it easy to manage wallet connections client-side. For a full length guide,
+makes it easy to manage wallet connections client-side. For a full-length guide,
 check out
-[how to add wallet-adapter to nextjs](https://solana.com/developers/guides/wallets/add-solana-wallet-adapter-to-nextjs).
+[Add Solana Wallet Adapter to a NextJS application](/content/guides/wallets/add-solana-wallet-adapter-to-nextjs.md).
+
+> For web3.js v2, please reference the
+> [react example](https://github.com/solana-labs/solana-web3.js/tree/master/examples/react-app)
+> from the
+> [Anza Web3js v2 Blog](https://www.anza.xyz/blog/solana-web3-js-2-release).
 
 ## How to Connect to a Wallet with React
 
+> Currently, `create-solana-dapp` only works with Solana Web3.js v1.
+
 For quick setup with React use:
 
 ```bash
@@ -28,7 +35,7 @@ npm install --save \
   @solana/wallet-adapter-react \
   @solana/wallet-adapter-react-ui \
   @solana/wallet-adapter-wallets \
-  @solana/web3.js \
+  @solana/web3.js@1 \
   react
 ```
 
@@ -59,18 +66,6 @@ export const Wallet: FC = () => {
 
   const wallets = useMemo(
     () => [
-      /**
-       * Wallets that implement either of these standards will be available automatically.
-       *
-       *   - Solana Mobile Stack Mobile Wallet Adapter Protocol
-       *     (https://github.com/solana-mobile/mobile-wallet-adapter)
-       *   - Solana Wallet Standard
-       *     (https://github.com/anza-xyz/wallet-standard)
-       *
-       * If you wish to support a wallet that supports neither of those standards,
-       * instantiate its legacy wallet adapter here. Common legacy adapters can be found
-       * in the npm package `@solana/wallet-adapter-wallets`.
- */ new UnsafeBurnerWalletAdapter(), ], // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/content/cookbook/wallets/create-keypair.md b/content/cookbook/wallets/create-keypair.md index 9391c2b6b..d8d6b360e 100644 --- a/content/cookbook/wallets/create-keypair.md +++ b/content/cookbook/wallets/create-keypair.md @@ -2,7 +2,7 @@ title: How to Create a Keypair sidebarSortOrder: 1 description: - "Ever transaction requires a signature from a keypair on Solana. Learn how to + "Every transaction requires a signature from a keypair on Solana. Learn how to create Keypairs on Solana." --- @@ -11,8 +11,28 @@ are [connecting to a wallet](/content/cookbook/wallets/connect-wallet-react), you do not need to worry about the keypair. Otherwise a keypair must be generated for signing transactions. + + + + +```javascript +import { generateKeyPairSigner } from "@solana/web3.js"; + +const signer = await generateKeyPairSigner(); +console.log("address: ", signer.address); +``` + + + + + ```javascript import { Keypair } from "@solana/web3.js"; const keypair = Keypair.generate(); +console.log("address:", keypair.publicKey.toBase58()); ``` + + + + diff --git a/content/cookbook/wallets/generate-vanity-address.md b/content/cookbook/wallets/generate-vanity-address.md index e6e56c93e..c8663ec22 100644 --- a/content/cookbook/wallets/generate-vanity-address.md +++ b/content/cookbook/wallets/generate-vanity-address.md @@ -16,7 +16,7 @@ key more easily identifiable. **Note**: The more characters in your vanity address, the longer it will take. You can generate a vanity address using the -[Solana CLI](https://docs.solanalabs.com/cli): +[Solana CLI](/docs/intro/installation.md): ```bash solana-keygen grind --starts-with e1v1s:1 diff --git a/content/cookbook/wallets/restore-keypair.md b/content/cookbook/wallets/restore-keypair.md index 7cb5d570f..61aaba7e8 100644 --- a/content/cookbook/wallets/restore-keypair.md +++ b/content/cookbook/wallets/restore-keypair.md @@ -9,28 +9,69 @@ secret to test out your dApp. 
 ## From Bytes
 
-```typescript filename="restore-keypair-from-bytes.ts"
+
+
+
+
+```typescript
+import { createKeyPairFromBytes } from "@solana/web3.js";
+
+const keypairBytes = new Uint8Array([
+  174, 47, 154, 16, 202, 193, 206, 113, 199, 190, 53, 133, 169, 175, 31, 56,
+  222, 53, 138, 189, 224, 216, 117, 173, 10, 149, 53, 45, 73, 251, 237, 246, 15,
+  185, 186, 82, 177, 240, 148, 69, 241, 227, 167, 80, 141, 89, 240, 121, 121,
+  35, 172, 247, 68, 251, 226, 218, 48, 63, 176, 109, 168, 89, 238, 135,
+]);
+
+const keypair = await createKeyPairFromBytes(keypairBytes);
+```
+
+
+
+```typescript
 import { Keypair } from "@solana/web3.js";
 
-const keypair = Keypair.fromSecretKey(
-  Uint8Array.from([
-    174, 47, 154, 16, 202, 193, 206, 113, 199, 190, 53, 133, 169, 175, 31, 56,
-    222, 53, 138, 189, 224, 216, 117, 173, 10, 149, 53, 45, 73, 251, 237, 246,
-    15, 185, 186, 82, 177, 240, 148, 69, 241, 227, 167, 80, 141, 89, 240, 121,
-    121, 35, 172, 247, 68, 251, 226, 218, 48, 63, 176, 109, 168, 89, 238, 135,
-  ]),
-);
+const keypairBytes = Uint8Array.from([
+  174, 47, 154, 16, 202, 193, 206, 113, 199, 190, 53, 133, 169, 175, 31, 56,
+  222, 53, 138, 189, 224, 216, 117, 173, 10, 149, 53, 45, 73, 251, 237, 246, 15,
+  185, 186, 82, 177, 240, 148, 69, 241, 227, 167, 80, 141, 89, 240, 121, 121,
+  35, 172, 247, 68, 251, 226, 218, 48, 63, 176, 109, 168, 89, 238, 135,
+]);
+
+const keypair = Keypair.fromSecretKey(keypairBytes);
 ```
 
-## From base58 String
+
+
 
-```typescript filename="restore-keypair-from-base58.ts
+## From Base58 String
+
+
+
+
+```typescript
+import { createKeyPairFromBytes, getBase58Encoder } from "@solana/web3.js";
+
+const keypairBase58 =
+  "5MaiiCavjCmn9Hs1o3eznqDEhRwxo7pXiAYez7keQUviUkauRiTMD8DrESdrNjN8zd9mTmVhRvBJeg5vhyvgrAhG";
+const keypairBytes = getBase58Encoder().encode(keypairBase58);
+const keypair = await createKeyPairFromBytes(keypairBytes);
+```
+
+
+
+```typescript
 import { Keypair } from "@solana/web3.js";
 import * as bs58 from "bs58";
 
-const keypair = Keypair.fromSecretKey(
-  bs58.decode(
-    "5MaiiCavjCmn9Hs1o3eznqDEhRwxo7pXiAYez7keQUviUkauRiTMD8DrESdrNjN8zd9mTmVhRvBJeg5vhyvgrAhG",
-  ),
-);
+const keypairBase58 =
+  "5MaiiCavjCmn9Hs1o3eznqDEhRwxo7pXiAYez7keQUviUkauRiTMD8DrESdrNjN8zd9mTmVhRvBJeg5vhyvgrAhG";
+const keypairBytes = bs58.decode(keypairBase58);
+const keypair = Keypair.fromSecretKey(keypairBytes);
 ```
+
+
+
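+In practice, the 64-byte secret key shown above often comes from a JSON keypair
+file created by the Solana CLI (for example, via `solana-keygen new`). Below is
+a minimal sketch of restoring a keypair from such a file with the legacy (v1)
+API, assuming the CLI's default keypair path:
+
+```typescript
+import { Keypair } from "@solana/web3.js";
+import fs from "node:fs";
+import os from "node:os";
+import path from "node:path";
+
+// Assumes the Solana CLI default keypair path: ~/.config/solana/id.json
+const keypairPath = path.join(os.homedir(), ".config", "solana", "id.json");
+
+// The file contains the secret key as a JSON array of 64 numbers
+const keypairBytes = Uint8Array.from(
+  JSON.parse(fs.readFileSync(keypairPath, "utf-8")),
+);
+const keypair = Keypair.fromSecretKey(keypairBytes);
+console.log("address:", keypair.publicKey.toBase58());
+```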
diff --git a/content/cookbook/wallets/sign-message.md b/content/cookbook/wallets/sign-message.md
index d491d7a97..fe6488b23 100644
--- a/content/cookbook/wallets/sign-message.md
+++ b/content/cookbook/wallets/sign-message.md
@@ -4,14 +4,40 @@ sidebarSortOrder: 6
 description: "Learn how to sign messages on Solana."
 ---
 
-The primary function of a keypair is to sign messages and enable verification of
-the signature. Verification of a signature allows the recipient to be sure that
-the data was signed by the owner of a specific private key.
+The primary function of a keypair is to sign messages and transactions,
+enabling verification of the signature. Verification of a signature allows the
+recipient to be sure that the data was signed by the owner of a specific
+private key.
 
-To do so, we can use the [TweetNaCl](https://www.npmjs.com/package/tweetnacl)
-crypto library:
+
 
-```typescript filename="sign-message.ts"
+```typescript
+import {
+  generateKeyPair,
+  signBytes,
+  verifySignature,
+  getUtf8Encoder,
+  getBase58Decoder,
+} from "@solana/web3.js";
+
+const keys = await generateKeyPair();
+const message = getUtf8Encoder().encode("Hello, World!");
+const signedBytes = await signBytes(keys.privateKey, message);
+
+const decoded = getBase58Decoder().decode(signedBytes);
+console.log("Signature:", decoded);
+
+const verified = await verifySignature(keys.publicKey, signedBytes, message);
+console.log("Verified:", verified);
+```
+
+
+
+
+In Solana Web3.js v1, we can use the
+[TweetNaCl](https://www.npmjs.com/package/tweetnacl) crypto library:
+
+```typescript
 import { Keypair } from "@solana/web3.js";
 import nacl from "tweetnacl";
 import { decodeUTF8 } from "tweetnacl-util";
@@ -37,3 +63,6 @@ const result = nacl.sign.detached.verify(
 );
 
 console.log(result);
 ```
+
+
+
diff --git a/content/courses/connecting-to-offchain-data/metadata.yml b/content/courses/connecting-to-offchain-data/metadata.yml
index 2400160a0..90ac8ed84 100644
--- a/content/courses/connecting-to-offchain-data/metadata.yml
+++ b/content/courses/connecting-to-offchain-data/metadata.yml
@@ -5,6 +5,3 @@ lessons:
   - oracles
   - verifiable-randomness-functions
 priority: 20
-# Uses out of date repos
-# TODO: Superteam to update
-isHidden: true
diff --git a/content/courses/connecting-to-offchain-data/oracles.md b/content/courses/connecting-to-offchain-data/oracles.md
index fa638f4e2..80d79798f 100644
--- a/content/courses/connecting-to-offchain-data/oracles.md
+++ b/content/courses/connecting-to-offchain-data/oracles.md
@@ -12,22 +12,24 @@ description: Access real-world data inside a Solana program.
 
 ## Summary
 
-- Oracles are services that provide external data to a blockchain network
-- There are many
-  [Oracle providers on Solana](https://solana.com/ecosystem/explore?categories=oracle).
-- You can build your own Oracle to create a custom data feed
-- You have to be careful when choosing your data feed providers
+- Oracles are services that provide external data to a blockchain network.
+- Solana has a rich ecosystem of oracle providers. Some notable oracle providers
+  include [Pyth Network](https://pyth.network),
+  [Switchboard](https://switchboard.xyz), [Chainlink](https://chain.link), and
+  [DIA](https://www.diadata.org/solana-price-oracles/).
+- You can build your own oracle to create a custom data feed.
+- When choosing oracle providers, consider reliability, accuracy,
+  decentralization, update frequency, and cost. Be aware of security risks:
+  oracles can be potential points of failure or attack. For critical data, use
+  reputable providers and consider multiple independent oracles to mitigate
+  risks.
 
 ## Lesson
 
-[Oracles](https://solana.com/ecosystem/explore?categories=oracle) are services
-that provide external data to a blockchain network. Blockchains by nature are
-siloed environments that do not know the outside world. This constraint
-inherently puts a limit on the use cases for decentralized applications (dApps).
-Oracles provide a solution to this limitation by creating a decentralized way to
-get real-world data onchain.
-
-Oracles can provide just about any type of data onchain. Examples include:
+Oracles are services that provide external data to a blockchain network.
+Blockchains are siloed environments that do not inherently know the outside
+world.
Oracles solve this limitation by offering a decentralized way to get +various types of data onchain, such as: - Results of sporting events - Weather data @@ -35,232 +37,229 @@ Oracles can provide just about any type of data onchain. Examples include: - Market data - Randomness -While the exact implementation may differ from blockchain to blockchain, -generally Oracles work as follows: +While the implementation may differ across blockchains, oracles generally work +as follows: -1. Data is sourced off-chain. -2. That data is published onchain via a transaction, and stored in an account. -3. Programs can read the data stored in the account and use that data in the - program's logic. +1. Data is sourced offchain. +2. The data is published onchain via a transaction and stored in an account. +3. Programs can read the data stored in the account and use it in the program's + logic. -This lesson will go over the basics of how oracles work, the state of oracles on +This lesson will cover the basics of how oracles work, the state of oracles on Solana, and how to effectively use oracles in your Solana development. ### Trust and Oracle Networks -The primary hurdle oracles need to overcome is one of trust. Since blockchains -execute irreversible financial transactions, developers and users alike need to -know they can trust the validity and accuracy of oracle data. The first step in -trusting an oracle is understanding how it's implemented. - -Broadly speaking, there are three implementation types: - -1. Single, centralized oracle publishes data onchain. - 1. Pro: It’s simple; there's one source of truth. - 2. Con: nothing is stopping the oracle provider from providing inaccurate - data. -2. Network of oracles publish data and a consensus mechanism is used to - determine the final result. - 1. Pro: Consensus makes it less likely that bad data is pushed onchain. - 2. Con: There is no way to disincentivize bad actors from publishing bad data - and trying to sway the consensus. -3. Oracle network with some kind of proof of stake mechanism. I.e. require - oracles to stake tokens to participate in the consensus mechanism. On every - response, if an oracle deviates by some threshold from the accepted range of - results, their stake is taken by the protocol and they can no longer report. - 1. Pro: Ensures no single oracle can influence the final result too - drastically, while also incentivizing honest and accurate actions. - 2. Con: Building decentralized networks is challenging, incentives need to be - set up properly and be sufficient to get participation, etc. - -Depending on the use case of an oracle, any of the above solutions could be the -right approach. For example, you might be perfectly willing to participate in a -blockchain-based game that utilizes centralized oracles to publish gameplay -information onchain. - -On the other hand, you may be less willing to trust a centralized oracle -providing price information for trading applications. - -You may end up creating many standalone oracles for your own applications simply -as a way to get access to off-chain information that you need. However, those -oracles are unlikely to be used by the broader community where decentralization -is a core tenet. You should also be hesitant to use centralized, third-party -oracles yourself. - -In a perfect world, all important and/or valuable data would be provided onchain -through a highly efficient oracle network through a trustworthy proof of stake -consensus mechanism. 
By introducing a staking mechanism, it’s in the oracle
-providers' best interest to ensure their data is accurate to keep their staked
-funds.
-
-Even when an oracle network claims to have such a consensus mechanism, be sure
-to know the risks involved with using the network. If the total value involved
-of the downstream applications is greater than the oracle's allocated stake,
-oracles still may have sufficient incentive to collude.
-
-It is your job to know how the oracle network is configured and judge if it can
-be trusted. Generally, Oracles should only be used for non-mission-critical
-functions and worst-case scenarios should be accounted for.
+The primary challenge for oracles is trust. Since blockchains execute
+irreversible financial transactions, developers and users need to trust the
+validity and accuracy of oracle data. The first step in trusting an oracle is
+understanding its implementation.
+
+Broadly speaking, there are three types of implementations:
+
+1. **Single, centralized oracle publishes data onchain.**
+   - **Pro:** It's simple; there's one source of truth.
+   - **Con:** Nothing prevents the oracle provider from supplying inaccurate
+     data.
+2. **Network of oracles publishes data, with consensus determining the final
+   result.**
+   - **Pro:** Consensus reduces the likelihood of bad data being pushed onchain.
+   - **Con:** There's no direct disincentive for bad actors to publish incorrect
+     data to sway consensus.
+3. **Oracle network with proof-of-stake mechanism:** Oracles are required to
+   stake tokens to participate. If an oracle's response deviates too far from
+   the consensus, its stake is taken by the protocol and it can no longer
+   report.
+   - **Pro:** This approach prevents any single oracle from overly influencing
+     the final result while incentivizing honest and accurate reporting.
+   - **Con:** Building decentralized networks is challenging; proper incentives
+     and sufficient participation are necessary for success.
+
+Each implementation has its place depending on the oracle's use case. For
+example, using centralized oracles for a blockchain-based game may be
+acceptable. However, you may be less comfortable with a centralized oracle
+providing price data for trading applications.
+
+You may create standalone oracles for your own applications to access offchain
+data. However, these are unlikely to be used by the broader community, where
+decentralization is a core principle. Be cautious about using centralized
+third-party oracles as well.
+
+In an ideal scenario, all important or valuable data would be provided onchain
+via a highly efficient oracle network with a trustworthy proof-of-stake
+consensus mechanism. A staking system incentivizes oracle providers to ensure
+the accuracy of their data to protect their staked funds.
+
+Even when an oracle network claims to have a consensus mechanism, be aware of
+the risks. If the total value at stake in downstream applications exceeds the
+staked amount of the oracle network, there may still be sufficient incentive for
+collusion among oracles.
+
+As a developer, it is your responsibility to understand how an oracle network is
+configured and assess whether it can be trusted. Generally, oracles should only
+be used for non-mission-critical functions, and worst-case scenarios should
+always be accounted for.
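+
+To make the consensus mechanics concrete, here is a deliberately simplified
+sketch (illustrative only, not Switchboard's or any real network's
+implementation) of how a median-based oracle round with a slashing threshold
+could work:
+
+```typescript
+// Illustrative model: each oracle reports a value for the current round.
+type OracleResponse = { oracle: string; value: number };
+
+function resolveRound(responses: OracleResponse[], maxDeviation: number) {
+  // Accept the median response as the final onchain result.
+  const sorted = [...responses].sort((a, b) => a.value - b.value);
+  const median = sorted[Math.floor(sorted.length / 2)].value;
+
+  // Oracles deviating from the median by more than `maxDeviation`
+  // (a fraction, e.g. 0.05 = 5%) would have their stake slashed in a
+  // proof-of-stake oracle network.
+  const slashed = responses
+    .filter(r => Math.abs(r.value - median) / median > maxDeviation)
+    .map(r => r.oracle);
+
+  return { median, slashed };
+}
+
+const round = resolveRound(
+  [
+    { oracle: "A", value: 63_100 },
+    { oracle: "B", value: 63_150 },
+    { oracle: "C", value: 70_000 }, // deviates by ~10%: would be slashed
+  ],
+  0.05,
+);
+console.log(round); // { median: 63150, slashed: [ 'C' ] }
+```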
 
 ### Oracles on Solana
 
-There are many
-[Oracle providers on Solana](https://solana.com/ecosystem/explore?categories=oracle).
-Two of the most well known are [Pyth](https://pyth.network) and
-[Switchboard](https://switchboard.xyz). They’re each unique and follow slightly
-different design choices.
-
-**Pyth** is primarily focused on financial data published from top-tier
-financial institutions. Pyth’s data providers publish the market data updates.
-These updates are then aggregated and published onchain by the Pyth program. The
-data sourced from Pyth is not completely decentralized as only approved data
-providers can publish data. The selling point of Pyth is that its data is vetted
-directly by the platform and sourced from financial institutions, ensuring
-higher quality.
-
-**Switchboard** is a completely decentralized oracle network and has data of all
-kinds available. Check out all of the feeds
-[on their website](https://app.switchboard.xyz/solana/devnet/explore)
-Additionally, anyone can run a Switchboard oracle and anyone can consume their
-data. This means you'll have to be diligent about researching feeds. We'll talk
-more about what to look for later in the lesson.
-
-Switchboard follows a variation of the stake weighted oracle network described
-in the third option of the previous section. It does so by introducing what are
-called TEEs (Trusted Execution Environments). TEEs are secure environments
-isolated from the rest of the system where sensitive code can be executed. In
-simple terms, given a program and an input, TEEs can execute and generate an
-output along with a proof. If you’d like to learn more about TEEs, please read
-[Switchboard’s documentation](https://docs.switchboard.xyz/functions).
-
-By introducing TEEs on top of stake weighted oracles, Switchboard is able to
-verify each oracle’s software to allow participation in the network. If an
-oracle operator acts maliciously and attempts to change the operation of the
-approved code, a data quote verification will fail. This allows Switchboard
-oracles to operate beyond quantitative value reporting, such as functions --
-running off-chain custom and confidential computations.
+Solana has a diverse ecosystem of oracle providers, each with unique offerings.
+Some notable ones include:
+
+- [**Pyth**](https://www.pyth.network/price-feeds)
+  Focuses primarily on financial data published by top-tier financial
+  institutions. Pyth's data providers are approved entities that publish market
+  data updates, which are then aggregated and made available onchain via the
+  Pyth program. This data is not fully decentralized since only approved
+  providers can publish it. However, the key advantage is that Pyth offers
+  high-quality, vetted data directly sourced from these institutions.
+- [**Switchboard**](https://switchboard.xyz)
+  Completely decentralized oracle network with a variety of data feeds. You can
+  explore these feeds on the
+  [Switchboard website](https://app.switchboard.xyz/solana/mainnet). Anyone can
+  run a Switchboard oracle or consume its data, but that means users need to be
+  diligent in researching the quality of the feeds they use.
+- [**Chainlink**](https://chain.link)
+  Decentralized oracle network providing secure offchain computations and
+  real-world data across multiple blockchains.
+- [**DIA**](https://www.diadata.org/solana-price-oracles/)
+  Open-source oracle platform delivering transparent and verified data for
+  digital assets and traditional financial instruments.
+
+In this lesson, we'll be using **Switchboard**.
However, the concepts are +applicable to most oracles, so you should select the oracle provider that best +fits your needs. + +Switchboard follows a stake-weighted oracle network model, as discussed in the +previous section, but with an additional layer of security via +[**Trusted Execution Environments (TEEs)**](https://en.wikipedia.org/wiki/Trusted_execution_environment). +TEEs are secure environments isolated from the rest of the system where +sensitive code can be executed. In simple terms, TEEs can take a program and an +input, execute the program, and produce an output along with a proof. To learn +more about TEEs, check out +[Switchboard's Architecture Design documentation](https://docs.switchboard.xyz/docs/switchboard/readme/architecture-design#trusted-execution-environments-for-layered-security). + +By incorporating TEEs, Switchboard is able to verify each oracle's software, +ensuring its integrity within the network. If an oracle operator acts +maliciously or alters the approved code, the data quote verification process +will fail. This allows Switchboard to support more than just data reporting; it +can also run offchain custom and confidential computations. ### Switchboard Oracles -Switchboard oracles store data on Solana using data feeds. These data feeds, -also called aggregators, are each a collection of jobs that get aggregated to -produce a single result. These aggregators are represented onchain as a regular -Solana account managed by the Switchboard program. When an oracle updates, it -writes the data directly to these accounts. Let's go over a few terms to -understand how Switchboard works: - -- **[Aggregator (Data Feed)](https://github.com/switchboard-xyz/sbv2-solana/blob/0b5e0911a1851f9ca37042e6ff88db4cd840067b/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L60)** - - Contains the data feed configuration, dictating how data feed updates get - requested, updated, and resolved onchain from its assigned source. The - Aggregator is the account owned by the Switchboard Solana program and is where - the data is published onchain. -- **[Job](https://github.com/switchboard-xyz/sbv2-solana/blob/0b5e0911a1851f9ca37042e6ff88db4cd840067b/rust/switchboard-solana/src/oracle_program/accounts/job.rs)** - - Each data source should correspond to a job account. The job account is a - collection of Switchboard tasks used to instruct the oracles on how to fetch - and transform data. In other words, it stores the blueprints for how data is - fetched off-chain for a particular data source. -- **Oracle** - A separate program that sits between the internet and the - blockchain and facilitates the flow of information. An oracle reads a feed’s - job definitions, calculates the result, and submits its response onchain. -- **Oracle Queue** - A group of oracles that get assigned to update requests in - a round-robin fashion. The oracles in the queue must be actively heartbeating - onchain to provide updates. Data and configurations for this queue are stored - onchain in an - [account owned by the Switchboard program](https://github.com/switchboard-xyz/solana-sdk/blob/9dc3df8a5abe261e23d46d14f9e80a7032bb346c/javascript/solana.js/src/generated/oracle-program/accounts/OracleQueueAccountData.ts#L8). -- **Oracle Consensus** - Determines how oracles come to agreement on the - accepted onchain result. Switchboard oracles use the median oracle response as - the accepted result. 
A feed authority can control how many oracles are - requested and how many must respond to influence its security. - -Switchboard oracles are incentivized to update data feeds because they are -rewarded for doing so accurately. Each data feed has a `LeaseContract` account. -The lease contract is a pre-funded escrow account to reward oracles for -fulfilling update requests. Only the predefined `leaseAuthority` can withdraw -funds from the contract, but anyone can contribute to it. When a new round of -updates is requested for a data feed, the user who requested the update is -rewarded from the escrow. This is to incentivize users and crank turners (anyone -who runs software to systematically send update requests to Oracles) to keep -feeds updating based on a feed’s configurations. Once an update request has been -successfully fulfilled and submitted onchain by the oracles in the queue, the -oracles are transferred rewards from the escrow as well. These payments ensure -active participation. - -Additionally, oracles have to stake tokens before they can service update -requests and submit responses onchain. If an oracle submits a result onchain -that falls outside the queue’s configured parameters, their stake will be -slashed (if the queue has `slashingEnabled`). This helps ensure that oracles are -responding in good faith with accurate information. - -Now that you understand the terminology and economics, let’s take a look at how -data is published onchain: - -1. Oracle queue setup - When an update is requested from a queue, the next `N` - oracles are assigned to the update request and cycled to the back of the - queue. Each oracle queue in the Switchboard network is independent and - maintains its own configuration. The configuration influences its level of - security. This design choice enables users to tailor the oracle queue's - behavior to match their specific use case. An Oracle queue is stored onchain - as an account and contains metadata about the queue. A queue is created by - invoking the - [oracleQueueInit instruction](https://github.com/switchboard-xyz/solana-sdk/blob/9dc3df8a5abe261e23d46d14f9e80a7032bb346c/javascript/solana.js/src/generated/oracle-program/instructions/oracleQueueInit.ts#L13) - on the Switchboard Solana program. - 1. Some relevant Oracle Queue configurations: - 1. `oracle_timeout` - Interval when stale oracles will be removed if they - fail to heartbeat. - 2. `reward` - Rewards to provide oracles and round openers on this queue. - 3. `min_stake` - The minimum amount of stake that oracles must provide to - remain on the queue. - 4. `size` - The current number of oracles on a queue. - 5. `max_size` - The maximum number of oracles a queue can support. -2. Aggregator/data feed setup - The aggregator/feed account gets created. A feed - belongs to a single oracle queue. The feed’s configuration dictates how - update requests are invoked and routed through the network. -3. Job account setup - In addition to the feed, a job account for each data - source must be set up. This defines how oracles can fulfill the feed’s update - requests. This includes defining where the oracles should fetch the data the - feed is requesting. -4. Request assignment - Once an update has been requested with the feed account, - the oracle queue assigns the request to different oracles/nodes in the queue - to fulfill. The oracles will fetch the data from the data source defined in - each of the feed’s job accounts. Each job account has a weight associated - with it. 
The oracle will calculate the weighted median of the results from - across all the jobs. -5. After `minOracleResults` responses are received, the onchain program - calculates the result using the median of the oracle responses. Oracles who - respond within the queue’s configured parameters are rewarded, while the - oracles who respond outside this threshold are slashed (if the queue has - `slashingEnabled`). -6. The updated result is stored in the data feed account so it can be - read/consumed onchain. - -#### How to use Switchboard Oracles - -To use Switchboard oracles and incorporate off-chain data into a Solana program, -you first have to find a feed that provides the data you need. Switchboard feeds -are public and there are many -[already available that you can choose from](https://app.switchboard.xyz/solana/devnet/explore). -When looking for a feed, you have to decide how accurate/reliable you want the -feed, where you want to source the data from, as well as the feed’s update -cadence. When consuming a publicly available feed, you have no control over -these things, so choose carefully! - -For example, there is a Switchboard-sponsored -[BTC_USD feed](https://app.switchboard.xyz/solana/devnet/feed/8SXvChNYFhRq4EZuZvnhjrB3jJRQCv4k3P4W6hesH3Ee). -This feed is available on Solana devnet/mainnet with pubkey -`8SXvChNYFhRq4EZuZvnhjrB3jJRQCv4k3P4W6hesH3Ee`. It provides the current price of -Bitcoin in USD onchain. - -The actual onchain data for a Switchboard feed account looks a little like this: +Switchboard oracles store data on Solana using data feeds, also called +**aggregators**. These data feeds consist of multiple jobs that are aggregated +to produce a single result. Aggregators are represented onchain as regular +Solana accounts managed by the Switchboard program, with updates written +directly to these accounts. Let's review some key terms to understand how +Switchboard operates: + +- **[Aggregator (Data Feed)](https://github.com/switchboard-xyz/solana-sdk/blob/main/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs)** - + Contains the data feed configuration, including how updates are requested, + processed, and resolved onchain. The aggregator account, owned by the + Switchboard program stores the final data onchain. +- **[Job](https://github.com/switchboard-xyz/solana-sdk/blob/main/rust/switchboard-solana/src/oracle_program/accounts/job.rs)** - + Each data source corresponds to a job account, which defines the tasks for + fetching and transforming offchain data. It acts as the blueprint for how data + is retrieved for a particular source. +- **[Oracle](https://github.com/switchboard-xyz/solana-sdk/blob/main/rust/switchboard-solana/src/oracle_program/accounts/oracle.rs)** - + An oracle acts as the intermediary between the internet and the blockchain. It + reads job definitions from the feed, calculates results, and submits them + onchain. +- **Oracle Queue** - A pool of oracles that are assigned update requests in a + round-robin fashion. Oracles in the queue must continuously heartbeat onchain + to provide updates. The queue's data and configuration are stored in an + [onchain account](https://github.com/switchboard-xyz/solana-sdk/blob/main/javascript/solana.js/src/generated/oracle-program/accounts/OracleQueueAccountData.ts) + managed by the Switchboard program. +- **Oracle Consensus** - Oracles come to a consensus by using the median of the + responses as the accepted onchain result. 
The feed authority controls how many + oracles are required to respond for added security. + +Switchboard incentivizes oracles to update data feeds through a reward system. +Each data feed has a `LeaseContract` account, which is a pre-funded escrow that +rewards oracles for fulfilling update requests. The `leaseAuthority` can +withdraw funds, but anyone can contribute to the contract. When a user requests +a feed update, the escrow rewards both the user and the crank turners (those who +run software to systematically send update requests). Once oracles submit +results onchain, they are paid from this escrow. + +Oracles must also stake tokens to participate in updates. If an oracle submits a +result outside the queue's configured parameters, they can have their stake +slashed, provided the queue has `slashingEnabled`. This mechanism ensures that +oracles act in good faith by providing accurate data. + +#### How Data is Published Onchain + +1. **Oracle Queue Setup** - When an update request is made, the next `N` oracles + are assigned from the queue and moved to the back after completion. Each + queue has its own configuration that dictates security and behavior, tailored + to specific use cases. Queues are stored onchain as accounts and can be + created via the + [`oracleQueueInit` instruction](https://github.com/switchboard-xyz/solana-sdk/blob/main/javascript/solana.js/src/generated/oracle-program/instructions/oracleQueueInit.ts). + - Key + [Oracle Queue configurations](https://docs.rs/switchboard-solana/latest/switchboard_solana/oracle_program/accounts/queue/struct.OracleQueueAccountData.html): + - `oracle_timeout`: Removes stale oracles after a heartbeat timeout. + - `reward`: Defines rewards for oracles and round openers. + - `min_stake`: The minimum stake required for an oracle to participate. + - `size`: The current number of oracles in the queue. + - `max_size`: The maximum number of oracles a queue can support. +2. **[Aggregator/data feed setup](https://docs.rs/switchboard-solana/latest/switchboard_solana/oracle_program/accounts/aggregator/struct.AggregatorAccountData.html)** - + Each feed is linked to a single oracle queue and contains configuration + details on how updates are requested and processed. +3. **[Job Account Setup](https://docs.rs/switchboard-solana/latest/switchboard_solana/oracle_program/accounts/job/struct.JobAccountData.html)** - + Each data source requires a job account that defines how oracles retrieve and + fulfill the feed's update requests. These job accounts also specify where + data is sourced. +4. **Request Assignment** - When an update is requested, the oracle queue + assigns the task to different oracles in the queue. Each oracle processes + data from the sources defined in the feed's job accounts, calculating a + weighted median result based on the data. + +5. **Consensus and Result Calculation** - After the required number of oracle + responses + ([`minOracleResults`](https://docs.rs/switchboard-solana/latest/switchboard_solana/oracle_program/accounts/aggregator/struct.AggregatorAccountData.html#structfield.min_oracle_results)) + is received, the result is calculated as the median of the responses. Oracles + that submit responses within the set parameters are rewarded, while those + outside the threshold are penalized (if `slashingEnabled` is active). +6. **Data Storage** - The final result is stored in the aggregator account, + where it can be accessed onchain for consumption by other programs. 
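
To make the consensus math above concrete, here is a minimal, self-contained
Rust sketch of the two-stage aggregation: each oracle reports the weighted
median of its own job results, and the accepted onchain result is the median of
those oracle responses. This only illustrates the arithmetic described above —
it is not Switchboard's actual implementation, and the sample prices and
weights are made up:

```rust
/// Weighted median: sort (value, weight) pairs by value, then walk forward
/// until the cumulative weight reaches half of the total weight.
fn weighted_median(mut samples: Vec<(f64, u64)>) -> Option<f64> {
    samples.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
    let total: u64 = samples.iter().map(|&(_, weight)| weight).sum();
    let mut cumulative: u64 = 0;
    for (value, weight) in samples {
        cumulative += weight;
        if cumulative * 2 >= total {
            return Some(value);
        }
    }
    None
}

/// Plain median across oracle responses (the onchain consensus step).
fn median(mut responses: Vec<f64>) -> Option<f64> {
    if responses.is_empty() {
        return None;
    }
    responses.sort_by(|a, b| a.partial_cmp(b).unwrap());
    Some(responses[responses.len() / 2])
}

fn main() {
    // Three oracles, each reporting the weighted median of its job results.
    let oracle_responses: Vec<f64> = vec![
        weighted_median(vec![(64_000.0, 2), (64_100.0, 1), (63_900.0, 1)]).unwrap(),
        weighted_median(vec![(64_020.0, 3), (64_050.0, 1)]).unwrap(),
        weighted_median(vec![(63_990.0, 1), (64_010.0, 1)]).unwrap(),
    ];

    // The accepted onchain result is the median of the oracle responses.
    println!("accepted result: {:?}", median(oracle_responses));
}
```

Because a median is taken at both stages, a single misbehaving job source or a
single misbehaving oracle cannot unilaterally move the accepted result — which
is the point of the queue's consensus design.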
+ +#### How to Use Switchboard Oracles + +To incorporate offchain data into a Solana program using Switchboard oracles, +the first step is to find a data feed that suits your needs. Switchboard offers +many [publicly available feeds](https://app.switchboard.xyz/solana/mainnet) for +various data types. When selecting a feed, you should consider the following +factors: + +- **Accuracy/Reliability**: Evaluate how precise the data needs to be for your + application. +- **Data Source**: Choose a feed based on where the data is sourced from. +- **Update Cadence**: Understand how frequently the feed is updated to ensure it + meets your use case. + +When consuming public feeds, you won't have control over these aspects, so it's +important to choose carefully based on your requirements. + +For example, Switchboard offers a +[BTC/USD feed](https://app.switchboard.xyz/solana/mainnet/feed/8SXvChNYFhRq4EZuZvnhjrB3jJRQCv4k3P4W6hesH3Ee), +which provides the current Bitcoin price in USD. This feed is available on both +Solana devnet and mainnet with the following public key: +`8SXvChNYFhRq4EZuZvnhjrB3jJRQCv4k3P4W6hesH3Ee`. + +Here's a snapshot of what the onchain data for a Switchboard feed account looks +like: ```rust -// from the switchboard solana program -// https://github.com/switchboard-xyz/sbv2-solana/blob/0b5e0911a1851f9ca37042e6ff88db4cd840067b/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L60 +// From the switchboard solana program +// https://github.com/switchboard-xyz/solana-sdk/blob/main/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L60 pub struct AggregatorAccountData { /// Name of the aggregator to store onchain. pub name: [u8; 32], ... - ... + ... /// Pubkey of the queue the aggregator belongs to. pub queue_pubkey: Pubkey, ... @@ -274,19 +273,19 @@ pub struct AggregatorAccountData { /// Change percentage required between a previous round and the current round. If variance percentage is not met, reject new oracle responses. pub variance_threshold: SwitchboardDecimal, ... - /// Latest confirmed update request result that has been accepted as valid. This is where you will find the data you are requesting in latest_confirmed_round.result - pub latest_confirmed_round: AggregatorRound, - ... + /// Latest confirmed update request result that has been accepted as valid. This is where you will find the data you are requesting in latest_confirmed_round.result + pub latest_confirmed_round: AggregatorRound, + ... /// The previous confirmed round result. pub previous_confirmed_round_result: SwitchboardDecimal, /// The slot when the previous confirmed round was opened. pub previous_confirmed_round_slot: u64, - ... + ... } ``` You can view the full code for this data structure in the -[Switchboard program here](https://github.com/switchboard-xyz/sbv2-solana/blob/0b5e0911a1851f9ca37042e6ff88db4cd840067b/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L60). +[Switchboard program here](https://github.com/switchboard-xyz/solana-sdk/blob/main/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L60). Some relevant fields and configurations on the `AggregatorAccountData` type are: @@ -303,85 +302,82 @@ Some relevant fields and configurations on the `AggregatorAccountData` type are: - `min_update_delay_seconds` - Minimum number of seconds required between aggregator rounds. -The first three configs listed above are directly related to the accuracy and -reliability of a data feed. 
- -The `min_job_results` field represents the minimum amount of successful -responses from data sources an oracle must receive before it can submit its -response onchain. Meaning if `min_job_results` is three, each oracle has to pull -from three job sources. The higher this number, the more reliable and accurate -the data on the feed will be. This also limits the impact that a single data -source can have on the result. - -The `min_oracle_results` field is the minimum amount of oracle responses -required for a round to be successful. Remember, each oracle in a queue pulls -data from each source defined as a job. The oracle then takes the weighted -median of the responses from the sources and submits that median onchain. The -program then waits for `min_oracle_results` of weighted medians and takes the -median of that, which is the final result stored in the data feed account. - -The `min_update_delay_seconds` field is directly related to a feed’s update -cadence. `min_update_delay_seconds` must have passed between one round of -updates and the next one before the Switchboard program will accept results. - -It can help to look at the jobs tab of a feed in Switchboard's explorer. For -example, you can look at the +The first three configurations listed above directly impact the accuracy and +reliability of a data feed: + +- The `min_job_results` field represents the minimum number of successful + responses an oracle must receive from data sources before it can submit its + response onchain. For example, if `min_job_results` is set to three, each + oracle must pull data from at least three job sources. The higher this number, + the more reliable and accurate the data will be, reducing the influence of any + single data source. + +- The `min_oracle_results` field is the minimum number of oracle responses + required for a round to be successful. Each oracle in a queue pulls data from + each source defined as a job, takes the weighted median of those responses, + and submits that median onchain. The program then waits for + `min_oracle_results` of these weighted medians and calculates the median of + those, which is the final result stored in the data feed account. + +- The `min_update_delay_seconds` field is related to the feed's update cadence. + This value must have passed between rounds of updates before the Switchboard + program will accept results. + +It can help to view the jobs tab for a feed in Switchboard's explorer. For +example, check out the [BTC_USD feed in the explorer](https://app.switchboard.xyz/solana/devnet/feed/8SXvChNYFhRq4EZuZvnhjrB3jJRQCv4k3P4W6hesH3Ee). -Each job listed defines the source the oracles will fetch data from and the -weighting of each source. You can view the actual API endpoints that provide the -data for this specific feed. When determining what data feed to use in your -program, things like this are very important to consider. +Each job defines the data sources the oracles fetch from and the weight assigned +to each source. You can view the actual API endpoints that provide the data for +this feed. When selecting a feed for your program, these considerations are key. -Below is a two of the jobs related to the BTC_USD feed. It shows two sources of -data: [MEXC](https://www.mexc.com/) and [Coinbase](https://www.coinbase.com/). +Below are two of the jobs related to the BTC_USD feed, showing data from +[MEXC](https://www.mexc.com/) and [Coinbase](https://www.coinbase.com/). 
![Oracle Jobs](/public/assets/courses/unboxed/oracle-jobs.png) -Once you’ve chosen a feed to use, you can start reading the data in that feed. -You do this by simply deserializing and reading the state stored in the account. -The easiest way to do that is by making use of the `AggregatorAccountData` -struct we defined above from the `switchboard_v2` crate in your program. +Once you've chosen a feed, you can start reading the data from that feed by +deserializing and reading the state stored in the account. The easiest way to do +this is by using the `AggregatorAccountData` struct from the +`switchboard_solana` crate in your program. ```rust -// import anchor and switchboard crates -use { - anchor_lang::prelude::*, - switchboard_v2::AggregatorAccountData, -}; +// Import anchor and switchboard crates +use {anchor_lang::prelude::*, switchboard_solana::AggregatorAccountData}; ... #[derive(Accounts)] pub struct ConsumeDataAccounts<'info> { - // pass in data feed account and deserialize to AggregatorAccountData - pub feed_aggregator: AccountLoader<'info, AggregatorAccountData>, - ... + // Pass in data feed account and deserialize to AggregatorAccountData + pub feed_aggregator: AccountLoader<'info, AggregatorAccountData>, + ... } ``` -Notice that we use the `AccountLoader` type here instead of the normal `Account` -type to deserialize the aggregator account. Due to the size of -`AggregatorAccountData`, the account uses what's called zero copy. This in -combination with `AccountLoader` prevents the account from being loaded into -memory and gives our program direct access to the data instead. When using -`AccountLoader` we can access the data stored in the account in one of three -ways: +Using zero-copy deserialization with `AccountLoader` allows the program to +access specific data within large accounts like `AggregatorAccountData` without +loading the entire account into memory. This improves memory efficiency and +performance by only accessing the necessary parts of the account. It avoids +deserializing the whole account, saving both time and resources. This is +especially useful for large account structures. + +When using `AccountLoader`, you can access the data in three ways: -- `load_init` after initializing an account (this will ignore the missing - account discriminator that gets added only after the user’s instruction code) -- `load` when the account is not mutable -- `load_mut` when the account is mutable +- `load_init`: Used after initializing an account (this ignores the missing + account discriminator that gets added only after the user's instruction code) +- `load`: Used when the account is immutable +- `load_mut`: Used when the account is mutable -If you’d like to learn more, check out the -[Advance Program Architecture lesson](/developers/courses/program-optimization/program-architecture) -where we touch on `Zero-Copy` and `AccountLoader`. +To dive deeper, check out the +[Advanced Program Architecture lesson](/content/courses/program-optimization/program-architecture.md), +where we discuss `Zero-Copy` and `AccountLoader` in more detail. -With the aggregator account passed into your program, you can use it to get the -latest oracle result. Specifically, you can use the type's `get_result()` -method: +With the aggregator account passed into your program, you can use it to retrieve +the latest oracle result. Specifically, you can use the `get_result()` method on +the aggregator type: ```rust -// inside an Anchor program +// Inside an Anchor program ... 
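// `load()` performs the zero-copy borrow: it verifies the account
// discriminator and returns a read-only reference into the account's data.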
let feed = &ctx.accounts.feed_aggregator.load()?;

@@ -394,8 +390,8 @@ than fetching the data with `latest_confirmed_round.result` because Switchboard
has implemented some nifty safety checks.

```rust
-// from switchboard program
-// https://github.com/switchboard-xyz/sbv2-solana/blob/0b5e0911a1851f9ca37042e6ff88db4cd840067b/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L195
+// From switchboard program
+// https://github.com/switchboard-xyz/solana-sdk/blob/main/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L206

pub fn get_result(&self) -> anchor_lang::Result<SwitchboardDecimal> {
    if self.resolution_mode == AggregatorResolutionMode::ModeSlidingResolution {

@@ -414,27 +410,35 @@ You can also view the current value stored in an `AggregatorAccountData` account
client-side in Typescript.

```typescript
-import { AggregatorAccount, SwitchboardProgram} from '@switchboard-xyz/solana.js'
-
+import { AggregatorAccount, SwitchboardProgram } from "@switchboard-xyz/solana.js";
+import { PublicKey, SystemProgram, Connection } from "@solana/web3.js";
+import { Big } from "@switchboard-xyz/common";
...
...

-// create keypair for test user
-let user = new anchor.web3.Keypair()
-
-// fetch switchboard devnet program object
+const DEVNET_RPC_URL = "https://api.devnet.solana.com";
+const SOL_USD_SWITCHBOARD_FEED = new PublicKey(
+  "GvDMxPzN1sCj7L26YDK2HnMRXEQmQ2aemov8YBtPS7vR",
+);
+// Create keypair for test user
+let user = new anchor.web3.Keypair();
+
+// Fetch switchboard devnet program object
switchboardProgram = await SwitchboardProgram.load(
-  "devnet",
-  new anchor.web3.Connection("https://api.devnet.solana.com"),
-  user
-)
+  new Connection(DEVNET_RPC_URL),
+  user,
+);

-// pass switchboard program object and feed pubkey into AggregatorAccount constructor
-aggregatorAccount = new AggregatorAccount(switchboardProgram, solUsedSwitchboardFeed)
+// Pass switchboard program object and feed pubkey into AggregatorAccount constructor
+aggregatorAccount = new AggregatorAccount(
+  switchboardProgram,
+  SOL_USD_SWITCHBOARD_FEED,
+);

-// fetch latest SOL price
-const solPrice: Big | null = await aggregatorAccount.fetchLatestValue()
+// Fetch latest SOL price
+const solPrice: Big | null = await aggregatorAccount.fetchLatestValue();
if (solPrice === null) {
-  throw new Error('Aggregator holds no value')
+  throw new Error("Aggregator holds no value");
}
```

@@ -458,12 +462,12 @@ you can see its relevant configurations.

![Oracle Configs](/public/assets/courses/unboxed/oracle-configs.png)

-The BTC_USD feed has Min Update Delay = 6 seconds. This means that the price of
-BTC is only updated at a minimum of every 6 seconds on this feed. This is
+The BTC_USD feed has a Min Update Delay = 6 seconds. This means that the price
+of BTC is only updated at a minimum of every 6 seconds on this feed. This is
probably fine for most use cases, but if you wanted to use this feed for
-something latency sensitive, it’s probably not a good choice.
+something latency sensitive, it's probably not a good choice.

-It’s also worthwhile to audit a feed's sources in the Jobs section of the oracle
+It's also worthwhile to audit a feed's sources in the Jobs section of the oracle
explorer. Since the value that is persisted onchain is the weighted median
result the oracles pull from each source, the sources directly influence what is
stored in the feed.
Check for shady links and potentially run the APIs yourself @@ -473,21 +477,21 @@ Once you have found a feed that fits your needs, you still need to make sure you're using the feed appropriately. For example, you should still implement necessary security checks on the account passed into your instruction. Any account can be passed into your program's instructions, so you should verify -it’s the account you expect it to be. +it's the account you expect it to be. In Anchor, if you deserialize the account to the `AggregatorAccountData` type -from the `switchboard_v2` crate, Anchor checks that the account is owned by the -Switchboard program. If your program expects that only a specific data feed will -be passed in the instruction, then you can also verify that the public key of -the account passed in matches what it should be. One way to do this is to hard -code the address in the program somewhere and use account constraints to verify -the address passed in matches what is expected. +from the `switchboard_solana` crate, Anchor checks that the account is owned by +the Switchboard program. If your program expects that only a specific data feed +will be passed in the instruction, then you can also verify that the public key +of the account passed in matches what it should be. One way to do this is to +hard code the address in the program somewhere and use account constraints to +verify the address passed in matches what is expected. ```rust use { - anchor_lang::prelude::*, - solana_program::{pubkey, pubkey::Pubkey}, - switchboard_v2::{AggregatorAccountData}, + anchor_lang::prelude::*, + solana_program::{pubkey, pubkey::Pubkey}, + switchboard_solana::AggregatorAccountData, }; pub static BTC_USDC_FEED: Pubkey = pubkey!("8SXvChNYFhRq4EZuZvnhjrB3jJRQCv4k3P4W6hesH3Ee"); @@ -497,11 +501,11 @@ pub static BTC_USDC_FEED: Pubkey = pubkey!("8SXvChNYFhRq4EZuZvnhjrB3jJRQCv4k3P4W #[derive(Accounts)] pub struct TestInstruction<'info> { - // Switchboard SOL feed aggregator - #[account( - address = BTC_USDC_FEED - )] - pub feed_aggregator: AccountLoader<'info, AggregatorAccountData>, + // Switchboard SOL feed aggregator + #[account( + address = BTC_USDC_FEED + )] + pub feed_aggregator: AccountLoader<'info, AggregatorAccountData>, } ``` @@ -511,9 +515,9 @@ common things to check for are data staleness and the confidence interval. Each data feed updates the current value stored in it when triggered by the oracles. This means the updates are dependent on the oracles in the queue that -it’s assigned to. Depending on what you intend to use the data feed for, it may +it's assigned to. Depending on what you intend to use the data feed for, it may be beneficial to verify that the value stored in the account was updated -recently. For example, a lending protocol that needs to determine if a loan’s +recently. For example, a lending protocol that needs to determine if a loan's collateral has fallen below a certain level may need the data to be no older than a few seconds. You can have your code check the timestamp of the most recent update stored in the aggregator account. The following code snippet @@ -524,23 +528,23 @@ than 30 seconds ago. use { anchor_lang::prelude::*, anchor_lang::solana_program::clock, - switchboard_v2::{AggregatorAccountData, SwitchboardDecimal}, + switchboard_solana::{AggregatorAccountData, SwitchboardDecimal}, }; ... ... 
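// Borrow the feed via zero-copy, then compare the cluster's current unix
// timestamp against the timestamp at which the latest round was opened.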
let feed = &ctx.accounts.feed_aggregator.load()?;

-if (clock::Clock::get().unwrap().unix_timestamp - feed.latest_confirmed_round.round_open_timestamp) <= 30{
+if (clock::Clock::get().unwrap().unix_timestamp - feed.latest_confirmed_round.round_open_timestamp) <= 30 {
      valid_transfer = true;
-  }
+}
```

The `latest_confirmed_round` field on the `AggregatorAccountData` struct is of
type `AggregatorRound` defined as:

```rust
-// https://github.com/switchboard-xyz/sbv2-solana/blob/0b5e0911a1851f9ca37042e6ff88db4cd840067b/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L17
+// https://github.com/switchboard-xyz/solana-sdk/blob/main/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L17

pub struct AggregatorRound {
    /// Maintains the number of successful responses received from nodes.

@@ -593,7 +597,7 @@ the results received from the oracle is greater than the given
`max_confidence_interval`, it returns an error.

```rust
-// https://github.com/switchboard-xyz/sbv2-solana/blob/0b5e0911a1851f9ca37042e6ff88db4cd840067b/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L228
+// https://github.com/switchboard-xyz/solana-sdk/blob/main/rust/switchboard-solana/src/oracle_program/accounts/aggregator.rs#L228

pub fn check_confidence_interval(
    &self,

@@ -613,7 +617,7 @@ use {
    crate::{errors::*},
    anchor_lang::prelude::*,
    std::convert::TryInto,
-    switchboard_v2::{AggregatorAccountData, SwitchboardDecimal},
+    switchboard_solana::{AggregatorAccountData, SwitchboardDecimal},
};

...

@@ -621,131 +625,136 @@ use {

let feed = &ctx.accounts.feed_aggregator.load()?;

-// check feed does not exceed max_confidence_interval
+// Check feed does not exceed max_confidence_interval
feed.check_confidence_interval(SwitchboardDecimal::from_f64(max_confidence_interval))
    .map_err(|_| error!(ErrorCode::ConfidenceIntervalExceeded))?;
```

-Lastly, it’s important to plan for worst-case scenarios in your programs. Plan
+Lastly, it's important to plan for worst-case scenarios in your programs. Plan
for feeds going stale and plan for feed accounts closing.

### Conclusion

If you want functional programs that can perform actions based on real-world
-data, you’re going to have to use oracles. Fortunately, there are some
-trustworthy oracle networks, like Switchboard, that make using oracles easier
-than they would otherwise be. However, make sure to do your due diligence on the
-oracles you use. You are ultimately responsible for your program's behavior!
+data, you'll need to use oracles. Fortunately, there are reliable oracle
+networks, such as Switchboard, that simplify the process. However, it's crucial
+to perform thorough due diligence on any oracle network you choose, as you are
+ultimately responsible for your program's behavior.

## Lab

-Let's practice using oracles! We'll be building a "Michael Burry Escrow" program
-that locks SOL in an escrow account until SOL is above a certain USD value. This
-is named after the investor
-[Michael Burry](https://en.wikipedia.org/wiki/Michael_Burry) who's famous for
-predicting the 2008 housing market crash.
+Let's practice working with oracles! We'll be building a "Michael Burry Escrow"
+program, which locks SOL in an escrow account until the price of SOL surpasses
+a specified USD threshold. The program is named after
+[Michael Burry](https://en.wikipedia.org/wiki/Michael_Burry), the investor known
+for predicting the 2008 housing market crash.
-We will be using the devnet -[SOL_USD](https://app.switchboard.xyz/solana/devnet/feed/GvDMxPzN1sCj7L26YDK2HnMRXEQmQ2aemov8YBtPS7vR) -oracle from switchboard. The program will have two main instructions: +For this, we'll use the +[SOL_USD oracle on devnet](https://app.switchboard.xyz/solana/devnet/feed/GvDMxPzN1sCj7L26YDK2HnMRXEQmQ2aemov8YBtPS7vR) +from Switchboard. The program will have two key instructions: -- Deposit - Lock up the SOL and set a USD price to unlock it at. -- Withdraw - Check the USD price and withdraw the SOL if the price is met. +- **Deposit**: Lock up the SOL and set a USD price target for unlocking. +- **Withdraw**: Check the USD price, and if the target is met, withdraw the SOL. -#### 1. Program Setup +### 1. Program Setup -To get started, let’s create the program with +To get started, let's create the program with ```zsh -anchor init burry-escrow +anchor init burry-escrow --template=multiple ``` -Next, replace the program ID in `lib.rs` and `Anchor.toml` with the program ID -shown when you run `anchor keys list`. +Next, replace the program ID in `lib.rs` and `Anchor.toml` by running command +`anchor keys sync`. -Next, add the following to the bottom of your Anchor.toml file. This will tell +Next, add the following to the bottom of your `Anchor.toml` file. This will tell Anchor how to configure our local testing environment. This will allow us to test our program locally without having to deploy and send transactions to devnet. At the bottom of `Anchor.toml`: -```toml -[test.validator] -url="https://api.devnet.solana.com" - +```toml filename="Anchor.toml" [test] -startup_wait = 10000 +startup_wait = 5000 +shutdown_wait = 2000 +upgradeable = false -[[test.validator.clone]] # sbv2 devnet programID +[test.validator] +bind_address = "0.0.0.0" +url = "https://api.devnet.solana.com" +ledger = ".anchor/test-ledger" +rpc_port = 8899 + +[[test.validator.clone]] # switchboard-solana devnet programID address = "SW1TCH7qEPTdLsDHRgPuMQjbQxKdH2aBStViMFnt64f" -[[test.validator.clone]] # sbv2 devnet IDL +[[test.validator.clone]] # switchboard-solana devnet IDL address = "Fi8vncGpNKbq62gPo56G4toCehWNy77GgqGkTaAF5Lkk" -[[test.validator.clone]] # sbv2 SOL/USD Feed -address="GvDMxPzN1sCj7L26YDK2HnMRXEQmQ2aemov8YBtPS7vR" +[[test.validator.clone]] # switchboard-solana SOL/USD Feed +address = "GvDMxPzN1sCj7L26YDK2HnMRXEQmQ2aemov8YBtPS7vR" ``` -Additionally, we want to import the `switchboard-v2` crate in our `Cargo.toml` -file. Make sure your dependencies look as follows: +Additionally, we want to import the `switchboard-solana` crate in our +`Cargo.toml` file. Make sure your dependencies look as follows: -```toml +```toml filename="Cargo.toml" [dependencies] -anchor-lang = "0.28.0" -switchboard-v2 = "0.4.0" +anchor-lang = "0.30.1" +switchboard-solana = "0.30.4" ``` -Before we get started with the logic, let’s go over the structure of our -program. With small programs, it’s very easy to add all of the smart contract -code to a single `lib.rs` file and call it a day. To keep it more organized -though, it’s helpful to break it up across different files. Our program will -have the following files within the `programs/src` directory: - -`/instructions/deposit.rs` - -`/instructions/withdraw.rs` - -`/instructions/mod.rs` - -`errors.rs` - -`state.rs` - -`lib.rs` +Before diving into the program logic, let's review the structure of our smart +contract. For smaller programs, it's tempting to put all the code in a single +`lib.rs` file. 
However, organizing the code across different files helps
+maintain clarity and scalability. Our program will be structured as follows
+within the `programs/burry-escrow` directory:
+
+```sh
+└── burry-escrow
+    ├── Cargo.toml
+    ├── Xargo.toml
+    └── src
+        ├── constants.rs
+        ├── error.rs
+        ├── instructions
+        │   ├── deposit.rs
+        │   ├── mod.rs
+        │   └── withdraw.rs
+        ├── lib.rs
+        └── state.rs
+```

-The `lib.rs` file will still serve as the entry point to our program, but the
-logic for each instruction will be contained in their own separate file. Go
-ahead and create the program architecture described above and we’ll get started.
+In this structure, `lib.rs` serves as the entry point to the program, while the
+logic for each instruction handler is stored in separate files under the
+`instructions` directory. Go ahead and set up the architecture as shown above,
+and we'll proceed from there.

-#### 2. `lib.rs`
+### 2. Setup lib.rs

-Before we write any logic, we are going to set up all of our boilerplate
-information. Starting with `lib.rs`. Our actual logic will live in the
+Before writing the logic, we'll set up the necessary boilerplate in `lib.rs`.
+This file acts as the entry point for the program, defining the API endpoints
+that all transactions will pass through. The actual logic will be housed in the
`/instructions` directory.

-The `lib.rs` file will serve as the entrypoint to our program. It will define
-the API endpoints that all transactions must go through.
-
-```rust
+```rust filename="lib.rs"
use anchor_lang::prelude::*;
-use instructions::deposit::*;
-use instructions::withdraw::*;
-use state::*;
+use instructions::{deposit::*, withdraw::*};

+pub mod errors;
pub mod instructions;
pub mod state;
-pub mod errors;
+pub mod constants;

declare_id!("YOUR_PROGRAM_KEY_HERE");

#[program]
-mod burry_oracle_program {
-
+pub mod burry_escrow {
    use super::*;

-    pub fn deposit(ctx: Context<Deposit>, escrow_amt: u64, unlock_price: u64) -> Result<()> {
-        deposit_handler(ctx, escrow_amt, unlock_price)
+    pub fn deposit(ctx: Context<Deposit>, escrow_amount: u64, unlock_price: f64) -> Result<()> {
+        deposit_handler(ctx, escrow_amount, unlock_price)
    }

    pub fn withdraw(ctx: Context<Withdraw>) -> Result<()> {
@@ -754,39 +763,44 @@ mod burry_oracle_program {
    }
}
```

-#### 3. `state.rs`
-
-Next, let's define our data account for this program: `EscrowState`. Our data
-account will store two pieces of info:
+### 3. Define state.rs

-- `unlock_price` - The price of SOL in USD at which point you can withdraw; you
-  can hard-code it to whatever you want (e.g. $21.53)
-- `escrow_amount` - Keeps track of how many lamports are stored in the escrow
-  account
+Next, let's define our program's data account: `Escrow`. This account will store
+two key pieces of information:

-We will also be defining our PDA seed of `"MICHAEL BURRY"` and our hardcoded
-SOL_USD oracle pubkey `SOL_USDC_FEED`.
+- `unlock_price`: The price of SOL in USD at which withdrawals are allowed
+  (e.g., hard-coded to $21.53).
+- `escrow_amount`: Tracks the amount of lamports held in the escrow account.

-```rust
-// in state.rs
+```rust filename="state.rs"
use anchor_lang::prelude::*;

-pub const ESCROW_SEED: &[u8] = b"MICHAEL BURRY";
-pub const SOL_USDC_FEED: &str = "GvDMxPzN1sCj7L26YDK2HnMRXEQmQ2aemov8YBtPS7vR";
-
#[account]
-pub struct EscrowState {
+#[derive(InitSpace)]
+pub struct Escrow {
    pub unlock_price: f64,
    pub escrow_amount: u64,
}
```

-#### 4. Errors
+### 4. Constants

-Let’s define the custom errors we’ll use throughout the program.
Inside the -`errors.rs` file, paste the following: +Next, we'll define `DISCRIMINATOR_SIZE` as 8, the PDA seed as `"MICHAEL BURRY"`, +and hard-code the SOL/USD oracle pubkey as `SOL_USDC_FEED` in the `constants.rs` +file. -```rust +```rust filename="constants.rs" +pub const DISCRIMINATOR_SIZE: usize = 8; +pub const ESCROW_SEED: &[u8] = b"MICHAEL BURRY"; +pub const SOL_USDC_FEED: &str = "GvDMxPzN1sCj7L26YDK2HnMRXEQmQ2aemov8YBtPS7vR"; +``` + +### 5. Errors + +Next, let's define the custom errors we'll use throughout the program. Inside +the `error.rs` file, paste the following: + +```rust filename="error.rs" use anchor_lang::prelude::*; #[error_code] @@ -799,203 +813,201 @@ pub enum EscrowErrorCode { #[msg("Switchboard feed exceeded provided confidence interval")] ConfidenceIntervalExceeded, #[msg("Current SOL price is not above Escrow unlock price.")] - SolPriceAboveUnlockPrice, + SolPriceBelowUnlockPrice, } ``` -#### 5. `mod.rs` +### 6. Setup mod.rs Let's set up our `instructions/mod.rs` file. -```rust -// inside mod.rs +```rust filename="mod.rs" pub mod deposit; pub mod withdraw; ``` -#### 6. **Deposit** +### 7. Deposit + +Now that we have all of the boilerplate out of the way, let's move on to our +`Deposit` instruction. This will live in the `/src/instructions/deposit.rs` +file. -Now that we have all of the boilerplate out of the way, lets move onto our -Deposit instruction. This will live in the `/src/instructions/deposit.rs` file. -When a user deposits, a PDA should be created with the “MICHAEL BURRY” string -and the user’s pubkey as seeds. This inherently means a user can only open one +When a user deposits, a PDA should be created with the "MICHAEL BURRY" string +and the user's pubkey as seeds. This ensures that a user can only open one escrow account at a time. The instruction should initialize an account at this -PDA and send the amount of SOL that the user wants to lock up to it. The user -will need to be a signer. +PDA and transfer the SOL that the user wants to lock up to it. The user will +need to be a signer. -Let’s build the Deposit Context struct first. To do that, we need to think about -what accounts will be necessary for this instruction. We start with the +Let's first build the `Deposit` context struct. To do this, we need to think +about what accounts will be necessary for this instruction. We start with the following: -```rust -//inside deposit.rs +```rust filename="deposit.rs" +use crate::constants::*; use crate::state::*; use anchor_lang::prelude::*; -use anchor_lang::solana_program::{ - system_instruction::transfer, - program::invoke -}; +use anchor_lang::solana_program::{program::invoke, system_instruction::transfer}; #[derive(Accounts)] pub struct Deposit<'info> { - // user account #[account(mut)] pub user: Signer<'info>, + #[account( - init, - seeds = [ESCROW_SEED, user.key().as_ref()], - bump, - payer = user, - space = std::mem::size_of::() + 8 + init, + seeds = [ESCROW_SEED, user.key().as_ref()], + bump, + payer = user, + space = DISCRIMINATOR_SIZE + Escrow::INIT_SPACE )] - pub escrow_account: Account<'info, EscrowState>, - // system program + pub escrow_account: Account<'info, Escrow>, + pub system_program: Program<'info, System>, } ``` Notice the constraints we added to the accounts: -- Because we'll be transferring SOL from the User account to the `escrow_state` +- Because we'll be transferring SOL from the User account to the `escrow` account, they both need to be mutable. 
- We know the `escrow_account` is supposed to be a PDA derived with the "MICHAEL
-  BURRY” string and the user's pubkey. We can use Anchor account constraints to
+  BURRY" string and the user's pubkey. We can use Anchor account constraints to
  guarantee that the address passed in actually meets that requirement.
- We also know that we have to initialize an account at this PDA to store some
  state for the program. We use the `init` constraint here.

-Let's move onto the actual logic. All we need to do is to initialize the state
-of the `escrow_state` account and transfer the SOL. We expect the user to pass
-in the amount of SOL they want to lock up in escrow and the price to unlock it
-at. We will store these values in the `escrow_state` account.
+Let's move on to the actual logic. All we need to do is to initialize the state
+of the `escrow` account and transfer the SOL. We expect the user to pass in the
+amount of SOL they want to lock up in escrow and the price to unlock it at. We
+will store these values in the `escrow` account.

After that, the method should execute the transfer. This program will be locking
-up native SOL. Because of this, we don’t need to use token accounts or the
-Solana token program. We’ll have to use the `system_program` to transfer the
+up native SOL. Because of this, we don't need to use token accounts or the
+Solana token program. We'll have to use the `system_program` to transfer the
lamports the user wants to lock up in escrow and invoke the transfer
instruction.

-```rust
-pub fn deposit_handler(ctx: Context<Deposit>, escrow_amt: u64, unlock_price: u64) -> Result<()> {
-    msg!("Depositing funds in escrow...");
+```rust filename="deposit.rs"
+pub fn deposit_handler(ctx: Context<Deposit>, escrow_amount: u64, unlock_price: f64) -> Result<()> {
+    msg!("Depositing funds in escrow...");

-    let escrow_state = &mut ctx.accounts.escrow_account;
-    escrow_state.unlock_price = unlock_price;
-    escrow_state.escrow_amount = escrow_amount;
+    let escrow = &mut ctx.accounts.escrow_account;
+    escrow.unlock_price = unlock_price;
+    escrow.escrow_amount = escrow_amount;

-    let transfer_ix = transfer(
-        &ctx.accounts.user.key(),
-        &escrow_state.key(),
-        escrow_amount
-    );
+    let transfer_instruction =
+        transfer(&ctx.accounts.user.key(), &escrow.key(), escrow_amount);

    invoke(
-        &transfer_ix,
+        &transfer_instruction,
        &[
            ctx.accounts.user.to_account_info(),
            ctx.accounts.escrow_account.to_account_info(),
-            ctx.accounts.system_program.to_account_info()
-        ]
+            ctx.accounts.system_program.to_account_info(),
+        ],
    )?;

-    msg!("Transfer complete. Escrow will unlock SOL at {}", &ctx.accounts.escrow_account.unlock_price);
+    msg!(
+        "Transfer complete. Escrow will unlock SOL at {}",
+        &ctx.accounts.escrow_account.unlock_price
+    );
+
+    Ok(())
}
```

-That's is the gist of the deposit instruction! The final result of the
+That's the gist of the deposit instruction handler!
The final result of the `deposit.rs` file should look as follows: -```rust +```rust filename="deposit.rs" +use crate::constants::*; use crate::state::*; use anchor_lang::prelude::*; -use anchor_lang::solana_program::{ - system_instruction::transfer, - program::invoke -}; +use anchor_lang::solana_program::{program::invoke, system_instruction::transfer}; pub fn deposit_handler(ctx: Context, escrow_amount: u64, unlock_price: f64) -> Result<()> { msg!("Depositing funds in escrow..."); - let escrow_state = &mut ctx.accounts.escrow_account; - escrow_state.unlock_price = unlock_price; - escrow_state.escrow_amount = escrow_amount; + let escrow = &mut ctx.accounts.escrow_account; + escrow.unlock_price = unlock_price; + escrow.escrow_amount = escrow_amount; - let transfer_ix = transfer( - &ctx.accounts.user.key(), - &escrow_state.key(), - escrow_amount - ); + let transfer_instruction = + transfer(&ctx.accounts.user.key(), &escrow.key(), escrow_amount); invoke( - &transfer_ix, + &transfer_instruction, &[ ctx.accounts.user.to_account_info(), ctx.accounts.escrow_account.to_account_info(), - ctx.accounts.system_program.to_account_info() - ] + ctx.accounts.system_program.to_account_info(), + ], )?; - msg!("Transfer complete. Escrow will unlock SOL at {}", &ctx.accounts.escrow_account.unlock_price); + msg!( + "Transfer complete. Escrow will unlock SOL at {}", + &ctx.accounts.escrow_account.unlock_price + ); Ok(()) } #[derive(Accounts)] pub struct Deposit<'info> { - // user account #[account(mut)] pub user: Signer<'info>, - // account to store SOL in escrow + #[account( init, seeds = [ESCROW_SEED, user.key().as_ref()], bump, payer = user, - space = std::mem::size_of::() + 8 + space = DISCRIMINATOR_SIZE + Escrow::INIT_SPACE )] - pub escrow_account: Account<'info, EscrowState>, + pub escrow_account: Account<'info, Escrow>, pub system_program: Program<'info, System>, } ``` -**Withdraw** +### 8. Withdraw -The withdraw instruction will require the same three accounts as the deposit -instruction plus the SOL_USDC Switchboard feed account. This code will go in the -`withdraw.rs` file. +The `Withdraw` instruction will require the same three accounts as the `Deposit` +instruction, plus the `SOL_USDC` Switchboard feed account. This code will be +placed in the `withdraw.rs` file. -```rust -use crate::state::*; +```rust filename="withdraw.rs" +use crate::constants::*; use crate::errors::*; -use std::str::FromStr; +use crate::state::*; use anchor_lang::prelude::*; -use switchboard_v2::AggregatorAccountData; use anchor_lang::solana_program::clock::Clock; +use std::str::FromStr; +use switchboard_solana::AggregatorAccountData; #[derive(Accounts)] pub struct Withdraw<'info> { - // user account #[account(mut)] pub user: Signer<'info>, - // escrow account + #[account( mut, seeds = [ESCROW_SEED, user.key().as_ref()], bump, close = user )] - pub escrow_account: Account<'info, EscrowState>, - // Switchboard SOL feed aggregator + pub escrow_account: Account<'info, Escrow>, + #[account( address = Pubkey::from_str(SOL_USDC_FEED).unwrap() )] pub feed_aggregator: AccountLoader<'info, AggregatorAccountData>, + pub system_program: Program<'info, System>, } ``` -Notice we’re using the close constraint because once the transaction completes, +Notice we're using the close constraint because once the transaction completes, we want to close the `escrow_account`. The SOL used as rent in the account will be transferred to the user account. 
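
As an aside, the `Withdraw` context above checks the feed account with
`Pubkey::from_str(SOL_USDC_FEED).unwrap()`, which parses the address from a
string at runtime. The same constraint can instead be expressed with a
compile-time constant via the `pubkey!` macro, mirroring the `BTC_USDC_FEED`
example earlier in this lesson. Here is a sketch under that assumption (the
struct name is hypothetical; only the account constraint matters):

```rust
use anchor_lang::prelude::*;
use solana_program::{pubkey, pubkey::Pubkey};
use switchboard_solana::AggregatorAccountData;

// Same devnet SOL/USD feed the lab uses, declared as a constant so the
// address check needs no runtime string parsing.
pub static SOL_USDC_FEED_PUBKEY: Pubkey =
    pubkey!("GvDMxPzN1sCj7L26YDK2HnMRXEQmQ2aemov8YBtPS7vR");

#[derive(Accounts)]
pub struct CheckFeed<'info> {
    // Anchor rejects the transaction if any other account is passed in here.
    #[account(address = SOL_USDC_FEED_PUBKEY)]
    pub feed_aggregator: AccountLoader<'info, AggregatorAccountData>,
}
```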
@@ -1004,36 +1016,35 @@ actually the `usdc_sol` feed and not some other feed (we have the SOL_USDC_FEED
address hard coded). In addition, the AggregatorAccountData struct that we
deserialize comes from the Switchboard rust crate. It verifies that the given
account is owned by the switchboard program and allows us to easily look at its
-values. You’ll notice it’s wrapped in a `AccountLoader`. This is because the
+values. You'll notice it's wrapped in an `AccountLoader`. This is because the
feed is actually a fairly large account and it needs to be zero copied.

-Now let's implement the withdraw instruction's logic. First, we check if the
-feed is stale. Then we fetch the current price of SOL stored in the
+Now let's implement the withdraw instruction handler's logic. First, we check if
+the feed is stale. Then we fetch the current price of SOL stored in the
`feed_aggregator` account. Lastly, we want to check that the current price is
above the escrow `unlock_price`. If it is, then we transfer the SOL from the
-escrow account back to the user and close the account. If it isn’t, then the
-instruction should finish and return an error.
+escrow account back to the user and close the account. If it isn't, then the
+instruction handler should finish and return an error.

-```rust
-pub fn withdraw_handler(ctx: Context<Withdraw>, params: WithdrawParams) -> Result<()> {
+```rust filename="withdraw.rs"
+pub fn withdraw_handler(ctx: Context<Withdraw>) -> Result<()> {
    let feed = &ctx.accounts.feed_aggregator.load()?;
-    let escrow_state = &ctx.accounts.escrow_account;
+    let escrow = &ctx.accounts.escrow_account;

-    // get result
-    let val: f64 = feed.get_result()?.try_into()?;
+    let current_sol_price: f64 = feed.get_result()?.try_into()?;

-    // check whether the feed has been updated in the last 300 seconds
+    // Check if the feed has been updated in the last 5 minutes (300 seconds)
    feed.check_staleness(Clock::get().unwrap().unix_timestamp, 300)
-    .map_err(|_| error!(EscrowErrorCode::StaleFeed))?;
+        .map_err(|_| error!(EscrowErrorCode::StaleFeed))?;

-    msg!("Current feed result is {}!", val);
-    msg!("Unlock price is {}", escrow_state.unlock_price);
+    msg!("Current SOL price is {}", current_sol_price);
+    msg!("Unlock price is {}", escrow.unlock_price);

-    if val < escrow_state.unlock_price as f64 {
-        return Err(EscrowErrorCode::SolPriceAboveUnlockPrice.into())
+    if current_sol_price < escrow.unlock_price {
+        return Err(EscrowErrorCode::SolPriceBelowUnlockPrice.into());
    }

-    ....
+    ....
}
```

@@ -1047,63 +1058,77 @@ following error.

'Transfer: `from` must not carry data'
```

-To account for this, we’ll use `try_borrow_mut_lamports()` on each account and
+To account for this, we'll use `try_borrow_mut_lamports()` on each account and
add/subtract the amount of lamports stored in each account.

-```rust
-// 'Transfer: `from` must not carry data'
-    **escrow_state.to_account_info().try_borrow_mut_lamports()? = escrow_state
-        .to_account_info()
-        .lamports()
-        .checked_sub(escrow_state.escrow_amount)
-        .ok_or(ProgramError::InvalidArgument)?;
-
-    **ctx.accounts.user.to_account_info().try_borrow_mut_lamports()? = ctx.accounts.user
-        .to_account_info()
-        .lamports()
-        .checked_add(escrow_state.escrow_amount)
-        .ok_or(ProgramError::InvalidArgument)?;
+```rust filename="withdraw.rs"
+// Transfer lamports from escrow to user
+**escrow.to_account_info().try_borrow_mut_lamports()?
= escrow +.to_account_info() +.lamports() +.checked_sub(escrow_lamports) +.ok_or(ProgramError::InsufficientFunds)?; + +**ctx +.accounts +.user +.to_account_info() +.try_borrow_mut_lamports()? = ctx +.accounts +.user +.to_account_info() +.lamports() +.checked_add(escrow_lamports) +.ok_or(ProgramError::InvalidArgument)?; ``` The final withdraw method in the `withdraw.rs` file should look like this: -```rust -use crate::state::*; +```rust filename="withdraw.rs" +use crate::constants::*; use crate::errors::*; -use std::str::FromStr; +use crate::state::*; use anchor_lang::prelude::*; -use switchboard_v2::AggregatorAccountData; use anchor_lang::solana_program::clock::Clock; +use std::str::FromStr; +use switchboard_solana::AggregatorAccountData; pub fn withdraw_handler(ctx: Context) -> Result<()> { let feed = &ctx.accounts.feed_aggregator.load()?; - let escrow_state = &ctx.accounts.escrow_account; + let escrow = &ctx.accounts.escrow_account; - // get result - let val: f64 = feed.get_result()?.try_into()?; + let current_sol_price: f64 = feed.get_result()?.try_into()?; - // check whether the feed has been updated in the last 300 seconds + // Check if the feed has been updated in the last 5 minutes (300 seconds) feed.check_staleness(Clock::get().unwrap().unix_timestamp, 300) - .map_err(|_| error!(EscrowErrorCode::StaleFeed))?; + .map_err(|_| error!(EscrowErrorCode::StaleFeed))?; - msg!("Current feed result is {}!", val); - msg!("Unlock price is {}", escrow_state.unlock_price); + msg!("Current SOL price is {}", current_sol_price); + msg!("Unlock price is {}", escrow.unlock_price); - if val < escrow_state.unlock_price as f64 { - return Err(EscrowErrorCode::SolPriceAboveUnlockPrice.into()) + if current_sol_price < escrow.unlock_price { + return Err(EscrowErrorCode::SolPriceBelowUnlockPrice.into()); } - // 'Transfer: `from` must not carry data' - **escrow_state.to_account_info().try_borrow_mut_lamports()? = escrow_state + let escrow_lamports = escrow.escrow_amount; + + // Transfer lamports from escrow to user + **escrow.to_account_info().try_borrow_mut_lamports()? = escrow .to_account_info() .lamports() - .checked_sub(escrow_state.escrow_amount) - .ok_or(ProgramError::InvalidArgument)?; + .checked_sub(escrow_lamports) + .ok_or(ProgramError::InsufficientFunds)?; - **ctx.accounts.user.to_account_info().try_borrow_mut_lamports()? = ctx.accounts.user + **ctx + .accounts + .user + .to_account_info() + .try_borrow_mut_lamports()? = ctx + .accounts + .user .to_account_info() .lamports() - .checked_add(escrow_state.escrow_amount) + .checked_add(escrow_lamports) .ok_or(ProgramError::InvalidArgument)?; Ok(()) @@ -1111,41 +1136,30 @@ pub fn withdraw_handler(ctx: Context) -> Result<()> { #[derive(Accounts)] pub struct Withdraw<'info> { - // user account #[account(mut)] pub user: Signer<'info>, - // escrow account + #[account( mut, seeds = [ESCROW_SEED, user.key().as_ref()], bump, close = user )] - pub escrow_account: Account<'info, EscrowState>, - // Switchboard SOL feed aggregator + pub escrow_account: Account<'info, Escrow>, + #[account( address = Pubkey::from_str(SOL_USDC_FEED).unwrap() )] pub feed_aggregator: AccountLoader<'info, AggregatorAccountData>, + pub system_program: Program<'info, System>, } ``` -And that’s it for the program! At this point, you should be able to run +And that's it for the program! At this point, you should be able to run `anchor build` without any errors. - - -If you see an error like the one presented below, you can safely ignore it. 
- -```bash -Compiling switchboard-v2 v0.4.0 -Error: Function _ZN86_$LT$switchboard_v2..aggregator..AggregatorAccountData$u20$as$u20$core..fmt..Debug$GT$3fmt17hea9f7644392c2647E Stack offset of 4128 exceeded max offset of 4096 by 32 bytes, please minimize large stack variables -``` - - - -#### 7. Testing +### 9. Testing Let's write some tests. We should have four of them: @@ -1161,11 +1175,10 @@ Note that there can only be one escrow per user, so the above order matters. We'll provide all the testing code in one snippet. Take a look through to make sure you understand it before running `anchor test`. -```typescript -// tests/burry-escrow.ts - +```typescript filename="burry-escrow.ts" +// Inside tests/burry-escrow.ts import * as anchor from "@coral-xyz/anchor"; -import { Program } from "@coral-xyz/anchor"; +import { Program, AnchorError } from "@coral-xyz/anchor"; import { BurryEscrow } from "../target/types/burry_escrow"; import { Big } from "@switchboard-xyz/common"; import { @@ -1173,229 +1186,227 @@ import { AnchorWallet, SwitchboardProgram, } from "@switchboard-xyz/solana.js"; +import { PublicKey, SystemProgram, Connection } from "@solana/web3.js"; import { assert } from "chai"; +import { confirmTransaction } from "@solana-developers/helpers"; -export const solUsedSwitchboardFeed = new anchor.web3.PublicKey( +const SOL_USD_SWITCHBOARD_FEED = new PublicKey( "GvDMxPzN1sCj7L26YDK2HnMRXEQmQ2aemov8YBtPS7vR", ); +const ESCROW_SEED = "MICHAEL BURRY"; +const DEVNET_RPC_URL = "https://api.devnet.solana.com"; +const CONFIRMATION_COMMITMENT = "confirmed"; +const PRICE_OFFSET = 10; +const ESCROW_AMOUNT = new anchor.BN(100); +const EXPECTED_ERROR_MESSAGE = + "Current SOL price is not above Escrow unlock price."; + +const provider = anchor.AnchorProvider.env(); +anchor.setProvider(provider); + +const program = anchor.workspace.BurryEscrow as Program; +const payer = (provider.wallet as AnchorWallet).payer; + describe("burry-escrow", () => { - // Configure the client to use the local cluster. 
- anchor.setProvider(anchor.AnchorProvider.env()); - const provider = anchor.AnchorProvider.env(); - const program = anchor.workspace.BurryEscrow as Program; - const payer = (provider.wallet as AnchorWallet).payer; - - it("Create Burry Escrow Below Price", async () => { - // fetch switchboard devnet program object - const switchboardProgram = await SwitchboardProgram.load( - "devnet", - new anchor.web3.Connection("https://api.devnet.solana.com"), + let switchboardProgram: SwitchboardProgram; + let aggregatorAccount: AggregatorAccount; + + before(async () => { + switchboardProgram = await SwitchboardProgram.load( + new Connection(DEVNET_RPC_URL), payer, ); - const aggregatorAccount = new AggregatorAccount( + aggregatorAccount = new AggregatorAccount( switchboardProgram, - solUsedSwitchboardFeed, + SOL_USD_SWITCHBOARD_FEED, ); + }); - // derive escrow state account - const [escrowState] = await anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("MICHAEL BURRY"), payer.publicKey.toBuffer()], + const createAndVerifyEscrow = async (unlockPrice: number) => { + const [escrow] = PublicKey.findProgramAddressSync( + [Buffer.from(ESCROW_SEED), payer.publicKey.toBuffer()], program.programId, ); - // fetch latest SOL price - const solPrice: Big | null = await aggregatorAccount.fetchLatestValue(); - if (solPrice === null) { - throw new Error("Aggregator holds no value"); - } - const failUnlockPrice = solPrice.minus(10).toNumber(); - const amountToLockUp = new anchor.BN(100); - - // Send transaction try { - const tx = await program.methods - .deposit(amountToLockUp, failUnlockPrice) - .accounts({ + const transaction = await program.methods + .deposit(ESCROW_AMOUNT, unlockPrice) + .accountsPartial({ user: payer.publicKey, - escrowAccount: escrowState, - systemProgram: anchor.web3.SystemProgram.programId, + escrowAccount: escrow, + systemProgram: SystemProgram.programId, }) .signers([payer]) .rpc(); - await provider.connection.confirmTransaction(tx, "confirmed"); - - // Fetch the created account - const newAccount = await program.account.escrowState.fetch(escrowState); + await confirmTransaction( + provider.connection, + transaction, + CONFIRMATION_COMMITMENT, + ); + const escrowAccount = await program.account.escrow.fetch(escrow); const escrowBalance = await provider.connection.getBalance( - escrowState, - "confirmed", + escrow, + CONFIRMATION_COMMITMENT, ); - console.log("Onchain unlock price:", newAccount.unlockPrice); + + console.log("Onchain unlock price:", escrowAccount.unlockPrice); console.log("Amount in escrow:", escrowBalance); - // Check whether the data onchain is equal to local 'data' - assert(failUnlockPrice == newAccount.unlockPrice); + assert(unlockPrice === escrowAccount.unlockPrice); assert(escrowBalance > 0); - } catch (e) { - console.log(e); - assert.fail(e); + } catch (error) { + console.error("Error details:", error); + throw new Error(`Failed to create escrow: ${error.message}`); } - }); - - it("Withdraw from escrow", async () => { - // derive escrow address - const [escrowState] = await anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("MICHAEL BURRY"), payer.publicKey.toBuffer()], - program.programId, - ); + }; - // send tx - const tx = await program.methods - .withdraw() - .accounts({ - user: payer.publicKey, - escrowAccount: escrowState, - feedAggregator: solUsedSwitchboardFeed, - systemProgram: anchor.web3.SystemProgram.programId, - }) - .signers([payer]) - .rpc(); - - await provider.connection.confirmTransaction(tx, "confirmed"); - - // assert that the 
escrow account has been closed - let accountFetchDidFail = false; - try { - await program.account.escrowState.fetch(escrowState); - } catch (e) { - accountFetchDidFail = true; + it("creates Burry Escrow Below Current Price", async () => { + const solPrice: Big | null = await aggregatorAccount.fetchLatestValue(); + if (solPrice === null) { + throw new Error("Aggregator holds no value"); } + // Although `SOL_USD_SWITCHBOARD_FEED` is not changing we are changing the unlockPrice in test as given below to simulate the escrow behavior + const unlockPrice = solPrice.minus(PRICE_OFFSET).toNumber(); - assert(accountFetchDidFail); + await createAndVerifyEscrow(unlockPrice); }); - it("Create Burry Escrow Above Price", async () => { - // fetch switchboard devnet program object - const switchboardProgram = await SwitchboardProgram.load( - "devnet", - new anchor.web3.Connection("https://api.devnet.solana.com"), - payer, - ); - const aggregatorAccount = new AggregatorAccount( - switchboardProgram, - solUsedSwitchboardFeed, - ); - - // derive escrow state account - const [escrowState] = await anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("MICHAEL BURRY"), payer.publicKey.toBuffer()], + it("withdraws from escrow", async () => { + const [escrow] = PublicKey.findProgramAddressSync( + [Buffer.from(ESCROW_SEED), payer.publicKey.toBuffer()], program.programId, ); - console.log("Escrow Account: ", escrowState.toBase58()); - // fetch latest SOL price - const solPrice: Big | null = await aggregatorAccount.fetchLatestValue(); - if (solPrice === null) { - throw new Error("Aggregator holds no value"); - } - const failUnlockPrice = solPrice.plus(10).toNumber(); - const amountToLockUp = new anchor.BN(100); + const userBalanceBefore = await provider.connection.getBalance( + payer.publicKey, + ); - // Send transaction try { - const tx = await program.methods - .deposit(amountToLockUp, failUnlockPrice) - .accounts({ + const transaction = await program.methods + .withdraw() + .accountsPartial({ user: payer.publicKey, - escrowAccount: escrowState, - systemProgram: anchor.web3.SystemProgram.programId, + escrowAccount: escrow, + feedAggregator: SOL_USD_SWITCHBOARD_FEED, + systemProgram: SystemProgram.programId, }) .signers([payer]) .rpc(); - await provider.connection.confirmTransaction(tx, "confirmed"); - console.log("Your transaction signature", tx); - - // Fetch the created account - const newAccount = await program.account.escrowState.fetch(escrowState); - - const escrowBalance = await provider.connection.getBalance( - escrowState, - "confirmed", + await confirmTransaction( + provider.connection, + transaction, + CONFIRMATION_COMMITMENT, ); - console.log("Onchain unlock price:", newAccount.unlockPrice); - console.log("Amount in escrow:", escrowBalance); - // Check whether the data onchain is equal to local 'data' - assert(failUnlockPrice == newAccount.unlockPrice); - assert(escrowBalance > 0); - } catch (e) { - console.log(e); - assert.fail(e); + // Verify escrow account is closed + try { + await program.account.escrow.fetch(escrow); + assert.fail("Escrow account should have been closed"); + } catch (error) { + console.log(error.message); + assert( + error.message.includes("Account does not exist"), + "Unexpected error: " + error.message, + ); + } + + // Verify user balance increased + const userBalanceAfter = await provider.connection.getBalance( + payer.publicKey, + ); + assert( + userBalanceAfter > userBalanceBefore, + "User balance should have increased", + ); + } catch (error) { + throw new 
Error(`Failed to withdraw from escrow: ${error.message}`); } }); - it("Attempt to withdraw while price is below UnlockPrice", async () => { - let didFail = false; + it("creates Burry Escrow Above Current Price", async () => { + const solPrice: Big | null = await aggregatorAccount.fetchLatestValue(); + if (solPrice === null) { + throw new Error("Aggregator holds no value"); + } + // Although `SOL_USD_SWITCHBOARD_FEED` is not changing we are changing the unlockPrice in test as given below to simulate the escrow behavior + const unlockPrice = solPrice.plus(PRICE_OFFSET).toNumber(); + await createAndVerifyEscrow(unlockPrice); + }); - // derive escrow address - const [escrowState] = await anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("MICHAEL BURRY"), payer.publicKey.toBuffer()], + it("fails to withdraw while price is below UnlockPrice", async () => { + const [escrow] = PublicKey.findProgramAddressSync( + [Buffer.from(ESCROW_SEED), payer.publicKey.toBuffer()], program.programId, ); - // send tx try { - const tx = await program.methods + await program.methods .withdraw() - .accounts({ + .accountsPartial({ user: payer.publicKey, - escrowAccount: escrowState, - feedAggregator: solUsedSwitchboardFeed, - systemProgram: anchor.web3.SystemProgram.programId, + escrowAccount: escrow, + feedAggregator: SOL_USD_SWITCHBOARD_FEED, + systemProgram: SystemProgram.programId, }) .signers([payer]) .rpc(); - await provider.connection.confirmTransaction(tx, "confirmed"); - console.log("Your transaction signature", tx); - } catch (e) { - // verify tx returns expected error - didFail = true; - console.log(e.error.errorMessage); - assert( - e.error.errorMessage == - "Current SOL price is not above Escrow unlock price.", - ); + assert.fail("Withdrawal should have failed"); + } catch (error) { + console.log(error.message); + if (error instanceof AnchorError) { + assert.include(error.message, EXPECTED_ERROR_MESSAGE); + } else if (error instanceof Error) { + assert.include(error.message, EXPECTED_ERROR_MESSAGE); + } else { + throw new Error(`Unexpected error type: ${error}`); + } } - - assert(didFail); }); }); ``` -If you feel confident in the testing logic, go ahead and run `anchor test` in -your shell of choice. You should get four passing tests. +Once you're confident with the testing logic, run `anchor test` in your +terminal. You should see four tests pass. -If something went wrong, go back through the lab and make sure you got -everything right. Pay close attention to the intent behind the code rather than -just copy/pasting. Also feel free to review the working code -[on the `main` branch of its Github repository](https://github.com/Unboxed-Software/michael-burry-escrow). +```bash + burry-escrow +Onchain unlock price: 137.42243 +Amount in escrow: 1058020 + ✔ creates Burry Escrow Below Current Price (765ms) +Account does not exist or has no data LxDZ9DXNwSFsu2e6u37o6C2T3k59B6ySEHHVaNDrgBq + ✔ withdraws from escrow (353ms) +Onchain unlock price: 157.42243 +Amount in escrow: 1058020 + ✔ creates Burry Escrow Above Current Price (406ms) +AnchorError occurred. Error Code: SolPriceBelowUnlockPrice. Error Number: 6003. Error Message: Current SOL price is not above Escrow unlock price.. + ✔ fails to withdraw while price is below UnlockPrice + + + 4 passing (2s) +``` + +If something goes wrong, review the lab and ensure everything is correct. Focus +on understanding the intent behind the code instead of just copying/pasting. 
You +can also review the working code on the +[`main` branch of burry-escrow GitHub repository](https://github.com/solana-developers/burry-escrow/tree/main). ### Challenge As an independent challenge, create a fallback plan if the data feed ever goes down. If the Oracle queue has not updated the aggregator account in X time or if -the data feed account does not exist anymore, withdraw the user’s escrowed +the data feed account does not exist anymore, withdraw the user's escrowed funds. A potential solution to this challenge can be found -[in the Github repository on the `challenge-solution` branch](https://github.com/Unboxed-Software/michael-burry-escrow/tree/challenge-solution). +[in the Github repository on the `challenge-solution` branch](https://github.com/solana-developers/burry-escrow/tree/challenge-solution). + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=1a5d266c-f4c1-4c45-b986-2afd4be59991)! diff --git a/content/courses/connecting-to-offchain-data/verifiable-randomness-functions.md b/content/courses/connecting-to-offchain-data/verifiable-randomness-functions.md index d21841170..70fc691af 100644 --- a/content/courses/connecting-to-offchain-data/verifiable-randomness-functions.md +++ b/content/courses/connecting-to-offchain-data/verifiable-randomness-functions.md @@ -57,10 +57,10 @@ game as a seed. Unfortunately, neither type of randomness is natively available in Solana programs, because these programs have to be deterministic. All validators need -to come to the same conclusion. There is no way they’d all draw the same random -number, and if they used a seed, it’d be prone to attacks. See the +to come to the same conclusion. There is no way they'd all draw the same random +number, and if they used a seed, it'd be prone to attacks. See the [Solana FAQs](https://solana.com/docs/programs/lang-rust#depending-on-rand) for -more. So we’ll have to look outside of the blockchain for randomness with VRFs. +more. So we'll have to look outside of the blockchain for randomness with VRFs. ### What is Verifiable Randomness? @@ -68,7 +68,7 @@ A Verifiable Random Function (VRF) is a public-key pseudorandom function that provides proofs that its outputs were calculated correctly. This means we can use a cryptographic keypair to generate a random number with a proof, which can then be validated by anyone to ensure the value was calculated correctly without -the possibility of leaking the producer’s secret key. Once validated, the random +the possibility of leaking the producer's secret key. Once validated, the random value is stored onchain in an account. VRFs are a crucial component for achieving verifiable and unpredictable @@ -100,9 +100,9 @@ Switchboard is a decentralized Oracle network that offers VRFs on Solana. Oracles are services that provide external data to a blockchain, allowing them to interact with and respond to real-world events. The Switchboard network is made up of many different individual oracles run by third parties to provide -external data and service requests onchain. To learn more about Switchboard’s +external data and service requests onchain. To learn more about Switchboard's Oracle network, please refer to our -[Oracle lesson](/developers/courses/connecting-to-offchain-data/oracles). +[Oracle lesson](/content/courses/connecting-to-offchain-data/oracles.md) Switchboard's VRF allows users to request an oracle to produce a randomness output onchain. 
Once an oracle has been assigned the request, the proof of the @@ -112,13 +112,13 @@ verified, the Switchboard program will execute a onchain callback defined by the VRF Account during account creation. From there the program can consume the random data. -You might be wondering how they get paid. In Switchboard’s VRF implementation, +You might be wondering how they get paid. In Switchboard's VRF implementation, you actually pay per request. ### Requesting and Consuming VRF Now that we know what a VRF is and how it fits into the Switchboard Oracle -network, let’s take a closer look at how to actually request and consume +network, let's take a closer look at how to actually request and consume randomness from a Solana program. At a high level, the process for requesting and consuming randomness from Switchboard looks like this: @@ -137,7 +137,7 @@ and consuming randomness from Switchboard looks like this: pseudorandom number returned from the Oracle. 7. Program consumes the random number and can execute business logic with it! -There are a lot of steps here, but don’t worry, we'll be going through each step +There are a lot of steps here, but don't worry, we'll be going through each step of the process in detail. First there are a couple of accounts that we will have to create ourselves to @@ -191,15 +191,15 @@ Some important fields on this account are `authority`, `oracle_queue`, and `callback`. The `authority` should be a PDA of the program that has the ability to request randomness on this `vrf` account. That way, only that program can provide the signature needed for the vrf request. The `oracle_queue` field -allows you to specify which specific oracle queue you’d like to service the vrf -requests made with this account. If you aren’t familiar with oracle queues on +allows you to specify which specific oracle queue you'd like to service the vrf +requests made with this account. If you aren't familiar with oracle queues on Switchboard, checkout the [Oracles lesson in the Connecting to Offchain Data course](/content/courses/connecting-to-offchain-data/oracles)! Lastly, the `callback` field is where you define the callback instruction the Switchboard program should invoke once the randomness result has be verified. The `callback` field is of type -`[CallbackZC](https://github.com/switchboard-xyz/solana-sdk/blob/9dc3df8a5abe261e23d46d14f9e80a7032bb346c/rust/switchboard-solana/src/oracle_program/accounts/ecvrf.rs#L25)`. +[`CallbackZC`](https://github.com/switchboard-xyz/solana-sdk/blob/9dc3df8a5abe261e23d46d14f9e80a7032bb346c/rust/switchboard-solana/src/oracle_program/accounts/ecvrf.rs#L25). ```rust #[zero_copy(unsafe)] @@ -254,7 +254,7 @@ Now, you can create the `vrf` account. Now that we have all of our needed accounts we can finally call the `request_randomness` instruction on the Switchboard program. It's important to note you can invoke the `request_randomness` in a client or within a program -with a cross program invocation (CPI). Let’s take a look at what accounts are +with a cross program invocation (CPI). Let's take a look at what accounts are required for this request by checking out the Account struct definition in the actual [Switchboard program](https://github.com/switchboard-xyz/solana-sdk/blob/fbef37e4a78cbd8b8b6346fcb96af1e20204b861/rust/switchboard-solana/src/oracle_program/instructions/vrf_request_randomness.rs#L8). 
@@ -296,7 +296,7 @@ pub struct VrfRequestRandomness<'info> { } ``` -That’s a lot of accounts, let’s walk through each one and give them some +That's a lot of accounts, let's walk through each one and give them some context. - `authority` - PDA derived from our program @@ -320,7 +320,7 @@ context. [Recent Blockhashes Solana program](https://docs.rs/solana-program/latest/solana_program/sysvar/recent_blockhashes/index.html) - Token Program - Solana Token Program -That’s all the accounts needed for just the randomness request, now let's see +That's all the accounts needed for just the randomness request, now let's see what it looks like in a Solana program via CPI. To do this, we make use of the `VrfRequestRandomness` data struct from the [SwitchboardV2 rust crate.](https://github.com/switchboard-xyz/solana-sdk/blob/main/rust/switchboard-solana/src/oracle_program/instructions/vrf_request_randomness.rs) @@ -367,7 +367,7 @@ Ok(()) ``` Once the Switchboard program is invoked, it does some logic on its end and -assigns an oracle in the `vrf` account’s defined oracle queue to serve the +assigns an oracle in the `vrf` account's defined oracle queue to serve the randomness request. The assigned oracle then calculates a random value and sends it back to the Switchboard program. @@ -414,13 +414,13 @@ pub fn handler(ctx: Context) -> Result <()> { } ``` -Now you have randomness! Hooray! But there is one last thing we have not talked -about yet and that’s how the randomness is returned. Switchboard, gives you your -randomness calling -`[get_result()](https://github.com/switchboard-xyz/solana-sdk/blob/9dc3df8a5abe261e23d46d14f9e80a7032bb346c/rust/switchboard-solana/src/oracle_program/accounts/vrf.rs#L122)`. +Now you have randomness! Hooray! But there is one last thing we have not +discussed yet and that's how the randomness is returned. Switchboard, gives you +your randomness calling +[`get_result()`](https://github.com/switchboard-xyz/solana-sdk/blob/9dc3df8a5abe261e23d46d14f9e80a7032bb346c/rust/switchboard-solana/src/oracle_program/accounts/vrf.rs#L122). This method returns the `current_round.result` field of the `vrf` account SwitchboardDecimal format, which is really just a buffer of 32 random -`[u8](https://github.com/switchboard-xyz/solana-sdk/blob/9dc3df8a5abe261e23d46d14f9e80a7032bb346c/rust/switchboard-solana/src/oracle_program/accounts/ecvrf.rs#L65C26-L65C26)` +[`u8`](https://github.com/switchboard-xyz/solana-sdk/blob/9dc3df8a5abe261e23d46d14f9e80a7032bb346c/rust/switchboard-solana/src/oracle_program/accounts/ecvrf.rs#L65C26-L65C26) unsigned-integers. You can use these unsigned-integers however you see fit in your program, but a very common method is to treat each integer in the buffer as its own random number. For example, if you need a dice roll (1-6) just take the @@ -440,15 +440,15 @@ the steps involved in a VRF request, review this diagram. ## Lab -For this lesson’s lab, we will be picking up where we left off in the +For this lesson's lab, we will be picking up where we left off in the [Oracle lesson](/content/courses/connecting-to-offchain-data/oracles). If you haven't completed the Oracle lesson and demo, we strongly recommend you do as -there are a lot of overlapping concepts and we’ll be starting from the Oracle -lesson’s codebase. +there are a lot of overlapping concepts and we'll be starting from the Oracle +lesson's codebase. 
If you don't want to complete the Oracle lesson, the starter code for this lab is provided for you in -[the main branch of the lab Github repository](https://github.com/Unboxed-Software/michael-burry-escrow). +[the main branch of the lab Github repository](https://github.com/solana-developers/burry-escrow). The repo contains a "Michael Burry" escrow program. This is a program that allows a user to lock up some solana funds in escrow that cannot be withdrawn @@ -463,8 +463,8 @@ from escrow regardless of the SOL price. If you are cloning the repo from the previous lesson make sure to do the following: -1. `git clone https://github.com/Unboxed-Software/michael-burry-escrow` -2. `cd michael-burry-escrow` +1. `git clone https://github.com/solana-developers/burry-escrow` +2. `cd burry-escrow` 3. `anchor build` 4. `anchor keys list` 1. Take the resulting key and put it into `Anchor.toml` and @@ -475,8 +475,8 @@ following: 6. `yarn install` 7. `anchor test` -When all tests pass we’re ready to begin. We will start by filling in some -boilerplate stuff, then we’ll implement the functions. +When all tests pass we're ready to begin. We will start by filling in some +boilerplate stuff, then we'll implement the functions. #### 2. Cargo.toml @@ -485,8 +485,8 @@ in our `Cargo.toml` file. ```typescript [dependencies] -anchor-lang = "0.28.0" -anchor-spl = "0.28.0" +anchor-lang = "0.30.1" +anchor-spl = "0.30.1" switchboard-v2 = "0.4.0" ``` @@ -535,7 +535,7 @@ mod burry_escrow { init_vrf_client_handler(ctx) } - pub fn get_out_of_jail(ctx: Context, params: RequestRandomnessParams) -> Result<()>{ + pub fn get_out_of_jail(ctx: Context, params: RequestRandomnessParams) -> Result<()>{ get_out_of_jail_handler(ctx, params) } @@ -592,7 +592,7 @@ about `zero_copy`, take a look at our pub struct VrfClientState { pub bump: u8, pub result_buffer: [u8; 32], - pub dice_type: u8, // 6 sided + pub dice_type: u8, // 6 sided pub die_result_1: u8, pub die_result_2: u8, pub timestamp: i64, @@ -638,10 +638,10 @@ pub struct VrfClientState { } ``` -#### 5. Errors.rs +#### 5. Error.rs Next, let's take a quick pit stop and add one last error -`InvalidVrfAuthorityError` to `errors.rs`. We'll use this when the VRF authority +`InvalidVrfAuthorityError` to `error.rs`. We'll use this when the VRF authority is incorrect. ```rust @@ -681,7 +681,7 @@ pub mod consume_randomness; Lastly, let's update our `deposit.rs` and `withdraw.rs` files to reflect our soon-to-be new powers. -First, let’s initialize our `out_of_jail` flag to `false` in `deposit.rs`. +First, let's initialize our `out_of_jail` flag to `false` in `deposit.rs`. ```rust // in deposit.rs @@ -720,8 +720,8 @@ check, going straight to our withdrawal. #### 8. Using VRF -Now that we have the boilerplate out of the way, let’s move on to our first -addition: initializing our VRF Client. Let’s create a new file called +Now that we have the boilerplate out of the way, let's move on to our first +addition: initializing our VRF Client. Let's create a new file called `init_vrf_client.rs` in the `/instructions` folder. We'll add the needed crates, then create the `InitVrfClient` context. We'll need @@ -731,7 +731,7 @@ the following accounts: - `escrow_account` - the burry escrow account created when the user locked their funds up. - `vrf_client_state` - account we will be creating in this instruction to hold - state about the user’s dice rolls. + state about the user's dice rolls. 
- `vrf` - Our VRF owned by the Switchboard program, we will create this account client-side before we call `init_vrf_client`. - `system_program` - The system program since we use the init macro for @@ -758,7 +758,7 @@ pub struct InitVrfClient<'info> { #[account( init, seeds = [ - VRF_STATE_SEED, + VRF_STATE_SEED, user.key.as_ref(), escrow_account.key().as_ref(), vrf.key().as_ref(), @@ -786,7 +786,7 @@ only have one `escrow_account`. Since there is only one, If you wanted to be thorough, you might want to implement a `close_vrf_state` function to get your rent back. -Now, let’s write some basic initialization logic for this function. First we +Now, let's write some basic initialization logic for this function. First we load and initialize our `vrf_state` account by calling `load_init()`. Then we fill in the values for each field. @@ -926,7 +926,7 @@ pub struct RequestRandomness<'info> { } ``` -Lastly, we'll create a new struct `RequestRandomnessParams`. We’ll be passing in +Lastly, we'll create a new struct `RequestRandomnessParams`. We'll be passing in some account's bumps client-side. ```rust @@ -939,7 +939,7 @@ pub struct RequestRandomnessParams { Now, we can work on the logic of this instruction. The logic should gather all of the accounts needed and pass them to -`[VrfRequestRandomness](https://github.com/switchboard-xyz/solana-sdk/blob/fbef37e4a78cbd8b8b6346fcb96af1e20204b861/rust/switchboard-solana/src/oracle_program/instructions/vrf_request_randomness.rs#L8)`, +[VrfRequestRandomness](https://github.com/switchboard-xyz/solana-sdk/blob/fbef37e4a78cbd8b8b6346fcb96af1e20204b861/rust/switchboard-solana/src/oracle_program/instructions/vrf_request_randomness.rs#L8), which is a really nice struct from Switchboard. Then we'll sign the request and send it on it's way. @@ -971,7 +971,7 @@ pub fn get_out_of_jail_handler(ctx: Context, params: RequestR let escrow_key = ctx.accounts.escrow_account.key(); let user_key = ctx.accounts.user.key(); let state_seeds: &[&[&[u8]]] = &[&[ - &VRF_STATE_SEED, + &VRF_STATE_SEED, user_key.as_ref(), escrow_key.as_ref(), vrf_key.as_ref(), @@ -1006,7 +1006,7 @@ If doubles are rolled, set the `out_of_jail` field on `vrf_state` to true. First, let's create the `ConsumeRandomness` context. Fortunately, it only takes three accounts. -- `escrow_account` - state account for user’s escrowed funds. +- `escrow_account` - state account for user's escrowed funds. - `vrf_state` - state account to hold information about dice roll. - `vrf` - account with the random number that was just calculated by the Switchboard network. @@ -1048,7 +1048,7 @@ the randomness in the account. ```rust // inside consume_randomness.rs -pub fn consume_randomness_handler(ctx: Context) -> Result <()> { +pub fn consume_randomness_handler(ctx: Context) -> Result<()> { msg!("Consuming randomness..."); let vrf = ctx.accounts.vrf.load()?; @@ -1070,7 +1070,7 @@ randomness and dice rolls within it. We also want to check that the randomness is stale. ```rust -pub fn consume_randomness_handler(ctx: Context) -> Result <()> { +pub fn consume_randomness_handler(ctx: Context) -> Result<()> { msg!("Successfully consumed randomness."); let vrf = ctx.accounts.vrf.load()?; @@ -1092,7 +1092,7 @@ pub fn consume_randomness_handler(ctx: Context) -> Result <() } ``` -Now it’s time to actually use the random result. Since we only use two dice we +Now it's time to actually use the random result. Since we only use two dice we only need the first two bytes of the buffer. 
To convert these random values into “dice rolls”, we use modular arithmetic. For anyone not familiar with modular arithmetic, @@ -1201,12 +1201,12 @@ Please make sure your program builds successfully by running `anchor build`. #### 11. Testing -Alright, let’s test our program. Historically, we'd need to test the VRF on +Alright, let's test our program. Historically, we'd need to test the VRF on Devnet. Fortunately, the folks at Switchboard have created some really nice -functions to let us run our own VRF oracle locally. For this, we’ll need to set +functions to let us run our own VRF oracle locally. For this, we'll need to set up our local server, grab all of the right accounts, and then call our program. -The first thing we’ll do is pull in some more accounts in our `Anchor.toml` +The first thing we'll do is pull in some more accounts in our `Anchor.toml` file: ```toml @@ -1311,9 +1311,9 @@ describe("burry-escrow-vrf", () => { // Check whether the data onchain is equal to local 'data' assert(failUnlockPrice == newAccount.unlockPrice); assert(escrowBalance > 0); - } catch (e) { - console.log(e); - assert.fail(e); + } catch (error) { + console.log(error); + assert.fail(error); } }); @@ -1341,13 +1341,14 @@ describe("burry-escrow-vrf", () => { await provider.connection.confirmTransaction(tx, "confirmed"); console.log("Your transaction signature", tx); - } catch (e) { - // verify tx returns expected error + } catch (error) { didFail = true; - console.log(e.error.errorMessage); + assert( - e.error.errorMessage == + error.message.includes( "Current SOL price is not above Escrow unlock price.", + ), + "Unexpected error message: " + error.message, ); } @@ -1517,8 +1518,8 @@ it("Roll till you can withdraw", async () => { }) .signers([payer]) .rpc(); - } catch (e) { - console.log(e); + } catch (error) { + console.log(error); assert.fail(); } @@ -1580,8 +1581,8 @@ it("Roll till you can withdraw", async () => { console.log("Resetting die..."); await delay(5000); } - } catch (e) { - console.log(e); + } catch (error) { + console.log(error); assert.fail(); } } @@ -1619,7 +1620,7 @@ And there you have it! You should be able to run and pass all of the tests using If something is not working, go back and find where you went wrong. Alternatively feel free to try out the -[solution code on the `vrf` branch](https://github.com/Unboxed-Software/michael-burry-escrow/tree/vrf). +[solution code on the `vrf` branch](https://github.com/solana-developers/burry-escrow/tree/vrf). Remember to update your program keys and wallet path like we did in the [the Setup step](#1-program-setup). @@ -1632,7 +1633,7 @@ they roll 3 times without rolling doubles, they should be able to withdraw their funds, just like getting out of jail in Monopoly. If you get stuck, we have the solution in the -[`vrf-challenge-solution` branch](https://github.com/Unboxed-Software/michael-burry-escrow/tree/vrf-challenge-solution). +[`vrf-challenge-solution` branch](https://github.com/solana-developers/burry-escrow/tree/vrf-challenge-solution). Push your code to GitHub and diff --git a/content/courses/intro-to-solana/getting-started.md b/content/courses/intro-to-solana/getting-started.md index d2e5e3a73..15565d26c 100644 --- a/content/courses/intro-to-solana/getting-started.md +++ b/content/courses/intro-to-solana/getting-started.md @@ -8,7 +8,7 @@ objectives: description: "Understand what web3, blockchains, and Solana are." --- -## Welcome! +## Welcome Welcome to the best starting point for developers looking to learn web3 and blockchain! 
diff --git a/content/courses/intro-to-solana/interact-with-wallets.md b/content/courses/intro-to-solana/interact-with-wallets.md index db3c99240..76cfc9db1 100644 --- a/content/courses/intro-to-solana/interact-with-wallets.md +++ b/content/courses/intro-to-solana/interact-with-wallets.md @@ -16,8 +16,8 @@ description: "Connect with installed browser wallets from your React apps." software wallets are often **browser extensions** that add the ability to connect to a wallet from a website. On mobile, wallet apps have their own browsers. -- Solana’s **Wallet Adapter** allows you to build websites that can request a - user’s wallet address and propose transactions for them to sign +- Solana's **Wallet Adapter** allows you to build websites that can request a + user's wallet address and propose transactions for them to sign ## Lesson @@ -26,7 +26,7 @@ description: "Connect with installed browser wallets from your React apps." In the previous two lessons, we discussed keypairs. Keypairs are used to locate accounts and sign transactions. While the public key of a keypair is perfectly safe to share, the secret key should always be kept in a secure location. If a -user’s secret key is exposed, then a malicious actor could execute transactions +user's secret key is exposed, then a malicious actor could execute transactions with the authority of that user, allowing them to transfer all the assets inside. @@ -42,7 +42,7 @@ existing device(s). Both techniques allow websites to interact easily with the wallet, for example: -1. Seeing the wallet’s wallet address (their public key) +1. Seeing the wallet's wallet address (their public key) 2. Submitting transactions for a user's approval to sign 3. Sending signed transactions to the network @@ -51,20 +51,20 @@ transaction to your wallet and having the wallet handle the signing, you ensure that you never expose your secret key to the website. Instead, you only share the secret key with the wallet application. -Unless you’re creating a wallet application yourself, your code should never +Unless you're creating a wallet application yourself, your code should never need to ask a user for their secret key. Instead, you can ask users to connect to your site using a reputable wallet. -## Solana’s Wallet Adapter +## Solana's Wallet Adapter If you build web apps, and need users to be able to connect to their wallets and -sign transactions through your apps, you'll want Solana’s Wallet Adapter. Wallet +sign transactions through your apps, you'll want Solana's Wallet Adapter. Wallet Adapter is a suite of modular packages: - The core functionality is found in `@solana/wallet-adapter-base`. - React support is added by `@solana/wallet-adapter-react`. - Additional packages provide components for common UI frameworks. In this - lesson, and throughout this course, we’ll be using components from + lesson, and throughout this course, we'll be using components from `@solana/wallet-adapter-react-ui`. Finally, some packages are adapters for specific wallet apps. These are now no @@ -73,7 +73,7 @@ longer necessary in most cases - see below. ### Install Wallet-Adapter Libraries for React When adding wallet support to an existing React app, you start by installing the -appropriate packages. You’ll need `@solana/wallet-adapter-base`, +appropriate packages. You'll need `@solana/wallet-adapter-base`, `@solana/wallet-adapter-react`. If you plan to use the provided React components, you'll also need to add `@solana/wallet-adapter-react-ui`. 
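For orientation, here is a sketch of the key exports each of those packages contributes; the exact imports used in this lab appear in the snippets below:

```tsx
// Sketch only: the main pieces each Wallet Adapter package provides.
// Core types shared by all adapters
import { WalletAdapterNetwork } from "@solana/wallet-adapter-base";
// React context providers and hooks
import {
  ConnectionProvider,
  WalletProvider,
  useConnection,
  useWallet,
} from "@solana/wallet-adapter-react";
// Prebuilt UI: the wallet-select modal and the connect multi-button
import {
  WalletModalProvider,
  WalletMultiButton,
} from "@solana/wallet-adapter-react-ui";
```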
@@ -113,15 +113,16 @@ ensure this is to wrap your entire app in `ConnectionProvider` and ```tsx import { NextPage } from "next"; -import { FC, ReactNode } from "react"; +import { FC, ReactNode, useMemo } from "react"; import { ConnectionProvider, WalletProvider, } from "@solana/wallet-adapter-react"; -import * as web3 from "@solana/web3.js"; +import { clusterApiUrl } from "@solana/web3.js"; +import "@solana/wallet-adapter-react-ui/styles.css"; export const Home: NextPage = props => { - const endpoint = web3.clusterApiUrl("devnet"); + const endpoint = clusterApiUrl("devnet"); const wallets = useMemo(() => [], []); return ( @@ -135,7 +136,7 @@ export const Home: NextPage = props => { ``` Note that `ConnectionProvider` requires an `endpoint` property and that -`WalletProvider` requires a `wallets` property. We’re continuing to use the +`WalletProvider` requires a `wallets` property. We're continuing to use the endpoint for the Devnet cluster, and since all major Solana wallet applications support the Wallet Standard, we don't need any wallet-specific adapters. At this point, you can connect with `wallet.connect()`, which will instruct the wallet @@ -144,9 +145,9 @@ for transactions. ![wallet connection prompt](/public/assets/courses/unboxed/wallet-connect-prompt.png) -While you could do this in a `useEffect` hook, you’ll usually want to provide +While you could do this in a `useEffect` hook, you'll usually want to provide more sophisticated functionality. For example, you may want users to be able to -choose from a list of supported wallet applications or disconnect after they’ve +choose from a list of supported wallet applications or disconnect after they've already connected. ### @solana/wallet-adapter-react-ui @@ -158,7 +159,7 @@ full-featured wallet experience is to use `WalletModalProvider` and ```tsx import { NextPage } from "next"; -import { FC, ReactNode } from "react"; +import { FC, ReactNode, useMemo } from "react"; import { ConnectionProvider, WalletProvider, @@ -167,10 +168,16 @@ import { WalletModalProvider, WalletMultiButton, } from "@solana/wallet-adapter-react-ui"; -import * as web3 from "@solana/web3.js"; +import { + clusterApiUrl, + Transaction, + PublicKey, + SystemProgram, +} from "@solana/web3.js"; +import "@solana/wallet-adapter-react-ui/styles.css"; const Home: NextPage = props => { - const endpoint = web3.clusterApiUrl("devnet"); + const endpoint = clusterApiUrl("devnet"); const wallets = useMemo(() => [], []); return ( @@ -189,7 +196,7 @@ export default Home; ``` The `WalletModalProvider` adds functionality for presenting a modal screen for -users to select which wallet they’d like to use. The `WalletMultiButton` changes +users to select which wallet they'd like to use. The `WalletMultiButton` changes behavior to match the connection status: ![multi button select wallet option](/public/assets/courses/unboxed/multi-button-select-wallet.png) @@ -214,7 +221,7 @@ functionality: Once your site is connected to a wallet, `useConnection` will retrieve a `Connection` object and `useWallet` will get the `WalletContextState`. `WalletContextState` has a property `publicKey` that is `null` when not -connected to a wallet and has the public key of the user’s account when a wallet +connected to a wallet and has the public key of the user's account when a wallet is connected. With a public key and a connection, you can fetch account info and more. 
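As a quick illustration of that null check (a sketch, not part of the lab code; `logBalance` is a name we made up), guard on `publicKey` before making any network calls:

```tsx
import { Connection, PublicKey, LAMPORTS_PER_SOL } from "@solana/web3.js";

// Sketch: `publicKey` is null until a wallet connects, so guard before using it.
const logBalance = async (
  connection: Connection,
  publicKey: PublicKey | null,
) => {
  if (!publicKey) return;
  const lamports = await connection.getBalance(publicKey);
  console.log(`Balance: ${lamports / LAMPORTS_PER_SOL} SOL`);
};
```

The component below applies the same guard inside a `useEffect` hook.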
@@ -229,26 +236,39 @@ export const BalanceDisplay: FC = () => {
   const { publicKey } = useWallet();
 
   useEffect(() => {
-    if (!connection || !publicKey) {
-      return;
-    }
-
-    connection.onAccountChange(
-      publicKey,
-      updatedAccountInfo => {
-        setBalance(updatedAccountInfo.lamports / LAMPORTS_PER_SOL);
-      },
-      "confirmed",
-    );
-
-    connection.getAccountInfo(publicKey).then(info => {
-      setBalance(info.lamports);
-    });
+    const updateBalance = async () => {
+      if (!connection || !publicKey) {
+        console.error("Wallet not connected or connection unavailable");
+        return;
+      }
+
+      try {
+        connection.onAccountChange(
+          publicKey,
+          updatedAccountInfo => {
+            setBalance(updatedAccountInfo.lamports / LAMPORTS_PER_SOL);
+          },
+          "confirmed",
+        );
+
+        const accountInfo = await connection.getAccountInfo(publicKey);
+
+        if (accountInfo) {
+          setBalance(accountInfo.lamports / LAMPORTS_PER_SOL);
+        } else {
+          throw new Error("Account info not found");
+        }
+      } catch (error) {
+        console.error("Failed to retrieve account info:", error);
+      }
+    };
+
+    updateBalance();
  }, [connection, publicKey]);

  return (
-    <div>
-      <p>{publicKey ? `Balance: ${balance / LAMPORTS_PER_SOL} SOL` : ""}</p>
-    </div>
+    <div>
+      <p>{publicKey ? `Balance: ${balance} SOL` : ""}</p>
+    </div>
); }; @@ -269,30 +288,40 @@ const { connection } = useConnection(); const sendSol = async event => { event.preventDefault(); - const transaction = new web3.Transaction(); - const recipientPubKey = new web3.PublicKey(event.target.recipient.value); + if (!publicKey) { + console.error("Wallet not connected"); + return; + } + + try { + const recipientPubKey = new PublicKey(event.currentTarget.recipient.value); - const sendSolInstruction = web3.SystemProgram.transfer({ - fromPubkey: publicKey, - toPubkey: recipientPubKey, - lamports: 0.1 * LAMPORTS_PER_SOL, - }); + const transaction = new Transaction(); + const sendSolInstruction = SystemProgram.transfer({ + fromPubkey: publicKey, + toPubkey: recipientPubKey, + lamports: 0.1 * LAMPORTS_PER_SOL, + }); - transaction.add(sendSolInstruction); - const signature = sendTransaction(transaction, connection); - console.log(signature); + transaction.add(sendSolInstruction); + + const signature = await sendTransaction(transaction, connection); + console.log(`Transaction signature: ${signature}`); + } catch (error) { + console.error("Transaction failed", error); + } }; ``` When this function is called, the connected wallet will display the transaction -for the user’s approval. If approved, then the transaction will be sent. +for the user's approval. If approved, then the transaction will be sent. ![wallet transaction approval prompt](/public/assets/courses/unboxed/wallet-transaction-approval-prompt.png) ## Lab -Let’s take the Ping program from the last lesson and build a frontend that lets -users approve a transaction that pings the program. As a reminder, the program’s +Let's take the Ping program from the last lesson and build a frontend that lets +users approve a transaction that pings the program. As a reminder, the program's public key is `ChT1B39WKLS8qUrkLvFDXMhEJ4F1XZzwUNHUt4AU9aVa` and the public key for the data account is `Ah9K7dQ8EHaZqcAsgBW8w37yN2eAy3koFmUn4x3CJtod`. @@ -314,25 +343,27 @@ Then set your wallet to use Devnet, for example: - In Solflare, click **Settings** -> **General** -> **Network** -> **DevNet** - In Backpack, click **Preferences** -> **Developer Mode** -This ensures that your wallet app will be connected to the same network we’ll be +This ensures that your wallet app will be connected to the same network we'll be using in this lab. ### Download the starter code + + Download the [starter code for this project](https://github.com/Unboxed-Software/solana-ping-frontend/tree/starter). -This project is a simple Next.js application. It’s mostly empty except for the -`AppBar` component. We’ll build the rest throughout this lab. +This project is a simple Next.js application. It's mostly empty except for the +`AppBar` component. We'll build the rest throughout this lab. You can see its current state with the command `npm run dev` in the console. ### Wrap the app in context providers -To start, we’re going to create a new component to contain the various -Wallet-Adapter providers that we’ll be using. Create a new file inside the +To start, we're going to create a new component to contain the various +Wallet-Adapter providers that we'll be using. Create a new file inside the `components` folder called `WalletContextProvider.tsx`. 
-Let’s start with some of the boilerplate for a functional component: +Let's start with some of the boilerplate for a functional component: ```tsx import { FC, ReactNode } from "react"; @@ -346,7 +377,7 @@ const WalletContextProvider: FC<{ children: ReactNode }> = ({ children }) => { export default WalletContextProvider; ``` -To properly connect to the user’s wallet, we’ll need a `ConnectionProvider`, +To properly connect to the user's wallet, we'll need a `ConnectionProvider`, `WalletProvider`, and `WalletModalProvider`. Start by importing these components from `@solana/wallet-adapter-react` and `@solana/wallet-adapter-react-ui`. Then add them to the `WalletContextProvider` component. Note that @@ -378,9 +409,9 @@ export default WalletContextProvider; The last things we need are an actual endpoint for `ConnectionProvider` and the supported wallets for `WalletProvider`. -For the endpoint, we’ll use the same `clusterApiUrl` function from the -`@solana/web3.js` library that we’ve used before so you’ll need to import it. -For the array of wallets you’ll also need to import the +For the endpoint, we'll use the same `clusterApiUrl` function from the +`@solana/web3.js` library that we've used before so you'll need to import it. +For the array of wallets you'll also need to import the `@solana/wallet-adapter-wallets` library. After importing these libraries, create a constant `endpoint` that uses the @@ -395,18 +426,18 @@ To complete this component, add ensure proper styling and behavior of the Wallet Adapter library components. ```tsx -import { FC, ReactNode } from "react"; +import { FC, ReactNode, useMemo } from "react"; import { ConnectionProvider, WalletProvider, } from "@solana/wallet-adapter-react"; import { WalletModalProvider } from "@solana/wallet-adapter-react-ui"; -import * as web3 from "@solana/web3.js"; +import { clusterApiUrl } from "@solana/web3.js"; import * as walletAdapterWallets from "@solana/wallet-adapter-wallets"; require("@solana/wallet-adapter-react-ui/styles.css"); const WalletContextProvider: FC<{ children: ReactNode }> = ({ children }) => { - const endpoint = web3.clusterApiUrl("devnet"); + const endpoint = clusterApiUrl("devnet"); const wallets = useMemo(() => [], []); return ( @@ -423,10 +454,10 @@ export default WalletContextProvider; ### Add wallet multi-button -Next, let’s set up the Connect button. The current button is just a placeholder +Next, let's set up the Connect button. The current button is just a placeholder because rather than using a standard button or creating a custom component, -we’ll be using Wallet-Adapter’s “multi-button.” This button interfaces with the -providers we set up in `WalletContextProvider` and let’s users choose a wallet, +we'll be using Wallet-Adapter's “multi-button.” This button interfaces with the +providers we set up in `WalletContextProvider` and let's users choose a wallet, connect to a wallet, and disconnect from a wallet. If you ever need more custom functionality, you can create a custom component to handle this. @@ -465,7 +496,7 @@ export default Home; If you run the app, everything should still look the same since the current button on the top right is still just a placeholder. To remedy this, open `AppBar.tsx` and replace `` with ``. -You’ll need to import `WalletMultiButton` from +You'll need to import `WalletMultiButton` from `@solana/wallet-adapter-react-ui`. ```tsx @@ -492,21 +523,26 @@ button to connect your wallet to the site. 
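If you would rather confirm the connection in code than by looking at the button, a minimal sketch using the `useWallet` hook could look like this (the `ConnectedAddress` component is ours, not part of the starter code):

```tsx
import { FC } from "react";
import { useWallet } from "@solana/wallet-adapter-react";

// Sketch: render the connected address once the multi-button completes a connection.
export const ConnectedAddress: FC = () => {
  const { publicKey, connected } = useWallet();

  return (
    <p>
      {connected && publicKey
        ? `Connected: ${publicKey.toBase58()}`
        : "Not connected"}
    </p>
  );
};
```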
### Create button to ping program

-Now that our app can connect to our wallet, let’s make the “Ping!” button
+Now that our app can connect to our wallet, let's make the “Ping!” button
 actually do something.

-Start by opening the `PingButton.tsx` file. We’re going to replace the
+Start by opening the `PingButton.tsx` file. We're going to replace the
 `console.log` inside of `onClick` with code that will create a transaction and
-submit it to the wallet app for the end user’s approval.
+submit it to the wallet app for the end user's approval.

-First, we need a connection, the wallet’s public key, and Wallet-Adapter’s
+First, we need a connection, the wallet's public key, and Wallet-Adapter's
 `sendTransaction` function. To get this, we need to import `useConnection` and
-`useWallet` from `@solana/wallet-adapter-react`. While we’re here, let’s also
-import `@solana/web3.js` since we’ll need it to create our transaction.
+`useWallet` from `@solana/wallet-adapter-react`. While we're here, let's also
+import `@solana/web3.js` since we'll need it to create our transaction.

```tsx
import { useConnection, useWallet } from "@solana/wallet-adapter-react";
-import * as web3 from "@solana/web3.js";
+import {
+  PublicKey,
+  Transaction,
+  TransactionInstruction,
+} from "@solana/web3.js";
import { FC, useState } from "react";
import styles from "../styles/PingButton.module.css";

@@ -528,7 +564,11 @@ Now use the `useConnection` hook to create a `connection` constant and the

```tsx
import { useConnection, useWallet } from "@solana/wallet-adapter-react";
-import * as web3 from "@solana/web3.js";
+import {
+  PublicKey,
+  Transaction,
+  TransactionInstruction,
+} from "@solana/web3.js";
import { FC, useState } from "react";
import styles from "../styles/PingButton.module.css";

@@ -551,7 +592,7 @@ export const PingButton: FC = () => {
 With that, we can fill in the body of `onClick`.

First, check that both `connection` and `publicKey` exist (if either does not
-then the user’s wallet isn’t connected yet).
+then the user's wallet isn't connected yet).

Next, construct two instances of `PublicKey`, one for the program ID
`ChT1B39WKLS8qUrkLvFDXMhEJ4F1XZzwUNHUt4AU9aVa` and one for the data account
@@ -567,41 +608,47 @@ Finally, call `sendTransaction`.

```tsx
const onClick = async () => {
  if (!connection || !publicKey) {
-    return;
+    console.error("Wallet not connected or connection unavailable");
+    return;
  }

-  const programId = new web3.PublicKey(PROGRAM_ID);
-  const programDataAccount = new web3.PublicKey(DATA_ACCOUNT_PUBKEY);
-  const transaction = new web3.Transaction();
-
-  const instruction = new web3.TransactionInstruction({
-    keys: [
-      {
-        pubkey: programDataAccount,
-        isSigner: false,
-        isWritable: true,
-      },
-    ],
-    programId,
-  });
-
-  transaction.add(instruction);
-  const signature = await sendTransaction(transaction, connection);
-  console.log(sig);
+  try {
+    const programId = new PublicKey(PROGRAM_ID);
+    const programDataAccount = new PublicKey(DATA_ACCOUNT_PUBKEY);
+    const transaction = new Transaction();
+
+    const instruction = new TransactionInstruction({
+      keys: [
+        {
+          pubkey: programDataAccount,
+          isSigner: false,
+          isWritable: true,
+        },
+      ],
+      programId,
+    });
+
+    transaction.add(instruction);
+
+    const signature = await sendTransaction(transaction, connection);
+    console.log("Transaction Signature:", signature);
+  } catch (error) {
+    console.error("Transaction failed:", error);
+  }
};
```

-And that’s it!
If you refresh the page, connect your wallet, and click the ping +And that's it! If you refresh the page, connect your wallet, and click the ping button, your wallet should present you with a popup to confirm the transaction. ### Add some polish -There’s a lot you could do to make the user experience here even better. For +There's a lot you could do to make the user experience here even better. For example, you could change the UI to only show you the Ping button when a wallet is connected and display some other prompt otherwise. You could link to the transaction on Solana Explorer after a user confirms a transaction so they can easily go look at the transaction details. The more you experiment with it, the -more comfortable you’ll get, so get creative! +more comfortable you'll get, so get creative! You can also download the [full source code from this lab](https://github.com/Unboxed-Software/solana-ping-frontend) @@ -609,7 +655,7 @@ to understand all of this in context. ## Challenge -Now it’s your turn to build something independently. Create an application that +Now it's your turn to build something independently. Create an application that lets a user connect their wallet and send SOL to another account. ![Send SOL App](/public/assets/courses/unboxed/solana-send-sol-app.png) @@ -617,7 +663,7 @@ lets a user connect their wallet and send SOL to another account. 1. You can build this from scratch or you can [download the starter code](https://github.com/Unboxed-Software/solana-send-sol-frontend/tree/starter). 2. Wrap the starter application in the appropriate context providers. -3. In the form component, set up the transaction and send it to the user’s +3. In the form component, set up the transaction and send it to the user's wallet for approval. 4. Get creative with the user experience. Add a link to let the user view the transaction on Solana Explorer or something else that seems cool to you! diff --git a/content/courses/intro-to-solana/intro-to-cryptography.md b/content/courses/intro-to-solana/intro-to-cryptography.md index 795850f41..f88af61a3 100644 --- a/content/courses/intro-to-solana/intro-to-cryptography.md +++ b/content/courses/intro-to-solana/intro-to-cryptography.md @@ -20,6 +20,9 @@ description: "Understand asymmetric cryptography and how Solana uses it." ## Lesson +In this lesson, we will explore the basics of cryptography and how it's applied +within the Solana ecosystem. + ### Symmetric and Asymmetric Cryptography 'Cryptography' the study of hiding information. There are two main types of @@ -99,10 +102,10 @@ You can use the Solana blockchain from either the browser or node.js with the to install `@solana/web3.js` ```shell -npm i @solana/web3.js +npm i @solana/web3.js@1 ``` -We’ll cover a lot of +We'll cover a lot of [web3.js](https://solana.com/docs/clients/javascript-reference) gradually throughout this course, but you can also check out the [official web3.js documentation](https://solana.com/docs/clients/javascript-reference). @@ -135,7 +138,7 @@ store secret keys in source code. Instead, we: ### Loading an existing keypair -If you already have a keypair you’d like to use, you can load a `Keypair` from +If you already have a keypair you'd like to use, you can load a `Keypair` from an existing secret key stored in the filesystem or an `.env` file. 
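For example, loading a keypair straight from a JSON secret-key file (the format `solana-keygen` writes) takes only a few lines. This is a sketch with a placeholder path:

```typescript
import { Keypair } from "@solana/web3.js";
import { readFileSync } from "fs";

// Sketch: load a keypair from a JSON-array secret-key file (path is a placeholder).
const secretKey = Uint8Array.from(
  JSON.parse(readFileSync("/path/to/keypair.json", "utf8")),
);
const keypair = Keypair.fromSecretKey(secretKey);
console.log(`Loaded keypair with address: ${keypair.publicKey.toBase58()}`);
```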
In node.js, the `@solana-developers/helpers` npm package includes some extra functions: @@ -153,10 +156,13 @@ import { getKeypairFromEnvironment } from "@solana-developers/helpers"; const keypair = getKeypairFromEnvironment("SECRET_KEY"); ``` -You know how to make and load keypairs! Let’s practice what we’ve learned. +You know how to make and load keypairs! Let's practice what we've learned. ## Lab +In this lab we will learn about keypairs, and how to store secret keys securely +on solana + ### Installation Make a new directory, install TypeScript, Solana web3.js and esrun: @@ -165,7 +171,7 @@ Make a new directory, install TypeScript, Solana web3.js and esrun: mkdir generate-keypair cd generate-keypair npm init -y -npm install typescript @solana/web3.js esrun @solana-developers/helpers +npm install typescript @solana/web3.js@1 esrun @solana-developers/helpers@2 ``` Make a new file called `generate-keypair.ts` diff --git a/content/courses/intro-to-solana/intro-to-custom-onchain-programs.md b/content/courses/intro-to-solana/intro-to-custom-onchain-programs.md index c939d6c6b..d0c2adc8f 100644 --- a/content/courses/intro-to-solana/intro-to-custom-onchain-programs.md +++ b/content/courses/intro-to-solana/intro-to-custom-onchain-programs.md @@ -17,17 +17,11 @@ invoked in the onchain program. ### Instructions -In previous chapters, we used: +In previous lessons, we used the `SystemProgram.transfer()` function from +`@solana/web3.js`, which creates an instruction for the System program to +transfer SOL. -- The `SystemProgram.transfer()` function from `@solana/web3.js` to make an - instruction for the System program to transfer SOL. -- The `mintTo()` and `transfer()` functions from `@solana/spl-token`, to make - instructions to the Token program to mint and transfer tokens -- The `createCreateMetadataAccountV3Instruction()` function from - `@metaplex-foundation/mpl-token-metadata@2` to make instructions to Metaplex - to create token Metadata. - -When working with other programs, however, you’ll need to create instructions +When working with other programs, however, you'll need to create instructions manually. With `@solana/web3.js`, you can create instructions with the `TransactionInstruction` constructor: @@ -47,7 +41,7 @@ const instruction = new TransactionInstruction({ `TransactionInstruction()` takes 3 fields: -- The `programId` field is fairly self-explanatory: it’s the public key (also +- The `programId` field is fairly self-explanatory: it's the public key (also called the 'address' or 'program ID') of the program. - `keys` is an array of accounts and how they will be used during the @@ -60,7 +54,7 @@ const instruction = new TransactionInstruction({ - `isWritable` - a boolean representing whether or not the account is written to during the transaction's execution -- an optional `Buffer` containing data to pass to the program. We’ll be ignoring +- an optional `Buffer` containing data to pass to the program. We'll be ignoring the `data` field for now, but we will revisit it in a future lesson. After making our instruction, we add it to a transaction, send the transaction @@ -99,7 +93,7 @@ for that signature in Solana Explorer, then see: ### Writing transactions for the ping counter program -We’re going to create a script to ping an onchain program that increments a +We're going to create a script to ping an onchain program that increments a counter each time it has been pinged. This program exists on the Solana Devnet at address `ChT1B39WKLS8qUrkLvFDXMhEJ4F1XZzwUNHUt4AU9aVa`. 
The program stores its data in a specific account at the address @@ -147,19 +141,16 @@ Now let's talk to the Ping program! To do this, we need to: Remember, the most challenging piece here is including the right information in the instructions. We know the address of the program that we are calling. We also know that the program writes data to a separate account whose address we -also have. Let’s add the string versions of both of those as constants at the +also have. Let's add the string versions of both of those as constants at the top of the file: ```typescript -const PING_PROGRAM_ADDRESS = new web3.PublicKey( - "ChT1B39WKLS8qUrkLvFDXMhEJ4F1XZzwUNHUt4AU9aVa", -); -const PING_PROGRAM_DATA_ADDRESS = new web3.PublicKey( - "Ah9K7dQ8EHaZqcAsgBW8w37yN2eAy3koFmUn4x3CJtod", -); +const PING_PROGRAM_ADDRESS = "ChT1B39WKLS8qUrkLvFDXMhEJ4F1XZzwUNHUt4AU9aVa"; +const PING_PROGRAM_DATA_ADDRESS = + "Ah9K7dQ8EHaZqcAsgBW8w37yN2eAy3koFmUn4x3CJtod"; ``` -Now let’s create a new transaction, then initialize a `PublicKey` for the +Now let's create a new transaction, then initialize a `PublicKey` for the program account, and another for the data account. ```typescript @@ -168,7 +159,7 @@ const programId = new web3.PublicKey(PING_PROGRAM_ADDRESS); const pingProgramDataId = new web3.PublicKey(PING_PROGRAM_DATA_ADDRESS); ``` -Next, let’s create the instruction. Remember, the instruction needs to include +Next, let's create the instruction. Remember, the instruction needs to include the public key for the Ping program and it also needs to include an array with all the accounts that will be read from or written to. In this example program, only the data account referenced above is needed. @@ -190,9 +181,9 @@ const instruction = new web3.TransactionInstruction({ }); ``` -Next, let’s add this instruction to the transaction we created. Then, call +Next, let's add this instruction to the transaction we created. Then, call `sendAndConfirmTransaction()` by passing in the connection, transaction, and -payer. Finally, let’s log the result of that function call so we can look it up +payer. Finally, let's log the result of that function call so we can look it up on Solana Explorer. ```typescript @@ -251,10 +242,10 @@ console.log( ); ``` -And just like that you’re calling programs on the Solana network and writing +And just like that you're calling programs on the Solana network and writing data onchain! -In the next few lessons, you’ll learn how to +In the next few lessons, you'll learn how to 1. Send transactions safely from the browser instead of running a script 2. Add custom data to your instructions diff --git a/content/courses/intro-to-solana/intro-to-reading-data.md b/content/courses/intro-to-solana/intro-to-reading-data.md index 9ea60e35c..4a8dcb97a 100644 --- a/content/courses/intro-to-solana/intro-to-reading-data.md +++ b/content/courses/intro-to-solana/intro-to-reading-data.md @@ -10,9 +10,9 @@ description: ## Summary -- **SOL** is the name of Solana’s native token. Each SOL is made from 1 billion +- **SOL** is the name of Solana's native token. Each SOL is made from 1 billion **Lamports**. -- **Accounts** store tokens, NFTs, programs, and data. For now, we’ll focus on +- **Accounts** store tokens, NFTs, programs, and data. For now, we'll focus on accounts that store SOL. - **Addresses** point to accounts on the Solana network. Anyone can read the data at a given address. Most addresses are also **public keys**. @@ -57,7 +57,7 @@ Solana. 
We'll also install TypeScript and `esrun`, so we can run `.ts` files on the command line: ```bash -npm install typescript @solana/web3.js esrun +npm install typescript @solana/web3.js@1 esrun ``` ### Connect to the Network @@ -127,7 +127,7 @@ The balance of the account at CenYq6bDRB7p73EjsPEpiYN7uveyPUTdXkDkgUduboaN is 0. ## Lab -Let’s practice what we’ve learned, and check the balance at a particular +Let's practice what we've learned, and check the balance at a particular address. ### Load a keypair diff --git a/content/courses/intro-to-solana/intro-to-writing-data.md b/content/courses/intro-to-solana/intro-to-writing-data.md index 5e2999a12..4fde2db16 100644 --- a/content/courses/intro-to-solana/intro-to-writing-data.md +++ b/content/courses/intro-to-solana/intro-to-writing-data.md @@ -120,8 +120,8 @@ dropped with an error like: > Transaction simulation failed: Attempt to debit an account but found no record of a prior credit. ``` -If you get this error, it’s because your keypair is brand new and doesn’t have -any SOL to cover the transaction fees. Let’s fix this by adding the following +If you get this error, it's because your keypair is brand new and doesn't have +any SOL to cover the transaction fees. Let's fix this by adding the following lines just after we've set up the connection: ```typescript @@ -134,7 +134,7 @@ await airdropIfRequired( ``` This will deposit 1 SOL into your account which you can use for testing. This -won’t work on Mainnet where it would have value. But it's incredibly convenient +won't work on Mainnet where it would have value. But it's incredibly convenient for testing locally and on Devnet. You can also use the Solana CLI command `solana airdrop 1` to get free test SOL @@ -158,12 +158,12 @@ for that signature in the Solana Explorer, then see: ## Lab -We’re going to create a script to send SOL to other students. +We're going to create a script to send SOL to other students. ### Basic scaffolding We'll start by using the same packages and `.env` file we made earlier in -[Intro to Cryptography](/content/courses/intro-to-solana/intro-to-cryptography). +[Intro to Cryptography](/content/courses/intro-to-solana/intro-to-cryptography.md). Create a file called `transfer.ts`: @@ -235,7 +235,7 @@ console.log( console.log(`Transaction signature is ${signature}!`); ``` -### Experiment! +### Experiment Send SOL to other students in the class. 
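One way to sanity-check the experiment is to read the recipient's balance after your transfer lands. A sketch, with a placeholder recipient address:

```typescript
import {
  clusterApiUrl,
  Connection,
  LAMPORTS_PER_SOL,
  PublicKey,
} from "@solana/web3.js";

// Sketch: confirm a transfer landed by reading the recipient's Devnet balance.
const connection = new Connection(clusterApiUrl("devnet"));
// Placeholder: substitute the classmate's address you sent SOL to.
const recipient = new PublicKey("11111111111111111111111111111111");
const balanceInLamports = await connection.getBalance(recipient);
console.log(`Recipient balance: ${balanceInLamports / LAMPORTS_PER_SOL} SOL`);
```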
diff --git a/content/courses/mobile/intro-to-solana-mobile.md b/content/courses/mobile/intro-to-solana-mobile.md
index 0f8b33b21..0bd866010 100644
--- a/content/courses/mobile/intro-to-solana-mobile.md
+++ b/content/courses/mobile/intro-to-solana-mobile.md
@@ -1,50 +1,65 @@
 ---
 title: Introduction to Solana Mobile
 objectives:
-  - Explain the benefits of creating mobile-first dApp experiences
+  - Explain the benefits of creating mobile-first app experiences
   - Explain the high-level Mobile Wallet Adapter (MWA) flow
   - Explain the high-level differences between React and React Native
-  - Create a simple Android Solana dApp using React Native
+  - Create a simple Android Solana app using React Native
 description:
   "Learn how to build native mobile apps using blockchain functionality"
---

## Summary

-- The Solana Mobile Wallet Adapter (MWA) creates a web socket connection between
-  mobile apps and mobile wallets, allowing native mobile apps to submit
-  transactions for signing
-- The simplest way to get started creating Solana mobile applications is with
-  Solana Mobile's
-  [React Native packages](https://docs.solanamobile.com/react-native/setup)
+- The **Solana Mobile Wallet Adapter** (**MWA**) allows mobile apps to submit
+  transactions for signing via a WebSocket connection to mobile wallets.
+- The easiest way to start building Solana mobile applications is by using
+  Solana Mobile's
+  [React Native packages](https://docs.solanamobile.com/react-native/setup) -
  `@solana-mobile/mobile-wallet-adapter-protocol` and
  `@solana-mobile/mobile-wallet-adapter-protocol-web3js`
-- React Native is very similar to React with a few mobile quirks

-## Lesson
+## Lesson Overview

-Solana Mobile Stack (SMS) is designed to help developers create mobile dApps
-with a seamless UX. It consists of the
-[Mobile Wallet Adapter (MWA)](https://docs.solanamobile.com/getting-started/overview#mobile-wallet-adapter),
+In these lessons, we will develop mobile apps that interact with the Solana
+network. This opens up a whole new paradigm of blockchain use cases and
+behaviors. The **Solana Mobile Stack** (**SMS**) is designed to help developers
+seamlessly create mobile apps. It includes the
+[Mobile Wallet Adapter (MWA)](https://docs.solanamobile.com/getting-started/overview#mobile-wallet-adapter),
+a Solana Mobile SDK that works with React Native,
 [Seed Vault](https://docs.solanamobile.com/getting-started/overview#seed-vault),
 and the
-[Solana dApp Store](https://docs.solanamobile.com/getting-started/overview#solana-dapp-store).
-
-Most relevant to your development journey is the Mobile Wallet Adapter (MWA).
-The simplest way to get started is to use the Mobile Wallet Adapter with React
-Native to create a simple Android app. This lesson assumes you're familiar with
-React and Solana programming. If that's not the case,
-[start our course from the beginning](/content/courses/intro-to-solana/intro-to-cryptography)
-and come back here when you feel ready!
-
-### Intro To Solana Mobile
-
-In these units, we'll develop mobile apps that interact with the Solana network.
-This opens up a whole new paradigm of crypto use cases and behaviors.
+[Solana dApp Store](https://docs.solanamobile.com/getting-started/overview#solana-dapp-store).
+These resources simplify mobile development, offering an experience similar to
+the web plus mobile-specific features.
+
+This lesson focuses on using React Native to create a simple Android app that
+integrates with the Solana network. 
If you're not familiar with programming in
+React or Solana, we recommend starting with our
+[Intro to Solana lesson](https://github.com/solana-foundation/developer-content/tree/main/content/courses/intro-to-solana)
+and returning when you're ready. If you are, let's dive in!
+
+## Intro to Solana Mobile
+
+Native mobile wallets hold your secret keys and use them to sign and send
+transactions just like web extension wallets. However, native mobile wallets use
+the
+[Mobile Wallet Adapter](https://github.com/solana-mobile/mobile-wallet-adapter)
+(MWA) standard instead of the
+[Wallet Adapter](https://github.com/anza-xyz/wallet-adapter) to ensure any app
+can work with any wallet.
+
+We will dig into the specifics of the MWA in a
+[later lesson](/content/courses/mobile/mwa-deep-dive.md), but it effectively
+opens a WebSocket between applications to facilitate communication. That way a
+separate app can provide the wallet app with the transaction to be signed and
+sent, and the wallet app can respond with appropriate status updates.

-#### Solana Mobile Use Cases
+### Mobile Use Cases with Solana

-Here are a few examples of what Solana mobile development can unlock:
+Before development, it is important to understand the current landscape of Web3
+mobile development to foresee potential blockers and opportunities. Here are a
+few examples of what Solana mobile development can unlock:

**Mobile Banking and Trading (DeFi)**

@@ -54,7 +69,7 @@ you hold your own keys.

**Mobile Gaming with Solana Micropayments**

-Mobile games account for roughly 50% of the video game industry’s total value,
+Mobile games account for roughly 50% of the video game industry's total value,
largely due to small in-game purchases. However, payment processing fees
usually mean these in-game purchases have a minimum of $0.99 USD. With Solana,
it's possible to unlock true micropayments. Need an extra life? That'll be 0.0001
@@ -66,55 +81,22 @@ SMS can enable a new wave of mobile e-commerce shoppers to pay directly from
their favorite Solana wallet. Imagine a world where you can use your Solana
wallet as seamlessly as you can use Apple Pay.

-To summarize, mobile crypto opens up many doors. Let’s dive in and learn how we
-can be part of it:

-#### How Solana development differs between native mobile apps and web

-Solana wallet interaction differs slightly on mobile compared to the web. The
-core wallet functionality is the same: the wallet holds your private keys and
-uses them to sign and send transactions. To avoid having different interfaces
-between wallets, developers abstracted that functionality into the Solana Wallet
-Adapter standard. This remains the standard on the web. The mobile counterpart
-is the Mobile Wallet Adapter (MWA).

-The differences between the two standards are due to the different construction
-of web vs mobile wallets. Web wallets are just browser extensions that inject
-wallet adapter functions into the `window` object of your webpage. This gives
-your site access to them. Mobile wallets, however, are native applications on a
-mobile operating system. There's no way to surface functions from one native
-application to another. The Mobile Wallet Adapter exists to enable any app,
-written in any language, to connect to a native wallet app.

-We'll dig into the specifics of the Mobile Wallet Adapter in a
-[later lesson](/content/courses/mobile/mwa-deep-dive), but it effectively opens
-a WebSocket between applications to facilitate communication. 
That way a
-separate app can provide the wallet app with the transaction to be signed and
-sent, and the wallet app can respond with appropriate status updates.
-
-#### Supported Operating Systems
-
-At the time of writing, Android is the only mobile OS supported by the Mobile
-Wallet Adapter.
+In summary, mobile blockchain transactions can open many opportunities. Let's
+start building!

-On Android, a WebSocket connection can persist between apps, even when the
-wallet app is in the background.
+### Supported Operating Systems

-On iOS, the lifetime of a connection between apps is purposefully limited by the
-operating system. Specifically, iOS will quickly suspend connections when an app
-is pushed to the background. This kills the MWA WebSocket connection. This is an
-inherent design difference between iOS and Android (probably made to preserve
-battery, network usage, etc).
+Currently, the MWA only supports Android. On Android, a WebSocket connection can
+persist between apps, even when the wallet app is in the background.

-However, this doesn’t mean that Solana dApps can’t run on iOS at all. You can
-still create a Mobile Web App using the
-[standard wallet adapter](https://github.com/solana-labs/wallet-adapter)
-library. Your users can then install a mobile-friendly wallet like
-the [Glow Wallet](https://glow.app/).
+On iOS, the OS quickly suspends WebSocket connections when an app is
+backgrounded, so the standard
+[Wallet Adapter](https://github.com/solana-labs/wallet-adapter) library is used
+instead.

The remainder of this lesson will focus on developing Android apps with the MWA.

-#### Supported Frameworks
+### Supported Frameworks

Solana Mobile supports a number of different frameworks. Officially supported
are React Native and native Android, with community SDKs for Flutter, Unity, and
@@ -132,297 +114,319 @@ Unreal Engine.

- [Unity](https://docs.solanamobile.com/unity/unity_sdk)
- [Unreal Engine](https://docs.solanamobile.com/unreal/unreal_sdk)

-To keep the development experience as close as possible to other lessons, we'll
-be working exclusively with React Native.
-
-### From React to React Native
-
-React Native takes the React web framework and applies it to mobile
-applications. However, while React and React Native feel very similar, there are
-differences. The best way to understand these differences is to experience them
-while coding. But, to give you a head start here is a list of some differences
-to keep in mind:
-
-- React Native compiles down to native iOS and Android applications while React
-  compiles down to a collection of web pages.
-- In React, you use JSX to program with HTML and CSS. With React Native, you use
-  similar syntax to manipulate native UI components. It's more like using a UI
-  library like Chakra or Tailwind UI. Instead of `<div>`, `<p>`, and `<img>`
-  you'll be using `<View>`, `<Text>`, and `<Image>`.
-- Interactions are different. Instead of `onClick`, you'll use `onPress` and
-  other gestures.
-- Many standard React and Node packages may not be compatible with React Native.
-  Fortunately, there are React Native counterparts to the most popular libraries
-  and you can often use polyfills to make Node packages available. If you're not
-  familiar with polyfills, take a look at the
-  [MDN docs](https://developer.mozilla.org/en-US/docs/Glossary/Polyfill). In
-  short, polyfills actively replace Node-native libraries to make them work
-  anywhere Node is not running.
-- Setting up a development environment in React Native can be challenging. This
-  will require setting up Android Studio to compile to Android and XCode for
-  iOS. React Native has a
-  [really good guide](https://reactnative.dev/docs/environment-setup?guide=native)
-  for this.
-- For regular development and testing, you'll use a physical mobile device or an
-  emulator to run your code. This relies on a tool called Metro that comes
-  pre-installed. React Native's setup guide also covers this.
-- React Native gives you access to the phone's hardware that React can't
-  provide. This includes things like the phone's camera, accelerometer, and
-  more.
-- React Native introduces new config files and build folders. For example, the
-  `ios` and `android` directories contain platform-specific information.
-  Additionally, there are config files like `Gemfile` and `metro.config.js`.
-  Generally, leave all configurations alone and just worry about writing your
-  code, the starting point for which will be in `App.tsx`.
-
-There is a learning curve, but if you know React you're not nearly as far from
-being able to develop mobile apps as you think! It may feel jarring to start,
-but after a few hours of React Native development, you'll start to feel much
-more comfortable. You'll likely feel much more confident even after
-[this lesson's lab](#lab).
-
-### Creating a Solana dApp with React Native
-
-Solana React Native dApps are virtually identical to React dApps. The primary
+To keep the development experience as close as possible to other lessons, we
+will be working exclusively with React Native.
+
+## From React to React Native
+
+React Native is very similar to React but designed for mobile. Here are some key
+points to note:
+
+- React Native compiles down to native Android and iOS apps while React compiles
+  down to a collection of web pages.
+- Instead of using web elements like `<div>`, you will use mobile-native
+  elements like `<View>`.
+- React Native allows access to mobile hardware, such as the camera and
+  accelerometer, which React web apps cannot access.
+- Many standard React and Node packages may not be compatible with React Native
+  and setting up React Native can be challenging. Fortunately, the
+  [React Native Docs](https://reactnative.dev/docs/environment-setup?guide=native)
+  contain everything you may need.
+- For development, you will need to set up
+  [Android Studio](https://developer.android.com/studio/intro/) for Android apps
+  and an emulator or physical device for testing.
+
+**NOTE:** There is a learning curve, but if you know React you're not nearly as
+far from being able to develop mobile apps as you think! It may feel jarring to
+start, but after a few hours of React Native development, you will start to
+feel much more comfortable. We have included a [Lab](#lab) section below to
+help you.
+
+## Creating a React Native App on Solana
+
+Solana React Native apps are virtually identical to React apps. The primary
 difference is in the wallet interaction. Instead of the wallet being available
-in the browser, your dApp will create an MWA session with the wallet app of your
+in the browser, your app will create an MWA session with the wallet app of your
 choosing using a WebSocket. Fortunately, this is abstracted for you in the MWA
-library. The only difference you'll need to know is anytime you need to make a
-call to the wallet you'll be using the `transact` function, which we'll talk
-about soon.
+library. The only difference is that anytime you need to make a call to the
+wallet, the `transact` function will be used. We will cover this function in
+more detail later in this lesson.

-![dApp Flow](/public/assets/courses/unboxed/basic-solana-mobile-flow.png)
+![App Flow](/public/assets/courses/unboxed/basic-solana-mobile-flow.png)

-#### Reading data
+## Reading Data

-Reading data from a Solana cluster in React Native is the exact same as in
-React. You use the `useConnection` hook to grab the `Connection` object. Using
-that, you can get account info. Since reading is free, we don’t need to actually
-connect to the wallet.
+Reading data from a Solana cluster in React Native works the same way as in
+React. You can use the `useConnection` hook to access the `connection` object,
+which is responsible for interacting with the Solana network.
+
+In Solana, an account refers to any object stored on-chain, and is typically
+referenced by a
+[public key](https://solana.com/docs/terminology#public-key-pubkey).
+
+Here's an example of how you can read account information using the
+`getAccountInfo` method:

-```tsx
-const account = await connection.getAccountInfo(account);
+```javascript
+const { connection } = useConnection();
+const publicKey = new PublicKey("your-wallet-public-key-here"); // Replace with a valid public key
+const account = await connection.getAccountInfo(publicKey);
```

-If you need a refresher on this, check out our
-[lesson on reading data from the blockchain](/content/courses/intro-to-solana/intro-to-reading-data).
+> **NOTE:** If you need a refresher, refer to our
+> [Intro to Reading Data lesson](/content/courses/intro-to-solana/intro-to-reading-data.md).
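Other reads follow the same pattern. For instance, here is a small sketch of checking an account's SOL balance; the address below is a placeholder, and any valid base58 public key works:

```tsx
import { LAMPORTS_PER_SOL, PublicKey } from "@solana/web3.js";

const { connection } = useConnection();

// Placeholder address - substitute any public key you want to inspect
const wallet = new PublicKey("11111111111111111111111111111112");

// getBalance returns lamports; divide by LAMPORTS_PER_SOL for whole SOL
const lamports = await connection.getBalance(wallet);
console.log(`Balance: ${lamports / LAMPORTS_PER_SOL} SOL`);
```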
-#### Connecting to a wallet
+## Connecting to a Wallet

-Writing data to the blockchain has to happen through a transaction. Transactions
-have to be signed by one or more private keys and sent to an RPC provider. This
-virtually always happens through a wallet application.
+When writing data to the blockchain, it must be done through a **transaction**.
+Transactions need to be signed by one or more secret keys and sent to an
+[RPC provider](https://academy.subquery.network/subquery_network/node_operators/rpc_providers/introduction.html)
+for processing. In almost all cases, this interaction is facilitated through a
+wallet application.

-Typical wallet interaction happens by calling out to a browser extension. On
-mobile, you use a WebSocket to start an MWA session. Specifically, you use
-Android intents where the dApp broadcasts its intent with the `solana-wallet://`
-scheme.
+### Web vs. Mobile Wallet Interactions
+
+The WebSocket that connects the app and the wallet is managed by the MWA and
+initiated using **Android intents**, with the dApp broadcasting its intent using
+the `solana-wallet://` scheme.

![Connecting](/public/assets/courses/unboxed/basic-solana-mobile-connect.png)

-When the wallet app receives this intent, it opens a connection with the dApp
-that initiated the session. Your dApp sends this intent using the `transact`
-function:
+When the wallet app receives the intent broadcast, it opens a WebSocket
+connection with the app that initiated the session. The app initiates this
+connection using the `transact` function, as shown below:

```tsx
transact(async (wallet: Web3MobileWallet) => {
-  // Wallet Action code here
-}
+  // Your wallet action code goes here
+});
```

-This will give you access to the `Web3MobileWallet` object. You can then use
-this to send transactions to the wallet. Again, when you want to access the
-wallet, it has to be through the function `transact` function's callback.
+This function provides access to the `Web3MobileWallet` object, allowing you to
+perform actions such as signing transactions or interacting with wallet data.
+Remember, all wallet interactions must occur inside the callback of the
+`transact` function.

-#### Signing and sending transactions
+### Signing and sending transactions

-Sending a transaction happens inside the `transact` callback. The flow is as
-follows:
+The overall flow for signing and sending a transaction is as follows:

-1. Establish a session with a wallet using `transact` which will have a callback
-   of `async (wallet: Web3MobileWallet) => {...}`.
-2. Inside the callback, request authorization with the `wallet.authorize` or
-   `wallet.reauthorize` method depending on the state of the wallet.
-3. Sign the transaction with `wallet.signTransactions` or sign and send with
-   `wallet.signAndSendTransactions`.
+- Use the `transact` function to establish a session with the wallet. This
+  function takes an asynchronous callback:
+  `async (wallet: Web3MobileWallet) => {...}`.
+- Inside the callback, request wallet authorization using `wallet.authorize()`
+  or `wallet.reauthorize()`, depending on the wallet's state (whether it has an
+  active session or requires reauthorization).
+- Once the wallet is authorized, you can either:
+  - Sign the transaction using `wallet.signTransactions()`, or
+  - Sign and send the transaction directly using
+    `wallet.signAndSendTransactions()`.

![Transacting](/public/assets/courses/unboxed/basic-solana-mobile-transact.png)
+
+To manage the wallet's authorization state, consider creating a
+`useAuthorization()` hook. This hook can streamline the process of handling
+authorization within your app, especially if you have multiple interactions with
+the wallet.

-You may want to create a `useAuthorization()` hook to
-manage the wallet's authorization state. 
We'll practice this in the -[Lab](#lab). +> We will explore the use of this hook and practice managing the wallet's state +> in more detail during the lab exercises. Here is an example of sending a transaction using MWA: ```tsx +//import required dependencies if any + const { authorizeSession } = useAuthorization(); const { connection } = useConnection(); -const sendTransactions = (transaction: Transaction) => { - transact(async (wallet: Web3MobileWallet) => { - const latestBlockhashResult = await connection.getLatestBlockhash(); - const authResult = await authorizeSession(wallet); - - const updatedTransaction = new Transaction({ - ...transaction, - ...latestBlockhashResult, - feePayer: authResult.publicKey, - }); +const sendTransactions = async (transaction: Transaction) => { + try { + // Start a session with the wallet + await transact(async (wallet: Web3MobileWallet) => { + // Get the latest blockhash for the transaction + const { blockhash, lastValidBlockHeight } = + await connection.getLatestBlockhash(); + + // Authorize the wallet session + const authResult = await authorizeSession(wallet); + + // Create an updated transaction with the latest blockhash and feePayer + const updatedTransaction = new Transaction({ + recentBlockhash: blockhash, + feePayer: authResult.publicKey, + }).add(transaction); + + // Sign and send the transaction via the wallet + const signatures = await wallet.signAndSendTransactions({ + transactions: [updatedTransaction], + }); - const signature = await wallet.signAndSendTransactions({ - transactions: [transaction], + console.log(`Transaction successful! Signature: ${signatures[0]}`); }); - }); + } catch (error) { + console.error("Error sending transaction:", error); + throw new Error("Transaction failed"); + } }; ``` -#### Debugging - -Since two applications are involved in sending transactions, debugging can be -tricky. Specifically, you won’t be able to see the wallet's debug logs the way -you can see your dApps logs. - -Fortunately, -[Logcat on Android Studio](https://developer.android.com/studio/debug/logcat) -makes it possible to see logs from all applications on your device. - -If you prefer not to use Logcat, the other method you could try is to only use -the wallet to sign transactions, and then send them in your code. This allows -you to better debug the transaction if you’re running into problems. - -#### Releasing - -Deploying mobile applications can be difficult on its own. It's often even more -difficult when it's a crypto app. There are two main reasons for this: customer -safety and financial incentives. - -First, most of the mobile app marketplaces have policies restricting blockchain -involvement. Crypto is new enough that it's a regulatory wildcard. Platforms -feel they're protecting users by being strict with blockchain-related apps. - -Second, if you use crypto for "purchases" in-app, you’ll be seen as -circumnavigating the platform’s fee (anywhere from 15-30%). This is explicitly -against app store policies as the platform is trying to protect its revenue -stream. - -These are hurdles for sure, but there's hope. Here are some things to keep in -mind for each marketplace: - -- **App Store (iOS) -** We only talked about Android today for the technical MWA - reason. However, their policies are also some of the most strict and make it - hard for Solana dApps to exist. For now, Apple has some pretty strict - anti-crypto policies. Wallets seem to be fine, but they'll flag and likely - reject anything that seems like a purchase using crypto. 
-- **Google Play (Android) -** Google is generally more relaxed, but there are - still a few things to be aware of. As of this writing in November ‘23, Google - is rolling out - [new crypto policies](https://www.theverge.com/2023/7/12/23792720/android-google-play-blockchain-crypto-nft-apps) - to make it more clear what they will and will not allow. Take a look. -- **Steam -** Does not allow crypto games at all - > “built on blockchain technology that issue or allow the exchange of - > cryptocurrencies or NFTs.” -- **Download Sites / Your Site -** Depending on the target platform, you can - make your dApp available for download on your own site. However, most users - are wary of downloading mobile applications from websites. -- **dApp Store (Solana) -** Solana saw the issues with mobile dApp distribution - on other platform app stores and decided to make their own. As part of the SMS - stack, they created the - [Solana dApp Store](https://docs.solanamobile.com/getting-started/overview#solana-dapp-store). - -### Conclusion - -Getting started with mobile Solana development is fairly straightforward thanks -to SMS. While React Native is slightly different than React, the code you have -to write is more similar than different. The primary difference is that the -portion of your code that interacts with wallets will exist within the -`transact` callback. Remember to look at our other lessons if you need a -refresher on Solana development more broadly. - -## Lab - -Let's practice this together by building a simple Android mobile counter dApp -with React Native. The app will interact with the Anchor counter program that we -made in the -[Intro to client-side Anchor development](https://www.soldev.app/course/intro-to-anchor-frontend) -lesson. This dApp simply displays a counter and allows users to increment the -count through a Solana program. In this app, we’ll be able to see the current -count, connect our wallet, and increment the count. We’ll be doing this all on -Devnet and will be compiling only for Android. - -This program already exists and is already deployed on Devnet. Feel free to -check out the -[deployed program's code](https://github.com/Unboxed-Software/anchor-ping-frontend/tree/solution-decrement) -if you want more context. - -We’ll write this application in vanilla React Native without a starting -template. Solana Mobile provides a -[React Native template](https://docs.solanamobile.com/react-native/react-native-scaffold) -that shortcuts some of the boilerplate, but there's no better way to learn than -to do something from scratch. - -#### 0. Prerequisites - -React Native allows us to write mobile applications using similar patterns as -React. However, under the hood, our React code needs to be compiled down to -languages and frameworks that work with the device's native OS. This requires a -few prerequisite setup items: - -1. [Set up a React Native dev environment](https://reactnative.dev/docs/environment-setup?guide=native#creating-a-new-application). - Go through the - [**_entire article_**](https://reactnative.dev/docs/environment-setup?guide=native#creating-a-new-application), - using Android as the target OS. For convenience, we've typed out the - high-level steps below. Keep in mind that the source article might change - from the time of writing to when you're reading this. The source article is - your source of truth here. - - 1. Install dependencies - 2. Install Android Studio - 3. Configure **ANDROID_HOME** environment variable - 4. 
Create a new sample project (this is only used to set up the emulator) - - 1. If you run into the error `✖ Copying template`, add the `--npm` flag - at the end - - ```bash - npx react-native@latest init AwesomeProject - ✔ Downloading template - ✖ Copying template - - npx react-native@latest init AwesomeProject --npm - ✔ Downloading template - ✔ Copying template - ``` - - 5. Run and compile the sample project on your emulator - -2. Install and run the Solana fake wallet - - 1. Install the repo - - ```bash - git clone https://github.com/solana-mobile/mobile-wallet-adapter.git - ``` - - 2. In Android - Studio, `Open project > Navigate to the cloned directory > Select mobile-wallet-adapter/android` - 3. After Android Studio finishes loading the project, select `fakewallet` in - the build/run configuration dropdown in the top right - - ![Fake Wallet](/public/assets/courses/unboxed/basic-solana-mobile-fake-wallet.png) - - 4. For debugging, you’ll want to use `Logcat`. Now that your fake wallet is - running on the emulator, go to `View -> Tool Windows -> Logcat`. This will - open up a console logging out what’s happening with fake wallet. - -3. (Optional) Install other - [Solana wallets](https://solana.com/ecosystem/explore?categories=wallet) on - the Google Play store. - -Lastly, if you run into Java versioning issues - you’ll want to be on Java -version 11. To check what you’re currently running type `java --version` in your -terminal. - -#### 1. Plan out the App's Structure +## Debugging + +Debugging can be challenging when working with Solana mobile transactions, as +two separate applications are involved: your app and the mobile wallet. Unlike +typical single-application setups, you won't have direct access to the wallet’s +logs, which makes tracking issues more complex. + +However, Android Studio’s +[Logcat](https://developer.android.com/studio/debug/logcat) provides a useful +solution - enabling you to view logs from all applications running on your +device including the wallet. By leveraging Logcat, you can monitor the +interaction between your app and the wallet, helping you identify any issues +that arise during transaction signing and submission. + +If Logcat is not your preferred tool, an alternative approach is to use the +wallet solely for signing transactions, while handling the actual transaction +submission in your app’s code. This method allows for greater control over +debugging, as you can inspect the transaction flow more thoroughly on the client +side. + +## Deploying for Solana Mobile + +Deploying mobile applications can be challenging, and the complexity increases +when dealing with blockchain-based apps. Two primary factors contribute to this +difficulty: customer safety and financial incentives. + +### Customer Safety and Regulatory Uncertainty: + +Most mobile app marketplaces, such as the Apple App Store and Google Play Store, +have policies that restrict blockchain-related apps. Since blockchain is still a +relatively new and evolving technology, platforms are cautious about regulatory +compliance. They often adopt strict guidelines to protect users from potential +risks associated with blockchain apps. + +### In-App Purchases and Platform Fees: + +Another significant challenge arises when using blockchain transactions for +in-app purchases. Many platforms impose a transaction fee on purchases made +within their apps (ranging from 15% to 30%). Payment via the blockchain is often +seen as a way to bypass these fees, which is explicitly prohibited by most app +stores. 
These platforms prioritize protecting their revenue streams and
+therefore enforce strict policies against apps that facilitate blockchain
+payments for in-app purchases.
+
+> While traditional app stores impose strict policies around blockchain
+> transactions to protect their revenue and comply with regulations, alternative
+> distribution methods like the Solana dApp Store offer developers a more
+> flexible platform for deploying Solana-based mobile applications. This
+> decentralized approach bypasses many of the restrictions seen in centralized
+> app marketplaces, allowing apps to thrive in a more blockchain-friendly
+> ecosystem.
+
+## Conclusion
+
+Getting started with Solana mobile development is more accessible than ever,
+thanks to the Solana Mobile Stack (SMS). Although React Native introduces some
+differences compared to React, much of the code you will write remains familiar,
+particularly when it comes to structuring the UI and handling state. The main
+distinction lies in how you interact with wallets, which requires using the
+`transact` callback to establish wallet sessions, sign transactions, and
+communicate with Solana's blockchain.
+
+As you continue building Solana mobile apps, it's essential to keep learning and
+refining your skills. Be sure to explore additional resources like:
+
+- [The official Solana Developer Docs](https://solana.com/docs) for in-depth
+  guides on Solana's core libraries and best practices.
+
+- [Solana Stack Exchange](https://solana.stackexchange.com/) forum for
+  troubleshooting, sharing insights, and staying updated on the latest ecosystem
+  changes.
+
+Mastering mobile Solana development will open up new opportunities in
+decentralized finance (DeFi), gaming, and e-commerce, allowing you to build
+cutting-edge applications with a seamless user experience. Stay curious and
+experiment with different tools to push the boundaries of what you can achieve
+with mobile apps. Let's put our knowledge to the test by building a counting app
+with React Native for Android OS!
+
+## Lab: Building a Mobile Counter App with React Native
+
+This app will display a counter and allow users to make increments via a
+transaction on the Solana blockchain. The app will also connect to a wallet for
+signing transactions.
+
+We will use the **Anchor framework** to interact with the on-chain counter
+program. The client side has already been developed in a previous lesson,
+[Intro to client-side Anchor development](https://solana.com/developers/courses/onchain-development/intro-to-anchor-frontend).
+Feel free to check out its code for more context.
+
+To ensure you fully understand the core concepts, we will write this application
+in vanilla React Native without a starting template. While Solana Mobile offers
+templates that handle some boilerplate, building from scratch provides a much
+deeper understanding.
+
+### Getting Started
+
+To get started, you will need to properly set up a React Native development
+environment if you haven't already. This
+[article](https://reactnative.dev/docs/set-up-your-environment) shows you how.
+Remember that this step is not required if you are using a
+[Framework](https://reactnative.dev/architecture/glossary#react-native-framework).
+
+Ensure you have [Node.js](https://nodejs.org/en/download) installed on your
+system. This will manage your JavaScript packages. Next, install Android Studio:
+
+Android Studio is required to run the Android emulator and to compile your React
+Native app for Android devices. You will also need to configure the
+`ANDROID_HOME` environment variable, for example:
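Here is a rough sketch of a typical setup on macOS or Linux; this is an assumption for illustration only, since the SDK path varies by OS and install location (check Android Studio's SDK Manager for yours):

```bash
# Append to ~/.zshrc or ~/.bashrc - adjust the path to your actual SDK location
export ANDROID_HOME=$HOME/Library/Android/sdk
export PATH=$PATH:$ANDROID_HOME/emulator
export PATH=$PATH:$ANDROID_HOME/platform-tools
```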

> **NOTE:** You will need to configure the `ANDROID_HOME` environment variable
> so that your terminal can recognize Android's SDK tools. This step is critical
> for running and building your app on Android.

## Project Setup

Create a sample project to ensure your Android environment is set up correctly.
In your terminal, run the command below in your preferred directory to scaffold
a new React Native project, where `SampleProject` is your preferred project
name. You can open the project in Android Studio and ensure it runs correctly
on the Android emulator.

```bash
npx react-native init SampleProject --npm
```

### Cloning and Running MWA

1. Clone the repo into `SampleProject`

   ```bash
   git clone https://github.com/solana-mobile/mobile-wallet-adapter.git
   ```

2. In Android Studio, _Open project > Navigate to the cloned directory > Select
   mobile-wallet-adapter/android_
3. After Android Studio finishes loading the project, select `fakewallet` in
   the build/run configuration dropdown in the top right

   ![Fake Wallet](/public/assets/courses/unboxed/basic-solana-mobile-fake-wallet.png)

4. For easier debugging, use **Logcat**. Check the
   [Logcat installation guide](https://developer.android.com/studio/debug/logcat)
   if you are interested.
5. Now that your fake wallet is running on the emulator, go to _View -> Tool
   Windows -> Logcat_. This will open up a console logging out what's happening
   with fake wallet.

6. (Optional) Install other
   [Solana wallets](https://play.google.com/store/search?q=solana%20wallet&c=apps)
   on the Google Play store.

Lastly, we recommend installing _Java version 11_ to avoid dependency errors. To
know what version you have installed, run `java --version` in your terminal.

### 1. Plan out the App Structure

Before we do any coding, let's conceptualize the outline of the app. Again, this
app will connect to and interact with the counter program we've already deployed
@@ -437,9 +437,9 @@ to Devnet. To do this, we'll need the following:

There will be more files and considerations, but these are the most important
files we'll be creating and working with.

-#### 2. Create the App
+### 2. Create the App

-Now that we've got some of the basic setup and structure down, let’s scaffold a
+Now that we've got some of the basic setup and structure down, let's scaffold a
new app with the following command:

```bash
@@ -458,34 +458,35 @@ npm run android

This should open and run the app in your Android emulator. If you run into
problems, check to make sure you’ve accomplished everything in the
-[prerequisites section](#0-prerequisites).
+[_Getting Started_](#getting-started) section.

-#### 3. Install Dependencies
+### 3. Install Dependencies

-We’ll need to add in our Solana dependencies.
+We will need to import our Solana dependencies. 
[The Solana Mobile docs provide a nice list of packages](https://docs.solanamobile.com/react-native/setup)
and explanations for why we need them:

- `@solana-mobile/mobile-wallet-adapter-protocol`: A React Native/Javascript API
  enabling interaction with MWA-compatible wallets
- `@solana-mobile/mobile-wallet-adapter-protocol-web3js`: A convenience wrapper
-  to use common primitives
-  from [@solana/web3.js](https://github.com/solana-labs/solana-web3.js), such
-  as `Transaction` and `Uint8Array`
+  to use common primitives from
+  [@solana/web3.js](https://github.com/solana-labs/solana-web3.js), such as
+  `Transaction` and `Uint8Array`
- `@solana/web3.js`: Solana Web Library for interacting with the Solana network
-  through the [JSON RPC API](https://docs.solana.com/api/http)
-- `react-native-get-random-values` Secure random number generator polyfill
-  for `web3.js` underlying Crypto library on React Native
-- `buffer`: Buffer polyfill; also needed for `web3.js` on React Native
+  through the
+  [JSON RPC API](https://github.com/solana-foundation/developer-content/blob/main/docs/rpc/http/index.mdx)
+- `react-native-get-random-values`: Secure random number generator polyfill
+  for the `web3.js` underlying Crypto library on React Native
+- `buffer`: Buffer polyfill; also needed for `web3.js` on React Native

-In addition to this list, we'll add two more packages:
+In addition to this list, we will add two more packages:

- `@coral-xyz/anchor`: The Anchor TS client.
- `assert`: A polyfill that lets Anchor do its thing.
- `text-encoding-polyfill`: A polyfill needed to create the `Program` object

-If you’re not familiar: polyfills actively replace Node-native libraries to make
-them work anywhere Node is not running. We’ll finish our polyfill setup shortly.
+If you're not familiar: polyfills provide Node-native libraries to make them
+work anywhere Node is not running. We will finish our polyfill setup shortly.
For now, install dependencies using the following command:

```bash
@@ -500,9 +501,9 @@ npm install \
  text-encoding-polyfill
```

-#### 4. Create ConnectionProvider.tsx
+### 4. Create ConnectionProvider.tsx file

-Let’s start adding our Solana functionality. Create a new folder called
+Let's start adding our Solana functionality. Create a new folder called
`components` and within it, a file called `ConnectionProvider.tsx`. This
provider will wrap the entire application and make our `Connection` object
available throughout. Hopefully, you're noticing a pattern: this is identical to
@@ -526,12 +527,11 @@ const ConnectionContext = createContext(
  {} as ConnectionContextState,
);

-export function ConnectionProvider(props: ConnectionProviderProps) {
-  const {
-    children,
-    endpoint,
-    config = { commitment: "confirmed" },
-  } = { ...props };
+export function ConnectionProvider({
+  children,
+  endpoint,
+  config = { commitment: "confirmed" },
+}: ConnectionProviderProps) {
  const connection = useMemo(
    () => new Connection(endpoint, config),
    [config, endpoint],
@@ -548,10 +548,10 @@ export const useConnection = (): ConnectionContextState =>
  useContext(ConnectionContext);
```

-#### 5. Create AuthProvider.tsx
+### 5. Create AuthProvider.tsx file

-The next Solana provision we’ll need is the auth provider. This is one of the
-main differences between mobile and web development. What we’re implementing
+The next Solana provision we will need is the **auth provider**. This is one of
+the main differences between mobile and web development. 
What we’re implementing here is roughly equivalent to the `WalletProvider` that we’re used to in web apps. However, since we're using Android and its natively installed wallets, the flow to connect and utilize them is a bit different. Most notably, we need to @@ -570,20 +570,20 @@ We do this by providing the following in our `AuthProvider`: - `deauthorizeSession(wallet)`: Deauthorizes the `wallet`. - `onChangeAccount`: Acts as a handler when `selectedAccount` is changed. -We’re also going to throw in some utility methods: +We are also going to throw in some utility methods: - `getPublicKeyFromAddress(base64Address)`: Creates a new Public Key object from the Base64 address given from the `wallet` object - `getAuthorizationFromAuthResult`: Handles the authorization result, extracts relevant data from the result, and returns the `Authorization` context object -We’ll expose all of this through a `useAuthorization` hook. +We will expose all of this through a `useAuthorization` hook. -Since this provider is the same across virtually all apps, we're going to give -you the full implementation that you can copy/paste. We'll dig into the details -of MWA in a future lesson. +Since this provider is the same across all apps, we are going to give you the +full implementation that you can copy and paste. We will dig into the details of +MWA in a future lesson. -Create the file `AuthProvider.tsx` in the `components` and paste in the +Create the file `AuthProvider.tsx` in the `components` folder and paste in the following: ```tsx @@ -601,25 +601,19 @@ import { toUint8Array } from "js-base64"; import { useState, useCallback, useMemo, ReactNode } from "react"; import React from "react"; -export const AuthUtils = { +const AuthUtils = { getAuthorizationFromAuthResult: ( authResult: AuthorizationResult, previousAccount?: Account, ): Authorization => { - let selectedAccount: Account; - if ( - //no wallet selected yet - previousAccount === null || - //the selected wallet is no longer authorized + const selectedAccount = + previousAccount === undefined || !authResult.accounts.some( ({ address }) => address === previousAccount.address, ) - ) { - const firstAccount = authResult.accounts[0]; - selectedAccount = AuthUtils.getAccountFromAuthorizedAccount(firstAccount); - } else { - selectedAccount = previousAccount; - } + ? 
AuthUtils.getAccountFromAuthorizedAccount(authResult.accounts[0]) + : previousAccount; + return { accounts: authResult.accounts.map( AuthUtils.getAccountFromAuthorizedAccount, @@ -631,19 +625,13 @@ export const AuthUtils = { getAccountFromAuthorizedAccount: ( authAccount: AuthorizedAccount, - ): Account => { - return { - ...authAccount, - publicKey: AuthUtils.getPublicKeyFromAddress(authAccount.address), - }; - }, - - getPublicKeyFromAddress: (address: Base64EncodedAddress) => { - return new PublicKey(toUint8Array(address)); - }, + ): Account => ({ + ...authAccount, + publicKey: new PublicKey(toUint8Array(authAccount.address)), + }), }; -export type Account = Readonly<{ +type Account = Readonly<{ address: Base64EncodedAddress; label?: string; publicKey: PublicKey; @@ -655,11 +643,11 @@ type Authorization = Readonly<{ selectedAccount: Account; }>; -export const AppIdentity = { +const APP_IDENTITY = { name: "Solana Counter Incrementor", }; -export type AuthorizationProviderContext = { +type AuthorizationProviderContext = { accounts: Account[] | null; authorizeSession: (wallet: AuthorizeAPI & ReauthorizeAPI) => Promise; deauthorizeSession: (wallet: DeauthorizeAPI) => void; @@ -669,25 +657,24 @@ export type AuthorizationProviderContext = { const AuthorizationContext = React.createContext({ accounts: null, - authorizeSession: (_wallet: AuthorizeAPI & ReauthorizeAPI) => { + authorizeSession: () => { throw new Error("Provider not initialized"); }, - deauthorizeSession: (_wallet: DeauthorizeAPI) => { + deauthorizeSession: () => { throw new Error("Provider not initialized"); }, - onChangeAccount: (_nextSelectedAccount: Account) => { + onChangeAccount: () => { throw new Error("Provider not initialized"); }, selectedAccount: null, }); -export type AuthProviderProps = { +type AuthProviderProps = { children: ReactNode; cluster: Cluster; }; -export function AuthorizationProvider(props: AuthProviderProps) { - const { children, cluster } = { ...props }; +function AuthorizationProvider({ children, cluster }: AuthProviderProps) { const [authorization, setAuthorization] = useState( null, ); @@ -699,55 +686,47 @@ export function AuthorizationProvider(props: AuthProviderProps) { authorization?.selectedAccount, ); setAuthorization(nextAuthorization); - return nextAuthorization; }, - [authorization, setAuthorization], + [authorization], ); const authorizeSession = useCallback( async (wallet: AuthorizeAPI & ReauthorizeAPI) => { - const authorizationResult = await (authorization - ? wallet.reauthorize({ + const authorizationResult = authorization + ? 
await wallet.reauthorize({ auth_token: authorization.authToken, - identity: AppIdentity, + identity: APP_IDENTITY, }) - : wallet.authorize({ cluster, identity: AppIdentity })); + : await wallet.authorize({ cluster, identity: APP_IDENTITY }); return (await handleAuthorizationResult(authorizationResult)) .selectedAccount; }, - [authorization, handleAuthorizationResult], + [authorization, cluster, handleAuthorizationResult], ); const deauthorizeSession = useCallback( async (wallet: DeauthorizeAPI) => { - if (authorization?.authToken === null) { - return; + if (authorization?.authToken) { + await wallet.deauthorize({ auth_token: authorization.authToken }); + setAuthorization(null); } - - await wallet.deauthorize({ auth_token: authorization.authToken }); - setAuthorization(null); }, - [authorization, setAuthorization], + [authorization], ); - const onChangeAccount = useCallback( - (nextAccount: Account) => { - setAuthorization(currentAuthorization => { - if ( - //check if the account is no longer authorized - !currentAuthorization?.accounts.some( - ({ address }) => address === nextAccount.address, - ) - ) { - throw new Error(`${nextAccount.address} is no longer authorized`); - } - + const onChangeAccount = useCallback((nextAccount: Account) => { + setAuthorization(currentAuthorization => { + if ( + currentAuthorization?.accounts.some( + ({ address }) => address === nextAccount.address, + ) + ) { return { ...currentAuthorization, selectedAccount: nextAccount }; - }); - }, - [setAuthorization], - ); + } + throw new Error(`${nextAccount.address} is no longer authorized`); + }); + }, []); const value = useMemo( () => ({ @@ -767,21 +746,28 @@ export function AuthorizationProvider(props: AuthProviderProps) { ); } -export const useAuthorization = () => React.useContext(AuthorizationContext); +const useAuthorization = () => React.useContext(AuthorizationContext); + +export { + AuthorizationProvider, + useAuthorization, + type Account, + type AuthProviderProps, + type AuthorizationProviderContext, +}; ``` -#### 6. Create ProgramProvider.tsx +### 6. Create ProgramProvider.tsx file The last provider we need is our program provider. This will expose the counter program we want to interact with. -Since we're using the Anchor TS client to interact with our program, we need the -program's IDL. Start by creating a root-level folder called `models`, then -create a new file `anchor-counter.ts`. Paste the contents of the -[Anchor Counter IDL](/public/assets/courses/unboxed/counter-rn-idl.ts) into this -new file. +Since we are using the Anchor TS client to interact with our program, we need +the program's IDL. Start by creating a root-level folder called `models`, then +create a new file `anchor-counter.ts`. Paste the contents of the Anchor Counter +IDL into this new file. -Next, create the file `ProgramProvider.tsx` inside of `components`. Inside we'll +Next, create the file `ProgramProvider.tsx` inside of components. 
Inside we will create the program provider to surface our program and the counter PDA: ```tsx @@ -820,8 +806,7 @@ export type ProgramProviderProps = { children: ReactNode; }; -export function ProgramProvider(props: ProgramProviderProps) { - const { children } = props; +export function ProgramProvider({ children }: ProgramProviderProps) { const { connection } = useConnection(); const [program, setProgram] = useState | null>(null); const [counterAddress, setCounterAddress] = useState(null); @@ -831,6 +816,11 @@ export function ProgramProvider(props: ProgramProviderProps) { "ALeaCzuJpZpoCgTxMjJbNjREVqSwuvYFRZUfc151AKHU", ); + // MockWallet is a placeholder wallet used for initializing the AnchorProvider. + // In a mobile app, we don't need a real wallet here because the actual signing + // will be done by the user's mobile wallet app. This mock wallet allows us to + // set up the provider without a real wallet instance. + const MockWallet = { signTransaction: () => Promise.reject(), signAllTransactions: () => Promise.reject(), @@ -875,9 +865,9 @@ export function ProgramProvider(props: ProgramProviderProps) { export const useProgram = () => useContext(ProgramContext); ``` -#### 7. Modify App.tsx +### 7. Modify App.tsx file -Now that we have all our providers, let’s wrap our app with them. We're going to +Now that we have all our providers, let's wrap our app with them. We're going to re-write the default `App.tsx` with the following changes: - Import our providers and add in our polyfills @@ -907,11 +897,14 @@ export default function App() { const endpoint = clusterApiUrl(cluster); return ( + // ConnectionProvider: Manages the connection to the Solana network + // AuthorizationProvider: Handles wallet authorization + // ProgramProvider: Provides access to the Solana program @@ -921,9 +914,9 @@ export default function App() { } ``` -#### 8. Create MainScreen.tsx +### 8. Create MainScreen.tsx file -Now, let’s put everything together to create our UI. Create a new folder called +Now, let's put everything together to create our UI. Create a new folder called `screens` and a new file called `MainScreen.tsx` inside of it. In this file, we are only structuring the screen to display two yet-to-be-created components: `CounterView` and `CounterButton`. @@ -935,51 +928,49 @@ to CSS. In `screens/MainScreen.tsx` paste the following: ```tsx +import React from "react"; import { StatusBar, StyleSheet, View } from "react-native"; import { CounterView } from "../components/CounterView"; import { CounterButton } from "../components/CounterButton"; -import React from "react"; -const mainScreenStyles = StyleSheet.create({ +export function MainScreen() { + return ( + + + + + + + + + + ); +} + +const styles = StyleSheet.create({ container: { height: "100%", width: "100%", backgroundColor: "lightgray", }, - - incrementButtonContainer: { position: "absolute", right: "5%", bottom: "3%" }, + incrementButtonContainer: { + position: "absolute", + right: "5%", + bottom: "3%", + }, counterContainer: { alignContent: "center", alignItems: "center", justifyContent: "center", }, }); - -export function MainScreen() { - return ( - - - - - - - - - - ); -} ``` -#### 9. Create CounterView.tsx +### 9. Create CounterView.tsx file The `CounterView` is the first of our two program-specific files. `CounterView`'s only job is to fetch and listen for updates on our `Counter` -account. Since we’re only listening here, we don’t have to do anything +account. Since we're only listening here, we don't have to do anything MWA-related. 
It should look identical to a web application. We'll use our
`Connection` object to listen for the `programAddress` specified in
`ProgramProvider.tsx`. When the account is changed, we update the UI.
@@ -1045,7 +1036,7 @@ export function CounterView() {
 }
```

-#### 10. Create CounterButton.tsx
+### 10. Create CounterButton.tsx file

Finally, we have our last component, the `CounterButton`. This floating action
button will do the following in a new function `incrementCounter`:
@@ -1181,9 +1172,9 @@ export function CounterButton() {
 }
```

-#### 11. Build and Run
+### 11. Build and Run

-Now it’s time to test that everything works! Build and run with the following
+Now it's time to test that everything works! Build and run with the following
command:

```bash
@@ -1200,30 +1191,33 @@ test your app:

If you run into problems, here are some examples of what they could be and how
to fix them:

-- Application does not build → Exit Metro with ctrl+c and try again
+- Application does not build → Exit Metro with _Ctrl+C_ and try again
- Nothing happens when you press the `CounterButton` → Make sure you have Solana
  wallet installed ( like the fake wallet we installed in Prerequisites )
- You get stuck in a forever loop while calling `increment` → This is likely due
  to you reaching a Devnet airdrop rate limit. Take out the airdrop section in
-  `CounterButton` and manually send some Devnet sol to your wallet’s address
+  `CounterButton` and manually send some Devnet SOL to your wallet's address
  (printed in the console)

-That's it! You've made your first Solana Mobile dApp. If you get stuck, feel
-free to check out the
-[full solution code](https://github.com/Unboxed-Software/solana-react-native-counter)
+That's it! You've made your first Solana Mobile app. If you get stuck, feel free
+to check out the
+[full solution code](https://github.com/solana-developers/react-native-counter)
on the `main` branch of the repository.

## Challenge

-Your challenge today is to take our app and add a decrement function. Simply add
-another button and call the `decrement` function on our program. This
-instruction already exists on the program and its IDL, so you simply need to
-write client code to call it.
+Your next challenge is to expand the app by adding a `decrement` function. You
+need to create another button that will call the `decrement` method on the
+Solana program. The logic for the decrement function already exists in the
+program's **IDL** (**Interface Description Language**), so your task is to write
+the client-side code that interacts with it.
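As a nudge before you peek at the solution, the call will mirror the `incrementCounter` flow from the lab. Here is a hedged sketch, assuming the same `useProgram` hook, account names, and MWA send logic used earlier in this lesson:

```tsx
// Hypothetical sketch only - adapt it to your own component and send flow.
const decrementCounter = async () => {
  if (!program || !counterAddress) return;

  // Build the decrement instruction from the program's IDL via Anchor
  const transaction = await program.methods
    .decrement()
    .accounts({ counter: counterAddress })
    .transaction();

  // Reuse the same MWA signing/sending flow as the increment button
  await sendTransaction(transaction);
};
```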
+
+Once you've completed this, you can check your solution against the solution
+code available on the
-After you give it a try on your own, feel free to take a look at the
-[solution code on the `solution` branch](https://github.com/Unboxed-Software/solana-react-native-counter/tree/solution).
+[solution branch](https://github.com/solana-developers/react-native-counter).

-
-Push your code to GitHub and
-[tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=c15928ce-8302-4437-9b1b-9aa1d65af864)!

+
+If you've successfully completed the lab, push your code to GitHub and share
+your feedback on this lesson through this
+[form](https://form.typeform.com/to/IPH0UGz7#answers-lesson=c15928ce-8302-4437-9b1b-9aa1d65af864).
diff --git a/content/courses/mobile/metadata.yml b/content/courses/mobile/metadata.yml
index f0dff73dd..0d22afc5b 100644
--- a/content/courses/mobile/metadata.yml
+++ b/content/courses/mobile/metadata.yml
@@ -7,7 +7,3 @@ lessons:
  - mwa-deep-dive
  - solana-mobile-dapps-with-expo
priority: 99
-# Uses deprecated @metaplex-foundation/js library for NFTs
-# which also uses old RPC methods and no longer functions.
-# TODO: Superteam to update
-isHidden: true
diff --git a/content/courses/mobile/mwa-deep-dive.md b/content/courses/mobile/mwa-deep-dive.md
index a4dd34390..0ceaa2fd7 100644
--- a/content/courses/mobile/mwa-deep-dive.md
+++ b/content/courses/mobile/mwa-deep-dive.md
@@ -15,17 +15,18 @@ description:
 - Wallets are just wrappers around a keypair, but they're essential for secure
   key management
- Mobile and Web dApps handle their wallet-app connection differently
-- MWA handles all of its wallet interaction within the `transact` function
+- MWA handles all of its wallet interaction by wrapping the wallet's
+  functionality within the `transact` function for easier integration.
- Solana Mobile's `walletlib` does the heavy lifting for surfacing wallet
  requests to wallet apps

## Lesson

Wallets exist to protect your secret keys. While some applications might have
-app-specific keys, many crypto use cases rely on a single identity used across
-multiple apps. In these cases, you very much want to be careful about how you
-expose signing across these apps. You don't want to share your secret key with
-all of them, which means you need a standard for allowing apps to submit
+app-specific keys, many blockchain use cases rely on a single identity used
+across multiple apps. In these cases, you very much want to be careful about how
+you expose signing across these apps. You don't want to share your secret key
+with all of them, which means you need a standard for allowing apps to submit
 transactions for signature to a secure wallet app that holds your secret key.
 This is where the Mobile Wallet Adapter (MWA) comes in. It's the transport layer
 to connect your mobile dApps to your wallet.
@@ -40,7 +41,7 @@ app-wallet connection differently.

At its core, a wallet app is fairly straightforward. It's a secure wrapper
around your keypair. External applications can request that the wallet sign
-transactions without ever having access to your private key. Both the web and
+transactions without ever having access to your secret key. Both the web and
mobile wallet adapters define this interaction for their respective platforms.

#### How does a web wallet work?

@@ -147,7 +148,10 @@ authorization request. The returned `AuthorizationResult` will indicate the
user's acceptance or rejection. If accepted, this result object provides you
with the user's account as well as an `auth_token` you can use in
`wallet.reauthorize()` for subsequent calls. This auth token ensures that other
-apps can't pretend to be your app.
+apps can't pretend to be your app. The auth token is generated during the
+`authorize()` call, and subsequent requests from the dApp can use the
+`reauthorize()` method with the stored token to maintain secure communication
+without repeatedly prompting the user. 
```tsx
transact(async (wallet: Web3MobileWallet) => {
@@ -194,7 +198,7 @@ transact(async (wallet: Web3MobileWallet) => {

Note that the above example does not handle errors or user rejections. In
production, it's a good idea to wrap the authorization state and methods with a
custom `useAuthorization` hook. For reference, we built this
-[in the previous lesson](/content/courses/mobile/intro-to-solana-mobile).
+[in the previous lesson](/content/courses/mobile/intro-to-solana-mobile.md).

#### Interact with a wallet

@@ -212,7 +216,6 @@ if ( connected ) {
    signAllTransactions(...);
    signMessage(...);
    sendTransaction(...);
}

For MWA, simply call the functions on the `wallet` context provided by the
@@ -252,6 +255,24 @@ transact(async (wallet: Web3MobileWallet) => {

Every time you want to call these methods, you will have to call
`wallet.authorize()` or `wallet.reauthorize()`.

+When invoking `wallet.signAndSendTransactions(...)`, it's essential to handle
+transaction failures gracefully. Transactions can fail due to various reasons
+such as network issues, signature mismatches, or insufficient funds. Proper
+error handling ensures a smooth user experience, even when the transaction
+process encounters issues:
+
+```tsx
+transact(async (wallet: Web3MobileWallet) => {
+  try {
+    const result = await wallet.signAndSendTransactions(...);
+    // Handle success
+  } catch (error) {
+    console.error("Failed to sign and send transactions:", error);
+    // Implement error handling logic
+  }
+});
+```
+

And that's it! You should have enough information to get started. The Solana
mobile team has put in a lot of work to make the development experience as
seamless as possible between the two.

@@ -269,8 +290,11 @@ sections; simply try to get a sense of the overall flow.

Solana Mobile has done the vast majority of the heavy lifting by creating the
`mobile-wallet-adapter-walletlib`. This library handles all the low-level
-communication between dApps and wallets. However, this package is still in
-development and is not available through npm. From their GitHub:
+communication between dApps and wallets:
+
+```bash
+npm i @solana-mobile/mobile-wallet-adapter-walletlib
+```

> This package is still in alpha and is not production ready. However, the API
> is stable and will not change drastically, so you can begin integration with
@@ -351,7 +375,7 @@ user's secret key to sign the transaction provided by the request, send the
request to an RPC provider, and then respond to the requesting dApp using a
`resolve` function.

-All the `resolve` function does is tell the dApp what happened and close the
+The `resolve` function simply tells the dApp what happened and closes the
session. The `resolve` function takes two arguments: `request` and `response`.
The types of `request` and `response` are different depending on what the
original request was. So in the example of
@@ -383,7 +407,7 @@ Which response you send would depend on the result of attempting to sign and
send the transaction.

You can dig into the
-[`walletlib` source](https://github.com/solana-mobile/mobile-wallet-adapter/tree/main/js/packages/mobile-wallet-adapter-walletlib)
+[`walletlib` source](https://github.com/solana-mobile/mobile-wallet-adapter/blob/main/js/packages/mobile-wallet-adapter-walletlib/src/resolve.ts)
if you'd like to know all of the types associated with `resolve`.

One final point is that the component used for interacting with `walletlib` also
@@ -426,60 +450,57 @@ app-wallet relationship.
Before we start programming our wallet, we need to do some setup. You will need
a React Native development environment and a Solana dApp to test on. If you have
completed the
-[Basic Solana Mobile lesson](/content/courses/mobile/basic-solana-mobile), both
-of these requirements should be met with the counter app installed on your
+[Introduction to Solana Mobile lab](/content/courses/mobile/intro-to-solana-mobile.md),
+both of these requirements should be met and the counter app installed on your
Android device/emulator.

-If you _haven't_ completed the last lesson you will need to:
+If you _haven't_ completed the
+[Introduction to Solana Mobile lab](/content/courses/mobile/intro-to-solana-mobile.md)
+you will need to:

1. Set up an
   [Android React Native developer environment](https://reactnative.dev/docs/environment-setup)
   with a device or emulator
2. Install a
-  [Devnet Solana dApp](https://github.com/Unboxed-Software/solana-react-native-counter.git)
-
-If you want to install the app from the previous lesson, you can:
+  [Devnet Solana dApp](https://github.com/solana-developers/react-native-counter)
+  by doing the following steps in your terminal:

```bash
-git clone https://github.com/Unboxed-Software/solana-react-native-counter.git
-cd solana-react-native-counter
-git checkout solution
-npm run install
+git clone https://github.com/solana-developers/react-native-counter
+cd react-native-counter
+npm install
```

#### 1. Plan out the app's structure

We are making the wallet from scratch, so let's look at our major building
blocks.

First, we'll make the actual wallet app (popup not included). This will
-include creating or modifying the following:
+include:

-- WalletProvider.tsx
-- MainScreen.tsx
-- App.tsx
+- Creating a `WalletProvider.tsx`
+- Modifying the `MainScreen.tsx`
+- Modifying `App.tsx`

Next, we'll make a boilerplate MWA app that displays 'I'm a Wallet' anytime the
-wallet is requested from a different dApp. This will include creating or
-modifying the following:
+wallet is requested from a different dApp. This will include:

-- MWAApp.tsx
-- index.js
+- Creating a `MWAApp.tsx`
+- Modifying `index.js`

-Then we'll set up all of our UI and request routing. This will mean creating or
-modifying the following:
+Then we'll set up all of our UI and request routing. This will mean:

-- MWAApp.tsx
-- ButtonGroup.tsx
-- AppInfo.tsx
+- Modifying the `MWAApp.tsx`
+- Creating a `ButtonGroup.tsx`
+- Creating an `AppInfo.tsx`

Finally, we'll implement two actual request functions, authorize and sign and
send transactions. This entails creating the following:

-- AuthorizeDappRequestScreen.tsx
-- SignAndSendTransactionScreen.tsx
+- `AuthorizeDappRequestScreen.tsx`
+- `SignAndSendTransactionScreen.tsx`

-#### 2. Scaffold the app
+#### 2. Scaffold the Wallet app

Let's scaffold the app with:

@@ -518,45 +539,17 @@ npm install \
  fast-text-encoding
```

-The next step is a bit messy. We need to depend on Solana's
-`mobile-wallet-adapter-walletlib` package, which handles all of the low-level
-communication. However, this package is still in development and is not
-available through npm. From their github:
+We need to depend on Solana's `mobile-wallet-adapter-walletlib` package, which
+handles all of the low-level communication.

-> This package is still in alpha and is not production ready.
However, the API
-> is stable and will not change drastically, so you can begin integration with
-> your wallet.
-
-However, we have extracted the package and made it available on GitHub. If
-you're interested in how we did that, take a look at the README
-[on the GitHub repo where we've made this package available](https://github.com/Unboxed-Software/mobile-wallet-adapter-walletlib)
+> Note: A reminder that this package is still in alpha and is not production
+> ready. However, the API is stable and will not change drastically, so you can
+> begin integration with your wallet.

-Let's install the package in a new folder `lib`:
+Let's install the package:

```bash
-mkdir lib
-cd lib
-git clone https://github.com/Unboxed-Software/mobile-wallet-adapter-walletlib.git
-```
-
-Next, we have to manually link it by adding
-`@solana-mobile/mobile-wallet-adapter-walletlib` to our `package.json`
-dependencies with the file path as the resolution:
-
-```json
-"dependencies": {
-  ...
-  "@solana-mobile/mobile-wallet-adapter-walletlib": "file:./lib/mobile-wallet-adapter-walletlib",
-  ...
-}
-```
-
-Let npm know about the new package by installing again in the root of your
-project:
-
-```bash
-cd ..
-npm install
+npm i @solana-mobile/mobile-wallet-adapter-walletlib
```

Next, in `android/build.gradle`, change the `minSdkVersion` to version `23`.

@@ -566,7 +559,8 @@
```

Finally, finish the initial setup by building the app. You should get the
default React Native app showing up on your device.

```bash
npm run android
```

@@ -604,8 +598,8 @@ provider should generate and store a keypair. The `WalletProvider` will then
return its context including the `wallet` and `connection`. The rest of the app
can access this context using the `useWallet()` hook.

-**_AGAIN_**, async storage is not fit to store private keys in production.
-Please use something like
+**_AGAIN_**, async storage is not fit to store secret keys in production. Please
+use something like
[Android's keystore system](https://developer.android.com/privacy-and-security/keystore).

Let's create the `WalletProvider.tsx` within a new directory named `components`:

@@ -666,21 +660,21 @@ export function WalletProvider(props: WalletProviderProps) {
    try {
      const storedKey = await AsyncStorage.getItem(ASYNC_STORAGE_KEY);
      let keyPair;
-      if (storedKey && storedKey !== null) {
+      if (storedKey) {
        const encodedKeypair: EncodedKeypair = JSON.parse(storedKey);
        keyPair = decodeKeypair(encodedKeypair);
      } else {
        // Generate a new random pair of keys and store them in local storage for later retrieval
        // This is not secure! Async storage is used for demo purpose. Never store keys like this!
-        keyPair = await Keypair.generate();
+        keyPair = Keypair.generate();
        await AsyncStorage.setItem(
          ASYNC_STORAGE_KEY,
          JSON.stringify(encodeKeypair(keyPair)),
        );
      }
      setKeyPair(keyPair);
-    } catch (e) {
-      console.log("error getting keypair: ", e);
+    } catch (error) {
+      console.log("error getting keypair: ", error);
    }
  };

@@ -688,9 +682,14 @@ export function WalletProvider(props: WalletProviderProps) {
    fetchOrGenerateKeypair();
  }, []);

+  const connection = useMemo(
+    () => new Connection(rpcUrl ?? "https://api.devnet.solana.com"),
+    [rpcUrl],
+  );
+
  const value = {
    wallet: keyPair,
-    connection: new Connection(rpcUrl ??
"https://api.devnet.solana.com"), + connection, }; return ( @@ -728,6 +727,7 @@ function MainScreen() { const [isLoading, setIsLoading] = useState(false); const [balance, setBalance] = useState(null); const { wallet, connection } = useWallet(); + const [errorMessage, setErrorMessage] = useState(null); useEffect(() => { updateBalance(); @@ -735,9 +735,13 @@ function MainScreen() { const updateBalance = async () => { if (wallet) { - await connection.getBalance(wallet.publicKey).then(lamports => { + try { + const lamports = await connection.getBalance(wallet.publicKey); setBalance(lamports / LAMPORTS_PER_SOL); - }); + } catch (error) { + console.error("Failed to fetch / update balance:", error); + setErrorMessage("Failed to fetch balance"); + } } }; @@ -751,8 +755,9 @@ function MainScreen() { ); await connection.confirmTransaction(signature, "max"); await updateBalance(); - } catch (e) { - console.log(e); + } catch (error) { + console.log("error requesting airdrop", error); + setErrorMessage("Airdrop failed"); } setIsLoading(false); @@ -766,7 +771,8 @@ function MainScreen() { Balance: {balance?.toFixed(5) ?? ""} {isLoading && Loading...} - {balance != null && !isLoading && balance < 0.005 && ( + {errorMessage && {errorMessage}} + {balance !== null && !isLoading && balance < 0.005 && ( - )} - - {MovieCoordinator.accounts.length > page * 2 && ( - - )} - - + +
+      {page > 1 && (
+        <Button onClick={() => setPage(page - 1)}>Previous</Button>
+      )}
+      {movies.length === 5 && (
+        <Button onClick={() => setPage(page + 1)}>Next</Button>
+      )}
+
);
```

@@ -336,7 +433,7 @@

At this point, you should be able to run the project and click between pages!

#### 3. Order reviews alphabetically by title

-If you look at the reviews, you might notice they aren’t in any specific order.
+If you look at the reviews, you might notice they aren't in any specific order.
We can fix this by adding back just enough data into our data slice to help us
do some sorting. The various properties in the movie review data buffer are laid
out as follows:

- `initialized` - boolean
- `rating` - u8
- `title` - string
- `description` - string

Based on this, the offset we need to provide to the data slice to access `title`
is 2. The length, however, is indeterminate, so we can just provide what seems
-to be a reasonable length. I’ll stick with 18 as that will cover the length of
+to be a reasonable length. I'll stick with 18 as that will cover the length of
most titles without fetching too much data every time.

-Once we’ve modified the data slice in `getProgramAccounts`, we then need to
+Once we've modified the data slice in `getProgramAccounts`, we then need to
actually sort the returned array. To do this, we need to compare the part of the
data buffer that actually corresponds to `title`. The first 4 bytes of a dynamic
field in Borsh are used to store the length of the field in bytes. So in any
given buffer `data` that is sliced the way we discussed above, the string
portion is `data.slice(4, 4 + data[0])`.

-Now that we’ve thought through this, let’s modify the implementation of
+Now that we've thought through this, let's modify the implementation of
`prefetchAccounts` in `MovieCoordinator`:

```tsx
-static async prefetchAccounts(connection: web3.Connection, filters: AccountFilter[]) {
-  const accounts = await connection.getProgramAccounts(
-    new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID),
+// account type as returned by getProgramAccounts()
+type ProgramAccount = {
+  pubkey: PublicKey;
+  account: AccountInfo<Buffer>;
+};
+
+const DATA_OFFSET = 2; // Skip the first 2 bytes, which store versioning information for the data schema of the account. This versioning ensures that changes to the account's structure can be tracked and managed over time.
+const DATA_LENGTH = 18; // Retrieve 18 bytes of data, covering the title's 4-byte length prefix and the start of the title string used for comparison.
+// Define a constant for the size of the header in each account buffer
+const HEADER_SIZE = 4; // 4 bytes for the length header
+
+static async prefetchAccounts(connection: Connection) {
+  try {
+    // Get readonly accounts response
+    const readonlyAccounts = (await connection.getProgramAccounts(
+      new PublicKey(MOVIE_REVIEW_PROGRAM_ID),
      {
-        dataSlice: { offset: 2, length: 18 },
+        dataSlice: { offset: DATA_OFFSET, length: DATA_LENGTH },
+      }
+    ))
+
+    const accounts: Array<ProgramAccount> = Array.from(readonlyAccounts); // Make a mutable copy of the readonly array
+
+    accounts.sort((a, b) => {
+      try {
+        // Check if buffers are long enough to avoid out-of-bounds access
+        const lengthA = a.account.data.readUInt32LE(0); // Reads the first 4 bytes for length
+        const lengthB = b.account.data.readUInt32LE(0);
+
+        if (
+          a.account.data.length < HEADER_SIZE + lengthA ||
+          b.account.data.length < HEADER_SIZE + lengthB
+        ) {
+          throw new Error('Buffer length is insufficient');
+        }
+
+        const dataA = a.account.data.subarray(HEADER_SIZE, HEADER_SIZE + lengthA);
+        const dataB = b.account.data.subarray(HEADER_SIZE, HEADER_SIZE + lengthB);
+
+        return dataA.compare(dataB);
+      } catch (error) {
+        console.error('Error sorting accounts: ', error);
+        return 0; // Default sort order in case of error
      }
-  )
+    });

-  accounts.sort( (a, b) => {
-    const lengthA = a.account.data.readUInt32LE(0)
-    const lengthB = b.account.data.readUInt32LE(0)
-    const dataA = a.account.data.slice(4, 4 + lengthA)
-    const dataB = b.account.data.slice(4, 4 + lengthB)
-    return dataA.compare(dataB)
-  })
+    this.accounts = accounts.map(account => account.pubkey)

-  this.accounts = accounts.map(account => account.pubkey)
+  } catch (error) {
+    console.error("Error prefetching accounts:", error);
+  }
}
```

With this change, you should be able to run the app and see the reviews ordered
alphabetically.

#### 4. Add search

-The last thing we’ll do to improve this app is to add some basic search
-capability. Let’s add a `search` parameter to `prefetchAccounts` and reconfigure
+The last thing we'll do to improve this app is to add some basic search
+capability. Let's add a `search` parameter to `prefetchAccounts` and reconfigure
the body of the function to use it. We can use the `filters` property of the
`config` parameter of `getProgramAccounts` to filter accounts by specific data.

The offset to the `title` field is 2, but the first 4 bytes are the length of
the title so the actual offset to the string itself is 6. Remember that the
bytes need to be base-58 encoded, so let's install and import `bs58`.

```tsx
import bs58 from 'bs58'
...
-static async prefetchAccounts(connection: web3.Connection, search: string) {
-  const accounts = await connection.getProgramAccounts(
-    new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID),
-    {
-      dataSlice: { offset: 2, length: 18 },
-      filters: search === '' ? [] : [
-        {
-          memcmp:
-            {
-              offset: 6,
-              bytes: bs58.encode(Buffer.from(search))
-            }
-        }
-      ]
-    }
-  )
-
-  accounts.sort( (a, b) => {
-    const lengthA = a.account.data.readUInt32LE(0)
-    const lengthB = b.account.data.readUInt32LE(0)
-    const dataA = a.account.data.slice(4, 4 + lengthA)
-    const dataB = b.account.data.slice(4, 4 + lengthB)
-    return dataA.compare(dataB)
-  })
-
-  this.accounts = accounts.map(account => account.pubkey)
+static async prefetchAccounts(connection: Connection, search: string) {
+  const readonlyAccounts = (await connection.getProgramAccounts(
+    new PublicKey(MOVIE_REVIEW_PROGRAM_ID),
+    {
+      dataSlice: { offset: DATA_OFFSET, length: DATA_LENGTH },
+      filters:
+        search === ""
+          ?
[]
+          : [
+              {
+                memcmp: {
+                  offset: 6,
+                  bytes: bs58.encode(Buffer.from(search)),
+                },
+              },
+            ],
+    }
+  ));
+
+  const accounts: Array<ProgramAccount> = Array.from(readonlyAccounts); // Make a mutable copy of the readonly array
+
+  accounts.sort((a, b) => {
+    try {
+      const lengthA = a.account.data.readUInt32LE(0);
+      const lengthB = b.account.data.readUInt32LE(0);
+
+      if (
+        a.account.data.length < HEADER_SIZE + lengthA ||
+        b.account.data.length < HEADER_SIZE + lengthB
+      ) {
+        throw new Error('Buffer length is insufficient');
+      }
+
+      const dataA = a.account.data.subarray(HEADER_SIZE, HEADER_SIZE + lengthA);
+      const dataB = b.account.data.subarray(HEADER_SIZE, HEADER_SIZE + lengthB);
+
+      return dataA.compare(dataB);
+    } catch (error) {
+      console.error("Error sorting accounts: ", error);
+      return 0;
+    }
+  });
+
+  this.accounts = accounts.map((account) => account.pubkey);
}
```

Now, add a `search` parameter to `fetchPage` and update its call to
-`prefetchAccounts` to pass it along. We’ll also need to add a `reload` boolean
+`prefetchAccounts` to pass it along. We'll also need to add a `reload` boolean
parameter to `fetchPage` so that we can force a refresh of the account
prefetching every time the search value changes.

```tsx
-static async fetchPage(connection: web3.Connection, page: number, perPage: number, search: string, reload: boolean = false): Promise<Movie[]> {
+static async fetchPage(
+  connection: Connection,
+  page: number,
+  perPage: number,
+  search: string,
+  reload = false
+): Promise<Array<Movie>> {
  if (this.accounts.length === 0 || reload) {
-    await this.prefetchAccounts(connection, search)
+    await this.prefetchAccounts(connection, search);
  }

  const paginatedPublicKeys = this.accounts.slice(
    (page - 1) * perPage,
-    page * perPage,
-  )
+    page * perPage
+  );

  if (paginatedPublicKeys.length === 0) {
-    return []
+    return [];
  }

-  const accounts = await connection.getMultipleAccountsInfo(paginatedPublicKeys)
-
-  const movies = accounts.reduce((accum: Movie[], account) => {
-    const movie = Movie.deserialize(account?.data)
-    if (!movie) {
-      return accum
-    }
-
-    return [...accum, movie]
-  }, [])
+  const accounts = await connection.getMultipleAccountsInfo(
+    paginatedPublicKeys
+  );

-  return movies
-}
+  const movies = accounts.reduce((accumulator: Array<Movie>, account) => {
+    try {
+      const movie = Movie.deserialize(account?.data);
+      if (movie) {
+        accumulator.push(movie);
+      }
+    } catch (error) {
+      console.error('Error deserializing movie data: ', error);
+    }
+    return accumulator;
+  }, []);
+
+  return movies;
+}
```

-With that in place, let’s update the code in `MovieList` to call this properly.
+With that in place, let's update the code in `MovieList` to call this properly.

First, add `const [search, setSearch] = useState('')` near the other `useState`
calls. Then update the call to `MovieCoordinator.fetchPage` in the `useEffect`
to pass the `search` parameter and to reload when `search !== ''`.
```tsx
-const { connection } = useConnection();
-const [movies, setMovies] = useState<Movie[]>([]);
+// Memoize the connection so a new instance isn't created on every render,
+// which would otherwise re-run the effect below on each render
+const connection = useMemo(() => new Connection(clusterApiUrl("devnet")), []);
+const [movies, setMovies] = useState<Array<Movie>>([]);
const [page, setPage] = useState(1);
const [search, setSearch] = useState("");

useEffect(() => {
-  MovieCoordinator.fetchPage(connection, page, 2, search, search !== "").then(
-    setMovies,
-  );
-}, [page, search]);
+  const fetchMovies = async () => {
+    try {
+      const movies = await MovieCoordinator.fetchPage(
+        connection,
+        page,
+        5,
+        search,
+        search !== "",
+      );
+      setMovies(movies);
+    } catch (error) {
+      console.error("Failed to fetch movies:", error);
+    }
+  };
+
+  fetchMovies();
+}, [connection, page, search]);
```

Finally, add a search bar that will set the value of `search`:

```tsx
return (
-  <Input
-    onChange={event => setSearch(event.currentTarget.value)}
-    placeholder="Search"
-    w="97%"
-    mt={2}
-    mb={2}
-  />
+  <input
+    onChange={e => setSearch(e.target.value)}
+    placeholder="Search"
+  />
  ...
); ``` -And that’s it! The app now has ordered reviews, paging, and search. +And that's it! The app now has ordered reviews, paging, and search. That was a lot to digest, but you made it through. If you need to spend some more time with the concepts, feel free to reread the sections that were most challenging for you and/or have a look at the -[solution code](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-paging-account-data). +[solution code](https://github.com/solana-developers/movie-review-frontend/tree/solutions-paging-account-data). ## Challenge -Now it’s your turn to try and do this on your own. Using the Student Intros app +Now it's your turn to try and do this on your own. Using the Student Intros app from last lesson, add paging, ordering alphabetically by name, and searching by name. -![Student Intros frontend](/public/assets/courses/unboxed/student-intros-frontend.png) +![Student Intros frontend](/public/assets/courses/student-intros-frontend.png) 1. You can build this from scratch or you can download the - [starter code](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/solution-deserialize-account-data) + [starter code](https://github.com/solana-developers/solana-student-intro-frontend/tree/solution-deserialize-account-data) 2. Add paging to the project by prefetching accounts without data, then only - fetching the account data for each account when it’s needed. + fetching the account data for each account when it's needed. 3. Order the accounts displayed in the app alphabetically by name. -4. Add the ability to search through introductions by a student’s name. +4. Add the ability to search through introductions by a student's name. This is challenging. If you get stuck, feel free to reference the -[solution code](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/solution-paging-account-data). +[solution code](https://github.com/solana-developers/solana-student-intro-frontend/tree/solution-paging-account-data). As always, get creative with these challenges and take them beyond the instructions if you want! diff --git a/content/courses/native-onchain-development/program-derived-addresses.md b/content/courses/native-onchain-development/program-derived-addresses.md index ed5dd66de..128b53fed 100644 --- a/content/courses/native-onchain-development/program-derived-addresses.md +++ b/content/courses/native-onchain-development/program-derived-addresses.md @@ -12,7 +12,7 @@ description: "Get a deeper understanding of PDAs." - A **Program Derived Address** (PDA) is derived from a **program ID** and an optional list of **seeds** -- PDAs are owned and controlled by the program they are derived from +- The program that derives PDAs owns and controls them. - PDA derivation provides a deterministic way to find data based on the seeds used for the derivation - Seeds can be used to map to the data stored in a separate PDA account @@ -22,91 +22,102 @@ description: "Get a deeper understanding of PDAs." ### What is a Program Derived Address? -Program Derived Addresses (PDAs) are account addresses designed to be signed for -by a program rather than a secret key. As the name suggests, PDAs are derived -using a program ID. Optionally, these derived accounts can also be found using -the ID along with a set of "seeds." More on this later, but these seeds will -play an important role in how we use PDAs for data storage and retrieval. 
+Program Derived Addresses (PDAs) are addresses that, instead of being public
+keys, are calculated (or 'found') based on a combination of:
+
+- The program ID
+- A set of "seeds" determined by the programmer
+
+More on this later, but these seeds will play a role in using PDAs for data
+storage and retrieval.

PDAs serve two main functions:

1. Provide a deterministic way to find a given item of data for a program
-2. Authorize the program from which a PDA was derived to sign on its behalf in
-   the same way a user may sign with their secret key
+2. Authorize the program that owns a PDA to sign on the PDA's behalf, just like
+   a user signs for their own account using their secret key.

-In this lesson we'll focus on using PDAs to find and store data. We'll discuss
-signing with a PDA more thoroughly in a future lesson where we cover Cross
-Program Invocations (CPIs).
+This lesson will focus on using PDAs to find and store data. We'll discuss
+signing with a PDA more thoroughly in a future lesson, where we will cover
+Cross-Program Invocations (CPIs).

### Finding PDAs

-PDAs are not technically created. Rather, they are _found_ or _derived_ based on
-a program ID and one or more input seeds.
+Technically, PDAs are _found_ or _derived_ based on a program ID and one or more
+input seeds.

-Solana keypairs can be found on what is called the Ed25519 Elliptic Curve
-(Ed25519). Ed25519 is a deterministic signature scheme that Solana uses to
-generate corresponding public and secret keys. Together, we call these keypairs.
+Unlike other Solana accounts, PDAs are not public keys and don't have secret
+keys. Since public keys are on Solana's Ed25519 curve, PDAs are sometimes called
+'off-curve addresses'.

-Alternatively, PDAs are addresses that lie _off_ the Ed25519 curve. This means
-PDAs are not public keys, and don't have private keys. This property of PDAs is
-essential for programs to be able to sign on their behalf, but we'll cover that
-in a future lesson.
+PDAs are found using a hashing function that deterministically generates a PDA
+using the program ID and seeds. Both Solana frontend and backend code can
+determine an address using the program ID and seeds, and the same program ID
+with the same seeds always results in the same Program Derived Address.

-To find a PDA within a Solana program, we'll use the `find_program_address`
-function. This function takes an optional list of “seeds” and a program ID as
-inputs, and then returns the PDA and a bump seed.
+### Seeds

-```rust
-let (pda, bump_seed) = Pubkey::find_program_address(&[user.key.as_ref(), user_input.as_bytes().as_ref(), "SEED".as_bytes()], program_id)
-```

+"Seeds" are inputs in the `find_program_address` function. While you, the
+developer, determine the seeds to pass into the `find_program_address` method,
+the method adds an additional numeric seed called a bump seed that is used to
+ensure the address is _off_ the Ed25519 curve, i.e., it is not a valid public
+key and does not have a corresponding secret key.

-#### Seeds

+`find_program_address` uses a loop to calculate an off-curve address, starting
+with a bump seed value of 255, and checks whether the output is a public key
+address (on the curve) or not a valid public key (off the curve). If an
+off-curve address is not found, the method decrements the bump seed by one and
+tries again (`255`, `254`, `253`, et cetera). When the method finds a valid PDA,
+it returns the PDA and the canonical bump seed that derived it.
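To make the derivation concrete, here is a minimal client-side sketch using
`@solana/web3.js`; the program ID and user key are illustrative stand-ins, and
the seeds mirror the user-key-plus-string pattern described above:

```ts
import { Keypair, PublicKey } from "@solana/web3.js";

// Illustrative stand-ins - use your real program ID and user key
const programId = Keypair.generate().publicKey;
const user = Keypair.generate().publicKey;

// Same seeds + same program ID always produce the same PDA and bump
const [pda, bump] = PublicKey.findProgramAddressSync(
  [user.toBuffer(), Buffer.from("SEED")],
  programId,
);

console.log(`PDA: ${pda.toBase58()}, canonical bump: ${bump}`);
```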
-“Seeds” are optional inputs used in the `find_program_address` function to
-derive a PDA. For example, seeds can be any combination of public keys, inputs
-provided by a user, or hardcoded values. A PDA can also be derived using only
-the program ID and no additional seeds. Using seeds to find our PDAs, however,
-allows us to create an arbitrary number of accounts that our program can own.
+If the resulting PDA is on the Ed25519 curve, the error
+`PubkeyError::InvalidSeeds` is returned.

-While you, the developer, determine the seeds to pass into the
-`find_program_address` function, the function itself provides an additional seed
-called a "bump seed." The cryptographic function for deriving a PDA results in a
-key that lies _on_ the Ed25519 curve about 50% of the time. To ensure that the
-result _is not_ on the Ed25519 curve and therefore does not have a secret key,
-the `find_program_address` function adds a numeric seed called a bump seed.
+A PDA derivation allows a maximum of `16` seeds, with each seed limited to `32`
+bytes in length. If a seed exceeds this length or the number of seeds surpasses
+the limit, the system returns the error `PubkeyError::MaxSeedLengthExceeded`,
+whose message reads `Length of the seed is too long for address generation`.
+Developers commonly use static strings and public keys as seeds.

-The function starts by using the value `255` as the bump seed, then checks to
-see if the output is a valid PDA. If the result is not a valid PDA, the function
-decreases the bump seed by 1 and tries again (`255`, `254`, `253`, et cetera).
-Once a valid PDA is found, the function returns both the PDA and the bump that
-was used to derive the PDA.
+The
+[PublicKey](https://docs.rs/solana-program/latest/solana_program/pubkey/struct.Pubkey.html#)
+type has multiple methods that find a PDA within a Solana program:

-#### Under the hood of `find_program_address`
+1. `find_program_address`
+2. `try_find_program_address`
+3. `create_program_address`

-Let's take a look at the source code for `find_program_address`.
+These methods take an optional list of "seeds" and a `program_id` as inputs, and
+return either a PDA and bump seed or an error.

+### 1. find_program_address
+
+The source code for `find_program_address`:

```rust
-  pub fn find_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> (Pubkey, u8) {
+pub fn find_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> (Pubkey, u8) {
    Self::try_find_program_address(seeds, program_id)
        .unwrap_or_else(|| panic!("Unable to find a viable program address bump seed"))
}
```

-Under the hood, the `find_program_address` function passes the input `seeds` and
-`program_id` to the `try_find_program_address` function.
+Under the hood, the `find_program_address` method passes the input `seeds` and
+`program_id` to the `try_find_program_address` method.

+### 2. try_find_program_address

-The `try_find_program_address` function then introduces the `bump_seed`. The
-`bump_seed` is a `u8` variable with a value ranging between 0 to 255. Iterating
-over a descending range starting from 255, a `bump_seed` is appended to the
-optional input seeds which are then passed to the `create_program_address`
-function. If the output of `create_program_address` is not a valid PDA, then the
-`bump_seed` is decreased by 1 and the loop continues until a valid PDA is found.
+The `try_find_program_address` method then introduces the `bump_seed`. The
+`bump_seed` is a `u8` variable with a value between 0 and 255.
Iterating over a
+descending range starting from 255, a `bump_seed` is appended to the optional
+input seeds passed to the `create_program_address` method. If the output of
+`create_program_address` is not a valid PDA, the `bump_seed` is decreased by one
+and the loop continues until a valid PDA is found.

```rust
pub fn try_find_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> Option<(Pubkey, u8)> {
-
-    let mut bump_seed = [std::u8::MAX];
-    for _ in 0..std::u8::MAX {
+    //..
+    let mut bump_seed = [u8::MAX];
+    for _ in 0..u8::MAX {
        {
            let mut seeds_with_bump = seeds.to_vec();
            seeds_with_bump.push(&bump_seed);
@@ -120,21 +131,50 @@ pub fn try_find_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> Option<
        }

        None
+    // ...
}
```

-The `create_program_address` function performs a set of hash operations over the
-seeds and `program_id`. These operations compute a key, then verify if the
-computed key lies on the Ed25519 elliptic curve or not. If a valid PDA is found
-(i.e. an address that is _off_ the curve), then the PDA is returned. Otherwise,
-an error is returned.
+We can see that the `try_find_program_address` calls the
+`create_program_address` method.

```rust
+pub fn try_find_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> Option<(Pubkey, u8)> {
+    // ...
+    for _ in 0..std::u8::MAX {
+        {
+            // `create_program_address` is called here
+            match Self::create_program_address(&seeds_with_bump, program_id) {
+                //...
+            }
+        }
+        //...
+    }
+}
+
```

+### 3. create_program_address
+
+The `create_program_address` method performs a hashing operation over the seeds
+and `program_id`. This operation computes a key and verifies whether it lies on
+the Ed25519 elliptic curve. If a valid PDA is found (i.e., an address that is
+_off_ the curve), the PDA is returned; otherwise, an error is returned.
+
+The source code for `create_program_address`:
+
```rust
+pub fn create_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> Result<Pubkey, PubkeyError> {
+    if seeds.len() > MAX_SEEDS {
+        return Err(PubkeyError::MaxSeedLengthExceeded);
+    }
+    for seed in seeds.iter() {
+        if seed.len() > MAX_SEED_LEN {
+            return Err(PubkeyError::MaxSeedLengthExceeded);
+        }
+    }
+
+    //..
    let mut hasher = crate::hash::Hasher::default();
    for seed in seeds.iter() {
        hasher.hash(seed);
@@ -146,134 +186,170 @@ pub fn create_program_address(
        return Err(PubkeyError::InvalidSeeds);
    }

-    Ok(Pubkey::new(hash.as_ref()))
+    Ok(Pubkey::from(hash.to_bytes()))
+    // ...
}
```

-In summary, the `find_program_address` function passes our input seeds and
-`program_id` to the `try_find_program_address` function. The
-`try_find_program_address` function adds a `bump_seed` (starting from 255) to
-our input seeds, then calls the `create_program_address` function until a valid
-PDA is found. Once found, both the PDA and the `bump_seed` are returned.
-
-Note that for the same input seeds, different valid bumps will generate
-different valid PDAs. The `bump_seed` returned by `find_program_address` will
-always be the first valid PDA found. Because the function starts with a
-`bump_seed` value of 255 and iterates downwards to zero, the `bump_seed` that
-ultimately gets returned will always be the largest valid 8-bit value possible.
-This `bump_seed` is commonly referred to as the "_canonical bump_".
To avoid
-confusion, it's recommended to only use the canonical bump, and to _always
-validate every PDA passed into your program._

-One point to emphasize is that the `find_program_address` function only returns
-a Program Derived Address and the bump seed used to derive it. The
-`find_program_address` function does _not_ initialize a new account, nor is any
-PDA returned by the function necessarily associated with an account that stores
-data.
+Note that `find_program_address` itself doesn't return an error: in the
+statistically improbable case that no valid PDA can be found, it panics with the
+message `Unable to find a viable program address bump seed`. If you would rather
+handle that case yourself, use the `try_find_program_address` method, which
+returns an `Option` instead of panicking.
+
+Locating a valid PDA off the Ed25519 curve can be time-consuming due to the
+iterations over bump seeds. This operation can consume a variable amount of the
+program's compute budget. Developers can optimize performance and lower a
+program's compute usage by passing the `bump_seed` (also called the canonical
+bump) and the user-supplied seeds as part of the instruction data, then
+deserializing them and passing them to the `create_program_address` method to
+derive the PDA. It's important to note that the `create_program_address` method
+incurs a fixed cost to the compute budget.
+
+Address collisions can occur since the seeds are passed as a slice of bytes,
+meaning that the seeds `{abcdef}`, `{abc, def}` and `{ab, cd, ef}` will result
+in the same PDA being generated. In some cases, developers may wish to prevent
+collisions by adding separator characters like hyphens.
+
+In summary, the `find_program_address` method passes the input seeds and
+`program_id` to the `try_find_program_address` method. The
+`try_find_program_address` method starts with a `bump_seed` of 255, adds it to
+the input seeds, and then repeatedly calls the `create_program_address` method
+until it finds a valid PDA. Once found, both the PDA and the `bump_seed` are
+returned.
+
+Note that different valid bumps generate different valid PDAs for the same input
+seeds. The `bump_seed` returned by `find_program_address` is always the one that
+produces the first valid PDA found.
+
+Using the canonical bump when generating a PDA onchain is crucial. Starting with
+a `bump_seed` value of `255` and iterating downward to `0` ensures that the
+returned seed is always the largest valid 8-bit value possible. This `bump_seed`
+is commonly known as the "_canonical bump_". It's best practice to always use
+the canonical bump and to validate every PDA passed into your program.
+
+One point to emphasize is that the `find_program_address` method only returns a
+Program Derived Address and the bump seed used to derive it. The method does not
+initialize a new account, nor is any PDA returned by the method necessarily
+associated with an account that stores data.

### Use PDA accounts to store data

-Since programs themselves are stateless, program state is managed through
-external accounts. Given that you can use seeds for mapping and that programs
-can sign on their behalf, using PDA accounts to store data related to the
-program is an extremely common design choice. While programs can invoke the
-System Program to create non-PDA accounts and use those to store data as well,
-PDAs tend to be the way to go.
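Before moving on to storage, here is a small client-side sketch of the
derive-once, reuse-the-bump optimization described above, using
`@solana/web3.js` (all values are illustrative stand-ins):

```ts
import { Keypair, PublicKey } from "@solana/web3.js";

const programId = Keypair.generate().publicKey; // stand-in program ID
const seeds = [Buffer.from("some-seed")];

// Loop-based search: returns the PDA and the canonical bump
const [pda, bump] = PublicKey.findProgramAddressSync(seeds, programId);

// Fixed-cost derivation: reuse the stored bump, no searching required.
// Throws if the resulting address would lie on the Ed25519 curve.
const samePda = PublicKey.createProgramAddressSync(
  [...seeds, Buffer.from([bump])],
  programId,
);

console.log(pda.equals(samePda)); // true
```

Onchain, the same pattern applies: accept the user's seeds and the stored bump
in the instruction data and re-derive the address with `create_program_address`
at a fixed compute cost.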
+
+Solana programs are stateless, so state is stored in separate accounts from
+where the program's executable is stored. Although programs can use the System
+Program to create non-PDA accounts for data storage, PDAs are the standard
+choice for storing program-related data. This choice is popular because the
+seeds and canonical bump deterministically map to the same PDA, and the program
+they were derived from can sign on the PDA's behalf.
+
+Program Derived Addresses (PDAs) are account addresses that only the deriving
+program can sign for. During cross-program invocations, the program can "sign"
+for the key by calling `invoke_signed` and providing the same seeds used to
+generate the address, along with the calculated bump seed. The runtime then
+verifies that the program associated with the address is the caller and thus
+authorized to sign.

If you need a refresher on how to store data in PDAs, have a look at the
[State Management lesson](/content/courses/native-onchain-development/program-state-management).

### Map to data stored in PDA accounts

-Storing data in PDA accounts is only half of the equation. You also need a way
-to retrieve that data. We'll talk about two approaches:
+Storing data in PDA accounts is only half of the equation. Retrieving the data
+is the other half. We'll talk about two approaches:

1. Creating a PDA "map" account that stores the addresses of various accounts
   where data is stored
2. Strategically using seeds to locate the appropriate PDA accounts and retrieve
   the necessary data

-#### Map to data using PDA "map" accounts
-
-One approach to organizing data storage is to store clusters of relevant data in
-their own PDAs and then to have a separate PDA account that stores a mapping of
-where all of the data is.
-
-For example, you might have a note-taking app whose backing program uses random
-seeds to generate PDA accounts and stores one note in each account. The program
-would also have a single global PDA "map" account that stores a mapping of
-users' public keys to the list of PDAs where their notes are stored. This map
-account would be derived using a static seed, e.g. "GLOBAL_MAPPING".
-
-When it comes time to retrieve a user's notes, you could then look at the map
-account, see the list of addresses associated with a user's public key, then
-retrieve the account for each of those addresses.
-
-While such a solution is perhaps more approachable for traditional web
-developers, it does come with some drawbacks that are particular to web3
-development. Since the size of the mapping stored in the map account will grow
-over time, you'll either need to allocate more size than necessary to the
-account when you first create it, or you'll need to reallocate space for it
-every time a new note is created. On top of that, you'll eventually reach the
-account size limit of 10 megabytes.
-
-You could mitigate this issue to some degree by creating a separate map account
-for each user. For example, rather than having a single PDA map account for the
-entire program, you would construct a PDA map account per user. Each of these
-map accounts could be derived with the user's public key. The addresses for each
-note could then be stored inside the corresponding user's map account.
-
-This approach reduces the size required for each map account, but ultimately
+### Map to data using PDA "map" accounts
+
+For example, imagine a note-taking app where the underlying program generates
+PDA accounts using random seeds, with each account storing an individual note.
+
+Additionally, the program derives a single global PDA account, called the "map"
+account, using a static seed like "GLOBAL_MAPPING." This map account maintains a
+mapping of users' public keys to the list of PDAs where their notes are stored.
+
+To retrieve a user's notes, a lookup of the map account is performed to check
+the list of addresses associated with a user's public key and retrieve the
+account for each address.
+
+While such a solution is more approachable for traditional web developers, it
+has some drawbacks that are particular to web3 development. Since the mapping
+stored in the map account will grow over time, each time you create a new note,
+you must either allocate more space than necessary when creating the account or
+reallocate space. Additionally, you will eventually reach the account size limit
+of 10 megabytes.
+
+You can mitigate this issue to a certain degree by creating a separate map
+account for each user. For example, you can construct a PDA map account per user
+rather than having a single PDA map account for the entire program. These map
+accounts are derived with the user's public key. You can then store the
+addresses for each note inside the corresponding user's map account.
+
+This approach reduces the size required for each map account but ultimately
still adds an unnecessary requirement to the process: having to read the
information on the map account _before_ being able to find the accounts with the
relevant note data.

-There may be times where using this approach makes sense for your application,
-but we don't recommend it as your "go to" strategy.
+There are instances where this approach is a viable choice for an application,
+but it shouldn't be your default or recommended strategy.

-#### Map to data using PDA derivation
+### Map to data using PDA derivation

If you're strategic about the seeds you use to derive PDAs, you can embed the
-required mappings into the seeds themselves. This is the natural evolution of
-the note-taking app example we just discussed. If you start to use the note
-creator's public key as a seed to create one map account per user, then why not
-use both the creator's public key and some other known piece of information to
-derive a PDA for the note itself?
+required mappings into them. It is the natural evolution of the note-taking app
+example we just discussed. If you start to use the note creator's public key as
+a seed to create one map account per user, then why not use both the creator's
+public key and some other known piece of information to derive a PDA for the
+note?

-Now, without talking about it explicitly, we’ve been mapping seeds to accounts
-this entire course. Think about the Movie Review program we've been built in
-previous lessons.
+
+We've been mapping seeds to accounts this entire course and have yet to discuss
+it explicitly. Think about the Movie Review program we've built in previous
+lessons.
This program uses a review creator's public key and the title of the
+movie they're reviewing to find the address that _should_ be used to store the
+review. This approach lets the program create a unique address for every new
+review while making it easy to locate a review when needed. When you want to
+find a user's review of "Spiderman", you can derive the PDA account's address
+using the user's public key and the text "Spiderman" as seeds.

```rust
-let (pda, bump_seed) = Pubkey::find_program_address(&[
-        initializer.key.as_ref(),
-        title.as_bytes().as_ref()
-    ],
-    program_id)
+let (pda, bump_seed) = Pubkey::find_program_address(
+    &[initializer.key.as_ref(), title.as_bytes().as_ref()],
+    program_id,
+);
```

-#### Associated token account addresses
+### Associated token account addresses
+
+Another practical example of this mapping type is determining associated token
+account (ATA) addresses. An ATA is an address used to hold the tokens for a
+specific account - for example, Jane's USDC account. The ATA address is derived
+using:

-Another practical example of this type of mapping is how associated token
-account (ATA) addresses are determined. Tokens are often held in an ATA whose
-address was derived using a wallet address and the mint address of a specific
-token. The address for an ATA is found using the `get_associated_token_address`
-function which takes a `wallet_address` and `token_mint_address` as inputs.
+- the wallet address of the user
+- the mint address of the token
+- the token program used - either the older token program or the newer
+  [token extensions program ID](https://docs.rs/spl-token-2022/latest/spl_token_2022/fn.id.html).
+
+```toml
+# ...
+[dependencies]
+spl-token-2022 = "<version>"
+spl-associated-token-account = "<version>"
+```

```rust
-let associated_token_address = get_associated_token_address(&wallet_address, &token_mint_address);
+// Get the token extensions program ID
+let token2022_program = spl_token_2022::id();
+let associated_token_address = spl_associated_token_account::get_associated_token_address_with_program_id(&wallet_address, &token_mint_address, &token2022_program);
```

Under the hood, the associated token address is a PDA found using the
-`wallet_address`, `token_program_id`, and `token_mint_address` as seeds. This
-provides a deterministic way to find a token account associated with any wallet
+`wallet_address`, `token_program_id`, and `token_mint_address` as seeds,
+providing a deterministic way to find a token account associated with any wallet
address for a specific token mint.

```rust
@@ -294,50 +370,55 @@ fn get_associated_token_address_and_bump_seed_internal(
}
```

-The mappings between seeds and PDA accounts that you use will be highly
-dependent on your specific program. While this isn't a lesson on system design
-or architecture, it's worth calling out a few guidelines:
+The mappings between seeds and PDA accounts you use will depend highly on your
+specific program. While this isn't a lesson on system design or architecture,
+it's worth calling out a few guidelines:

-- Use seeds that will be known at the time of PDA derivation
-- Be thoughtful about what data is grouped together into a single account
+- Use seeds known at the time of PDA derivation
+- Be thoughtful about how you group data into a single account
- Be thoughtful about the data structure used within each account
- Simpler is usually better

## Lab

-Let’s practice together with the Movie Review program we've worked on in
-previous lessons.
No worries if you’re just jumping into this lesson without -having done the previous lesson - it should be possible to follow along either -way. +Let's practice with the Movie Review program we've worked on in previous +lessons. No worries if you're jumping into this lesson without doing the last +lesson - it should be possible to follow along either way. As a refresher, the Movie Review program lets users create movie reviews. These -reviews are stored in an account using a PDA derived with the initializer’s -public key and the title of the movie they are reviewing. +reviews are stored in an account using a PDA derived from the initializer's +public key and the movie title they are reviewing. -Previously, we finished implementing the ability to update a movie review in a -secure manner. In this lab, we'll add the ability for users to comment on a -movie review. We'll use building this feature as an opportunity to work through -how to structure the comment storage using PDA accounts. +Previously, we finished implementing the ability to update a movie review +securely. In this lab, we'll add the ability for users to comment on a movie +review. We'll use building this feature as an opportunity to work through how to +structure the comment storage using PDA accounts. -#### 1. Get the starter code +### 1. Get the starter code To begin, you can find -[the movie program starter code](https://github.com/Unboxed-Software/solana-movie-program/tree/starter) +[the movie program starter code](https://github.com/solana-developers/movie-program/tree/starter) on the `starter` branch. If you've been following along with the Movie Review labs, you'll notice that -this is the program we’ve built out so far. Previously, we +this is the program we've built out so far. Previously, we used [Solana Playground](https://beta.solpg.io/) to write, build, and deploy our -code. In this lesson, we’ll build and deploy the program locally. +code. In this lesson, we'll develop and deploy the program locally. Ensure that +`solana-test-validator` is running. -Open the folder, then run `cargo-build-bpf` to build the program. The -`cargo-build-bpf` command will output instruction to deploy the program. +Open the folder, then run `cargo build-bpf` to build the program. The +`cargo build-bpf` command will output a shared library for deployment inside the +`./target/deploy/` path. + +The `./target/deploy/` directory contains the shared library in the format +`.so` and the keypair that includes the public key +of the program in the format `-keypair.json`. ```sh -cargo-build-bpf +cargo build-bpf ``` -Deploy the program by copying the output of `cargo-build-bpf` and running the +Deploy the program by copying the output of `cargo build-bpf` and running the `solana program deploy` command. ```sh @@ -345,14 +426,14 @@ solana program deploy ``` You can test the program by using the movie review -[frontend](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-update-reviews) -and updating the program ID with the one you’ve just deployed. Make sure you use +[frontend](https://github.com/solana-developers/movie-frontend/tree/solution-update-reviews) +and updating the program ID with the one you've just deployed. Make sure you use the `solution-update-reviews` branch. -#### 2. Plan out the account structure +### 2. Plan out the account structure -Adding comments means we need to make a few decisions about how to store the -data associated with each comment. 
The criteria for a good structure here are: +Adding comments means we must make a few decisions about storing the data +associated with each comment. The criteria for a good structure here are: - Not overly complicated - Data is easily retrievable @@ -363,28 +444,28 @@ To do this, we'll create two new account types: - Comment counter account - Comment account -There will be one comment counter account per review and one comment account per -comment. The comment counter account will be linked to a given review by using a -review's address as a seed for finding the comment counter PDA. It will also use -the static string "comment" as a seed. +There will be one comment counter account per review, and one account linked to +each comment posted. The comment counter account will be linked to a given +review by using the review's address as a seed for finding the comment counter +PDA. It will also use the static string "comment" as a seed. -The comment account will be linked to a review in the same way. However, it will -not include the "comment" string as a seed and will instead use the _actual -comment count_ as a seed. That way the client can easily retrieve comments for a -given review by doing the following: +Link the comment account to a review in the same way. However, it will not +include the "comment" string as a seed; instead, it will use the _actual comment +count_ as a seed. That way, the client can easily retrieve comments for a given +review by doing the following: 1. Read the data on the comment counter account to determine the number of comments on a review. 2. Where `n` is the total number of comments on the review, loop `n` times. Each - iteration of the loop will derive a PDA using the review address and the - current number as seeds. The result is `n` number of PDAs, each of which is - the address of an account that stores a comment. -3. Fetch the accounts for each of the `n` PDAs and read the data stored in each. + loop iteration will derive a PDA using the review address and the current + number as seeds. The result is the `n` number of PDAs, each of which is the + address of an account that stores a comment. +3. Fetch the accounts for each of the `n` PDAs and read the stored data. -This ensures that every one of our accounts can be deterministically retrieved -using data that is already known ahead of time. +Every one of our accounts can be deterministically retrieved using data that is +already known ahead of time. -To implement these changes, we'll need to do the following: +To implement these changes, do the following: - Define structs to represent the comment counter and comment accounts - Update the existing `MovieAccountState` to contain a discriminator (more on @@ -394,23 +475,22 @@ To implement these changes, we'll need to do the following: include creating the comment counter account - Create a new `add_comment` instruction processing function -#### 3. Define `MovieCommentCounter` and `MovieComment` structs +### 3. Define MovieCommentCounter and MovieComment structs Recall that the `state.rs` file defines the structs our program uses to populate the data field of a new account. -We’ll need to define two new structs to enable commenting. +We'll need to define two new structs to enable commenting. 1. `MovieCommentCounter` - to store a counter for the number of comments associated with a review 2. `MovieComment` - to store data associated with each comment -To start, let’s define the structs we’ll be using for our program. 
Note that we
-are adding a `discriminator` field to each struct, including the existing
-`MovieAccountState`. Since we now have multiple account types, we need a way to
-only fetch the account type we need from the client. This discriminator is a
-string that can be used to filter through accounts when we fetch our program
-accounts.
+Let's define the structs we'll be using for our program. We add a
+`discriminator` field to each struct, including the existing
+`MovieAccountState`. Since we now have multiple account types, we need a way to
+fetch only the account type we need from the client. This discriminator is a
+string that will filter through accounts when we fetch our program accounts.

```rust
#[derive(BorshSerialize, BorshDeserialize)]
pub struct MovieAccountState {
@@ -427,7 +507,7 @@
pub struct MovieCommentCounter {
    pub discriminator: String,
    pub is_initialized: bool,
-    pub counter: u64
+    pub counter: u64,
}

#[derive(BorshSerialize, BorshDeserialize)]
@@ -437,7 +517,7 @@ pub struct MovieComment {
    pub review: Pubkey,
    pub commenter: Pubkey,
    pub comment: String,
-    pub count: u64
+    pub count: u64,
}

impl Sealed for MovieAccountState {}
@@ -462,11 +542,10 @@ impl IsInitialized for MovieComment {
```

Since we've added a new `discriminator` field to our existing struct, the
-account size calculation needs to change. Let's use this as an opportunity to
-clean up some of our code a bit. We'll add an implementation for each of the
-three structs above that adds a constant `DISCRIMINATOR` and either a constant
-`SIZE` or function `get_account_size` so we can quickly get the size needed when
-initializing an account.
+account size calculation needs to change. Let's clean up some of our code. We'll
+add an implementation for each of the three structs above that adds a constant
+`DISCRIMINATOR` and either a constant `SIZE` or method `get_account_size` to
+quickly get the size needed when initializing an account.

```rust
impl MovieAccountState {
@@ -495,132 +574,136 @@ impl MovieComment {
}
```

-Now everywhere we need the discriminator or account size we can use this
-implementation and not risk unintentional typos.
+Now, we can use this implementation everywhere we need the discriminator or
+account size and not risk unintentional typos.

-#### 4. Create `AddComment` instruction
+### 4. Create AddComment instruction

Recall that the `instruction.rs` file defines the instructions our program will
accept and how to deserialize the data for each. We need to add a new
-instruction variant for adding comments. Let’s start by adding a new variant
-`AddComment` to the `MovieInstruction` enum.
+instruction variant for adding comments. Let's start by adding a new variant
+`AddComment` to the `MovieInstruction` enum.

```rust
pub enum MovieInstruction {
    AddMovieReview {
        title: String,
        rating: u8,
-        description: String
+        description: String,
    },
    UpdateMovieReview {
        title: String,
        rating: u8,
-        description: String
+        description: String,
    },
    AddComment {
-        comment: String
-    }
+        comment: String,
+    },
}
```

Next, let's create a `CommentPayload` struct to represent the instruction data
associated with this new instruction. Most of the data we'll include in the
account are public keys associated with accounts passed into the program, so the
-only thing we actually need here is a single field to represent the comment
-text.
+only thing we need here is a single field to represent the comment text.
```rust
#[derive(BorshDeserialize)]
struct CommentPayload {
-    comment: String
+    comment: String,
}
```

-Now let’s update how we unpack the instruction data. Notice that we’ve moved the
+Now, update the unpacking of the instruction data. Notice that we've moved the
deserialization of instruction data into each matching case using the associated
payload struct for each instruction.

```rust
impl MovieInstruction {
    pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> {
-        let (&variant, rest) = input.split_first().ok_or(ProgramError::InvalidInstructionData)?;
+        let (&variant, rest) = input
+            .split_first()
+            .ok_or(ProgramError::InvalidInstructionData)?;
        Ok(match variant {
            0 => {
                let payload = MovieReviewPayload::try_from_slice(rest).unwrap();
                Self::AddMovieReview {
-                    title: payload.title,
-                    rating: payload.rating,
-                    description: payload.description }
-            },
+                    title: payload.title,
+                    rating: payload.rating,
+                    description: payload.description,
+                }
+            }
            1 => {
                let payload = MovieReviewPayload::try_from_slice(rest).unwrap();
                Self::UpdateMovieReview {
                    title: payload.title,
                    rating: payload.rating,
-                    description: payload.description
+                    description: payload.description,
                }
-            },
+            }
            2 => {
                let payload = CommentPayload::try_from_slice(rest).unwrap();
                Self::AddComment {
-                    comment: payload.comment
+                    comment: payload.comment,
                }
            }
-            _ => return Err(ProgramError::InvalidInstructionData)
+            _ => return Err(ProgramError::InvalidInstructionData),
        })
    }
}
```

-Lastly, let's update the `process_instruction` function in `processor.rs` to use
-the new instruction variant we've created.
+Lastly, update the `process_instruction` function in `processor.rs` to use our
+new instruction variant.

-In `processor.rs`, bring into scope the new structs from `state.rs`.
+In `processor.rs`, import the new structs from `state.rs` into scope.

```rust
use crate::state::{MovieAccountState, MovieCommentCounter, MovieComment};
```

-Then in `process_instruction` let’s match our deserialized `AddComment`
-instruction data to the `add_comment` function we’ll be implementing shortly.
+Then in `process_instruction`, match our deserialized `AddComment` instruction
+data to the `add_comment` function we will be implementing shortly.

```rust
pub fn process_instruction(
    program_id: &Pubkey,
    accounts: &[AccountInfo],
-    instruction_data: &[u8]
+    instruction_data: &[u8],
) -> ProgramResult {
    let instruction = MovieInstruction::unpack(instruction_data)?;
    match instruction {
-        MovieInstruction::AddMovieReview { title, rating, description } => {
-            add_movie_review(program_id, accounts, title, rating, description)
-        },
-        MovieInstruction::UpdateMovieReview { title, rating, description } => {
-            update_movie_review(program_id, accounts, title, rating, description)
-        },
-
-        MovieInstruction::AddComment { comment } => {
-            add_comment(program_id, accounts, comment)
-        }
+        MovieInstruction::AddMovieReview {
+            title,
+            rating,
+            description,
+        } => add_movie_review(program_id, accounts, title, rating, description),
+        MovieInstruction::UpdateMovieReview {
+            title,
+            rating,
+            description,
+        } => update_movie_review(program_id, accounts, title, rating, description),
+
+        MovieInstruction::AddComment { comment } => add_comment(program_id, accounts, comment),
    }
}
```
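With the instruction plumbing in place, you can sanity-check the new variant's
wire format with a quick unit test. The sketch below is an illustrative addition
rather than part of the lesson code: the `tests` module and the local `Payload`
mirror are made up, and it assumes `borsh` is available as a dependency:

```rust
#[cfg(test)]
mod tests {
    use super::*;
    use borsh::BorshSerialize;

    // Serializable mirror of CommentPayload, which only derives BorshDeserialize
    #[derive(BorshSerialize)]
    struct Payload {
        comment: String,
    }

    #[test]
    fn unpack_add_comment_variant() {
        // Variant byte 2 selects AddComment, followed by the borsh-encoded payload
        let mut data = vec![2u8];
        let payload = Payload {
            comment: "Loved it!".to_string(),
        };
        data.extend(payload.try_to_vec().unwrap());

        match MovieInstruction::unpack(&data).unwrap() {
            MovieInstruction::AddComment { comment } => assert_eq!(comment, "Loved it!"),
            _ => panic!("expected AddComment"),
        }
    }
}
```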
-#### 5. Update `add_movie_review` to create comment counter account
+### 5. Update add_movie_review to create a comment counter account

-Before we implement the
+Before implementing the
`add_comment` function, we need to update the
`add_movie_review` function to create the review's comment counter account.

-Remember that this account will keep track of the total number of comments that
-exist for an associated review. It's address will be a PDA derived using the
-movie review address and the word “comment” as seeds. Note that how we store the
-counter is simply a design choice. We could also add a “counter” field to the
-original movie review account.
+Remember that this account will keep track of the total number of comments for
+an associated review. Its address will be a PDA derived using the movie review
+address and the word "comment" as seeds. Note that how we store the counter is
+simply a design choice. We could add a "counter" field to the original movie
+review account.

-Within the `add_movie_review` function, let’s add a `pda_counter` to represent
-the new counter account we’ll be initializing along with the movie review
-account. This means we now expect four accounts to be passed into
-the `add_movie_review` function through the `accounts` argument.
+Within the `add_movie_review` function, let's add a `pda_counter` to represent
+the new counter account we'll be initializing along with the movie review
+account. The `add_movie_review` function now expects four accounts to be passed
+in through the `accounts` argument.

```rust
let account_info_iter = &mut accounts.iter();

@@ -631,7 +714,7 @@ let pda_counter = next_account_info(account_info_iter)?;
let system_program = next_account_info(account_info_iter)?;
```

-Next, there's a check to make sure `total_len` is less than 1000 bytes, but
+Next, there's a check to ensure `total_len` is less than 1000 bytes, but
`total_len` is no longer accurate since we added the discriminator. Let's
replace `total_len` with a call to `MovieAccountState::get_account_size`:

```rust
@@ -644,10 +727,10 @@ if MovieAccountState::get_account_size(title.clone(), description.clone()) > acc
}
```

-Note that this also needs to be updated in the `update_movie_review` function
-for that instruction to work properly.
+Remember to update the code within the `update_movie_review` function for that
+instruction to work correctly.

-Once we’ve initialized the review account, we’ll also need to update the
+Once we've initialized the review account, we'll also need to update the
`account_data` with the new fields we specified in the `MovieAccountState`
struct.

```rust
@@ -660,8 +743,8 @@ account_data.description = description;
account_data.is_initialized = true;
```

-Finally, let’s add the logic to initialize the counter account within the
-`add_movie_review` function. This means:
+Finally, let's add the logic to initialize the counter account within the
+`add_movie_review` function by:

1. Calculating the rent exemption amount for the counter account
2. Deriving the counter PDA using the review address and the string "comment" as
@@ -670,8 +753,8 @@ Finally, let’s add the logic to initialize the counter account within the
4. Set the starting counter value
5. Serialize the account data and return from the function

-All of this should be added to the end of the `add_movie_review` function before
-the `Ok(())`.
+Add these steps to the end of the `add_movie_review` function before the
+`Ok(())`.
```rust
msg!("create comment counter");
@@ -718,30 +801,29 @@ msg!("comment count: {}", counter_data.counter);
counter_data.serialize(&mut &mut pda_counter.data.borrow_mut()[..])?;
```

-Now when a new review is created, two accounts are initialized:
+The function initializes two accounts whenever it creates a new review:

-1. The first is the review account that stores the contents of the review. This
-   is unchanged from the version of the program we started with.
+1. The first is the review account, which stores the review's contents. This is
+   unchanged from the version of the program we started with.
2. The second account stores the counter for comments

-#### 6. Implement `add_comment`
+### 6. Implement add_comment

-Finally, let’s implement our `add_comment` function to create new comment
-accounts.
+Finally, implement the `add_comment` function to create new comment accounts.

-When a new comment is created for a review, we will increment the count on the
-comment counter PDA account and derive the PDA for the comment account using the
-review address and current count.
+When creating a new comment for a review, the counter will be incremented on the
+comment counter PDA account, and the PDA for the comment account will be derived
+using the review address and current count.

-Like in other instruction processing functions, we'll start by iterating through
-accounts passed into the program. Then before we do anything else we need to
-deserialize the counter account so we have access to the current comment count:
+Like other instruction processing functions, we'll start by iterating through
+accounts passed into the program. Then, before we do anything else, we need to
+deserialize the counter account so we have access to the current comment count:

```rust
pub fn add_comment(
    program_id: &Pubkey,
    accounts: &[AccountInfo],
-    comment: String
+    comment: String,
) -> ProgramResult {
    msg!("Adding Comment...");
    msg!("Comment: {}", comment);

@@ -754,7 +836,8 @@ pub fn add_comment(
    let pda_comment = next_account_info(account_info_iter)?;
    let system_program = next_account_info(account_info_iter)?;

-    let mut counter_data = try_from_slice_unchecked::<MovieCommentCounter>(&pda_counter.data.borrow()).unwrap();
+    let mut counter_data =
+        try_from_slice_unchecked::<MovieCommentCounter>(&pda_counter.data.borrow()).unwrap();

    Ok(())
}
```

Now that we have access to the counter data, we can continue with the remaining
steps:

-1. Calculate the rent exempt amount for the new comment account
+1. Calculate the rent-exempt amount for the new comment account
2. Derive the PDA for the comment account using the review address and the
   current comment count as seeds
3. Invoke the System Program to create the new comment account
4. Set the appropriate values to the newly created account
-5. Serialize the account data and return from the function
+5. Serialize the account data and return from the function
```rust
pub fn add_comment(
    program_id: &Pubkey,
    accounts: &[AccountInfo],
-    comment: String
+    comment: String,
) -> ProgramResult {
    msg!("Adding Comment...");
    msg!("Comment: {}", comment);

@@ -787,34 +870,50 @@ pub fn add_comment(
    let pda_comment = next_account_info(account_info_iter)?;
    let system_program = next_account_info(account_info_iter)?;

-    let mut counter_data = try_from_slice_unchecked::<MovieCommentCounter>(&pda_counter.data.borrow()).unwrap();
+    let mut counter_data =
+        try_from_slice_unchecked::<MovieCommentCounter>(&pda_counter.data.borrow()).unwrap();

    let account_len = MovieComment::get_account_size(comment.clone());

    let rent = Rent::get()?;
    let rent_lamports = rent.minimum_balance(account_len);

-    let (pda, bump_seed) = Pubkey::find_program_address(&[pda_review.key.as_ref(), counter_data.counter.to_be_bytes().as_ref(),], program_id);
+    let (pda, bump_seed) = Pubkey::find_program_address(
+        &[
+            pda_review.key.as_ref(),
+            counter_data.counter.to_be_bytes().as_ref(),
+        ],
+        program_id,
+    );
    if pda != *pda_comment.key {
        msg!("Invalid seeds for PDA");
-        return Err(ReviewError::InvalidPDA.into())
+        return Err(ReviewError::InvalidPDA.into());
    }

    invoke_signed(
        &system_instruction::create_account(
-        commenter.key,
-        pda_comment.key,
-        rent_lamports,
-        account_len.try_into().unwrap(),
-        program_id,
+            commenter.key,
+            pda_comment.key,
+            rent_lamports,
+            account_len.try_into().unwrap(),
+            program_id,
        ),
-        &[commenter.clone(), pda_comment.clone(), system_program.clone()],
-        &[&[pda_review.key.as_ref(), counter_data.counter.to_be_bytes().as_ref(), &[bump_seed]]],
+        &[
+            commenter.clone(),
+            pda_comment.clone(),
+            system_program.clone(),
+        ],
+        &[&[
+            pda_review.key.as_ref(),
+            counter_data.counter.to_be_bytes().as_ref(),
+            &[bump_seed],
+        ]],
    )?;

    msg!("Created Comment Account");

-    let mut comment_data = try_from_slice_unchecked::<MovieComment>(&pda_comment.data.borrow()).unwrap();
+    let mut comment_data =
+        try_from_slice_unchecked::<MovieComment>(&pda_comment.data.borrow()).unwrap();

    msg!("checking if comment account is already initialized");
    if comment_data.is_initialized() {
@@ -837,53 +936,51 @@ pub fn add_comment(
}
```

-#### 7. Build and deploy
+### 7. Build and deploy

We're ready to build and deploy our program!

-Build the updated program by running `cargo-build-bpf`. Then deploy the program
-by running the `solana program deploy` command printed to the console.
+Build the updated program by running `cargo build-bpf`. Run the command
+`solana program deploy <PATH_TO_PROGRAM>` to deploy the program.

-You can test your program by submitting a transaction with the right instruction
-data. You can create your own script or feel free to use
-[this frontend](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-add-comments).
+You can test your program by submitting a transaction with the correct
+instruction data. You can create your own script or use
+[this frontend](https://github.com/solana-developers/movie-frontend/tree/solution-add-comments).
Be sure to use the `solution-add-comments` branch and replace the
-`MOVIE_REVIEW_PROGRAM_ID` in `utils/constants.ts` with your program's ID or the
+`MOVIE_REVIEW_PROGRAM_ID` in `utils/constants.ts` with your program's ID, or the
frontend won't work with your program.
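If you prefer scripting a test over using the frontend, here is a rough sketch
of how a client might assemble the `AddComment` instruction. Everything here is
illustrative rather than part of the lesson code: the helper name is made up,
`comment_count` is assumed to have been read from the counter account first, and
the account ordering mirrors the iteration order in `add_comment` (commenter,
review, counter, comment, system program):

```rust
use borsh::BorshSerialize;
use solana_sdk::{
    instruction::{AccountMeta, Instruction},
    pubkey::Pubkey,
    system_program,
};

// Assemble the AddComment instruction: variant byte 2 followed by the
// borsh-serialized comment string.
fn build_add_comment_ix(
    program_id: Pubkey,
    commenter: Pubkey,
    review_pda: Pubkey,
    comment_count: u64,
    comment: &str,
) -> Instruction {
    let (counter_pda, _) = Pubkey::find_program_address(
        &[review_pda.as_ref(), "comment".as_bytes()],
        &program_id,
    );
    let (comment_pda, _) = Pubkey::find_program_address(
        &[review_pda.as_ref(), comment_count.to_be_bytes().as_ref()],
        &program_id,
    );

    let mut data = vec![2u8];
    data.extend(comment.to_string().try_to_vec().unwrap());

    Instruction {
        program_id,
        accounts: vec![
            AccountMeta::new(commenter, true),
            AccountMeta::new_readonly(review_pda, false),
            AccountMeta::new(counter_pda, false),
            AccountMeta::new(comment_pda, false),
            AccountMeta::new_readonly(system_program::id(), false),
        ],
        data,
    }
}
```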
-Keep in mind that we made breaking changes to the review accounts (i.e. adding a
-discriminator). If you were to use the same program ID that you've used
-previously when deploying this program, none of the reviews you created
-previously will show on this frontend due to a data mismatch.
+Remember that we made breaking changes to the review accounts (i.e., adding a
+discriminator). If you deploy this program with the same program ID you used
+before adding the discriminator, none of the reviews you created previously
+will show on this frontend due to a data mismatch.

If you need more time with this project to feel comfortable with these concepts,
have a look at
-the [solution code](https://github.com/Unboxed-Software/solana-movie-program/tree/solution-add-comments)
+the [solution code](https://github.com/solana-developers/movie-program/tree/solution-add-comments)
before continuing. Note that the solution code is on the
`solution-add-comments` branch of the linked repository.

## Challenge

-Now it’s your turn to build something independently! Go ahead and work with the
+Now it's your turn to build something independently! Go ahead and work with the
Student Intro program that we've used in past lessons. The Student Intro program
is a Solana program that lets students introduce themselves. This program takes
a user's name and a short message as the `instruction_data` and creates an
-account to store the data onchain. For this challenge you should:
+account to store the data onchain. For this challenge, you should:

1. Add an instruction allowing other users to reply to an intro
2. Build and deploy the program locally

If you haven't been following along with past lessons or haven't saved your work
from before, feel free to use the starter code on the `starter` branch of
-[this repository](https://github.com/Unboxed-Software/solana-student-intro-program/tree/starter).
+[solana-student-intro-program](https://github.com/solana-developers/student-intro-program/tree/starter).

-Try to do this independently if you can! If you get stuck though, feel free to
-reference the
-[solution code](https://github.com/Unboxed-Software/solana-student-intro-program/tree/solution-add-replies).
+Try to do this independently! If you get stuck, though, you can reference the
+[solution code](https://github.com/solana-developers/student-intro-program/tree/solution-add-replies).
Note that the solution code is on the `solution-add-replies` branch and that
your code may look slightly different.

-Push your code to GitHub and
-[tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=89d367b4-5102-4237-a7f4-4f96050fe57e)!
+Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=89d367b4-5102-4237-a7f4-4f96050fe57e)!

diff --git a/content/courses/native-onchain-development/program-security.md b/content/courses/native-onchain-development/program-security.md
index bdf694fcd..61872f316 100644
--- a/content/courses/native-onchain-development/program-security.md
+++ b/content/courses/native-onchain-development/program-security.md
@@ -1,119 +1,125 @@
---
title: Create a Basic Program, Part 3 - Basic Security and Validation
objectives:
-  - Explain the importance of "thinking like an attacker"
-  - Understand basic security practices
-  - Perform owner checks
-  - Perform signer checks
-  - Validate accounts passed into the program
-  - Perform basic data validation
-description: "How to implement account checks and validate instruction data."
+  - Understand why "thinking like an attacker" is essential in securing Solana
+    programs.
+  - Learn and implement core security practices to protect your program.
+  - Perform owner and signer checks to verify account ownership and transaction
+    authenticity.
+ - Validate the accounts passed into your program to ensure they are what you + expect. + - Conduct basic data validation to prevent invalid or malicious input from + compromising your program. +description: + "Learn how to secure your Solana program with ownership, signer, and account + validation checks." --- ## Summary -- **Thinking like an attacker** means asking "How do I break this?" -- Perform **owner checks** to ensure that the provided account is owned by the - public key you expect, e.g. ensuring that an account you expect to be a PDA is - owned by `program_id` -- Perform **signer checks** to ensure that any account modification has been - signed by the right party or parties -- **Account validation** entails ensuring that provided accounts are the - accounts you expect them to be, e.g. deriving PDAs with the expected seeds to - make sure the address matches the provided account -- **Data validation** entails ensuring that any provided data meets the criteria - required by the program +- **Thinking like an attacker** is about shifting your mindset to proactively + identify potential security gaps by asking, "How do I break this?" +- **Owner checks** ensure that an account is controlled by the expected public + key, such as verifying that a PDA (Program Derived Address) is owned by the + program. +- **Signer checks** confirm that the right parties have signed the transaction, + allowing for safe modifications to accounts. +- **Account validation** is used to ensure that the accounts passed into your + program match your expectations, like checking the correctness of a PDA's + derivation. +- **Data validation** verifies that the instruction data provided to your + program adheres to specific rules or constraints, ensuring it doesn't lead to + unintended behavior. ## Lesson -In the last two lessons we worked through building a Movie Review program -together. The end result is pretty cool! It's exciting to get something working -in a new development environment. - -Proper program development, however, doesn't end at "get it working." It's -important to think through the possible failure points in your code to mitigate -them. Failure points are where undesirable behavior in your code could -potentially occur. Whether the undesirable behavior happens due to users -interacting with your program in unexpected ways or bad actors intentionally -trying to exploit your program, anticipating failure points is essential to -secure program development. +In the previous lessons +[deserialize instruction data](/content/courses/native-onchain-development/deserialize-instruction-data.md) +and +[program state management](/content/courses/native-onchain-development/program-state-management.md), +we built a Movie Review program, and while getting it to function was exciting, +secure development doesn't stop at "just working." It's critical to understand +potential failure points and take proactive steps to secure your program against +both accidental misuse and intentional exploitation. Remember, **you have no control over the transactions that will be sent to your -program once it’s deployed**. You can only control how your program handles +program once it's deployed**. You can only control how your program handles them. While this lesson is far from a comprehensive overview of program security, we'll cover some of the basic pitfalls to look out for. 
-### Think like an attacker - -[Neodyme](https://workshop.neodyme.io/) gave a presentation at Breakpoint 2021 -entitled "Think Like An Attacker: Bringing Smart Contracts to Their Break(ing) -Point." If there's one thing you take away from this lesson, it's that you -should think like an attacker. - -In this lesson, of course, we cannot cover everything that could possibly go -wrong with your programs. Ultimately, every program will have different security -risks associated with it. While understanding common pitfalls is _essential_ to -engineering good programs, it is _insufficient_ for deploying secure ones. To -have the broadest security coverage possible, you have to approach your code -with the right mindset. +### Think Like an Attacker -As Neodyme mentioned in their presentation, the right mindset requires moving -from the question "Is this broken?" to "How do I break this?" This is the first -and most essential step in understanding what your code _actually does_ as -opposed to what you wrote it to do. +A fundamental principle in secure programming is adopting an "attacker's +mindset." This means considering every possible angle someone might use to break +or exploit your program. -#### All programs can be broken +In their presentation at Breakpoint 2021, +[Neodyme](https://workshop.neodyme.io/) emphasized that secure program +development isn't just about identifying when something is broken; it's about +exploring how it can be broken. By asking, "How do I break this?" you shift from +simply testing expected functionality to uncovering potential weaknesses in the +implementation itself. -It's not a question of "if." +All programs, regardless of complexity, can be exploited. The goal isn't to +achieve absolute security (which is impossible) but to make it as difficult as +possible for malicious actors to exploit weaknesses. By adopting this mindset, +you're better prepared to identify and close gaps in your program's security. -Rather, it's a question of "how much effort and dedication would it take." +#### All Programs Can Be Broken -Our job as developers is to close as many holes as possible and increase the -effort and dedication required to break our code. For example, in the Movie -Review program we built together over the last two lessons, we wrote code to -create new accounts to store movie reviews. If we take a closer look at the -code, however, we'll notice how the program also facilitates a lot of -unintentional behavior we could easily catch by asking "How do I break this?" -We'll dig into some of these problems and how to fix them in this lesson, but -remember that memorizing a few pitfalls isn't sufficient. It's up to you to -change your mindset toward security. +Every program has vulnerabilities. The question isn't whether it can be broken, +but how much effort it takes. As developers, our goal is to close as many +security gaps as possible and increase the effort required to break our code. +For example, while our Movie Review program creates accounts to store reviews, +there may be unintentional behaviors that could be caught by thinking like an +attacker. In this lesson, we'll explore these issues and how to address them. ### Error handling Before we dive into some of the common security pitfalls and how to avoid them, -it's important to know how to use errors in your program. While your code can -handle some issues gracefully, other issues will require that your program stop -execution and return a program error. 
+it's important to know how to use errors in your program. Security issues in a
+Solana program often require terminating the execution with a meaningful error.
+Not all errors are catastrophic, but some should result in stopping the program
+and returning an appropriate error code to prevent further processing.

-#### How to create errors
+#### Creating Custom Errors

-While the `solana_program` crate provides a `ProgramError` enum with a list of
-generic errors we can use, it will often be useful to create your own. Your
-custom errors will be able to provide more context and detail while you're
-debugging your code.
+Solana's
+[`solana_program`](https://docs.rs/solana-program/latest/solana_program/) crate
+provides a generic
+[`ProgramError`](https://docs.rs/solana-program/latest/solana_program/program_error/enum.ProgramError.html)
+enum for error handling. However, custom errors allow you to provide more
+detailed, context-specific information that helps during debugging and testing.

We can define our own errors by creating an enum type listing the errors we want
to use. For example, the `NoteError` contains variants `Forbidden` and
`InvalidLength`. The enum is made into a Rust `Error` type by using the `derive`
-attribute macro to implement the `Error` trait from the `thiserror` library.
-Each error type also has its own `#[error("...")]` notation. This lets you
-provide an error message for each particular error type.
+attribute macro to implement the `Error` trait from the
+[`thiserror`](https://docs.rs/thiserror/latest/thiserror/) library. Each error
+type also has its own `#[error("...")]` notation. This lets you provide an error
+message for each particular error type.
+
+Here's an example of how you can define custom errors in your program:

```rust
-use solana_program::{program_error::ProgramError};
+use solana_program::program_error::ProgramError;
use thiserror::Error;

-#[derive(Error)]
+#[derive(Error, Debug)]
pub enum NoteError {
-    #[error("Wrong note owner")]
+    #[error("Unauthorized access - You don't own this note.")]
    Forbidden,

-    #[error("Text is too long")]
+    #[error("Invalid note length - The text exceeds the allowed limit.")]
    InvalidLength,
}
```

-#### How to return errors
+In this example, we create custom errors for unauthorized access and invalid
+data input (such as note length). Defining custom errors gives us greater
+flexibility when debugging or explaining what went wrong during execution.
+
+#### Returning Errors

The compiler expects errors returned by the program to be of type `ProgramError`
from the `solana_program` crate. That means we won't be able to return our
@@ -138,54 +144,66 @@ if pda != *note_pda.key {
}
```

-### Basic security checks
+This ensures the program gracefully handles errors and provides meaningful
+feedback when things go wrong.

-While these won't comprehensively secure your program, there are a few security
-checks you can keep in mind to fill in some of the larger gaps in your code:
+### Basic Security Checks

-- Ownership checks - used to verify that an account is owned by the program
-- Signer checks - used to verify that an account has signed a transaction
-- General Account Validation - used to verify that an account is the expected
-  account
-- Data Validation - used to verify the inputs provided by a user
+To ensure your Solana program is resilient against common vulnerabilities, you
+should incorporate key security checks.
These are critical for detecting invalid +accounts or unauthorized transactions and preventing undesired behavior. #### Ownership checks -An ownership check verifies that an account is owned by the expected public key. -Let's use the note-taking app example that we've referenced in previous lessons. -In this app, users can create, update, and delete notes that are stored by the -program in PDA accounts. - -When a user invokes the `update` instruction, they also provide a `pda_account`. -We presume the provided `pda_account` is for the particular note they want to -update, but the user can input any instruction data they want. They could even -potentially send data which matches the data format of a note account but was -not also created by the note-taking program. This security vulnerability is one -potential way to introduce malicious code. +An ownership check verifies that an account is owned by the expected program. +For instance, if your program relies on PDAs (Program Derived Addresses), you +want to ensure that those PDAs are controlled by your program and not by an +external party. + +Let's use the note-taking app example that we've referenced in the +[deserialize instruction data](/content/courses/native-onchain-development/deserialize-instruction-data.md) +and +[program state management](/content/courses/native-onchain-development/program-state-management.md) +lessons. In this app, users can create, update, and delete notes that are stored +by the program in PDA accounts. + +When a user invokes the `update` instruction handler, they also provide a +`pda_account`. We presume the provided `pda_account` is for the particular note +they want to update, but the user can input any instruction data they want. They +could even potentially send data that matches the data format of a note account +but was not also created by the note-taking program. This security vulnerability +is one potential way to introduce malicious code. The simplest way to avoid this problem is to always check that the owner of an account is the public key you expect it to be. In this case, we expect the note account to be a PDA account owned by the program itself. When this is not the case, we can report it as an error accordingly. +Here's how you can perform an ownership check to verify that an account is owned +by the program: + ```rust if note_pda.owner != program_id { return Err(ProgramError::InvalidNoteAccount); } ``` -As a side note, using PDAs whenever possible is more secure than trusting -externally-owned accounts, even if they are owned by the transaction signer. The -only accounts that the program has complete control over are PDA accounts, -making them the most secure. +In this example, we check if the `note_pda` is owned by the program itself +(denoted by `program_id`). Ownership checks like these prevent unauthorized +entities from tampering with critical accounts. + + + +PDAs are often considered to be trusted stores of a program's state. Ensuring +the correct program owns the PDAs is a fundamental way to prevent malicious +behavior. -#### Signer checks +#### Signer Checks -A signer check simply verifies that the right parties have signed a transaction. -In the note-taking app, for example, we would want to verify that the note -creator signed the transaction before we process the `update` instruction. -Otherwise, anyone can update another user's notes by simply passing in the -user's public key as the initializer. +Signer checks confirm that a transaction has been signed by the correct parties. 
+In the note-taking app, for example, we want to verify that only the note +creator can update the note. Without this check, anyone could attempt to modify +another user's note by passing in their public key. ```rust if !initializer.is_signer { @@ -194,39 +212,48 @@ if !initializer.is_signer { } ``` -#### General account validation +By verifying that the initializer has signed the transaction, we ensure that +only the legitimate owner of the account can perform actions on it. -In addition to checking the signers and owners of accounts, it's important to -ensure that the provided accounts are what your code expects them to be. For -example, you would want to validate that a provided PDA account's address can be -derived with the expected seeds. This ensures that it is the account you expect -it to be. +#### Account Validation + +Account validation checks that the accounts passed into the program are correct +and valid. This is often done by deriving the expected account using known seeds +(for PDAs) and comparing it to the passed account. -In the note-taking app example, that would mean ensuring that you can derive a -matching PDA using the note creator's public key and the ID as seeds (that's -what we're assuming was used when creating the note). That way a user couldn't -accidentally pass in a PDA account for the wrong note or, more importantly, that -the user isn't passing in a PDA account that represents somebody else's note -entirely. +For instance, in the note-taking app, you can derive the expected PDA using the +creator's public key and note ID, and then validate that it matches the provided +account: ```rust -let (pda, bump_seed) = Pubkey::find_program_address(&[note_creator.key.as_ref(), id.as_bytes().as_ref(),], program_id); +let (expected_pda, bump_seed) = Pubkey::find_program_address( + &[ + note_creator.key.as_ref(), + id.as_bytes().as_ref(), + ], + program_id +); -if pda != *note_pda.key { +if expected_pda != *note_pda.key { msg!("Invalid seeds for PDA"); return Err(ProgramError::InvalidArgument) } ``` -### Data validation +This check prevents a user from accidentally (or maliciously) passing the wrong +PDA or one that belongs to someone else. By validating the PDA's derivation, you +ensure the program is acting on the correct account. -Similar to validating accounts, you should also validate any data provided by -the client. +### Data Validation -For example, you may have a game program where a user can allocate character -attribute points to various categories. You may have a maximum limit in each -category of 100, in which case you would want to verify that the existing -allocation of points plus the new allocation doesn't exceed the maximum. +Data validation ensures that the input provided to your program meets the +expected criteria. This is crucial for avoiding incorrect or malicious data that +could cause the program to behave unpredictably. + +For example, let's say your program allows users to allocate points to a +character's attributes, but each attribute has a maximum allowed value. Before +making any updates, you should check that the new allocation does not exceed the +defined limit: ```rust if character.agility + new_agility > 100 { @@ -235,8 +262,8 @@ if character.agility + new_agility > 100 { } ``` -Or, the character may have an allowance of attribute points they can allocate -and you want to make sure they don't exceed that allowance. 
+Similarly, you should check that the user is not exceeding their allowed number +of points: ```rust if attribute_allowance < new_agility { @@ -245,10 +272,9 @@ if attribute_allowance < new_agility { } ``` -Without these checks, program behavior would differ from what you expect. In -some cases, however, it's more than just an issue of undefined behavior. -Sometimes failure to validate data can result in security loopholes that are -financially devastating. +Without these validations, the program could end up in an undefined state or be +exploited by malicious actors, potentially causing financial loss or +inconsistent behavior. For example, imagine that the character referenced in these examples is an NFT. Further, imagine that the program allows the NFT to be staked to earn token @@ -260,45 +286,50 @@ stakers. #### Integer overflow and underflow -Rust integers have fixed sizes. This means they can only support a specific -range of numbers. An arithmetic operation that results in a higher or lower -value than what is supported by the range will cause the resulting value to wrap -around. For example, a `u8` only supports numbers 0-255, so the result of -addition that would be 256 would actually be 0, 257 would be 1, etc. +One of the common pitfalls when working with integers in Rust (and in Solana +programs) is handling integer overflow and underflow. Rust integers have fixed +sizes and can only hold values within a certain range. When a value exceeds that +range, it wraps around, leading to unexpected results. -This is always important to keep in mind, but especially so when dealing with -any code that represents true value, such as depositing and withdrawing tokens. +For example, with a `u8` (which holds values between 0 and 255), adding 1 to 255 +results in a value of 0 (overflow). To avoid this, you should use checked math +functions like +[`checked_add()`](https://doc.rust-lang.org/std/primitive.u8.html#method.checked_add) +and +[`checked_sub()`](https://doc.rust-lang.org/std/primitive.u8.html#method.checked_sub): To avoid integer overflow and underflow, either: 1. Have logic in place that ensures overflow or underflow _cannot_ happen or -2. Use checked math like `checked_add` instead of `+` +2. Use checked math like `checked_add()` instead of `+` + ```rust let first_int: u8 = 5; let second_int: u8 = 255; - let sum = first_int.checked_add(second_int); + let sum = first_int.checked_add(second_int) + .ok_or(ProgramError::ArithmeticOverflow)?; ``` ## Lab -Let’s practice together with the Movie Review program we've worked on in -previous lessons. No worries if you’re just jumping into this lesson without -having done the previous lesson - it should be possible to follow along either -way. +In this lab, we will build upon the Movie Review program that allows users to +store movie reviews in PDA accounts. If you haven't completed the previous +lessons +[deserialize instruction data](/content/courses/native-onchain-development/deserialize-instruction-data.md) +and +[program state management](/content/courses/native-onchain-development/program-state-management.md), +don't worry—this guide is self-contained. -As a refresher, the Movie Review program lets users store movie reviews in PDA -accounts. Last lesson, we finished implementing the basic functionality of -adding a movie review. Now, we'll add some security checks to the functionality -we've already created and add the ability to update a movie review in a secure -manner. 
- -Just as before, we'll be using [Solana Playground](https://beta.solpg.io/) to -write, build, and deploy our code. +The Movie Review program lets users add and update reviews in PDA accounts. In +previous lessons, we implemented basic functionality for adding reviews. Now, +we'll add security checks and implement an update feature in a secure manner. +We'll use [Solana Playground](https://beta.solpg.io/) to write, build, and +deploy our program. ### 1. Get the starter code To begin, you can find -[the movie review starter code](https://beta.solpg.io/62b552f3f6273245aca4f5c9). +[the movie review starter code](https://beta.solpg.io/62b552f3f6273245aca4f5c9). If you've been following along with the Movie Review labs, you'll notice that we've refactored our program. @@ -317,15 +348,15 @@ defining custom errors. The complete file structure is as follows: - **state.rs -** serialize and deserialize state - **error.rs -** custom program errors -In addition to some changes to file structure, we've updated a small amount of -code that will let this lab be more focused on security without having you write -unnecessary boiler plate. +In addition to some changes to the file structure, we've updated a small amount +of code that will let this lab be more focused on security without having you +write unnecessary boilerplate. Since we'll be allowing updates to movie reviews, we also changed `account_len` -in the `add_movie_review` function (now in `processor.rs`). Instead of +in the `add_movie_review()` function (now in `processor.rs`). Instead of calculating the size of the review and setting the account length to only as large as it needs to be, we're simply going to allocate 1000 bytes to each -review account. This way, we don’t have to worry about reallocating size or +review account. This way, we don't have to worry about reallocating size or re-calculating rent when a user updates their movie review. We went from this: @@ -343,7 +374,7 @@ let account_len: usize = 1000; The [realloc](https://docs.rs/solana-sdk/latest/solana_sdk/account_info/struct.AccountInfo.html#method.realloc) method was just recently enabled by Solana Labs which allows you to dynamically change the size of your accounts. We will not be using this method for this lab, but -it’s something to be aware of. +it's something to be aware of. Finally, we've also implemented some additional functionality for our `MovieAccountState` struct in `state.rs` using the `impl` keyword. @@ -356,8 +387,7 @@ that checks the `is_initialized` field on the `MovieAccountState` struct. `MovieAccountState` has a known size and provides for some compiler optimizations. -```rust -// inside state.rs +```rust filename="state.rs" impl Sealed for MovieAccountState {} impl IsInitialized for MovieAccountState { @@ -367,27 +397,21 @@ impl IsInitialized for MovieAccountState { } ``` -Before moving on, make sure you have a solid grasp on the current state of the +Before moving on, make sure you have a solid grasp of the current state of the program. Look through the code and spend some time thinking through any spots that are confusing to you. It may be helpful to compare the starter code to the [solution code from the previous lesson](https://beta.solpg.io/62b23597f6273245aca4f5b4). ### 2. Custom Errors -Let's begin by writing our custom program errors. 
We'll need errors that we can
-use in the following situations:
-
-- The update instruction has been invoked on an account that hasn't been
-  initialized yet
-- The provided PDA doesn't match the expected or derived PDA
-- The input data is larger than the program allows
-- The rating provided does not fall in the 1-5 range
+We'll define custom errors to handle cases like uninitialized accounts, invalid
+PDA matches, exceeding data limits, and invalid ratings (ratings must be between
+1 and 5). These errors will be added to the `error.rs` file:

The starter code includes an empty `error.rs` file. Open that file and add
errors for each of the above cases.

-```rust
-// inside error.rs
+```rust filename="error.rs"
use solana_program::{program_error::ProgramError};
use thiserror::Error;

@@ -414,19 +438,16 @@ impl From<ReviewError> for ProgramError {
}

-Note that in addition to adding the error cases, we also added the
-implementation that lets us convert our error into a `ProgramError` type as
-needed.
+Note that in addition to adding the error cases, we also added an implementation
+that lets us convert our error into a `ProgramError` type as needed.

-Before moving on, let’s bring `ReviewError` into scope in the `processor.rs`. We
-will be using these errors shortly when we add our security checks.
+After adding the errors, import `ReviewError` in `processor.rs` to use them.

-```rust
-// inside processor.rs
+```rust filename="processor.rs"
use crate::error::ReviewError;
```

-### 3. Add security checks to `add_movie_review`
+### 3. Add Security Checks to add_movie_review

Now that we have errors to use, let's implement some security checks to our
`add_movie_review` function.

@@ -438,7 +459,7 @@ also a signer on the transaction. This ensures that you can't submit movie
reviews impersonating somebody else. We'll put this check right after iterating
through the accounts.

-```rust
+```rust filename="processor.rs"
let account_info_iter = &mut accounts.iter();

let initializer = next_account_info(account_info_iter)?;
@@ -455,11 +476,11 @@ if !initializer.is_signer {
    msg!("Missing required signature");
    return Err(ProgramError::MissingRequiredSignature)
}
```

Next, let's make sure the `pda_account` passed in by the user is the `pda` we
expect. Recall we derived the `pda` for a movie review using the `initializer`
-and `title` as seeds. Within our instruction we’ll derive the `pda` again and
-then check if it matches the `pda_account`. If the addresses do not match, we’ll
+and `title` as seeds. Within our instruction, we'll derive the `pda` again and
+then check if it matches the `pda_account`. If the addresses do not match, we'll
return our custom `InvalidPDA` error.

-```rust
+```rust filename="processor.rs"
// Derive PDA and check that it matches client
let (pda, _bump_seed) = Pubkey::find_program_address(&[initializer.key.as_ref(), account_data.title.as_bytes().as_ref(),], program_id);

if pda != *pda_account.key {
    msg!("Invalid seeds for PDA");
    return Err(ReviewError::InvalidPDA.into())
}
```

Now let's perform some data validation.

We'll start by making sure `rating` falls within the 1 to 5 scale. If the rating
-provided by the user outside of this range, we’ll return our custom
+provided by the user is outside of this range, we'll return our custom
`InvalidRating` error.

-```rust
+```rust filename="processor.rs"
if rating > 5 || rating < 1 {
    msg!("Rating cannot be higher than 5");
    return Err(ReviewError::InvalidRating.into())
}
```

-Next, let’s check that the content of the review does not exceed the 1000 bytes
-we’ve allocated for the account.
If the size exceeds 1000 bytes, we’ll return +Next, let's check that the content of the review does not exceed the 1000 bytes +we've allocated for the account. If the size exceeds 1000 bytes, we'll return our custom `InvalidDataLength` error. -```rust +```rust filename="processor.rs" let total_len: usize = 1 + 1 + (4 + title.len()) + (4 + description.len()); if total_len > 1000 { msg!("Data length is larger than 1000 bytes"); @@ -496,20 +517,20 @@ if total_len > 1000 { } ``` -Lastly, let's checking if the account has already been initialized by calling -the `is_initialized` function we implemented for our `MovieAccountState`. If the +Lastly, let's check if the account has already been initialized by calling the +`is_initialized` function we implemented for our `MovieAccountState`. If the account already exists, then we will return an error. -```rust +```rust filename="processor.rs" if account_data.is_initialized() { msg!("Account already initialized"); return Err(ProgramError::AccountAlreadyInitialized); } ``` -All together, the `add_movie_review` function should look something like this: +Altogether, the `add_movie_review()` function should look something like this: -```rust +```rust filename="processor.rs" pub fn add_movie_review( program_id: &Pubkey, accounts: &[AccountInfo], @@ -592,17 +613,12 @@ pub fn add_movie_review( } ``` -### 4. Support movie review updates in `MovieInstruction` - -Now that `add_movie_review` is more secure, let's turn our attention to -supporting the ability to update a movie review. +### 4. Support Movie Review Updates in MovieInstruction -Let’s begin by updating `instruction.rs`. We’ll start by adding an -`UpdateMovieReview` variant to `MovieInstruction` that includes embedded data -for the new title, rating, and description. +Next, we'll modify `instruction.rs` to add support for updating movie reviews. +We'll introduce a new `UpdateMovieReview()` variant in `MovieInstruction`: -```rust -// inside instruction.rs +```rust filename="instruction.rs" pub enum MovieInstruction { AddMovieReview { title: String, @@ -618,13 +634,12 @@ pub enum MovieInstruction { ``` The payload struct can stay the same since aside from the variant type, the -instruction data is the same as what we used for `AddMovieReview`. +instruction data is the same as what we used for `AddMovieReview()`. -Lastly, in the `unpack` function we need to add `UpdateMovieReview` to the match -statement. +We'll also update the `unpack()` function to handle `UpdateMovieReview()`. -```rust -// inside instruction.rs +```rust filename="instruction.rs" +// Inside instruction.rs impl MovieInstruction { pub fn unpack(input: &[u8]) -> Result { let (&variant, rest) = input.split_first().ok_or(ProgramError::InvalidInstructionData)?; @@ -644,38 +659,38 @@ impl MovieInstruction { } ``` -### 5. Define `update_movie_review` function +### 5. Define update_movie_review Function Now that we can unpack our `instruction_data` and determine which instruction of -the program to run, we can add `UpdateMovieReview` to the match statement in -the `process_instruction` function in the `processor.rs` file. +the program to run, we can add `UpdateMovieReview()` to the match statement in +the `process_instruction()` function in the `processor.rs` file. 
-```rust -// inside processor.rs +```rust filename="processor.rs" +// Inside processor.rs pub fn process_instruction( program_id: &Pubkey, accounts: &[AccountInfo], instruction_data: &[u8] ) -> ProgramResult { - // unpack instruction data + // Unpack instruction data let instruction = MovieInstruction::unpack(instruction_data)?; match instruction { MovieInstruction::AddMovieReview { title, rating, description } => { add_movie_review(program_id, accounts, title, rating, description) }, - // add UpdateMovieReview to match against our new data structure + // Add UpdateMovieReview to match against our new data structure MovieInstruction::UpdateMovieReview { title, rating, description } => { - // make call to update function that we'll define next + // Make call to update function that we'll define next update_movie_review(program_id, accounts, title, rating, description) } } } ``` -Next, we can define the new `update_movie_review` function. The definition +Next, we can define the new `update_movie_review()` function. The definition should have the same parameters as the definition of `add_movie_review`. -```rust +```rust filename="processor.rs" pub fn update_movie_review( program_id: &Pubkey, accounts: &[AccountInfo], @@ -687,16 +702,16 @@ pub fn update_movie_review( } ``` -### 6. Implement `update_movie_review` function +### 6. Implement update_movie_review Function All that's left now is to fill in the logic for updating a movie review. Only let's make it secure from the start. -Just like the `add_movie_review` function, let's start by iterating through the -accounts. The only accounts we'll need are the first two: `initializer` and +Just like the `add_movie_review()` function, let's start by iterating through +the accounts. The only accounts we'll need are the first two: `initializer` and `pda_account`. -```rust +```rust filename="processor.rs" pub fn update_movie_review( program_id: &Pubkey, accounts: &[AccountInfo], @@ -722,7 +737,7 @@ Before we continue, let's implement some basic security checks. We'll start with an ownership check on for `pda_account` to verify that it is owned by our program. If it isn't, we'll return an `InvalidOwner` error. -```rust +```rust filename="processor.rs" if pda_account.owner != program_id { return Err(ProgramError::InvalidOwner) } @@ -730,13 +745,13 @@ if pda_account.owner != program_id { #### Signer Check -Next, let’s perform a signer check to verify that the `initializer` of the +Next, let's perform a signer check to verify that the `initializer` of the update instruction has also signed the transaction. Since we are updating the data for a movie review, we want to ensure that the original `initializer` of the review has approved the changes by signing the transaction. If the -`initializer` did not sign the transaction, we’ll return an error. +`initializer` did not sign the transaction, we'll return an error. -```rust +```rust filename="processor.rs" if !initializer.is_signer { msg!("Missing required signature"); return Err(ProgramError::MissingRequiredSignature) @@ -745,12 +760,12 @@ if !initializer.is_signer { #### Account Validation -Next, let’s check that the `pda_account` passed in by the user is the PDA we +Next, let's check that the `pda_account` passed in by the user is the PDA we expect by deriving the PDA using `initializer` and `title` as seeds. If the -addresses do not match, we’ll return our custom `InvalidPDA` error. We'll -implement this the same way we did in the `add_movie_review` function. 
+addresses do not match, we'll return our custom `InvalidPDA` error. We'll +implement this the same way we did in the `add_movie_review()` function. -```rust +```rust filename="processor.rs" // Derive PDA and check that it matches client let (pda, _bump_seed) = Pubkey::find_program_address(&[initializer.key.as_ref(), account_data.title.as_bytes().as_ref(),], program_id); @@ -760,13 +775,13 @@ if pda != *pda_account.key { } ``` -#### Unpack `pda_account` and perform data validation +#### Unpack pda_account and Perform Data Validation Now that our code ensures we can trust the passed in accounts, let's unpack the `pda_account` and perform some data validation. We'll start by unpacking `pda_account` and assigning it to a mutable variable `account_data`. -```rust +```rust filename="processor.rs" msg!("unpacking state account"); let mut account_data = try_from_slice_unchecked::(&pda_account.data.borrow()).unwrap(); msg!("borrowed account data"); @@ -785,13 +800,13 @@ if !account_data.is_initialized() { ``` Next, we need to validate the `rating`, `title`, and `description` data just -like in the `add_movie_review` function. We want to limit the `rating` to a +like in the `add_movie_review()` function. We want to limit the `rating` to a scale of 1 to 5 and limit the overall size of the review to be fewer than 1000 -bytes. If the rating provided by the user outside of this range, then we’ll +bytes. If the rating provided by the user is outside of this range, then we'll return our custom `InvalidRating` error. If the review is too long, then we'll return our custom `InvalidDataLength` error. -```rust +```rust filename="processor.rs" if rating > 5 || rating < 1 { msg!("Rating cannot be higher than 5"); return Err(ReviewError::InvalidRating.into()) @@ -810,7 +825,7 @@ Now that we've implemented all of the security checks, we can finally update the movie review account by updating `account_data` and re-serializing it. At that point, we can return `Ok` from our program. -```rust +```rust filename="processor.rs" account_data.rating = rating; account_data.description = description; @@ -819,11 +834,11 @@ account_data.serialize(&mut &mut pda_account.data.borrow_mut()[..])?; Ok(()) ``` -All together, the `update_movie_review` function should look something like the -code snippet below. We've included some additional logging for clarity in +All together, the `update_movie_review()` function should look something like +the code snippet below. We've included some additional logging for clarity in debugging. -```rust +```rust filename="processor.rs" pub fn update_movie_review( program_id: &Pubkey, accounts: &[AccountInfo], @@ -900,7 +915,7 @@ pub fn update_movie_review( We're ready to build and upgrade our program! You can test your program by submitting a transaction with the right instruction data. For that, feel free to use this -[frontend](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-update-reviews). +[frontend](https://github.com/solana-developers/movie-frontend/tree/solution-update-reviews). Remember, to make sure you're testing the right program you'll need to replace `MOVIE_REVIEW_PROGRAM_ID` with your program ID in `Form.tsx` and `MovieCoordinator.ts`. @@ -912,9 +927,9 @@ continuing. ## Challenge -Now it’s your turn to build something independently by building on top of the +Now it's your turn to build something independently by building on top of the Student Intro program that you've used in previous lessons. 
If you haven't been -following along or haven't saved your code from before, feel free to use +following along or haven't saved your code before, feel free to use [this starter code](https://beta.solpg.io/62b11ce4f6273245aca4f5b2). The Student Intro program is a Solana Program that lets students introduce @@ -933,6 +948,7 @@ Note that your code may look slightly different than the solution code depending on the checks you implement and the errors you write. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=3dfb98cc-7ba9-463d-8065-7bdb1c841d43)! diff --git a/content/courses/native-onchain-development/program-state-management.md b/content/courses/native-onchain-development/program-state-management.md index c2ecfe3fb..f8dd25ff8 100644 --- a/content/courses/native-onchain-development/program-state-management.md +++ b/content/courses/native-onchain-development/program-state-management.md @@ -3,66 +3,63 @@ title: Create a Basic Program, Part 2 - State Management objectives: - Describe the process of creating a new account using a Program Derived Address (PDA) - - Use seeds to derive a PDA + - Demonstrate how to use seeds to derive a PDA - Use the space required by an account to calculate the amount of rent (in lamports) a user must allocate - Use a Cross Program Invocation (CPI) to initialize an account with a PDA as the address of the new account - Explain how to update the data stored on a new account description: - "Learn how programs store data, using Solana's inbuilt -key-value store." + "Learn how programs store data using Solana's built-in key-value store." --- ## Summary -- Program state is stored in other accounts rather than in the program itself -- A Program Derived Address (PDA) is derived from a program ID and an optional - list of seeds. Once derived, PDAs are subsequently used as the address for a - storage account. -- Creating an account requires that we calculate the space required and the - corresponding rent to allocate for the new account -- Creating a new account requires a Cross Program Invocation (CPI) to the - `create_account` instruction on the System Program -- Updating the data field on an account requires that we serialize (convert to - byte array) the data into the account +- Program state is stored in other accounts, not in the program itself. +- State is stored in Program Derived Address (PDA) accounts, which are generated + from a program ID and optional seeds. The data within a PDA is defined by the + programmer. +- Creating an account requires calculating the necessary space and corresponding + rent in lamports. +- A Cross Program Invocation (CPI) to the `create_account` instruction handler + on the System Program is needed to create a new account. +- Updating the data field on an account involves serializing (converting to a + byte array) the data into the account. ## Lesson -Solana maintains speed, efficiency, and extensibility in part by making programs -stateless. Rather than having state stored on the program itself, programs use -Solana's account model to read state from and write state to separate PDA -accounts. +Solana maintains speed, efficiency, and extensibility by making programs +stateless. Instead of storing state alongside the program's executable, programs +use Solana's account model to read and write state to separate PDA accounts. -While this is an extremely flexible model, it's also a paradigm that can be -difficult to work in if its unfamiliar. But don't worry! 
We'll start simple in
-this lesson and work up to more complex programs in the next unit.
+This model provides a simple, user-friendly key-value store for managing data
+and allows programs to be upgraded without affecting their data. However, if
+you're used to blockchains that keep state inside the contract itself, this
+model may feel unfamiliar at first. In this
+lesson, we'll begin with the basics and gradually introduce more complex onchain
+programs. You'll learn the fundamentals of state management in a Solana program,
+including representing state as a Rust type, creating accounts using PDAs, and
+serializing account data.

-In this lesson we'll learn the basics of state management for a Solana program,
-including representing state as a Rust type, creating accounts using Program
-Derived Addresses, and serializing account data.
+### Program State

-### Program state
+All Solana accounts have a data field that holds a byte array, making accounts
+as flexible as files on a computer. You can store anything in an account, as
+long as it has the necessary storage space.

-All Solana accounts have a `data` field that holds a byte array. This makes
-accounts as flexible as files on a computer. You can store literally anything in
-an account (so long as the account has the storage space for it).
+Just like files in a traditional filesystem conform to specific formats like PDF
+or MP3, data stored in a Solana account must follow a pattern to be retrieved
+and deserialized into something usable.

-Just as files in a traditional filesystem conform to specific data formats like
-PDF or MP3, the data stored in a Solana account needs to follow some kind of
-pattern so that the data can be retrieved and deserialized into something
-usable.
-
-#### Represent state as a Rust type
+#### Represent State as a Rust Type

When writing a program in Rust, we typically create this "format" by defining a
-Rust data type. If you went through the
-[first part of this lesson](basic-program-pt-1), this is very similar to what we
-did when we created an enum to represent discrete instructions.
+Rust data type. This is similar to how we created an enum to represent discrete
+instructions in the first part of the
+[deserialize instruction data lesson](/content/courses/native-onchain-development/deserialize-instruction-data.md#enumerations).

-While this type should reflect the structure of your data, for most use cases a
-simple struct is sufficient. For example, a note-taking program that stores
-notes in separate accounts would likely have data for a title, body, and maybe
-an ID of some kind. We could create a struct to represent that as follows:
+A simple `struct` is usually sufficient for most use cases. For example, a
+note-taking program that stores notes in separate accounts might have fields for
+a title, body, and an ID:

```rust
struct NoteState {
@@ -72,17 +69,15 @@ struct NoteState {
}
```

-#### Using Borsh for serialization and deserialization
+#### Using Borsh for Serialization and Deserialization

-Just as with instruction data, we need a mechanism for converting from our Rust
-data type to a byte array, and vice versa. **Serialization** is the process of
-converting an object into a byte array. **Deserialization** is the process of
-reconstructing an object from a byte array.
+Just as with instruction data, we need to convert our Rust data type to a byte
+array and vice versa. **Serialization** converts an object into a byte array,
+while **deserialization** reconstructs an object from a byte array.
-We'll continue to use Borsh for serialization and deserialization. In Rust, we -can use the `borsh` crate to get access to the `BorshSerialize` and -`BorshDeserialize` traits. We can then apply those traits using the `derive` -attribute macro. +We'll continue using Borsh for serialization and deserialization. In Rust, the +`borsh` crate provides the `BorshSerialize` and `BorshDeserialize` traits. We +apply these traits using the `derive` attribute macro: ```rust use borsh::{BorshSerialize, BorshDeserialize}; @@ -95,49 +90,43 @@ struct NoteState { } ``` -These traits will provide methods on `NoteState` that we can use to serialize -and deserialize the data as needed. +These traits provide methods on `NoteState` for serializing and deserializing +data. -### Creating accounts +### Creating Accounts -Before we can update the data field of an account, we have to first create that +Before we can update the data field of an account, we must first create the account. -To create a new account within our program we must: +To create a new account in our program, we need to: -1. Calculate the space and rent required for the account -2. Have an address to assign the new account -3. Invoke the system program to create the new account +1. Calculate the space and rent required for the account. +2. Determine an address for the new account. +3. Invoke the system program to create the new account. #### Space and rent -Recall that storing data on the Solana network requires users to allocate rent -in the form of lamports. The amount of rent required by a new account depends on -the amount of space you would like allocated to that account. That means we need -to know before creating the account how much space to allocate. +Storing data on the Solana network requires users to allocate rent in the form +of lamports. The required rent depends on the amount of space allocated to the +account, so we must determine the space needed before creating the account. -Note that rent is more like a deposit. All the lamports allocated for rent can -be fully refunded when an account is closed. Additionally, all new accounts are -now required to be +Note that rent is more like a deposit; all lamports allocated for rent can be +fully refunded when an account is closed. Additionally, all new accounts must be [rent-exempt](https://twitter.com/jacobvcreech/status/1524790032938287105), -meaning lamports are not deducted from the account over time. An account is -considered rent-exempt if it holds at least 2 years worth of rent. In other -words, accounts are stored onchain permanently until the owner closes the -account and withdraws the rent. +meaning lamports are not deducted over time. An account is rent-exempt if it +holds at least 2 years' worth of rent, ensuring accounts are stored onchain +permanently until the owner closes the account and withdraws the rent. -In our note-taking app example, the `NoteState` struct specifies three fields -that need to be stored in an account: `title`, `body`, and `id`. To calculate -the size the account needs to be, you would simply add up the size required to -store the data in each field. +In our note-taking app example, the `NoteState` struct has three fields: +`title`, `body`, and `id`. To calculate the required account size, we add up the +space needed for each field. -For dynamic data, like strings, Borsh adds an additional 4 bytes at the -beginning to store the length of that particular field. That means `title` and -`body` are each 4 bytes plus their respective sizes. 
The `id` field is a 64-bit -integer, or 8 bytes. +For dynamic data like strings, Borsh adds an additional 4 bytes to store the +field's length. This means `title` and `body` each require 4 bytes plus their +respective sizes. The `id` field is a 64-bit integer or 8 bytes. -You can add up those lengths and then calculate the rent required for that -amount of space using the `minimum_balance` function from the `rent` module of -the `solana_program` crate. +We can add these lengths and calculate the required rent using the +`minimum_balance` function from the `rent` module of the `solana_program` crate: ```rust // Calculate account size required for struct NoteState @@ -150,31 +139,23 @@ let rent_lamports = rent.minimum_balance(account_len); #### Program Derived Addresses (PDA) -Before creating an account, we also need to have an address to assign the -account. For program owned accounts, this will be a program derived address -(PDA) found using the `find_program_address` function. - -As the name implies, PDAs are derived using the program ID (address of the -program creating the account) and an optional list of “seeds”. Optional seeds -are additional inputs used in the `find_program_address` function to derive the -PDA. The function used to derive PDAs will return the same address every time -when given the same inputs. This gives us the ability to create any number of -PDA accounts and a deterministic way to find each account. - -In addition to the seeds you provide for deriving a PDA, the -`find_program_address` function will provide one additional "bump seed." What -makes PDAs unique from other Solana account addresses is that they do not have a -corresponding secret key. This ensures that only the program that owns the -address can sign on behalf of the PDA. When the `find_program_address` function -attempts to derive a PDA using the provided seeds, it passes in the number 255 -as the "bump seed." If the resulting address is invalid (i.e. has a -corresponding secret key), then the function decreases the bump seed by 1 and -derives a new PDA with that bump seed. Once a valid PDA is found, the function -returns both the PDA and the bump that was used to derive the PDA. - -For our note-taking program, we will use the note creator's public key and the -ID as the optional seeds to derive the PDA. Deriving the PDA this way allows us -to deterministically find the account for each note. +Before creating an account, we also need an address to assign the account. For +program-owned accounts, this will be a Program Derived Address (PDA) found using +the `find_program_address` function. + +PDAs are derived using the program ID (the address of the program creating the +account) and optional seeds. The `find_program_address` function returns the +same address every time with the same inputs, allowing us to deterministically +create and find any number of PDA accounts. + +The `find_program_address` function also provides a "bump seed" to ensure the +PDA doesn't have a corresponding secret key, making it secure for program +ownership. The function starts with a bump seed of 255, decreasing it until a +valid PDA is found. + +For our note-taking program, we'll use the note creator's public key and the ID +as seeds to derive the PDA. 
This allows us to deterministically find the account +for each note: ```rust let (note_pda_account, bump_seed) = Pubkey::find_program_address(&[note_creator.key.as_ref(), id.as_bytes().as_ref(),], program_id); @@ -182,12 +163,10 @@ let (note_pda_account, bump_seed) = Pubkey::find_program_address(&[note_creator. #### Cross Program Invocation (CPI) -Once we’ve calculated the rent required for our account and found a valid PDA to -assign as the address of the new account, we are finally ready to create the -account. Creating a new account within our program requires a Cross Program -Invocation (CPI). A CPI is when one program invokes an instruction on another -program. To create a new account within our program, we will invoke the -`create_account` instruction on the system program. +Once we've calculated the rent and derived a valid PDA, we can create the +account using a Cross Program Invocation (CPI). A CPI is when one program +invokes an instruction on another program. To create a new account, we'll invoke +the `create_account` instruction on the system program. CPIs can be done using either `invoke` or `invoke_signed`. @@ -206,17 +185,21 @@ pub fn invoke_signed( ) -> ProgramResult ``` -For this lesson we will use `invoke_signed`. Unlike a regular signature where a -secret key is used to sign, `invoke_signed` uses the optional seeds, bump seed, -and program ID to derive a PDA and sign an instruction. This is done by -comparing the derived PDA against all accounts passed into the instruction. If -any of the accounts match the PDA, then the signer field for that account is set -to true. +In this lesson, we'll explore `invoke_signed`, a function that allows a program +to authorize actions for a Program Derived Address (PDA) without using a +traditional secret key. Here's how it operates: + +1. `invoke_signed` derives a PDA using seeds, a bump seed, and the program ID. +2. It compares this derived PDA against all accounts in the instruction. +3. If an account matches the derived PDA, that account's signer field becomes + true. -A program can securely sign transactions this way because `invoke_signed` -generates the PDA used for signing with the program ID of the program invoking -the instruction. Therefore, it is not possible for one program to generate a -matching PDA to sign for an account with a PDA derived using another program ID. +This method ensures security because `invoke_signed` generates the PDA using the +invoking program's ID, preventing other programs from producing matching PDAs to +authorize accounts derived with a different program ID. It's crucial to +understand that while we describe the PDA as "authorizing," it doesn't use a +secret key like traditional signatures. Instead, this mechanism enables programs +to approve actions onchain for PDA accounts they control. ```rust invoke_signed( @@ -235,24 +218,18 @@ invoke_signed( )?; ``` -### Serializing and deserializing account data +### Serializing and Deserializing Account Data -Once we've created a new account, we need to access and update the account's -data field. This means deserializing its byte array into an instance of the type -we created, updating the fields on that instance, then serializing that instance -back into a byte array. +After creating an account, we need to update its data field by deserializing its +byte array into the Rust type, updating the fields, and then serializing it +back. 
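+The two subsections below walk through each half of that cycle against a real
+account. As a standalone illustration first, here is a minimal round-trip
+sketch. This is our own example rather than part of the lesson's program, and
+it assumes a pre-1.0 version of the `borsh` crate, where `try_to_vec` is
+available:
+
+```rust
+use borsh::{BorshDeserialize, BorshSerialize};
+
+#[derive(BorshSerialize, BorshDeserialize, Debug, PartialEq)]
+struct NoteState {
+    title: String,
+    body: String,
+    id: u64,
+}
+
+fn main() {
+    let note = NoteState {
+        title: "groceries".to_string(),
+        body: "buy oat milk".to_string(),
+        id: 1,
+    };
+
+    // Serialize: struct -> byte array (what gets written to an account's data field)
+    let bytes = note.try_to_vec().unwrap();
+
+    // Deserialize: byte array -> struct (what we do when reading the account back)
+    let decoded = NoteState::try_from_slice(&bytes).unwrap();
+    assert_eq!(note, decoded);
+}
+```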
-#### Deserialize account data
+#### Deserialize Account Data

-The first step to updating an account's data is to deserialize its `data` byte
-array into its Rust type. You can do this by first borrowing the data field on
-the account. This allows you to access the data without taking ownership.
-
-You can then use the `try_from_slice_unchecked` function to deserialize the data
-field of the borrowed account using the format of the type you created to
-represent the data. This gives you an instance of your Rust type so you can
-easily update fields using dot notation. If we were to do this with the
-note-taking app example we've been using, it would look like this:
+To update an account's data, first deserialize its data byte array into its
+Rust type. Borrow the data field on the account to access it without taking
+ownership. Then, use the `try_from_slice_unchecked()` function to deserialize
+the data into the appropriate Rust type:

```rust
let mut account_data = try_from_slice_unchecked::<NoteState>(note_pda_account.data.borrow()).unwrap();
@@ -262,7 +239,7 @@ account_data.body = rating;
account_data.id = id;
```

-#### Serialize account data
+#### Serialize Account Data

Once the Rust instance representing the account's data has been updated with
the appropriate values, you can "save" the data on the account.
@@ -280,7 +257,7 @@ The above example converts the `account_data` object to a byte array and sets
it to the `data` property on `note_pda_account`. This saves the updated
`account_data` variable to the data field of the new account. Now when a user
fetches the `note_pda_account` and deserializes the data, it will display the
-updated data we’ve serialized into the account.
+updated data we've serialized into the account.

### Iterators

@@ -293,49 +270,52 @@ is
a Rust trait used to give sequential access to each element in a collection of
values. Iterators are used in Solana programs to safely iterate over the list
of accounts passed into the program entry point through the `accounts`
argument.

-#### Rust iterator
+#### Rust Iterator

-The iterator pattern allows you to perform some task on a sequence of items. The
-`iter()` method creates an iterator object that references a collection. An
-iterator is responsible for the logic of iterating over each item and
-determining when the sequence has finished. In Rust, iterators are lazy, meaning
-they have no effect until you call methods that consume the iterator to use it
-up. Once you've created an iterator, you must call the `next()` function on it
-to get the next item.
+The iterator pattern allows you to perform tasks on a sequence of items. The
+`iter()` method creates an iterator object that references a collection. In
+Rust, iterators are lazy and have no effect until methods that consume the
+iterator are called. Use the `next()` function to get the next element in the
+sequence, advancing the iterator each time.

```rust
let v1 = vec![1, 2, 3];

-// create the iterator over the vec
+// Create the iterator over the vec
let v1_iter = v1.iter();

-// use the iterator to get the first item
+// Use the iterator to get the first item
let first_item = v1_iter.next();

-// use the iterator to get the second item
+// Use the iterator to get the second item
let second_item = v1_iter.next();
```

-#### Solana accounts iterator
+#### Solana Accounts Iterator

-Recall that the `AccountInfo` for all accounts required by an instruction are
-passing through a single `accounts` argument. To parse through the accounts and
-use them within our instruction, we will need to create an iterator with a
-mutable reference to the `accounts`.
+In Solana programs, the instruction handler receives an `accounts` argument
+containing `AccountInfo` items for all required accounts. To use these accounts
+within your instruction handler, create an iterator with a mutable reference to
+`accounts`. This approach allows you to process the account information
+sequentially and access the data you need for your instruction handler logic.

-At that point, instead of using the iterator directly, we pass it to the
+Instead of using the iterator directly, you can pass the iterator to the
`next_account_info` function from the `account_info` module provided by the
`solana_program` crate.

-For example, the instruction to create a new note in a note-taking program would
-at minimum require the accounts for the user creating the note, a PDA to store
-the note, and the `system_program` to initialize a new account. All three
-accounts would be passed into the program entry point through the `accounts`
-argument. An iterator of `accounts` is then used to separate out the
+For example, consider an instruction to create a new note in a note-taking
+program. This instruction would minimally require the following accounts:
+
+- The account of the user creating the note.
+- A PDA to store the note.
+- The `system_program` account to initialize a new account.
+
+All three accounts would be passed into the program entry point via the
+`accounts` argument. An iterator of `accounts` is then used to separate the
`AccountInfo` associated with each account to process the instruction.

-Note that `&mut` means a mutable reference to the `accounts` argument. You can
-read more about
+Note: The `&mut` keyword indicates a mutable reference to the `accounts`
+argument. For more details, refer to
[references in Rust](https://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html)
and [the `mut` keyword](https://doc.rust-lang.org/std/keyword.mut.html).

@@ -351,98 +331,97 @@ let system_program = next_account_info(account_info_iter)?;

## Lab

-This overview covered a lot of new concepts. Let’s practice them together by
-continuing to work on the Movie Review program from the last lesson. No worries
-if you’re just jumping into this lesson without having done the previous
-lesson - it should be possible to follow along either way. We'll be using the
-[Solana Playground](https://beta.solpg.io) to write, build, and deploy our code.
+This section introduces several new concepts. Let's practice them together by
+continuing with the Movie Review program from the previous lesson. Even if
+you're starting with this lesson, you should be able to follow along. We'll be
+using the [Solana Playground](https://beta.solpg.io) to write, build, and deploy
+our code.

-As a refresher, we are building a Solana program which lets users review movies.
-Last lesson, we deserialized the instruction data passed in by the user but we
-have not yet stored this data in an account. Let’s now update our program to
-create new accounts to store the user’s movie review.
+As a refresher, we are building a Solana program that lets users review movies.
+In the previous
+[deserialize instruction data lesson](/content/courses/native-onchain-development/deserialize-instruction-data.md),
+we deserialized the instruction data passed in by the user but did not store
+this data in an account.
Let's now update our program to create new accounts to +store the user's movie review. -#### 1. Get the starter code +### 1. Get the starter code -If you didn’t complete the lab from the last lesson or just want to make sure -that you didn’t miss anything, you can reference -[the starter code](https://beta.solpg.io/6295b25b0e6ab1eb92d947f7). +If you didn't complete the lab from the last lesson or just want to make sure +that you didn't miss anything, you can reference +[the starter code](https://beta.solpg.io/66d67d97cffcf4b13384d333). -Our program currently includes the `instruction.rs` file we use to deserialize -the `instruction_data` passed into the program entry point. We have also -completed `lib.rs` file to the point where we can print our deserialized -instruction data to the program log using the `msg!` macro. +Our program currently includes an `instruction.rs` file used to deserialize the +`instruction_data` passed into the program entry point. We've also completed the +`lib.rs` file to the point where we can print our deserialized instruction data +to the program log using the `msg!` macro. -#### 2. Create struct to represent account data +### 2. Create struct to represent account data -Let’s begin by creating a new file named `state.rs`. +Let's begin by creating a new file named `state.rs`. This file will: -1. Define the struct our program uses to populate the data field of a new - account +1. Define the `struct` used to populate the data field of a new account. 2. Add `BorshSerialize` and `BorshDeserialize` traits to this struct -First, let’s bring into scope everything we’ll need from the `borsh` crate. +First, import the necessary items from the `borsh` crate: ```rust use borsh::{BorshSerialize, BorshDeserialize}; ``` -Next, let’s create our `MovieAccountState` struct. This struct will define the -parameters that each new movie review account will store in its data field. Our -`MovieAccountState` struct will require the following parameters: +Next, create the `MovieAccountState` struct, which defines the parameters that +each new movie review account will store in its data field. The struct includes +the following fields: -- `is_initialized` - shows whether or not the account has been initialized -- `rating` - user’s rating of the movie -- `description` - user’s description of the movie -- `title` - title of the movie the user is reviewing +- `is_initialized` - indicates whether the account has been initialized. +- `rating` - the user's rating of the movie. +- `description` - the user's description of the movie. +- `title` - the title of the movie being reviewed. ```rust -#[derive(BorshSerialize, BorshDeserialize)] +#[derive(BorshSerialize, BorshDeserialize, Default)] pub struct MovieAccountState { pub is_initialized: bool, pub rating: u8, pub title: String, - pub description: String + pub description: String, } ``` -#### 3. Update `lib.rs` +### 3. Update lib.rs -Next, let’s update our `lib.rs` file. First, we’ll bring into scope everything -we will need to complete our Movie Review program. You can read more about the -details each item we are using from +Next, update the `lib.rs` file. Start by importing everything needed to complete +the Movie Review program. For more details on each item, refer to [the `solana_program` crate](https://docs.rs/solana-program/latest/solana_program/). 
```rust use solana_program::{ + account_info::{next_account_info, AccountInfo}, entrypoint, entrypoint::ProgramResult, - pubkey::Pubkey, msg, - account_info::{next_account_info, AccountInfo}, + program::invoke_signed, + pubkey::Pubkey, + rent::Rent, system_instruction, - program_error::ProgramError, - sysvar::{rent::Rent, Sysvar}, - program::{invoke_signed}, - borsh::try_from_slice_unchecked, + sysvar::Sysvar, }; -use std::convert::TryInto; +use borsh::{BorshDeserialize, BorshSerialize}; + pub mod instruction; pub mod state; + use instruction::MovieInstruction; use state::MovieAccountState; -use borsh::BorshSerialize; ``` -#### 4. Iterate through `accounts` +### 4. Iterate through accounts -Next, let’s continue building out our `add_movie_review` function. Recall that -an array of accounts is passed into the `add_movie_review` function through a -single `accounts` argument. To process our instruction, we will need to iterate -through `accounts` and assign the `AccountInfo` for each account to its own -variable. +Continue building out the `add_movie_review` function. Recall that an array of +accounts is passed into the `add_movie_review` function through a single +`accounts` argument. To process the instruction, iterate through `accounts` and +assign the `AccountInfo` for each account to a variable. ```rust // Get Account iterator @@ -454,30 +433,31 @@ let pda_account = next_account_info(account_info_iter)?; let system_program = next_account_info(account_info_iter)?; ``` -#### 5. Derive PDA +### 5. Derive PDA -Next, within our `add_movie_review` function, let’s independently derive the PDA -we expect the user to have passed in. We'll need to provide the bump seed for -the derivation later, so even though `pda_account` should reference the same -account, we still need to call `find_program_address`. +Within the `add_movie_review` function, derive the PDA you expect the user to +have passed in. Even though `pda_account` should reference the same account, you +still need to call `find_program_address()` as the bump seed is required for the +derivation. -Note that we derive the PDA for each new account using the initializer’s public -key and the movie title as optional seeds. Setting up the PDA this way restricts -each user to only one review for any one movie title. However, it still allows -the same user to review movies with different titles and different users to -review movies with the same title. +The PDA for each new account is derived using the initializer's public key and +the movie title as seeds. This setup restricts each user to only one review per +movie title but allows different users to review the same movie and the same +user to review different movies. ```rust // Derive PDA -let (pda, bump_seed) = Pubkey::find_program_address(&[initializer.key.as_ref(), title.as_bytes().as_ref(),], program_id); +let (pda, bump_seed) = Pubkey::find_program_address( + &[initializer.key.as_ref(), title.as_bytes().as_ref()], + program_id, +); ``` -#### 6. Calculate space and rent +### 6. Calculate space and rent -Next, let’s calculate the rent that our new account will need. Recall that rent -is the amount of lamports a user must allocate to an account for storing data on -the Solana network. To calculate rent, we must first calculate the amount of -space our new account requires. +Calculate the rent required for the new account. Rent is the amount of lamports +a user must allocate to an account for storing data on the Solana network. 
To
+calculate rent, first determine the space required by the new account.

The `MovieAccountState` struct has four fields. We will allocate 1 byte each
for `rating` and `is_initialized`. For both `title` and `description` we will
@@ -492,14 +472,13 @@ let rent = Rent::get()?;
let rent_lamports = rent.minimum_balance(account_len);
```

-#### 7. Create new account
+### 7. Create new account

-Once we’ve calculated the rent and verified the PDA, we are ready to create our
-new account. To create a new account, we must call the `create_account`
-instruction from the system program. We do this with a Cross Program Invocation
-(CPI) using the `invoke_signed` function. We use `invoke_signed` because we are
-creating the account using a PDA and need the Movie Review program to “sign” the
-instruction.
+Once rent is calculated and the PDA is verified, create the new account. To do
+this, call the `create_account` instruction from the system program using a
+Cross Program Invocation (CPI) with the `invoke_signed` function. Use
+`invoke_signed` because the account is being created with a PDA and the Movie
+Review program needs to “sign” the instruction.

```rust
// Create the account
@@ -511,24 +490,34 @@ invoke_signed(
        account_len.try_into().unwrap(),
        program_id,
    ),
-    &[initializer.clone(), pda_account.clone(), system_program.clone()],
-    &[&[initializer.key.as_ref(), title.as_bytes().as_ref(), &[bump_seed]]],
-)?;
+    &[
+        initializer.clone(),
+        pda_account.clone(),
+        system_program.clone(),
+    ],
+    &[&[
+        initializer.key.as_ref(),
+        title.as_bytes().as_ref(),
+        &[bump_seed],
+    ]],
+)?;

msg!("PDA created: {}", pda);
```

-#### 8. Update account data
+### 8. Update account data

-Now that we’ve created a new account, we are ready to update the data field of
+Now that we've created a new account, we are ready to update the data field of
the new account using the format of the `MovieAccountState` struct from our
`state.rs` file.

We first deserialize the account data from `pda_account` using
`try_from_slice_unchecked`, then set the values of each field.

```rust
-msg!("unpacking state account");
-let mut account_data = try_from_slice_unchecked::<MovieAccountState>(&pda_account.data.borrow()).unwrap();
-msg!("borrowed account data");
+msg!("Unpacking state account");
+let mut account_data =
+    MovieAccountState::try_from_slice(&pda_account.data.borrow())
+        .unwrap_or(MovieAccountState::default());
+msg!("Borrowed account data");

account_data.title = title;
account_data.rating = rating;
@@ -536,45 +525,47 @@ account_data.description = description;
account_data.is_initialized = true;
```

-Lastly, we serialize the updated `account_data` into the data field of our
+Finally, serialize the updated `account_data` into the data field of
`pda_account`.

```rust
-msg!("serializing account");
+msg!("Serializing account");
account_data.serialize(&mut &mut pda_account.data.borrow_mut()[..])?;
-msg!("state account serialized");
+msg!("State account serialized");
```

-#### 9. Build and deploy
+### 9. Build and deploy

-We're ready to build and deploy our program!
+You're now ready to build and deploy your program!

![Gif Build and Deploy Program](/public/assets/courses/unboxed/movie-review-pt2-build-deploy.gif)

You can test your program by submitting a transaction with the right
instruction data. For that, feel free to use
-[this script](https://github.com/Unboxed-Software/solana-movie-client) or
-[the frontend](https://github.com/Unboxed-Software/solana-movie-frontend) we
+[this script](https://github.com/solana-developers/movie-review-program-client)
+or [the frontend](https://github.com/solana-developers/movie-review-frontend) we
built in the
-[Deserialize Custom Instruction Data lesson](deserialize-custom-data). In both
-cases, make sure you copy and paste the program ID for your program into the
-appropriate area of the source code to make sure you're testing the right
+[Deserialize Custom Instruction Data lesson](/content/courses/native-onchain-development/deserialize-custom-data-frontend.md).
+In both cases, set the program ID for your program in the appropriate file
+`web/components/ui/review-form.ts` to make sure you're testing the right
program.

-If you use the frontend, simply replace the `MOVIE_REVIEW_PROGRAM_ID` in both
-the `MovieList.tsx` and `Form.tsx` components with the address of the program
-you’ve deployed. Then run the frontend, submit a view, and refresh the browser
-to see the review.
+- If you're using the script, simply replace the value assigned to
+  `movieProgramId` in the `index.ts` component with the public key of the
+  program you've deployed.
+- If you use the frontend, simply replace the `MOVIE_REVIEW_PROGRAM_ID` in the
+  `review-form.tsx` components with the address of the program you've deployed.
+
+Then run the frontend, submit a review, and refresh the browser to see the
+review.

If you need more time with this project to feel comfortable with these
concepts, have a look at the
-[solution code](https://beta.solpg.io/62b23597f6273245aca4f5b4) before
+[solution code](https://beta.solpg.io/66d67f31cffcf4b13384d334) before
continuing.

## Challenge

-Now it’s your turn to build something independently. Equipped with the concepts
-intoduced in this lesson, you now know everything you'll need to recreate the
+Now it's your turn to build something independently. Equipped with the concepts
+introduced in this lesson, you now know everything you'll need to recreate the
entirety of the Student Intro program from Module 1.

The Student Intro program is a Solana Program that lets students introduce
@@ -589,7 +580,7 @@ taking a name a short message as instruction data, the program should:

string in each account

You can test your program by building the
-[frontend](https://github.com/Unboxed-Software/solana-student-intros-frontend)
+[frontend](https://github.com/solana-developers/solana-student-intro-frontend)
we created in the
[Page, Order, and Filter Program Data lesson](/content/courses/native-onchain-development/paging-ordering-filtering-data-frontend).
Remember to replace the program ID in the frontend code with the one you've

@@ -599,6 +590,7 @@ Try to do this independently if you can! But if you get stuck, feel free to
reference the
-[solution code](https://beta.solpg.io/62b11ce4f6273245aca4f5b2).
+[solution code](https://beta.solpg.io/62b11ce4f6273245aca4f5b2), or peek at the
+state-struct sketch at the end of this lesson.

+
Push your code to GitHub and
[tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=8320fc87-2b6d-4b3a-8b1a-54b55afed781)!
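+Here is the state-struct sketch mentioned above: one possible shape for the
+account state, mirroring the `MovieAccountState` pattern from this lesson. The
+field names are our own guesses for illustration, not necessarily what the
+solution code uses:
+
+```rust
+use borsh::{BorshSerialize, BorshDeserialize};
+
+// One account per student, with a flag to guard against re-initialization
+#[derive(BorshSerialize, BorshDeserialize)]
+pub struct StudentInfo {
+    pub is_initialized: bool,
+    pub name: String,
+    pub msg: String,
+}
+```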
diff --git a/content/courses/native-onchain-development/serialize-instruction-data-frontend.md b/content/courses/native-onchain-development/serialize-instruction-data-frontend.md
index 62bb684d8..410b2abf2 100644
--- a/content/courses/native-onchain-development/serialize-instruction-data-frontend.md
+++ b/content/courses/native-onchain-development/serialize-instruction-data-frontend.md
@@ -27,21 +27,21 @@ description: How to deserialize data fetched from Solana accounts.
  buffer. To facilitate this process of serialization, we will be using
  [Borsh](https://borsh.io/).
- Transactions can fail to be processed by the blockchain for any number of
-  reasons, we’ll discuss some of the most common ones here.
+  reasons; we'll discuss some of the most common ones here.

## Lesson

### Transactions

This course requires completing
-[Introduction to Solana](/developers/courses/intro-to-solana) or equivalent
+[Introduction to Solana](/content/courses/intro-to-solana) or equivalent
knowledge. It's also aimed at advanced developers who prefer more control over
the ease of use and safe defaults Anchor provides. If you're new to developing
onchain programs you may prefer
-[Anchor](/developers/courses/onchain-development)
+[Anchor](/content/courses/onchain-development).

-In [Introduction to Solana](/developers/courses/intro-to-solana) we learned how
-to create transactions with instructions for common Solana programs.
+In [Introduction to Solana](/content/courses/intro-to-solana) we learned how to
+create transactions with instructions for common Solana programs.

This lesson shows how to create instructions for our own native Solana
programs, which we will develop in a few lessons. Specifically, we're going to
@@ -86,14 +86,14 @@ if every instruction succeeds then the transaction as a whole will be
successful, but if a single instruction fails then the entire transaction will
fail immediately with no side-effects.

-The account array is not just an array of the accounts’ public keys. Each object
-in the array includes the account’s public key, whether or not it is a signer on
+The account array is not just an array of the accounts' public keys. Each object
+in the array includes the account's public key, whether or not it is a signer on
the transaction, and whether or not it is writable. Including whether or not an
account is writable during the execution of an instruction allows the runtime to
facilitate parallel processing of smart contracts. Because you must define which
accounts are read-only and which you will write to, the runtime can determine
which transactions are non-overlapping or read-only and allow them to execute
-concurrently. To learn more about Solana’s runtime, check out this
+concurrently. To learn more about Solana's runtime, check out this
[blog post on Sealevel](https://solana.com/news/sealevel---parallel-processing-thousands-of-smart-contracts).

#### Instruction Data

@@ -104,17 +104,17 @@ an HTTP request lets you build dynamic and flexible REST APIs.

Just as the structure of the body of an HTTP request is dependent on the
endpoint you intend to call, the structure of the byte buffer used as
-instruction data is entirely dependent on the recipient program. If you’re
-building a full-stack dApp on your own, then you’ll need to copy the same
+instruction data is entirely dependent on the recipient program. If you're
+building a full-stack dApp on your own, then you'll need to copy the same
structure that you used when building the program over to the client-side code.
-If you’re working with another developer who is handling the program +If you're working with another developer who is handling the program development, you can coordinate to ensure matching buffer layouts. -Let’s think about a concrete example. Imagine working on a Web3 game and being +Let's think about a concrete example. Imagine working on a Web3 game and being responsible for writing client-side code that interacts with a player inventory program. The program was designed to allow the client to: -- Add inventory based on a player’s game-play results +- Add inventory based on a player's game-play results - Transfer inventory from one player to another - Equip a player with selected inventory items @@ -125,11 +125,11 @@ Each program, however, only has one entry point. You would instruct the program on which of these functions to run through the instruction data. You would also include in the instruction data any information the function -needs to execute properly, e.g. an inventory item’s ID, a player to transfer +needs to execute properly, e.g. an inventory item's ID, a player to transfer inventory to, etc. Exactly _how_ this data would be structured would depend on how the program was -written, but it’s common to have the first field in instruction data be a number +written, but it's common to have the first field in instruction data be a number that the program can map to a function, after which additional fields act as function arguments. @@ -145,10 +145,10 @@ in Solana is [Borsh](https://borsh.io). Per the website: Borsh maintains a [JS library](https://github.com/near/borsh-js) that handles serializing common types into a buffer. There are also other packages built on -top of Borsh that try to make this process even easier. We’ll be using the +top of Borsh that try to make this process even easier. We'll be using the `@coral-xyz/borsh` library which can be installed using `npm`. -Building off of the previous game inventory example, let’s look at a +Building off of the previous game inventory example, let's look at a hypothetical scenario where we are instructing the program to equip a player with a given item. Assume the program is designed to accept a buffer that represents a struct with the following properties: @@ -176,9 +176,9 @@ const equipPlayerSchema = borsh.struct([ You can then encode data using this schema with the `encode` method. This method accepts as arguments an object representing the data to be serialized and a -buffer. In the below example, we allocate a new buffer that’s much larger than +buffer. In the below example, we allocate a new buffer that's much larger than needed, then encode the data into that buffer and slice the original buffer down -into a new buffer that’s only as large as needed. +into a new buffer that's only as large as needed. ```typescript import * as borsh from "@coral-xyz/borsh"; @@ -195,23 +195,30 @@ equipPlayerSchema.encode( buffer, ); -const instructionBuffer = buffer.slice(0, equipPlayerSchema.getSpan(buffer)); +const instructionBuffer = buffer.subarray(0, equipPlayerSchema.getSpan(buffer)); ``` -Once a buffer is properly created and the data serialized, all that’s left is -building the transaction. This is similar to what you’ve done in previous +Once a buffer is properly created and the data serialized, all that's left is +building the transaction. This is similar to what you've done in previous lessons. 
The example below assumes that:

- `player`, `playerInfoAccount`, and `PROGRAM_ID` are already defined somewhere
  outside the code snippet
-- `player` is a user’s public key
+- `player` is a user's public key
- `playerInfoAccount` is the public key of the account where inventory changes
  will be written
- `SystemProgram` will be used in the process of executing the instruction.

```typescript
import * as borsh from "@coral-xyz/borsh";
-import * as web3 from "@solana/web3.js";
+import {
+  clusterApiUrl,
+  Connection,
+  SystemProgram,
+  Transaction,
+  TransactionInstruction,
+  sendAndConfirmTransaction,
+} from "@solana/web3.js";
+import { getExplorerLink } from "@solana-developers/helpers";

const equipPlayerSchema = borsh.struct([
  borsh.u8("variant"),
@@ -225,13 +232,13 @@ equipPlayerSchema.encode(
  buffer,
);

-const instructionBuffer = buffer.slice(0, equipPlayerSchema.getSpan(buffer));
+const instructionBuffer = buffer.subarray(0, equipPlayerSchema.getSpan(buffer));

-const endpoint = web3.clusterApiUrl("devnet");
-const connection = new web3.Connection(endpoint);
+const endpoint = clusterApiUrl("devnet");
+const connection = new Connection(endpoint);

-const transaction = new web3.Transaction();
-const instruction = new web3.TransactionInstruction({
+const transaction = new Transaction();
+const instruction = new TransactionInstruction({
  keys: [
    {
      pubkey: player.publicKey,
@@ -244,7 +251,7 @@ const instruction = new web3.TransactionInstruction({
      isWritable: true,
    },
    {
-      pubkey: web3.SystemProgram.programId,
+      pubkey: SystemProgram.programId,
      isSigner: false,
      isWritable: false,
    },
@@ -255,33 +262,39 @@ const instruction = new web3.TransactionInstruction({

transaction.add(instruction);

-web3.sendAndConfirmTransaction(connection, transaction, [player]).then(txid => {
-  console.log(
-    `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`,
+try {
+  const transactionId = await sendAndConfirmTransaction(
+    connection,
+    transaction,
+    [player],
  );
-});
+  const explorerLink = getExplorerLink("transaction", transactionId, "devnet");
+  console.log(`Transaction submitted: ${explorerLink}`);
+} catch (error) {
+  alert(error);
+}
```

## Lab

-Let’s practice this together by building a Movie Review app that lets users
-submit a movie review and have it stored on Solana’s network. We’ll build this
+Let's practice this together by building a Movie Review app that lets users
+submit a movie review and have it stored on Solana's network. We'll build this
app a little bit at a time over the next few lessons, adding new functionality
each lesson.

-![Movie review frontend](/public/assets/courses/unboxed/movie-reviews-frontend.png)
+![Movie review frontend](/public/assets/courses/movie-review-dapp.png)

Here's a quick diagram of the program we'll build:

![Solana stores data items in PDAs, which can be found using their seeds](/public/assets/courses/unboxed/movie-review-program.svg)

-The public key of the Solana program we’ll use for this application is
+The public key of the Solana program we'll use for this application is
`CenYq6bDRB7p73EjsPEpiYN7uveyPUTdXkDkgUduboaN`.

#### 1. Download the starter code

Before we get started, go ahead and download the
-[starter code](https://github.com/Unboxed-Software/solana-movie-frontend/tree/starter).
+[starter code](https://github.com/solana-developers/movie-review-frontend/tree/starter).

The project is a fairly simple Next.js application.
It includes the `WalletContextProvider` we created in the Wallets lesson, a `Card` component for @@ -290,8 +303,8 @@ list, a `Form` component for submitting a new review, and a `Movie.ts` file that contains a class definition for a `Movie` object. Note that for now, the movies displayed on the page when you run `npm run dev` -are mocks. In this lesson, we’ll focus on adding a new review but we won’t be -able to see that review displayed. Next lesson, we’ll focus on deserializing +are mocks. In this lesson, we'll focus on adding a new review but we won't be +able to see that review displayed. Next lesson, we'll focus on deserializing custom data from onchain accounts. #### 2. Create the buffer layout @@ -309,7 +322,7 @@ data to contain: 4. `description` as a string representing the written portion of the review you are leaving for the movie. -Let’s configure a `borsh` layout in the `Movie` class. Start by importing +Let's configure a `borsh` layout in the `Movie` class. Start by importing `@coral-xyz/borsh`. Next, create a `borshInstructionSchema` property and set it to the appropriate `borsh` struct containing the properties listed above. @@ -337,31 +350,75 @@ how the program is structured, the transaction will fail. #### 3. Create a method to serialize data -Now that we have the buffer layout set up, let’s create a method in `Movie` -called `serialize()` that will return a `Buffer` with a `Movie` object’s +Now that we have the buffer layout set up, let's create a method in `Movie` +called `serialize()` that will return a `Buffer` with a `Movie` object's properties encoded into the appropriate layout. +Instead of allocating a fixed buffer size, we'll calculate the size dynamically +using known constants for the space required by each field in the `Movie` +object. Specifically, we'll use `INIT_SPACE` (to account for string length +metadata) and `ANCHOR_DISCRIMINATOR` (to account for the 8-byte discriminator +used by Anchor). + ```typescript -import * as borsh from '@coral-xyz/borsh' +import * as borsh from "@coral-xyz/borsh"; + +// Constants for size calculations +const ANCHOR_DISCRIMINATOR = 8; // 8 bytes for the account discriminator used by Anchor +const STRING_LENGTH_SPACE = 4; // 4 bytes to store the length of each string + +// Specific sizes for 'title' and 'description' strings +const TITLE_SIZE = 100; // Allocate 100 bytes for the 'title' +const DESCRIPTION_SIZE = 500; // Allocate 500 bytes for the 'description' + +// Total space calculation for the Movie review structure +const MOVIE_REVIEW_SPACE = + ANCHOR_DISCRIMINATOR + // 8 bytes for the account discriminator + STRING_LENGTH_SPACE + + TITLE_SIZE + // 4 bytes for the title length + 100 bytes for the title + STRING_LENGTH_SPACE + + DESCRIPTION_SIZE + // 4 bytes for the description length + 500 bytes for the description + 1 + // 1 byte for 'variant' + 1; // 1 byte for 'rating' export class Movie { title: string; rating: number; description: string; - ... 
+ constructor(title: string, rating: number, description: string) { + // Enforce specific sizes for title and description + if (title.length > TITLE_SIZE) { + throw new Error(`Title cannot exceed ${TITLE_SIZE} characters.`); + } + if (description.length > DESCRIPTION_SIZE) { + throw new Error( + `Description cannot exceed ${DESCRIPTION_SIZE} characters.`, + ); + } + + this.title = title; + this.rating = rating; + this.description = description; + } borshInstructionSchema = borsh.struct([ - borsh.u8('variant'), - borsh.str('title'), - borsh.u8('rating'), - borsh.str('description'), - ]) + borsh.u8("variant"), + borsh.str("title"), + borsh.u8("rating"), + borsh.str("description"), + ]); serialize(): Buffer { - const buffer = Buffer.alloc(1000) - this.borshInstructionSchema.encode({ ...this, variant: 0 }, buffer) - return buffer.slice(0, this.borshInstructionSchema.getSpan(buffer)) + try { + // Allocate a buffer with the exact space needed + const buffer = Buffer.alloc(MOVIE_REVIEW_SPACE); + this.borshInstructionSchema.encode({ ...this, variant: 0 }, buffer); + return buffer.subarray(0, this.borshInstructionSchema.getSpan(buffer)); + } catch (error) { + console.error("Serialization error:", error); + return Buffer.alloc(0); + } } } ``` @@ -380,7 +437,7 @@ send the transaction when a user submits the form. Open `Form.tsx` and locate the `handleTransactionSubmit` function. This gets called by `handleSubmit` each time a user submits the Movie Review form. -Inside this function, we’ll be creating and sending the transaction that +Inside this function, we'll be creating and sending the transaction that contains the data submitted through the form. Start by importing `@solana/web3.js` and importing `useConnection` and @@ -391,19 +448,12 @@ import { FC } from "react"; import { Movie } from "../models/Movie"; import { useState } from "react"; import { - Box, - Button, - FormControl, - FormLabel, - Input, - NumberDecrementStepper, - NumberIncrementStepper, - NumberInput, - NumberInputField, - NumberInputStepper, - Textarea, -} from "@chakra-ui/react"; -import * as web3 from "@solana/web3.js"; + Connection, + PublicKey, + SystemProgram, + Transaction, + TransactionInstruction, +} from "@solana/web3.js"; import { useConnection, useWallet } from "@solana/wallet-adapter-react"; ``` @@ -415,9 +465,15 @@ Next, before the `handleSubmit` function, call `useConnection()` to get a import { FC } from 'react' import { Movie } from '../models/Movie' import { useState } from 'react' -import { Box, Button, FormControl, FormLabel, Input, NumberDecrementStepper, NumberIncrementStepper, NumberInput, NumberInputField, NumberInputStepper, Textarea } from '@chakra-ui/react' -import * as web3 from '@solana/web3.js' +import { + Connection, + PublicKey, + SystemProgram, + Transaction, + TransactionInstruction, +} from "@solana/web3.js" import { useConnection, useWallet } from '@solana/wallet-adapter-react' +import { getExplorerLink } from "@solana-developers/helpers"; const MOVIE_REVIEW_PROGRAM_ID = 'CenYq6bDRB7p73EjsPEpiYN7uveyPUTdXkDkgUduboaN' @@ -439,7 +495,7 @@ export const Form: FC = () => { } ``` -Before we implement `handleTransactionSubmit`, let’s talk about what needs to be +Before we implement `handleTransactionSubmit`, let's talk about what needs to be done. We need to: 1. Check that `publicKey` exists to ensure that the user has connected their @@ -450,12 +506,12 @@ done. We need to: 4. Get all of the accounts that the transaction will read or write. 5. 
Create a new `Instruction` object that includes all of these accounts in the
   `keys` argument, includes the buffer in the `data` argument, and includes the
-   program’s public key in the `programId` argument.
+   program's public key in the `programId` argument.
6. Add the instruction from the last step to the transaction.
7. Call `sendTransaction`, passing in the assembled transaction.

-That’s quite a lot to process! But don’t worry, it gets easier the more you do
-it. Let’s start with the first 3 steps from above:
+That's quite a lot to process! But don't worry, it gets easier the more you do
+it. Let's start with the first 3 steps from above:

```typescript
const handleTransactionSubmit = async (movie: Movie) => {
@@ -465,26 +521,26 @@ const handleTransactionSubmit = async (movie: Movie) => {
  }

  const buffer = movie.serialize();
-  const transaction = new web3.Transaction();
+  const transaction = new Transaction();
};
```

The next step is to get all of the accounts that the transaction will read or
write. In past lessons, the account where data will be stored has been given to
-you. This time, the account’s address is more dynamic, so it needs to be
-computed. We’ll cover this in-depth in the next lesson, but for now, you can use
+you. This time, the account's address is more dynamic, so it needs to be
+computed. We'll cover this in-depth in the next lesson, but for now, you can use
the following, where `pda` is the address to the account where data will be
stored:

```typescript
-const [pda] = await web3.PublicKey.findProgramAddress(
+const [pda] = PublicKey.findProgramAddressSync(
  [publicKey.toBuffer(), Buffer.from(movie.title)],
-  new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID),
+  new PublicKey(MOVIE_REVIEW_PROGRAM_ID),
);
```

In addition to this account, the program will also need to read from
-`SystemProgram`, so our array needs to include `web3.SystemProgram.programId` as
+`SystemProgram`, so our array needs to include `SystemProgram.programId` as
well.

With that, we can finish the remaining steps:

@@ -497,14 +553,14 @@ const handleTransactionSubmit = async (movie: Movie) => {
  }

  const buffer = movie.serialize();
-  const transaction = new web3.Transaction();
+  const transaction = new Transaction();

-  const [pda] = await web3.PublicKey.findProgramAddress(
+  const [pda] = PublicKey.findProgramAddressSync(
    [publicKey.toBuffer(), new TextEncoder().encode(movie.title)],
-    new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID),
+    new PublicKey(MOVIE_REVIEW_PROGRAM_ID),
  );

-  const instruction = new web3.TransactionInstruction({
+  const instruction = new TransactionInstruction({
    keys: [
      {
        pubkey: publicKey,
@@ -517,47 +573,50 @@ const handleTransactionSubmit = async (movie: Movie) => {
        isWritable: true,
      },
      {
-        pubkey: web3.SystemProgram.programId,
+        pubkey: SystemProgram.programId,
        isSigner: false,
        isWritable: false,
      },
    ],
    data: buffer,
-    programId: new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID),
+    programId: new PublicKey(MOVIE_REVIEW_PROGRAM_ID),
  });

  transaction.add(instruction);

  try {
-    let txid = await sendTransaction(transaction, connection);
-    console.log(
-      `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`,
+    const transactionId = await sendTransaction(transaction, connection);
+    const explorerLink = getExplorerLink(
+      "transaction",
+      transactionId,
+      "devnet",
    );
-  } catch (e) {
-    alert(JSON.stringify(e));
+    console.log(`Transaction submitted: ${explorerLink}`);
+  } catch (error) {
+    alert(error);
  }
};
```

-And that’s it!
You should now be able to use the form on the site to submit a -movie review. While you won’t see the UI update to reflect the new review, you -can look at the transaction’s program logs on Solana Explorer to see that it was +And that's it! You should now be able to use the form on the site to submit a +movie review. While you won't see the UI update to reflect the new review, you +can look at the transaction's program logs on Solana Explorer to see that it was successful. If you need a bit more time with this project to feel comfortable, have a look at the complete -[solution code](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-serialize-instruction-data). +[solution code](https://github.com/solana-developers/movie-review-frontend/tree/solution-serialize-instruction-data). ## Challenge -Now it’s your turn to build something independently. Create an application that +Now it's your turn to build something independently. Create an application that lets students of this course introduce themselves! The Solana program that supports this is at `HdE95RSVsdb315jfJtaykXhXY478h53X6okDupVfY9yf`. -![Student Intros frontend](/public/assets/courses/unboxed/student-intros-frontend.png) +![Student Intros frontend](/public/assets/courses/student-intros-frontend.png) 1. You can build this from scratch or you can - [download the starter code](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/starter). + [download the starter code](https://github.com/solana-developers/solana-student-intro-frontend/tree/starter). 2. Create the instruction buffer layout in `StudentIntro.ts`. The program expects instruction data to contain: 1. `variant` as an unsigned, 8-bit integer representing the instruction to @@ -575,7 +634,7 @@ supports this is at `HdE95RSVsdb315jfJtaykXhXY478h53X6okDupVfY9yf`. Explorer to verify that it worked. If you get stumped, you can -[check out the solution code](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/solution-serialize-instruction-data). +[check out the solution code](https://github.com/solana-developers/solana-student-intro-frontend/tree/solution-serialize-instruction-data). Feel free to get creative with these challenges and take them even further. The instructions aren't here to hold you back! diff --git a/content/courses/offline-transactions/durable-nonces.md b/content/courses/offline-transactions/durable-nonces.md index 4d166979e..994213b3b 100644 --- a/content/courses/offline-transactions/durable-nonces.md +++ b/content/courses/offline-transactions/durable-nonces.md @@ -14,31 +14,32 @@ description: "Use durable nonces to sign transactions ahead of time." have an expiration date of 150 blocks (~80-90 seconds). - After signing a durable transaction you can store it in a database or a file or send it to another device to submit it later. -- A durable transactions is made using a nonce account. A nonce account holds +- A durable transaction is created using a nonce account. A nonce account holds the authority and the nonce value which replaces the recent blockhash to make a durable transaction - Durable transactions must start with an `advanceNonce` instruction, and the - nonce authority has to be a signer in the transaction. -- If the transaction fails for any reason other than the nonce advanced - instruction the nonce will still get advanced, even though all other - instruction will get reverted. + nonce authority must be a signer of the transaction. 
+- If the transaction fails for any reason other than the `advanceNonce`
+ instruction, the nonce will still be advanced, even though all other
+ instructions will be reverted.
## Overview
Durable Nonces are a way to bypass the expiration date of regular transactions.
-To understand that better, we'll start by looking at the concepts behind regular
+To understand this, we'll start by looking at the concepts behind regular
transactions.
-In Solana, transactions are made of three main parts:
+In Solana, transactions have three main parts:
-1. **Instructions**: Instructions are the operations that you want to perform on
- the blockchain, like transferring tokens, creating accounts, or calling a
+1. **Instructions**: Instructions are the operations you want to perform on the
+ blockchain, like transferring tokens, creating accounts, or calling a
program. These are executed in order.
-2. **Signatures**: Signatures are the proof that the transaction was signed by
- the required singers/authorities. For instance, if you are transferring SOL
- from your wallet to another, you'll need to sign the transaction so the
- network can verify that the transaction is valid.
+2. **Signatures**: Signatures prove that the transaction was authorized by the
+ holders of the required private keys - usually the signers themselves. For
+ instance, if you are transferring SOL from your wallet to another, you'll
+ need to sign the transaction so the network can verify that it is valid.
3. **Recent Blockhash**: The recent blockhash is a unique identifier for each transaction. It is used to prevent replay attacks, where an attacker records @@ -46,7 +47,7 @@ In Solana, transactions are made of three main parts: that each transaction is unique and can only be submitted once. A recent blockhash is only valid for 150 blocks.
-In durable transactions, the first two concepts will remain the same. Durable
+In durable transactions, the first two concepts remain the same. Durable
transactions are possible by playing with recent blockhashes.
Let's dive deep into the recent blockhash, to understand the blockhash better @@ -54,11 +55,10 @@ let's look at the problem that it tries to solve, the [double-spend](https://solana.com/developers/guides/advanced/introduction-to-durable-nonces#double-spend) problem.
-Imagine you're buying an NFT on MagicEden or Tensor. You have to sign a
-transaction that allows the marketplace's program to extract some SOL from your
-wallet. After signing the transaction the marketplace will submit it to the
-network. If the marketplace submits it again, without checks, you could be
-charged twice.
+Imagine you're buying an NFT on MagicEden or Tensor. You must sign a transaction
+that allows the marketplace's program to extract some SOL from your wallet.
+After signing the transaction the marketplace will submit it to the network. If
+the marketplace submits it again, without checks, you could be charged twice.
This is known as the double-spend problem and is one of the core issues that blockchains, like Solana, solve. A naive solution could be to crosscheck all @@ -68,7 +68,7 @@ is >80 TB. So to solve this, Solana uses recent blockhashes.
A recent blockhash is a 32-byte SHA-256 hash of a valid block's last [entry id](https://solana.com/docs/terminology#blockhash) within the last 150
-blocks. Since this recent blockhash is part of the transaction before it was
+blocks. Since this recent blockhash was part of the transaction before it was
signed, we can guarantee the signer has signed it within the last 150 blocks. Checking 150 blocks is much more reasonable than the entire ledger.
@@ -80,7 +80,7 @@ When the transaction is submitted, the Solana validators will do the following:
2. If the transaction signature has not been found, it will check the recent blockhash to see if it exists within the last 150 blocks - if it does not, it will return a "Blockhash not found" error. If it does, the transaction goes
- through to its execution checks.
+ through its execution checks.
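To make that lifetime concrete, here is a minimal sketch of how a regular transaction picks up its recent blockhash with `@solana/web3.js` (the endpoint, amounts, and keypairs below are placeholders for illustration):

```typescript
import {
  Connection,
  Keypair,
  SystemProgram,
  Transaction,
  sendAndConfirmTransaction,
} from "@solana/web3.js";

const connection = new Connection("http://localhost:8899", "confirmed");
const payer = Keypair.generate(); // assumed to be funded
const recipient = Keypair.generate();

// Every regular transaction embeds a recent blockhash before signing
const { blockhash, lastValidBlockHeight } =
  await connection.getLatestBlockhash();

const transaction = new Transaction({
  feePayer: payer.publicKey,
  blockhash,
  lastValidBlockHeight,
}).add(
  SystemProgram.transfer({
    fromPubkey: payer.publicKey,
    toPubkey: recipient.publicKey,
    lamports: 1_000_000,
  }),
);

// If submission is delayed past ~150 blocks, validators reject the
// transaction with a "Blockhash not found" error
await sendAndConfirmTransaction(connection, transaction, [payer]);
```

Durable nonces exist precisely to remove that deadline, as the rest of this lesson shows.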
While this solution is great for most use cases, it has some limitations. Mainly, the transaction needs to get signed and submitted to the network within @@ -96,7 +96,7 @@ From the > transfers, contract interactions, or even executing pre-determined > investment strategies. > 2. **Multisig Wallets**: Durable Nonces are very useful for multi-signature
-> wallets where one party signs a transaction, and others may confirm at a
+> wallets where one party signs a transaction, and others may confirm it at a
> later time. This feature enables the proposal, review, and later execution > of a transaction within a trustless system. > 3. **Programs Requiring Future Interaction**: If a program on Solana requires @@ -105,8 +105,8 @@ From the > This ensures the contract interaction happens at the correct time without > necessitating the presence of the transaction creator. > 4. **Cross-chain Interactions**: When you need to interact with another
-> blockchain, and it requires waiting for confirmations, you could sign the
-> transaction with a Durable Nonce and then execute it once the required
+> blockchain and it requires waiting for confirmations, you can sign the
+> transaction with a Durable Nonce and execute it once the required
> confirmations are received. > 5. **Decentralized Derivatives Platforms**: In a decentralized derivatives > platform, complex transactions might need to be executed based on specific @@ -115,30 +115,55 @@ From the
### Considerations
-Durable transactions should be treated with care, and are why you should always
-trust the transactions you sign.
-
-Say you blindly signed a malicious durable transaction. This transaction signs
-away 500 SOL to the attacker, and changes the nonce authority to said attacker.
-Let's say you don't have this much yet, but in the future, you would. This is
-insidious, as the attacker would wait to cash this check as soon as your balance
-goes above 500 SOL. And you'll have no recollection of what you clicked on. It
-can lay dormant for days, weeks, or years.
-
-This is not meant to provoke hysteria, just as a PSA of what's possible. This is
-why you should only put into hot wallets what you're willing to lose and don't
-sign with your cold wallet.
+Durable transactions should be treated with care, which is why users should
+always trust the transactions they sign.
+
+As developers, it's important to inform users that their durable nonce
+transactions may be flagged by wallets. Durable nonces are often used for
+malicious transactions, and understanding the risks can help users make informed
+decisions.
+
+For example, imagine a user blindly signed a malicious durable transaction. This
+transaction could sign away 500 SOL to an attacker and change the nonce
+authority to the attacker as well. Even if the user doesn't have this amount
+yet, the attacker could wait to cash this check as soon as the user's balance
+exceeds 500 SOL.
+The user would have no recollection of what they clicked on,
+and the transaction could remain dormant for days, weeks, or years.
+
+To mitigate these risks, developers should educate users on the following
+points:
+
+1. **Trust the Source**: Users should only sign transactions from trusted
+ sources. Encourage users to verify the origin of the transaction before
+ signing.
+2. **Use Hot Wallets Cautiously**: Users should only keep in hot wallets what
+ they're willing to lose. Hot wallets are more susceptible to attacks, so it's
+ wise to limit the amount of funds stored in them.
+3. **Protect Cold Wallets**: Users should avoid signing transactions with their
+ cold wallets unless absolutely necessary. Cold wallets are more secure and
+ should be used to store larger amounts of funds.
+4. **Monitor Transactions**: Encourage users to regularly monitor their
+ transaction history and account balances. Promptly reporting any suspicious
+ activity can help mitigate potential losses.
+
+By providing this information, developers can help users understand the
+potential dangers of durable nonce transactions and take appropriate
+precautions. This is not meant to provoke hysteria but serves to show what's
+possible and emphasize the importance of security in handling durable
+transactions.
-### Using Durable nonces overcome the short lifespan of regular transactions
+### Using Durable Nonces to Overcome the Short Lifespan of Regular Transactions
-Durable nonces are a way to sign transactions off-chain and keep them in storage
-until they are ready to be submitted to the network. And this allows us to
-create durable transactions.
+Durable nonces are a way to sign transactions offchain and keep them in storage
+until they are ready to be submitted to the network. This allows us to create
+durable transactions.
Durable nonces, which are 32 bytes in length (usually represented as base58
-encoded strings), are used in place of recent blockhashes to make every
-transaction unique (to avoid double-spending) while removing the mortality on
-the unexecuted transaction.
+encoded strings), are used in place of recent blockhashes to make each
+transaction unique (to avoid double-spending) while removing the expiration of
+the unexecuted transaction.
If nonces are used in place of recent blockhashes, the first instruction of the transaction needs to be a `nonceAdvance` instruction, which changes or advances @@ -150,55 +175,55 @@ It is important to note that durable nonces require a nonce account to function, thus they have some special rules that don't apply normally. We'll see this as we deep dive into the technicals.
-### Durable nonces in-depth
+### Durable Nonces In-Depth
Durable transactions differ from regular transactions in the following ways:
1. Durable Nonces replace the recent blockhash with a nonce. This nonce is
- stored in a `nonce account` and will be used only once in one transaction.
- The nonce is a unique blockhash.
-2. Each durable transaction must start with the `nonce advance instruction`,
- which will change the nonce in the `nonce account`. This will ensure that the
- nonce is unique and can't be used again in another transaction.
+ stored in a nonce account and will be used only once in one transaction. The
+ nonce is a unique blockhash.
+2. Each durable transaction must start with the `nonceAdvance` instruction,
+ which will change the nonce in the nonce account. This ensures that the nonce
+ is unique and cannot be reused in another transaction (see the sketch below).
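Putting those two rules together, a durable transaction is assembled roughly like this. This is a minimal sketch, assuming an existing `connection` plus a funded `payer`, and a nonce account (`nonceKeypair`) that was already initialized with `nonceAuthority` as its authority:

```typescript
import {
  Connection,
  Keypair,
  NonceAccount,
  SystemProgram,
  Transaction,
} from "@solana/web3.js";

// Assumed to exist already; the sections below show how to create them
declare const connection: Connection;
declare const payer: Keypair;
declare const nonceAuthority: Keypair;
declare const nonceKeypair: Keypair;

const accountInfo = await connection.getAccountInfo(nonceKeypair.publicKey);
const nonceAccount = NonceAccount.fromAccountData(accountInfo!.data);

const transaction = new Transaction();
transaction.feePayer = payer.publicKey;

// Rule 1: the stored nonce value stands in for the recent blockhash
transaction.recentBlockhash = nonceAccount.nonce;

// Rule 2: the first instruction must advance the nonce,
// and the nonce authority must sign the transaction
transaction.add(
  SystemProgram.nonceAdvance({
    authorizedPubkey: nonceAuthority.publicKey,
    noncePubkey: nonceKeypair.publicKey,
  }),
);

// ...any other instructions follow; then sign with all required signers
transaction.sign(payer, nonceAuthority);
```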
-The nonce account is an account that holds a couple of values:
+The nonce account is an account that holds the following values:
1. nonce value: the nonce value that will be used in the transaction. 2. authority: the public key that can change the nonce value. 3. fee calculator: the fee calculator for the transaction.
-Again, every durable transaction must start with the `nonce advance instruction`
+Again, every durable transaction must start with the `nonceAdvance` instruction,
and the `authority` must be a signer.
Lastly, there is a special rule - if a durable transaction fails because of any
-instruction other than the `nonce advance instruction`, the nonce will still
+instruction other than the `nonceAdvance` instruction, the nonce will still
advance, while the rest of the transaction is rolled back. This behavior is unique only to durable nonces.
-### Durable nonce operations
+### Durable Nonce Operations
Durable nonces have a few helpers and constants in the `@solana/web3.js` package:
-1. `SystemProgram.nonceInitialize`: This instruction will create a new nonce
+1. `SystemProgram.nonceInitialize`: This instruction creates a new nonce
account.
-2. `SystemProgram.nonceAdvance`: This instruction will change the Nonce in the
+2. `SystemProgram.nonceAdvance`: This instruction changes the nonce in the
nonce
+ account.
+3. `SystemProgram.nonceWithdraw`: This instruction withdraws funds from the
+ nonce account. To delete the nonce account, withdraw all the funds from it.
+4. `SystemProgram.nonceAuthorize`: This instruction changes the authority of the
nonce account.
-3. `SystemProgram.nonceWithdraw`: This instruction will withdraw the funds from
- the nonce account, to delete the nonce account withdraw all the funds in it.
-4. `SystemProgram.nonceAuthorize`: This instruction will change the Authority of
- the nonce account.
-5. `NONCE_ACCOUNT_LENGTH`: a constant that represents the length of the nonce
+5. `NONCE_ACCOUNT_LENGTH`: A constant that represents the length of the nonce
account data.
-6. `NonceAccount`: a class that represents the nonce account, it contains a
+6. `NonceAccount`: A class that represents the nonce account. It contains a
static function `fromAccountData` that can take the nonce account data and return a nonce account object.
Let's look into each one of the helper functions in detail.
-#### Using nonceInitialize
+#### `nonceInitialize`
-The `nonceInitialize` instruction is used to create a new nonce account, it
+The `nonceInitialize` instruction is used to create a new nonce account. It
takes two parameters:
1. `noncePubkey`: the public key of the nonce account. @@ -206,16 +231,19 @@ takes two parameters:
Here is a code example for it:
-```ts
+```typescript
// 1. Generate/get a keypair for the nonce account, and the authority.
const [nonceKeypair, nonceAuthority] = makeKeypairs(2); // from '@solana-developers/helpers'
+// Calculate the minimum balance required for rent exemption
+const rentExemptBalance =
+ await connection.getMinimumBalanceForRentExemption(NONCE_ACCOUNT_LENGTH);
const tx = new Transaction().add(
- // 2. Allocate the account and transfer funds to it (the least amount is 0.0015 SOL)
+ // 2. Allocate the account and transfer funds to it (the rent-exempt balance)
SystemProgram.createAccount({ fromPubkey: payer.publicKey, newAccountPubkey: nonceKeypair.publicKey,
- lamports: 0.0015 * LAMPORTS_PER_SOL,
+ lamports: rentExemptBalance,
space: NONCE_ACCOUNT_LENGTH, programId: SystemProgram.programId, }), @@ -226,7 +254,7 @@ const tx = new Transaction().add( }), );
-// send the transaction
+// Send the transaction
await sendAndConfirmTransaction(connection, tx, [payer, nonceKeypair]); ```
@@ -243,7 +271,7 @@ takes two parameters:
Here is a code example for it:
-```ts
+```typescript
const instruction = SystemProgram.nonceAdvance({ authorizedPubkey: nonceAuthority.publicKey, noncePubkey: nonceKeypair.publicKey, });
@@ -268,7 +296,7 @@ four parameters:
Here is a code example for it:
-```ts
+```typescript
const instruction = SystemProgram.nonceWithdraw({ noncePubkey: nonceKeypair.publicKey, toPubkey: payer.publicKey,
@@ -293,7 +321,7 @@ three parameters:
Here is a code example for it:
-```ts
+```typescript
const instruction = SystemProgram.nonceAuthorize({ noncePubkey: nonceKeypair.publicKey, authorizedPubkey: nonceAuthority.publicKey,
@@ -317,7 +345,7 @@ We'll discuss:
We can fetch the nonce account to get the nonce value by fetching the account and deserializing its data:
-```ts
+```typescript
const nonceAccount = await connection.getAccountInfo(nonceKeypair.publicKey);
const nonce = NonceAccount.fromAccountData(nonceAccount.data);
@@ -335,7 +363,7 @@ After building and signing the transaction we can serialize it and encode
it into a base58 string, and we can save this string in some store to submit it later.
-```ts
+```typescript
// Assemble the durable transaction
const durableTx = new Transaction();
durableTx.feePayer = payer.publicKey;
@@ -374,7 +402,7 @@ const serializedTx = base58.encode(
Now that we have a base58 encoded transaction, we can decode it and submit it:
-```ts
+```typescript
const tx = base58.decode(serializedTx);
const sig = await sendAndConfirmRawTransaction(connection, tx as Buffer);
```
@@ -424,7 +452,7 @@ git checkout starter
npm install ```
-In the starter code you will find a file inside `test/index.ts`, with a testing
+In the starter code, you will find a file inside `test/index.ts` with a testing
skeleton; we'll write all of our code here.
We're going to use the local validator for this lab. However, feel free to use @@ -432,8 +460,7 @@ devnet if you'd like. (If you have issues airdropping on devnet, check out [Solana's Faucet](https://faucet.solana.com/).)
To run the local validator, you'll need to have it installed. If you don't, you
-can refer to
-[installing the Solana CLI](https://docs.solanalabs.com/cli/install), once you
+can refer to [installing the Solana CLI](/docs/intro/installation.md). Once you
install the CLI, you'll have access to the `solana-test-validator`.
In a separate terminal run:
@@ -450,7 +477,7 @@ We'll discuss each test case in depth.
### 1. Create the nonce account
Before we write any tests, let's create a helper function above the `describe`
-block, called `createNonceAccount`.
+block called `createNonceAccount`.
It will take the following parameters:
@@ -470,20 +497,22 @@ It will:
Paste the following somewhere above the `describe` block.
-```ts
+```typescript
async function createNonceAccount( connection: Connection, payer: Keypair, nonceKeypair: Keypair, authority: PublicKey, ) {
+ const rentExemptBalance =
+ await connection.getMinimumBalanceForRentExemption(NONCE_ACCOUNT_LENGTH);
// 2.
Assemble and submit a transaction that will: const tx = new Transaction().add( // 2.1. Allocate the account that will be the nonce account. SystemProgram.createAccount({ fromPubkey: payer.publicKey, newAccountPubkey: nonceKeypair.publicKey, - lamports: 0.0015 * LAMPORTS_PER_SOL, + lamports: rentExemptBalance, space: NONCE_ACCOUNT_LENGTH, programId: SystemProgram.programId, }), @@ -498,10 +527,7 @@ async function createNonceAccount( payer, nonceKeypair, ]); - console.log( - "Creating Nonce TX:", - `https://explorer.solana.com/tx/${sig}?cluster=custom&customUrl=http%3A%2F%2Flocalhost%3A8899`, - ); + console.log("Creating Nonce TX:", getExplorerLink("tx", sig, "localnet")); // 3. Fetch the nonce account. const accountInfo = await connection.getAccountInfo(nonceKeypair.publicKey); @@ -532,17 +558,18 @@ To create and submit a durable transaction we must follow these steps: We can put all of this together in our first test: -```ts +```typescript it("Creates a durable transaction and submits it", async () => { + // Step 1: Initialize the payer const payer = await initializeKeypair(connection, { - airdropAmount: 3 * LAMPORTS_PER_SOL, - minimumBalance: 1 * LAMPORTS_PER_SOL, + airdropAmount: AIRDROP_AMOUNT, + minimumBalance: MINIMUM_BALANCE, }); - // 1. Create a Durable Transaction. + // Step 1.1: Create keypairs for nonce account and recipient const [nonceKeypair, recipient] = makeKeypairs(2); - // 1.1 Create the nonce account. + // Step 1.2: Create the nonce account const nonceAccount = await createNonceAccount( connection, payer, @@ -550,14 +577,14 @@ it("Creates a durable transaction and submits it", async () => { payer.publicKey, ); - // 1.2 Create a new Transaction. + // Step 1.3: Create a new transaction const durableTx = new Transaction(); durableTx.feePayer = payer.publicKey; - // 1.3 Set the recentBlockhash to be the nonce value. + // Step 1.4: Set the recentBlockhash to the nonce value from the nonce account durableTx.recentBlockhash = nonceAccount.nonce; - // 1.4 Add the `nonceAdvance` instruction as the first instruction in the transaction. + // Step 1.5: Add the `nonceAdvance` instruction as the first instruction durableTx.add( SystemProgram.nonceAdvance({ authorizedPubkey: payer.publicKey, @@ -565,39 +592,38 @@ it("Creates a durable transaction and submits it", async () => { }), ); - // 1.5 Add the transfer instruction (you can add any instruction you want here). + // Step 1.6: Add the transfer instruction durableTx.add( SystemProgram.transfer({ fromPubkey: payer.publicKey, toPubkey: recipient.publicKey, - lamports: 0.1 * LAMPORTS_PER_SOL, + lamports: TRANSFER_AMOUNT, }), ); - // 1.6 Sign the transaction with the keyPairs that need to sign it, and make sure to add the nonce authority as a signer as well. - // In this particular example the nonce auth is the payer, and the only signer needed for our transfer instruction is the payer as well, so the payer here as a sign is sufficient. - durableTx.sign(payer); + // Step 1.7: Sign the transaction with the payer's keypair + await durableTx.partialSign(payer); - // 1.7 Serialize the transaction and encode it. - const serializedTx = base58.encode( - durableTx.serialize({ requireAllSignatures: false }), - ); - // 1.8 At this point you have a durable transaction, you can store it in a database or a file or send it somewhere else, etc. 
- // ----------------------------------------------------------------
+ // Step 1.8: Serialize the transaction (base64 encoding for easier handling)
+ const serializedTx = durableTx
+ .serialize({ requireAllSignatures: false })
+ .toString("base64");
- // 2. Submit the durable transaction.
- // 2.1 Decode the serialized transaction.
- const tx = base58.decode(serializedTx);
+ // Step 1.9: At this point, you can store the durable transaction for future use.
+ // ------------------------------------------------------------------
+
+ // Step 2: Submit the durable transaction
- // 2.2 Submit it using the `sendAndConfirmRawTransaction` function.
- const sig = await sendAndConfirmRawTransaction(connection, tx as Buffer, {
+ // Step 2.1: Decode the serialized transaction
+ const tx = Buffer.from(serializedTx, "base64");
+
+ // Step 2.2: Submit the transaction using `sendAndConfirmRawTransaction`
+ const sig = await sendAndConfirmRawTransaction(connection, tx, {
skipPreflight: true, });
- console.log(
- "Transaction Signature:",
- `https://explorer.solana.com/tx/${sig}?cluster=custom&customUrl=http%3A%2F%2Flocalhost%3A8899`,
- );
+ // Step 2.3: Generate and log the explorer link using `getExplorerLink`
+ console.log("Transaction Signature:", getExplorerLink("tx", sig, "localnet"));
}); ```
@@ -606,7 +632,7 @@ it("Creates a durable transaction and submits it", async () => {
Because we are using the nonce in place of the recent blockhash, the system will check to ensure that the nonce we provided matches the nonce in the `nonce_account`. Additionally, with each transaction, we need to add the `nonceAdvance` instruction as the first instruction. This ensures that if the transaction goes through, the nonce will change, and no one will be able to submit it twice.
Here is what we'll test:
2. Advance the nonce. 3. Try to submit the transaction, and it should fail.
-```ts
+```typescript
it("Fails if the nonce has advanced", async () => {
- const payer = await initializeKeypair(connection, {
- airdropAmount: 3 * LAMPORTS_PER_SOL,
- minimumBalance: 1 * LAMPORTS_PER_SOL,
- });
-
- const [nonceKeypair, nonceAuthority, recipient] = makeKeypairs(3);
-
- // 1. Create a Durable Transaction.
- const nonceAccount = await createNonceAccount(
- connection,
- payer,
- nonceKeypair,
- nonceAuthority.publicKey,
- );
-
- const durableTx = new Transaction();
- durableTx.feePayer = payer.publicKey;
-
- // use the nonceAccount's stored nonce as the recentBlockhash
- durableTx.recentBlockhash = nonceAccount.nonce;
-
- // make a nonce advance instruction
- durableTx.add(
- SystemProgram.nonceAdvance({
- authorizedPubkey: nonceAuthority.publicKey,
- noncePubkey: nonceKeypair.publicKey,
- }),
- );
-
- durableTx.add(
- SystemProgram.transfer({
- fromPubkey: payer.publicKey,
- toPubkey: recipient.publicKey,
- lamports: 0.1 * LAMPORTS_PER_SOL,
- }),
- );
-
- // sign the tx with both the payer and nonce authority's keypair
- durableTx.sign(payer, nonceAuthority);
-
- // once you have the signed tx, you can serialize it and store it in a database, or send it to another device
- const serializedTx = base58.encode(
- durableTx.serialize({ requireAllSignatures: false }),
- );
-
- // 2.
Advance the nonce - const nonceAdvanceSig = await sendAndConfirmTransaction( - connection, - new Transaction().add( + try { + const payer = await initializeKeypair(connection, { + airdropAmount: AIRDROP_AMOUNT, + minimumBalance: MINIMUM_BALANCE, + }); + + const [nonceKeypair, nonceAuthority, recipient] = makeKeypairs(3); + + // Step 1: Create a Durable Transaction + const nonceAccount = await createNonceAccount( + connection, + payer, + nonceKeypair, + nonceAuthority.publicKey, + ); + + const durableTransaction = new Transaction(); + durableTransaction.feePayer = payer.publicKey; + durableTransaction.recentBlockhash = nonceAccount.nonce; + + // Add a nonce advance instruction + durableTransaction.add( SystemProgram.nonceAdvance({ - noncePubkey: nonceKeypair.publicKey, authorizedPubkey: nonceAuthority.publicKey, + noncePubkey: nonceKeypair.publicKey, }), - ), - [payer, nonceAuthority], - ); - - console.log( - "Nonce Advance Signature:", - `https://explorer.solana.com/tx/${nonceAdvanceSig}?cluster=custom&customUrl=http%3A%2F%2Flocalhost%3A8899`, - ); - - const tx = base58.decode(serializedTx); - - // 3. Try to submit the transaction, and it should fail. - await assert.rejects(sendAndConfirmRawTransaction(connection, tx as Buffer)); + ); + + // Add a transfer instruction + durableTransaction.add( + SystemProgram.transfer({ + fromPubkey: payer.publicKey, + toPubkey: recipient.publicKey, + lamports: TRANSFER_AMOUNT, + }), + ); + + // Sign the transaction with both the payer and nonce authority's keypairs + await durableTransaction.partialSign(payer, nonceAuthority); + + // Serialize the transaction (in base64 format for simplicity) + const serializedTransaction = durableTransaction + .serialize({ requireAllSignatures: false }) + .toString("base64"); + + // Step 2: Advance the nonce + const nonceAdvanceSignature = await sendAndConfirmTransaction( + connection, + new Transaction().add( + SystemProgram.nonceAdvance({ + noncePubkey: nonceKeypair.publicKey, + authorizedPubkey: nonceAuthority.publicKey, + }), + ), + [payer, nonceAuthority], + ); + + // Using getExplorerLink from solana-helpers + console.log( + "Nonce Advance Signature:", + getExplorerLink("tx", nonceAdvanceSignature, "localnet"), + ); + + // Deserialize the transaction + const deserializedTransaction = Buffer.from( + serializedTransaction, + "base64", + ); + + // Step 3: Try to submit the transaction, expecting it to fail due to nonce advancement + await assert.rejects( + sendAndConfirmRawTransaction(connection, deserializedTransaction), + ); + } catch (error) { + console.error("Test failed:", error); + throw error; + } }); ``` @@ -700,7 +737,7 @@ transaction to transfer 50 SOL from the payer to the recipient. However, the payer doesn't have enough SOL for the transfer, so the transaction will fail, but the nonce will still advance. 
-```ts +```typescript it("Advances the nonce account even if the transaction fails", async () => { const TRANSFER_AMOUNT = 50; const payer = await initializeKeypair(connection, { @@ -708,9 +745,10 @@ it("Advances the nonce account even if the transaction fails", async () => { minimumBalance: 1 * LAMPORTS_PER_SOL, }); + // Generate keypairs for nonce account, nonce authority, and recipient const [nonceKeypair, nonceAuthority, recipient] = makeKeypairs(3); - // Create the nonce account + // Step 1: Create the nonce account const nonceAccount = await createNonceAccount( connection, payer, @@ -721,23 +759,23 @@ it("Advances the nonce account even if the transaction fails", async () => { console.log("Nonce Before Advancing:", nonceBeforeAdvancing); - // Assemble a durable transaction that will fail - + // Step 2: Check payer's balance to ensure it doesn't have enough to transfer const balance = await connection.getBalance(payer.publicKey); - // making sure that we don't have 50 SOL in the account + // Ensure the balance is less than the transfer amount (50 SOL) assert( balance < TRANSFER_AMOUNT * LAMPORTS_PER_SOL, - `Too much balance, try to change the transfer amount constant 'TRANSFER_AMOUNT' at the top of the function to be more than ${balance / LAMPORTS_PER_SOL}`, + `Balance too high! Adjust 'TRANSFER_AMOUNT' to be higher than the current balance of ${balance / LAMPORTS_PER_SOL} SOL.`, ); + // Step 3: Create a durable transaction that will fail const durableTx = new Transaction(); durableTx.feePayer = payer.publicKey; - // use the nonceAccount's stored nonce as the recentBlockhash + // Set the recent blockhash to the nonce value from the nonce account durableTx.recentBlockhash = nonceAccount.nonce; - // make a nonce advance instruction + // Step 4: Add the nonce advance instruction as the first instruction durableTx.add( SystemProgram.nonceAdvance({ authorizedPubkey: nonceAuthority.publicKey, @@ -745,8 +783,7 @@ it("Advances the nonce account even if the transaction fails", async () => { }), ); - // Transfer 50 sols instruction - // This will fail because the account doesn't have enough balance + // Step 5: Add a transfer instruction that will fail (since the payer has insufficient funds) durableTx.add( SystemProgram.transfer({ fromPubkey: payer.publicKey, @@ -755,24 +792,24 @@ it("Advances the nonce account even if the transaction fails", async () => { }), ); - // sign the tx with both the payer and nonce authority's keypair + // Step 6: Sign the transaction with both the payer and nonce authority durableTx.sign(payer, nonceAuthority); - // once you have the signed tx, you can serialize it and store it in a database, or send it to another device + // Serialize the transaction and store or send it (if needed) const serializedTx = base58.encode( durableTx.serialize({ requireAllSignatures: false }), ); const tx = base58.decode(serializedTx); - // assert the promise to throw an error + // Step 7: Send the transaction and expect it to fail (due to insufficient funds) await assert.rejects( sendAndConfirmRawTransaction(connection, tx as Buffer, { - // If we don't skip preflight this transaction will never reach the network, and the library will reject it and throw an error, therefore it will fail but the nonce will not advance - skipPreflight: true, + skipPreflight: true, // Ensure the transaction reaches the network despite the expected failure }), ); + // Step 8: Fetch the nonce account again after the failed transaction const nonceAccountAfterAdvancing = await connection.getAccountInfo( 
nonceKeypair.publicKey, ); @@ -780,7 +817,7 @@ it("Advances the nonce account even if the transaction fails", async () => { nonceAccountAfterAdvancing!.data, ).nonce; - // We can see that even though the transitions failed, the nonce has advanced + // Step 9: Verify that the nonce has advanced even though the transaction failed assert.notEqual(nonceBeforeAdvancing, nonceAfterAdvancing); }); ``` @@ -805,16 +842,18 @@ authority did not sign the transaction. Let's see this in action. -```ts -it("The nonce account will not advance if the transaction fails because the nonce auth did not sign the transaction", async () => { +```typescript +it("The nonce account will not advance if the transaction fails because the nonce authority did not sign the transaction", async () => { + // Step 1: Initialize payer with SOL airdrop const payer = await initializeKeypair(connection, { airdropAmount: 3 * LAMPORTS_PER_SOL, minimumBalance: 1 * LAMPORTS_PER_SOL, }); + // Step 2: Generate keypairs for nonce account, nonce authority, and recipient const [nonceKeypair, nonceAuthority, recipient] = makeKeypairs(3); - // Create the nonce account + // Step 3: Create the nonce account const nonceAccount = await createNonceAccount( connection, payer, @@ -825,15 +864,14 @@ it("The nonce account will not advance if the transaction fails because the nonc console.log("Nonce before submitting:", nonceBeforeAdvancing); - // Assemble a durable transaction that will fail - + // Step 4: Create a durable transaction that will fail (due to missing nonce authority signature) const durableTx = new Transaction(); durableTx.feePayer = payer.publicKey; - // use the nonceAccount's stored nonce as the recentBlockhash + // Use the nonce account's stored nonce as the recent blockhash durableTx.recentBlockhash = nonceAccount.nonce; - // make a nonce advance instruction + // Add nonce advance instruction durableTx.add( SystemProgram.nonceAdvance({ authorizedPubkey: nonceAuthority.publicKey, @@ -841,6 +879,7 @@ it("The nonce account will not advance if the transaction fails because the nonc }), ); + // Add transfer instruction durableTx.add( SystemProgram.transfer({ fromPubkey: payer.publicKey, @@ -849,23 +888,25 @@ it("The nonce account will not advance if the transaction fails because the nonc }), ); - // sign the tx with the payer keypair + // Sign the transaction only with the payer, omitting nonce authority signature (this will cause the failure) durableTx.sign(payer); - // once you have the signed tx, you can serialize it and store it in a database, or send it to another device + // Step 5: Serialize the transaction const serializedTx = base58.encode( durableTx.serialize({ requireAllSignatures: false }), ); + // Decode the serialized transaction const tx = base58.decode(serializedTx); - // assert the promise to throw an error + // Step 6: Send the transaction and expect it to fail (due to missing nonce authority signature) await assert.rejects( sendAndConfirmRawTransaction(connection, tx as Buffer, { - skipPreflight: true, + skipPreflight: true, // Ensure the transaction reaches the network despite the expected failure }), ); + // Step 7: Fetch the nonce account again after the failed transaction const nonceAccountAfterAdvancing = await connection.getAccountInfo( nonceKeypair.publicKey, ); @@ -873,7 +914,7 @@ it("The nonce account will not advance if the transaction fails because the nonc nonceAccountAfterAdvancing!.data, ).nonce; - // We can see that the nonce did not advance, because the error was in the nonce advance instruction + // 
Step 8: Verify that the nonce has not advanced, as the failure was due to the nonce advance instruction assert.equal(nonceBeforeAdvancing, nonceAfterAdvancing); }); ``` @@ -884,107 +925,114 @@ The last test case we'll go over is creating a durable transaction. Try to send it with the wrong nonce authority (it will fail). Change the nonce authority and send it with the correct one this time and it will succeed. -```ts -it("Submits after changing the nonce auth to an already signed address", async () => { - const payer = await initializeKeypair(connection, { - airdropAmount: 3 * LAMPORTS_PER_SOL, - minimumBalance: 1 * LAMPORTS_PER_SOL, - }); - - const [nonceKeypair, nonceAuthority, recipient] = makeKeypairs(3); - - // Create the nonce account - const nonceAccount = await createNonceAccount( - connection, - payer, - nonceKeypair, - nonceAuthority.publicKey, - ); - const nonceBeforeAdvancing = nonceAccount.nonce; - - console.log("Nonce before submitting:", nonceBeforeAdvancing); - - // Assemble a durable transaction that will fail - - const durableTx = new Transaction(); - durableTx.feePayer = payer.publicKey; - - // use the nonceAccount's stored nonce as the recentBlockhash - durableTx.recentBlockhash = nonceAccount.nonce; - - // make a nonce advance instruction - durableTx.add( - SystemProgram.nonceAdvance({ - // The nonce auth is not the payer at this point in time, so the transaction will fail - // But in the future we can change the nonce auth to be the payer and submit the transaction whenever we want - authorizedPubkey: payer.publicKey, - noncePubkey: nonceKeypair.publicKey, - }), - ); - - durableTx.add( - SystemProgram.transfer({ - fromPubkey: payer.publicKey, - toPubkey: recipient.publicKey, - lamports: 0.1 * LAMPORTS_PER_SOL, - }), - ); - - // sign the tx with the payer keypair - durableTx.sign(payer); - - // once you have the signed tx, you can serialize it and store it in a database, or send it to another device - const serializedTx = base58.encode( - durableTx.serialize({ requireAllSignatures: false }), - ); - - const tx = base58.decode(serializedTx); - - // assert the promise to throw an error - // It will fail because the nonce auth is not the payer - await assert.rejects( - sendAndConfirmRawTransaction(connection, tx as Buffer, { - skipPreflight: true, - }), - ); - - const nonceAccountAfterAdvancing = await connection.getAccountInfo( - nonceKeypair.publicKey, - ); - const nonceAfterAdvancing = NonceAccount.fromAccountData( - nonceAccountAfterAdvancing!.data, - ).nonce; - - // We can see that the nonce did not advance, because the error was in the nonce advance instruction - assert.equal(nonceBeforeAdvancing, nonceAfterAdvancing); - - // Now we can change the nonce auth to be the payer - const nonceAuthSig = await sendAndConfirmTransaction( - connection, - new Transaction().add( - SystemProgram.nonceAuthorize({ +```typescript +it("Submits after changing the nonce authority to an already signed address", async () => { + try { + // Step 1: Initialize payer with an airdrop + const payer = await initializeKeypair(connection, { + airdropAmount: AIRDROP_AMOUNT, + minimumBalance: MINIMUM_BALANCE, + }); + + // Step 2: Generate keypairs for nonce account, nonce authority, and recipient + const [nonceKeypair, nonceAuthority, recipient] = makeKeypairs(3); + + // Step 3: Create the nonce account + const nonceAccount = await createNonceAccount( + connection, + payer, + nonceKeypair, + nonceAuthority.publicKey, + ); + const nonceBeforeAdvancing = nonceAccount.nonce; + + console.log("Nonce 
before submitting:", nonceBeforeAdvancing); + + // Step 4: Create a durable transaction that will initially fail + const durableTransaction = new Transaction(); + durableTransaction.feePayer = payer.publicKey; + + // Use the nonceAccount's stored nonce as the recent blockhash + durableTransaction.recentBlockhash = nonceAccount.nonce; + + // Add nonce advance instruction + durableTransaction.add( + SystemProgram.nonceAdvance({ + authorizedPubkey: payer.publicKey, // should be nonce authority, will fail noncePubkey: nonceKeypair.publicKey, - authorizedPubkey: nonceAuthority.publicKey, - newAuthorizedPubkey: payer.publicKey, }), - ), - [payer, nonceAuthority], - ); + ); + + // Add a transfer instruction + durableTransaction.add( + SystemProgram.transfer({ + fromPubkey: payer.publicKey, + toPubkey: recipient.publicKey, + lamports: TRANSACTION_LAMPORTS, + }), + ); - console.log( - "Nonce Auth Signature:", - `https://explorer.solana.com/tx/${nonceAuthSig}?cluster=custom&customUrl=http%3A%2F%2Flocalhost%3A8899`, - ); + // Sign the transaction with the payer + durableTransaction.sign(payer); - // At any time in the future we can submit the transaction and it will go through - const txSig = await sendAndConfirmRawTransaction(connection, tx as Buffer, { - skipPreflight: true, - }); + // Step 5: Serialize and store the transaction + const serializedTransaction = base58.encode( + durableTransaction.serialize({ requireAllSignatures: false }), + ); - console.log( - "Transaction Signature:", - `https://explorer.solana.com/tx/${txSig}?cluster=custom&customUrl=http%3A%2F%2Flocalhost%3A8899`, - ); + const deserializedTx = base58.decode(serializedTransaction); + + // Step 6: Attempt to send the transaction, expect it to fail (due to incorrect authority) + await assert.rejects( + sendAndConfirmRawTransaction(connection, deserializedTx as Buffer, { + skipPreflight: true, // Ensures the transaction hits the network despite failure + }), + ); + + // Step 7: Verify that the nonce did not advance after the failed transaction + const nonceAccountAfterAdvancing = await connection.getAccountInfo( + nonceKeypair.publicKey, + ); + const nonceAfterAdvancing = NonceAccount.fromAccountData( + nonceAccountAfterAdvancing!.data, + ).nonce; + assert.equal(nonceBeforeAdvancing, nonceAfterAdvancing); + + // Step 8: Change the nonce authority to the payer + const nonceAuthSignature = await sendAndConfirmTransaction( + connection, + new Transaction().add( + SystemProgram.nonceAuthorize({ + noncePubkey: nonceKeypair.publicKey, + authorizedPubkey: nonceAuthority.publicKey, + newAuthorizedPubkey: payer.publicKey, // changing authority to payer + }), + ), + [payer, nonceAuthority], + ); + + console.log( + "Nonce Auth Signature:", + getExplorerLink("tx", nonceAuthSignature, "localnet"), + ); + + // Step 9: Submit the transaction again, which should now succeed + const transactionSignature = await sendAndConfirmRawTransaction( + connection, + deserializedTx as Buffer, + { + skipPreflight: true, // Ensures submission without preflight checks + }, + ); + + console.log( + "Transaction Signature:", + getExplorerLink("tx", transactionSignature, "localnet"), + ); + } catch (error) { + console.error("Test failed:", error); + throw error; + } }); ``` @@ -996,9 +1044,16 @@ Finally, let's run the tests: npm start ``` -Make sure they are all passing. +Ensure that all tests pass successfully. 
+
+For your reference, here is a screenshot showing the successful execution of the
+tests:
+
+![image](https://github.com/user-attachments/assets/03b2396a-f146-49e2-872b-6a657a209cd4)
+
+If you see this result, it means your durable nonce implementation is correct!
-And congratulations! You now know how durable nonces work!
+Congratulations! You now know how durable nonces work!
## Challenge
diff --git a/content/courses/onchain-development/anchor-cpi.md b/content/courses/onchain-development/anchor-cpi.md index bda672450..83286aebd 100644 --- a/content/courses/onchain-development/anchor-cpi.md +++ b/content/courses/onchain-development/anchor-cpi.md @@ -52,10 +52,11 @@ The `CpiContext` type specifies non-argument inputs for cross program invocations:
- `accounts` - the list of accounts required for the instruction being invoked
-- `remaining_accounts` - any remaining accounts
+- `remaining_accounts` - accounts that are not part of this instruction but may
+ be used elsewhere (for example, by inner instructions)
- `program` - the program ID of the program being invoked
-- `signer_seeds` - if a PDA is signing, include the seeds required to derive the
- PDA
+- `signer_seeds` - if one or more PDAs are signing the transaction, the seeds
+ required to derive the PDAs
```rust pub struct CpiContext<'a, 'b, 'c, 'info, T> @@ -114,21 +115,20 @@ pub fn new_with_signer(
#### CPI accounts
-One of the main things about `CpiContext` that simplifies cross-program
-invocations is that the `accounts` argument is a generic type that lets you pass
-in any object that adopts the `ToAccountMetas` and `ToAccountInfos<'info>`
-traits.
+One of the key features of `CpiContext` is that the `accounts` argument is
+generic, allowing you to pass in any object that implements the `ToAccountMetas`
+and `ToAccountInfos<'info>` traits.
-These traits are added by the `#[derive(Accounts)]` attribute macro that you've
-used before when creating structs to represent instruction accounts. That means
-you can use similar structs with `CpiContext`.
+These traits are added by the `#[derive(Accounts)]` attribute macro you've used
+before, to specify the accounts required by your instruction handlers. You can
+also use `#[derive(Accounts)]` structs with `CpiContext`.
This helps with code organization and type safety.
-#### Invoke an instruction on another Anchor program
+#### Invoke an instruction handler on another Anchor program
-When the program you're calling is an Anchor program with a published crate,
-Anchor can generate instruction builders and CPI helper functions for you.
+When calling another Anchor program with a published crate, Anchor can generate
+instruction builders and CPI helper functions for you.
Simply declare your program's dependency on the program you're calling in your program's `Cargo.toml` file as follows:
@@ -141,11 +141,11 @@ callee = { path = "../callee", features = ["cpi"]}
By adding `features = ["cpi"]`, you enable the `cpi` feature and your program gains access to the `callee::cpi` module.
-The `cpi` module exposes `callee`'s instructions as a Rust function that takes
-as arguments a `CpiContext` and any additional instruction data. These functions
-use the same format as the instruction functions in your Anchor programs, only
-with `CpiContext` instead of `Context`. The `cpi` module also exposes the
-accounts structs required for calling the instructions.
+The `cpi` module turns `callee`'s instruction handlers into Rust functions.
+These functions take a `CpiContext` and any extra data needed for the
+instruction. They work just like the instruction handlers in your Anchor
+programs, but use `CpiContext` instead of `Context`. The `cpi` module also
+provides the account structs needed for these instruction handlers.
For example, if `callee` has the instruction `do_something` that requires the accounts defined in the `DoSomething` struct, you could invoke `do_something` as @@ -180,14 +180,15 @@ pub mod lootbox_program {
When the program you're calling is _not_ an Anchor program, there are two possible options:
-1. It's possible that the program maintainers have published a crate with their
- own helper functions for calling into their program. For example, the
- `anchor_spl` crate provides helper functions that are virtually identical
- from a call-site perspective to what you would get with the `cpi` module of
- an Anchor program. E.g. you can mint using the
+1. The program maintainers may have published a crate with their own helper
+ functions for calling into their program. For example, the `anchor_spl` crate
+ provides virtually identical helper functions from a call-site perspective to
+ what you would get with the `cpi` module of an Anchor program. E.g. you can
+ mint using the
[`mint_to` helper function](https://docs.rs/anchor-spl/latest/src/anchor_spl/token.rs.html#36-58) and use the [`MintTo` accounts struct](https://docs.rs/anchor-spl/latest/anchor_spl/token/struct.MintTo.html).
+
```rust token::mint_to( CpiContext::new_with_signer( @@ -205,18 +206,20 @@ possible options: amount, )?; ```
+
2. If there is no helper module for the program whose instruction(s) you need to invoke, you can fall back to using `invoke` and `invoke_signed`. In fact, the source code of the `mint_to` helper function referenced above shows an example using `invoke_signed` when given a `CpiContext`. You can follow a
- similar pattern if you decide to use an accounts struct and `CpiContext` to
- organize and prepare your CPI.
+ similar pattern if you use an accounts struct and `CpiContext` to organize
+ and prepare your CPI.
+
```rust
- pub fn mint_to<'a, 'b, 'c, 'info>(
- ctx: CpiContext<'a, 'b, 'c, 'info, MintTo<'info>>,
+ pub fn mint_to<'info>(
+ ctx: CpiContext<'_, '_, '_, 'info, MintTo<'info>>,
amount: u64, ) -> Result<()> {
- let ix = spl_token::instruction::mint_to(
+ let instruction = spl_token::instruction::mint_to(
&spl_token::ID, ctx.accounts.mint.key, ctx.accounts.to.key, @@ -224,12 +227,12 @@ possible options: &[], amount, )?;
- solana_program::program::invoke_signed(
- &ix,
+ anchor_lang::solana_program::program::invoke_signed(
+ &instruction,
&[ ctx.accounts.to.clone(), ctx.accounts.mint.clone(), ctx.accounts.authority.clone(), ], ctx.signer_seeds, ) @@ -239,12 +242,12 @@ possible options:
### Throw errors in Anchor
-We're deep enough into Anchor at this point that it's important to know how to
+We're deep enough into Anchor at this point that it's essential to know how to
create custom errors.
-Ultimately, all programs return the same error
-type: [`ProgramError`](https://docs.rs/solana-program/latest/solana_program/program_error/enum.ProgramError.html).
+Ultimately, all programs return the same error type:
+[`ProgramError`](https://docs.rs/solana-program/latest/solana_program/program_error/enum.ProgramError.html).
+However, when writing a program using Anchor, you can use `AnchorError` as an
abstraction on top of `ProgramError`. This abstraction provides additional information when a program fails, including:
@@ -265,13 +268,13 @@ pub struct AnchorError {
Anchor Errors can be divided into:
- Anchor Internal Errors that the framework returns from inside its own code
-- Custom errors that you the developer can create
+- Custom errors that you, the developer, can create
You can add errors unique to your program by using the `error_code` attribute.
-Simply add this attribute to a custom `enum` type. You can then use the variants
-of the `enum` as errors in your program. Additionally, you can add an error
-message to each variant using the `msg` attribute. Clients can then display this
-error message if the error occurs.
+Simply add this attribute to a custom `enum` type. You can then use the `enum`
+variants as errors in your program. Additionally, you can add an error message
+to each variant using the `msg` attribute. Clients can then display this error
+message if the error occurs.
```rust #[error_code] pub enum MyError { @@ -281,11 +284,10 @@ pub enum MyError { }
-To return a custom error you can use
-the [err](https://docs.rs/anchor-lang/latest/anchor_lang/macro.err.html) or
-the [error](https://docs.rs/anchor-lang/latest/anchor_lang/prelude/macro.error.html)
-macro from an instruction function. These add file and line information to the
-error that is then logged by Anchor to help you with debugging.
+To return a custom error from an instruction handler you can use
+the [err](https://docs.rs/anchor-lang/latest/anchor_lang/macro.err.html) or [error](https://docs.rs/anchor-lang/latest/anchor_lang/prelude/macro.error.html)
+macro. These add helpful file and line information to the error that Anchor logs
+to help with debugging:
```rust #[program] @@ -331,10 +333,10 @@ pub enum MyError {
## Lab
-Let’s practice the concepts we’ve gone over in this lesson by building on top of
+Let's practice the concepts we've gone over in this lesson by building on top of
the Movie Review program from previous lessons.
-In this lab we’ll update the program to mint tokens to users when they submit a
+In this lab we'll update the program to mint tokens to users when they submit a
new movie review.
### Starter
To get started, we will be using the final state of the Anchor Movie Review
-program from the previous lesson. So, if you just completed that lesson then
-you’re all set and ready to go. If you are just jumping in here, no worries, you
+program from the previous lesson. So, if you just completed that lesson, then
+you're all set and ready to go. If you are just jumping in here, no worries, you
can [download the starter code](https://github.com/Unboxed-Software/anchor-movie-review-program/tree/solution-pdas).
We'll be using the `solution-pdas` branch as our starting point.
@@ -352,7 +354,7 @@ We'll be using the `solution-pdas` branch as our starting point.
Before we get started we need to enable the `init-if-needed` feature and add the `anchor-spl` crate to the dependencies in `Cargo.toml`. If you need to brush up on the `init-if-needed` feature take a look at the
-[Anchor PDAs and Accounts lesson](anchor-pdas).
+[Anchor PDAs and Accounts lesson](/content/courses/onchain-development/anchor-pdas.md).
```rust [dependencies] anchor-lang = { version = "0.30.1", features = ["init-if-needed"] } anchor-spl = "0.30.1" ```
+Since we are adding `anchor-spl` as a dependency, we also need to enable the
+`idl-build` feature for it in the features section of `Cargo.toml`. This is
+because all types used in the `Accounts` structures we add in this lesson
+require the `IdlBuild` trait implementation to generate an IDL.
+
+```rust
+[features]
+# All lines remain unchanged, except for this idl-build line
+idl-build = ["anchor-lang/idl-build", "anchor-spl/idl-build"]
+```
+
### Initialize reward token
Next, navigate to `lib.rs` and implement the `InitializeMint` context type and @@ -381,14 +394,14 @@ pub struct InitializeMint<'info> { bump, payer = user, mint::decimals = 6,
- mint::authority = mint,
+ mint::authority = user,
)] pub mint: Account<'info, Mint>, #[account(mut)] pub user: Signer<'info>, pub token_program: Program<'info, Token>, pub rent: Sysvar<'info, Rent>,
- pub system_program: Program<'info, System>
+ pub system_program: Program<'info, System>,
} ```
@@ -411,7 +424,7 @@ pub fn initialize_token_mint(_ctx: Context<InitializeMint>) -> Result<()> {
### Anchor Error
-Next, let’s create an Anchor Error that we’ll use to validate the following:
+Next, let's create an Anchor Error that we'll use to validate the following:
- The `rating` passed to either the `add_movie_review` or `update_movie_review` instruction.
@@ -433,7 +446,7 @@ enum MovieReviewError {
### Update add_movie_review instruction
-Now that we've done some setup, let’s update the `add_movie_review` instruction
+Now that we've done some setup, let's update the `add_movie_review` instruction
and `AddMovieReview` context type to mint tokens to the reviewer.
Next, update the `AddMovieReview` context type to add the following accounts:
@@ -455,7 +468,7 @@ pub struct AddMovieReview<'info> { seeds=[title.as_bytes(), initializer.key().as_ref()], bump, payer = initializer,
- space = MovieAccountState::INIT_SPACE + title.len() + description.len()
+ space = DISCRIMINATOR + MovieAccountState::INIT_SPACE
)] pub movie_review: Account<'info, MovieAccountState>, #[account(mut)] @@ -486,13 +499,7 @@ been initialized, it will be initialized as an associated token account for the specified mint and authority. Also, the payer for the costs related to the account initialization will be set under the constraint `payer`.
-If you're unfamiliar with the `INIT_SPACE` constant used for the `movie_review`
-account space allocation, please refer to the
-[`solution-pdas`](https://github.com/solana-foundation/developer-content/blob/4c8eada3053061e66b907c9b49701b064544681d/content/courses/onchain-development/anchor-pdas.md?plain=1#L467)
-branch that is being used as our starting point. In there, we discuss the
-implementation of the `Space` trait and the `INIT_SPACE` constant.
-
-Next, let’s update the `add_movie_review` instruction to do the following:
+Next, let's update the `add_movie_review` instruction to do the following:
- Check that `rating` is valid. If it is not a valid rating, return the `InvalidRating` error. `TitleTooLong` error. - Check that `description` length is valid. If it is not a valid length, return the `DescriptionTooLong` error.
-- Make a CPI to the token program’s `mint_to` instruction using the mint
+- Make a CPI to the token program's `mint_to` instruction using the mint
authority PDA as a signer. Note that we'll mint 10 tokens to the user but need to adjust for the mint decimals by making it `10*10^6`.
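To make that decimal adjustment concrete, here is the arithmetic as a quick Rust sketch (the constant names are hypothetical, for illustration only):

```rust
// With 6 decimals, the smallest on-chain unit is 10^-6 of a token, so
// minting "10 tokens" means minting 10 * 10^6 = 10,000,000 base units.
const TOKEN_DECIMALS: u32 = 6; // hypothetical helper constants
const REWARD_TOKENS: u64 = 10;

fn reward_amount() -> u64 {
    REWARD_TOKENS * 10u64.pow(TOKEN_DECIMALS) // 10_000_000
}
```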
@@ -520,15 +527,29 @@ use anchor_spl::associated_token::AssociatedToken;
Next, update the `add_movie_review` function to:
```rust
-pub fn add_movie_review(ctx: Context<AddMovieReview>, title: String, description: String, rating: u8) -> Result<()> {
+pub fn add_movie_review(
+ ctx: Context<AddMovieReview>,
+ title: String,
+ description: String,
+ rating: u8
+) -> Result<()> {
// We require that the rating is between 1 and 5
- require!(rating >= MIN_RATING && rating <= MAX_RATING, MovieReviewError::InvalidRating);
+ require!(
+ rating >= MIN_RATING && rating <= MAX_RATING,
+ MovieReviewError::InvalidRating
+ );
// We require that the title is not longer than 20 characters
- require!(title.len() <= MAX_TITLE_LENGTH, MovieReviewError::TitleTooLong);
+ require!(
+ title.len() <= MAX_TITLE_LENGTH,
+ MovieReviewError::TitleTooLong
+ );
// We require that the description is not longer than 50 characters
- require!(description.len() <= MAX_DESCRIPTION_LENGTH, MovieReviewError::DescriptionTooLong);
+ require!(
+ description.len() <= MAX_DESCRIPTION_LENGTH,
+ MovieReviewError::DescriptionTooLong
+ );
msg!("Movie review account created"); msg!("Title: {}", title); @@ -545,7 +566,7 @@ pub fn add_movie_review(ctx: Context<AddMovieReview>, title: String, description CpiContext::new_with_signer( ctx.accounts.token_program.to_account_info(), MintTo {
- authority: ctx.accounts.mint.to_account_info(),
+ authority: ctx.accounts.initializer.to_account_info(),
to: ctx.accounts.token_account.to_account_info(), mint: ctx.accounts.mint.to_account_info() }, @@ -568,12 +589,23 @@ pub fn add_movie_review(ctx: Context<AddMovieReview>, title: String, description
Here we are only adding the check that `rating` and `description` are valid.
```rust
-pub fn update_movie_review(ctx: Context<UpdateMovieReview>, title: String, description: String, rating: u8) -> Result<()> {
+pub fn update_movie_review(
+ ctx: Context<UpdateMovieReview>,
+ title: String,
+ description: String,
+ rating: u8
+) -> Result<()> {
// We require that the rating is between 1 and 5
- require!(rating >= MIN_RATING && rating <= MAX_RATING, MovieReviewError::InvalidRating);
+ require!(
+ rating >= MIN_RATING && rating <= MAX_RATING,
+ MovieReviewError::InvalidRating
+ );
// We require that the description is not longer than 50 characters
- require!(description.len() <= MAX_DESCRIPTION_LENGTH, MovieReviewError::DescriptionTooLong);
+ require!(
+ description.len() <= MAX_DESCRIPTION_LENGTH,
+ MovieReviewError::DescriptionTooLong
+ );
msg!("Movie review account space reallocated"); msg!("Title: {}", title); @@ -590,7 +622,7 @@ pub fn update_movie_review(ctx: Context<UpdateMovieReview>, title: String, descr
### Test
Those are all of the changes we need to make to the program! Now, let's update our tests.
Start by making sure your imports and `describe` function look like this:
@@ -640,7 +672,7 @@ it("Initializes the reward token", async () => { }); ```
-Notice that we didn't have to add `.accounts` because they call be inferred,
+Notice that we didn't have to add `.accounts` because they can be inferred,
including the `mint` account (assuming you have seed inference enabled).
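Seed inference is a project-level setting controlled from the `[features]` section of `Anchor.toml`. As a sketch, assuming a recent Anchor version that supports the flag (check your version's docs if the client still asks for explicit accounts):

```toml
# Anchor.toml
[features]
seeds = true
```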
The primary

@@ -682,7 +714,7 @@ After that, neither the test for `updateMovieReview` nor the test for

At this point, run `anchor test` and you should see the following output

-```shell
+```bash
anchor-movie-review-program
  ✔ Initializes the reward token (458ms)
  ✔ Movie review is added (410ms)

diff --git a/content/courses/onchain-development/anchor-pdas.md b/content/courses/onchain-development/anchor-pdas.md
index a1e704dbd..6df47a41a 100644
--- a/content/courses/onchain-development/anchor-pdas.md
+++ b/content/courses/onchain-development/anchor-pdas.md
@@ -24,20 +24,18 @@
In this lesson you'll learn how to work with PDAs, reallocate accounts, and
close accounts in Anchor.

Recall that Anchor programs separate instruction logic from account validation.
-Account validation primarily happens within structs that represent the list of
-accounts needed for a given instruction. Each field of the struct represents a
-different account, and you can customize the validation performed on the account
-using the `#[account(...)]` attribute macro.
+Account validation happens in structs that list the accounts needed for an
+instruction. Each field in the struct represents an account, and you can
+customize the validation using the `#[account(...)]` attribute macro.

-In addition to using constraints for account validation, some constraints can
-handle repeatable tasks that would otherwise require a lot of boilerplate inside
-our instruction logic. This lesson will introduce the `seeds`, `bump`,
-`realloc`, and `close` constraints to help you initialize and validate PDAs,
-reallocate accounts, and close accounts.
+In addition to validating accounts, some constraints can automate tasks that
+would otherwise require repetitive code in our instructions. This lesson will
+cover the `seeds`, `bump`, `realloc`, and `close` constraints to help you easily
+handle PDAs, reallocate space, and close accounts.

### PDAs with Anchor

-PDAs store data, at addressed specified by the onchain programmer, using a list
+PDAs store data, at addresses specified by the onchain programmer, using a list
of seeds, a bump seed, and a program ID.

Anchor provides a convenient way to validate a PDA with the `seeds` and `bump`
@@ -54,31 +52,38 @@ struct ExampleAccounts {
}
```

-During account validation, Anchor will derive a PDA using the seeds specified in
-the `seeds` constraint and verify that the account passed into the instruction
-matches the PDA found using the specified `seeds`.
+During account validation, Anchor will use the specified seeds to derive a PDA
+and check if the provided account matches the derived PDA.

When the `bump` constraint is included without specifying a specific bump,
-Anchor will default to using the canonical bump (the first bump that results in
-a valid PDA). In most cases you should use the canonical bump.
+Anchor will use the canonical bump (the first bump, counting down from 255,
+that results in a valid PDA). Typically, you should use the canonical bump.

-You can access other fields from within the struct from constraints, so you can
-specify seeds that are dependent on other accounts like the signer's public key.
+You can also use other fields from within the struct as seeds, such as the
+signer's public key.

You can also reference the deserialized instruction data if you add the
`#[instruction(...)]` attribute macro to the struct.

-For example, the following example shows a list of accounts that include
-`pda_account` and `user`.
The `pda_account` is constrained such that the seeds
-must be the string "example_seed," the public key of `user`, and the string
-passed into the instruction as `instruction_data`.
+For example, the following example shows a list of accounts that include:
+
+- `pda_account`
+- `user`
+
+The `pda_account` is constrained such that the seeds must be the string
+"example_seed," the public key of `user`, and the string passed into the
+instruction as `instruction_data`.

```rust
#[derive(Accounts)]
#[instruction(instruction_data: String)]
pub struct Example<'info> {
    #[account(
-        seeds = [b"example_seed", user.key().as_ref(), instruction_data.as_ref()],
+        seeds = [
+            b"example_seed",
+            user.key().as_ref(),
+            instruction_data.as_ref()
+        ],
        bump
    )]
    pub pda_account: Account<'info, AccountType>,

@@ -96,11 +101,12 @@ validation will fail.

You can combine the `seeds` and `bump` constraints with the `init` constraint
to initialize an account using a PDA.

-Recall that the `init` constraint must be used in combination with the `payer`
-and `space` constraints to specify the account that will pay for account
-initialization and the space to allocate on the new account. Additionally, you
-must include `system_program` as one of the fields of the account validation
-struct.
+Recall that the `init` constraint must be used with the `payer` and `space`
+constraints to specify who pays for the account initialization and how much
+space to allocate.
+
+Additionally, you need to include `system_program` to handle the creation and
+funding of the new account.

```rust
#[derive(Accounts)]
@@ -110,7 +116,7 @@ pub struct InitializePda<'info> {
        seeds = [b"example_seed", user.key().as_ref()],
        bump,
        payer = user,
-        space = 8 + 8
+        space = DISCRIMINATOR + AccountType::INIT_SPACE
    )]
    pub pda_account: Account<'info, AccountType>,
    #[account(mut)]
@@ -119,9 +125,12 @@ pub struct InitializePda<'info> {
}

#[account]
+#[derive(InitSpace)]
pub struct AccountType {
    pub data: u64,
}
+
+const DISCRIMINATOR: usize = 8;
```

When using `init` for non-PDA accounts, Anchor defaults to setting the owner of
@@ -134,18 +143,11 @@ words, the signature verification for the initialization of the PDA account
would fail if the program ID used to derive the PDA did not match the program
ID of the executing program.

-When determining the value of `space` for an account initialized and owned by
-the executing Anchor program, remember that the first 8 bytes are reserved for
-the account discriminator. This is an 8-byte value that Anchor calculates and
-uses to identify the program account types. You can use this
-[reference](https://www.anchor-lang.com/docs/space) to calculate how much space
-you should allocate for an account.
-
#### Seed inference

The account list for an instruction can get really long for some programs. To
simplify the client-side experience when invoking an Anchor program instruction,
-we can turn on seed inference.
+we can turn on **seed inference**.

Seed inference adds information about PDA seeds to the IDL so that Anchor can
infer PDA seeds from existing call-site information. In the previous example,

@@ -233,8 +235,8 @@ To use `init_if_needed`, you must first enable the feature in `Cargo.toml`.
anchor-lang = { version = "0.30.1", features = ["init-if-needed"] }
```

-Once you’ve enabled the feature, you can include the constraint in the
-`#[account(…)]` attribute macro.
+Once you've enabled the feature, you can include the constraint in the
+`#[account(...)]` attribute macro.
The example below demonstrates using the
`init_if_needed` constraint to initialize a new associated token account if one
does not already exist.

@@ -299,7 +301,7 @@ pub struct ReallocExample<'info> {
        mut,
        seeds = [b"example_seed", user.key().as_ref()],
        bump,
-        realloc = 8 + 4 + instruction_data.len(),
+        realloc = DISCRIMINATOR + STRING_SIZE_SPACE + instruction_data.len(),
        realloc::payer = user,
        realloc::zero = false,
    )]
@@ -310,29 +312,35 @@ pub struct ReallocExample<'info> {
}

#[account]
pub struct AccountType {
    pub data: String,
}
+
+const DISCRIMINATOR: usize = 8;
+const STRING_SIZE_SPACE: usize = 4;
```

-Notice that `realloc` is set to `8 + 4 + instruction_data.len()`. This breaks
-down as follows:
+The `realloc` constraint from the above example can be broken down as follows:

-- `8` is for the account discriminator
-- `4` is for the 4 bytes of space that BORSH uses to store the length of the
-  string
+- the `DISCRIMINATOR` is `8`
+- the `STRING_SIZE_SPACE` is `4` for the space required to store the length of
+  the string, as required by BORSH serialization
- `instruction_data.len()` is the length of the string itself

+> [BORSH](https://solanacookbook.com/guides/serialization.html) stands for
+> _Binary Object Representation Serializer for Hashing_ and is used to
+> efficiently and compactly serialize and deserialize data structures.
+
If the change in account data length is additive, lamports will be transferred
from the `realloc::payer` to the account to maintain rent exemption. Likewise,
if the change is subtractive, lamports will be transferred from the account
back to the `realloc::payer`.

-The `realloc::zero` constraint is required to determine whether the new memory
-should be zero initialized after reallocation. This constraint should be set to
-true in cases where you expect the memory of an account to shrink and expand
-multiple times. That way you zero out space that would otherwise show as stale
-data.
+The `realloc::zero` constraint ensures that any new memory allocated during
+reallocation is set to zero. This should be set to true if you expect the
+memory of an account to change size frequently. This way, you clear out any old
+data that might otherwise remain.

### Close

@@ -340,12 +348,10 @@ The `close` constraint provides a simple and secure way to close an existing
account.

The `close` constraint marks the account as closed at the end of the
-instruction’s execution by setting its discriminator to
-the `CLOSED_ACCOUNT_DISCRIMINATOR` and sends its lamports to a specified
-account. Setting the discriminator to a special variant makes account revival
-attacks (where a subsequent instruction adds the rent exemption lamports again)
-impossible. If someone tries to reinitialize the account, the reinitialization
-will fail the discriminator check and be considered invalid by the program.
+instruction's execution by setting its discriminator to a _special value_ called
+`CLOSED_ACCOUNT_DISCRIMINATOR` and sends its lamports to a specified account.
+This _special value_ prevents the account from being reopened because any
+attempt to reinitialize the account will fail the discriminator check.

The example below uses the `close` constraint to close the `data_account` and
sends the lamports allocated for rent to the `receiver` account.
@@ -366,7 +372,7 @@ pub struct Close<'info> {

## Lab

-Let’s practice the concepts we’ve gone over in this lesson by creating a Movie
+Let's practice the concepts we've gone over in this lesson by creating a Movie
Review program using the Anchor framework.

This program will allow users to:

@@ -379,9 +385,9 @@ This program will allow users to:

### Create a new Anchor project

-To begin, let’s create a new project using `anchor init`.
+To begin, let's create a new project using `anchor init`.

-```shell
+```bash
anchor init anchor-movie-review-program
```

@@ -422,14 +428,14 @@ pub mod anchor_movie_review_program {

### MovieAccountState

-First, let’s use the `#[account]` attribute macro to define the
+First, let's use the `#[account]` attribute macro to define the
`MovieAccountState` that will represent the data structure of the movie review
accounts. As a reminder, the `#[account]` attribute macro implements various
traits that help with serialization and deserialization of the account, set the
discriminator for the account, and set the owner of a new account as the program
ID defined in the `declare_id!` macro.

-Within each movie review account, we’ll store the:
+Within each movie review account, we'll store the:

- `reviewer` - user creating the review
- `rating` - rating for the movie

@@ -448,45 +454,32 @@ pub mod anchor_movie_review_program {
}

#[account]
+#[derive(InitSpace)]
pub struct MovieAccountState {
    pub reviewer: Pubkey,    // 32
    pub rating: u8,          // 1
+    #[max_len(20)]
    pub title: String,       // 4 + len()
+    #[max_len(50)]
    pub description: String, // 4 + len()
}
-```
-
-For this account struct, we will be implementing the space trait:
-
-```rust
-/*
-    For the MovieAccountState account, since it is dynamic, we implement the Space trait to calculate the space required for the account.
-    We add the STRING_LENGTH_PREFIX twice to the space to account for the title and description string prefix.
-    We need to add the length of the title and description to the space upon initialization.
- */
-impl Space for MovieAccountState {
-    const INIT_SPACE: usize = ANCHOR_DISCRIMINATOR + PUBKEY_SIZE + U8_SIZE + STRING_LENGTH_PREFIX + STRING_LENGTH_PREFIX;
-}
+
+const DISCRIMINATOR: usize = 8;
```

-The `Space` trait will force us to define the space of our account for
-initialization, by defining the `INIT_SPACE` constant. This constant can then be
-used during the account initalization.
+Using the `#[derive(InitSpace)]` macro on the account struct automatically
+calculates the `INIT_SPACE` constant, which represents the space required for
+the account fields, including fixed-size fields and the length-prefixed
+strings.

-Note that, in this case, since the account state is dynamic (`title` and
-`description` are strings without a fixed size), we will add
-`STRING_LENGTH_PREFIX` that represents 4 bytes (required to store their length)
-but we still need to add the length of the actual context of both strings during
-our account initialization (You will see that in the following steps).
-
-In sum, our `INIT_SPACE` constant will be 8 bytes for the anchor discriminator +
-32 bytes for the reviewer Pubkey + 1 byte for the rating + 4 bytes for the title
-length storage + 4 bytes for the description length storage.
+In cases of dynamic fields like strings, we can use the `#[max_len]` macro to
+specify the maximum length of these fields to determine the space needed for
+the account during initialization. Here, we have chosen the `title` string to
+be of length 20 (max), and the `description` string to be of length 50 (max).
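+To make the space math concrete, here is a quick sanity check. The byte sizes
+below follow Borsh encoding, and the arithmetic is our own illustration based
+on the `#[max_len]` values above rather than a figure quoted from the Anchor
+docs:
+
+```rust
+// reviewer: Pubkey                 -> 32 bytes
+// rating: u8                       ->  1 byte
+// title: String, max_len(20)       ->  4-byte length prefix + 20 bytes
+// description: String, max_len(50) ->  4-byte length prefix + 50 bytes
+// INIT_SPACE = 32 + 1 + (4 + 20) + (4 + 50) = 111
+// Total allocation = DISCRIMINATOR (8) + INIT_SPACE (111) = 119 bytes
+const _: () = assert!(DISCRIMINATOR + MovieAccountState::INIT_SPACE == 119);
+```
+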
### Custom error codes During our implementation, we will be doing some checks and throwing some custom -errors in case those checks are bot successful. +errors in case those checks are not successful. For, that let's go ahead and create an enum that will contain the different type of errors as well as the error messages associated: @@ -511,8 +504,8 @@ more detail in the next chapter. ### Add Movie Review -Next, let’s implement the `add_movie_review` instruction. The `add_movie_review` -instruction will require a `Context` of type `AddMovieReview` that we’ll +Next, let's implement the `add_movie_review` instruction. The `add_movie_review` +instruction will require a `Context` of type `AddMovieReview` that we'll implement shortly. The instruction will require three additional arguments as instruction data @@ -522,8 +515,8 @@ provided by a reviewer: - `description` - details of the review as a `String` - `rating` - rating for the movie as a `u8` -Within the instruction logic, we’ll populate the data of the new `movie_review` -account with the instruction data. We’ll also set the `reviewer` field as the +Within the instruction logic, we'll populate the data of the new `movie_review` +account with the instruction data. We'll also set the `reviewer` field as the `initializer` account from the instruction context. We will also perform some checks, using the `require!` macro, to make sure that: @@ -536,6 +529,11 @@ The `require!` macro will perform a check and throw a custom error in case that check is not successful. ```rust +const MIN_RATING: u8 = 1; +const MAX_RATING: u8 = 5; +const MAX_TITLE_LENGTH: usize = 20; +const MAX_DESCRIPTION_LENGTH: usize = 50; + #[program] pub mod anchor_movie_review_program{ use super::*; @@ -570,7 +568,7 @@ pub mod anchor_movie_review_program{ } ``` -Next, let’s create the `AddMovieReview` struct that we used as the generic in +Next, let's create the `AddMovieReview` struct that we used as the generic in the instruction's context. This struct will list the accounts the `add_movie_review` instruction requires. @@ -585,9 +583,7 @@ Remember, you'll need the following macros: The `movie_review` account is a PDA that needs to be initialized, so we'll add the `seeds` and `bump` constraints as well as the `init` constraint with its -required `payer` and `space` constraints. Regarding the required space, we will -be using the `INIT_SPACE` constant that we defined in the account struct, and we -will add the string length of the both the title and the description. +required `payer` and `space` constraints. For the PDA seeds, we'll use the movie title and the reviewer's public key. The payer for the initialization should be the reviewer, and the space allocated on @@ -596,14 +592,14 @@ public key, and the movie review's rating, title, and description. 
```rust #[derive(Accounts)] -#[instruction(title:String, description:String)] +#[instruction(title:String)] pub struct AddMovieReview<'info> { #[account( init, seeds = [title.as_bytes(), initializer.key().as_ref()], bump, payer = initializer, - space = MovieAccountState::INIT_SPACE + title.len() + description.len() // We add the length of the title and description to the init space + space = DISCRIMINATOR + MovieAccountState::INIT_SPACE )] pub movie_review: Account<'info, MovieAccountState>, #[account(mut)] @@ -614,7 +610,7 @@ pub struct AddMovieReview<'info> { ### Update Movie Review -Next, let’s implement the `update_movie_review` instruction with a context whose +Next, let's implement the `update_movie_review` instruction with a context whose generic type is `UpdateMovieReview`. Just as before, the instruction will require three additional arguments as @@ -624,7 +620,7 @@ instruction data provided by a reviewer: - `description` - details of the review - `rating` - rating for the movie -Within the instruction logic we’ll update the `rating` and `description` stored +Within the instruction logic we'll update the `rating` and `description` stored on the `movie_review` account. While the `title` doesn't get used in the instruction function itself, we'll @@ -643,6 +639,16 @@ pub mod anchor_movie_review_program { description: String, rating: u8, ) -> Result<()> { + + // We require that the rating is between 1 and 5 + require!(rating >= MIN_RATING && rating <= MAX_RATING, MovieReviewError::InvalidRating); + + // We require that the title is not longer than 20 characters + require!(title.len() <= MAX_TITLE_LENGTH, MovieReviewError::TitleTooLong); + + // We require that the description is not longer than 50 characters + require!(description.len() <= MAX_DESCRIPTION_LENGTH, MovieReviewError::DescriptionTooLong); + msg!("Movie review account space reallocated"); msg!("Title: {}", title); msg!("Description: {}", description); @@ -658,7 +664,7 @@ pub mod anchor_movie_review_program { } ``` -Next, let’s create the `UpdateMovieReview` struct to define the accounts that +Next, let's create the `UpdateMovieReview` struct to define the accounts that the `update_movie_review` instruction needs. Since the `movie_review` account will have already been initialized by this @@ -672,13 +678,13 @@ We'll also still need the `seeds` and `bump` constraints as we had them in ```rust #[derive(Accounts)] -#[instruction(title:String, description:String)] +#[instruction(title:String)] pub struct UpdateMovieReview<'info> { #[account( mut, seeds = [title.as_bytes(), initializer.key().as_ref()], bump, - realloc = MovieAccountState::INIT_SPACE + title.len() + description.len(), // We add the length of the title and description to the init space + realloc = DISCRIMINATOR + MovieAccountState::INIT_SPACE, realloc::payer = initializer, realloc::zero = true, )] @@ -702,7 +708,7 @@ expanding the space allocated to the account. ### Delete Movie Review -Lastly, let’s implement the `delete_movie_review` instruction to close an +Lastly, let's implement the `delete_movie_review` instruction to close an existing `movie_review` account. We'll use a context whose generic type is `DeleteMovieReview` and won't include @@ -726,7 +732,7 @@ pub mod anchor_movie_review_program { } ``` -Next, let’s implement the `DeleteMovieReview` struct. +Next, let's implement the `DeleteMovieReview` struct. 
```rust #[derive(Accounts)] @@ -854,7 +860,7 @@ it("Deletes a movie review", async () => { Lastly, run `anchor test` and you should see the following output in the console. -```shell +```bash anchor-movie-review-program ✔ Movie review is added` (139ms) ✔ Movie review is updated` (404ms) @@ -873,7 +879,7 @@ continuing. ## Challenge -Now it’s your turn to build something independently. Equipped with the concepts +Now it's your turn to build something independently. Equipped with the concepts introduced in this lesson, try to recreate the Student Intro program that we've used before using the Anchor framework. diff --git a/content/courses/onchain-development/intro-to-anchor-frontend.md b/content/courses/onchain-development/intro-to-anchor-frontend.md index 39803ac8d..0afe4ba99 100644 --- a/content/courses/onchain-development/intro-to-anchor-frontend.md +++ b/content/courses/onchain-development/intro-to-anchor-frontend.md @@ -16,7 +16,7 @@ description: - An **IDL** is a file representing the structure of a Solana program. Programs written and built using Anchor automatically generate a corresponding IDL. IDL stands for Interface Description Language. -- `@coral-xyz/anchor` is a Typescript client that includes everything you’ll +- `@coral-xyz/anchor` is a Typescript client that includes everything you'll need to interact with Anchor programs - An **Anchor `Provider`** object combines a `connection` to a cluster and a specified `wallet` to enable transaction signing @@ -56,11 +56,10 @@ writing to the program. To create an instance of `Program`, you'll need the following: -- IDL - file representing the structure of a program +- `IDL` - file representing the structure of a program - `Connection` - the cluster connection - `Wallet` - default keypair used to pay for and sign transactions - `Provider` - encapsulates the `Connection` to a Solana cluster and a `Wallet` -- `ProgramId` - the program’s onchain address ![Anchor structure](/public/assets/courses/unboxed/anchor-client-structure.png) @@ -84,45 +83,82 @@ counter program you built previously: ```json { - "version": "0.1.0", - "name": "counter", + "address": "9sMy4hnC9MML6mioESFZmzpntt3focqwUq1ymPgbMf64", + "metadata": { + "name": "anchor_counter", + "version": "0.1.0", + "spec": "0.1.0", + "description": "Created with Anchor" + }, "instructions": [ { - "name": "initialize", + "name": "increment", + "discriminator": [11, 18, 104, 9, 104, 174, 59, 33], "accounts": [ - { "name": "counter", "isMut": true, "isSigner": true }, - { "name": "user", "isMut": true, "isSigner": true }, - { "name": "systemProgram", "isMut": false, "isSigner": false } + { + "name": "counter", + "writable": true + }, + { + "name": "user", + "signer": true + } ], "args": [] }, { - "name": "increment", + "name": "initialize", + "discriminator": [175, 175, 109, 31, 13, 152, 155, 237], "accounts": [ - { "name": "counter", "isMut": true, "isSigner": false }, - { "name": "user", "isMut": false, "isSigner": true } + { + "name": "counter", + "writable": true, + "signer": true + }, + { + "name": "user", + "writable": true, + "signer": true + }, + { + "name": "system_program", + "address": "11111111111111111111111111111111" + } ], "args": [] } ], "accounts": [ + { + "name": "Counter", + "discriminator": [255, 176, 4, 245, 188, 253, 124, 25] + } + ], + "types": [ { "name": "Counter", "type": { "kind": "struct", - "fields": [{ "name": "count", "type": "u64" }] + "fields": [ + { + "name": "count", + "type": "u64" + } + ] } } ] } ``` -Inspecting the IDL, you can see that this program 
contains two instructions
-(`initialize` and `increment`).
+Inspecting the IDL, you can see the program `address` and the `metadata`
+object, which were added in Anchor 0.30.0.
+
+This program contains two instruction handlers, `initialize` and `increment`.

-Notice that in addition to specifying the instructions, it species the accounts
-and inputs for each instruction. The `initialize` instruction requires three
-accounts:
+Notice that in addition to specifying the instruction handlers, it specifies the
+accounts and inputs for each instruction. The `initialize` instruction requires
+three accounts:

1. `counter` - the new account being initialized in the instruction
2. `user` - the payer for the transaction and initialization

@@ -139,8 +175,12 @@ neither instructions require any additional instruction data since the `args`
section is blank for both.

Looking further down at the `accounts` section, you can see that the program
-contains one account type named `Counter` with a single `count` field of type
-`u64`.
+contains one account type named `Counter` with the `discriminator` field, which
+is used to distinguish between the various types of accounts present.
+
+Last, we have the `types` section, which contains the types of the accounts
+listed in the `accounts` section. In this case, for the account type `Counter`,
+it contains a single field named `count` of type `u64`.

Although the IDL does not provide the implementation details for each
instruction, we can get a basic idea of how the onchain program expects
@@ -148,12 +188,94 @@ instructions to be constructed and see the structure of the program accounts.

Regardless of how you get it, you _need_ an IDL file to interact with a program
using the `@coral-xyz/anchor` package. To use the IDL, you'll need to include
-the IDL file in your project and then import the file.
+the IDL file along with the types in your project and then import the file.

```typescript
import idl from "./idl.json";
```

+Ideally, you would also include the types for the IDL, which make it easier to
+interact with the program. After you have built your program, the types can be
+found in the `/target/types` folder. Here are the types for the above IDL;
+notice that they have the exact same structure as the IDL but serve purely as a
+type helper.
+
+```typescript
+/**
+ * Program IDL in camelCase format in order to be used in JS/TS.
+ *
+ * Note that this is only a type helper and is not the actual IDL. The original
+ * IDL can be found at `target/idl/anchor_counter.json`.
+ */ +export type AnchorCounter = { + address: "9sMy4hnC9MML6mioESFZmzpntt3focqwUq1ymPgbMf64"; + metadata: { + name: "anchorCounter"; + version: "0.1.0"; + spec: "0.1.0"; + description: "Created with Anchor"; + }; + instructions: [ + { + name: "increment"; + discriminator: [11, 18, 104, 9, 104, 174, 59, 33]; + accounts: [ + { + name: "counter"; + writable: true; + }, + { + name: "user"; + signer: true; + }, + ]; + args: []; + }, + { + name: "initialize"; + discriminator: [175, 175, 109, 31, 13, 152, 155, 237]; + accounts: [ + { + name: "counter"; + writable: true; + signer: true; + }, + { + name: "user"; + writable: true; + signer: true; + }, + { + name: "systemProgram"; + address: "11111111111111111111111111111111"; + }, + ]; + args: []; + }, + ]; + accounts: [ + { + name: "counter"; + discriminator: [255, 176, 4, 245, 188, 253, 124, 25]; + }, + ]; + types: [ + { + name: "counter"; + type: { + kind: "struct"; + fields: [ + { + name: "count"; + type: "u64"; + }, + ]; + }; + }, + ]; +}; +``` + #### Provider Before you can create a `Program` object using the IDL, you first need to create @@ -166,7 +288,7 @@ The `Provider` object combines two things: - `Wallet` - a specified address used to pay for and sign transactions The `Provider` is then able to send transactions to the Solana blockchain on -behalf of a `Wallet` by including the wallet’s signature to outgoing +behalf of a `Wallet` by including the wallet's signature to outgoing transactions. When using a frontend with a Solana wallet provider, all outgoing transactions must still be approved by the user via their wallet browser extension. @@ -239,7 +361,7 @@ The `AnchorProvider` constructor takes three parameters: - `opts` - optional parameter that specifies the confirmation options, using a default setting if one is not provided -Once you’ve created the `Provider` object, you then set it as the default +Once you've created the `Provider` object, you then set it as the default provider using `setProvider`. ```typescript @@ -248,7 +370,9 @@ import { AnchorProvider, setProvider } from "@coral-xyz/anchor"; const { connection } = useConnection(); const wallet = useAnchorWallet(); -const provider = new AnchorProvider(connection, wallet, {}); +const provider = new AnchorProvider(connection, wallet, { + commitment: "confirmed", +}); setProvider(provider); ``` @@ -258,7 +382,6 @@ Once you have the IDL and a provider, you can create an instance of `Program`. The constructor requires three parameters: - `idl` - the IDL as type `Idl` -- `programId` - the onchain address of the program as a `string` or `PublicKey` - `Provider` - the provider discussed in the previous section The `Program` object creates a custom API you can use to interact with a Solana @@ -271,16 +394,17 @@ and listen to events. You can also To create the `Program` object, first import `Program` and `Idl` from `@coral-xyz/anchor`. `Idl` is a type you can use when working with Typescript. -Next, specify the `programId` of the program. We have to explicitly state the -`programId` since there can be multiple programs with the same IDL structure -(i.e. if the same program is deployed multiple times using different addresses). When creating the `Program` object, the default `Provider` is used if one is not explicitly specified. +To enable type support, import the types to your project from `/target/types` +present in your anchor project, and declare the type for the program object. 
+
All together, the final setup looks something like this:

```typescript
import idl from "./idl.json";
+import type { CounterProgram } from "@/types";
import { useAnchorWallet, useConnection } from "@solana/wallet-adapter-react";
import { Program, Idl, AnchorProvider, setProvider } from "@coral-xyz/anchor";

@@ -290,8 +414,10 @@ const wallet = useAnchorWallet();
const provider = new AnchorProvider(connection, wallet, {});
setProvider(provider);

-const programId = new PublicKey("JPLockxtkngHkaQT5AuRYow3HyUv5qWzmhwsCPd653n");
-const program = new Program(idl as Idl, programId);
+const program = new Program(idl as CounterProgram);
+
+// We can also pass the provider in explicitly:
+// const program = new Program(idl as CounterProgram, provider);
```

### Anchor `MethodsBuilder`

@@ -433,7 +559,7 @@ const accounts = await program.account.counter.fetchMultiple([

## Lab

-Let’s practice this together by building a frontend for the Counter program from
+Let's practice this together by building a frontend for the Counter program from
last lesson. As a reminder, the Counter program has two instructions:

- `initialize` - initializes a new `Counter` account and sets the `count` to `0`

@@ -442,183 +568,151 @@ last lesson. As a reminder, the Counter program has two instructions:

#### 1. Download the starter code

Download
-[the starter code for this project](https://github.com/Unboxed-Software/anchor-ping-frontend/tree/starter).
+[the starter code for this project](https://github.com/solana-developers/anchor-ping-frontend/tree/starter).
Once you have the starter code, take a look around. Install the dependencies
with `npm install` and then run the app with `npm run dev`.

-This project is a simple Next.js application. It includes the
-`WalletContextProvider` we created in the
-[Wallets lesson](https://github.com/Unboxed-Software/solana-course/blob/main/content/interact-with-wallets),
-the `idl.json` file for the Counter program, and the `Initialize` and
-`Increment` components we’ll be building throughout this lab. The `programId` of
-the program we’ll be invoking is also included in the starter code.
+This project is a simple Next.js application, created using
+`npx create-solana-dapp`.
+
+It includes the `idl.json` file for the Counter program, and the `Initialize`
+and `Increment` components we'll be building throughout this lab.

#### 2. `Initialize`

-To begin, let’s complete the setup to create the `Program` object in
-`Initialize.tsx` component.
+To begin, let's complete the setup to create the `useCounterProgram` hook in
+the `components/counter/counter-data-access.tsx` file.

-Remember, we’ll need an instance of `Program` to use the Anchor `MethodsBuilder`
-to invoke the instructions on our program. For that, we'll need an Anchor wallet
-and a connection, which we can get from the `useAnchorWallet` and
-`useConnection` hooks. Let's also create a `useState` to capture the program
-instance.
+Remember, we'll need an instance of `Program` to use the Anchor `MethodsBuilder`
+to invoke the instructions on our program. `create-solana-dapp` already creates
+a `getCounterProgram` helper for us, which returns the `Program` instance.

```typescript
-export const Initialize: FC = ({ setCounter }) => {
-  const [program, setProgram] = useState("")
+// This is a helper function to get the Counter Anchor program.
+export function getCounterProgram(provider: AnchorProvider) {
+  return new Program(CounterIDL as AnchorCounter, provider);
+}
+```

-  const { connection } = useConnection()
-  const wallet = useAnchorWallet()
-
-  ...
-}
```

-With that, we can work on creating the actual `Program` instance. Let's do this
-in a `useEffect`.
+Now, in the `useCounterProgram` hook, we'll create a program instance:

-First we need to either get the default provider if it already exists, or create
-it if it doesn't. We can do that by calling `getProvider` inside a try/catch
-block. If an error is thrown, that means there is no default provider and we
-need to create one.
+```typescript
+const provider = useAnchorProvider();
+const program = getCounterProgram(provider);
+```

-Once we have a provider, we can construct a `Program` instance.
+- `useAnchorProvider` is a helper function at
+  `components/solana/solana-provider` which returns the provider.
+
+Now that we have the program instance, we can actually invoke the program's
+`initialize` instruction. We'll do this using `useMutation`.
+
+Remember, we'll need to generate a new `Keypair` for the new `Counter` account
+since we are initializing an account for the first time.

```typescript
-useEffect(() => {
-  let provider: anchor.Provider;
-
-  try {
-    provider = anchor.getProvider();
-  } catch {
-    provider = new anchor.AnchorProvider(connection, wallet, {});
-    anchor.setProvider(provider);
-  }
-
-  const program = new anchor.Program(idl as anchor.Idl, PROGRAM_ID);
-  setProgram(program);
-}, []);
+const initialize = useMutation({
+  mutationKey: ["counter", "initialize", { cluster }],
+
+  mutationFn: (keypair: Keypair) =>
+    program.methods
+      .initialize()
+      .accounts({ counter: keypair.publicKey })
+      .signers([keypair])
+      .rpc(),
+
+  onSuccess: signature => {
+    transactionToast(signature);
+    return accounts.refetch();
+  },
+  onError: () => toast.error("Failed to initialize account"),
+});
```

-Now that we've finished the Anchor setup, we can actually invoke the program's
-`initialize` instruction. We'll do this inside the `onClick` function.
+Just focus on the `mutationFn`, which accepts the `keypair` we'll be passing
+in. We are using the Anchor `MethodsBuilder` to create and send a new
+transaction. Remember, Anchor can infer some of the accounts required, like the
+`user` and `systemAccount` accounts. However, it can't infer the `counter`
+account because we generate that dynamically, so you'll need to add it with
+`.accounts`. You'll also need to add that keypair as a signer with `.signers`.
+Lastly, you can use `.rpc()` to submit the transaction to the user's wallet.

-First, we’ll need to generate a new `Keypair` for the new `Counter` account
-since we are initializing an account for the first time.
+Once the transaction goes through, we call `onSuccess` with the signature and
+then refetch `accounts`.

-Then we can use the Anchor `MethodsBuilder` to create and send a new
-transaction. Remember, Anchor can infer some of the accounts required, like the
-`user` and `systemAccount` accounts. However, it can't infer the `counter`
-account because we generate that dynamically, so you'll need to add it with
-`.accounts`. You'll also need to add that keypair as a sign with `.signers`.
-Lastly, you can use `.rpc()` to submit the transaction to the user's wallet.

-Once the transaction goes through, call `setUrl` with the explorer URL and then
-call `setCounter`, passing in the counter account.
+In the above `initialize` mutation, we are calling `accounts.refetch()`. This
+is done to refresh the accounts we have stored every time a new account is
+initialized.

```typescript
-const onClick = async () => {
-  const sig = await program.methods
-    .initialize()
-    .accounts({
-      counter: newAccount.publicKey,
-      user: wallet.publicKey,
-      systemAccount: anchor.web3.SystemProgram.programId,
-    })
-    .signers([newAccount])
-    .rpc();
-
-  setTransactionUrl(`https://explorer.solana.com/tx/${sig}?cluster=devnet`);
-  setCounter(newAccount.publicKey);
-};
+const accounts = useQuery({
+  queryKey: ["counter", "all", { cluster }],
+  queryFn: () => program.account.counter.all(),
+});
```

-#### 3. `Increment`
+We now use `account` on the `program` instance to fetch all the `counter`
+accounts created. Internally, this method calls `getProgramAccounts`.

-Next, let’s move on the the `Increment.tsx` component. Just as before, complete
-the setup to create the `Program` object. In addition to calling `setProgram`,
-the `useEffect` should call `refreshCount`.
+#### 4. `Increment`
+
+Next, let's move on to the `useCounterProgramAccount` hook. Since we already
+created the `program` instance and the `accounts` query in the previous hook,
+we'll call that hook to access them rather than redefine them.

Add the following code for the initial set up:

```typescript
-export const Increment: FC = ({ counter, setTransactionUrl }) => {
-  const [count, setCount] = useState(0)
-  const [program, setProgram] = useState()
-  const { connection } = useConnection()
-  const wallet = useAnchorWallet()
-
-  useEffect(() => {
-    let provider: anchor.Provider
-
-    try {
-      provider = anchor.getProvider()
-    } catch {
-      provider = new anchor.AnchorProvider(connection, wallet, {})
-      anchor.setProvider(provider)
-    }
-
-    const program = new anchor.Program(idl as anchor.Idl, PROGRAM_ID)
-    setProgram(program)
-    refreshCount(program)
-  }, [])
+export function useCounterProgramAccount({ account }: { account: PublicKey }) {
  ...
+
+  const { program, accounts } = useCounterProgram();
}
+
```

-Next, let’s use the Anchor `MethodsBuilder` to build a new instruction to invoke
+Next, let's use the Anchor `MethodsBuilder` to build a new instruction to invoke
the `increment` instruction. Again, Anchor can infer the `user` account from the
wallet so we only need to include the `counter` account.

```typescript
-const incrementCount = async () => {
-  const sig = await program.methods
-    .increment()
-    .accounts({
-      counter: counter,
-      user: wallet.publicKey,
-    })
-    .rpc();
-
-  setTransactionUrl(`https://explorer.solana.com/tx/${sig}?cluster=devnet`);
-};
-```
-
-#### 4. Display the correct count
+const incrementMutation = useMutation({
+  mutationKey: ["counter", "increment", { cluster, account }],

-Now that we can initialize the counter program and increment the count, we need
-to get our UI to show the count stored in the counter account.
+  mutationFn: () =>
+    program.methods.increment().accounts({ counter: account }).rpc(),

-We'll show how to observe account changes in a future lesson, but for now we
-just have a button that calls `refreshCount` so you can click it to show the new
-count after each `increment` invocation.
+  onSuccess: tx => {
+    transactionToast(tx);
+    return accountQuery.refetch();
+  },
+});
+```

-Inside `refreshCount`, let's use `program` to fetch the counter account, then
-use `setCount` to set the count to the number stored on the program:
+As the counter gets updated, we'll refresh the displayed count by calling
+`accountQuery.refetch()` when the transaction succeeds.

```typescript
-const refreshCount = async program => {
-  const counterAccount = await program.account.counter.fetch(counter);
-  setCount(counterAccount.count.toNumber());
-};
+const accountQuery = useQuery({
+  queryKey: ["counter", "fetch", { cluster, account }],
+  queryFn: () => program.account.counter.fetch(account),
+});
```

-Super simple with Anchor!

#### 5. Test the frontend

At this point, everything should work! You can test the frontend by running
-`npm run dev`.
+`yarn dev`.

-1. Connect your wallet and you should see the `Initialize Counter` button
-2. Click the `Initialize Counter` button, and then approve the transaction
-3. You should then see a link at the bottom of the screen to Solana Explorer for
-   the `initialize` transaction. The `Increment Counter` button, `Refresh Count`
-   button, and the count should also all appear.
-4. Click the `Increment Counter` button, and then approve the transaction
-5. Wait a few seconds and click `Refresh Count`. The count should increment on
-   the screen.
+1. Connect your wallet and head to the `Counter Program` tab
+2. Click the `Create` button, and then approve the transaction
+3. You should then see a link at the bottom right of the screen to Solana
+   Explorer for the `initialize` transaction. The `Increment` button and the
+   count should also appear.
+4. Click the `Increment` button, and then approve the transaction
+5. Wait a few seconds. The count should increment on the screen.

![Anchor Frontend Demo](/public/assets/courses/unboxed/anchor-frontend-demo.gif)

@@ -638,22 +732,22 @@ continuing.

## Challenge

-Now it’s your turn to build something independently. Building on top of what
-we’ve done in the lab, try to create a new component in the frontend that
+Now it's your turn to build something independently. Building on top of what
+we've done in the lab, try to create a new component in the frontend that
implements a button to decrements the counter.

-Before building the component in the frontend, you’ll first need to:
+Before building the component in the frontend, you'll first need to:

1. Build and deploy a new program that implements a `decrement` instruction
2. Update the IDL file in the frontend with the one from your new program
3. Update the `programId` with the one from your new program

If you need some help, feel free to
-[reference this program](https://github.com/Unboxed-Software/anchor-counter-program/tree/solution-decrement).
+[reference this program](https://github.com/solana-developers/anchor-ping-frontend/tree/solution-increment).

Try to do this independently if you can! But if you get stuck, feel free to
reference
-the [solution code](https://github.com/Unboxed-Software/anchor-ping-frontend/tree/solution-decrement).
+the [solution code](https://github.com/solana-developers/anchor-ping-frontend/tree/solution-decrement).
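+If you want a head start on the onchain side, the sketch below shows one way a
+`decrement` handler could look. This is our own illustration rather than the
+solution code; it assumes you reuse the `Update` accounts struct from the
+counter program:
+
+```rust
+pub fn decrement(ctx: Context<Update>) -> Result<()> {
+    let counter = &mut ctx.accounts.counter;
+    msg!("Previous counter: {}", counter.count);
+    // saturating_sub keeps a zero counter at zero instead of underflowing.
+    counter.count = counter.count.saturating_sub(1);
+    msg!("Counter decremented. Current count: {}", counter.count);
+    Ok(())
+}
+```
+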
Push your code to GitHub and

diff --git a/content/courses/onchain-development/intro-to-anchor.md b/content/courses/onchain-development/intro-to-anchor.md
index 2d3305a34..76aff362d 100644
--- a/content/courses/onchain-development/intro-to-anchor.md
+++ b/content/courses/onchain-development/intro-to-anchor.md
@@ -1,7 +1,7 @@
---
title: Intro to Anchor development
objectives:
-  - Use the Anchor framework to build a basic program
+  - Use the Anchor framework to build a basic Solana program
  - Describe the basic structure of an Anchor program
  - Explain how to implement basic account validation and security checks with
    Anchor
@@ -10,8 +10,9 @@ description: "Create your first Solana onchain program in Anchor."

## Summary

-- **Programs** on Solana have **instruction handlers** that execute instruction
-  logic.
+- **Programs** on Solana have **instruction handlers**, which are functions that
+  take arguments from incoming instructions. They are the entry point for any
+  operation in a program.
- **Rust** is the most common language for building Solana programs. The
  **Anchor** framework takes care of common grunt work - like reading data from
  incoming instructions, and checking the right accounts are provided - so you

@@ -19,15 +20,26 @@ description: "Create your first Solana onchain program in Anchor."

## Lesson

-Solana's ability to run arbitrary executable code is part of what makes it so
-powerful. Solana programs, similar to "smart contracts" in other blockchain
-environments, are quite literally the backbone of the Solana ecosystem. And the
-collection of programs grows daily as developers and creators dream up and
-deploy new programs.
+Before we begin, make sure you have Anchor installed. If you don't, see the
+[local-setup](/developers/courses/onchain-development/local-setup.md) lesson.
+
+Solana's capacity to execute arbitrary code is a key part of its power. Solana
+programs (sometimes called "smart contracts") are the very foundation of the
+Solana ecosystem. And as developers and creators continuously conceive and
+deploy new programs, the collection of Solana programs continues to expand
+daily.
+
+Every popular Solana exchange, borrow-lend app, digital art auction house, perps
+platform, and prediction market is a program.

This lesson will give you a basic introduction to writing and deploying a
Solana program using the Rust programming language and the Anchor framework.

+> This lesson and the ones that follow will give you a good base for building
+> Solana programs with Anchor; however, if you want to go deeper, we recommend
+> checking out [The Anchor Book](https://book.anchor-lang.com/).
+
### What is Anchor?

Anchor makes writing Solana programs easier, faster, and more secure, making it
@@ -38,27 +50,32 @@ with writing a Solana program.

### Anchor program structure

-Anchor uses macros and traits to generate boilerplate Rust code for you. These
-provide a clear structure to your program so you can more easily reason about
-your code. The main high-level macros and attributes are:
+Anchor uses macros and traits to simplify Rust code for you. These provide a
+clear structure to your program so you can focus more on its functionality.
+
+Some important macros provided by Anchor are:
+
+> From here on out, you'll see a lot of Rust. We assume you are familiar with
+> Rust; if not, we recommend checking out
+> [The Rust Book](https://doc.rust-lang.org/book/).
+ +- `declare_id!` - a macro for declaring the program’s onchain address - `#[program]` - an attribute macro used to denote the module containing the - program’s instruction logic + program’s instruction handlers. - `Accounts` - a trait applied to structs representing the list of accounts - required for an instruction + required for an instruction. - `#[account]` - an attribute macro used to define custom account types for the - program + program. Let's talk about each of them before putting all the pieces together. ### Declare your program ID -The `declare_id` macro is used to specify the onchain address of the program -(i.e. the `programId`). When you build an Anchor program for the first time, the -framework will generate a new keypair. This becomes the default keypair used to -deploy the program unless specified otherwise. The corresponding public key -should be used as the `programId` specified in the `declare_id!` macro. +The `declare_id` macro sets the onchain address of the Anchor program (i.e. the +`programId`). When you create a new Anchor program, the framework generates a +default keypair. This keypair is used to deploy the program unless specified +otherwise. The public key of this keypair is used as the `programId` in the +`declare_id!` macro. ```rust declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); @@ -67,16 +84,16 @@ declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); ### Define instruction logic The `#[program]` attribute macro defines the module containing all of your -program's instructions. This is where you implement the business logic for each -instruction in your program. +program's instruction handlers. This is where you implement the business logic +for each operation in your program. Each public function in the module with the `#[program]` attribute will be -treated as a separate instruction. +treated as a separate instruction handler. -Each instruction function requires a parameter of type `Context` and can -optionally include additional function parameters representing instruction data. -Anchor will automatically handle instruction data deserialization so that you -can work with instruction data as Rust types. +Each instruction handler (function) requires a parameter of type `Context` and +can include more parameters as needed. Anchor will automatically handle +instruction data deserialization so that you can work with instruction data as +Rust types. ```rust #[program] @@ -90,13 +107,20 @@ mod program_module_name { } ``` -#### Instruction `Context` +- The `#[program]` attribute macro is used to denote the module containing the + program’s instruction logic. +- `use super::*;` is used to bring all the items from the parent module into + scope, which are needed to define the instruction logic. +- Next, there is the instruction handler function. This function just writes + some data (`instruction_data` in this case) to an account. + +### Instruction `Context` The `Context` type exposes instruction metadata and accounts to your instruction logic. ```rust -pub struct Context<'a, 'b, 'c, 'info, T> { +pub struct Context<'a, 'b, 'c, 'info, T: Bumps> { /// Currently executing program id. pub program_id: &'a Pubkey, /// Deserialized accounts. @@ -107,43 +131,55 @@ pub struct Context<'a, 'b, 'c, 'info, T> { /// Bump seeds found during constraint validation. This is provided as a /// convenience so that handlers don't have to recalculate bump seeds or /// pass them in as arguments. 
-    pub bumps: BTreeMap<String, u8>,
+    /// Type is the bumps struct generated by #[derive(Accounts)]
+    pub bumps: T::Bumps,
}
```

`Context` is a generic type where `T` defines the list of accounts an
-instruction requires. When you use `Context`, you specify the concrete type of
-`T` as a struct that adopts the `Accounts` trait (e.g.
-`Context<InstructionAccounts>`). Through this context argument the
-instruction can then access:
+instruction handler requires. When you use `Context`, you specify the concrete
+type of `T` as a struct that adopts the `Accounts` trait.
+
+The first argument of every instruction handler must be `Context`. `Context`
+takes a generic of your `Accounts` struct, e.g., if `AddMovieReview` was the
+struct holding the accounts, the context for the `add_movie_review()` function
+would be `Context<AddMovieReview>`.
+
+> Yes, the `Accounts` struct is typically named the same thing as the
+> instruction handler, just in TitleCase. E.g., the struct with the accounts
+> for `add_movie_review()` is called `AddMovieReview`!
+
+Through this context argument the instruction can then access:

- The accounts passed into the instruction (`ctx.accounts`)
- The program ID (`ctx.program_id`) of the executing program
- The remaining accounts (`ctx.remaining_accounts`). The `remaining_accounts` is
-  a vector that contains all accounts that were passed into the instruction but
-  are not declared in the `Accounts` struct.
+  a vector that contains all accounts that were passed into the instruction
+  handler but are not declared in the `Accounts` struct.
- The bumps for any PDA accounts in the `Accounts` struct (`ctx.bumps`)

+> The design of contexts can differ across programs to serve their purpose, and
+> the name of the context could be anything (not limited to `Context`) to
+> better reflect its usage. This example is to help you understand how contexts
+> work in Anchor.

### Define instruction accounts

-The `Accounts` trait defines a data structure of validated accounts. Structs
-adopting this trait define the list of accounts required for a given
-instruction. These accounts are then exposed through an instruction's `Context`
-so that manual account iteration and deserialization is no longer necessary.
+The `Accounts` trait:

-You typically apply the `Accounts` trait through the `derive` macro (e.g.
-`#[derive(Accounts)]`). This implements an `Accounts` deserializer on the given
-struct and removes the need to deserialize each account manually.
+- Defines a structure of validated accounts for an instruction handler
+- Makes accounts accessible through an instruction handler's `Context`
+- Is typically applied using `#[derive(Accounts)]`
+- Implements an `Accounts` deserializer on the struct
+- Performs constraint checks for secure program execution

-Implementations of the `Accounts` trait are responsible for performing all
-requisite constraint checks to ensure the accounts meet the conditions required
-for the program to run securely. Constraints are provided for each field using
-the `#account(..)` attribute (more on that shortly).
+Example:

-For example, `instruction_one` requires a `Context` argument of type
-`InstructionAccounts`. The `#[derive(Accounts)]` macro is used to implement the
-`InstructionAccounts` struct which includes three accounts: `account_name`,
-`user`, and `system_program`.
+- `instruction_one` requires a `Context<InstructionAccounts>`
+- `InstructionAccounts` struct is implemented with `#[derive(Accounts)]`
+- It includes accounts like `account_name`, `user`, and `system_program`
+- Constraints are specified using the `#[account(..)]` attribute

```rust
#[program]
mod program_module_name {

@@ -156,25 +192,30 @@ mod program_module_name {
}

#[derive(Accounts)]
-pub struct InstructionAccounts {
-    #[account(init, payer = user, space = 8 + 8)]
+pub struct InstructionAccounts<'info> {
+    #[account(
+        init,
+        payer = user,
+        space = DISCRIMINATOR + AccountStruct::INIT_SPACE
+    )]
    pub account_name: Account<'info, AccountStruct>,
+    #[account(mut)]
    pub user: Signer<'info>,
-    pub system_program: Program<'info, System>,
+    pub system_program: Program<'info, System>,
}
```

When `instruction_one` is invoked, the program:

-- Checks that the accounts passed into the instruction match the account types
-  specified in the `InstructionAccounts` struct
+- Checks that the accounts passed into the instruction handler match the account
+  types specified in the `InstructionAccounts` struct
- Checks the accounts against any additional constraints specified

-If any accounts passed into `instruction_one` fail the account validation or
-security checks specified in the `InstructionAccounts` struct, then the
-instruction fails before even reaching the program logic.
+> If any accounts passed into `instruction_one` fail the account validation or
+> security checks specified in the `InstructionAccounts` struct, then the
+> instruction fails before even reaching the program logic.

### Account validation

You may have noticed in the previous example that one of the accounts in
`InstructionAccounts` was of type `Program`.

Anchor provides a number of account types that can be used to represent
-accounts. Each type implements different account validation. We’ll go over a few
+accounts. Each type implements different account validation. We'll go over a few
of the common types you may encounter, but be sure to look through the
[full list of account types](https://docs.rs/anchor-lang/latest/anchor_lang/accounts/index.html).

@@ -264,7 +305,7 @@ account_info.key == expected_program
account_info.executable == true
```

-### Add constraints with `#[account(..)]`
+### Add constraints with Account

The `#[account(..)]` attribute macro is used to apply constraints to accounts.
We'll go over a few constraint examples in this and future lessons, but at some
@@ -274,7 +315,11 @@ point be sure to look at the full

Recall again the `account_name` field from the `InstructionAccounts` example.

```rust
-#[account(init, payer = user, space = 8 + 8)]
+#[account(
+    init,
+    payer = user,
+    space = DISCRIMINATOR + AccountStruct::INIT_SPACE
+)]
pub account_name: Account<'info, AccountStruct>,
#[account(mut)]
pub user: Signer<'info>,

@@ -287,10 +332,14 @@ values:
  it (sets its account discriminator)
- `payer` - specifies the payer for the account initialization to be the `user`
  account defined in the struct
-- `space`- specifies that the space allocated for the account should be `8 + 8`
-  bytes. The first 8 bytes are for a discriminator that Anchor automatically
-  adds to identify the account type. The next 8 bytes allocate space for the
-  data stored on the account as defined in the `AccountStruct` type.
+- `space` - the space allocated on the blockchain to store the account.
+  - `DISCRIMINATOR` is the first 8 bytes of an account, which Anchor uses to
+    save the type of the account.
+  - `AccountStruct::INIT_SPACE` is the total size of space required for all
+    the items in the `AccountStruct`.
+  - The manual space calculation can be avoided by using the
+    `#[derive(InitSpace)]` macro, which generates `INIT_SPACE` for you. We'll
+    see how to use it further in this lesson.

For `user` we use the `#[account(..)]` attribute to specify that the given
account is mutable. The `user` account must be marked as mutable because

@@ -305,7 +354,7 @@ pub user: Signer<'info>,

Note that the `init` constraint placed on `account_name` automatically includes
a `mut` constraint so that both `account_name` and `user` are mutable accounts.

-### `#[account]`
+### Account

The `#[account]` attribute is applied to structs representing the data
structure of a Solana account. It implements the following traits:

@@ -329,8 +378,8 @@ from the first 8 bytes of the SHA256 hash of the account type's name. The first
8 bytes are reserved for the account discriminator when implementing account
serialization traits (which is almost always in an Anchor program).

-As a result, any calls to `AccountDeserialize`’s `try_deserialize` will check
-this discriminator. If it doesn’t match, an invalid account was given, and the
+As a result, any calls to `AccountDeserialize`'s `try_deserialize` will check
+this discriminator. If it doesn't match, an invalid account was given, and the
account deserialization will exit with an error.

The `#[account]` attribute also implements the `Owner` trait for a struct using

@@ -344,15 +393,21 @@ As an example, let's look at `AccountStruct` used by the `account_name` of

```rust
#[derive(Accounts)]
pub struct InstructionAccounts {
-    #[account(init, payer = user, space = 8 + 8)]
+    #[account(init,
+        payer = user,
+        space = DISCRIMINATOR + AccountStruct::INIT_SPACE
+    )]
    pub account_name: Account<'info, AccountStruct>,
    ...
}

#[account]
+#[derive(InitSpace)]
pub struct AccountStruct {
    data: u64
}
+
+const DISCRIMINATOR: usize = 8;
```

The `#[account]` attribute ensures that it can be used as an account in
@@ -360,10 +415,14 @@ The `#[account]` attribute ensures that it can be used as an account in

When the `account_name` account is initialized:

-- The first 8 bytes is set as the `AccountStruct` discriminator
+- The first 8 bytes are set as the `AccountStruct` discriminator using the
+  `DISCRIMINATOR` constant.
- The data field of the account will match `AccountStruct`
- The account owner is set as the `programId` from `declare_id`

+> It is considered a good practice to use the `#[derive(InitSpace)]` macro,
+> which makes the code more readable and maintainable.
+
### Bring it all together

When you combine all of these Anchor types you end up with a complete program.

@@ -393,21 +452,37 @@ mod program_module_name {

// Validate incoming accounts for instructions
#[derive(Accounts)]
pub struct InstructionAccounts<'info> {
-    #[account(init, payer = user, space = 8 + 8)]
+    #[account(init,
+        payer = user,
+        space = DISCRIMINATOR + AccountStruct::INIT_SPACE
+    )]
    pub account_name: Account<'info, AccountStruct>,
    #[account(mut)]
    pub user: Signer<'info>,
    pub system_program: Program<'info, System>,
-
}

// Define custom program account type
#[account]
+#[derive(InitSpace)]
pub struct AccountStruct {
    data: u64
}
+
+const DISCRIMINATOR: usize = 8;
```

+#### Key takeaways:
+
+- The whole program structure can be broadly divided into three parts:
+  1.
+  1. Account constraints: define the accounts required for the instructions, as
+     well as rules to apply to them - e.g., whether they need to sign the
+     transaction, whether they should be created on demand, and how addresses
+     for PDAs are derived.
+  2. Instruction handlers: implement the program logic, as functions inside the
+     `#[program]` module.
+  3. Accounts: define the format used for data accounts.
+
You are now ready to build your own Solana program using the Anchor framework!

## Lab

@@ -444,7 +519,7 @@ Open the file `lib.rs` and look at `declare_id!`:
declare_id!("BouTUP7a3MZLtXqMAm1NrkJSKwAjmid8abqiNjUyBJSr");
```

-Run `anchor keys sync`
+Then run:

```shell
anchor keys sync
@@ -473,12 +548,11 @@ following:

```rust
use anchor_lang::prelude::*;

-declare_id!("your-private-key");
+declare_id!("onchain-program-address");

#[program]
pub mod anchor_counter {
    use super::*;
-
}
```

@@ -489,18 +563,22 @@ type. The `Counter` struct defines one `count` field of type `u64`. This means
that we can expect any new accounts initialized as a `Counter` type to have a
matching data structure. The `#[account]` attribute also automatically sets the
discriminator for a new account and sets the owner of the account as the
-`programId` from the `declare_id!` macro.
+`programId` from the `declare_id!` macro. We also use the `#[derive(InitSpace)]`
+macro for convenient space allocation.

```rust
#[account]
+#[derive(InitSpace)]
pub struct Counter {
    pub count: u64,
}
+
+const DISCRIMINATOR: usize = 8;
```

#### 3. Implement `Context` type `Initialize`

-Next, using the `#[derive(Accounts)]` macro, let’s implement the `Initialize`
+Next, using the `#[derive(Accounts)]` macro, let's implement the `Initialize`
type that lists and validates the accounts used by the `initialize`
instruction. It'll need the following accounts:

@@ -512,7 +590,10 @@ It'll need the following accounts:

```rust
#[derive(Accounts)]
pub struct Initialize<'info> {
-    #[account(init, payer = user, space = 8 + 8)]
+    #[account(init,
+        payer = user,
+        space = DISCRIMINATOR + Counter::INIT_SPACE
+    )]
    pub counter: Account<'info, Counter>,
    #[account(mut)]
    pub user: Signer<'info>,
@@ -520,13 +601,13 @@ pub struct Initialize<'info> {
}
```

-#### 4. Add the `initialize` instruction
+#### 4. Add the `initialize` instruction handler

-Now that we have our `Counter` account and `Initialize` type , let’s implement
-the `initialize` instruction within `#[program]`. This instruction requires a
-`Context` of type `Initialize` and takes no additional instruction data. In the
-instruction logic, we are simply setting the `counter` account’s `count` field
-to `0`.
+Now that we have our `Counter` account and `Initialize` type, let's implement
+the `initialize` instruction handler within `#[program]`. This instruction
+handler requires a `Context` of type `Initialize` and takes no additional
+instruction data. In the instruction logic, we are simply setting the `counter`
+account's `count` field to `0`.

```rust
pub fn initialize(ctx: Context<Initialize>) -> Result<()> {
@@ -540,14 +621,14 @@ pub fn initialize(ctx: Context<Initialize>) -> Result<()> {

#### 5. Implement `Context` type `Update`

-Now, using the `#[derive(Accounts)]` macro again, let’s create the `Update` type
-that lists the accounts that the `increment` instruction requires. It'll need
-the following accounts:
+Now, using the `#[derive(Accounts)]` macro again, let's create the `Update` type
+that lists the accounts that the `increment` instruction handler requires. It'll
+need the following accounts:

- `counter` - an existing counter account to increment
- `user` - payer for the transaction fee

-Again, we’ll need to specify any constraints using the `#[account(..)]`
+Again, we'll need to specify any constraints using the `#[account(..)]`
attribute:

```rust
@@ -559,14 +640,14 @@ pub struct Update<'info> {
}
```

-#### 6. Add `increment` instruction
+#### 6. Add `increment` instruction handler

-Lastly, within `#[program]`, let’s implement an `increment` instruction to
-increment the `count` once a `counter` account is initialized by the first
-instruction. This instruction requires a `Context` of type `Update` (implemented
-in the next step) and takes no additional instruction data. In the instruction
-logic, we are simply incrementing an existing `counter` account’s `count` field
-by `1`.
+Lastly, within `#[program]`, let's implement an `increment` instruction handler
+to increment the `count` once a `counter` account is initialized by the first
+instruction handler. This instruction handler requires a `Context` of type
+`Update` (implemented in the previous step) and takes no additional instruction
+data. In the instruction logic, we are simply incrementing an existing `counter`
+account's `count` field by `1`.

```rust
pub fn increment(ctx: Context<Update>) -> Result<()> {
@@ -609,7 +690,10 @@ pub mod anchor_counter {

#[derive(Accounts)]
pub struct Initialize<'info> {
-    #[account(init, payer = user, space = 8 + 8)]
+    #[account(init,
+        payer = user,
+        space = DISCRIMINATOR + Counter::INIT_SPACE
+    )]
    pub counter: Account<'info, Counter>,
    #[account(mut)]
    pub user: Signer<'info>,
@@ -624,9 +708,12 @@ pub struct Update<'info> {
}

#[account]
+#[derive(InitSpace)]
pub struct Counter {
    pub count: u64,
}
+
+const DISCRIMINATOR: usize = 8;
```

Run `anchor build` to build the program.

@@ -713,7 +800,7 @@ if you need some more time with it.

## Challenge

-Now it’s your turn to build something independently. Because we're starting with
+Now it's your turn to build something independently. Because we're starting with
simple programs, yours will look almost identical to what we just created. It's
useful to try and get to the point where you can write it from scratch without
referencing prior code, so try not to copy and paste here.
diff --git a/content/courses/onchain-development/intro-to-onchain.md b/content/courses/onchain-development/intro-to-onchain.md
index 73e545d50..9d90919ba 100644
--- a/content/courses/onchain-development/intro-to-onchain.md
+++ b/content/courses/onchain-development/intro-to-onchain.md
@@ -1,44 +1,57 @@
---
-title: Intro to onchain development
+title: Intro to Solana Onchain Development
objectives:
  - Understand how Solana onchain programs work
-  - Describe the structure and operation of onchain programs
+  - Know about the structure and operation of Solana programs
  - Build a basic program
description:
-  "How onchain programs (often called 'smart contracts') work on Solana."
+  "Discover how onchain programs (often called 'smart contracts') work on
+  Solana and learn to build your own."
---

## Summary

-- **Onchain programs** are programs that run on Solana.
-- Programs have one or more **instruction handlers**. Instruction handlers are
-  functions that process the **instructions** in Solana transactions.
-- Instruction handlers write their data to Solana **accounts** and then either
-  succeed or fail.
-- Solana programs are most commonly written in **Rust**, typically with the
-  **Anchor** framework.
-- Anchor programs have IDLs, which are small JSON files describing programs.
-  IDLs can be used to automatically create JS/TS libraries for a program.
+- **Onchain programs** (sometimes called 'smart contracts') run directly on
+  Solana, just like programs on your computer.
+- These programs consist of **instruction handlers** - functions that process
+  instructions from transactions.
+- Programs interact with the blockchain by reading from and writing to Solana
+  **accounts**.
+- Solana programs are most commonly written in **Rust**, often using the
+  **Anchor** framework for simplified development.
+- Anchor generates **Interface Description Language (IDL)** files, which:
+  - Describe the program's structure and functionality
+  - Enable automatic creation of JavaScript/TypeScript client libraries
+- Solana's architecture allows for parallel execution of non-overlapping
+  transactions, contributing to its high speed and efficiency.
+- Rent is a concept in Solana where accounts must maintain a minimum balance to
+  stay alive on the blockchain.

## Overview

-Each Solana cluster (`mainnet-beta`, `testnet`, `devnet`, `localnet`) is
-effectively a single computer with a globally synchronized state. The programs
-that run on Solana - the ones that create tokens, swap tokens, art marketplaces,
-escrows, market makers, DePIN apps, auctions, retail payments platforms, etc -
-are called **Solana apps**.
+Solana operates on various clusters, each functioning as a unified, globally
+synchronized system:

-The most popular way to build onchain apps is using **Rust** language and the
-**Anchor** framework.
+- **mainnet-beta**: The main production network
+- **testnet**: For testing new features
+- **devnet**: For application development
+- **localnet**: For local testing
+
+The programs that run on Solana - the ones that create tokens, swap tokens, art
+marketplaces, escrows, market makers, DePIN apps, auctions, retail payments
+platforms, etc - are called **Solana apps**.

-These frameworks implement common security checks automatically, and handle
-common tasks like:
+The most popular way to build onchain apps is using the **Rust** language and
+the **Anchor** framework. Programs can also be written without a framework
+(**native onchain program development**), but **Anchor** makes things a lot
+simpler and safer. Some pros of using Anchor are:

-- sending incoming instructions to the right instruction handlers
-- deserializing data from incoming transactions
-- checking the accounts provided with incoming instructions, for example, to
-  check that certain accounts are of a particular type, or are distinct from
-  other accounts.
+- Security checks are implemented automatically
+- Automatic routing of incoming instructions to the correct instruction handler
+- Automatic serialization and deserialization of the data inside transactions
+- Account validation, including:
+  - Type checking
+  - Ensuring account uniqueness

Regardless of the language and framework you choose, Solana works the same.
Let's refresh how programs work on Solana.

@@ -53,6 +66,8 @@ created during `anchor init`, and the private key is saved in the
`target/deploy` directory of your project. A program's public key is sometimes
called a 'program ID' or 'program address'.
+It can be seen in the `programs//src/lib.rs` and
+`Anchor.toml` files.

### Programs have instruction handlers

@@ -71,48 +86,75 @@ functionality by instruction handlers.
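+To make this concrete, here is a minimal sketch of an Anchor program with a
+single instruction handler. The program name, instruction name, and program ID
+below are illustrative placeholders, not from a real program:
+
+```rust
+use anchor_lang::prelude::*;
+
+// Placeholder program ID - `anchor init` generates a real one for you
+declare_id!("11111111111111111111111111111111");
+
+#[program]
+pub mod example_program {
+    use super::*;
+
+    // An instruction handler: a function that processes one incoming
+    // instruction. `ctx` carries the accounts the instruction will use.
+    pub fn say_hello(ctx: Context<SayHello>) -> Result<()> {
+        msg!("Hello from program {}", ctx.program_id);
+        Ok(())
+    }
+}
+
+// This particular instruction handler doesn't need any accounts
+#[derive(Accounts)]
+pub struct SayHello {}
+```
+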
### Instruction handlers write their state to Solana accounts

-If you have previously done web development, you can think of instruction
-handlers like an HTTP route handler, and incoming instructions like HTTP
-requests.
+If you have done web development before, think of instruction handlers like HTTP
+route handlers, and incoming instructions like HTTP requests.
+
+However, unlike HTTP route handlers, Solana instruction handlers don't return
+data. Instead, they write their data to accounts on Solana.

-But unlike HTTP route handlers, Solana instruction handlers don't return data.
-Instead, the instruction handlers write their data to accounts on Solana.
+Programs on Solana can transfer tokens to user wallet addresses (for SOL) or
+user token accounts (for other tokens).

-Programs on Solana can transfer tokens, which end up in user wallet addresses
-(for SOL) for the user's token accounts (for other tokens).
+More importantly, programs can create additional addresses to store data as
+needed.

-But more importantly, programs on Solana can create additional addresses as
-needed, to store items of data.
+This is how Solana programs store their state.

-### Programs store data in Program Derived Addresses (PDAs), a key-value store
+### Program Derived Addresses (PDAs): Solana's Key-Value Store

Data for Solana programs are stored in **program-derived addresses (PDAs)**.
-Solana's PDAs can be thought of as a **key/value store**:
-
-- The 'key' is the address of the PDA, which is determined by `seeds` chosen by
-  you, the programmer.
-  - Want an account to store USD to AUD exchange rate? Your program can use the
-    seeds `USD` and `AUD` to make a Program Derived Address
-  - Want to store information about the relationship of two users? You can use
-    **both those users' wallet addresses** as seeds to make a PDA to store that
-    information.
-  - Want an account to store Steve's review of Titanic? Your program can use
-    Steve's **wallet address** and the string `titanic` (or maybe the IMDB ID if
-    you prefer) to make a Program Derived Address.
-  - Want some global information for your entire program? You can use a string
-    like `'config'`. Your program's PDAs are unique, so they won't conflict with
-    other programs.
-- The value is the data inside the account at the given address.
-  - The data inside the PDA is determined by you, the programmer.
-
-Key value stores allow your onchain program, and client software, to
-consistently determine the address for a data item because the same seeds will
-always return the same address.
+Solana's PDAs can be thought of as a **key/value store**. A PDA can be designed
+to store any form of data as required by the program.
+
+#### Key Concepts
+
+1. **Structure**
+
+   - **Key**: The PDA's address
+   - **Value**: Data stored in the account at that address
+
+2. **Address Generation**
+
+   - **Seeds**: values chosen by the programmer
+   - **Bump**: An additional value to ensure unique PDA creation
+   - **Deterministic**: The same combination of seeds and bump always produces
+     the same address. This helps the program and the client accurately
+     determine the address of the data.
+
+3. **Data Storage**
+
+   - Programmers define the structure of data stored in PDAs
+   - Can store any type of program-specific information
+
+4. **Key properties**
+   - PDAs are off the Ed25519 elliptic curve. While the data type web3.js uses
+     is a `PublicKey`, PDA addresses are not public keys and do not have a
+     matching private key.
+   - A program's PDAs are unique, so they won't conflict with other programs.
+   - PDAs can also act as signers in an instruction. We'll learn more about
+     this in later lessons.
+
+#### Examples of PDA Usage
+
+| Purpose           | Seeds                      | Resulting PDA                |
+| ----------------- | -------------------------- | ---------------------------- |
+| Exchange Rate     | `"USD"`, `"AUD"`           | Stores USD to AUD rate       |
+| User Relationship | User1 wallet, User2 wallet | Stores relationship data     |
+| Product Review    | User wallet, Product ID    | Stores user's review         |
+| Global Config     | `"config"`                 | Stores program-wide settings |
+
+#### Benefits
+
+1. **Uniqueness**: PDAs are specific to your program, avoiding conflicts
+2. **Determinism**: Consistent address generation across clients and onchain
+   programs
+3. **Flexibility**: Can store various types of data structures
+4. **Efficiency**: Quick lookup and access to program-specific data

### Solana instructions need to specify all the accounts they will use

As you may already know, Solana is fast because it can process transactions that
-don't overlap at the same time. I.e., just like in the real world, Alice sending
+don't overlap at the same time, i.e., just like in the real world, Alice sending
to Bob doesn't stop Chris from sending something to Diana. Your front-end apps
need to specify the addresses of all the accounts they will use.
diff --git a/content/courses/onchain-development/local-setup.md b/content/courses/onchain-development/local-setup.md
index 1f3bfcadf..2d5fb49e7 100644
--- a/content/courses/onchain-development/local-setup.md
+++ b/content/courses/onchain-development/local-setup.md
@@ -2,77 +2,137 @@ title: Local Program Development
objectives:
  - Set up a local environment for Solana program development, with Solana CLI
-    tools, Rust and Anchor.
-  - Ensure Anchor works out of the box with no errors or warnings
+    tools, Rust, and Anchor.
+  - Ensure Anchor works out of the box with no errors or warnings.
description:
  "Setup a local development environment for building onchain programs."
---

## Summary

-- To develop onchain programs on your machine, you need **Solana CLI**, **Rust**
+- To develop onchain programs locally, you need the **Solana CLI**, **Rust**,
  and (optional, but recommended) **Anchor**.
-- You can use `anchor init` to create a new blank Anchor project
-- `anchor test` runs your tests, and also builds your code.
+- You can use `anchor init` to create a new blank Anchor project.
+- `anchor test` runs your tests and also builds your code.

## Lesson

-There's no lesson here! Let's install Solana CLI tools, the Rust SDK, and
-Anchor, and create a test program to ensure that our setup works.
+This lesson is a guide to installing the tools required for developing onchain
+programs. Let's install Solana CLI tools, the Rust SDK, and Anchor, and create a
+test program to ensure that our setup works.

## Lab

-#### Extra steps for Windows users
+### Extra steps for Windows users

-Firstly install
-[Windows Terminal](https://apps.microsoft.com/detail/9N0DX20HK701) from the
-Microsoft store.
+> macOS and Linux users can skip this section. If you're on Windows, you can
+> follow along with these extra steps.

-Then
+First, make sure you have Windows Terminal installed; otherwise, you can
+install it from the
+[Microsoft Store](https://apps.microsoft.com/detail/9N0DX20HK701).
+
+Then,
+[install Windows Subsystem for Linux (WSL)](https://learn.microsoft.com/en-us/windows/wsl/install).
-WSL provides a Linux environment that launches instantly whenever you need it
-and doesn't slow your computer down.
+WSL provides a Linux environment that launches instantly when needed without
+slowing down your computer.

-Start Windows Terminal, launch an 'Ubuntu' session inside the terminal, and
-proceed with the rest of these steps.
+Open Windows Terminal, start an 'Ubuntu' session, and proceed with the rest of
+these steps.

-#### Download Rust
+### Download Rust

-First, download Rust by
+First, install Rust by
[following the instructions](https://www.rust-lang.org/tools/install):

-```
+```bash
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
```

-#### Download the Solana CLI tools
+### Download the Solana CLI tools

-Next
-[download the Solana CLI tools](https://docs.solana.com/cli/install-solana-cli-tools).
+Next,
+[download the Solana CLI tools](/docs/intro/installation.md#install-the-solana-cli):

-```
+```bash
sh -c "$(curl -sSfL https://release.anza.xyz/stable/install)"
```

-Afterwards, `solana -V` should show `solana-cli 1.18.x` (any number for `x` is
-fine).
+After installation, `solana -V` should display `solana-cli 1.18.x` (where `x`
+can be any number).

-#### Download Anchor
+### Running the Solana Test Validator

-Finally [download Anchor](https://www.anchor-lang.com/docs/installation):
+The Solana Test Validator is a local emulator for the Solana blockchain. It
+provides developers with a private and controlled environment to build and test
+Solana programs without needing to connect to a public testnet or mainnet.
+To start the Solana Test Validator, run the following command:

+```bash
+solana-test-validator
```
+
+When running `solana-test-validator`, you should see output indicating that the
+validator is working correctly. Below is an example of what the output should
+look like:
+
+```bash
+$ solana-test-validator
+--faucet-sol argument ignored, ledger already exists
+Ledger location: test-ledger
+Log: test-ledger/validator.log
+⠴ Initializing...
+Waiting for fees to stabilize 1...
+Identity: J8yKZJa5NtcmCQqmBRC6Fe8X6AECo8Vc3d7L3dF9JPiM
+Genesis Hash: FTPnCMDzTEthZxE6DvHbsWWv83F2hFe1GFvpVFBMUoys
+Version: 1.18.22
+Shred Version: 49491
+Gossip Address: 127.0.0.1:1024
+TPU Address: 127.0.0.1:1027
+JSON RPC URL: http://127.0.0.1:8899
+WebSocket PubSub URL: ws://127.0.0.1:8900
+⠄ 00:00:25 | Processed Slot: 114 | Confirmed Slot: 114 | Finalized Slot: 82 | Full Snapshot Slot: - | Incremental Snapshot Slot: - | Transactions: 111 | ◎499.999445000
+```
+
+If you see this output, it means the Solana test validator is running correctly.
+Stop the process by pressing CTRL+C, as you'll need to run the `anchor test`
+command next.
+
+For more detailed information, you can refer to the
+[Solana Test Validator guide](https://solana.com/developers/guides/getstarted/solana-test-validator).
+
+### Download Anchor
+
+Finally, [download Anchor](https://www.anchor-lang.com/docs/installation):
+
+```bash
cargo install --git https://github.com/coral-xyz/anchor avm --locked --force
+```
+
+You may need to install additional dependencies on Linux (or WSL) first:
+
+```bash
+sudo apt-get update && \
+sudo apt-get upgrade && \
+sudo apt-get install -y pkg-config build-essential libudev-dev libssl-dev
+```
+
+Then install and use the latest version of Anchor:
+
+```bash
avm install latest
avm use latest
```

-Afterwards, `anchor -V` should show `anchor-cli 0.30.0`.
+After installation, `anchor -V` should display `anchor-cli 0.30.1`. For more
For more +detailed information on Anchor, refer to +[The Anchor Book](https://book.anchor-lang.com). -#### Check your Anchor installation +### Verify your Anchor Installation -Create a temporary project, with the default contents, using Anchor and make -sure it compiles and runs our tests: +Create a temporary project with the default contents using Anchor and ensure it +compiles and runs: ```bash anchor init temp-project @@ -81,37 +141,71 @@ anchor test ``` **The `anchor test` command should complete with no errors or warnings**. -However you may encounter issues, and we'll fix them below: -##### `package `solana-program v1.18.12` cannot be built because it requires rustc 1.75.0 or newer` error +**However you may encounter issues, and we'll fix them below:** + +#### `package `solana-program + +v1.18.12` cannot be built because it requires rustc 1.75.0 or newer` error +This error is due to incompatible versions of `solana-program` and `solana-cli`. Run `cargo add solana-program@"=1.18.x"`, where `x` matches your version of `solana-cli`. Then re-run `anchor test`. -##### `Error: Unable to read keypair file` +#### Error: `Unable to read keypair file` Add a keypair to `.config/solana/id.json`. You can either copy a keypair from an `.env` file (just the array of numbers) into a file or use the command `solana-keygen new --no-bip39-passphrase` to create a new keypair file. Then re-run `anchor test`. -##### `unused variable: 'ctx'` warning +#### error: no such command: `build-sbf` -This simply means the `initialize` instruction handler isn't doing anything yet. -You can open `programs/favorites/src/lib.rs` and change `ctx` to `_ctx` or just -go onto the next step. +If you see this message, this error typically occurs because the relevant +binaries are not in your shell's PATH variable. -##### `No license field in package.json` warning +Run this command to add this folder to your shell, and also add this to your +`~/.zshrc` or `~/.bashrc` file to make the change permanent. -Open package.json, add `"license": "MIT"` or `"license": "UNLICENSED"` depending -on preferences +```bash +export PATH=~"/.local/share/solana/install/active_release/bin:$PATH" +``` -#### All done? +#### Unable to get latest blockhash. Test validator does not look started. + +There's multiple versions of the 'tar' (tape archiver) command Solana used for +archiving. macOS comes with BSD tar, but Solana CLI wants the GNU version +installed. + +- Install [Homebrew](https://brew.sh/) and use it to install GNU tar: + + ```bash + # Install Homebrew; you can skip this step if you already have Homebrew installed + /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" + # Install GNU tar + brew install gnu-tar + ``` + +- Add this to your ~/.zshrc or ~/.bashrc file to make the change permanent. + + ```bash + export PATH=/opt/homebrew/opt/gnu-tar/libexec/gnubin:$PATH + ``` + +#### Error: `Your configured rpc port: 8899 is already in use` + +If you are running `solana-test-validator`, you may encounter the error +`Error: Your configured rpc port: 8899 is already in use` when running +`anchor test`. To resolve this, stop the `solana-test-validator` before running +`anchor test`. + +### All done? Ensure `anchor test` completes successfully - with no warnings and no errors - before continuing. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=aa0b56d6-02a9-4b36-95c0-a817e2c5b19d)! 
diff --git a/content/courses/program-optimization/lookup-tables.md b/content/courses/program-optimization/lookup-tables.md
index fac7b6682..6576eb3d5 100644
--- a/content/courses/program-optimization/lookup-tables.md
+++ b/content/courses/program-optimization/lookup-tables.md
@@ -10,36 +10,33 @@ description: "Use large amounts of accounts by using lookup tables."

## Summary

-- **Versioned Transactions** refers to a way to support both legacy versions and
-  newer versions of transaction formats. The original transaction format is
-  "legacy" and new transaction versions start at version 0. Versioned
-  transactions were implemented to support the use of Address Lookup Tables
-  (also called lookup tables or LUTs).
-- **Address Lookup Tables** are accounts used to store addresses of other
-  accounts, which can then be referenced in versioned transactions using a 1
-  byte index instead of the full 32 bytes per address. This enables the creation
-  of more complex transactions than what was possible prior to the introduction
-  of LUTs.
+- **Versioned Transactions** in Solana allow support for both legacy and newer
+  transaction formats. The original format is referred to as "legacy," while new
+  formats begin at version 0. Versioned transactions were introduced to
+  accommodate the use of Address Lookup Tables (LUTs).
+- **Address Lookup Tables** are special accounts that store the addresses of
+  other accounts. In versioned transactions, these addresses can be referenced
+  by a 1-byte index instead of the full 32-byte address. This optimization
+  enables more complex transactions than previously possible.

## Lesson

By design, Solana transactions are limited to 1232 bytes. Transactions exceeding
-this size will fail. While this enables a number of network optimizations, it
-can also limit the types of atomic operations that can be performed on the
-network.
+this limit will fail, which restricts the size of atomic operations that can be
+performed. While this limit allows for optimizations at the network level, it
+imposes restrictions on transaction complexity.

-To help get around the transaction size limitation, Solana released a new
-transaction format that allows support for multiple versions of transaction
-formats. At the time of writing, Solana supports two transaction versions:
+To address transaction size limitations, Solana introduced a new transaction
+format supporting multiple versions. Currently, two transaction versions are
+supported:

-1. `legacy` - the original transaction format
-2. `0` - the newest transaction format that includes support for Address Lookup
-   Tables
+1. `legacy` - The original transaction format
+2. `0` - The latest format, which supports Address Lookup Tables

-Versioned transactions don't require any modifications to existing Solana
-programs, but any client-side code created prior to the release of versioned
-transactions should be updated. In this lesson, we'll cover the basics of
-versioned transactions and how to use them, including:
+Existing Solana programs do not require changes to support versioned
+transactions. However, client-side code created prior to their introduction
+should be updated. 
In this lesson, we'll cover the basics of versioned +transactions and how to use them, including: - Creating versioned transactions - Creating and managing lookup tables @@ -47,125 +44,114 @@ versioned transactions and how to use them, including: ### Versioned Transactions -One of the items taking up the most space in Solana transactions is the -inclusion of full account addresses. At 32 bytes each, 39 accounts will render a -transaction too large. That's not even accounting for instruction data. In -practice, most transactions will be too large with around 20 accounts. +In Solana transactions, one of the largest space consumers is account addresses, +which are 32 bytes each. For transactions with 39 accounts, the size limit is +exceeded even before accounting for instruction data. Typically, transactions +become too large with around 20 accounts. -Solana released versioned transactions to support multiple transaction formats. -Alongside the release of versioned transactions, Solana released version 0 of -transactions to support Address Lookup Tables. Lookup tables are separate -accounts that store account addresses and then allow them to be referenced in a -transaction using a 1 byte index. This significantly decreases the size of a -transaction since each included account now only needs to use 1 byte instead of -32 bytes. +Versioned transactions address this issue by introducing Address Lookup Tables, +which allow addresses to be stored separately and referenced via a 1-byte index. +This greatly reduces transaction size by minimizing the space needed for account +addresses. -Even if you don't need to use lookup tables, you'll need to know how to support -versioned transactions in your client-side code. Fortunately, everything you -need to work with versioned transactions and lookup tables is included in the -`@solana/web3.js` library. +Even if Address Lookup Tables are not required for your use case, understanding +versioned transactions is crucial for maintaining compatibility with the latest +Solana features. The `@solana/web3.js` library provides all necessary tools to +work with versioned transactions and lookup tables. #### Create versioned transactions -To create a versioned transaction, you simply create a `TransactionMessage` with +To create a versioned transaction, you first create a `TransactionMessage` with the following parameters: - `payerKey` - the public key of the account that will pay for the transaction - `recentBlockhash` - a recent blockhash from the network -- `instructions` - the instructions to include in the transaction +- `instructions` - the instructions to be executed in the transaction. -You then transform this message object into a version `0` transaction using the -`compileToV0Message()` method. +Once the message object is created, you can convert it into a version `0` +transaction using the `compileToV0Message()` method. 
```typescript
-import * as web3 from "@solana/web3.js";
+import {
+  SystemProgram,
+  TransactionMessage,
+  VersionedTransaction,
+  LAMPORTS_PER_SOL,
+} from "@solana/web3.js";

// Example transfer instruction
-const transferInstruction = [
-  web3.SystemProgram.transfer({
-    fromPubkey: payer.publicKey, // Public key of account that will send the funds
-    toPubkey: toAccount.publicKey, // Public key of the account that will receive the funds
-    lamports: 1 * LAMPORTS_PER_SOL, // Amount of lamports to be transferred
-  }),
-];
+const transferInstruction = SystemProgram.transfer({
+  fromPubkey: payer.publicKey, // Public key of the sender account
+  toPubkey: toAccount.publicKey, // Public key of the receiver account
+  lamports: 1 * LAMPORTS_PER_SOL, // Amount to transfer in lamports
+});

// Get the latest blockhash
-let { blockhash } = await connection.getLatestBlockhash();
+const { blockhash } = await connection.getLatestBlockhash();

// Create the transaction message
-const message = new web3.TransactionMessage({
-  payerKey: payer.publicKey, // Public key of the account that will pay for the transaction
-  recentBlockhash: blockhash, // Latest blockhash
-  instructions: transferInstruction, // Instructions included in transaction
+const message = new TransactionMessage({
+  payerKey: payer.publicKey, // Public key of the payer account
+  recentBlockhash: blockhash, // Most recent blockhash
+  instructions: [transferInstruction], // Transaction instructions
}).compileToV0Message();
```

-Finally, you pass the compiled message into the `VersionedTransaction`
-constructor to create a new versioned transaction. Your code can then sign and
-send the transaction to the network, similar to a legacy transaction.
+Next, pass the compiled message into the `VersionedTransaction` constructor to
+create a versioned transaction. The transaction is then signed and sent to the
+network, similar to how legacy transactions are handled.

```typescript
-// Create the versioned transaction using the message
-const transaction = new web3.VersionedTransaction(message);
+// Create the versioned transaction from the compiled message
+const transaction = new VersionedTransaction(message);

-// Sign the transaction
+// Sign the transaction with the payer's keypair
transaction.sign([payer]);

// Send the signed transaction to the network
-const transactionSignature = await connection.sendTransaction(transaction);
+const signature = await connection.sendTransaction(transaction);
```

### Address Lookup Table

-Address Lookup Tables (also called lookup tables or LUTs) are accounts that
-store a lookup table of other account addresses. These LUT accounts are owned by
-the Address Lookup Table Program and are used to increase the number of accounts
-that can be included in a single transaction.
+Address Lookup Tables (LUTs) are accounts that store references to other account
+addresses. These LUT accounts, owned by the Address Lookup Table Program,
+increase the number of accounts that can be included in a transaction.

-Versioned transactions can include the address of an LUT account and then
-reference additional accounts with a 1-byte index instead of including the full
-address of those accounts. This significantly reduces the amount of space used
-for referencing accounts in a transaction.
+In versioned transactions, LUT addresses are included, and additional accounts
+are referenced with a 1-byte index instead of the full 32-byte address, reducing
+space used by the transaction. 
-To simplify the process of working with LUTs, the `@solana/web3.js` library -includes an `AddressLookupTableProgram` class which provides a set of methods to -create instructions for managing LUTs. These methods include: +The `@solana/web3.js` library offers an `AddressLookupTableProgram` class, +providing methods to manage LUTs: -- `createLookupTable` - creates a new LUT account -- `freezeLookupTable` - makes an existing LUT immutable -- `extendLookupTable` - adds addresses to an existing LUT -- `deactivateLookupTable` - puts an LUT in a “deactivation” period before it can - be closed -- `closeLookupTable` - permanently closes an LUT account +- `createLookupTable` - creates a new LUT account. +- `freezeLookupTable` - makes a LUT immutable. +- `extendLookupTable` - adds addresses to an existing LUT. +- `deactivateLookupTable` - begins the deactivation period for an LUT. +- `closeLookupTable` - permanently closes an LUT account. #### Create a lookup table -You use the `createLookupTable` method to construct the instruction that creates -a lookup table. The function requires the following parameters: +You can use the `createLookupTable` method to construct the instruction for +creating a lookup table. This requires the following parameters: -- `authority` - the account that will have permission to modify the lookup table -- `payer` - the account that will pay for the account creation -- `recentSlot` - a recent slot to derive the lookup table's address +- `authority` - the account authorized to modify the lookup table. +- `payer` - the account responsible for paying the account creation fees. +- `recentSlot` - a recent slot used to derive the lookup table's address. -The function returns both the instruction to create the lookup table and the -address of the lookup table. +The function returns both the instruction for creating the LUT and its address. ```typescript // Get the current slot const slot = await connection.getSlot(); -// Create an instruction for creating a lookup table -// and retrieve the address of the new lookup table +// Create the lookup table creation instruction and retrieve its address const [lookupTableInst, lookupTableAddress] = - web3.AddressLookupTableProgram.createLookupTable({ - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) - payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - recentSlot: slot - 1, // The recent slot to derive lookup table's address + AddressLookupTableProgram.createLookupTable({ + authority: user.publicKey, // Account authorized to modify the LUT + payer: user.publicKey, // Account paying for transaction fees + recentSlot: slot - 1, // Use a recent slot to derive the LUT address }); ``` -Under the hood, the lookup table address is simply a PDA derived using the -`authority` and `recentSlot` as seeds. +Under the hood, the lookup table address is a Program Derived Address (PDA) +generated using the `authority` and `recentSlot` as seeds. ```typescript const [lookupTableAddress, bumpSeed] = PublicKey.findProgramAddressSync( @@ -174,10 +160,12 @@ const [lookupTableAddress, bumpSeed] = PublicKey.findProgramAddressSync( ); ``` -Note that using the most recent slot sometimes results in an error after sending -the transaction. To avoid this, you can use a slot that is one slot prior the -most recent one (e.g. `recentSlot: slot - 1`). However, if you still encounter -an error when sending the transaction, you can try resending the transaction. 
+
+Using the most recent slot sometimes results in errors when submitting the
+transaction. To avoid this, it's recommended to use a slot that is one slot
+before the most recent one (e.g., `recentSlot: slot - 1`). If you still
+encounter errors when sending the transaction, try resubmitting it.
+

```
"Program AddressLookupTab1e1111111111111111111111111 invoke [1]",
@@ -187,57 +175,57 @@ an error when sending the transaction, you can try resending the transaction.

#### Extend a lookup table

-You use the `extendLookupTable` method to create an instruction that adds
-addresses to an existing lookup table. It takes the following parameters:
+The `extendLookupTable` method creates an instruction to add addresses to an
+existing lookup table. It requires the following parameters:

-- `payer` - the account that will pay for the transaction fees and any increased
-  rent
-- `authority` - the account that has permission to change the lookup table
-- `lookupTable` - the address of the lookup table to extend
-- `addresses` - the addresses to add to the lookup table
+- `payer` - the account responsible for paying transaction fees and any
+  additional rent.
+- `authority` - the account authorized to modify the lookup table.
+- `lookupTable` - the address of the lookup table to be extended.
+- `addresses` - the list of addresses to add to the lookup table.

The function returns an instruction to extend the lookup table.

```typescript
const addresses = [
-  new web3.PublicKey("31Jy3nFeb5hKVdB4GS4Y7MhU7zhNMFxwF7RGVhPc1TzR"),
-  new web3.PublicKey("HKSeapcvwJ7ri6mf3HwBtspLFTDKqaJrMsozdfXfg5y2"),
-  // add more addresses
+  new PublicKey("31Jy3nFeb5hKVdB4GS4Y7MhU7zhNMFxwF7RGVhPc1TzR"),
+  new PublicKey("HKSeapcvwJ7ri6mf3HwBtspLFTDKqaJrMsozdfXfg5y2"),
+  // Add more addresses here
];

-// Create an instruction to extend a lookup table with the provided addresses
-const extendInstruction = web3.AddressLookupTableProgram.extendLookupTable({
-  payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-  authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-  lookupTable: lookupTableAddress, // The address of the lookup table to extend
-  addresses: addresses, // The addresses to add to the lookup table
+// Create the instruction to extend the lookup table with the provided addresses
+const extendInstruction = AddressLookupTableProgram.extendLookupTable({
+  payer: user.publicKey, // Account paying for transaction fees
+  authority: user.publicKey, // Account authorized to modify the lookup table
+  lookupTable: lookupTableAddress, // Address of the lookup table to extend
+  addresses: addresses, // Addresses to add to the lookup table
});
```

Note that when extending a lookup table, the number of addresses that can be
-added in one instruction is limited by the transaction size limit, which is 1232
-bytes. This means you can add 30 addresses to a lookup table at a time. If you
-need to add more than that, you'll need to send multiple transactions. Each
-lookup table can store a maximum of 256 addresses.
+added in a single instruction is limited by the transaction size limit of 1232
+bytes. You can add approximately 30 addresses in one transaction. If you need to
+add more than that, multiple transactions are required. Each lookup table can
+store up to 256 addresses.

#### Send Transaction

-After creating the instructions, you can add them to a transaction and sent it
-to the network.
+After creating the instructions, you can add them to a transaction and send it +to the network: ```typescript // Get the latest blockhash -let { blockhash } = await connection.getLatestBlockhash(); +const { blockhash } = await connection.getLatestBlockhash(); // Create the transaction message -const message = new web3.TransactionMessage({ - payerKey: payer.publicKey, // Public key of the account that will pay for the transaction +const message = new TransactionMessage({ + payerKey: payer.publicKey, // Account paying for the transaction recentBlockhash: blockhash, // Latest blockhash - instructions: [lookupTableInst, extendInstruction], // Instructions included in transaction + instructions: [lookupTableInst, extendInstruction], // Instructions to be included in the transaction }).compileToV0Message(); -// Create the versioned transaction using the message -const transaction = new web3.VersionedTransaction(message); +// Create the versioned transaction from the message +const transaction = new VersionedTransaction(message); // Sign the transaction transaction.sign([payer]); @@ -246,65 +234,62 @@ transaction.sign([payer]); const transactionSignature = await connection.sendTransaction(transaction); ``` -Note that when you first create or extend a lookup table, it needs to "warm up" -for one slot before the LUT or new addresses can be used in transactions. In -other words, you can only use lookup tables and access addresses that were added -prior to the current slot. +Note that after you create or extend a lookup table, it must "warm up" for one +slot before the lookup table or newly added addresses can be used in +transactions. You can only access lookup tables and addresses added in slots +prior to the current one. + +If you encounter the following error, it may indicate that you're trying to +access a lookup table or an address before the warm-up period has completed: ```typescript SendTransactionError: failed to send transaction: invalid transaction: Transaction address table lookup uses an invalid index ``` -If you encounter the error above or are unable to access addresses in a lookup -table immediately after extending it, it's likely because you're attempting to -access the lookup table or a specific address prior to the end of the warm up -period. To avoid this issue, add a delay after extending the lookup table before -sending a transaction that references the table. +To avoid this issue, ensure you add a delay after extending the lookup table +before attempting to reference the table in a transaction. #### Deactivate a lookup table -When a lookup table is no longer needed, you can deactivate and close it to -reclaim its rent balance. Address lookup tables can be deactivated at any time, -but they can continue to be used by transactions until a specified -"deactivation" slot is no longer "recent". This "cool-down" period ensures that -in-flight transactions can't be censored by LUTs being closed and recreated in -the same slot. The deactivation period is approximately 513 slots. +When a lookup table (LUT) is no longer needed, you can deactivate it to reclaim +its rent balance. Deactivating a LUT puts it into a "cool-down" period +(approximately 513 slots) during which it can still be used by transactions. +This prevents transactions from being censored by deactivating and recreating +LUTs within the same slot. 
-To deactivate an LUT, use the `deactivateLookupTable` method and pass in the -following parameters: +To deactivate a LUT, use the `deactivateLookupTable` method with the following +parameters: -- `lookupTable` - the address of the LUT to be deactivated -- `authority` - the account with permission to deactivate the LUT +- `lookupTable` - the address of the lookup table to be deactivated. +- `authority` - the account with the authority to deactivate the LUT. ```typescript -const deactivateInstruction = - web3.AddressLookupTableProgram.deactivateLookupTable({ - lookupTable: lookupTableAddress, // The address of the lookup table to deactivate - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) - }); +const deactivateInstruction = AddressLookupTableProgram.deactivateLookupTable({ + lookupTable: lookupTableAddress, // Address of the lookup table to deactivate + authority: user.publicKey, // Authority to modify the lookup table +}); ``` #### Close a lookup table -To close a lookup table after its deactivation period, use the -`closeLookupTable` method. This method creates an instruction to close a -deactivated lookup table and reclaim its rent balance. It takes the following -parameters: +Once a LUT has been deactivated and the cool-down period has passed, you can +close the lookup table to reclaim its rent balance. Use the `closeLookupTable` +method, which requires the following parameters: -- `lookupTable` - the address of the LUT to be closed -- `authority` - the account with permission to close the LUT -- `recipient` - the account that will receive the reclaimed rent balance +- `lookupTable` - the address of the LUT to be closed. +- `authority` - the account with the authority to close the LUT. +- `recipient` - the account that will receive the reclaimed rent balance. ```typescript -const closeInstruction = web3.AddressLookupTableProgram.closeLookupTable({ - lookupTable: lookupTableAddress, // The address of the lookup table to close - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) - recipient: user.publicKey, // The recipient of closed account lamports +const closeInstruction = AddressLookupTableProgram.closeLookupTable({ + lookupTable: lookupTableAddress, // Address of the lookup table to close + authority: user.publicKey, // Authority to close the LUT + recipient: user.publicKey, // Recipient of the reclaimed rent balance }); ``` -Attempting to close a lookup table before it's been fully deactivated will -result in an error. +Attempting to close a LUT before it has been fully deactivated will result in +the following error: ``` "Program AddressLookupTab1e1111111111111111111111111 invoke [1]", @@ -317,21 +302,21 @@ result in an error. In addition to standard CRUD operations, you can "freeze" a lookup table. This makes it immutable so that it can no longer be extended, deactivated, or closed. -You freeze a lookup table with the `freezeLookupTable` method. It takes the +The `freezeLookupTable` method is used for this operation and takes the following parameters: -- `lookupTable` - the address of the LUT to be frozen -- `authority` - the account with permission to freeze the LUT +- `lookupTable` - the address of the LUT to freeze. +- `authority` - the account with the authority to freeze the LUT. 
```typescript -const freezeInstruction = web3.AddressLookupTableProgram.freezeLookupTable({ - lookupTable: lookupTableAddress, // The address of the lookup table to freeze - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) +const freezeInstruction = AddressLookupTableProgram.freezeLookupTable({ + lookupTable: lookupTableAddress, // Address of the lookup table to freeze + authority: user.publicKey, // Authority to freeze the LUT }); ``` -Once an LUT is frozen, any further attempts to modify it will result in an -error. +Once a LUT is frozen, any attempt to modify it will result in an error like the +following: ``` "Program AddressLookupTab1e1111111111111111111111111 invoke [1]", @@ -341,28 +326,29 @@ error. #### Using lookup tables in versioned transactions -To use a lookup table in a versioned transaction, you need to retrieve the -lookup table account using its address. +To utilize a lookup table in a versioned transaction, first retrieve the lookup +table account using its address: ```typescript +// Fetch the lookup table account from the blockchain using its address const lookupTableAccount = ( - await connection.getAddressLookupTable(lookupTableAddress) + await connection.getAddressLookupTable(new PublicKey(lookupTableAddress)) ).value; ``` -You can then create a list of instructions to include in a transaction as usual. -When creating the `TransactionMessage`, you can include any lookup table -accounts by passing them as an array to the `compileToV0Message()` method. You -can also provide multiple lookup table accounts. +Once you have the lookup table account, you can create the list of instructions +for the transaction. When constructing the `TransactionMessage`, pass the lookup +table accounts as an array to the `compileToV0Message()` method. You can include +multiple lookup table accounts if needed. ```typescript const message = new web3.TransactionMessage({ - payerKey: payer.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - recentBlockhash: blockhash, // The blockhash of the most recent block - instructions: instructions, // The instructions to include in the transaction + payerKey: payer.publicKey, // Public key of the account paying for the transaction + recentBlockhash: blockhash, // Blockhash of the most recent block + instructions: instructions, // Instructions to be included in the transaction }).compileToV0Message([lookupTableAccount]); // Include lookup table accounts -// Create the versioned transaction using the message +// Create a versioned transaction using the compiled message const transaction = new web3.VersionedTransaction(message); // Sign the transaction @@ -376,261 +362,324 @@ const transactionSignature = await connection.sendTransaction(transaction); Let's go ahead and practice using lookup tables! -this lab will guide you through the steps of creating, extending, and then using -a lookup table in a versioned transaction. - -#### 1. Get the starter code - -To begin, download the starter code from the starter branch of this -[repository](https://github.com/Unboxed-Software/solana-versioned-transactions/tree/starter). -Once you have the starter code, run `npm install` in the terminal to install the -required dependencies. - -The starter code includes an example of creating a legacy transaction that -intends to atomically transfer SOL to 22 recipients. The transaction contains 22 -instructions where each instruction transfers SOL from the signer to a different -recipient. 
-
-The purpose of the starter code is to illustrate the limitation on the number of
-addresses that can be included in a legacy transaction. The transaction built in
-the starter code is expected to fail when sent.
-
-The following starter code can be found in the `index.ts` file.
-
-```typescript
-import { initializeKeypair } from "./initializeKeypair";
-import * as web3 from "@solana/web3.js";
+This lab will guide you through creating, extending, and using a lookup table in
+a versioned transaction.
+
+#### 1. Create the `try-large-transaction.ts` file
+
+To begin, create a new file named `try-large-transaction.ts` in your project
+directory. This file will contain the code to illustrate a scenario where a
+legacy transaction is created to transfer SOL to 22 recipients in a single
+atomic transaction. The transaction will include 22 separate instructions, each
+transferring SOL from the payer (signer) to a different recipient.
+
+This example highlights a key limitation of legacy transactions when trying to
+accommodate many account addresses within a single transaction. As expected,
+when attempting to send this transaction, it will likely fail due to exceeding
+the transaction size limits.
+
+Here's the code to include in `try-large-transaction.ts`:
+
+```typescript filename="try-large-transaction.ts"
+import {
+  Connection,
+  clusterApiUrl,
+  Transaction,
+  SystemProgram,
+  LAMPORTS_PER_SOL,
+  sendAndConfirmTransaction,
+} from "@solana/web3.js";
+import {
+  initializeKeypair,
+  makeKeypairs,
+  getExplorerLink,
+} from "@solana-developers/helpers";
+import dotenv from "dotenv";
+dotenv.config();

async function main() {
-  // Connect to the devnet cluster
-  const connection = new web3.Connection(web3.clusterApiUrl("devnet"));
+  // Connect to the devnet cluster
+  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");

-  // Initialize the user's keypair
-  const user = await initializeKeypair(connection);
-  console.log("PublicKey:", user.publicKey.toBase58());
+  // Initialize the keypair from the environment variable or create a new one
+  const payer = await initializeKeypair(connection);

-  // Generate 22 addresses
-  const recipients = [];
-  for (let i = 0; i < 22; i++) {
-    recipients.push(web3.Keypair.generate().publicKey);
-  }
+  // Generate 22 recipient keypairs using makeKeypairs
+  const recipients = makeKeypairs(22).map(keypair => keypair.publicKey);

-  // Create an array of transfer instructions
-  const transferInstructions = [];
+  // Create a legacy transaction
+  const transaction = new Transaction();

-  // Add a transfer instruction for each address
-  for (const address of recipients) {
-    transferInstructions.push(
-      web3.SystemProgram.transfer({
-        fromPubkey: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-        toPubkey: address, // The destination account for the transfer
-        lamports: web3.LAMPORTS_PER_SOL * 0.01, // The amount of lamports to transfer
+  // Add 22 transfer instructions to the transaction
+  recipients.forEach(recipient => {
+    transaction.add(
+      SystemProgram.transfer({
+        fromPubkey: payer.publicKey,
+        toPubkey: recipient,
+        lamports: LAMPORTS_PER_SOL * 0.01, // Transfer 0.01 SOL to each recipient
      }),
    );
-  }
-
-  // Create a transaction and add the transfer instructions
-  const transaction = new web3.Transaction().add(...transferInstructions);
-
-  // Send the transaction to the cluster (this will fail in this example if addresses > 21)
-  const txid = await connection.sendTransaction(transaction, [user]);
-
-  // Get the latest blockhash and last valid block height
-  const { lastValidBlockHeight, blockhash } =
-    await connection.getLatestBlockhash();
-
-  // Confirm the transaction
-  await connection.confirmTransaction({
-    blockhash: blockhash,
-    lastValidBlockHeight: lastValidBlockHeight,
-    signature: txid,
  });

-  // Log the transaction URL on the Solana Explorer
-  console.log(`https://explorer.solana.com/tx/${txid}?cluster=devnet`);
+  // Sign and send the transaction
+  try {
+    const signature = await sendAndConfirmTransaction(connection, transaction, [
+      payer,
+    ]);
+    console.log(
+      `Transaction successful with signature: ${getExplorerLink("tx", signature, "devnet")}`,
+    );
+  } catch (error) {
+    console.error("Transaction failed:", error);
+  }
}
+
+main();
```

-To execute the code, run `npm start`. This will create a new keypair, write it
-to the `.env` file, airdrop devnet SOL to the keypair, and send the transaction
-built in the starter code. The transaction is expected to fail with the error
-message `Transaction too large`.
+To run the example, execute `npx esrun try-large-transaction.ts`. This process
+will:
+
+- Generate a new keypair.
+- Store the keypair details in the `.env` file.
+- Request an airdrop of devnet SOL to the generated keypair.
+- Attempt to send the transaction.
+- Since the transaction includes 22 instructions, it is expected to fail with
+  the error: "Transaction too large".

```
Creating .env file
Current balance is 0
Airdropping 1 SOL...
New balance is 1
-PublicKey: 5ZZzcDbabFHmoZU8vm3VzRzN5sSQhkf91VJzHAJGNM7B
+PublicKey: 7YsGYC4EBs6Dxespe4ZM3wfCp856xULWoLw7QUcVb6VG
Error: Transaction too large: 1244 > 1232
```

-In the next steps, we'll go over how to use lookup tables with versioned
-transactions to increase the number of addresses that can be included in a
-single transaction.
-
-Before we start, go ahead and delete the content of the `main` function to leave
-only the following:
+#### 2. Create the `use-lookup-tables.ts` file
+
+Next, we'll explore how to use lookup tables in combination with versioned
+transactions to overcome the limitation of legacy transactions and include a
+greater number of addresses in a single transaction.
+
+Create a new file named `use-lookup-tables.ts` in your project directory. This
+file will contain the code to demonstrate the use of lookup tables.
-In the next steps, we'll go over how to use lookup tables with versioned
-transactions to increase the number of addresses that can be included in a
-single transaction.
-
-Before we start, go ahead and delete the content of the `main` function to leave
-only the following:
+#### 2. Create the `use-lookup-tables.ts` File
+
+Next, we'll explore how to use lookup tables in combination with versioned
+transactions to overcome the limitation of legacy transactions and include a
+greater number of addresses in a single transaction.
+
+Create a new file named `use-lookup-tables.ts` in your project directory. This
+file will contain the code to demonstrate the use of lookup tables.
+
+Here's the starter code to include in the `use-lookup-tables.ts` file:
+
+```typescript filename="use-lookup-tables.ts"
+import {
+  Connection,
+  clusterApiUrl,
+  Keypair,
+  PublicKey,
+  TransactionInstruction,
+  AddressLookupTableAccount,
+  SystemProgram,
+  VersionedTransaction,
+  TransactionMessage,
+  AddressLookupTableProgram,
+  LAMPORTS_PER_SOL,
+} from "@solana/web3.js";
+import {
+  initializeKeypair,
+  makeKeypairs,
+  getExplorerLink,
+} from "@solana-developers/helpers";
+import dotenv from "dotenv";
+dotenv.config();
 
-```typescript
 async function main() {
-  // Connect to the devnet cluster
-  const connection = new web3.Connection(web3.clusterApiUrl("devnet"));
+  // Connect to the Solana devnet cluster
+  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
 
-  // Initialize the user's keypair
-  const user = await initializeKeypair(connection);
-  console.log("PublicKey:", user.publicKey.toBase58());
+  // Initialize the keypair from the environment variable or create a new one
+  const payer = await initializeKeypair(connection);
 
-  // Generate 22 addresses
-  const addresses = [];
-  for (let i = 0; i < 22; i++) {
-    addresses.push(web3.Keypair.generate().publicKey);
-  }
+  // Generate 22 recipient keypairs using makeKeypairs
+  const recipients = makeKeypairs(22).map(keypair => keypair.publicKey);
 }
 ```
 
-#### 2. Create a `sendV0Transaction` helper function
+Next, we will create a few helper functions that will be crucial for working
+with versioned transactions and lookup tables. These functions will simplify our
+process and make our code more modular and reusable.
 
-We'll be sending multiple "version 0" transactions, so let's create a helper
-function to facilitate this.
+#### 3. Create a `sendV0Transaction` helper function
 
-This function should take parameters for a connection, a user's keypair, an
-array of transaction instructions, and an optional array of lookup table
-accounts.
+To handle versioned transactions, we will create a helper function in the
+`use-lookup-tables.ts` file called `sendV0Transaction`. This function will
+accept the following parameters:
 
-The function then performs the following tasks:
+- `connection`: the Solana connection to the cluster (e.g., devnet).
+- `user`: the keypair of the user (payer) signing the transaction.
+- `instructions`: an array of `TransactionInstruction` objects to include in the
+  transaction.
+- `lookupTableAccounts` (optional): an array of lookup table accounts, if
+  applicable, to reference additional addresses.
 
-- Retrieves the latest blockhash and last valid block height from the Solana
-  network
-- Creates a new transaction message using the provided instructions
-- Signs the transaction using the user's keypair
-- Sends the transaction to the Solana network
-- Confirms the transaction
-- Logs the transaction URL on the Solana Explorer
+This helper function will:
 
-```typescript
+- Retrieve the latest blockhash and last valid block height from the Solana
+  network.
+- Compile a versioned transaction message using the provided instructions.
+- Sign the transaction using the user's keypair.
+- Send the transaction to the network.
+- Confirm the transaction and log the transaction's URL using Solana Explorer.
+
+```typescript filename="use-lookup-tables.ts"
 async function sendV0Transaction(
-  connection: web3.Connection,
-  user: web3.Keypair,
-  instructions: web3.TransactionInstruction[],
-  lookupTableAccounts?: web3.AddressLookupTableAccount[],
+  connection: Connection,
+  user: Keypair,
+  instructions: TransactionInstruction[],
+  lookupTableAccounts?: AddressLookupTableAccount[],
 ) {
   // Get the latest blockhash and last valid block height
-  const { lastValidBlockHeight, blockhash } =
+  const { blockhash, lastValidBlockHeight } =
     await connection.getLatestBlockhash();
 
   // Create a new transaction message with the provided instructions
-  const messageV0 = new web3.TransactionMessage({
+  const messageV0 = new TransactionMessage({
     payerKey: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
     recentBlockhash: blockhash, // The blockhash of the most recent block
     instructions, // The instructions to include in the transaction
-  }).compileToV0Message(lookupTableAccounts ? lookupTableAccounts : undefined);
+  }).compileToV0Message(lookupTableAccounts);
 
-  // Create a new transaction object with the message
-  const transaction = new web3.VersionedTransaction(messageV0);
+  // Create a versioned transaction from the message
+  const transaction = new VersionedTransaction(messageV0);
 
   // Sign the transaction with the user's keypair
   transaction.sign([user]);
 
   // Send the transaction to the cluster
   const txid = await connection.sendTransaction(transaction);
 
-  // Confirm the transaction
+  // Confirm the transaction at the "finalized" commitment level
   await connection.confirmTransaction(
     {
-      blockhash: blockhash,
-      lastValidBlockHeight: lastValidBlockHeight,
+      blockhash,
+      lastValidBlockHeight,
       signature: txid,
     },
     "finalized",
   );
 
-  // Log the transaction URL on the Solana Explorer
-  console.log(`https://explorer.solana.com/tx/${txid}?cluster=devnet`);
+  // Log the transaction URL on the Solana Explorer using the helper
+  const explorerLink = getExplorerLink("tx", txid, "devnet");
+  console.log(
+    `Transaction successful! View it on Solana Explorer: ${explorerLink}`,
+  );
+
+  // Return the signature so callers can reference it
+  return txid;
 }
 ```
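+Calling the helper looks like this: a quick sketch that sends a single transfer
+in a v0 transaction (no lookup table yet), assuming the `payer` and `recipients`
+variables from the `main` function above:
+
+```typescript
+await sendV0Transaction(connection, payer, [
+  SystemProgram.transfer({
+    fromPubkey: payer.publicKey,
+    toPubkey: recipients[0],
+    lamports: LAMPORTS_PER_SOL * 0.01,
+  }),
+]);
+```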
 
-#### 3. Create a `waitForNewBlock` helper function
+#### 4. Create a `waitForNewBlock` helper function
 
-Recall that lookup tables and the addresses contained in them can't be
-referenced immediately after creation or extension. This means we'll end up
-needing to wait for a new block before submitting transactions that reference
-the newly created or extended lookup table. To make this simpler down the road,
-let's create a `waitForNewBlock` helper function that we'll use to wait for
-lookup tables to activate between sending transactions.
+When working with lookup tables, it's important to remember that newly created
+or extended lookup tables cannot be referenced immediately. Therefore, before
+submitting transactions that reference these tables, we need to wait for a new
+block to be generated.
 
-This function will have parameters for a connection and a target block height.
-It then starts an interval that checks the current block height of the network
-every 1000ms. Once the new block height exceeds the target height, the interval
-is cleared and the promise is resolved.
+We will create a `waitForNewBlock` helper function that accepts:
 
-```typescript
-function waitForNewBlock(connection: web3.Connection, targetHeight: number) {
-  console.log(`Waiting for ${targetHeight} new blocks`);
-  return new Promise(async (resolve: any) => {
-    // Get the last valid block height of the blockchain
-    const { lastValidBlockHeight } = await connection.getLatestBlockhash();
+- `connection`: the Solana network connection.
+- `targetHeight`: the number of new blocks to wait for.
+
+This function will:
+
+- Start an interval that checks the current block height of the network every
+  second (1000ms).
+- Resolve the promise once the block height has advanced by `targetHeight`
+  blocks.
 
-    // Set an interval to check for new blocks every 1000ms
+```typescript filename="use-lookup-tables.ts"
+async function waitForNewBlock(
+  connection: Connection,
+  targetHeight: number,
+): Promise<void> {
+  console.log(`Waiting for ${targetHeight} new blocks...`);
+
+  // Get the initial block height of the blockchain
+  const { lastValidBlockHeight: initialBlockHeight } =
+    await connection.getLatestBlockhash();
+
+  return new Promise<void>(resolve => {
+    const SECOND = 1000;
+    const checkInterval = 1 * SECOND; // Interval to check for new blocks (1000ms)
+
+    // Set an interval to check for new block heights
     const intervalId = setInterval(async () => {
-      // Get the new valid block height
-      const { lastValidBlockHeight: newValidBlockHeight } =
-        await connection.getLatestBlockhash();
-      // console.log(newValidBlockHeight)
-
-      // Check if the new valid block height is greater than the target block height
-      if (newValidBlockHeight > lastValidBlockHeight + targetHeight) {
-        // If the target block height is reached, clear the interval and resolve the promise
+      try {
+        // Get the current block height
+        const { lastValidBlockHeight: currentBlockHeight } =
+          await connection.getLatestBlockhash();
+
+        // If the block height has advanced enough, resolve and clear the interval
+        if (currentBlockHeight >= initialBlockHeight + targetHeight) {
+          clearInterval(intervalId);
+          console.log(`New block height reached: ${currentBlockHeight}`);
+          resolve();
+        }
+      } catch (error) {
+        console.error("Error fetching block height:", error);
         clearInterval(intervalId);
-        resolve();
+        resolve(); // Resolve to avoid hanging in case of errors
       }
-    }, 1000);
+    }, checkInterval);
   });
 }
 ```
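+As an aside, `@solana/web3.js` also exposes `connection.getBlockHeight()`, which
+returns the current block height directly. A more compact variant of the same
+helper could be sketched like this:
+
+```typescript
+async function waitForNewBlockCompact(
+  connection: Connection,
+  targetHeight: number,
+): Promise<void> {
+  const initialHeight = await connection.getBlockHeight();
+  while ((await connection.getBlockHeight()) < initialHeight + targetHeight) {
+    // Poll once per second until enough new blocks have been produced
+    await new Promise(resolve => setTimeout(resolve, 1000));
+  }
+}
+```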
 
-#### 4. Create an `initializeLookupTable` function
+#### 5. Create an `initializeLookupTable` function
 
-Now that we have some helper functions ready to go, declare a function named
-`initializeLookupTable`. This function has parameters `user`, `connection`, and
-`addresses`. The function will:
+Next, we need to initialize a lookup table to hold the addresses of the
+recipients. The `initializeLookupTable` function will accept the following
+parameters:
 
-1. Retrieve the current slot
-2. Generate an instruction for creating a lookup table
-3. Generate an instruction for extending the lookup table with the provided
-   addresses
-4. Send and confirm a transaction with the instructions for creating and
-   extending the lookup table
-5. Return the address of the lookup table
+- `user`: the user's keypair (payer and authority).
+- `connection`: the Solana network connection.
+- `addresses`: an array of recipient addresses (public keys) to add to the
+  lookup table.
 
-```typescript
+The function will:
+
+- Retrieve the current slot to derive the lookup table's address.
+- Generate the necessary instructions to create and extend the lookup table with
+  the provided recipient addresses.
+- Send and confirm a transaction that includes these instructions.
+- Return the address of the newly created lookup table.
+
+Although the transaction includes the full recipient addresses, using the lookup
+table allows Solana to reference those addresses with significantly fewer bytes
+in the actual transaction. By including the lookup table in the versioned
+transaction, the framework optimizes the transaction size, replacing addresses
+with pointers to the lookup table.
+
+This design is crucial for enabling the transaction to support more recipients
+by staying within Solana's transaction size limits.
+
+```typescript filename="use-lookup-tables.ts"
 async function initializeLookupTable(
-  user: web3.Keypair,
-  connection: web3.Connection,
-  addresses: web3.PublicKey[],
-): Promise<web3.PublicKey> {
-  // Get the current slot
-  const slot = await connection.getSlot();
+  user: Keypair,
+  connection: Connection,
+  addresses: PublicKey[],
+): Promise<PublicKey> {
+  // Get the current slot from the cluster
+  const slot = await connection.getSlot();
 
   // Create an instruction for creating a lookup table
   // and retrieve the address of the new lookup table
   const [lookupTableInst, lookupTableAddress] =
-    web3.AddressLookupTableProgram.createLookupTable({
-      authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-      payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-      recentSlot: slot - 1, // The recent slot to derive lookup table's address
+    AddressLookupTableProgram.createLookupTable({
+      authority: user.publicKey, // The authority to modify the lookup table
+      payer: user.publicKey, // The payer for transaction fees
+      recentSlot: slot - 1, // The slot for lookup table address derivation
     });
-  console.log("lookup table address:", lookupTableAddress.toBase58());
+
+  console.log("Lookup Table Address:", lookupTableAddress.toBase58());
 
   // Create an instruction to extend a lookup table with the provided addresses
-  const extendInstruction = web3.AddressLookupTableProgram.extendLookupTable({
-    payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-    authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-    lookupTable: lookupTableAddress, // The address of the lookup table to extend
-    addresses: addresses.slice(0, 30), // The addresses to add to the lookup table
+  const extendInstruction = AddressLookupTableProgram.extendLookupTable({
+    payer: user.publicKey, // The payer of transaction fees
+    authority: user.publicKey, // The authority to extend the lookup table
+    lookupTable: lookupTableAddress, // Address of the lookup table to extend
+    addresses: addresses.slice(0, 30), // Add up to 30 addresses per instruction
   });
 
-  await sendV0Transaction(connection, user, [
+  // Use the helper function to send the versioned transaction
+  await sendV0Transaction(connection, user, [
     lookupTableInst,
     extendInstruction,
   ]);
@@ -639,73 +688,84 @@ async function initializeLookupTable(
 }
 ```
 
-#### 5. Modify `main` to use lookup tables
+#### 6. Modify `main` to use lookup tables
 
-Now that we can initialize a lookup table with all of the recipients' addresses,
-let's update `main` to use versioned transactions and lookup tables. We'll need
-to:
+With the helper functions in place, we are now ready to modify the `main`
+function to utilize versioned transactions and address lookup tables. To do so,
+we will follow these steps:
 
-1. Call `initializeLookupTable`
-2. Call `waitForNewBlock`
-3. Get the lookup table using `connection.getAddressLookupTable`
-4. Create the transfer instruction for each recipient
-5. Send the v0 transaction with all of the transfer instructions
+1. Call `initializeLookupTable`: Create and extend the lookup table with the
+   recipients' addresses.
+2. Call `waitForNewBlock`: Ensure the lookup table is activated by waiting for a
+   new block.
+3. Retrieve the Lookup Table: Use `connection.getAddressLookupTable` to fetch
+   the lookup table and reference it in the transaction.
+4. Create Transfer Instructions: Generate a transfer instruction for each
+   recipient.
+5. Send the Versioned Transaction: Use `sendV0Transaction` to send a single
+   transaction with all transfer instructions, referencing the lookup table.
 
-```typescript
+```typescript filename="use-lookup-tables.ts"
 async function main() {
-  // Connect to the devnet cluster
-  const connection = new web3.Connection(web3.clusterApiUrl("devnet"));
+  // Connect to the devnet Solana cluster
+  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
 
-  // Initialize the user's keypair
-  const user = await initializeKeypair(connection);
-  console.log("PublicKey:", user.publicKey.toBase58());
-
-  // Generate 22 addresses
-  const recipients = [];
-  for (let i = 0; i < 22; i++) {
-    recipients.push(web3.Keypair.generate().publicKey);
-  }
+  // Initialize the keypair from the environment variable or create a new one
+  const payer = await initializeKeypair(connection);
 
+  // Generate 22 recipient keypairs using makeKeypairs
+  const recipients = makeKeypairs(22).map(keypair => keypair.publicKey);
+
+  // Initialize the lookup table with the generated recipients
   const lookupTableAddress = await initializeLookupTable(
-    user,
+    payer,
     connection,
     recipients,
   );
 
+  // Wait for a new block before using the lookup table
   await waitForNewBlock(connection, 1);
 
+  // Fetch the lookup table account
   const lookupTableAccount = (
     await connection.getAddressLookupTable(lookupTableAddress)
   ).value;
 
+  // Check if the lookup table was successfully fetched
   if (!lookupTableAccount) {
     throw new Error("Lookup table not found");
   }
 
-  const transferInstructions = recipients.map(recipient => {
-    return web3.SystemProgram.transfer({
-      fromPubkey: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-      toPubkey: recipient, // The destination account for the transfer
-      lamports: web3.LAMPORTS_PER_SOL * 0.01, // The amount of lamports to transfer
-    });
-  });
+  // Create transfer instructions for each recipient
+  const transferInstructions = recipients.map(recipient =>
+    SystemProgram.transfer({
+      fromPubkey: payer.publicKey, // The payer
+      toPubkey: recipient, // The recipient
+      lamports: LAMPORTS_PER_SOL * 0.01, // Amount to transfer
    }),
  );
 
-  await sendV0Transaction(connection, user, transferInstructions, [
-    lookupTableAccount,
-  ]);
+  // Send the versioned transaction including the lookup table
+  const txid = await sendV0Transaction(
+    connection,
+    payer,
+    transferInstructions,
+    [lookupTableAccount],
+  );
+
+  // Log the transaction link for easy access
+  console.log(`Transaction URL: ${getExplorerLink("tx", txid, "devnet")}`);
 }
 ```
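+If you are curious how much smaller the versioned transaction ends up, you can
+log its serialized size inside `sendV0Transaction`, right after signing it; a
+quick sketch:
+
+```typescript
+// Inside sendV0Transaction, after transaction.sign([user])
+console.log(`v0 transaction size: ${transaction.serialize().length} bytes`);
+```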
-Notice that you create the transfer instructions with the full recipient address
-even though we created a lookup table. That's because by including the lookup
-table in the versioned transaction, you tell the `web3.js` framework to replace
-any recipient addresses that match addresses in the lookup table with pointers
-to the lookup table instead. By the time the transaction is sent to the network,
-addresses that exist in the lookup table will be referenced by a single byte
-rather than the full 32 bytes.
+Even though we create the transfer instructions with full recipient addresses,
+the use of lookup tables allows the `@solana/web3.js` framework to optimize the
+transaction size. The addresses in the transaction that match entries in the
+lookup table will be replaced with compact pointers referencing the lookup
+table. By doing this, addresses will be represented using only a single byte in
+the final transaction, significantly reducing the transaction's size.
 
-Use `npm start` in the command line to execute the `main` function. You should
-see an output similar to the following:
+Use `npx esrun use-lookup-tables.ts` in the command line to execute the `main`
+function. You should see an output similar to the following:
 
 ```bash
 Current balance is 1.38866636
@@ -726,7 +786,7 @@
 Remember, this same transaction was failing when you first downloaded the
 starter code. Now that we're using lookup tables, we can do all 22 transfers in
 a single transaction.
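+If you want to verify that the recipients really were written into the table,
+you can fetch it again and inspect its state; a small optional check inside
+`main`, reusing `connection` and `lookupTableAddress`:
+
+```typescript
+// The lookup table's state lists every address it currently holds
+const table = (await connection.getAddressLookupTable(lookupTableAddress))
+  .value;
+console.log(`Lookup table holds ${table?.state.addresses.length} addresses`);
+```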
 
-#### 6. Add more address to the lookup table
+#### 7. Add more addresses to the lookup table
 
 Keep in mind that the solution we've come up with so far only supports transfers
 to up to 30 accounts since we only extend the lookup table once. When you factor
@@ -738,55 +798,54 @@
 All we need to do is go into `initializeLookupTable` and do two things:
 
 1. Modify the existing call to `extendLookupTable` to only add the first 30
    addresses (any more than that and the transaction will be too large)
-2. Add a loop that will keep extending a lookup table 30 addresses at a time
+2. Add a loop that will keep extending the lookup table, 30 addresses at a time,
    until all addresses have been added
 
-```typescript
+```typescript filename="use-lookup-tables.ts"
 async function initializeLookupTable(
-  user: web3.Keypair,
-  connection: web3.Connection,
-  addresses: web3.PublicKey[],
-): Promise<web3.PublicKey> {
+  user: Keypair,
+  connection: Connection,
+  addresses: PublicKey[],
+): Promise<PublicKey> {
   // Get the current slot
   const slot = await connection.getSlot();
 
-  // Create an instruction for creating a lookup table
-  // and retrieve the address of the new lookup table
+  // Create the lookup table and retrieve its address
   const [lookupTableInst, lookupTableAddress] =
-    web3.AddressLookupTableProgram.createLookupTable({
-      authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-      payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-      recentSlot: slot - 1, // The recent slot to derive lookup table's address
+    AddressLookupTableProgram.createLookupTable({
+      authority: user.publicKey, // The authority to modify the lookup table
+      payer: user.publicKey, // The payer for the transaction fees
+      recentSlot: slot - 1, // Recent slot to derive lookup table's address
     });
-  console.log("lookup table address:", lookupTableAddress.toBase58());
+  console.log("Lookup table address:", lookupTableAddress.toBase58());
 
-  // Create an instruction to extend a lookup table with the provided addresses
-  const extendInstruction = web3.AddressLookupTableProgram.extendLookupTable({
-    payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-    authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-    lookupTable: lookupTableAddress, // The address of the lookup table to extend
-    addresses: addresses.slice(0, 30), // The addresses to add to the lookup table
-  });
+  // Helper function to extend the lookup table in batches
+  const extendLookupTable = async (remainingAddresses: PublicKey[]) => {
+    while (remainingAddresses.length > 0) {
+      const toAdd = remainingAddresses.slice(0, 30); // Add up to 30 addresses
+      remainingAddresses = remainingAddresses.slice(30);
 
-  await sendV0Transaction(connection, user, [
-    lookupTableInst,
-    extendInstruction,
-  ]);
+      const extendInstruction = AddressLookupTableProgram.extendLookupTable({
+        payer: user.publicKey,
+        authority: user.publicKey,
+        lookupTable: lookupTableAddress,
+        addresses: toAdd,
+      });
 
-  var remaining = addresses.slice(30);
+      // Send the transaction to extend the lookup table with the new addresses
+      await sendV0Transaction(connection, user, [extendInstruction]);
+    }
+  };
 
-  while (remaining.length > 0) {
-    const toAdd = remaining.slice(0, 30);
-    remaining = remaining.slice(30);
-    const extendInstruction = web3.AddressLookupTableProgram.extendLookupTable({
-      payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-      authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-      lookupTable: lookupTableAddress, // The address of the lookup table to extend
-      addresses: toAdd, // The addresses to add to the lookup table
-    });
+  // Send the transaction that creates the lookup table
+  await sendV0Transaction(connection, user, [lookupTableInst]);
 
-    await sendV0Transaction(connection, user, [extendInstruction]);
-  }
+  // Extend the lookup table with all of the addresses, 30 at a time
+  await 
extendLookupTable(addresses);
 
   return lookupTableAddress;
 }
@@ -799,7 +858,7 @@ look at the final solution code you can
 
 ## Challenge
 
-As a challenge, experiment with deactivating, closing and freezing lookup
+As a challenge, experiment with deactivating, closing, and freezing lookup
 tables. Remember that you need to wait for a lookup table to finish deactivating
 before you can close it. Also, if a lookup table is frozen, it cannot be
 modified (deactivated or closed), so you will have to test separately or use
diff --git a/content/courses/program-optimization/program-architecture.md b/content/courses/program-optimization/program-architecture.md
index 0af3b57a3..2eaa0bd57 100644
--- a/content/courses/program-optimization/program-architecture.md
+++ b/content/courses/program-optimization/program-architecture.md
@@ -13,7 +13,7 @@ description: "Design your Solana programs efficiently."
 - If your data accounts are too large for the Stack, wrap them in `Box` to
   allocate them to the Heap
 - Use Zero-Copy to deal with accounts that are too large for `Box` (< 10MB)
-- The size and the order of fields in an account matter; put variable length
+- The size and the order of fields in an account matter; put the variable length
   fields at the end
 - Solana can process in parallel, but you can still run into bottlenecks; be
   mindful of "shared" accounts that all users interacting with the program have
@@ -31,28 +31,29 @@ with the code. And you, as the designer, need to think about:
 
 These questions are even more important when developing for a blockchain. Not
 only are resources more limited than in a typical computing environment, you're
-also dealing with people’s assets; code has a cost now.
+also dealing with people's assets.
 
 We'll leave most of the asset handling discussion to
-[security course lesson](/content/courses/program-security/security-intro), but
-it's important to note the nature of resource limitations in Solana development.
-There are, of course, limitations in a typical development environment, but
-there are limitations unique to blockchain and Solana development such as how
-much data can be stored in an account, the cost to store that data, and how many
-compute units are available per transaction. You, the program designer, have to
-be mindful of these limitations to create programs that are affordable, fast,
-safe, and functional. Today we will be delving into some of the more advance
-considerations that should be taken when creating Solana programs.
+[security course lesson](/content/courses/program-security/security-intro.md),
+but it's important to note the nature of resource limitations in Solana
+development. There are, of course, limitations in a typical development
+environment, but there are limitations unique to blockchain and Solana
+development such as how much data can be stored in an account, the cost to store
+that data, and how many compute units are available per transaction. You, the
+program designer, have to be mindful of these limitations to create programs
+that are affordable, fast, safe, and functional. Today we will be delving into
+some of the more advanced considerations that should be taken when creating
+Solana programs.
 
 ### Dealing With Large Accounts
 
-In modern application programming, we don’t often have to think about the size
-of the data structures we are using. 
You want to make a string? You can put a -4000 character limit on it if you want to avoid abuse, but it's probably not an -issue. Want an integer? They’re pretty much always 32-bit for convenience. +In modern application programming, we don't often have to think about the size +of the data structures we are using. Do you want to make a string? You can put a +4000-character limit on it if you want to avoid abuse, but it's probably not an +issue. Want an integer? They're pretty much always 32-bit for convenience. -In high level languages, you are in the data-land-o-plenty! Now, in Solana land, -we pay per byte stored (rent) and have limits on heap, stack and account sizes. +In high-level languages, you are in the data-land-o-plenty! Now, in Solana land, +we pay per byte stored (rent) and have limits on heap, stack, and account sizes. We have to be a little more crafty with our bytes. There are two main concerns we are going to be looking at in this section: @@ -61,31 +62,32 @@ we are going to be looking at in this section: introduce you to the concept of data sizes here. 2. When operating on larger data, we run into - [Stack](https://solana.com/docs/onchain-programs/faq#stack) and - [Heap](https://solana.com/docs/onchain-programs/faq#heap-size) constraints - - to get around these, we’ll look at using Box and Zero-Copy. + [Stack](https://solana.com/docs/programs/faq#stack) and + [Heap](https://solana.com/docs/programs/faq#heap-size) constraints - to get + around these, we'll look at using Box and Zero-Copy. #### Sizes -In Solana a transaction's fee payer pays for each byte stored onchain. We call -this [rent](https://solana.com/docs/core/fees). - -rent is a bit of a misnomer since it never actually gets -permanently taken. Once you deposit rent into the account, that data can stay -there forever or you can get refunded the rent if you close the account. Rent -used to be an actual thing, but now there's an enforced minimum rent exemption. -You can read about it in -[the Solana documentation](https://solana.com/docs/intro/rent). - -Rent etymology aside, putting data on the blockchain can be expensive. It’s why -NFT attributes and associated files, like the image, are stored off-chain. You -ultimately want to strike a balance that leaves your program highly functional -without becoming so expensive that your users don’t want to pay to open the data -account. - -The first thing you need to know before you can start optimizing for space in -your program is the size of each of your structs. Below is a very helpful list -from the +In Solana, a transaction's fee payer pays for each byte stored onchain. This is +called [rent](https://solana.com/docs/core/fees#rent). + + + +Rent is a bit of a misnomer since it never gets permanently taken. Once you +deposit rent into the account, that data can stay there forever, or you can get +refunded the rent if you close the account. Previously, rent was paid in +intervals, similar to traditional rent, but now there's an enforced minimum +balance for rent exemption. You can read more about it in +[the Solana documentation](https://solana.com/docs/core/fees#rent-exempt). + + +Putting data on the blockchain can be expensive, which is why NFT attributes and +associated files, like images, are stored offchain. The goal is to strike a +balance between keeping your program highly functional and ensuring that users +aren't discouraged by the cost of storing data onchain. + +The first step in optimizing for space in your program is understanding the size +of your structs. 
Below is a helpful reference from the [Anchor Book](https://book.anchor-lang.com/anchor_references/space.html). @@ -109,8 +111,8 @@ from the Knowing these, start thinking about little optimizations you might take in a program. For example, if you have an integer field that will only ever reach -100, don’t use a u64/i64, use a u8. Why? Because a u64 takes up 8 bytes, with a -max value of 2^64 or 1.84 \* 10^19. Thats a waste of space since you only need +100, don't use a u64/i64, use a u8. Why? Because a u64 takes up 8 bytes, with a +max value of 2^64 or 1.84 \* 10^19. That's a waste of space since you only need to accommodate numbers up to 100. A single byte will give you a max value of 255 which, in this case, would be sufficient. Similarly, there's no reason to use i8 if you'll never have negative numbers. @@ -127,8 +129,8 @@ If you want to read more about Anchor sizes, take a look at #### Box -Now that you know a little bit about data sizes, let’s skip forward and look at -a problem you’ll run into if you want to deal with larger data accounts. Say you +Now that you know a little bit about data sizes, let's skip forward and look at +a problem you'll run into if you want to deal with larger data accounts. Say you have the following data account: ```rust @@ -144,7 +146,7 @@ pub struct SomeFunctionContext<'info> { ``` If you try to pass `SomeBigDataStruct` into the function with the -`SomeFunctionContext` context, you’ll run into the following compiler warning: +`SomeFunctionContext` context, you'll run into the following compiler warning: `// Stack offset of XXXX exceeded max offset of 4096 by XXXX bytes, please minimize large stack variables` @@ -158,7 +160,9 @@ where that entire `SomeBigDataStruct` gets stored in memory and since 5000 bytes, or 5KB, is greater than the 4KB limit, it will throw a stack error. So how do we fix this? -The answer is the **`Box`** type! +The answer is the +[**`Box`**](https://docs.rs/anchor-lang/latest/anchor_lang/accounts/boxed/index.html) +type! ```rust #[account] @@ -174,7 +178,7 @@ pub struct SomeFunctionContext<'info> { In Anchor, **`Box`** is used to allocate the account to the Heap, not the Stack. Which is great since the Heap gives us 32KB to work with. The best part -is you don’t have to do anything different within the function. All you need to +is you don't have to do anything different within the function. All you need to do is add `Box<…>` around all of your big data accounts. But Box is not perfect. You can still overflow the stack with sufficiently large @@ -182,7 +186,7 @@ accounts. We'll learn how to fix this in the next section. #### Zero Copy -Okay, so now you can deal with medium sized accounts using `Box`. But what if +Okay, so now you can deal with medium-sized accounts using `Box`. But what if you need to use really big accounts like the max size of 10MB? Take the following as an example: @@ -220,13 +224,13 @@ To understand what's happening here, take a look at the [rust Anchor documentation](https://docs.rs/anchor-lang/latest/anchor_lang/attr.account.html) > Other than being more efficient, the most salient benefit [`zero_copy`] -> provides is the ability to define account types larger than the max stack or -> heap size. When using borsh, the account has to be copied and deserialized -> into a new data structure and thus is constrained by stack and heap limits -> imposed by the BPF VM. 
With zero copy deserialization, all bytes from the
-> account’s backing `RefCell<&mut [u8]>` are simply re-interpreted as a
-> reference to the data structure. No allocations or copies necessary. Hence the
-> ability to get around stack and heap limitations.
+> provides is the ability to define account types larger than the max stack or
+> heap size. When using borsh, the account has to be copied and deserialized into
+> a new data structure and thus is constrained by stack and heap limits imposed
+> by the BPF VM. With zero copy deserialization, all bytes from the account's
+> backing `RefCell<&mut [u8]>` are simply re-interpreted as a reference to the
+> data structure. No allocations or copies are necessary. Hence the ability to
+> get around stack and heap limitations.
 
 Basically, your program never actually loads zero-copy account data into the
 stack or heap. It instead gets pointer access to the raw data. The
@@ -244,7 +248,7 @@ pub struct ConceptZeroCopy<'info> {
 }
 ```
 
-Instead, your client has to create the large account and pay for it’s rent in a
+Instead, your client has to create a large account and pay for its rent in a
 separate instruction.
 
 ```typescript
@@ -271,16 +275,16 @@ const txHash = await program.methods
   .rpc();
 ```
 
-The second caveat is that your'll have to call one of the following methods from
-inside your rust instruction function to load the account:
+The second caveat is that you'll have to call one of the following methods from
+inside your Rust instruction handler to load the account:
 
 - `load_init` when first initializing an account (this will ignore the missing
-  account discriminator that gets added only after the user’s instruction code)
+  account discriminator that gets added only after the user's instruction code)
 - `load` when the account is not mutable
 - `load_mut` when the account is mutable
 
 For example, if you wanted to init and manipulate the `SomeReallyBigDataStruct`
-from above, you’d call the following in the function
+from above, you'd call the following in the function:
 
 ```rust
 let some_really_big_data = &mut ctx.accounts.some_really_big_data.load_init()?;
@@ -289,16 +293,16 @@ let some_really_big_data = &mut ctx.accounts.some_really_big_data.load_init()?;
 After you do that, then you can treat the account like normal! Go ahead and
 experiment with this in the code yourself to see everything in action!
 
-For a better understanding on how this all works, Solana put together a really
+For a better understanding of how this all works, Solana put together a really
 nice [video](https://www.youtube.com/watch?v=zs_yU0IuJxc&feature=youtu.be) and
 [code](https://github.com/solana-developers/anchor-zero-copy-example) explaining
 Box and Zero-Copy in vanilla Solana.
 
 ### Dealing with Accounts
 
-Now that you know the nuts and bolts of space consideration on Solana, let’s
-look at some higher level considerations. In Solana, everything is an account,
-so for the next couple sections we'll look at some account architecture
+Now that you know the nuts and bolts of space consideration on Solana, let's
+look at some higher-level considerations. In Solana, everything is an account,
+so for the next couple sections, we'll look at some account architecture
 concepts.
 
 #### Data Order
 
@@ -320,17 +324,18 @@ the location of `id` on the memory map.
 
 To make this more clear, observe what this account's data looks like onchain
 when `flags` has four items in the vector vs eight items. 
If you were to call
-`solana account ACCOUNT_KEY` you’d get a data dump like the following:
+`solana account ACCOUNT_KEY` you'd get a data dump like the following:
 
 ```rust
 0000: 74 e4 28 4e d9 ec 31 0a -> Account Discriminator (8)
-0008: 04 00 00 00 11 22 33 44 -> Vec Size (4) | Data 4*(1)
+0008: 04 00 00 00 11 22 33 44 -> Vec Size (4) | Data 4*(1)
 0010: DE AD BE EF -> id (4)
 
 --- vs ---
 
 0000: 74 e4 28 4e d9 ec 31 0a -> Account Discriminator (8)
-0008: 08 00 00 00 11 22 33 44 -> Vec Size (8) | Data 4*(1)
+0008: 08 00 00 00 11 22 33 44 -> Vec Size (8) | Data 4*(1)
 0010: 55 66 77 88 DE AD BE EF -> Data 4*(1) | id (4)
 ```
 
@@ -344,11 +349,10 @@ the data in the `flags` field took up four more bytes.
 
 The main problem with this is lookup. When you query Solana, you use filters
 that look at the raw data of an account. These are called a `memcmp` filters, or
 memory compare filters. You give the filter an `offset` and `bytes`, and the
-filter then looks directly at the memory, offsetting from the start by the
-`offset` you provide, and compares the bytes in memory to the `bytes` you
-provide.
+filter then looks directly at the memory, offset from the start by the `offset`
+you provide, and compares the bytes in memory to the `bytes` you provide.
 
-For example, you know that the `flags` struct will always start at address
+For example, you know that the `flags` struct will always start at the address
 0x0008 since the first 8 bytes contain the account discriminator. Querying all
 accounts where the `flags` length is equal to four is possible because we _know_
 that the four bytes at 0x0008 represent the length of the data in `flags`. Since
@@ -367,13 +371,14 @@ const states = await program.account.badState.all([
 
 However, if you wanted to query by the `id`, you wouldn't know what to put for
 the `offset` since the location of `id` is variable based on the length of
-`flags`. That doesn’t seem very helpful. IDs are usually there to help with
+`flags`. That doesn't seem very helpful. IDs are usually there to help with
 queries! The simple fix is to flip the order.
 
 ```rust
-#[account] // Anchor hides the account disriminator
+#[account] // Anchor hides the account discriminator
 pub struct GoodState {
-    pub id: u32 // 0xDEAD_BEEF
+    pub id: u32, // 0xDEAD_BEEF
     pub flags: Vec<u8>, // 0x11, 0x22, 0x33 ...
 }
 ```
 
@@ -383,83 +388,229 @@ accounts based on all the fields up to the first variable length field.
 
 To echo the beginning of this section: As a rule of thumb, keep all variable
 length structs at the end of the account.
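+With `id` now at a fixed offset (right after the 8-byte discriminator), the
+`memcmp` query becomes straightforward. A sketch, assuming an Anchor client
+`program` for this IDL and the `bs58` package for encoding the filter bytes:
+
+```typescript
+import bs58 from "bs58";
+
+// Fetch only GoodState accounts whose id field matches 0xDEAD_BEEF
+const states = await program.account.goodState.all([
+  {
+    memcmp: {
+      offset: 8, // Skip the account discriminator
+      bytes: bs58.encode(Buffer.from([0xde, 0xad, 0xbe, 0xef])),
+    },
+  },
+]);
+```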
 
-#### For Future Use
+#### Account Flexibility and Future-Proofing
 
-In certain cases, consider adding extra, unused bytes to you accounts. These are
-held in reserve for flexibility and backward compatibility. Take the following
-example:
+When developing Solana programs, it's crucial to design your account structures
+with future upgrades and backward compatibility in mind. Solana offers powerful
+features like account resizing and Anchor's `InitSpace` attribute to handle
+these challenges efficiently. Let's explore a more dynamic and flexible approach
+using a game state example:
 
 ```rust
+use anchor_lang::prelude::*;
+
 #[account]
-pub struct GameState {
+#[derive(InitSpace)]
+pub struct GameState { // V1
+    pub version: u8,
     pub health: u64,
     pub mana: u64,
-    pub event_log: Vec<String>
+    pub experience: Option<u64>,
+    #[max_len(50)]
+    pub event_log: Vec<String>,
 }
 ```
 
-In this simple game state, a character has `health`, `mana`, and an event log.
-If at some point you are making game improvements and want to add an
-`experience` field, you'd hit a snag. The `experience` field should be a number
-like a `u64`, which is simple enough to add. You can
-[reallocate the account](/developers/courses/onchain-development/anchor-pdas)
-and add space.
-
-However, to keep dynamic length fields, like `event_log`, at the end of the
-struct, you would need to do some memory manipulation on all reallocated
-accounts to move the location of `event_log`. This can be complicated and makes
-querying accounts far more difficult. You'll end up in a state where
-non-migrated accounts have `event_log` in one location and migrated accounts in
-another. The old `GameState` without `experience` and the new `GameState` with
-`experience` in it are no longer compatible. Old accounts won't serialize when
-used where new accounts are expected. Queries will be far more difficult. You'll
-likely need to create a migration system and ongoing logic to maintain backward
-compatibility. Ultimately, it begins to seem like a bad idea.
-
-Fortunately, if you think ahead, you can add a `for_future_use` field that
-reserves some bytes where you expect to need them most.
+In this GameState, we have:
+
+- A `version` field to track account structure changes
+- Basic character attributes (`health`, `mana`)
+- An `experience` field as `Option<u64>` for backward compatibility
+- An `event_log` with a specified maximum length
+
+Key advantages of this approach:
+
+1. **Automatic Space Calculation**: The `InitSpace` attribute automatically
+   calculates the required account space.
+2. **Versioning**: The `version` field allows for easy identification of account
+   structure versions.
+3. **Flexible Fields**: Using `Option` for new fields maintains compatibility
+   with older versions.
+4. **Defined Limits**: The `max_len` attribute on `Vec` fields clearly
+   communicates size constraints.
+
+When you need to upgrade your account structure, such as increasing the length
+of `event_log` or adding new fields, you can use a single upgrade instruction
+with Anchor's `realloc` constraint:
+
+1. Update the `GameState` struct with new fields or increased `max_len`
+   attributes:
+
+   ```rust
+   #[account]
+   #[derive(InitSpace)]
+   pub struct GameState {
+       pub version: u8,
+       pub health: u64,
+       pub mana: u64,
+       pub experience: Option<u64>,
+       #[max_len(100)] // Increased from 50
+       pub event_log: Vec<String>,
+       pub new_field: Option<u64>, // Added new field
+   }
+   ```
+
+2. Use a single `UpgradeGameState` context for all upgrades with Anchor's
+   `realloc` constraint for `GameState`:
+
+   ```rust
+   #[derive(Accounts)]
+   pub struct UpgradeGameState<'info> {
+       #[account(
+           mut,
+           realloc = 8 + GameState::INIT_SPACE, // Discriminator + fields
+           realloc::payer = payer,
+           realloc::zero = false,
+       )]
+       pub game_state: Account<'info, GameState>,
+       #[account(mut)]
+       pub payer: Signer<'info>,
+       pub system_program: Program<'info, System>,
+   }
+   ```
+
+3. 
Implement the upgrade logic in a single function:
 
+   ```rust
+   pub fn upgrade_game_state(ctx: Context<UpgradeGameState>) -> Result<()> {
+       let game_state = &mut ctx.accounts.game_state;
+
+       match game_state.version {
+           1 => {
+               game_state.version = 2;
+               game_state.experience = Some(0);
+               msg!("Upgraded to version 2");
+           },
+           2 => {
+               game_state.version = 3;
+               game_state.new_field = Some(0);
+               msg!("Upgraded to version 3");
+           },
+           _ => return Err(ErrorCode::AlreadyUpgraded.into()),
+       }
+
+       Ok(())
+   }
+   ```
+
+Here is the full example demonstrating this approach:
 
 ```rust
+use anchor_lang::prelude::*;
+
 #[account]
-pub struct GameState { //V1
+#[derive(InitSpace)]
+pub struct GameState {
+    pub version: u8,
     pub health: u64,
     pub mana: u64,
-    pub for_future_use: [u8; 128],
-    pub event_log: Vec<String>
+    pub experience: Option<u64>,
+    #[max_len(100)] // Increased from 50
+    pub event_log: Vec<String>,
+    pub new_field: Option<u64>,
 }
-```
-
-That way, when you go to add `experience` or something similar, it looks like
-this and both the old and new accounts are compatible.
+
+#[derive(Accounts)]
+pub struct UpgradeGameState<'info> {
+    #[account(
+        mut,
+        realloc = 8 + GameState::INIT_SPACE, // Discriminator + fields
+        realloc::payer = payer,
+        realloc::zero = false,
+    )]
+    pub game_state: Account<'info, GameState>,
+    #[account(mut)]
+    pub payer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
 
-```rust
-#[account]
-pub struct GameState { //V2
-    pub health: u64,
-    pub mana: u64,
-    pub experience: u64,
-    pub for_future_use: [u8; 120],
-    pub event_log: Vec<String>
+#[program]
+pub mod your_program {
+    use super::*;
+
+    // ... other instructions ...
+
+    pub fn upgrade_game_state(ctx: Context<UpgradeGameState>) -> Result<()> {
+        let game_state = &mut ctx.accounts.game_state;
+
+        match game_state.version {
+            1 => {
+                game_state.version = 2;
+                game_state.experience = Some(0);
+                msg!("Upgraded to version 2");
+            },
+            2 => {
+                game_state.version = 3;
+                game_state.new_field = Some(0);
+                msg!("Upgraded to version 3");
+            },
+            _ => return Err(ErrorCode::AlreadyUpgraded.into()),
+        }
+
+        Ok(())
+    }
+}
+
+#[error_code]
+pub enum ErrorCode {
+    #[msg("Account is already at the latest version")]
+    AlreadyUpgraded,
 }
 ```
 
-These extra bytes do add to the cost of using your program. However, it seems
-well worth the benefit in most cases.
+This approach:
+
+- Uses Anchor's
+  [`realloc`](https://docs.rs/anchor-lang/latest/anchor_lang/derive.Accounts.html#normal-constraints)
+  constraint to automatically handle account resizing.
+- The
+  [`InitSpace`](https://docs.rs/anchor-lang/latest/anchor_lang/derive.InitSpace.html)
+  derive macro automatically implements the `Space` trait for the `GameState`
+  struct. This trait includes the
+  [`INIT_SPACE`](https://docs.rs/anchor-lang/latest/anchor_lang/trait.Space.html#associatedconstant.INIT_SPACE)
+  associated constant, which calculates the total space required for the
+  account's fields.
+- Designates a payer for any additional rent with `realloc::payer = payer`.
+- Keeps existing data with `realloc::zero = false`.
+
+Account data can be increased within a single call by up to
+`solana_program::entrypoint::MAX_PERMITTED_DATA_INCREASE` bytes.
+
+Memory used to grow is already zero-initialized upon program entrypoint and
+re-zeroing it wastes compute units. If within the same call a program reallocs
+from larger to smaller and back to larger again the new space could contain
+stale data. Pass `true` for `zero_init` in this case, otherwise compute units
+will be wasted re-zero-initializing. 
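+On the client, triggering this upgrade is an ordinary Anchor instruction call.
+A sketch with hypothetical names, assuming an Anchor `program` client, a funded
+`payer` signer, and a `gameStatePda` address for the account being resized:
+
+```typescript
+// Runs the version-matching upgrade logic and reallocs the account
+await program.methods
+  .upgradeGameState()
+  .accounts({
+    gameState: gameStatePda,
+    payer: payer.publicKey,
+  })
+  .rpc();
+```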
-
-So as a general rule of thumb: anytime you think your account types have the
-potential to change in a way that will require some kind of complex migration,
-add in some `for_future_use` bytes.
+
+While account resizing is powerful, use it judiciously. Consider the trade-offs
+between frequent resizing and initial allocation based on your specific use case
+and expected growth patterns.
+
+- Always ensure your account remains rent-exempt before resizing.
+- The payer of the transaction is responsible for providing the additional
+  lamports.
+- Consider the cost implications of frequent resizing in your program design.
+
+In native Rust, you can resize accounts using the `realloc()` method. For more
+details, refer to the
+[account resizing program](/content/cookbook/programs/change-account-size.md).
 
 #### Data Optimization
 
 The idea here is to be aware of wasted bits. For example, if you have a field
-that represents the month of the year, don’t use a `u64`. There will only ever
+that represents the month of the year, don't use a `u64`. There will only ever
 be 12 months. Use a `u8`. Better yet, use a `u8` Enum and label the months.
 
 To get even more aggressive on bit savings, be careful with booleans. Look at
 the below struct composed of eight boolean flags. While a boolean _can_ be
 represented as a single bit, borsh deserialization will allocate an entire byte
-to each of these fields. that means that eight booleans winds up being eight
+to each of these fields. That means that eight booleans wind up being eight
 bytes instead of eight bits, an eight times increase in size.
 
 ```rust
@@ -536,11 +687,10 @@ Depending on the seeding you can create all sorts of relationships:
   program. For example, if your program needs a lookup table, you could seed it
   with `seeds=[b"Lookup"]`. Just be careful to provide appropriate access
   restrictions.
-- One-Per-Owner - Say you’re creating a video game player account and you only
-  want one player account per wallet. Then you’d seed the account with
-  `seeds=[b"PLAYER", owner.key().as_ref()]`. This way, you’ll always know where
-  to look for a wallet’s player account **and** there can only ever be one of
-  them.
+- One-Per-Owner - Say you're creating a video game player account and you only
+  want one player account per wallet. Then you'd seed the account with
+  `seeds=[b"PLAYER", owner.key().as_ref()]`. This way, you'll always know where
+  to look for a wallet's player account **and** there can only ever be one of
+  them.
 - Multiple-Per-Owner - Okay, but what if you want multiple accounts per wallet?
   Say you want to mint podcast episodes. Then you could seed your `Podcast`
   account like this:
@@ -555,8 +705,8 @@ From there you can mix and match in all sorts of clever ways! But the preceding
 list should give you enough to get started.
 
 The big benefit of really paying attention to this aspect of design is answering
-the ‘indexing’ problem. Without PDAs and seeds, all users would have to keep
-track of all of the addresses of all of the accounts they’ve ever used. This
+the 'indexing' problem. Without PDAs and seeds, all users would have to keep
+track of all of the addresses of all of the accounts they've ever used. This
 isn't feasible for users, so they'd have to depend on a centralized entity to
 store their addresses in a database. In many ways that defeats the purpose of a
 globally distributed network. PDAs are a much better solution. 
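+On the client, these seed schemes map directly to
+`PublicKey.findProgramAddressSync` from `@solana/web3.js`. A sketch of deriving
+the one-per-owner player PDA, with placeholder values for illustration:
+
+```typescript
+import { Keypair, PublicKey } from "@solana/web3.js";
+
+// Placeholder program ID and wallet for illustration
+const programId = new PublicKey("11111111111111111111111111111111");
+const owner = Keypair.generate().publicKey;
+
+// Mirrors seeds=[b"PLAYER", owner.key().as_ref()] onchain
+const [playerPda] = PublicKey.findProgramAddressSync(
+  [Buffer.from("PLAYER"), owner.toBuffer()],
+  programId,
+);
+console.log("Player PDA:", playerPda.toBase58());
+```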
@@ -583,7 +733,7 @@
 seeds=[b"Podcast", channel_account.key().as_ref(), episode_number.to_be_bytes().
 
 You can always find the channel account for a particular owner. And since the
 channel stores the number of episodes created, you always know the upper bound
-of where to search for queries. Additionally you always know what index to
+of where to search for queries. Additionally, you always know what index to
 create a new episode at: `index = episodes_created`.
 
 ```rust
@@ -599,24 +749,24 @@ Podcast X: seeds=[b"Podcast", channel_account.key().as_ref(), X.to_be_bytes().as
 
 One of the main reasons to choose Solana for your blockchain environment is its
 parallel transaction execution. That is, Solana can run transactions in parallel
 as long as those transactions aren't trying to write data to the same account.
-This improves program throughput out of the box, but with some proper planning
+This improves program throughput out of the box, but with some proper planning,
 you can avoid concurrency issues and really boost your program's performance.
 
 #### Shared Accounts
 
-If you’ve been around crypto for a while, you may have experienced a big NFT
-mint event. A new NFT project is coming out, everyone is really excited for it,
-and then the candymachine goes live. It’s a mad dash to click
+If you've been around crypto for a while, you may have experienced a big NFT
+mint event. A new NFT project is coming out, everyone is really excited about
+it, and then the candymachine goes live. It's a mad dash to click
 `accept transaction` as fast as you can. If you were clever, you may have
-written a bot to enter in the transactions faster that the website’s UI could.
-This mad rush to mint creates a lot of failed transactions. But why? Because
-everyone is trying to write data to the same Candy Machine account.
+written a bot to enter the transactions faster than the website's UI could. This
+mad rush to mint creates a lot of failed transactions. But why? Because everyone
+is trying to write data to the same Candy Machine account.
 
 Take a look at a simple example:
 
 Alice and Bob are trying to pay their friends Carol and Dean respectively. All
-four accounts change, but neither depend on each other. Both transactions can
-run at the same time.
+four accounts change, but neither depends on the other. Both transactions can
+run at the same time.
 
 ```rust
 Alice -- pays --> Carol
@@ -629,18 +779,18 @@ issues.
 
 ```rust
 Alice -- pays --> |
-                  -- > Carol
+                   -- > Carol
 Bob -- pays --- |
 ```
 
 Since both of these transactions write to Carol's token account, only one of
-them can go through at a time. Fortunately, Solana is wicked fast, so it’ll
+them can go through at a time. Fortunately, Solana is very fast, so it'll
 probably seem like they get paid at the same time. But what happens if more than
 just Alice and Bob try to pay Carol?
 
 ```rust
 Alice -- pays --> |
-                  -- > Carol
+                   -- > Carol
 x1000 -- pays --- |
 Bob -- pays --- |
 ```
 
@@ -658,7 +808,7 @@ trying to write data to the same account all at once.
 Imagine you create a super popular program and you want to take a fee on every
 transaction you process. For accounting reasons, you want all of those fees to
 go to one wallet. With that setup, on a surge of users, your protocol will
-become slow and or become unreliable. Not great. So what’s the solution?
+become slow and/or unreliable. Not great. So what's the solution?
 
 Separate the data transaction from the fee transaction.
 
 For example, imagine you have a data account called `DonationTally`. 
Its only @@ -675,7 +825,7 @@ pub struct DonationTally { } ``` -First let’s look at the suboptimal solution. +First, let's look at the suboptimal solution. ```rust pub fn run_concept_shared_account_bottleneck(ctx: Context, lamports_to_donate: u64) -> Result<()> { @@ -707,8 +857,8 @@ pub fn run_concept_shared_account_bottleneck(ctx: ContextThis lab was created with Anchor version `0.28.0` in mind. -If there are problems compiling, please refer to the -[solution code](https://github.com/Unboxed-Software/anchor-rpg/tree/challenge-solution) -for the environment setup. + + +This lab was created with Anchor version `0.30.1` in mind. If there are problems +compiling, please refer to the +[solution code](https://github.com/solana-developers/anchor-rpg/tree/main) for +the environment setup. -Next, replace the program ID in `programs/rpg/lib.rs` and `Anchor.toml` with the -program ID shown when you run `anchor keys list`. +Next, run the command `anchor keys sync` that will automatically sync your +program ID. This command updates the program IDs in your program files +(including `Anchor.toml`) with the actual `pubkey` from the program keypair +file. -Finally, let's scaffold out the program in the `lib.rs` file. To make following -along easier, we're going to keep everything in one file. We'll augment this -with section comments for better organization and navigation. Copy the following +Finally, let's scaffold out the program in the `lib.rs` file. Copy the following into your file before we get started: -```rust +```rust filename="lib.rs" use anchor_lang::prelude::*; -use anchor_lang::system_program::{Transfer, transfer}; use anchor_lang::solana_program::log::sol_log_compute_units; declare_id!("YOUR_KEY_HERE__YOUR_KEY_HERE"); -// ----------- ACCOUNTS ---------- - -// ----------- GAME CONFIG ---------- - -// ----------- STATUS ---------- - -// ----------- INVENTORY ---------- - -// ----------- HELPER ---------- - -// ----------- CREATE GAME ---------- - -// ----------- CREATE PLAYER ---------- +#[program] +pub mod rpg { + use super::*; -// ----------- SPAWN MONSTER ---------- + pub fn create_game(ctx: Context, max_items_per_player: u8) -> Result<()> { + run_create_game(ctx, max_items_per_player)?; + sol_log_compute_units(); + Ok(()) + } -// ----------- ATTACK MONSTER ---------- + pub fn create_player(ctx: Context) -> Result<()> { + run_create_player(ctx)?; + sol_log_compute_units(); + Ok(()) + } -// ----------- REDEEM TO TREASURY ---------- + pub fn spawn_monster(ctx: Context) -> Result<()> { + run_spawn_monster(ctx)?; + sol_log_compute_units(); + Ok(()) + } -#[program] -pub mod rpg { - use super::*; + pub fn attack_monster(ctx: Context) -> Result<()> { + run_attack_monster(ctx)?; + sol_log_compute_units(); + Ok(()) + } + pub fn deposit_action_points(ctx: Context) -> Result<()> { + run_collect_action_points(ctx)?; + sol_log_compute_units(); + Ok(()) + } } ``` -#### 2. Create Account Structures +### 2. Create Account Structures Now that our initial setup is ready, let's create our accounts. We'll have 3: @@ -903,7 +1063,6 @@ Now that our initial setup is ready, let's create our accounts. We'll have 3: - `experience` - the player's experience - `kills` - number of monsters killed - `next_monster_index` - the index of the next monster to face - - `for_future_use` - 256 bytes reserved for future use - `inventory` - a vector of the player's inventory 3. 
`Monster` - A PDA account whose address is derived using the game
   account address, the player's wallet address, and an index (the one stored as
@@ -912,21 +1071,55 @@ Now that our initial setup is ready, let's create our accounts. We'll have 3:
   - `game` - the game the monster is associated with
   - `hitpoints` - how many hit points the monster has left

+This is the final project structure:
+
+```bash
+src/
+├── constants.rs       # Constants used throughout the program
+├── error/             # Error module
+│   ├── errors.rs      # Custom error definitions
+│   └── mod.rs         # Module declarations for error handling
+├── helpers.rs         # Helper functions used across the program
+├── instructions/      # Instruction handlers for different game actions
+│   ├── attack_monster.rs  # Handles attacking a monster
+│   ├── collect_points.rs  # Handles collecting points
+│   ├── create_game.rs     # Handles game creation
+│   ├── create_player.rs   # Handles player creation
+│   ├── mod.rs             # Module declarations for instructions
+│   └── spawn_monster.rs   # Handles spawning a new monster
+├── lib.rs             # Main entry point for the program
+└── state/             # State module for game data structures
+    ├── game.rs        # Game state representation
+    ├── mod.rs         # Module declarations for state
+    ├── monster.rs     # Monster state representation
+    └── player.rs      # Player state representation
+```
+
When added to the program, the accounts should look like this:

```rust
// ----------- ACCOUNTS ----------

-#[account]
-pub struct Game { // 8 bytes
-    pub game_master: Pubkey, // 32 bytes
-    pub treasury: Pubkey, // 32 bytes
-
-    pub action_points_collected: u64, // 8 bytes
+// Inside `state/game.rs`
+use anchor_lang::prelude::*;

+#[account]
+#[derive(InitSpace)]
+pub struct Game {
+    pub game_master: Pubkey,
+    pub treasury: Pubkey,
+    pub action_points_collected: u64,
    pub game_config: GameConfig,
}

+#[derive(AnchorSerialize, AnchorDeserialize, Clone, InitSpace)]
+pub struct GameConfig {
+    pub max_items_per_player: u8
+}
+
+// Inside `state/player.rs`
+use anchor_lang::prelude::*;

#[account]
+#[derive(InitSpace)]
pub struct Player { // 8 bytes
    pub player: Pubkey, // 32 bytes
    pub game: Pubkey, // 32 bytes
@@ -939,92 +1132,125 @@ pub struct Player { // 8 bytes
    pub kills: u64, // 8 bytes
    pub next_monster_index: u64, // 8 bytes

-    pub for_future_use: [u8; 256], // Attack/Speed/Defense/Health/Mana?? Metadata??
-
    pub inventory: Vec<InventoryItem>, // Max 8 items
}

-#[account]
-pub struct Monster { // 8 bytes
-    pub player: Pubkey, // 32 bytes
-    pub game: Pubkey, // 32 bytes
+#[derive(AnchorSerialize, AnchorDeserialize, Clone, InitSpace)]
+pub struct InventoryItem {
+    pub name: [u8; 32], // Fixed Name up to 32 bytes
+    pub amount: u64
+}
+

-    pub hitpoints: u64, // 8 bytes
+// Inside `state/monster.rs`
+use anchor_lang::prelude::*;

+#[account]
+#[derive(InitSpace)]
+pub struct Monster {
+    pub player: Pubkey,
+    pub game: Pubkey,
+    pub hitpoints: u64,
}
```

There aren't a lot of complicated design decisions here, but let's talk about
-the `inventory` and `for_future_use` fields on the `Player` struct. Since
-`inventory` is variable in length we decided to place it at the end of the
-account to make querying easier. We've also decided it's worth spending a little
-extra money on rent exemption to have 256 bytes of reserved space in the
-`for_future_use` field. We could exclude this and simply reallocate accounts if
-we need to add fields in the future, but adding it now simplifies things for us
-in the future.
+the `inventory` field on the `Player` struct.
Since `inventory` is variable in
+length, we decided to place it at the end of the account to make querying easier.

-If we chose to reallocate in the future, we'd need to write more complicated
-queries and likely couldn't query in a single call based on `inventory`.
-Reallocating and adding a field would move the memory position of `inventory`,
-leaving us to write complex logic to query accounts with various structures.
-
-#### 3. Create ancillary types
+### 3. Create Ancillary Types

The next thing we need to do is add some of the types our accounts reference
that we haven't created yet. Let's start with the game config struct.
Technically, this could have gone in the `Game` account, but it's nice to have
some separation and encapsulation.

-This struct should store the max items allowed per player and some bytes for
-future use. Again, the bytes for future use here help us avoid complexity in the
-future. Reallocating accounts works best when you're adding fields at the end of
-an account rather than in the middle. If you anticipate adding fields in the
-middle of existing date, it might make sense to add some "future use" bytes up
-front.
+This struct should store the max items allowed per player.

-```rust
+```rust filename="game.rs"
// ----------- GAME CONFIG ----------
-
-#[derive(Clone, AnchorSerialize, AnchorDeserialize)]
+// Inside `state/game.rs`
+#[derive(AnchorSerialize, AnchorDeserialize, Clone, InitSpace)]
pub struct GameConfig {
-    pub max_items_per_player: u8,
-    pub for_future_use: [u64; 16], // Health of Enemies?? Experience per item?? Action Points per Action??
+    pub max_items_per_player: u8
}
```

+Reallocating accounts in Solana programs has become more flexible due to
+Anchor's
+[`realloc`](https://docs.rs/anchor-lang/latest/anchor_lang/derive.Accounts.html#normal-constraints)
+account constraint and Solana's account resizing capabilities. While adding
+fields at the end of an account structure remains straightforward, modern
+practices allow for more adaptable designs:
+
+1. Use Anchor's `realloc` constraint in the `#[account()]` attribute to specify
+   resizing parameters:
+
+   ```rust
+   #[account(
+       mut,
+       realloc = AccountStruct::INIT_SPACE,
+       realloc::payer = payer,
+       realloc::zero = false,
+   )]
+   ```
+
+2. Use Anchor's `InitSpace` attribute to automatically calculate account space.
+3. For variable-length fields like `Vec` or `String`, use the `max_len`
+   attribute to specify maximum size.
+4. When adding new fields, consider using `Option<T>` for backward
+   compatibility.
+5. Implement a versioning system in your account structure to manage different
+   layouts.
+6. Ensure the payer account is mutable and a signer to cover reallocation costs:
+
+   ```rust
+   #[account(mut)]
+   pub payer: Signer<'info>,
+   ```
+
+This approach allows for easier account structure evolution, regardless of where
+new fields are added, while maintaining efficient querying and
+serialization/deserialization through Anchor's built-in capabilities. It enables
+resizing accounts as needed, automatically handling rent-exemption.
+
Next, let's create our status flags. Remember, we _could_ store our flags as
booleans but we save space by storing multiple flags in a single byte. Each flag
takes up a different bit within the byte. We can use the `<<` operator to place
`1` in the correct bit.
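As a quick illustration of how such flags are combined, tested, and cleared with
bitwise operators, here is a standalone sketch (editorial, not part of the lab
code) that mirrors the flag constants defined next:

```rust
// Standalone sketch: combining, testing, and clearing bit flags.
// These constants mirror the ones defined in `constants.rs` below.
const IS_FROZEN_FLAG: u8 = 1 << 0;
const IS_POISONED_FLAG: u8 = 1 << 1;
const NO_EFFECT_FLAG: u8 = 0b00000000;

fn main() {
    let mut status_flag = NO_EFFECT_FLAG;

    // Set flags with bitwise OR
    status_flag |= IS_FROZEN_FLAG | IS_POISONED_FLAG;

    // Test a flag with bitwise AND
    assert!(status_flag & IS_FROZEN_FLAG != 0); // frozen

    // Clear a flag by AND-ing with the inverted mask
    status_flag &= !IS_FROZEN_FLAG;
    assert!(status_flag & IS_FROZEN_FLAG == 0); // no longer frozen
    assert!(status_flag & IS_POISONED_FLAG != 0); // still poisoned
}
```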
-```rust
+```rust filename="constants.rs"
// ----------- STATUS ----------

-const IS_FROZEN_FLAG: u8 = 1 << 0;
-const IS_POISONED_FLAG: u8 = 1 << 1;
-const IS_BURNING_FLAG: u8 = 1 << 2;
-const IS_BLESSED_FLAG: u8 = 1 << 3;
-const IS_CURSED_FLAG: u8 = 1 << 4;
-const IS_STUNNED_FLAG: u8 = 1 << 5;
-const IS_SLOWED_FLAG: u8 = 1 << 6;
-const IS_BLEEDING_FLAG: u8 = 1 << 7;
-const NO_EFFECT_FLAG: u8 = 0b00000000;
+pub const IS_FROZEN_FLAG: u8 = 1 << 0;
+pub const IS_POISONED_FLAG: u8 = 1 << 1;
+pub const IS_BURNING_FLAG: u8 = 1 << 2;
+pub const IS_BLESSED_FLAG: u8 = 1 << 3;
+pub const IS_CURSED_FLAG: u8 = 1 << 4;
+pub const IS_STUNNED_FLAG: u8 = 1 << 5;
+pub const IS_SLOWED_FLAG: u8 = 1 << 6;
+pub const IS_BLEEDING_FLAG: u8 = 1 << 7;
+
+pub const NO_EFFECT_FLAG: u8 = 0b00000000;
+pub const ANCHOR_DISCRIMINATOR: usize = 8;
+pub const MAX_INVENTORY_ITEMS: usize = 8;
```

Finally, let's create our `InventoryItem`. This should have fields for the
-item's name, amount, and some bytes reserved for future use.
+item's name and amount.

-```rust
+```rust filename="player.rs"
// ----------- INVENTORY ----------

-#[derive(Clone, AnchorSerialize, AnchorDeserialize)]
+// Inside `state/player.rs`
+#[derive(AnchorSerialize, AnchorDeserialize, Clone, InitSpace)]
pub struct InventoryItem {
    pub name: [u8; 32], // Fixed Name up to 32 bytes
-    pub amount: u64,
-    pub for_future_use: [u8; 128], // Metadata?? // Effects // Flags?
+    pub amount: u64
}
+
```

-#### 4. Create helper function for spending action points
+### 4. Create a helper function for spending action points

The last thing we'll do before writing the program's instructions is create a
helper function for spending action points. Players will send action points
@@ -1040,26 +1266,40 @@ that will send the lamports from that account to the treasury in one fell
swoop. This alleviates any concurrency issues since every player has their own
account, but also allows the program to retrieve those lamports at any time.

-```rust
+```rust filename="helpers.rs"
// ----------- HELPER ----------

+// Inside /src/helpers.rs
+use anchor_lang::{prelude::*, system_program};
+
+use crate::{error::RpgError, Player};
+
pub fn spend_action_points<'info>(
    action_points: u64,
    player_account: &mut Account<'info, Player>,
    player: &AccountInfo<'info>,
    system_program: &AccountInfo<'info>,
) -> Result<()> {
-
-    player_account.action_points_spent = player_account.action_points_spent.checked_add(action_points).unwrap();
-    player_account.action_points_to_be_collected = player_account.action_points_to_be_collected.checked_add(action_points).unwrap();
-
-    let cpi_context = CpiContext::new(
-        system_program.clone(),
-        Transfer {
-            from: player.clone(),
-            to: player_account.to_account_info().clone(),
-        });
-    transfer(cpi_context, action_points)?;
+    player_account.action_points_spent = player_account
+        .action_points_spent
+        .checked_add(action_points)
+        .ok_or(error!(RpgError::ArithmeticOverflow))?;
+
+    player_account.action_points_to_be_collected = player_account
+        .action_points_to_be_collected
+        .checked_add(action_points)
+        .ok_or(error!(RpgError::ArithmeticOverflow))?;
+
+    system_program::transfer(
+        CpiContext::new(
+            system_program.to_account_info(),
+            system_program::Transfer {
+                from: player.to_account_info(),
+                to: player_account.to_account_info(),
+            },
+        ),
+        action_points,
+    )?;

    msg!("Minus {} action points", action_points);

@@ -1067,7 +1307,7 @@ pub fn spend_action_points<'info>(
}
```

-#### 5. Create Game
+### 5. Create Game

Our first instruction will create the `game` account.
Anyone can be a
`game_master` and create their own game, but once a game has been created there
@@ -1077,48 +1317,54 @@
For one, the `game` account is a PDA using its `treasury` wallet. This ensures
that the same `game_master` can run multiple games if they use a different
treasury for each.

-Also note that the `treasury` is a signer on the instruction. This is to make
-sure whoever is creating the game has the private keys to the `treasury`. This
-is a design decision rather than "the right way." Ultimately, it's a security
-measure to ensure the game master will be able to retrieve their funds.
+

-```rust
+The `treasury` is a signer on the instruction. This is to make sure whoever is
+creating the game has the private keys to the `treasury`. This is a design
+decision rather than "the right way." Ultimately, it's a security measure to
+ensure the game master will be able to retrieve their funds.
+
+```rust filename="create_game.rs"
// ----------- CREATE GAME ----------

+// Inside src/instructions/create_game.rs
+use anchor_lang::prelude::*;
+
+use crate::{error::RpgError, Game, ANCHOR_DISCRIMINATOR};
+
#[derive(Accounts)]
pub struct CreateGame<'info> {
    #[account(
        init,
-        seeds=[b"GAME", treasury.key().as_ref()],
+        seeds = [b"GAME", treasury.key().as_ref()],
        bump,
        payer = game_master,
-        space = std::mem::size_of::<Game>()+ 8
+        space = ANCHOR_DISCRIMINATOR + Game::INIT_SPACE
    )]
    pub game: Account<'info, Game>,
-
    #[account(mut)]
    pub game_master: Signer<'info>,
-
-    /// CHECK: Need to know they own the treasury
    pub treasury: Signer<'info>,
    pub system_program: Program<'info, System>,
}

pub fn run_create_game(ctx: Context<CreateGame>, max_items_per_player: u8) -> Result<()> {
+    if max_items_per_player == 0 {
+        return Err(error!(RpgError::InvalidGameConfig));
+    }

-    ctx.accounts.game.game_master = ctx.accounts.game_master.key().clone();
-    ctx.accounts.game.treasury = ctx.accounts.treasury.key().clone();
-
-    ctx.accounts.game.action_points_collected = 0;
-    ctx.accounts.game.game_config.max_items_per_player = max_items_per_player;
+    let game = &mut ctx.accounts.game;
+    game.game_master = ctx.accounts.game_master.key();
+    game.treasury = ctx.accounts.treasury.key();
+    game.action_points_collected = 0;
+    game.game_config.max_items_per_player = max_items_per_player;

    msg!("Game created!");
-
    Ok(())
}
```

-#### 6. Create Player
+### 6. Create Player

Our second instruction will create the `player` account. There are three
tradeoffs to note about this instruction:
@@ -1133,62 +1379,67 @@ tradeoffs to note about this instruction:
   100 lamports, but this could be something added to the game config in the
   future.
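Since the player account is a PDA, its address can be recomputed
deterministically from the seeds shown in the accounts struct below
(`b"PLAYER"`, the game address, and the player's wallet). Here is a minimal,
standalone sketch of that derivation; the keys used here are placeholders, not
values from the lab:

```rust
use anchor_lang::prelude::*;

// Sketch: recomputing the player PDA off-chain. The seed layout mirrors the
// `seeds` constraint in the accounts struct below; the keys are placeholders.
fn derive_player_pda(program_id: &Pubkey, game: &Pubkey, player: &Pubkey) -> (Pubkey, u8) {
    let seeds: &[&[u8]] = &[b"PLAYER", game.as_ref(), player.as_ref()];
    Pubkey::find_program_address(seeds, program_id)
}

fn main() {
    let (program_id, game, player) = (
        Pubkey::new_unique(),
        Pubkey::new_unique(),
        Pubkey::new_unique(),
    );
    let (player_pda, bump) = derive_player_pda(&program_id, &game, &player);
    println!("player account: {player_pda}, bump: {bump}");
}
```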
-```rust
+```rust filename="create_player.rs"
// ----------- CREATE PLAYER ----------

+// Inside src/instructions/create_player.rs
+use anchor_lang::prelude::*;
+
+use crate::{
+    error::RpgError, helpers::spend_action_points, Game, Player, ANCHOR_DISCRIMINATOR,
+    CREATE_PLAYER_ACTION_POINTS, NO_EFFECT_FLAG,
+};
+
#[derive(Accounts)]
pub struct CreatePlayer<'info> {
    pub game: Box<Account<'info, Game>>,
-
    #[account(
        init,
-        seeds=[
+        seeds = [
            b"PLAYER",
            game.key().as_ref(),
            player.key().as_ref()
        ],
        bump,
        payer = player,
-        space = std::mem::size_of::<Player>() + std::mem::size_of::<InventoryItem>() * game.game_config.max_items_per_player as usize + 8)
-    ]
+        space = ANCHOR_DISCRIMINATOR + Player::INIT_SPACE
+    )]
    pub player_account: Account<'info, Player>,
-
    #[account(mut)]
    pub player: Signer<'info>,
-
    pub system_program: Program<'info, System>,
}

pub fn run_create_player(ctx: Context<CreatePlayer>) -> Result<()> {
-
-    ctx.accounts.player_account.player = ctx.accounts.player.key().clone();
-    ctx.accounts.player_account.game = ctx.accounts.game.key().clone();
-
-    ctx.accounts.player_account.status_flag = NO_EFFECT_FLAG;
-    ctx.accounts.player_account.experience = 0;
-    ctx.accounts.player_account.kills = 0;
+    let player_account = &mut ctx.accounts.player_account;
+    player_account.player = ctx.accounts.player.key();
+    player_account.game = ctx.accounts.game.key();
+    player_account.status_flag = NO_EFFECT_FLAG;
+    player_account.experience = 0;
+    player_account.kills = 0;

    msg!("Hero has entered the game!");

-    { // Spend 100 lamports to create player
-        let action_points_to_spend = 100;
+    // Spend 100 lamports to create player
+    let action_points_to_spend = CREATE_PLAYER_ACTION_POINTS;

-        spend_action_points(
-            action_points_to_spend,
-            &mut ctx.accounts.player_account,
-            &ctx.accounts.player.to_account_info(),
-            &ctx.accounts.system_program.to_account_info()
-        )?;
-    }
+    spend_action_points(
+        action_points_to_spend,
+        player_account,
+        &ctx.accounts.player.to_account_info(),
+        &ctx.accounts.system_program.to_account_info(),
+    )
+    .map_err(|_| error!(RpgError::InsufficientActionPoints))?;

    Ok(())
}
```

-#### 7. Spawn Monster
+### 7. Spawn Monster

Now that we have a way to create players, we need a way to spawn monsters for
them to fight. This instruction will create a new `Monster` account whose
-address is a PDA derived with the `game` account, `player` account, and an index
+address is a PDA derived from the `game` account, `player` account, and an index
representing the number of monsters the player has faced. There are two design
decisions here we should talk about:
@@ -1196,21 +1447,26 @@ decisions here we should talk about:
2.
We wrap both the `game` and `player` accounts in `Box` to allocate them to
   the Heap

-```rust
+```rust filename="spawn_monster.rs"
// ----------- SPAWN MONSTER ----------

+// Inside src/instructions/spawn_monster.rs
+use anchor_lang::prelude::*;
+
+use crate::{helpers::spend_action_points, Game, Monster, Player, SPAWN_MONSTER_ACTION_POINTS, ANCHOR_DISCRIMINATOR};
+
#[derive(Accounts)]
pub struct SpawnMonster<'info> {
    pub game: Box<Account<'info, Game>>,
-
-    #[account(mut,
+    #[account(
+        mut,
        has_one = game,
        has_one = player,
    )]
    pub player_account: Box<Account<'info, Player>>,
-
    #[account(
        init,
-        seeds=[
+        seeds = [
            b"MONSTER",
            game.key().as_ref(),
            player.key().as_ref(),
@@ -1218,48 +1474,41 @@ pub struct SpawnMonster<'info> {
        ],
        bump,
        payer = player,
-        space = std::mem::size_of::<Monster>() + 8)
-    ]
+        space = ANCHOR_DISCRIMINATOR + Monster::INIT_SPACE
+    )]
    pub monster: Account<'info, Monster>,
-
    #[account(mut)]
    pub player: Signer<'info>,
-
    pub system_program: Program<'info, System>,
}

pub fn run_spawn_monster(ctx: Context<SpawnMonster>) -> Result<()> {
+    let monster = &mut ctx.accounts.monster;
+    monster.player = ctx.accounts.player.key();
+    monster.game = ctx.accounts.game.key();
+    monster.hitpoints = 100;

-    {
-        ctx.accounts.monster.player = ctx.accounts.player.key().clone();
-        ctx.accounts.monster.game = ctx.accounts.game.key().clone();
-        ctx.accounts.monster.hitpoints = 100;
+    let player_account = &mut ctx.accounts.player_account;
+    player_account.next_monster_index = player_account.next_monster_index.checked_add(1).unwrap();

-        msg!("Monster Spawned!");
-    }
+    msg!("Monster Spawned!");

-    {
-        ctx.accounts.player_account.next_monster_index = ctx.accounts.player_account.next_monster_index.checked_add(1).unwrap();
-    }
-
-    { // Spend 5 lamports to spawn monster
-        let action_point_to_spend = 5;
-
-        spend_action_points(
-            action_point_to_spend,
-            &mut ctx.accounts.player_account,
-            &ctx.accounts.player.to_account_info(),
-            &ctx.accounts.system_program.to_account_info()
-        )?;
-    }
+    // Spend 5 lamports to spawn monster
+    let action_point_to_spend = SPAWN_MONSTER_ACTION_POINTS;

+    spend_action_points(
+        action_point_to_spend,
+        player_account,
+        &ctx.accounts.player.to_account_info(),
+        &ctx.accounts.system_program.to_account_info(),
+    )?;

    Ok(())
}
```

-#### 8. Attack Monster
+### 8. Attack Monster

-Now! Let’s attack those monsters and start gaining some exp!
+Now! Let's attack those monsters and start gaining some exp!

The logic here is as follows:
@@ -1273,139 +1522,234 @@ incrementing experience and kill counts.

The `saturating_add` function ensures the number will never overflow. Say the
`kills` was a u8 and my current kill count was 255 (0xFF). If I killed another
and added normally, e.g. `255 + 1 = 0 (0xFF + 0x01 = 0x00) = 0`, the kill count
-would end up as 0. `saturating_add` will keep it at its max if it’s about to
+would end up as 0. `saturating_add` will keep it at its max if it's about to
roll over, so `255 + 1 = 255`. The `checked_add` function will throw an error if
-it’s about to overflow. Keep this in mind when doing math in Rust. Even though
-`kills` is a u64 and will never roll with it’s current programming, it’s good
+it's about to overflow. Keep this in mind when doing math in Rust. Even though
+`kills` is a u64 and will never roll over with its current programming, it's good
practice to use safe math and consider roll-overs.
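Here is a tiny standalone sketch of the difference (plain Rust, runnable
anywhere):

```rust
fn main() {
    let kills: u8 = 255; // u8::MAX, so the roll-over is easy to see

    // Plain wrapping arithmetic rolls over: 255 + 1 = 0
    assert_eq!(kills.wrapping_add(1), 0);

    // saturating_add clamps at the type's maximum instead: 255 + 1 = 255
    assert_eq!(kills.saturating_add(1), 255);

    // checked_add returns None on overflow, letting you surface an error
    assert_eq!(kills.checked_add(1), None);
    assert_eq!(100u8.checked_add(1), Some(101));
}
```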
-```rust
+```rust filename="attack_monster.rs"
// ----------- ATTACK MONSTER ----------

+// Inside src/instructions/attack_monster.rs
+use anchor_lang::prelude::*;
+use crate::{helpers::spend_action_points, Monster, Player, ATTACK_ACTION_POINTS, error::RpgError};
+
#[derive(Accounts)]
pub struct AttackMonster<'info> {
-
    #[account(
        mut,
        has_one = player,
    )]
    pub player_account: Box<Account<'info, Player>>,
-
    #[account(
        mut,
        has_one = player,
-        constraint = monster.game == player_account.game
+        constraint = monster.game == player_account.game @ RpgError::GameMismatch
    )]
    pub monster: Box<Account<'info, Monster>>,
-
    #[account(mut)]
    pub player: Signer<'info>,
-
    pub system_program: Program<'info, System>,
}

pub fn run_attack_monster(ctx: Context<AttackMonster>) -> Result<()> {
+    let player_account = &mut ctx.accounts.player_account;
+    let monster = &mut ctx.accounts.monster;

-    let mut did_kill = false;
-
-    {
-        let hp_before_attack = ctx.accounts.monster.hitpoints;
-        let hp_after_attack = ctx.accounts.monster.hitpoints.saturating_sub(1);
-        let damage_dealt = hp_before_attack - hp_after_attack;
-        ctx.accounts.monster.hitpoints = hp_after_attack;
-
-
-
-        if hp_before_attack > 0 && hp_after_attack == 0 {
-            did_kill = true;
-        }
-
-        if damage_dealt > 0 {
-            msg!("Damage Dealt: {}", damage_dealt);
-        } else {
-            msg!("Stop it's already dead!");
-        }
-    }
+    let hp_before_attack = monster.hitpoints;
+    let hp_after_attack = monster.hitpoints.saturating_sub(1);
+    let damage_dealt = hp_before_attack.saturating_sub(hp_after_attack);
+    monster.hitpoints = hp_after_attack;

-    {
-        ctx.accounts.player_account.experience = ctx.accounts.player_account.experience.saturating_add(1);
+    if damage_dealt > 0 {
+        msg!("Damage Dealt: {}", damage_dealt);
+        player_account.experience = player_account.experience.saturating_add(1);
        msg!("+1 EXP");

-        if did_kill {
-            ctx.accounts.player_account.kills = ctx.accounts.player_account.kills.saturating_add(1);
+        if hp_after_attack == 0 {
+            player_account.kills = player_account.kills.saturating_add(1);
            msg!("You killed the monster!");
        }
+    } else {
+        msg!("Stop it's already dead!");
    }

-    { // Spend 1 lamports to attack monster
-        let action_point_to_spend = 1;
+    // Spend 1 lamport to attack monster
+    let action_point_to_spend = ATTACK_ACTION_POINTS;

-        spend_action_points(
-            action_point_to_spend,
-            &mut ctx.accounts.player_account,
-            &ctx.accounts.player.to_account_info(),
-            &ctx.accounts.system_program.to_account_info()
-        )?;
-    }
+    spend_action_points(
+        action_point_to_spend,
+        player_account,
+        &ctx.accounts.player.to_account_info(),
+        &ctx.accounts.system_program.to_account_info()
+    )?;

    Ok(())
}
```

-#### Redeem to Treasury
+### 9. Redeem to Treasury

This is our last instruction. This instruction lets anyone send the spent
`action_points` to the `treasury` wallet.

Again, let's box the rpg accounts and use safe math.
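One detail worth calling out before the code: the instruction below moves
lamports by mutating account balances directly rather than through a System
Program CPI. That works because the `player` account is a PDA owned by this
program, and the runtime only lets a program debit accounts it owns. A
bare-bones sketch of the pattern (the full instruction below adds checked math
on top):

```rust
use anchor_lang::prelude::*;

// Bare-bones sketch: a program may debit an account it owns by mutating
// lamport balances directly; total lamports must stay balanced within the
// instruction or the transaction fails.
fn move_lamports(from: &AccountInfo, to: &AccountInfo, amount: u64) -> Result<()> {
    **from.try_borrow_mut_lamports()? -= amount;
    **to.try_borrow_mut_lamports()? += amount;
    Ok(())
}
```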
-```rust
+```rust filename="collect_points.rs"
// ----------- REDEEM TO TREASURY ----------

+// Inside src/instructions/collect_points.rs
+use anchor_lang::prelude::*;
+use crate::{error::RpgError, Game, Player};
+
#[derive(Accounts)]
pub struct CollectActionPoints<'info> {
-
    #[account(
        mut,
-        has_one=treasury
+        has_one = treasury @ RpgError::InvalidTreasury
    )]
    pub game: Box<Account<'info, Game>>,
-
    #[account(
        mut,
-        has_one=game
+        has_one = game @ RpgError::PlayerGameMismatch
    )]
    pub player: Box<Account<'info, Player>>,
-
    #[account(mut)]
    /// CHECK: It's being checked in the game account
-    pub treasury: AccountInfo<'info>,
-
+    pub treasury: UncheckedAccount<'info>,
    pub system_program: Program<'info, System>,
}

-// literally anyone who pays for the TX fee can run this command - give it to a clockwork bot
+// Literally anyone who pays for the TX fee can run this command - give it to a clockwork bot
pub fn run_collect_action_points(ctx: Context<CollectActionPoints>) -> Result<()> {
-    let transfer_amount: u64 = ctx.accounts.player.action_points_to_be_collected;
+    let transfer_amount = ctx.accounts.player.action_points_to_be_collected;
+
+    // Transfer lamports from player to treasury
+    let player_info = ctx.accounts.player.to_account_info();
+    let treasury_info = ctx.accounts.treasury.to_account_info();

-    **ctx.accounts.player.to_account_info().try_borrow_mut_lamports()? -= transfer_amount;
-    **ctx.accounts.treasury.to_account_info().try_borrow_mut_lamports()? += transfer_amount;
+    **player_info.try_borrow_mut_lamports()? = player_info
+        .lamports()
+        .checked_sub(transfer_amount)
+        .ok_or(RpgError::InsufficientFunds)?;
+
+    **treasury_info.try_borrow_mut_lamports()? = treasury_info
+        .lamports()
+        .checked_add(transfer_amount)
+        .ok_or(RpgError::ArithmeticOverflow)?;

    ctx.accounts.player.action_points_to_be_collected = 0;

-    ctx.accounts.game.action_points_collected = ctx.accounts.game.action_points_collected.checked_add(transfer_amount).unwrap();
+    ctx.accounts.game.action_points_collected = ctx.accounts.game
+        .action_points_collected
+        .checked_add(transfer_amount)
+        .ok_or(RpgError::ArithmeticOverflow)?;

-    msg!("The treasury collected {} action points to treasury", transfer_amount);
+    msg!("The treasury collected {} action points", transfer_amount);

    Ok(())
}
```

-#### Putting it all Together
+### 10. Error Handling
+
+Now, let's add all the errors we've used so far to the `errors.rs` file.
+
+```rust filename="errors.rs"
+// ------------RPG ERRORS--------------
+
+// Inside src/error/errors.rs
+
+use anchor_lang::prelude::*;
+
+#[error_code]
+pub enum RpgError {
+    #[msg("Arithmetic overflow occurred")]
+    ArithmeticOverflow,
+    #[msg("Invalid game configuration")]
+    InvalidGameConfig,
+    #[msg("Player not found")]
+    PlayerNotFound,
+    #[msg("Monster not found")]
+    MonsterNotFound,
+    #[msg("Insufficient action points")]
+    InsufficientActionPoints,
+    #[msg("Invalid attack")]
+    InvalidAttack,
+    #[msg("Maximum inventory size reached")]
+    MaxInventoryReached,
+    #[msg("Invalid item operation")]
+    InvalidItemOperation,
+    #[msg("Monster and player are not in the same game")]
+    GameMismatch,
+    #[msg("Invalid treasury account")]
+    InvalidTreasury,
+    #[msg("Player does not belong to the specified game")]
+    PlayerGameMismatch,
+    #[msg("Insufficient funds for transfer")]
+    InsufficientFunds
+}
+```
+
+### 11.
Module Declarations
+
+We need to declare all the modules used in the project as follows:
+
+```rust
+
+// Inside src/error/mod.rs
+pub mod errors;
+pub use errors::RpgError; // Expose the custom error type
+
+// Inside src/instructions/mod.rs
+pub mod attack_monster;
+pub mod collect_points;
+pub mod create_game;
+pub mod create_player;
+pub mod spawn_monster;
+
+pub use attack_monster::*; // Expose attack_monster functions
+pub use collect_points::*; // Expose collect_points functions
+pub use create_game::*; // Expose create_game functions
+pub use create_player::*; // Expose create_player functions
+pub use spawn_monster::*; // Expose spawn_monster functions
+
+// Inside src/state/mod.rs
+pub mod game;
+pub mod monster;
+pub mod player;
+
+pub use game::*; // Expose game state
+pub use monster::*; // Expose monster state
+pub use player::*; // Expose player state
+```
+
+### 12. Putting it all Together

Now that all of our instruction logic is written, let's add these functions to
actual instructions in the program. It can also be helpful to log compute units
for each instruction.

-```rust
+```rust filename="lib.rs"
+
+// Inside src/lib.rs
+use anchor_lang::prelude::*;
+use anchor_lang::solana_program::log::sol_log_compute_units;
+
+mod state;
+mod instructions;
+mod constants;
+mod helpers;
+mod error;
+
+use state::*;
+use constants::*;
+use instructions::*;
+
+declare_id!("5Sc3gJv4tvPiFzE75boYMJabbNRs44zRhtT23fLdKewz");
+
#[program]
pub mod rpg {
    use super::*;
@@ -1439,7 +1783,6 @@ pub mod rpg {
        sol_log_compute_units();
        Ok(())
    }
-
}
```

@@ -1450,73 +1793,134 @@ successfully.

anchor build
```

-#### Testing
+### Testing

-Now, let’s see this baby work!
+Now, let's put everything together and see it in action!

-Let’s set up the `tests/rpg.ts` file. We will be filling out each test in turn.
-But first, we needed to set up a couple of different accounts. Mainly the
-`gameMaster` and the `treasury`.
+We'll begin by setting up the `tests/rpg.ts` file. We will be writing each test
+step by step. But before diving into the tests, we need to initialize a few
+important accounts, specifically the `gameMaster` and the `treasury` accounts.

-```typescript
+```typescript filename="rpg.ts"
import * as anchor from "@coral-xyz/anchor";
import { Program } from "@coral-xyz/anchor";
-import { Rpg, IDL } from "../target/types/rpg";
+import { Rpg } from "../target/types/rpg";
import { assert } from "chai";
+import {
+  Keypair,
+  LAMPORTS_PER_SOL,
+  PublicKey,
+  TransactionSignature,
+  TransactionConfirmationStrategy,
+} from "@solana/web3.js";
import NodeWallet from "@coral-xyz/anchor/dist/cjs/nodewallet";

-describe("RPG", () => {
-  // Configure the client to use the local cluster.
-  anchor.setProvider(anchor.AnchorProvider.env());
-
-  const program = anchor.workspace.Rpg as Program<Rpg>;
-  const wallet = anchor.workspace.Rpg.provider.wallet
-    .payer as anchor.web3.Keypair;
-  const gameMaster = wallet;
-  const player = wallet;
-
-  const treasury = anchor.web3.Keypair.generate();
-
-  it("Create Game", async () => {});
-
-  it("Create Player", async () => {});
-
-  it("Spawn Monster", async () => {});
-
-  it("Attack Monster", async () => {});
-
-  it("Deposit Action Points", async () => {});
+const GAME_SEED = "GAME";
+const PLAYER_SEED = "PLAYER";
+const MONSTER_SEED = "MONSTER";
+const MAX_ITEMS_PER_PLAYER = 8;
+const INITIAL_MONSTER_HITPOINTS = 100;
+const AIRDROP_AMOUNT = 10 * LAMPORTS_PER_SOL;
+const CREATE_PLAYER_ACTION_POINTS = 100;
+const SPAWN_MONSTER_ACTION_POINTS = 5;
+const ATTACK_MONSTER_ACTION_POINTS = 1;
+const MONSTER_INDEX_BYTE_LENGTH = 8;
+
+const provider = anchor.AnchorProvider.env();
+anchor.setProvider(provider);
+
+const program = anchor.workspace.Rpg as Program<Rpg>;
+const wallet = provider.wallet as NodeWallet;
+const gameMaster = wallet;
+const player = wallet;
+
+const treasury = Keypair.generate();
+
+const findProgramAddress = (seeds: Buffer[]): [PublicKey, number] =>
+  PublicKey.findProgramAddressSync(seeds, program.programId);
+
+const confirmTransaction = async (
+  signature: TransactionSignature,
+  provider: anchor.Provider,
+) => {
+  const latestBlockhash = await provider.connection.getLatestBlockhash();
+  const confirmationStrategy: TransactionConfirmationStrategy = {
+    signature,
+    blockhash: latestBlockhash.blockhash,
+    lastValidBlockHeight: latestBlockhash.lastValidBlockHeight,
+  };
+
+  try {
+    const confirmation =
+      await provider.connection.confirmTransaction(confirmationStrategy);
+    if (confirmation.value.err) {
+      throw new Error(
+        `Transaction failed: ${confirmation.value.err.toString()}`,
+      );
+    }
+  } catch (error) {
+    throw new Error(`Transaction confirmation failed: ${error.message}`);
+  }
+};
+
+const createGameAddress = () =>
+  findProgramAddress([Buffer.from(GAME_SEED), treasury.publicKey.toBuffer()]);
+
+const createPlayerAddress = (gameAddress: PublicKey) =>
+  findProgramAddress([
+    Buffer.from(PLAYER_SEED),
+    gameAddress.toBuffer(),
+    player.publicKey.toBuffer(),
+  ]);
+
+const createMonsterAddress = (
+  gameAddress: PublicKey,
+  monsterIndex: anchor.BN,
+) =>
+  findProgramAddress([
+    Buffer.from(MONSTER_SEED),
+    gameAddress.toBuffer(),
+    player.publicKey.toBuffer(),
+    monsterIndex.toArrayLike(Buffer, "le", MONSTER_INDEX_BYTE_LENGTH),
+  ]);
+
+describe("RPG game", () => {
+  it("creates a new game", async () => {});
+
+  it("creates a new player", async () => {});
+
+  it("spawns a monster", async () => {});
+
+  it("attacks a monster", async () => {});
+
+  it("deposits action points", async () => {});
});
```

-Now lets add in the `Create Game` test. Just call `createGame` with eight items,
-be sure to pass in all the accounts, and make sure the `treasury` account signs
-the transaction.
+Now let's add in the `creates a new game` test. Just call `createGame` with eight
+items, be sure to pass in all the accounts, and make sure the `treasury` account
+signs the transaction.
```typescript -it("Create Game", async () => { - const [gameKey] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("GAME"), treasury.publicKey.toBuffer()], - program.programId, - ); - - const txHash = await program.methods - .createGame( - 8, // 8 Items per player - ) - .accounts({ - game: gameKey, - gameMaster: gameMaster.publicKey, - treasury: treasury.publicKey, - systemProgram: anchor.web3.SystemProgram.programId, - }) - .signers([treasury]) - .rpc(); - - await program.provider.connection.confirmTransaction(txHash); - - // Print out if you'd like - // const account = await program.account.game.fetch(gameKey); +it("creates a new game", async () => { + try { + const [gameAddress] = createGameAddress(); + + const createGameSignature = await program.methods + .createGame(MAX_ITEMS_PER_PLAYER) + .accounts({ + game: gameAddress, + gameMaster: gameMaster.publicKey, + treasury: treasury.publicKey, + systemProgram: anchor.web3.SystemProgram.programId, + }) + .signers([treasury]) + .rpc(); + + await confirmTransaction(createGameSignature, provider); + } catch (error) { + throw new Error(`Failed to create game: ${error.message}`); + } }); ``` @@ -1531,118 +1935,91 @@ anchor test some `.pnp.*` files and no `node_modules`, you may want to call `rm -rf .pnp.*` followed by `npm i` and then `yarn install`. That should work. -Now that everything is running, let’s implement the `Create Player`, -`Spawn Monster`, and `Attack Monster` tests. Run each test as you complete them -to make sure things are running smoothly. +Now that everything is running, let's implement the `creates a new player`, +`spawns a monster`, and `attacks a monster` tests. Run each test as you complete +them to make sure things are running smoothly. ```typescript -it("Create Player", async () => { - const [gameKey] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("GAME"), treasury.publicKey.toBuffer()], - program.programId, - ); - - const [playerKey] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("PLAYER"), gameKey.toBuffer(), player.publicKey.toBuffer()], - program.programId, - ); - - const txHash = await program.methods - .createPlayer() - .accounts({ - game: gameKey, - playerAccount: playerKey, - player: player.publicKey, - systemProgram: anchor.web3.SystemProgram.programId, - }) - .rpc(); - - await program.provider.connection.confirmTransaction(txHash); - - // Print out if you'd like - // const account = await program.account.player.fetch(playerKey); +it("creates a new player", async () => { + try { + const [gameAddress] = createGameAddress(); + const [playerAddress] = createPlayerAddress(gameAddress); + + const createPlayerSignature = await program.methods + .createPlayer() + .accounts({ + game: gameAddress, + playerAccount: playerAddress, + player: player.publicKey, + systemProgram: anchor.web3.SystemProgram.programId, + }) + .rpc(); + + await confirmTransaction(createPlayerSignature, provider); + } catch (error) { + throw new Error(`Failed to create player: ${error.message}`); + } }); -it("Spawn Monster", async () => { - const [gameKey] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("GAME"), treasury.publicKey.toBuffer()], - program.programId, - ); +it("spawns a monster", async () => { + try { + const [gameAddress] = createGameAddress(); + const [playerAddress] = createPlayerAddress(gameAddress); - const [playerKey] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("PLAYER"), gameKey.toBuffer(), player.publicKey.toBuffer()], - program.programId, - ); - - 
const playerAccount = await program.account.player.fetch(playerKey); + const playerAccount = await program.account.player.fetch(playerAddress); + const [monsterAddress] = createMonsterAddress( + gameAddress, + playerAccount.nextMonsterIndex, + ); - const [monsterKey] = anchor.web3.PublicKey.findProgramAddressSync( - [ - Buffer.from("MONSTER"), - gameKey.toBuffer(), - player.publicKey.toBuffer(), - playerAccount.nextMonsterIndex.toBuffer("le", 8), - ], - program.programId, - ); - - const txHash = await program.methods - .spawnMonster() - .accounts({ - game: gameKey, - playerAccount: playerKey, - monster: monsterKey, - player: player.publicKey, - systemProgram: anchor.web3.SystemProgram.programId, - }) - .rpc(); - - await program.provider.connection.confirmTransaction(txHash); - - // Print out if you'd like - // const account = await program.account.monster.fetch(monsterKey); + const spawnMonsterSignature = await program.methods + .spawnMonster() + .accounts({ + game: gameAddress, + playerAccount: playerAddress, + monster: monsterAddress, + player: player.publicKey, + systemProgram: anchor.web3.SystemProgram.programId, + }) + .rpc(); + + await confirmTransaction(spawnMonsterSignature, provider); + } catch (error) { + throw new Error(`Failed to spawn monster: ${error.message}`); + } }); -it("Attack Monster", async () => { - const [gameKey] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("GAME"), treasury.publicKey.toBuffer()], - program.programId, - ); +it("attacks a monster", async () => { + try { + const [gameAddress] = createGameAddress(); + const [playerAddress] = createPlayerAddress(gameAddress); - const [playerKey] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("PLAYER"), gameKey.toBuffer(), player.publicKey.toBuffer()], - program.programId, - ); + const playerAccount = await program.account.player.fetch(playerAddress); + const [monsterAddress] = createMonsterAddress( + gameAddress, + playerAccount.nextMonsterIndex.subn(1), + ); - // Fetch the latest monster created - const playerAccount = await program.account.player.fetch(playerKey); - const [monsterKey] = anchor.web3.PublicKey.findProgramAddressSync( - [ - Buffer.from("MONSTER"), - gameKey.toBuffer(), - player.publicKey.toBuffer(), - playerAccount.nextMonsterIndex.subn(1).toBuffer("le", 8), - ], - program.programId, - ); - - const txHash = await program.methods - .attackMonster() - .accounts({ - playerAccount: playerKey, - monster: monsterKey, - player: player.publicKey, - systemProgram: anchor.web3.SystemProgram.programId, - }) - .rpc(); - - await program.provider.connection.confirmTransaction(txHash); - - // Print out if you'd like - // const account = await program.account.monster.fetch(monsterKey); - - const monsterAccount = await program.account.monster.fetch(monsterKey); - assert(monsterAccount.hitpoints.eqn(99)); + const attackMonsterSignature = await program.methods + .attackMonster() + .accounts({ + playerAccount: playerAddress, + monster: monsterAddress, + player: player.publicKey, + systemProgram: anchor.web3.SystemProgram.programId, + }) + .rpc(); + + await confirmTransaction(attackMonsterSignature, provider); + + const monsterAccount = await program.account.monster.fetch(monsterAddress); + assert( + monsterAccount.hitpoints.eqn(INITIAL_MONSTER_HITPOINTS - 1), + "Monster hitpoints should decrease by 1 after attack", + ); + } catch (error) { + throw new Error(`Failed to attack monster: ${error.message}`); + } }); ``` @@ -1650,7 +2027,7 @@ Notice the monster that we choose to attack is 
`playerAccount.nextMonsterIndex.subn(1).toBuffer('le', 8)`. This allows us to
attack the most recent monster spawned. Anything below the `nextMonsterIndex`
should be okay. Lastly, since seeds are just an array of bytes we have to turn
-the index into the u64, which is little endian `le` at 8 bytes.
+the index into the u64, which is little-endian (`le`) at 8 bytes.

Run `anchor test` to deal some damage!

@@ -1662,109 +2039,119 @@ game were running continuously, it probably makes sense to use something like
[clockwork](https://www.clockwork.xyz/) cron jobs.

```typescript
-it("Deposit Action Points", async () => {
-  const [gameKey] = anchor.web3.PublicKey.findProgramAddressSync(
-    [Buffer.from("GAME"), treasury.publicKey.toBuffer()],
-    program.programId,
-  );
-
-  const [playerKey] = anchor.web3.PublicKey.findProgramAddressSync(
-    [Buffer.from("PLAYER"), gameKey.toBuffer(), player.publicKey.toBuffer()],
-    program.programId,
-  );
-
-  // To show that anyone can deposit the action points
-  // Ie, give this to a clockwork bot
-  const clockworkWallet = anchor.web3.Keypair.generate();
-
-  // To give it a starting balance
-  const clockworkProvider = new anchor.AnchorProvider(
-    program.provider.connection,
-    new NodeWallet(clockworkWallet),
-    anchor.AnchorProvider.defaultOptions(),
-  );
-  const clockworkProgram = new anchor.Program(
-    IDL,
-    program.programId,
-    clockworkProvider,
-  );
-
-  // Have to give the accounts some lamports else the tx will fail
-  const amountToInitialize = 10000000000;
-
-  const clockworkAirdropTx =
-    await clockworkProgram.provider.connection.requestAirdrop(
-      clockworkWallet.publicKey,
-      amountToInitialize,
+it("deposits action points", async () => {
+  try {
+    const [gameAddress] = createGameAddress();
+    const [playerAddress] = createPlayerAddress(gameAddress);
+
+    // To show that anyone can deposit the action points
+    // Ie, give this to a clockwork bot
+    const clockworkWallet = anchor.web3.Keypair.generate();
+
+    // To give it a starting balance
+    const clockworkProvider = new anchor.AnchorProvider(
+      program.provider.connection,
+      new NodeWallet(clockworkWallet),
+      anchor.AnchorProvider.defaultOptions(),
    );
-  await program.provider.connection.confirmTransaction(
-    clockworkAirdropTx,
-    "confirmed",
-  );

-  const treasuryAirdropTx =
-    await clockworkProgram.provider.connection.requestAirdrop(
+    // Have to give the accounts some lamports else the tx will fail
+    const amountToInitialize = 10000000000;
+
+    const clockworkAirdropTx =
+      await clockworkProvider.connection.requestAirdrop(
+        clockworkWallet.publicKey,
+        amountToInitialize,
+      );
+
+    await confirmTransaction(clockworkAirdropTx, clockworkProvider);
+
+    const treasuryAirdropTx = await clockworkProvider.connection.requestAirdrop(
      treasury.publicKey,
      amountToInitialize,
    );
-  await program.provider.connection.confirmTransaction(
-    treasuryAirdropTx,
-    "confirmed",
-  );
-
-  const txHash = await clockworkProgram.methods
-    .depositActionPoints()
-    .accounts({
-      game: gameKey,
-      player: playerKey,
-      treasury: treasury.publicKey,
-      systemProgram: anchor.web3.SystemProgram.programId,
-    })
-    .rpc();
-
-  await program.provider.connection.confirmTransaction(txHash);
-
-  const expectedActionPoints = 100 + 5 + 1; // Player Create ( 100 ) + Monster Spawn ( 5 ) + Monster Attack ( 1 )
-  const treasuryBalance = await program.provider.connection.getBalance(
-    treasury.publicKey,
-  );
-  assert(
-    treasuryBalance == amountToInitialize + expectedActionPoints, // Player Create ( 100 ) + Monster Spawn ( 5 ) + Monster Attack ( 1 )
-  );
-
- 
const gameAccount = await program.account.game.fetch(gameKey); - assert(gameAccount.actionPointsCollected.eqn(expectedActionPoints)); - - const playerAccount = await program.account.player.fetch(playerKey); - assert(playerAccount.actionPointsSpent.eqn(expectedActionPoints)); - assert(playerAccount.actionPointsToBeCollected.eqn(0)); + + await confirmTransaction(treasuryAirdropTx, clockworkProvider); + + const depositActionPointsSignature = await program.methods + .depositActionPoints() + .accounts({ + game: gameAddress, + player: playerAddress, + treasury: treasury.publicKey, + systemProgram: anchor.web3.SystemProgram.programId, + }) + .rpc(); + + await confirmTransaction(depositActionPointsSignature, provider); + + const expectedActionPoints = + CREATE_PLAYER_ACTION_POINTS + + SPAWN_MONSTER_ACTION_POINTS + + ATTACK_MONSTER_ACTION_POINTS; + const treasuryBalance = await provider.connection.getBalance( + treasury.publicKey, + ); + assert( + treasuryBalance === AIRDROP_AMOUNT + expectedActionPoints, + "Treasury balance should match expected action points", + ); + + const gameAccount = await program.account.game.fetch(gameAddress); + assert( + gameAccount.actionPointsCollected.eqn(expectedActionPoints), + "Game action points collected should match expected", + ); + + const playerAccount = await program.account.player.fetch(playerAddress); + assert( + playerAccount.actionPointsSpent.eqn(expectedActionPoints), + "Player action points spent should match expected", + ); + assert( + playerAccount.actionPointsToBeCollected.eqn(0), + "Player should have no action points to be collected", + ); + } catch (error) { + throw new Error(`Failed to deposit action points: ${error.message}`); + } }); ``` Finally, run `anchor test` to see everything working. +```bash + +RPG game + ✔ creates a new game (317ms) + ✔ creates a new player (399ms) + ✔ spawns a monster (411ms) + ✔ attacks a monster (413ms) + ✔ deposits action points (1232ms) +``` + Congratulations! This was a lot to cover, but you now have a mini RPG game engine. If things aren't quite working, go back through the lab and find where -you went wrong. If you need, you can refer to the -[`main` branch of the solution code](https://github.com/Unboxed-Software/anchor-rpg). +you went wrong. If you need to, you can refer to the +[`main` branch of the solution code](https://github.com/solana-developers/anchor-rpg). Be sure to put these concepts into practice in your own programs. Each little optimization adds up! ## Challenge -Now it’s your turn to practice independently. Go back through the lab code -looking for additional optimizations and/or expansion you can make. Think +Now it's your turn to practice independently. Go back through the lab code +looking for additional optimizations and/or expansions you can make. Think through new systems and features you would add and how you would optimize them. -You can find some example modifications on the `challenge-solution` branch of -the -[RPG repository](https://github.com/Unboxed-Software/anchor-rpg/tree/challenge-solution). +You can find some example modifications on the +[`challenge-solution` branch of the RPG repository](https://github.com/solana-developers/anchor-rpg/tree/challenge-solution). Finally, go through one of your own programs and think about optimizations you can make to improve memory management, storage size, and/or concurrency. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=4a628916-91f5-46a9-8eb0-6ba453aa6ca6)! 
diff --git a/content/courses/program-optimization/program-configuration.md b/content/courses/program-optimization/program-configuration.md index 637fc751a..58760c4b0 100644 --- a/content/courses/program-optimization/program-configuration.md +++ b/content/courses/program-optimization/program-configuration.md @@ -2,10 +2,10 @@ title: Program Configuration objectives: - Define program features in the `Cargo.toml` file - - Use the Rust `cfg` attribute to conditionally compile code based on which - features are or are not enabled - - Use the Rust `cfg!` macro to conditionally compile code based on which - features are or are not enabled + - Use the native Rust `cfg` attribute to conditionally compile code based on + which features are or are not enabled + - Use the native Rust `cfg!` macro to conditionally compile code based on + which features are or are not enabled - Create an admin-only instruction to set up a program account that can be used to store program configuration values description: @@ -17,15 +17,17 @@ description: - There are no "out of the box" solutions for creating distinct environments in an onchain program, but you can achieve something similar to environment variables if you get creative. -- You can use the `cfg` attribute with **Rust features** - (`#[cfg(feature = ...)]`) to run different code or provide different variable - values based on the Rust feature provided. _This happens at compile-time and - doesn't allow you to swap values after a program has been deployed_. -- Similarly, you can use the `cfg!` **macro** to compile different code paths - based on the features that are enabled. -- Alternatively, you can achieve something similar to environment variables that - can be modified after deployment by creating accounts and instructions that - are only accessible by the program’s upgrade authority. +- You can use the + [`cfg` attribute](https://doc.rust-lang.org/rust-by-example/attribute/cfg.html) + with **Rust features** (`#[cfg(feature = ...)]`) to run different code or + provide different variable values based on the Rust feature provided. _This + happens at compile-time and doesn't allow you to swap values after a program + has been deployed_. +- Similarly, you can use the + [`cfg!` **macro**](https://doc.rust-lang.org/std/macro.cfg.html) to compile + different code paths based on the enabled features. +- For environment-like variables post-deployment, create program accounts and + admin-only instructions accessible by the program's upgrade authority. ## Lesson @@ -33,30 +35,31 @@ One of the difficulties engineers face across all types of software development is that of writing testable code and creating distinct environments for local development, testing, production, etc. -This can be particularly difficult in Solana program development. For example, -imagine creating an NFT staking program that rewards each staked NFT with 10 -reward tokens per day. How do you test the ability to claim rewards when tests -run in a few hundred milliseconds, not nearly long enough to earn rewards? +This is especially difficult in Solana program development. For instance, +imagine building an NFT staking program where each staked NFT earns 10 reward +tokens daily. How can you test the ability to claim rewards when tests run in +just a few hundred milliseconds—not nearly long enough to accrue rewards? -Traditional web development solves some of this with environment variables whose -values can differ in each distinct "environment." 
Currently, there's no formal -concept of environment variables in a Solana program. If there were, you could -just make it so that rewards in your test environment are 10,000,000 tokens per -day and it would be easier to test the ability to claim rewards. +In traditional web development, this is often addressed through environment +variables, allowing different values in distinct "environments." However, Solana +programs currently lack a formal concept of environment variables. If they +existed, you could easily modify the rewards in your test environment to +something like 10,000,000 tokens per day, making it easier to test claiming +rewards. -Fortunately, you can achieve similar functionality if you get creative. The best -approach is probably a combination of two things: +Luckily, you can mimic this functionality with a bit of creativity. The most +effective solution involves a combination of two techniques: -1. Rust feature flags that allow you to specify in your build command the - "environment" of the build, coupled with code that adjusts specific values - accordingly -2. Program "admin-only" accounts and instructions that are only accessible by - the program's upgrade authority +1. **Native Rust** feature flags that let you specify the "environment" during + your build, allowing the code to adjust values based on the specified build. +2. **Admin-only** program accounts and instructions that are only accessible by + the program's upgrade `authority` for setting and managing configuration + values post-deployment. -### Rust feature flags +### Native Rust Feature Flags One of the simplest ways to create environments is to use Rust features. -Features are defined in the `[features]` table of the program’s `Cargo.toml` +Features are defined in the `[features]` table of the program's `Cargo.toml` file. You may define multiple features for different use cases. ```toml @@ -79,7 +82,7 @@ You can also specify multiple features by separating them with a comma. anchor test -- --features "feature-one", "feature-two" ``` -#### Make code conditional using the `cfg` attribute +#### Make Code Conditional Using the cfg Attribute With a feature defined, you can then use the `cfg` attribute within your code to conditionally compile code based on whether or not a given feature is enabled. @@ -167,7 +170,7 @@ different implementations of the `constants` module. This allows the program to use different values for the `USDC_MINT_PUBKEY` constant depending on whether or not the `local-testing` feature is enabled. -#### Make code conditional using the `cfg!` macro +#### Make Code Conditional using the cfg! Macro Similar to the `cfg` attribute, the `cfg!` **macro** in Rust allows you to check the values of certain configuration flags at runtime. This can be useful if you @@ -207,7 +210,7 @@ the `local-testing` feature at runtime. If the `local-testing` feature is enabled, the first code path is executed. If the `local-testing` feature is not enabled, the second code path is executed instead. -### Admin-only instructions +### Admin-only Instructions Feature flags are great for adjusting values and code paths at compilation, but they don't help much if you end up needing to adjust something after you've @@ -269,8 +272,8 @@ simplest way to do this is to hard-code an admin's public key in your code and then add a simple signer check into your instruction's account validation comparing the signer to this public key. 
-In Anchor, constraining an `update_program_config` instruction to only be usable
-by a hard-coded admin might look like this:
+In Anchor, constraining an `update_program_config` instruction handler to only
+be usable by a hard-coded admin might look like this:

```rust
#[program]
@@ -301,11 +304,11 @@ pub struct UpdateProgramConfig<'info> {
}
```

-Before instruction logic even executes, a check will be performed to make sure
-the instruction's signer matches the hard-coded `ADMIN_PUBKEY`. Notice that the
-example above doesn't show the instruction that initializes the config account,
-but it should have similar constraints to ensure that an attacker can't
-initialize the account with unexpected values.
+Before instruction handler logic even executes, a check will be performed to
+make sure the instruction's signer matches the hard-coded `ADMIN_PUBKEY`. Notice
+that the example above doesn't show the instruction handler that initializes the
+config account, but it should have similar constraints to ensure that an
+attacker can't initialize the account with unexpected values.

While this approach works, it also means keeping track of an admin wallet on
top of keeping track of a program's upgrade authority. With a few more lines of
@@ -350,7 +353,7 @@ pub struct UpdateProgramConfig<'info> {
```

Again, the example above doesn't show the instruction that initializes the
-config account, but it should have the same constraints to ensure that an
+config account, but it should have the same constraints to ensure that an
attacker can't initialize the account with unexpected values.

If this is the first time you've heard about the program data account, it's
@@ -393,7 +396,7 @@ pub struct UpdateProgramConfig<'info> {
```

There's one catch here: in the time between deploying a program and initializing
-the config account, _there is no admin_. Which means that the instruction for
+the config account, _there is no admin_. This means that the instruction for
initializing the config account can't be constrained to only allow admins as
callers. That means it could be called by an attacker looking to set themselves
as the admin.

@@ -419,25 +422,25 @@ We'll quickly learn while testing our program that it could benefit from the
flexibility provided by an admin-controlled configuration account and some
feature flags.

-#### 1. Starter
+### 1. Starter

-Download the starter code from the `starter` branch
-of [this repository](https://github.com/Unboxed-Software/solana-admin-instructions/tree/starter).
-The code contains a program with a single instruction and a single test in the
-`tests` directory.
+Download the starter code from
+the [`starter` branch of this repository](https://github.com/solana-developers/admin-instructions/tree/starter).
+The code contains a program with a single instruction handler and a single test
+in the `tests` directory.

Let's quickly walk through how the program works.

The `lib.rs` file includes a constant for the USDC address and a single
`payment` instruction. The `payment` instruction simply calls the
-`payment_handler` function in the `instructions/payment.rs` file where the
-instruction logic is contained.
+`payment_handler` instruction handler in the `instructions/payment.rs` file
+where the instruction handler logic is contained.

The `instructions/payment.rs` file contains both the `payment_handler` function
as well as the `Payment` account validation struct representing the accounts
-required by the `payment` instruction.
The `payment_handler` function calculates
-a 1% fee from the payment amount, transfers the fee to a designated token
-account, and transfers the remaining amount to the payment recipient.
+required by the `payment` instruction. The `payment_handler` instruction handler
+calculates a 1% fee from the payment amount, transfers the fee to a designated
+token account, and transfers the remaining amount to the payment recipient.

Finally, the `tests` directory has a single test file, `config.ts` that simply
invokes the `payment` instruction and asserts that the corresponding token
@@ -446,7 +449,7 @@ account balances have been debited and credited accordingly.

Before we continue, take a few minutes to familiarize yourself with these files
and their contents.

-#### 2. Run the existing test
+### 2. Run the existing test

Let's start by running the existing test.

@@ -459,7 +462,7 @@ use _your_ key.

Finally, run `anchor test` to start the test. It should fail with the following
output:

-```
+```shell
Error: failed to send transaction: Transaction simulation failed: Error processing Instruction 0: incorrect program id for instruction
```

@@ -467,7 +470,7 @@ The reason for this error is that we're attempting to use the mainnet USDC mint
address (as hard-coded in the `lib.rs` file of the program), but that mint
doesn't exist in the local environment.

-#### 3. Adding a `local-testing` feature
+### 3. Adding a local-testing feature

To fix this, we need a mint we can use locally _and_ hard-code into the program.
Since the local environment is reset often during testing, you'll need to store
@@ -481,16 +484,24 @@ make the program use our local mint but otherwise use the production USDC mint.

Generate a new keypair by running `solana-keygen grind`. Run the following
command to generate a keypair with a public key that begins with "env".

-```
+```shell
solana-keygen grind --starts-with env:1
```

Once a keypair is found, you should see an output similar to the following:

-```
+```shell
Wrote keypair to env9Y3szLdqMLU9rXpEGPqkjdvVn8YNHtxYNvCKXmHe.json
```

+
+
+Make sure to add the generated keypair file
+(`env9Y3szLdqMLU9rXpEGPqkjdvVn8YNHtxYNvCKXmHe.json`) to your `.gitignore` file
+to prevent accidentally committing and leaking your keypair to GitHub or other
+version control platforms. If you plan to use the keypair later, securing it
+properly is critical.
+
The keypair is written to a file in your working directory. Now that we have a
placeholder USDC address, let's modify the `lib.rs` file. Use the `cfg`
attribute to define the `USDC_MINT_PUBKEY` constant depending on whether the
@@ -500,7 +511,6 @@ previous step rather than copying the one below.

```rust
use anchor_lang::prelude::*;
-use solana_program::{pubkey, pubkey::Pubkey};

mod instructions;
use instructions::*;
@@ -527,7 +537,7 @@ pub mod config {

Next, add the `local-testing` feature to the `Cargo.toml` file located in
`/programs`.

-```
+```toml
[features]
...
local-testing = []
```

Next, update the `config.ts` test file to create a mint using the generated
keypair. Start by deleting the `mint` constant.

```typescript
-const mint = new anchor.web3.PublicKey(
-  "EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v",
-);
+const USDC_MINT = new PublicKey("EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v");
+```
+
+
+
+The `anchor test` command, when run on a local network, starts a new test
+validator using `solana-test-validator`. This test validator uses a
+non-upgradeable loader.
The non-upgradeable loader makes it so the program's +`program_data` account isn't initialized when the validator starts. You'll +recall from the lesson that this account is how we access the upgrade authority +from the program. + +To work around this, you can add a `deploy` function to the test file that runs +the deploy command for the program with an upgradeable loader. To use it, run +`anchor test --skip-deploy`, and call the `deploy` function within the test to +run the deploy command after the test validator has started. + +```typescript +import { execSync } from "child_process"; +import path from "path"; + +... + +const deploy = () => { + const workingDirectory = process.cwd(); + const programKeypairPath = path.join( + workingDirectory, + "target", + "deploy", + "config-keypair.json", + ); + const programBinaryPath = path.join( + workingDirectory, + "target", + "deploy", + "config.so", + ); + + const deploy_command = `solana program deploy --url localhost -v --program-id "${programKeypairPath}" "${programBinaryPath}"`; + + try { + execSync(deploy_command, { stdio: "inherit" }); + console.log("Program deployed successfully"); + } catch (error) { + console.error("Error deploying program:", error.message); + throw error; + } +}; + +... + +before(async () => { + deploy(); + ... +}); +``` + +For example, the command to run the test with features would look like this: + +```shell +anchor test --skip-deploy -- --features "local-testing" ``` Next, update the test to create a mint using the keypair, which will enable us @@ -547,48 +615,75 @@ to reuse the same mint address each time the tests are run. Remember to replace the file name with the one generated in the previous step. ```typescript -let mint: anchor.web3.PublicKey +let tokenMint: PublicKey; + +const deploy = () => { + const workingDirectory = process.cwd(); + const programKeypairPath = path.join( + workingDirectory, + "target", + "deploy", + "config-keypair.json", + ); + const programBinaryPath = path.join( + workingDirectory, + "target", + "deploy", + "config.so", + ); + + const deploy_command = `solana program deploy --url localhost -v --program-id "${programKeypairPath}" "${programBinaryPath}"`; + + try { + execSync(deploy_command, { stdio: "inherit" }); + console.log("Program deployed successfully"); + } catch (error) { + console.error("Error deploying program:", error.message); + throw error; + } +}; before(async () => { - let data = fs.readFileSync( - "env9Y3szLdqMLU9rXpEGPqkjdvVn8YNHtxYNvCKXmHe.json" - ) - - let keypair = anchor.web3.Keypair.fromSecretKey( - new Uint8Array(JSON.parse(data)) - ) - - const mint = await spl.createMint( - connection, - wallet.payer, - wallet.publicKey, - null, - 0, - keypair - ) + try { + deploy(); + const mintKeypairData = fs.readFileSync( + "envYcAnc9BvWEqDy4VKJsiECCbbc72Fynz87rBih6DV.json" + ); + const mintKeypair = Keypair.fromSecretKey( + new Uint8Array(JSON.parse(mintKeypairData)) + ); + + tokenMint = await createMint( + connection, + walletAuthority.payer, + walletAuthority.publicKey, + null, + 0, + mintKeypair + ); ... ``` Lastly, run the test with the `local-testing` feature enabled. -``` -anchor test -- --features "local-testing" +```shell +anchor test --skip-deploy -- --features "local-testing" ``` You should see the following output: -``` -config - ✔ Payment completes successfully (406ms) +```shell +Config + ✔ completes payment successfully (432ms) -1 passing (3s) + 1 passing (21s) ``` Boom. 
Just like that, you've used features to run two different code paths for
different environments.

-#### 4. Program Config
+### 4. Program Config

Features are great for setting different values at compilation, but what if you
wanted to be able to dynamically update the fee percentage used by the program?

@@ -609,48 +704,45 @@ To begin, let's first update the `lib.rs` file to:

```rust
use anchor_lang::prelude::*;
-use solana_program::{pubkey, pubkey::Pubkey};

mod instructions;
-mod state;
use instructions::*;
+mod state;

-declare_id!("BC3RMBvVa88zSDzPXnBXxpnNYCrKsxnhR3HwwHhuKKei");
-
-#[cfg(feature = "local-testing")]
-#[constant]
-pub const USDC_MINT_PUBKEY: Pubkey = pubkey!("envgiPXWwmpkHFKdy4QLv2cypgAWmVTVEm71YbNpYRu");
+declare_id!("FF3eGbZnharYruJNwRV7jqnDYvpLkyvgbSv5gsGbJHps");

#[cfg(not(feature = "local-testing"))]
#[constant]
pub const USDC_MINT_PUBKEY: Pubkey = pubkey!("EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v");

+#[cfg(feature = "local-testing")]
+#[constant]
+pub const USDC_MINT_PUBKEY: Pubkey = pubkey!("envYcAnc9BvWEqDy4VKJsiECCbbc72Fynz87rBih6DV");
+
pub const SEED_PROGRAM_CONFIG: &[u8] = b"program_config";

#[constant]
-pub const ADMIN: Pubkey = pubkey!("...");
+pub const ADMIN: Pubkey = pubkey!("GprrWv9r8BMxQiWea9MrbCyK7ig7Mj8CcseEbJhDDZXM");

#[program]
pub mod config {
+    use super::*;
+
+    pub fn payment(ctx: Context<Payment>, amount: u64) -> Result<()> {
+        instructions::payment_handler(ctx, amount)
+    }
+
    pub fn initialize_program_config(ctx: Context<InitializeProgramConfig>) -> Result<()> {
        instructions::initialize_program_config_handler(ctx)
    }

-    pub fn update_program_config(
-        ctx: Context<UpdateProgramConfig>,
-        new_fee: u64,
-    ) -> Result<()> {
+    pub fn update_program_config(ctx: Context<UpdateProgramConfig>, new_fee: u64) -> Result<()> {
        instructions::update_program_config_handler(ctx, new_fee)
    }
-
-    pub fn payment(ctx: Context<Payment>, amount: u64) -> Result<()> {
-        instructions::payment_handler(ctx, amount)
-    }
}
```

-#### 5. Program Config State
+### 5. Program Config State

Next, let's define the structure for the `ProgramConfig` state. This account
will store the admin, the token account where fees are sent, and the fee rate.
@@ -663,18 +755,15 @@ following code.

use anchor_lang::prelude::*;

#[account]
+#[derive(InitSpace)]
pub struct ProgramConfig {
    pub admin: Pubkey,
    pub fee_destination: Pubkey,
    pub fee_basis_points: u64,
}
-
-impl ProgramConfig {
-    pub const LEN: usize = 8 + 32 + 32 + 8;
-}
```
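+
+As a quick sanity check on the sizes involved (our arithmetic, not code you
+need to add anywhere): `InitSpace` assigns 32 bytes to each `Pubkey` field and
+8 bytes to a `u64`, and the account additionally needs 8 bytes for Anchor's
+discriminator, which the next step accounts for.
+
+```rust
+// 32 (admin) + 32 (fee_destination) + 8 (fee_basis_points) = 72 bytes
+const _: () = assert!(ProgramConfig::INIT_SPACE == 72);
+```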
-#### 6. Add Initialize Program Config Account Instruction
+### 6. Add Initialize Program Config Account Instruction

Now let's create the instruction logic for initializing the program config
account. It should only be callable by a transaction signed by the `ADMIN` key
@@ -689,17 +778,23 @@ Within the `program_config` folder, create a file called

```rust
use crate::state::ProgramConfig;
-use crate::ADMIN;
-use crate::SEED_PROGRAM_CONFIG;
-use crate::USDC_MINT_PUBKEY;
+use crate::{ADMIN, SEED_PROGRAM_CONFIG, USDC_MINT_PUBKEY};
use anchor_lang::prelude::*;
use anchor_spl::token::TokenAccount;

+pub const DISCRIMINATOR_SIZE: usize = 8;
+
#[derive(Accounts)]
pub struct InitializeProgramConfig<'info> {
-    #[account(init, seeds = [SEED_PROGRAM_CONFIG], bump, payer = authority, space = ProgramConfig::LEN)]
+    #[account(
+        init,
+        seeds = [SEED_PROGRAM_CONFIG],
+        bump,
+        payer = authority,
+        space = DISCRIMINATOR_SIZE + ProgramConfig::INIT_SPACE
+    )]
    pub program_config: Account<'info, ProgramConfig>,
-    #[account( token::mint = USDC_MINT_PUBKEY)]
+    #[account(token::mint = USDC_MINT_PUBKEY)]
    pub fee_destination: Account<'info, TokenAccount>,
    #[account(mut, address = ADMIN)]
    pub authority: Signer<'info>,
@@ -707,14 +802,16 @@ pub struct InitializeProgramConfig<'info> {
}

pub fn initialize_program_config_handler(ctx: Context<InitializeProgramConfig>) -> Result<()> {
-    ctx.accounts.program_config.admin = ctx.accounts.authority.key();
-    ctx.accounts.program_config.fee_destination = ctx.accounts.fee_destination.key();
-    ctx.accounts.program_config.fee_basis_points = 100;
+    ctx.accounts.program_config.set_inner(ProgramConfig {
+        admin: ctx.accounts.authority.key(),
+        fee_destination: ctx.accounts.fee_destination.key(),
+        fee_basis_points: 100,
+    });
    Ok(())
}
```

-#### 7. Add Update Program Config Fee Instruction
+### 7. Add Update Program Config Fee Instruction

Next, implement the instruction logic for updating the config account. The
instruction should require that the signer match the `admin` stored in the
@@ -725,8 +822,7 @@ Within the `program_config` folder, create a file called

```rust
use crate::state::ProgramConfig;
-use crate::SEED_PROGRAM_CONFIG;
-use crate::USDC_MINT_PUBKEY;
+use crate::{SEED_PROGRAM_CONFIG, USDC_MINT_PUBKEY};
use anchor_lang::prelude::*;
use anchor_spl::token::TokenAccount;

@@ -734,12 +830,9 @@ use anchor_spl::token::TokenAccount;
pub struct UpdateProgramConfig<'info> {
    #[account(mut, seeds = [SEED_PROGRAM_CONFIG], bump)]
    pub program_config: Account<'info, ProgramConfig>,
-    #[account( token::mint = USDC_MINT_PUBKEY)]
+    #[account(token::mint = USDC_MINT_PUBKEY)]
    pub fee_destination: Account<'info, TokenAccount>,
-    #[account(
-        mut,
-        address = program_config.admin,
-    )]
+    #[account(mut, address = program_config.admin)]
    pub admin: Signer<'info>,
    /// CHECK: arbitrarily assigned by existing admin
    pub new_admin: UncheckedAccount<'info>,
@@ -756,7 +849,7 @@ pub fn update_program_config_handler(
}
```

-#### 8. Add mod.rs and update instructions.rs
+### 8. Add mod.rs and update instructions.rs

Next, let's expose the instruction handlers we created so that the call from
`lib.rs` doesn't show an error. Start by adding a file `mod.rs` in the
@@ -782,7 +875,7 @@ mod payment;
pub use payment::*;
```
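+
+The hunk above only shows the tail of the listing, so for orientation: the new
+`program_config/mod.rs` simply re-exports the two instruction handlers created
+in the previous steps. A sketch of its plausible contents (module names follow
+the files created above; check the repository for the exact code):
+
+```rust
+mod initialize_program_config;
+mod update_program_config;
+
+pub use initialize_program_config::*;
+pub use update_program_config::*;
+```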
-#### 9. Update Payment Instruction
+### 9. Update Payment Instruction

Lastly, let's update the payment instruction to check that the `fee_destination`
account in the instruction matches the `fee_destination` stored in the program
@@ -791,8 +884,7 @@ config account.
Then update the instruction's fee calculation to be based on the

```rust
use crate::state::ProgramConfig;
-use crate::SEED_PROGRAM_CONFIG;
-use crate::USDC_MINT_PUBKEY;
+use crate::{SEED_PROGRAM_CONFIG, USDC_MINT_PUBKEY};
use anchor_lang::prelude::*;
use anchor_spl::token::{self, Token, TokenAccount};

@@ -804,20 +896,11 @@ pub struct Payment<'info> {
        has_one = fee_destination
    )]
    pub program_config: Account<'info, ProgramConfig>,
-    #[account(
-        mut,
-        token::mint = USDC_MINT_PUBKEY
-    )]
+    #[account(mut, token::mint = USDC_MINT_PUBKEY)]
    pub fee_destination: Account<'info, TokenAccount>,
-    #[account(
-        mut,
-        token::mint = USDC_MINT_PUBKEY
-    )]
+    #[account(mut, token::mint = USDC_MINT_PUBKEY)]
    pub sender_token_account: Account<'info, TokenAccount>,
-    #[account(
-        mut,
-        token::mint = USDC_MINT_PUBKEY
-    )]
+    #[account(mut, token::mint = USDC_MINT_PUBKEY)]
    pub receiver_token_account: Account<'info, TokenAccount>,
    pub token_program: Program<'info, Token>,
    #[account(mut)]
@@ -827,10 +910,10 @@ pub fn payment_handler(ctx: Context<Payment>, amount: u64) -> Result<()> {
    let fee_amount = amount
        .checked_mul(ctx.accounts.program_config.fee_basis_points)
-        .unwrap()
+        .ok_or(ProgramError::ArithmeticOverflow)?
        .checked_div(10000)
-        .unwrap();
+        .ok_or(ProgramError::ArithmeticOverflow)?;
-    let remaining_amount = amount.checked_sub(fee_amount).unwrap();
+    let remaining_amount = amount.checked_sub(fee_amount).ok_or(ProgramError::ArithmeticOverflow)?;

    msg!("Amount: {}", amount);
    msg!("Fee Amount: {}", fee_amount);
@@ -864,14 +947,14 @@ pub fn payment_handler(ctx: Context<Payment>, amount: u64) -> Result<()> {
}
```

-#### 10. Test
+### 10. Test

Now that we're done implementing our new program configuration struct and
instructions, let's move on to testing our updated program.

To begin, add the PDA for the program config account to the test file.

```typescript
-describe("config", () => {
+describe("Config", () => {
  ...
  const programConfig = findProgramAddressSync(
    [Buffer.from("program_config")],
@@ -892,27 +975,30 @@ correct fee is set and that the correct admin is stored on the program config
account.

```typescript
-it("Initialize Program Config Account", async () => {
-  const tx = await program.methods
-    .initializeProgramConfig()
-    .accounts({
-      programConfig: programConfig,
-      feeDestination: feeDestination,
-      authority: wallet.publicKey,
-      systemProgram: anchor.web3.SystemProgram.programId,
-    })
-    .rpc();
-
-  assert.strictEqual(
-    (
-      await program.account.programConfig.fetch(programConfig)
-    ).feeBasisPoints.toNumber(),
-    100,
-  );
-  assert.strictEqual(
-    (await program.account.programConfig.fetch(programConfig)).admin.toString(),
-    wallet.publicKey.toString(),
-  );
+it("initializes program config account", async () => {
+  try {
+    await program.methods
+      .initializeProgramConfig()
+      .accounts({
+        programConfig: programConfig,
+        feeDestination: feeDestination,
+        authority: walletAuthority.publicKey,
+        systemProgram: anchor.web3.SystemProgram.programId,
+      })
+      .rpc();
+
+    const configAccount =
+      await program.account.programConfig.fetch(programConfig);
+    expect(configAccount.feeBasisPoints.toNumber()).to.equal(
+      INITIAL_FEE_BASIS_POINTS,
+    );
+    expect(configAccount.admin.toString()).to.equal(
+      walletAuthority.publicKey.toString(),
+    );
+  } catch (error) {
+    console.error("Program config initialization failed:", error);
+    throw error;
+  }
});
```

@@ -922,36 +1008,38 @@ transferred to the receiver.

Here we update the existing test to include the `programConfig` account. As a
quick check on the expected numbers (assuming `PAYMENT_AMOUNT` is 10,000 and
`INITIAL_FEE_BASIS_POINTS` is 100, as in the original test): the fee works out
to 10,000 * 100 / 10000 = 100 tokens, leaving 9,900 for the receiver.
```typescript -it("Payment completes successfully", async () => { - const tx = await program.methods - .payment(new anchor.BN(10000)) - .accounts({ - programConfig: programConfig, - feeDestination: feeDestination, - senderTokenAccount: senderTokenAccount, - receiverTokenAccount: receiverTokenAccount, - sender: sender.publicKey, - }) - .transaction(); - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [sender]); - - assert.strictEqual( - (await connection.getTokenAccountBalance(senderTokenAccount)).value - .uiAmount, - 0, - ); - - assert.strictEqual( - (await connection.getTokenAccountBalance(feeDestination)).value.uiAmount, - 100, - ); +it("completes payment successfully", async () => { + try { + const transaction = await program.methods + .payment(new anchor.BN(PAYMENT_AMOUNT)) + .accounts({ + programConfig: programConfig, + feeDestination: feeDestination, + senderTokenAccount: senderTokenAccount, + receiverTokenAccount: receiverTokenAccount, + sender: sender.publicKey, + }) + .transaction(); - assert.strictEqual( - (await connection.getTokenAccountBalance(receiverTokenAccount)).value - .uiAmount, - 9900, - ); + await anchor.web3.sendAndConfirmTransaction(connection, transaction, [ + sender, + ]); + + const senderBalance = await getAccount(connection, senderTokenAccount); + const feeDestinationBalance = await getAccount(connection, feeDestination); + const receiverBalance = await getAccount(connection, receiverTokenAccount); + + expect(Number(senderBalance.amount)).to.equal(0); + expect(Number(feeDestinationBalance.amount)).to.equal( + (PAYMENT_AMOUNT * INITIAL_FEE_BASIS_POINTS) / 10000, + ); + expect(Number(receiverBalance.amount)).to.equal( + (PAYMENT_AMOUNT * (10000 - INITIAL_FEE_BASIS_POINTS)) / 10000, + ); + } catch (error) { + console.error("Payment failed:", error); + throw error; + } }); ``` @@ -959,23 +1047,27 @@ The third test attempts to update the fee on the program config account, which should be successful. ```typescript -it("Update Program Config Account", async () => { - const tx = await program.methods - .updateProgramConfig(new anchor.BN(200)) - .accounts({ - programConfig: programConfig, - admin: wallet.publicKey, - feeDestination: feeDestination, - newAdmin: sender.publicKey, - }) - .rpc(); - - assert.strictEqual( - ( - await program.account.programConfig.fetch(programConfig) - ).feeBasisPoints.toNumber(), - 200, - ); +it("updates program config account", async () => { + try { + await program.methods + .updateProgramConfig(new anchor.BN(UPDATED_FEE_BASIS_POINTS)) + .accounts({ + programConfig: programConfig, + admin: walletAuthority.publicKey, + feeDestination: feeDestination, + newAdmin: walletAuthority.publicKey, + }) + .rpc(); + + const configAccount = + await program.account.programConfig.fetch(programConfig); + expect(configAccount.feeBasisPoints.toNumber()).to.equal( + UPDATED_FEE_BASIS_POINTS, + ); + } catch (error) { + console.error("Program config update failed:", error); + throw error; + } }); ``` @@ -983,9 +1075,9 @@ The fourth test tries to update the fee on the program config account, where the admin is not the one stored on the program config account, and this should fail. 
```typescript
-it("Update Program Config Account with unauthorized admin (expect fail)", async () => {
+it("fails to update program config account with unauthorized admin", async () => {
  try {
-    const tx = await program.methods
+    const transaction = await program.methods
      .updateProgramConfig(new anchor.BN(300))
      .accounts({
        programConfig: programConfig,
@@ -995,35 +1087,54 @@ it("Update Program Config Account with unauthorized admin (expect fail)", async
      })
      .transaction();

-    await anchor.web3.sendAndConfirmTransaction(connection, tx, [sender]);
-  } catch (err) {
-    expect(err);
+    await anchor.web3.sendAndConfirmTransaction(connection, transaction, [
+      sender,
+    ]);
+    throw new Error("Expected transaction to fail, but it succeeded");
+  } catch (error) {
+    // Rethrow if the transaction unexpectedly succeeded above.
+    if (error.message === "Expected transaction to fail, but it succeeded") {
+      throw error;
+    }
+    expect(error).to.exist;
+    console.log("Transaction failed as expected:", error.message);
  }
});
```

Finally, run the test using the following command:

-```
-anchor test -- --features "local-testing"
+```shell
+anchor test --skip-deploy -- --features "local-testing"
```

You should see the following output:

-```
-config
-  ✔ Initialize Program Config Account (199ms)
-  ✔ Payment completes successfully (405ms)
-  ✔ Update Program Config Account (403ms)
-  ✔ Update Program Config Account with unauthorized admin (expect fail)
-
-4 passing (8s)
+```shell
+Config
+  ✔ initializes program config account (430ms)
+  ✔ completes payment successfully (438ms)
+  ✔ updates program config account (416ms)
+Transaction failed as expected: Simulation failed.
+Message: Transaction simulation failed: Error processing Instruction 0: custom program error: 0x7dc.
+Logs:
+[
+  "Program FF3eGbZnharYruJNwRV7jqnDYvpLkyvgbSv5gsGbJHps invoke [1]",
+  "Program log: Instruction: UpdateProgramConfig",
+  "Program log: AnchorError caused by account: admin. Error Code: ConstraintAddress. Error Number: 2012. Error Message: An address constraint was violated.",
+  "Program log: Left:",
+  "Program log: F32dEMPn4BtQjHBgXXwfuEMo5qBQJySs8cCDrtwWQdBr",
+  "Program log: Right:",
+  "Program log: GprrWv9r8BMxQiWea9MrbCyK7ig7Mj8CcseEbJhDDZXM",
+  "Program FF3eGbZnharYruJNwRV7jqnDYvpLkyvgbSv5gsGbJHps consumed 7868 of 200000 compute units",
+  "Program FF3eGbZnharYruJNwRV7jqnDYvpLkyvgbSv5gsGbJHps failed: custom program error: 0x7dc"
+].
+Catch the `SendTransactionError` and call `getLogs()` on it for full details.
+  ✔ fails to update program config account with unauthorized admin
+
+
+  4 passing (22s)
```

And that's it! You've made the program a lot easier to work with moving
forward. If you want to take a look at the final solution code you can find it
on
-the `solution` branch
-of [the same repository](https://github.com/Unboxed-Software/solana-admin-instructions/tree/solution).
+the [`solution` branch of the same repository](https://github.com/solana-developers/admin-instructions/tree/solution).

## Challenge

@@ -1032,48 +1143,12 @@ use the program's upgrade authority as the initial admin.

Go ahead and update the lab's `initialize_program_config` so that only the
upgrade authority can call it rather than having a hardcoded `ADMIN`.

-Note that the `anchor test` command, when run on a local network, starts a new
-test validator using `solana-test-validator`. This test validator uses a
-non-upgradeable loader. The non-upgradeable loader makes it so the program's
-`program_data` account isn't initialized when the validator starts. You'll
-recall from the lesson that this account is how we access the upgrade authority
-from the program.
- -To work around this, you can add a `deploy` function to the test file that runs -the deploy command for the program with an upgradeable loader. To use it, run -`anchor test --skip-deploy`, and call the `deploy` function within the test to -run the deploy command after the test validator has started. - -```typescript -import { execSync } from "child_process" - -... - -const deploy = () => { - const deployCmd = `solana program deploy --url localhost -v --program-id $(pwd)/target/deploy/config-keypair.json $(pwd)/target/deploy/config.so` - execSync(deployCmd) -} - -... - -before(async () => { - ... - deploy() -}) -``` - -For example, the command to run the test with features would look like this: - -``` -anchor test --skip-deploy -- --features "local-testing" -``` - Try doing this on your own, but if you get stuck, feel free to reference the -`challenge` branch of -[the same repository](https://github.com/Unboxed-Software/solana-admin-instructions/tree/challenge) +[`challenge` branch of the same repository](https://github.com/solana-developers/admin-instructions/tree/challenge) to see one possible solution. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=02a7dab7-d9c1-495b-928c-a4412006ec20)! diff --git a/content/courses/program-optimization/rust-macros.md b/content/courses/program-optimization/rust-macros.md index 1f55c436b..6f9e106a1 100644 --- a/content/courses/program-optimization/rust-macros.md +++ b/content/courses/program-optimization/rust-macros.md @@ -9,62 +9,61 @@ description: "Use Rust macros to generate code at compile time." ## Summary -- **Procedural macros** are a special kind of Rust macros that allow the +- **Procedural macros** are a special kind of Rust macro that allows the programmer to generate code at compile time based on custom input. -- In the Anchor framework, procedural macros are used to generate code that - reduces the amount of boilerplate required when writing Solana programs. -- An **Abstract Syntax Tree (AST)** is a representation of the syntax and - structure of the input code that is passed to a procedural macro. When - creating a macro, you use elements of the AST like tokens and items to - generate the appropriate code. -- A **Token** is the smallest unit of source code that can be parsed by the - compiler in Rust. +- In the Anchor framework, procedural macros generate code that reduces the + boilerplate required when writing Solana programs. +- An **Abstract Syntax Tree (AST)** represents the syntax and structure of the + input code that is passed to a procedural macro. When creating a macro, you + use elements of the AST, like tokens and items, to generate the appropriate + code. +- A **Token** is the smallest source code unit that the Rust compiler can parse. - An **Item** is a declaration that defines something that can be used in a Rust program, such as a struct, an enum, a trait, a function, or a method. -- A **TokenStream** is a sequence of tokens that represents a piece of source - code, and can be passed to a procedural macro to allow it to access and - manipulate the individual tokens in the code. +- A **TokenStream** is a sequence of tokens representing a piece of source code. + It can be passed to a procedural macro, allowing it to access and manipulate + the individual tokens in the code. ## Lesson -In Rust, a macro is a piece of code that you can write once and then "expand" to -generate code at compile time. 
This can be useful when you need to generate code -that is repetitive or complex, or when you want to use the same code in multiple -places in your program. +In Rust, a macro is a piece of code you can write once and then "expand" to +generate code at compile time. This code generation can be helpful when you need +to generate repetitive or complex code or when you want to use the same code in +multiple places in your program. There are two different types of macros: declarative macros and procedural macros. - Declarative macros are defined using the `macro_rules!` macro, which allows - you to match against patterns of code and generate code based on the matching + you to match against code patterns and generate code based on the matching pattern. - Procedural macros in Rust are defined using Rust code and operate on the abstract syntax tree (AST) of the input TokenStream, which allows them to manipulate and generate code at a finer level of detail. -In this lesson, we'll focus on procedural macros, which are commonly used in the -Anchor framework. +This lesson will focus on procedural macros, which are standard in the Anchor +framework. ### Rust concepts -Before we dig into macros, specifically, let's talk about some of the important -terminology, concepts, and tools we'll be using throughout the lesson. +Before we discuss macros specifically, let's review some of the important +terminology, concepts, and tools we'll use throughout the lesson. -#### Token +### Token -In the context of Rust programming, a -[token](https://doc.rust-lang.org/reference/tokens.html) is a basic element of -the language syntax like an identifier or literal value. Tokens represent the -smallest unit of source code that are recognized by the Rust compiler, and they -are used to build up more complex expressions and statements in a program. +In Rust programming, a [token](https://doc.rust-lang.org/reference/tokens.html) +is an essential element of the language syntax, like an identifier or literal +value. Tokens represent the smallest unit of source code recognized by the Rust +compiler, and they are used to build more complex expressions and statements in +a program. Examples of Rust tokens include: - [Keywords](https://doc.rust-lang.org/reference/keywords.html), such as `fn`, - `let`, and `match`, are reserved words in the Rust language that have special + `let`, and `match`, are reserved words in the Rust language with special meanings. - [Identifiers](https://doc.rust-lang.org/reference/identifiers.html), such as - variable and function names, are used to refer to values and functions. + variable and function names, refer to values and functions. - [Punctuation](https://doc.rust-lang.org/reference/tokens.html#punctuation) marks, such as `{`, `}`, and `;`, are used to structure and delimit blocks of code. @@ -74,11 +73,11 @@ Examples of Rust tokens include: You can [read more about Rust tokens](https://doc.rust-lang.org/reference/tokens.html). -#### Item +### Item -Items are named, self-contained pieces of code in Rust. They provide a way to -group related code together and give it a name by which the group can be -referenced. This allows you to reuse and organize your code in a modular way. +Items are named self-contained pieces of code in Rust. They provide a way to +group related code and give it a name by which the group can be referenced, +allowing you to reuse and organize your code modularly. 
There are several different kinds of items, such as: @@ -92,14 +91,14 @@ There are several different kinds of items, such as: You can [read more about Rust items](https://doc.rust-lang.org/reference/items.html). -#### Token Streams +### Token Streams -The `TokenStream` type is a data type that represents a sequence of tokens. This -type is defined in the `proc_macro` crate and is surfaced as a way for you to -write macros based on other code in the codebase. +The `TokenStream` data type represents a sequence of tokens. It is defined in +the `proc_macro` crate and is surfaced so that macros can be written based on +other code in the codebase. When defining a procedural macro, the macro input is passed to the macro as a -`TokenStream`, which can then be parsed and transformed as needed. The resulting +`TokenStream`, which can then be parsed and transformed. The resulting `TokenStream` can then be expanded into the final code output by the macro. ```rust @@ -111,27 +110,27 @@ pub fn my_macro(input: TokenStream) -> TokenStream { } ``` -#### Abstract syntax tree +### Abstract syntax tree -In the context of a Rust procedural macro, an abstract syntax tree (AST) is a -data structure that represents the hierarchical structure of the input tokens -and their meaning in the Rust language. It's typically used as an intermediate -representation of the input that can be easily processed and transformed by the +In a Rust procedural macro context, an abstract syntax tree (AST) is a data +structure that represents the hierarchical structure of the input tokens and +their meaning in the Rust language. It's typically used as an intermediate +representation of the input that can be quickly processed and transformed by the procedural macro. The macro can use the AST to analyze the input code and make changes to it, such -as adding or removing tokens, or transforming the meaning of the code in some -way. It can then use this transformed AST to generate new code, which can be -returned as the output of the proc macro. +as adding or removing tokens or transforming the meaning of the code. It can +then use this transformed AST to generate new code, which can be returned as the +output of the proc macro. -#### The `syn` crate +### The `syn` crate The `syn` crate is available to help parse a token stream into an AST that macro code can traverse and manipulate. When a procedural macro is invoked in a Rust program, the macro function is called with a token stream as the input. Parsing this input is the first step to virtually any macro. -Take as an example a proc macro that you invoke using `my_macro!`as follows: +Take as an example a proc macro that you invoke using `my_macro!` as follows: ```rust my_macro!("hello, world"); @@ -147,42 +146,43 @@ use syn::parse_macro_input; #[proc_macro] pub fn my_macro(input: TokenStream) -> TokenStream { let ast = parse_macro_input!(input as syn::LitStr); - eprintln! {"{:#?}", ast}; + eprintln!("{:#?}", ast.token()); ... } ``` Inside the proc macro, the code uses the `parse_macro_input!` macro from the `syn` crate to parse the input `TokenStream` into an abstract syntax tree (AST). -Specifically, this example parses it as an instance of `LitStr` that represents -a string literal in Rust. The `eprintln!` macro is then used to print the -`LitStr` AST for debugging purposes. +Specifically, this example parses it as an instance of `LitStr` representing a +UTF-8 string literal in Rust. 
Call the `.token()` method to return a
+[Literal](https://docs.rs/proc-macro2/1.0.86/proc_macro2/struct.Literal.html)
+that we pass to `eprintln!` to print the AST for debugging purposes.

```rust
-LitStr {
-    token: Literal {
-        kind: Str,
-        symbol: "hello, world",
-        suffix: None,
-        span: #0 bytes(172..186),
-    },
+Literal {
+    kind: Str,
+    symbol: "hello, world",
+    suffix: None,
+    // Shows the byte offsets 31 to 45 of the literal "hello, world"
+    // in the portion of the source code from which the `TokenStream` was parsed.
+    span: #0 bytes(31..45),
}
```

-The output of the `eprintln!` macro shows the structure of the `LitStr` AST that
-was generated from the input tokens. It shows the string literal value
+The output of the `eprintln!` macro shows the structure of the `Literal` AST
+that was generated from the input tokens. It shows the string literal value
(`"hello, world"`) and other metadata about the token, such as its kind
(`Str`), suffix (`None`), and span.

-#### The `quote` crate
+### The `quote` crate

-Another important crate is the `quote` crate. This crate is pivotal in the code
+Another important crate is the `quote` crate, which is pivotal in the code
generation portion of the macro.

Once a proc macro has finished analyzing and transforming the AST, it can use
-the `quote` crate or a similar code generation library to convert the AST back
-into a token stream. After that, it returns the `TokenStream`, which the Rust
-compiler uses to replace the original stream in the source code.
+the `quote` crate or a similar code generation library to convert it back into a
+token stream. After that, it returns the `TokenStream`, which the Rust compiler
+uses to replace the original stream in the source code.

Take the below example of `my_macro`:

@@ -194,10 +194,8 @@ use quote::quote;

#[proc_macro]
pub fn my_macro(input: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(input as syn::LitStr);
-    eprintln! {"{:#?}", ast};
-    let expanded = {
-        quote! {println!("The input is: {}", #ast)}
-    };
+    eprintln!("{:#?}", ast.token());
+    let expanded = quote! {println!("The input is: {}", #ast)};
    expanded.into()
}
```

@@ -207,33 +205,32 @@ of a `println!` macro call with the `LitStr` AST as its argument.

Note that the `quote!` macro generates a `TokenStream` of type
`proc_macro2::TokenStream`. To return this `TokenStream` to the Rust compiler,
-you need to use the `.into()` method to convert it to `proc_macro::TokenStream`.
-The Rust compiler will then use this `TokenStream` to replace the original proc
-macro call in the source code.
+use the `.into()` method to convert it to `proc_macro::TokenStream`. The Rust
+compiler will then use this `TokenStream` to replace the original proc macro
+call in the source code.

```text
The input is: hello, world
```

-This allows you to create procedural macros that perform powerful code
-generation and metaprogramming tasks.
+This process allows you to create procedural macros that perform powerful code
+generation and metaprogramming tasks.

### Procedural Macro

Procedural macros in Rust are a powerful way to extend the language and create
-custom syntax. These macros are written in Rust and are compiled along with the
-rest of the code. There are three types of procedural macros:
+custom syntax. These macros are written in Rust and compiled with the rest of
+the code.
There are three types of procedural macros: - Function-like macros - `custom!(...)` - Derive macros - `#[derive(CustomDerive)]` - Attribute macros - `#[CustomAttribute]` This section will discuss the three types of procedural macros and provide an -example implementation of one. The process of writing a procedural macro is -consistent across all three types, so the example provided can be adapted to the -other types. +example implementation of one. Writing a procedural macro is consistent across +all three types, making this example adaptable to the other types. -#### Function-like macros +### Function-like macros Function-like procedural macros are the simplest of the three types of procedural macros. These macros are defined using a function preceded by the @@ -243,13 +240,13 @@ return a new `TokenStream` as output to replace the original code. ```rust #[proc_macro] pub fn my_macro(input: TokenStream) -> TokenStream { - ... + ... } ``` -These macros are invoked using the name of the function followed by the `!` -operator. They can be used in various places in a Rust program, such as in -expressions, statements, and function definitions. +These macros are invoked using the function's name followed by the `!` operator. +They can be used in various places in a Rust program, such as in expressions, +statements, and function definitions. ```rust my_macro!(input); @@ -257,25 +254,24 @@ my_macro!(input); Function-like procedural macros are best suited for simple code generation tasks that require only a single input and output stream. They are easy to understand -and use, and they provide a straightforward way to generate code at compile -time. +and use and provide a straightforward way to generate code at compile time. -#### Attribute macros +### Attribute macros Attribute macros define new attributes that are attached to items in a Rust -program such as functions and structs. +program, such as functions and structs. ```rust #[my_macro] fn my_function() { - ... + ... } ``` Attribute macros are defined with a function preceded by the `#[proc_macro_attribute]` attribute. The function requires two token streams as -input and returns a single `TokenStream` as output that replaces the original -item with an arbitrary number of new items. +input and returns a single `TokenStream` output that replaces the original item +with an arbitrary number of new items. ```rust #[proc_macro_attribute] @@ -295,22 +291,21 @@ fn my_function() { } ``` -For example, an attribute macro could process the arguments passed to the -attribute to enable or disable certain features, and then use the second token -stream to modify the original item in some way. By having access to both token -streams, attribute macros can provide greater flexibility and functionality -compared to using only a single token stream. +For example, an attribute macro could process the arguments passed to it to turn +certain features on or off and then use the second token stream to modify the +original item. With access to both token streams, attribute macros can provide +greater flexibility and functionality than using only a single token stream. -#### Derive macros +### Derive macros Derive macros are invoked using the `#[derive]` attribute on a struct, enum, or -union. They are typically used to automatically implement traits for the input -types. +union. They are typically used to implement traits for the input types +automatically. 
```rust #[derive(MyMacro)] struct Input { - field: String + field: String } ``` @@ -320,20 +315,20 @@ They take a single token stream as input and return a single token stream as output. Unlike the other procedural macros, the returned token stream doesn't replace -the original code. Rather, the returned token stream gets appended to the module -or block that the original item belongs to. This allows developers to extend the -functionality of the original item without modifying the original code. +the original code. Instead, it gets appended to the module or block to which the +original item belongs, allowing developers to extend the functionality of the +original item without modifying the original code. ```rust #[proc_macro_derive(MyMacro)] pub fn my_macro(input: TokenStream) -> TokenStream { - ... + ... } ``` In addition to implementing traits, derive macros can define helper attributes. -Helper attributes can be used in the scope of the item that the derive macro is -applied to and customize the code generation process. +Helper attributes can be used in the scope of the item to which the derive macro +is applied and customize the code generation process. ```rust #[proc_macro_derive(MyMacro, attributes(helper))] @@ -342,9 +337,8 @@ pub fn my_macro(body: TokenStream) -> TokenStream { } ``` -Helper attributes are inert, which means they do not have any effect on their -own, and their only purpose is to be used as input to the derive macro that -defined them. +Helper attributes are inert, which means they have no effect on their own. Their +only purpose is to be used as input to the derive macro that defined them. ```rust #[derive(MyMacro)] @@ -355,11 +349,11 @@ struct Input { ``` For example, a derive macro could define a helper attribute to perform -additional operations depending on the presence of the attribute. This allows -developers to further extend the functionality of derive macros and customize -the code they generate in a more flexible way. +additional operations depending on its presence, allowing developers to extend +the functionality of derive macros and customize the code they generate more +flexibly. -#### Example of a procedural macro +### Example of a procedural macro This example shows how to use a derive procedural macro to automatically generate an implementation of a `describe()` method for a struct. @@ -385,9 +379,9 @@ console. MyStruct is a struct with these named fields: my_string, my_number. ``` -The first step is to define the procedural macro using the using the -`#[proc_macro_derive]` attribute. The input `TokenStream` is parsed using the -`parse_macro_input!()` macro to extract the struct's identifier and data. +The first step is to define the procedural macro using the +`#[proc_macro_derive]` attribute. To extract the struct's identifier and data, +the input `TokenStream` is parsed using the `parse_macro_input!()` macro. ```rust use proc_macro::{self, TokenStream}; @@ -404,7 +398,7 @@ pub fn describe_struct(input: TokenStream) -> TokenStream { The next step is to use the `match` keyword to perform pattern matching on the `data` value to extract the names of the fields in the struct. -The first `match` has two arms: one for the `syn::Data::Struct` variant, and one +The first `match` has two arms: one for the `syn::Data::Struct` variant and one for the "catch-all" `_` arm that handles all other variants of `syn::Data`. 
The second `match` has two arms as well: one for the `syn::Fields::Named` @@ -427,9 +421,9 @@ pub fn describe_struct(input: TokenStream) -> TokenStream { syn::Data::Struct(s) => match s.fields { syn::Fields::Named(FieldsNamed { named, .. }) => { let idents = named.iter().map(|f| &f.ident); - format!( + format!( "a struct with these named fields: {}", - quote! {#(#idents), *}, + quote! {#(#idents), *}, ) } _ => panic!("The syn::Fields variant is not supported"), @@ -440,7 +434,7 @@ pub fn describe_struct(input: TokenStream) -> TokenStream { } ``` -The last step is to implement a `describe()` method for a struct. The `expanded` +The last step implements a `describe()` method for a struct. The `expanded` variable is defined using the `quote!` macro and the `impl` keyword to create an implementation for the struct name stored in the `#ident` variable. @@ -463,9 +457,9 @@ pub fn describe(input: TokenStream) -> TokenStream { syn::Data::Struct(s) => match s.fields { syn::Fields::Named(FieldsNamed { named, .. }) => { let idents = named.iter().map(|f| &f.ident); - format!( + format!( "a struct with these named fields: {}", - quote! {#(#idents), *}, + quote! {#(#idents), *}, ) } _ => panic!("The syn::Fields variant is not supported"), @@ -476,7 +470,7 @@ pub fn describe(input: TokenStream) -> TokenStream { let expanded = quote! { impl #ident { fn describe() { - println!("{} is {}.", stringify!(#ident), #field_names); + println!("{} is {}.", stringify!(#ident), #field_names); } } }; @@ -497,9 +491,9 @@ struct MyStruct { } ``` -The `cargo expand` command from the `cargo-expand` crate can be used to expand -Rust code that uses procedural macros. For example, the code for the `MyStruct` -struct generated using the the `#[derive(Describe)]` attribute looks like this: +The `cargo expand` command from the `cargo-expand` crate can expand Rust code +that uses procedural macros. For example, the code for the `MyStruct` struct +generated using the `#[derive(Describe)]` attribute looks like this: ```rust struct MyStruct { @@ -527,12 +521,12 @@ impl MyStruct { ### Anchor procedural macros -Procedural macros are the magic behind the Anchor library that is commonly used -in Solana development. Anchor macros allow for more succinct code, common -security checks, and more. Let's go through a few examples of how Anchor uses -procedural macros. +Procedural macros are the magic behind the Anchor library commonly used in +Solana development. Anchor macros allow for more concise code, standard security +checks, and more. Let's go through a few examples of how Anchor uses procedural +macros. -#### Function-like macro +### Function-like macro The `declare_id` macro shows how function-like macros are used in Anchor. This macro takes in a string of characters representing a program's ID as input and @@ -548,21 +542,30 @@ indicating that it's a function-like proc macro. ```rust #[proc_macro] pub fn declare_id(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let address = input.clone().to_string(); + let id = parse_macro_input!(input as id::Id); - proc_macro::TokenStream::from(quote! {#id}) + let ret = quote! { #id }; + ... + let idl_print = anchor_syn::idl::gen_idl_print_fn_address(address); + return proc_macro::TokenStream::from(quote! { + #ret + #idl_print + }); + ... } ``` -#### Derive macro +### Derive macro -The `#[derive(Accounts)]` is an example of just one of many derive macros that -are used in Anchor. +The `#[derive(Accounts)]` is an example of just one of many derive macros used +in Anchor. 
The `#[derive(Accounts)]` macro generates code that implements the `Accounts`
-trait for the given struct. This trait does a number of things, including
-validating and deserializing the accounts passed into an instruction. This
-allows the struct to be used as a list of accounts required by an instruction in
-an Anchor program.
+trait for the given struct. This trait does several things, including validating
+and deserializing the accounts passed into an instruction, allowing the struct
+to be used as a list of accounts required by an instruction in an Anchor
+program.

Any constraints specified on fields by the `#[account(..)]` attribute are
applied during deserialization. The `#[instruction(..)]` attribute can also be
@@ -573,7 +576,7 @@ macro.

#[derive(Accounts)]
#[instruction(input: String)]
pub struct Initialize<'info> {
-    #[account(init, payer = payer, space = 8 + input.len())]
+    #[account(init, payer = payer, space = MyData::DISCRIMINATOR.len() + MyData::INIT_SPACE + input.len())]
    pub data_account: Account<'info, MyData>,
    #[account(mut)]
    pub payer: Signer<'info>,
@@ -587,6 +590,25 @@ to be used as a derive macro that can be applied to a struct. The line
that this is a derive macro that processes `account` and `instruction` helper
attributes.

+The `INIT_SPACE` constant is used to calculate the initial size of an account.
+Deriving `InitSpace` on the account type, as shown below with `NewAccount`,
+automatically implements the
+[anchor_lang::Space](https://docs.rs/anchor-lang/latest/anchor_lang/trait.Space.html#associatedconstant.INIT_SPACE)
+trait, which provides that constant.
+
+```rust
+#[account]
+#[derive(InitSpace)]
+pub struct NewAccount {
+    data: u64,
+}
+```
+
+The `#[account]` macro also automatically derives the _discriminator_ of an
+Anchor account by implementing the
+[anchor_lang::Discriminator](https://docs.rs/anchor-lang/latest/anchor_lang/trait.Discriminator.html)
+trait. This trait exposes an 8-byte array containing the discriminator,
+available as `NewAccount::DISCRIMINATOR`. Calling `.len()` on this array gives
+the length of the discriminator.
+
```rust
#[proc_macro_derive(Accounts, attributes(account, instruction))]
pub fn derive_anchor_deserialize(item: TokenStream) -> TokenStream {
@@ -596,9 +618,9 @@ pub fn derive_anchor_deserialize(item: TokenStream) -> TokenStream {
}
```

-#### Attribute macro `#[program]`
+### Attribute macro `#[program]`

-The `#[program]` attribute macro is an example of an attribute macro used in
+The `#[program]` attribute macro is an example of an attribute macro used in
Anchor to define the module containing instruction handlers for a Solana
program.

@@ -613,8 +635,8 @@ pub mod my_program {
}

-In this case, the `#[program]` attribute is applied to a module, and it is used
-to specify that the module contains instruction handlers for a Solana program.
+In this case, the `#[program]` attribute is applied to a module to specify that
+it contains instruction handlers for a Solana program.

```rust
#[proc_macro_attribute]
pub fn program(
    _args: proc_macro::TokenStream,
    input: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
-    parse_macro_input!(input as anchor_syn::Program)
+    parse_macro_input!(input as anchor_syn::Program)
        .to_token_stream()
        .into()
}
```

-Overall, the use of proc macros in Anchor greatly reduces the amount of
-repetitive code that Solana developers have to write. By reducing the amount of
-boilerplate code, developers can focus on their program's core functionality and
-avoid mistakes caused by manual repetition.
This ultimately results in a faster
-and more efficient development process.
+Overall, using proc macros in Anchor dramatically reduces the repetitive code
+that Solana developers have to write. By reducing the boilerplate code,
+developers can focus on their program's core functionality and avoid mistakes
+caused by manual repetition, resulting in a faster and more efficient
+development process.

## Lab

Let's practice this by creating a new derive macro! Our new macro will let us
automatically generate instruction logic for updating each field on an account
in an Anchor program.

-#### 1. Starter
+### 1. Starter

To get started, download the starter code from the `starter` branch of
-[this repository](https://github.com/Unboxed-Software/anchor-custom-macro/tree/starter).
+[the `anchor-custom-macro` repository](https://github.com/solana-developers/anchor-custom-macro/tree/starter).

The starter code includes a simple Anchor program that allows you to initialize
-and update a `Config` account. This is similar to what we did with the
+and update a `Config` account, similar to what we did with the
[Program Configuration lesson](/content/courses/program-optimization/program-configuration).

The account in question is structured as follows:

```rust
-use anchor_lang::prelude::*;
+use anchor_lang::{Discriminator, prelude::*};

#[account]
+#[derive(InitSpace)]
pub struct Config {
    pub auth: Pubkey,
    pub bool: bool,
@@ -663,7 +686,7 @@ pub struct Config {
}

impl Config {
-    pub const LEN: usize = 8 + 32 + 1 + 1 + 8;
+    pub const LEN: usize = Config::DISCRIMINATOR.len() + Config::INIT_SPACE;
}
```

@@ -674,18 +697,19 @@ field for updating the field.

The `programs/admin/src/admin_config` directory contains the program's
instruction logic and state. Take a look through each of these files. You'll
-notice that instruction logic for each field is duplicated for each instruction.
+notice that the instruction logic for each field is duplicated for each
+instruction.

The goal of this lab is to implement a procedural macro that will allow us to
replace all of the instruction logic functions and automatically generate
functions for each instruction.

-#### 2. Set up the custom macro declaration
+### 2. Set up the custom macro declaration

-Let's get started by creating a separate crate for our custom macro. In the
-project's root directory, run `cargo new custom-macro`. This will create a new
-`custom-macro` directory with its own `Cargo.toml`. Update the new `Cargo.toml`
-file to be the following:
+Let's get started by creating a separate crate for our custom macro. Run
+`cargo new --lib custom-macro` in the project's root directory. The command
+creates a new `custom-macro` directory with its own `Cargo.toml`. Update the new
+`Cargo.toml` file to be the following:

```text
[package]
@@ -697,17 +721,14 @@ edition = "2021"

proc-macro = true

[dependencies]
-syn = "1.0.105"
-quote = "1.0.21"
-proc-macro2 = "0.4"
-anchor-lang = "0.25.0"
+syn = "2.0.77"
+quote = "1.0.37"
+proc-macro2 = "1.0.86"
+anchor-lang.workspace = true
```

The `proc-macro = true` line defines this crate as containing a procedural
-macro. The dependencies are all crates we'll be using to create our derive
-macro.
-
-Next, change `src/main.rs` to `src/lib.rs`.
+macro. The dependencies are all crates we'll use to create our derive macro.
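+
+If you want to sanity-check the wiring before writing the real macro, a
+throwaway entry point in `custom-macro/src/lib.rs` like the sketch below should
+compile once the dependencies resolve. The `Placeholder` name here is ours for
+illustration and is not part of the lab:
+
+```rust
+use proc_macro::TokenStream;
+
+// Placeholder derive macro: accepts any input and generates no code.
+// It only confirms that `proc-macro = true` and the dependencies are set up;
+// the real `InstructionBuilder` macro replaces it in the next steps.
+#[proc_macro_derive(Placeholder)]
+pub fn placeholder(_input: TokenStream) -> TokenStream {
+    TokenStream::new()
+}
+```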
Next, update the project root's `Cargo.toml` file's `members` field to include
`"custom-macro"`:

```text
[workspace]
members = [
-    "programs/*",
-    "custom-macro"
+    "programs/*",
+    "custom-macro"
]
+
+[workspace.dependencies]
+anchor-lang = "0.30.1"
```

-Now our crate is set up and ready to go. But before we move on, let's create one
-more crate at the root level that we can use to test out our macro as we create
-it. Use `cargo new custom-macro-test` at the project root. Then update the newly
-created `Cargo.toml` to add `anchor-lang` and the `custom-macro` crates as
-dependencies:
+The `[workspace.dependencies]` table declares _anchor-lang_ once, allowing us
+to define its version in the root project configuration. Every workspace member
+that depends on it then inherits that version by setting
+`anchor-lang.workspace = true`, as the _custom-macro_ crate does above and as
+the _custom-macro-test_ crate, defined next, will do.
+
+Now, our crate is set up and ready to go. But before we move on, let's create
+one more crate at the root level that we can use to test out our macro as we
+create it. Use `cargo new custom-macro-test` at the project root. Then update
+the newly created `Cargo.toml` to add `anchor-lang` and the `custom-macro`
+crates as dependencies:

```text
[package]
name = "custom-macro-test"
version = "0.1.0"
edition = "2021"

[dependencies]
-anchor-lang = "0.25.0"
+anchor-lang.workspace = true
custom-macro = { path = "../custom-macro" }
```

@@ -743,9 +773,9 @@ Next, update the root project's `Cargo.toml` to include the new

```text
[workspace]
members = [
-    "programs/*",
-    "custom-macro",
-    "custom-macro-test"
+    "programs/*",
+    "custom-macro",
+    "custom-macro-test"
]
```

@@ -765,14 +795,14 @@ pub struct Config {
}
```

-#### 3. Define the custom macro
+### 3. Define the custom macro

Now, in the `custom-macro/src/lib.rs` file, let's add our new macro's
-declaration. In this file, we’ll use the `parse_macro_input!` macro to parse the
+declaration. In this file, we'll use the `parse_macro_input!` macro to parse the
input `TokenStream` and extract the `ident` and `data` fields from a
-`DeriveInput` struct. Then, we’ll use the `eprintln!` macro to print the values
-of `ident` and `data`. For now, we will use `TokenStream::new()` to return an
-empty `TokenStream`.
+`DeriveInput` struct. Then, we'll use the `eprintln!` macro to print the values
+of `ident` and `data`. For now, we will use `TokenStream::new()` to return an
+empty `TokenStream`.

```rust
use proc_macro::TokenStream;
@@ -783,8 +813,8 @@ pub fn instruction_builder(input: TokenStream) -> TokenStream {
    let DeriveInput { ident, data, .. } = parse_macro_input!(input);

-    eprintln! {"{:#?}", ident};
-    eprintln! {"{:#?}", data};
+    eprintln!("{:#?}", ident);
+    eprintln!("{:#?}", data);

    TokenStream::new()
}
```

Let's test what this prints. To do this, you first need to install the
`cargo-expand` command by running `cargo install cargo-expand`. You'll also need
to install the nightly version of Rust by running `rustup install nightly`.

-Once you've done this, you can see the output of the code described above by
-navigating to the `custom-macro-test` directory and running `cargo expand`.
+Once you've done this, you can see the code output described above by navigating
+to the `custom-macro-test` directory and running `cargo expand`.
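+
+Taken together, the commands look like this (assuming you start from the
+project root):
+
+```shell
+# one-time setup
+cargo install cargo-expand
+rustup install nightly
+
+# expand the macros used by the test crate
+cd custom-macro-test
+cargo expand
+```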
This command expands macros in the crate.
Since the `main.rs` file uses the newly created `InstructionBuilder` macro,
this will print the syntax tree for
-the `ident` and `data` of the struct to the console. Once you have confirmed
-that the input `TokenStream` is parsing correctly, feel free to remove the
-`eprintln!` statements.
+the `ident` and `data` of the struct to the console. Once you confirm that the
+input `TokenStream` parses correctly, remove the `eprintln!` statements.

-#### 4. Get the struct's fields
+### 4. Get the struct's fields

-Next, let’s use `match` statements to get the named fields from the `data` of
+Next, let's use `match` statements to get the named fields from the `data` of
the struct. Then we'll use the `eprintln!` macro to print the values of the
fields.

```rust
use proc_macro::TokenStream;
@@ -826,7 +855,7 @@ pub fn instruction_builder(input: TokenStream) -> TokenStream {
        _ => panic!("The syn::Data variant is not supported: {:#?}", data),
    };

-    eprintln! {"{:#?}", fields};
+    eprintln!("{:#?}", fields);

    TokenStream::new()
}
```

Once again, use `cargo expand` in the terminal to see the output of this code.
Once you have confirmed that the fields are being extracted and printed
correctly, you can remove the `eprintln!` statement.

-#### 5. Build update instructions
+### 5. Build update instructions

-Next, let’s iterate over the fields of the struct and generate an update
+Next, let's iterate over the fields of the struct and generate an update
instruction for each field. The instruction will be generated using the `quote!`
-macro and will include the field's name and type, as well as a new function name
-for the update instruction.
+macro, including the field's name and type and a new function name for the
+update instruction.

```rust
use proc_macro::TokenStream;
@@ -865,7 +894,7 @@ pub fn instruction_builder(input: TokenStream) -> TokenStream {
            let ty = &f.ty;
            let fname = format_ident!("update_{}", name.clone().unwrap());

-            quote! {
+            quote! {
                pub fn #fname(ctx: Context<UpdateAdminAccount>, new_value: #ty) -> Result<()> {
                    let admin_account = &mut ctx.accounts.admin_account;
                    admin_account.#name = new_value;
@@ -878,13 +907,13 @@ pub fn instruction_builder(input: TokenStream) -> TokenStream {
}
```
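+
+Before moving on, note how `quote!` splices iterators: the
+`#(#update_instruction)*` pattern used in the next step repeats its contents
+once per element. A tiny standalone illustration (our example, not lab code):
+
+```rust
+use quote::quote;
+
+fn main() {
+    let names = vec!["a", "b", "c"];
+    // `#(#names),*` expands to each element, separated by commas.
+    let tokens = quote! { #(#names),* };
+    println!("{}", tokens); // "a" , "b" , "c"
+}
+```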
        impl #ident {
-            #(#update_instruction)*
+            #(#update_instruction)*
        }
    };

    expanded.into()
@@ -927,7 +956,7 @@ pub fn instruction_builder(input: TokenStream) -> TokenStream {
```

To verify that the macro is generating the correct code, use the `cargo expand`
-command to see the expanded form of the macro. The output of this look like the
+command to see the expanded form of the macro. The output of this looks like the
following:

```rust
@@ -972,7 +1001,7 @@ impl Config {
}
```

-#### 7. Update the program to use your new macro
+### 7. Update the program to use your new macro

To use the new macro to generate update instructions for the `Config` struct,
first add the `custom-macro` crate as a dependency to the program in its
@@ -980,7 +1009,7 @@ first add the `custom-macro` crate as a dependency to the program in its

```text
[dependencies]
-anchor-lang = "0.25.0"
+anchor-lang.workspace = true
custom-macro = { path = "../../custom-macro" }
```

@@ -1002,13 +1031,12 @@ pub struct Config {
}

impl Config {
-    pub const LEN: usize = 8 + 32 + 1 + 1 + 8;
+    pub const LEN: usize = Config::DISCRIMINATOR.len() + Config::INIT_SPACE;
}
```

Next, navigate to the `admin_update.rs` file and delete the existing update
-instructions. This should leave only the `UpdateAdminAccount` context struct in
-the file.
+instructions, leaving only the `UpdateAdminAccount` context struct in the file.

```rust
use crate::state::Config;
@@ -1019,7 +1047,7 @@ pub struct UpdateAdminAccount<'info> {
    pub auth: Signer<'info>,
    #[account(
        mut,
-        has_one = auth,
+        has_one = auth,
    )]
    pub admin_account: Account<'info, Config>,
}
@@ -1061,37 +1089,36 @@ pub mod admin {
}
```

-Lastly, navigate to the `admin` directory and run `anchor test` to verify that
-the update instructions generated by the `InstructionBuilder` macro are working
-correctly.
+Lastly, navigate to the `admin` directory and run `anchor test` to verify
+that the update instructions generated by the `InstructionBuilder` macro are
+working correctly.

```
-  admin
-    ✔ Is initialized! (160ms)
-    ✔ Update bool! (409ms)
-    ✔ Update u8! (403ms)
-    ✔ Update u64! (406ms)
-    ✔ Update Admin! (405ms)
+  admin
+    ✔ Is initialized! (160ms)
+    ✔ Update bool! (409ms)
+    ✔ Update u8! (403ms)
+    ✔ Update u64! (406ms)
+    ✔ Update Admin! (405ms)

-  5 passing (2s)
+  5 passing (2s)
```

Nice work! At this point, you can create procedural macros to help in your
development process. We encourage you to make the most of the Rust language and
-use macros where they make sense. But even if you don't, knowing how they work
-helps to understand what's happening with Anchor under the hood.
+use macros where they make sense. But even if you don't, knowing how they work
+helps you understand what's happening with Anchor under the hood.

-If you need to spend more time with the solution code, feel free to reference
-the `solution` branch of
-[the repository](https://github.com/Unboxed-Software/anchor-custom-macro/tree/solution).
+If you need more time with the solution code, reference the `solution` branch of
+[the `anchor-custom-macro` repository](https://github.com/solana-developers/anchor-custom-macro/tree/solution).

## Challenge

-To solidify what you've learned, go ahead and create another procedural macro on
-your own. Think about code you've written that could be reduced or improved by a
-macro and try it out! Since this is still practice, it's okay if it doesn't work
-out the way you want or expect. Just jump in and experiment!
+To solidify what you've learned, create another procedural macro.
Think about +code you've written that could be reduced or improved by a macro, and try it +out! Since this is still practice, it's okay if it doesn't work out how you want +or expect. Just jump in and experiment! Push your code to GitHub and diff --git a/content/courses/program-security/account-data-matching.md b/content/courses/program-security/account-data-matching.md index f9963a123..6ecb10f5a 100644 --- a/content/courses/program-security/account-data-matching.md +++ b/content/courses/program-security/account-data-matching.md @@ -12,39 +12,43 @@ description: - Use **data validation checks** to verify that account data matches an expected value. Without appropriate data validation checks, unexpected accounts may be - used in an instruction. + used in an instruction handler. - To implement data validation checks in Rust, simply compare the data stored on an account to an expected value. + ```rust if ctx.accounts.user.key() != ctx.accounts.user_data.user { return Err(ProgramError::InvalidAccountData.into()); } ``` -- In Anchor, you can use `constraint` to checks whether the given expression - evaluates to true. Alternatively, you can use `has_one` to check that a target - account field stored on the account matches the key of an account in the - `Accounts` struct. + +- In Anchor, you can use a + [`constraint`](https://www.anchor-lang.com/docs/account-constraints) to check + whether the given expression evaluates to true. Alternatively, you can use + `has_one` to check that a target account field stored on the account matches + the key of an account in the `Accounts` struct. ## Lesson Account data matching refers to data validation checks used to verify the data stored on an account matches an expected value. Data validation checks provide a way to include additional constraints to ensure the appropriate accounts are -passed into an instruction. +passed into an instruction handler. -This can be useful when accounts required by an instruction have dependencies on -values stored in other accounts or if an instruction is dependent on the data -stored in an account. +This can be useful when accounts required by an instruction handler have +dependencies on values stored in other accounts or if an instruction handler is +dependent on the data stored in an account. -#### Missing data validation check +### Missing data validation check -The example below includes an `update_admin` instruction that updates the -`admin` field stored on an `admin_config` account. +The example below includes an `update_admin` instruction handler that updates +the `admin` field stored on an `admin_config` account. -The instruction is missing a data validation check to verify the `admin` account -signing the transaction matches the `admin` stored on the `admin_config` +The instruction handler is missing a data validation check to verify the `admin` +account signing the transaction matches the `admin` stored on the `admin_config` account. This means any account signing the transaction and passed into the -instruction as the `admin` account can update the `admin_config` account. +instruction handler as the `admin` account can update the `admin_config` +account. 
```rust use anchor_lang::prelude::*; @@ -67,7 +71,8 @@ pub struct UpdateAdmin<'info> { pub admin_config: Account<'info, AdminConfig>, #[account(mut)] pub admin: Signer<'info>, - pub new_admin: SystemAccount<'info>, + /// CHECK: This account will not be checked by anchor + pub new_admin: UncheckedAccount<'info>, } #[account] @@ -76,11 +81,11 @@ pub struct AdminConfig { } ``` -#### Add data validation check +### Add Data Validation Check The basic Rust approach to solve this problem is to simply compare the passed in `admin` key to the `admin` key stored in the `admin_config` account, throwing an -error if they don’t match. +error if they don't match. ```rust if ctx.accounts.admin.key() != ctx.accounts.admin_config.admin { @@ -88,9 +93,9 @@ if ctx.accounts.admin.key() != ctx.accounts.admin_config.admin { } ``` -By adding a data validation check, the `update_admin` instruction would only -process if the `admin` signer of the transaction matched the `admin` stored on -the `admin_config` account. +By adding a data validation check, the `update_admin` instruction handler would +only process if the `admin` signer of the transaction matched the `admin` stored +on the `admin_config` account. ```rust use anchor_lang::prelude::*; @@ -116,7 +121,8 @@ pub struct UpdateAdmin<'info> { pub admin_config: Account<'info, AdminConfig>, #[account(mut)] pub admin: Signer<'info>, - pub new_admin: SystemAccount<'info>, + /// CHECK: This account will not be checked by anchor + pub new_admin: UncheckedAccount<'info>, } #[account] @@ -125,11 +131,11 @@ pub struct AdminConfig { } ``` -#### Use Anchor constraints +### Use Anchor Constraints Anchor simplifies this with the `has_one` constraint. You can use the `has_one` -constraint to move the data validation check from the instruction logic to the -`UpdateAdmin` struct. +constraint to move the data validation check from the instruction handler logic +to the `UpdateAdmin` struct. In the example below, `has_one = admin` specifies that the `admin` account signing the transaction must match the `admin` field stored on the @@ -161,7 +167,8 @@ pub struct UpdateAdmin<'info> { pub admin_config: Account<'info, AdminConfig>, #[account(mut)] pub admin: Signer<'info>, - pub new_admin: SystemAccount<'info>, + /// CHECK: This account will not be checked by anchor + pub new_admin: UncheckedAccount<'info>, } #[account] @@ -172,7 +179,7 @@ pub struct AdminConfig { Alternatively, you can use `constraint` to manually add an expression that must evaluate to true in order for execution to continue. This is useful when for -some reason naming can’t be consistent or when you need a more complex +some reason naming can't be consistent or when you need a more complex expression to fully validate the incoming data. ```rust @@ -185,46 +192,51 @@ pub struct UpdateAdmin<'info> { pub admin_config: Account<'info, AdminConfig>, #[account(mut)] pub admin: Signer<'info>, - pub new_admin: SystemAccount<'info>, + /// CHECK: This account will not be checked by anchor + pub new_admin: UncheckedAccount<'info>, } ``` ## Lab -For this lab we’ll create a simple “vault” program similar to the program we +For this lab, we'll create a simple “vault” program similar to the program we used in the Signer Authorization lesson and the Owner Check lesson. Similar to -those labs, we’ll show in this lab how a missing data validation check could +those labs, we'll show in this lab how a missing data validation check could allow the vault to be drained. -#### 1. Starter +### 1. 
Starter

-To get started, download the starter code from the `starter` branch of
-[this repository](https://github.com/Unboxed-Software/solana-account-data-matching).
+To get started, download the starter code from the
+[`starter` branch of this repository](https://github.com/solana-developers/account-data-matching/tree/starter).
The starter code includes a program with two instructions and the boilerplate
setup for the test file.

-The `initialize_vault` instruction initializes a new `Vault` account and a new
-`TokenAccount`. The `Vault` account will store the address of a token account,
-the authority of the vault, and a withdraw destination token account.
+The `initialize_vault` instruction handler initializes a new `Vault` account and
+a new `TokenAccount`. The `Vault` account will store the address of a token
+account, the authority of the vault, and a withdraw destination token account.

The authority of the new token account will be set as the `vault`, a PDA of the
program. This allows the `vault` account to sign for the transfer of tokens from
the token account.

-The `insecure_withdraw` instruction transfers all the tokens in the `vault`
-account’s token account to a `withdraw_destination` token account.
+The `insecure_withdraw` instruction handler transfers all the tokens in the
+`vault` account's token account to a `withdraw_destination` token account.

-Notice that this instruction \***\*does\*\*** have a signer check for
+
+
+Notice that this instruction handler **does** have a signer check for
`authority` and an owner check for `vault`. However, nowhere in the account
-validation or instruction logic is there code that checks that the `authority`
-account passed into the instruction matches the `authority` account on the
-`vault`.
+validation or instruction handler logic is there code that checks that the
+`authority` account passed into the instruction handler matches the `authority`
+account on the `vault`.

```rust
use anchor_lang::prelude::*;
use anchor_spl::token::{self, Mint, Token, TokenAccount};

-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
+declare_id!("J89xWAprDsLAAwcTA6AhrK49UMSAYJJWdXvw4ZQK4suu");
+
+pub const DISCRIMINATOR_SIZE: usize = 8;

#[program]
pub mod account_data_matching {
@@ -240,7 +252,7 @@ pub mod account_data_matching {
    pub fn insecure_withdraw(ctx: Context<InsecureWithdraw>) -> Result<()> {
        let amount = ctx.accounts.token_account.amount;

-        let seeds = &[b"vault".as_ref(), &[*ctx.bumps.get("vault").unwrap()]];
+        let seeds = &[b"vault".as_ref(), &[ctx.bumps.vault]];
        let signer = [&seeds[..]];

        let cpi_ctx = CpiContext::new_with_signer(
@@ -263,7 +275,7 @@ pub struct InitializeVault<'info> {
    #[account(
        init,
        payer = authority,
-        space = 8 + 32 + 32 + 32,
+        space = DISCRIMINATOR_SIZE + Vault::INIT_SPACE,
        seeds = [b"vault"],
        bump,
    )]
@@ -306,6 +318,7 @@ pub struct InsecureWithdraw<'info> {
}

#[account]
+#[derive(Default, InitSpace)]
pub struct Vault {
    token_account: Pubkey,
    authority: Pubkey,
@@ -313,64 +326,73 @@ pub struct Vault {
}
```

-#### 2. Test `insecure_withdraw` instruction
+### 2. Test insecure_withdraw Instruction Handler

-To prove that this is a problem, let’s write a test where an account other than
-the vault’s `authority` tries to withdraw from the vault.
+To prove that this is a problem, let's write a test where an account other than
+the vault's `authority` tries to withdraw from the vault.
The test file includes the code to invoke the `initialize_vault` instruction
-using the provider wallet as the `authority` and then mints 100 tokens to the
-`vault` token account.
+handler using the provider wallet as the `authority` and then mints 100 tokens
+to the `vault` token account.

-Add a test to invoke the `insecure_withdraw` instruction. Use
-`withdrawDestinationFake` as the `withdrawDestination` account and `walletFake`
-as the `authority`. Then send the transaction using `walletFake`.
+Add a test to invoke the `insecure_withdraw` instruction handler. Use
+`fakeWithdrawDestination` as the `withdrawDestination` account and `fakeWallet`
+as the `authority`. Then send the transaction using `fakeWallet`.

-Since there are no checks the verify the `authority` account passed into the
-instruction matches the values stored on the `vault` account initialized in the
-first test, the instruction will process successfully and the tokens will be
-transferred to the `withdrawDestinationFake` account.
+Since there are no checks to verify that the `authority` account passed into the
+instruction handler matches the values stored on the `vault` account initialized
+in the first test, the instruction handler will process successfully and the
+tokens will be transferred to the `fakeWithdrawDestination` account.

```typescript
-describe("account-data-matching", () => {
+describe("Account Data Matching", () => {
  ...
-  it("Insecure withdraw", async () => {
-    const tx = await program.methods
-      .insecureWithdraw()
-      .accounts({
-        vault: vaultPDA,
-        tokenAccount: tokenPDA,
-        withdrawDestination: withdrawDestinationFake,
-        authority: walletFake.publicKey,
-      })
-      .transaction()
-
-    await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake])
-
-    const balance = await connection.getTokenAccountBalance(tokenPDA)
-    expect(balance.value.uiAmount).to.eq(0)
-  })
+  it("allows insecure withdrawal", async () => {
+    try {
+      const tx = await program.methods
+        .insecureWithdraw()
+        .accounts({
+          vault: vaultPDA,
+          tokenAccount: tokenPDA,
+          withdrawDestination: fakeWithdrawDestination,
+          authority: fakeWallet.publicKey,
+        })
+        .transaction();
+
+      await anchor.web3.sendAndConfirmTransaction(provider.connection, tx, [
+        fakeWallet,
+      ]);
+
+      const tokenAccount = await getAccount(provider.connection, tokenPDA);
+      expect(Number(tokenAccount.amount)).to.equal(0);
+    } catch (error) {
+      throw new Error(
+        `Insecure withdraw failed unexpectedly: ${error.message}`,
+      );
+    }
+  });
})
```

Run `anchor test` to see that both transactions will complete successfully.

```bash
-account-data-matching
-  ✔ Initialize Vault (811ms)
-  ✔ Insecure withdraw (403ms)
+Account Data Matching
+  ✔ initializes the vault and mints tokens (879ms)
+  ✔ allows insecure withdrawal (431ms)
```

-#### 3. Add `secure_withdraw` instruction
+### 3. Add secure_withdraw Instruction Handler

-Let’s go implement a secure version of this instruction called
+Let's implement a secure version of this instruction handler called
`secure_withdraw`.

-This instruction will be identical to the `insecure_withdraw` instruction,
-except we’ll use the `has_one` constraint in the account validation struct
-(`SecureWithdraw`) to check that the `authority` account passed into the
-instruction matches the `authority` account on the `vault` account. That way
-only the correct authority account can withdraw the vault’s tokens.
+This instruction handler will be identical to the `insecure_withdraw`
+instruction handler, except we'll use the `has_one` constraint in the account
+validation struct (`SecureWithdraw`) to check that the `authority` account
+passed into the instruction handler matches the `authority` account on the
+`vault` account. That way only the correct authority account can withdraw the
+vault's tokens.

```rust
use anchor_lang::prelude::*;
@@ -378,6 +400,8 @@ use anchor_spl::token::{self, Mint, Token, TokenAccount};

declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");

+pub const DISCRIMINATOR_SIZE: usize = 8;
+
#[program]
pub mod account_data_matching {
    use super::*;
@@ -385,7 +409,7 @@ pub mod account_data_matching {

    pub fn secure_withdraw(ctx: Context<SecureWithdraw>) -> Result<()> {
        let amount = ctx.accounts.token_account.amount;

-        let seeds = &[b"vault".as_ref(), &[*ctx.bumps.get("vault").unwrap()]];
+        let seeds = &[b"vault".as_ref(), &[ctx.bumps.vault]];
        let signer = [&seeds[..]];

        let cpi_ctx = CpiContext::new_with_signer(
@@ -411,7 +435,6 @@ pub struct SecureWithdraw<'info> {
        has_one = token_account,
        has_one = authority,
        has_one = withdraw_destination,
-
    )]
    pub vault: Account<'info, Vault>,
    #[account(
@@ -427,94 +450,104 @@ pub struct SecureWithdraw<'info> {
}
```

-#### 4. Test `secure_withdraw` instruction
+### 4. Test secure_withdraw Instruction Handler

-Now let’s test the `secure_withdraw` instruction with two tests: one that uses
-`walletFake` as the authority and one that uses `wallet` as the authority. We
-expect the first invocation to return an error and the second to succeed.
+Now let's test the `secure_withdraw` instruction handler with two tests: one
+that uses `fakeWallet` as the authority and one that uses `wallet` as the
+authority. We expect the first invocation to return an error and the second to
+succeed.

```typescript
describe("account-data-matching", () => {
  ...
- it("Secure withdraw, expect error", async () => { + it("prevents unauthorized secure withdrawal", async () => { try { const tx = await program.methods .secureWithdraw() .accounts({ vault: vaultPDA, tokenAccount: tokenPDA, - withdrawDestination: withdrawDestinationFake, - authority: walletFake.publicKey, + withdrawDestination: fakeWithdrawDestination, + authority: fakeWallet.publicKey, }) - .transaction() + .transaction(); - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - } catch (err) { - expect(err) - console.log(err) + await anchor.web3.sendAndConfirmTransaction(provider.connection, tx, [ + fakeWallet, + ]); + + throw new Error("Secure withdraw should have failed but didn't"); + } catch (error) { + expect(error).to.be.an("error"); + console.log("Expected error occurred:", error.message); } - }) - - it("Secure withdraw", async () => { - await spl.mintTo( - connection, - wallet.payer, - mint, - tokenPDA, - wallet.payer, - 100 - ) - - await program.methods - .secureWithdraw() - .accounts({ - vault: vaultPDA, - tokenAccount: tokenPDA, - withdrawDestination: withdrawDestination, - authority: wallet.publicKey, - }) - .rpc() - - const balance = await connection.getTokenAccountBalance(tokenPDA) - expect(balance.value.uiAmount).to.eq(0) - }) + }); + + it("allows secure withdrawal by authorized user", async () => { + try { + await new Promise((resolve) => setTimeout(resolve, 1000)); + + await mintTo( + provider.connection, + wallet.payer, + mint, + tokenPDA, + wallet.payer, + 100, + ); + + await program.methods + .secureWithdraw() + .accounts({ + vault: vaultPDA, + tokenAccount: tokenPDA, + withdrawDestination, + authority: wallet.publicKey, + }) + .rpc(); + + const tokenAccount = await getAccount(provider.connection, tokenPDA); + expect(Number(tokenAccount.amount)).to.equal(0); + } catch (error) { + throw new Error(`Secure withdraw failed unexpectedly: ${error.message}`); + } + }); }) ``` Run `anchor test` to see that the transaction using an incorrect authority -account will now return an Anchor Error while the transaction using correct -accounts completes successfully. +account will now return an Anchor Error while the transaction using the correct +accounts complete successfully. ```bash -'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS invoke [1]', -'Program log: Instruction: SecureWithdraw', -'Program log: AnchorError caused by account: vault. Error Code: ConstraintHasOne. Error Number: 2001. Error Message: A has one constraint was violated.', -'Program log: Left:', -'Program log: DfLZV18rD7wCQwjYvhTFwuvLh49WSbXFeJFPQb5czifH', -'Program log: Right:', -'Program log: 5ovvmG5ntwUC7uhNWfirjBHbZD96fwuXDMGXiyMwPg87', -'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS consumed 10401 of 200000 compute units', -'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS failed: custom program error: 0x7d1' +"Program J89xWAprDsLAAwcTA6AhrK49UMSAYJJWdXvw4ZQK4suu invoke [1]", +"Program log: Instruction: SecureWithdraw", +"Program log: AnchorError caused by account: vault. Error Code: ConstraintHasOne. Error Number: 2001. 
Error Message: A has one constraint was violated.", +"Program log: Left:", +"Program log: GprrWv9r8BMxQiWea9MrbCyK7ig7Mj8CcseEbJhDDZXM", +"Program log: Right:", +"Program log: 2jTDDwaPzbpG2oFnnqtuHJpiS9k9dDVqzzfA2ofcqfFS", +"Program J89xWAprDsLAAwcTA6AhrK49UMSAYJJWdXvw4ZQK4suu consumed 11790 of 200000 compute units", +"Program J89xWAprDsLAAwcTA6AhrK49UMSAYJJWdXvw4ZQK4suu failed: custom program error: 0x7d1" ``` Note that Anchor specifies in the logs the account that causes the error (`AnchorError caused by account: vault`). ```bash -✔ Secure withdraw, expect error (77ms) -✔ Secure withdraw (10073ms) +✔ prevents unauthorized secure withdrawal +✔ allows secure withdrawal by authorized user (1713ms) ``` And just like that, you've closed up the security loophole. The theme across most of these potential exploits is that they're quite simple. However, as your -programs grow in scope and complexity, it becomse increasingly easy to miss +programs grow in scope and complexity, it becomes increasingly easy to miss possible exploits. It's great to get in a habit of writing tests that send instructions that _shouldn't_ work. The more the better. That way you catch problems before you deploy. If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the repository](https://github.com/Unboxed-Software/solana-account-data-matching/tree/solution). +[`solution` branch of the repository](https://github.com/solana-developers/account-data-matching/tree/solution). ## Challenge @@ -528,6 +561,7 @@ Remember, if you find a bug or exploit in somebody else's program, please alert them! If you find one in your own program, be sure to patch it right away. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=a107787e-ad33-42bb-96b3-0592efc1b92f)! diff --git a/content/courses/program-security/arbitrary-cpi.md b/content/courses/program-security/arbitrary-cpi.md index 737cf8cb6..edb15793d 100644 --- a/content/courses/program-security/arbitrary-cpi.md +++ b/content/courses/program-security/arbitrary-cpi.md @@ -3,7 +3,7 @@ title: Arbitrary CPI objectives: - Explain the security risks associated with invoking a CPI to an unknown program - - Showcase how Anchor’s CPI module prevents this from happening when making a + - Showcase how Anchor's CPI module prevents this from happening when making a CPI from one Anchor program to another - Safely and securely make a CPI from an Anchor program to an arbitrary non-anchor program @@ -13,9 +13,9 @@ description: "How to safely invoke Solana programs from other Solana programs." ## Summary - To generate a CPI, the target program must be passed into the invoking - instruction as an account. This means that any target program could be passed - into the instruction. Your program should check for incorrect or unexpected - programs. + instruction handler as an account. This means that any target program could be + passed into the instruction handler. Your program should check for incorrect + or unexpected programs. - Perform program checks in native programs by simply comparing the public key of the passed-in program to the progam you expected. - If a program is written in Anchor, then it may have a publicly available CPI @@ -25,27 +25,27 @@ description: "How to safely invoke Solana programs from other Solana programs." ## Lesson -A cross program invocation (CPI) is when one program invokes an instruction on -another program. 
An “arbitrary CPI” is when a program is structured to issue a
-CPI to whatever program is passed into the instruction rather than expecting to
-perform a CPI to one specific program. Given that the callers of your program's
-instruction can pass any program they'd like into the instruction's list of
-accounts, failing to verify the address of a passed-in program results in your
-program performing CPIs to arbitrary programs.
+A cross program invocation (CPI) is when one program invokes an instruction
+handler on another program. An “arbitrary CPI” is when a program is structured
+to issue a CPI to whatever program is passed into the instruction handler rather
+than expecting to perform a CPI to one specific program. Given that the callers
+of your program's instruction handler can pass any program they'd like into the
+instruction's list of accounts, failing to verify the address of a passed-in
+program results in your program performing CPIs to arbitrary programs.

This lack of program checks creates an opportunity for a malicious user to pass
in a different program than expected, causing the original program to call an
-instruction on this mystery program. There’s no telling what the consequences of
-this CPI could be. It depends on the program logic (both that of the original
-program and the unexpected program), as well as what other accounts are passed
-into the original instruction.
+instruction handler on this mystery program. There's no telling what the
+consequences of this CPI could be. It depends on the program logic (both that of
+the original program and the unexpected program), as well as what other accounts
+are passed into the original instruction handler.

-### Missing program checks
+### Missing Program Checks

-Take the following program as an example. The `cpi` instruction invokes the
-`transfer` instruction on `token_program`, but there is no code that checks
-whether or not the `token_program` account passed into the instruction is, in
-fact, the SPL Token Program.
+Take the following program as an example. The `cpi` instruction handler invokes
+the `transfer` instruction handler on `token_program`, but there is no code that
+checks whether or not the `token_program` account passed into the instruction
+handler is, in fact, the SPL Token Program.

```rust
use anchor_lang::prelude::*;
@@ -85,14 +85,14 @@ pub struct Cpi<'info> {
}
```

-An attacker could easily call this instruction and pass in a duplicate token
-program that they created and control.
+An attacker could easily call this instruction handler and pass in a duplicate
+token program that they created and control.

-### Add program checks
+### Add Program Checks

It's possible to fix this vulnerability by simply adding a few lines to the `cpi`
-instruction to check whether or not `token_program`'s public key is that of the
-SPL Token Program.
+instruction handler to check whether or not `token_program`'s public key is that
+of the SPL Token Program.

```rust
pub fn cpi_secure(ctx: Context<Cpi>, amount: u64) -> ProgramResult {
@@ -117,22 +117,23 @@ pub fn cpi_secure(ctx: Context<Cpi>, amount: u64) -> ProgramResult {
}
```

-Now, if an attacker passes in a different token program, the instruction will
-return the `ProgramError::IncorrectProgramId` error.
+Now, if an attacker passes in a different token program, the instruction handler
+will return the `ProgramError::IncorrectProgramId` error.
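As a minimal sketch of that check (assuming the `spl_token` crate is a dependency; the helper name here is made up for illustration):

```rust
use anchor_lang::prelude::*;
use anchor_lang::solana_program::program_error::ProgramError;

// Hypothetical helper: bail out when the passed-in account is not the
// SPL Token Program.
pub fn assert_spl_token_program(program: &AccountInfo) -> Result<()> {
    if program.key() != spl_token::ID {
        return Err(ProgramError::IncorrectProgramId.into());
    }
    Ok(())
}
```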
-Depending on the program you’re invoking with your CPI, you can either hard code
-the address of the expected program ID or use the program’s Rust crate to get
+Depending on the program you're invoking with your CPI, you can either hard code
+the address of the expected program ID or use the program's Rust crate to get
the address of the program, if available. In the example above, the `spl_token`
crate provides the address of the SPL Token Program.

-### Use an Anchor CPI module
+### Use an Anchor CPI Module

-A simpler way to manage program checks is to use Anchor CPI modules. We learned
-in a
-[previous lesson](https://github.com/Unboxed-Software/solana-course/blob/main/content/anchor-cpi)
+A simpler way to manage program checks is to use the
+[Anchor CPI](https://book.anchor-lang.com/anchor_in_depth/CPIs.html) module. We
+learned in a
+[previous lesson on Anchor CPI](/content/courses/onchain-development/anchor-cpi.md)
that Anchor can automatically generate CPI modules to make CPIs into the program
simpler. These modules also enhance security by verifying the public key of the
-program that’s passed into one of its public instructions.
+program that's passed into one of its public instructions.

Every Anchor program uses the `declare_id()` macro to define the address of the
program. When a CPI module is generated for a specific program, it uses the
@@ -182,12 +183,15 @@ impl<'info> Cpi<'info> {
}
```

-Note that, like the example above, Anchor has created a few
+
+
+Like the example above, Anchor has created a few
[wrappers for popular native programs](https://github.com/coral-xyz/anchor/tree/master/spl/src)
that allow you to issue CPIs into them as if they were Anchor programs.
+

-Additionally and depending on the program you’re making the CPI to, you may be
-able to use Anchor’s
+Additionally, depending on the program you're making the CPI to, you may be
+able to use Anchor's
[`Program` account type](https://docs.rs/anchor-lang/latest/anchor_lang/accounts/program/struct.Program.html)
to validate the passed-in program in your account validation struct. Between the
[`anchor_lang`](https://docs.rs/anchor-lang/latest/anchor_lang) and
[`anchor_spl`](https://docs.rs/anchor_spl/latest/) crates,
@@ -218,10 +222,10 @@ mints, distribution, and transfers, and a separate metadata program is used to
assign metadata to tokens. So the vulnerability we go through here could also be
applied to real tokens.

-#### 1. Setup
+### 1. Setup

-We'll start with the `starter` branch of
-[this repository](https://github.com/Unboxed-Software/solana-arbitrary-cpi/tree/starter).
+We'll start with the
+[`starter` branch of this repository](https://github.com/solana-developers/arbitrary-cpi/tree/starter).
Clone the repository and then open it on the `starter` branch.

Notice that there are three programs:

@@ -242,7 +246,7 @@ look at the program. It has two instructions:

The second program, `character-metadata`, is meant to be the "approved" program
for handling character metadata. Have a look at this program. It has a single
-instruction for `create_metadata` that creates a new PDA and assigns a
+instruction handler for `create_metadata` that creates a new PDA and assigns a
pseudo-random value between 0 and 20 for the character's health and power.

The last program, `fake-metadata`, is a "fake" metadata program meant to
@@ -250,59 +254,63 @@ illustrate what an attacker might make to exploit our `gameplay` program.
This program is almost identical to the `character-metadata` program, only it
assigns a character's initial health and power to be the max allowed: 255.

-#### 2. Test `create_character_insecure` instruction
+### 2. Test create_character_insecure Instruction Handler

There is already a test in the `tests` directory for this. It's long, but take a
minute to look at it before we talk through it together:

```typescript
-it("Insecure instructions allow attacker to win every time", async () => {
-  // Initialize player one with real metadata program
-  await gameplayProgram.methods
-    .createCharacterInsecure()
-    .accounts({
-      metadataProgram: metadataProgram.programId,
-      authority: playerOne.publicKey,
-    })
-    .signers([playerOne])
-    .rpc();
-
-  // Initialize attacker with fake metadata program
-  await gameplayProgram.methods
-    .createCharacterInsecure()
-    .accounts({
-      metadataProgram: fakeMetadataProgram.programId,
-      authority: attacker.publicKey,
-    })
-    .signers([attacker])
-    .rpc();
-
-  // Fetch both player's metadata accounts
-  const [playerOneMetadataKey] = getMetadataKey(
-    playerOne.publicKey,
-    gameplayProgram.programId,
-    metadataProgram.programId,
-  );
-
-  const [attackerMetadataKey] = getMetadataKey(
-    attacker.publicKey,
-    gameplayProgram.programId,
-    fakeMetadataProgram.programId,
-  );
-
-  const playerOneMetadata =
-    await metadataProgram.account.metadata.fetch(playerOneMetadataKey);
-
-  const attackerMetadata =
-    await fakeMetadataProgram.account.metadata.fetch(attackerMetadataKey);
-
-  // The regular player should have health and power between 0 and 20
-  expect(playerOneMetadata.health).to.be.lessThan(20);
-  expect(playerOneMetadata.power).to.be.lessThan(20);
-
-  // The attacker will have health and power of 255
-  expect(attackerMetadata.health).to.equal(255);
-  expect(attackerMetadata.power).to.equal(255);
+it("Insecure instruction handlers allow attacker to win every time", async () => {
+  try {
+    // Initialize player one with real metadata program
+    await gameplayProgram.methods
+      .createCharacterInsecure()
+      .accounts({
+        metadataProgram: metadataProgram.programId,
+        authority: playerOne.publicKey,
+      })
+      .signers([playerOne])
+      .rpc();
+
+    // Initialize attacker with fake metadata program
+    await gameplayProgram.methods
+      .createCharacterInsecure()
+      .accounts({
+        metadataProgram: fakeMetadataProgram.programId,
+        authority: attacker.publicKey,
+      })
+      .signers([attacker])
+      .rpc();
+
+    // Fetch both player's metadata accounts
+    const [playerOneMetadataKey] = getMetadataKey(
+      playerOne.publicKey,
+      gameplayProgram.programId,
+      metadataProgram.programId,
+    );
+
+    const [attackerMetadataKey] = getMetadataKey(
+      attacker.publicKey,
+      gameplayProgram.programId,
+      fakeMetadataProgram.programId,
+    );
+
+    const playerOneMetadata =
+      await metadataProgram.account.metadata.fetch(playerOneMetadataKey);
+
+    const attackerMetadata =
+      await fakeMetadataProgram.account.metadata.fetch(attackerMetadataKey);
+
+    // The regular player should have health and power between 0 and 20
+    expect(playerOneMetadata.health).to.be.lessThan(20);
+    expect(playerOneMetadata.power).to.be.lessThan(20);
+
+    // The attacker will have health and power of 255
+    expect(attackerMetadata.health).to.equal(255);
+    expect(attackerMetadata.power).to.equal(255);
+  } catch (error) {
+    console.error("Test failed:", error);
+    throw error;
+  }
});
```

@@ -319,12 +327,12 @@ are each 255, making the attacker unbeatable.

If you haven't already, run `anchor test` to see that this test in fact behaves
as described.

-#### 3.
Create a `create_character_secure` instruction +### 3. Create a create_character_secure Instruction Handler -Let's fix this by creating a secure instruction for creating a new character. -This instruction should implement proper program checks and use the -`character-metadata` program's `cpi` crate to do the CPI rather than just using -`invoke`. +Let's fix this by creating a secure instruction handler for creating a new +character. This instruction handler should implement proper program checks and +use the `character-metadata` program's `cpi` crate to do the CPI rather than +just using `invoke`. If you want to test out your skills, try this on your own before moving ahead. @@ -352,7 +360,7 @@ pub struct CreateCharacterSecure<'info> { #[account( init, payer = authority, - space = 8 + 32 + 32 + 64, + space = DISCRIMINATOR_SIZE + Character::INIT_SPACE, seeds = [authority.key().as_ref()], bump )] @@ -363,25 +371,27 @@ pub struct CreateCharacterSecure<'info> { seeds::program = metadata_program.key(), bump, )] - /// CHECK: manual checks + /// CHECK: This account will not be checked by anchor pub metadata_account: AccountInfo<'info>, pub metadata_program: Program<'info, CharacterMetadata>, pub system_program: Program<'info, System>, } ``` -Lastly, we add the `create_character_secure` instruction. It will be the same as -before but will use the full functionality of Anchor CPIs rather than using -`invoke` directly: +Lastly, we add the `create_character_secure` instruction handler. It will be the +same as before but will use the full functionality of Anchor CPIs rather than +using `invoke` directly: ```rust pub fn create_character_secure(ctx: Context) -> Result<()> { + // Initialize character data let character = &mut ctx.accounts.character; character.metadata = ctx.accounts.metadata_account.key(); - character.auth = ctx.accounts.authority.key(); + character.authority = ctx.accounts.authority.key(); character.wins = 0; - let context = CpiContext::new( + // Prepare CPI context + let cpi_context = CpiContext::new( ctx.accounts.metadata_program.to_account_info(), CreateMetadata { character: ctx.accounts.character.to_account_info(), @@ -391,20 +401,21 @@ pub fn create_character_secure(ctx: Context) -> Result<() }, ); - create_metadata(context)?; + // Perform CPI to create metadata + create_metadata(cpi_context)?; Ok(()) } ``` -#### 4. Test `create_character_secure` +### 4. Test create_character_secure Instruction Handler Now that we have a secure way of initializing a new character, let's create a new test. This test just needs to attempt to initialize the attacker's character and expect an error to be thrown. ```typescript -it("Secure character creation doesn't allow fake program", async () => { +it("prevents secure character creation with fake program", async () => { try { await gameplayProgram.methods .createCharacterSecure() @@ -414,23 +425,25 @@ it("Secure character creation doesn't allow fake program", async () => { }) .signers([attacker]) .rpc(); + + throw new Error("Expected createCharacterSecure to throw an error"); } catch (error) { - expect(error); + expect(error).to.be.instanceOf(Error); console.log(error); } }); ``` Run `anchor test` if you haven't already. Notice that an error was thrown as -expected, detailing that the program ID passed into the instruction is not the -expected program ID: +expected, detailing that the program ID passed into the instruction handler is +not the expected program ID: ```bash 'Program log: AnchorError caused by account: metadata_program. 
Error Code: InvalidProgramId. Error Number: 3008. Error Message: Program ID was not as expected.', 'Program log: Left:', -'Program log: FKBWhshzcQa29cCyaXc1vfkZ5U985gD5YsqfCzJYUBr', +'Program log: HQqG7PxftCD5BB9WUWcYksrjDLUwCmbV8Smh1W8CEgQm', 'Program log: Right:', -'Program log: D4hPnYEsAx4u3EQMrKEXsY3MkfLndXbBKTEYTwwm25TE' +'Program log: 4FgVd2dgsFnXbSHz8fj9twNbfx8KWcBJkHa6APicU6KS' ``` That's all you need to do to protect against arbitrary CPIs! @@ -440,8 +453,7 @@ certainly won't stop you from architecting the program you need, but please take every precaution possible to ensure no vulnerabilities in your program. If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the same repository](https://github.com/Unboxed-Software/solana-arbitrary-cpi/tree/solution). +[`solution` branch of the same repository](https://github.com/solana-developers/arbitrary-cpi/tree/solution). ## Challenge @@ -449,13 +461,14 @@ Just as with other lessons in this unit, your opportunity to practice avoiding this security exploit lies in auditing your own or other programs. Take some time to review at least one program and ensure that program checks are -in place for every program passed into the instructions, particularly those that -are invoked via CPI. +in place for every program passed into the instruction handlers, particularly +those that are invoked via CPI. Remember, if you find a bug or exploit in somebody else's program, please alert them! If you find one in your own program, be sure to patch it right away. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=5bcaf062-c356-4b58-80a0-12cca99c29b0)! diff --git a/content/courses/program-security/bump-seed-canonicalization.md b/content/courses/program-security/bump-seed-canonicalization.md index e87e288fe..1ee208d63 100644 --- a/content/courses/program-security/bump-seed-canonicalization.md +++ b/content/courses/program-security/bump-seed-canonicalization.md @@ -3,46 +3,58 @@ title: Bump Seed Canonicalization objectives: - Explain the vulnerabilities associated with using PDAs derived without the canonical bump - - Initialize a PDA using Anchor’s `seeds` and `bump` constraints to + - Initialize a PDA using Anchor's `seeds` and `bump` constraints to automatically use the canonical bump - Use Anchor's `seeds` and `bump` constraints to ensure the canonical bump is always used in future instructions when deriving a PDA description: - "Understand the need for consistent PDA calculation by storing and reusuing - the canonical bump." + "Understand the need for consistent PDA calculation by storing and reusing the + canonical bump." --- ## Summary - The [**`create_program_address`**](https://docs.rs/solana-program/latest/solana_program/pubkey/struct.Pubkey.html#method.create_program_address) - function derives a PDA without searching for the **canonical bump**. This - means there are multiple valid bumps, all of which will produce different - addresses. + function derives a PDA but does so without searching for the canonical bump. + It allows multiple valid bumps to produce different addresses. While this can + still generate a valid PDA, it lacks determinism, as multiple bumps may yield + different addresses for the same set of seeds. 
- Using
  [**`find_program_address`**](https://docs.rs/solana-program/latest/solana_program/pubkey/struct.Pubkey.html#method.find_program_address)
-  ensures that the highest valid bump, or canonical bump, is used for the
-  derivation, thus creating a deterministic way to find an address given
-  specific seeds.
-- Upon initialization, you can use Anchor's `seeds` and `bump` constraint to
-  ensure that PDA derivations in the account validation struct always use the
-  canonical bump
-- Anchor allows you to **specify a bump** with the `bump = ` constraint when
-  verifying the address of a PDA
-- Because `find_program_address` can be expensive, best practice is to store the
-  derived bump in an account’s data field to be referenced later on when
-  re-deriving the address for verification
+  ensures that the **highest valid bump**, often referred to as the **canonical
+  bump**, is used in the PDA derivation. This provides a deterministic way to
+  compute an address for a given set of seeds, ensuring consistency across the
+  program.
+- In Anchor, you can specify the `seeds` and the `bump` to ensure that PDA
+  derivations in your account validation struct always align with the correct
+  canonical bump.
+- Anchor also allows you to specify a bump directly in the validation struct
+  using the `bump = <expr>` constraint. This ensures that the correct bump
+  is used when verifying the PDA.
+- Using `find_program_address` can be computationally expensive due to the
+  process of searching for the highest valid bump. It's considered best practice
+  to store the derived bump in an account's data field upon initialization. This
+  allows the bump to be referenced in subsequent instruction handlers, avoiding
+  the need to repeatedly call `find_program_address` to re-derive the PDA.
+

```rust
#[derive(Accounts)]
pub struct VerifyAddress<'info> {
-  #[account(
-    seeds = [DATA_PDA_SEED.as_bytes()],
-    bump = data.bump
-  )]
-  data: Account<'info, Data>,
+    #[account(
+        seeds = [DATA_PDA_SEED.as_bytes()],
+        bump = data.bump
+    )]
+    data: Account<'info, Data>,
}
```

+- In summary, while `create_program_address` can generate a PDA,
+  `find_program_address` ensures consistency and reliability by always producing
+  the canonical bump, which is critical for deterministic program execution.
+  This helps maintain integrity in onchain apps, especially when validating PDAs
+  across multiple instruction handlers.
+
## Lesson

Bump seeds are a number between 0 and 255, inclusive, used to ensure that an
@@ -52,37 +64,39 @@ is a valid PDA. The **canonical bump** is the highest bump value that produces a
valid PDA. The standard in Solana is to _always use the canonical bump_ when
deriving PDAs, both for security and convenience.

-### Insecure PDA derivation using `create_program_address`
+### Insecure PDA Derivation using create_program_address

Given a set of seeds, the `create_program_address` function will produce a valid
PDA about 50% of the time. The bump seed is an additional byte added as a seed
-to "bump" the derived address into valid territory. Since there are 256 possible
-bump seeds and the function produces valid PDAs approximately 50% of the time,
-there are many valid bumps for a given set of input seeds.
+to "bump" the derived address into valid territory. Since there are 256
+possible bump seeds and the function produces valid PDAs approximately 50% of
+the time, there are many valid bumps for a given set of input seeds.
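To make that concrete, here is a small off-chain sketch, using a placeholder program ID, that counts how many of the 256 bump values yield a valid PDA for a single seed:

```rust
use solana_program::pubkey::Pubkey;

fn main() {
    // Placeholder program id; any pubkey illustrates the point.
    let program_id = Pubkey::new_unique();

    // Keep every bump whose derived address lands off the ed25519 curve.
    let valid_bumps: Vec<u8> = (0..=255u8)
        .filter(|bump| {
            Pubkey::create_program_address(&[b"example".as_ref(), &[*bump]], &program_id).is_ok()
        })
        .collect();

    // Typically prints a count near 128: many valid bumps for one seed set.
    println!("{} of 256 bumps produce a valid PDA", valid_bumps.len());
}
```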
-You can imagine that this could cause confusion for locating accounts when using
+You can imagine that this could cause confusion in locating accounts when using
seeds as a way of mapping known pieces of information to accounts. Using
the canonical bump as the standard ensures that you can always find the right
account. More importantly, it avoids security exploits caused by the open-ended
nature of allowing multiple bumps.

-In the example below, the `set_value` instruction uses a `bump` that was passed
-in as instruction data to derive a PDA. The instruction then derives the PDA
-using `create_program_address` function and checks that the `address` matches
-the public key of the `data` account.
+In the example below, the `set_value` instruction handler uses a `bump` that was
+passed in as instruction data to derive a PDA. The instruction handler then
+derives the PDA using the `create_program_address` function and checks that the
+`address` matches the public key of the `data` account.

```rust
use anchor_lang::prelude::*;

-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
+declare_id!("ABQaKhtpYQUUgZ9m2sAY7ZHxWv6KyNdhUJW8Dh8NQbkf");

#[program]
pub mod bump_seed_canonicalization_insecure {
    use super::*;

+    // Insecure PDA Derivation using create_program_address
    pub fn set_value(ctx: Context<BumpSeed>, key: u64, new_value: u64, bump: u8) -> Result<()> {
        let address =
-            Pubkey::create_program_address(&[key.to_le_bytes().as_ref(), &[bump]], ctx.program_id).unwrap();
+            Pubkey::create_program_address(&[key.to_le_bytes().as_ref(), &[bump]], ctx.program_id)
+                .unwrap();
        if address != ctx.accounts.data.key() {
            return Err(ProgramError::InvalidArgument.into());
        }
@@ -95,33 +109,34 @@ pub mod bump_seed_canonicalization_insecure {

#[derive(Accounts)]
pub struct BumpSeed<'info> {
-    data: Account<'info, Data>,
+    #[account(mut)]
+    pub data: Account<'info, Data>,
}

#[account]
pub struct Data {
-    value: u64,
+    pub value: u64,
}
```

-While the instruction derives the PDA and checks the passed-in account, which is
-good, it allows the caller to pass in an arbitrary bump. Depending on the
-context of your program, this could result in undesired behavior or potential
-exploit.
+While the instruction handler derives the PDA and checks the passed-in account,
+which is good, it allows the caller to pass in an arbitrary bump. Depending on
+the context of your program, this could result in undesired behavior or a
+potential exploit.

If the seed mapping was meant to enforce a one-to-one relationship between PDA
and user, for example, this program would not properly enforce that. A user
could call the program multiple times with many valid bumps, each producing a
different PDA.

-### Recommended derivation using `find_program_address`
+### Recommended Derivation using find_program_address

A simple way around this problem is to have the program expect only the
canonical bump and use `find_program_address` to derive the PDA.

The
[`find_program_address`](https://docs.rs/solana-program/latest/solana_program/pubkey/struct.Pubkey.html#method.find_program_address)
-_always uses the canonical bump_. This function iterates through calling
+_always uses the canonical bump_. This function iterates by calling
`create_program_address`, starting with a bump of 255 and decrementing the bump
by one with each iteration. As soon as a valid address is found, the function
returns both the derived PDA and the canonical bump used to derive it.
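A matching sketch of the canonical derivation, under the same placeholder setup as the previous sketch:

```rust
use solana_program::pubkey::Pubkey;

fn main() {
    let program_id = Pubkey::new_unique();

    // find_program_address starts at bump 255 and counts down until the
    // derivation succeeds, so the bump it returns is always the canonical one.
    let (pda, canonical_bump) = Pubkey::find_program_address(&[b"example"], &program_id);

    println!("canonical PDA: {pda}, bump: {canonical_bump}");
}
```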
@@ -151,12 +166,12 @@ pub fn set_value_secure( } ``` -### Use Anchor’s `seeds` and `bump` constraints +### Use Anchor's seeds and bump Constraints Anchor provides a convenient way to derive PDAs in the account validation struct using the `seeds` and `bump` constraints. These can even be combined with the `init` constraint to initialize the account at the intended address. To protect -the program from the vulnerability we’ve been discussing throughout this lesson, +the program from the vulnerability we've been discussing throughout this lesson, Anchor does not even allow you to initialize an account at a PDA using anything but the canonical bump. Instead, it uses `find_program_address` to derive the PDA and subsequently performs the initialization. @@ -166,6 +181,8 @@ use anchor_lang::prelude::*; declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); +pub const DISCRIMINATOR_SIZE: usize = 8; + #[program] pub mod bump_seed_canonicalization_recommended { use super::*; @@ -175,28 +192,29 @@ pub mod bump_seed_canonicalization_recommended { Ok(()) } } - -// initialize account at PDA +// Initialize account at PDA #[derive(Accounts)] #[instruction(key: u64)] pub struct BumpSeed<'info> { - #[account(mut)] - payer: Signer<'info>, - #[account( - init, - seeds = [key.to_le_bytes().as_ref()], - // derives the PDA using the canonical bump - bump, - payer = payer, - space = 8 + 8 - )] - data: Account<'info, Data>, - system_program: Program<'info, System> + #[account(mut)] + pub payer: Signer<'info>, + #[account( + init, + seeds = [key.to_le_bytes().as_ref()], + // Derives the PDA using the canonical bump + bump, + payer = payer, + space = DISCRIMINATOR_SIZE + Data::INIT_SPACE + )] + pub data: Account<'info, Data>, + + pub system_program: Program<'info, System>, } #[account] +#[derive(InitSpace)] pub struct Data { - value: u64, + pub value: u64, } ``` @@ -210,7 +228,7 @@ arbitrary bumps, but rather to let you optimize your program. The iterative nature of `find_program_address` makes it expensive, so best practice is to store the canonical bump in the PDA account's data upon initializing a PDA, allowing you to reference the bump stored when validating the PDA in subsequent -instructions. +instruction handlers. When you specify the bump to use, Anchor uses `create_program_address` with the provided bump instead of `find_program_address`. 
This pattern of storing the @@ -222,66 +240,76 @@ use anchor_lang::prelude::*; declare_id!("CVwV9RoebTbmzsGg1uqU1s4a3LvTKseewZKmaNLSxTqc"); +// Constant for account space calculation +pub const DISCRIMINATOR_SIZE: usize = 8; + #[program] pub mod bump_seed_canonicalization_recommended { use super::*; + // Instruction handler to set a value and store the bump pub fn set_value(ctx: Context, _key: u64, new_value: u64) -> Result<()> { ctx.accounts.data.value = new_value; - // store the bump on the account - ctx.accounts.data.bump = *ctx.bumps.get("data").unwrap(); + + // Store the canonical bump on the account + // This bump is automatically derived by Anchor + ctx.accounts.data.bump = ctx.bumps.data; + Ok(()) } + // Instruction handler to verify the PDA address pub fn verify_address(ctx: Context, _key: u64) -> Result<()> { msg!("PDA confirmed to be derived with canonical bump: {}", ctx.accounts.data.key()); Ok(()) } } -// initialize account at PDA +// Account validation struct for initializing the PDA account #[derive(Accounts)] #[instruction(key: u64)] pub struct BumpSeed<'info> { - #[account(mut)] - payer: Signer<'info>, - #[account( - init, - seeds = [key.to_le_bytes().as_ref()], - // derives the PDA using the canonical bump - bump, - payer = payer, - space = 8 + 8 + 1 - )] - data: Account<'info, Data>, - system_program: Program<'info, System> + #[account(mut)] + pub payer: Signer<'info>, + + #[account( + init, + seeds = [key.to_le_bytes().as_ref()], + bump, // Anchor automatically uses the canonical bump + payer = payer, + space = DISCRIMINATOR_SIZE + Data::INIT_SPACE + )] + pub data: Account<'info, Data>, + + pub system_program: Program<'info, System> } +// Account validation struct for verifying the PDA address #[derive(Accounts)] #[instruction(key: u64)] pub struct VerifyAddress<'info> { - #[account( - seeds = [key.to_le_bytes().as_ref()], - // guranteed to be the canonical bump every time - bump = data.bump - )] - data: Account<'info, Data>, + #[account( + seeds = [key.to_le_bytes().as_ref()], + bump = data.bump // Use the stored bump, guaranteed to be canonical + )] + pub data: Account<'info, Data>, } +// Data structure for the PDA account #[account] +#[derive(InitSpace)] pub struct Data { - value: u64, - // bump field - bump: u8 + pub value: u64, + pub bump: u8 // Stores the canonical bump } ``` If you don't specify the bump on the `bump` constraint, Anchor will still use `find_program_address` to derive the PDA using the canonical bump. As a -consequence, your instruction will incur a variable amount of compute budget. -Programs that are already at risk of exceeding their compute budget should use -this with care since there is a chance that the program’s budget may be -occasionally and unpredictably exceeded. +consequence, your instruction handler will incur a variable amount of compute +budget. Programs that are already at risk of exceeding their compute budget +should use this with care since there is a chance that the program's budget may +be occasionally and unpredictably exceeded. On the other hand, if you only need to verify the address of a PDA passed in without initializing an account, you'll be forced to either let Anchor derive @@ -294,35 +322,35 @@ To demonstrate the security exploits possible when you don't check for the canonical bump, let's work with a program that lets each program user "claim" rewards on time. -#### 1. Setup +### 1. 
Setup -Start by getting the code on the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-bump-seed-canonicalization/tree/starter). +Start by getting the code on the +[`starter` branch of this repository](https://github.com/solana-developers/bump-seed-canonicalization/tree/starter). -Notice that there are two instructions on the program and a single test in the -`tests` directory. +Notice that there are two instruction handlers on the program and a single test +in the `tests` directory. -The instructions on the program are: +The instruction handlers on the program are: 1. `create_user_insecure` 2. `claim_insecure` -The `create_user_insecure` instruction simply creates a new account at a PDA -derived using the signer's public key and a passed-in bump. +The `create_user_insecure` instruction handler simply creates a new account at a +PDA derived using the signer's public key and a passed-in bump. -The `claim_insecure` instruction mints 10 tokens to the user and then marks the -account's rewards as claimed so that they can't claim again. +The `claim_insecure` instruction handler mints 10 tokens to the user and then +marks the account's rewards as claimed so that they can't claim again. However, the program doesn't explicitly check that the PDAs in question are using the canonical bump. Have a look at the program to understand what it does before proceeding. -#### 2. Test insecure instructions +### 2. Test Insecure Instruction Handlers -Since the instructions don't explicitly require the `user` PDA to use the -canonical bump, an attacker can create multiple accounts per wallet and claim -more rewards than should be allowed. +Since the instruction handlers don't explicitly require the `user` PDA to use +the canonical bump, an attacker can create multiple accounts per wallet and +claim more rewards than should be allowed. The test in the `tests` directory creates a new keypair called `attacker` to represent an attacker. It then loops through all possible bumps and calls @@ -331,156 +359,188 @@ the attacker has been able to claim rewards multiple times and has earned more than the 10 tokens allotted per user. 
```typescript -it("Attacker can claim more than reward limit with insecure instructions", async () => { - const attacker = Keypair.generate(); - await safeAirdrop(attacker.publicKey, provider.connection); - const ataKey = await getAssociatedTokenAddress(mint, attacker.publicKey); - - let numClaims = 0; - - for (let i = 0; i < 256; i++) { - try { - const pda = createProgramAddressSync( - [attacker.publicKey.toBuffer(), Buffer.from([i])], - program.programId, - ); - await program.methods - .createUserInsecure(i) - .accounts({ - user: pda, - payer: attacker.publicKey, - }) - .signers([attacker]) - .rpc(); - await program.methods - .claimInsecure(i) - .accounts({ - user: pda, - mint, - payer: attacker.publicKey, - userAta: ataKey, - }) - .signers([attacker]) - .rpc(); - - numClaims += 1; - } catch (error) { - if (error.message !== "Invalid seeds, address must fall off the curve") { - console.log(error); +it("allows attacker to claim more than reward limit with insecure instruction handlers", async () => { + try { + const attacker = Keypair.generate(); + await airdropIfRequired( + connection, + attacker.publicKey, + 1 * LAMPORTS_PER_SOL, + 0.5 * LAMPORTS_PER_SOL, + ); + const ataKey = await getAssociatedTokenAddress(mint, attacker.publicKey); + + let successfulClaimCount = 0; + + for (let i = 0; i < 256; i++) { + try { + const pda = anchor.web3.PublicKey.createProgramAddressSync( + [attacker.publicKey.toBuffer(), Buffer.from([i])], + program.programId, + ); + await program.methods + .createUserInsecure(i) + .accounts({ + user: pda, + payer: attacker.publicKey, + }) + .signers([attacker]) + .rpc(); + await program.methods + .claimInsecure(i) + .accounts({ + user: pda, + mint, + payer: attacker.publicKey, + userAta: ataKey, + mintAuthority, + tokenProgram: anchor.utils.token.TOKEN_PROGRAM_ID, + associatedTokenProgram: anchor.utils.token.ASSOCIATED_PROGRAM_ID, + systemProgram: anchor.web3.SystemProgram.programId, + rent: anchor.web3.SYSVAR_RENT_PUBKEY, + }) + .signers([attacker]) + .rpc(); + + successfulClaimCount += 1; + } catch (error) { + if ( + error instanceof Error && + !error.message.includes( + "Invalid seeds, address must fall off the curve", + ) + ) { + console.error(error); + } } } - } - const ata = await getAccount(provider.connection, ataKey); + const ata = await getAccount(connection, ataKey); - console.log( - `Attacker claimed ${numClaims} times and got ${Number(ata.amount)} tokens`, - ); + console.log( + `Attacker claimed ${successfulClaimCount} times and got ${Number( + ata.amount, + )} tokens`, + ); - expect(numClaims).to.be.greaterThan(1); - expect(Number(ata.amount)).to.be.greaterThan(10); + expect(successfulClaimCount).to.be.greaterThan(1); + expect(Number(ata.amount)).to.be.greaterThan(10); + } catch (error) { + throw new Error(`Test failed: ${error.message}`); + } }); ``` Run `anchor test` to see that this test passes, showing that the attacker is -successful. Since the test calles the instructions for every valid bump, it -takes a bit to run, so be patient. +successful. Since the test calls the instruction handlers for every valid bump, +it takes a bit to run, so be patient. ```bash - bump-seed-canonicalization -Attacker claimed 129 times and got 1290 tokens - ✔ Attacker can claim more than reward limit with insecure instructions (133840ms) + Bump seed canonicalization +Attacker claimed 121 times and got 1210 tokens + ✔ allows attacker to claim more than reward limit with insecure instructions (119994ms) ``` -#### 3. Create secure instructions +### 3. 
Create Secure Instruction Handler -Let's demonstrate patching the vulnerability by creating two new instructions: +Let's demonstrate patching the vulnerability by creating two new instruction +handlers: 1. `create_user_secure` 2. `claim_secure` -Before we write the account validation or instruction logic, let's create a new -user type, `UserSecure`. This new type will add the canonical bump as a field on -the struct. +Before we write the account validation or instruction handler logic, let's +create a new user type, `UserSecure`. This new type will add the canonical bump +as a field on the struct. ```rust +// Secure user account structure #[account] +#[derive(InitSpace)] pub struct UserSecure { - auth: Pubkey, - bump: u8, - rewards_claimed: bool, + pub auth: Pubkey, + pub bump: u8, + pub rewards_claimed: bool, } ``` -Next, let's create account validation structs for each of the new instructions. -They'll be very similar to the insecure versions but will let Anchor handle the -derivation and deserialization of the PDAs. +Next, let's create account validation structs for each of the new instruction +handlers. They'll be very similar to the insecure versions but will let Anchor +handle the derivation and deserialization of the PDAs. ```rust +// Account validation struct for securely creating a user account #[derive(Accounts)] pub struct CreateUserSecure<'info> { #[account(mut)] - payer: Signer<'info>, + pub payer: Signer<'info>, #[account( init, - seeds = [payer.key().as_ref()], - // derives the PDA using the canonical bump - bump, payer = payer, - space = 8 + 32 + 1 + 1 + space = DISCRIMINATOR_SIZE + UserSecure::INIT_SPACE, + seeds = [payer.key().as_ref()], + bump )] - user: Account<'info, UserSecure>, - system_program: Program<'info, System>, + pub user: Account<'info, UserSecure>, + pub system_program: Program<'info, System>, } +// Account validation struct for secure claiming of rewards #[derive(Accounts)] pub struct SecureClaim<'info> { #[account( + mut, seeds = [payer.key().as_ref()], bump = user.bump, constraint = !user.rewards_claimed @ ClaimError::AlreadyClaimed, constraint = user.auth == payer.key() )] - user: Account<'info, UserSecure>, + pub user: Account<'info, UserSecure>, #[account(mut)] - payer: Signer<'info>, + pub payer: Signer<'info>, #[account( init_if_needed, payer = payer, associated_token::mint = mint, associated_token::authority = payer )] - user_ata: Account<'info, TokenAccount>, + pub user_ata: Account<'info, TokenAccount>, #[account(mut)] - mint: Account<'info, Mint>, - /// CHECK: mint auth PDA - #[account(seeds = ["mint".as_bytes().as_ref()], bump)] + pub mint: Account<'info, Mint>, + /// CHECK: This is the mint authority PDA, checked by seeds constraint + #[account(seeds = [b"mint"], bump)] pub mint_authority: UncheckedAccount<'info>, - token_program: Program<'info, Token>, - associated_token_program: Program<'info, AssociatedToken>, - system_program: Program<'info, System>, - rent: Sysvar<'info, Rent>, + pub token_program: Program<'info, Token>, + pub associated_token_program: Program<'info, AssociatedToken>, + pub system_program: Program<'info, System>, + pub rent: Sysvar<'info, Rent>, } ``` -Finally, let's implement the instruction logic for the two new instructions. The -`create_user_secure` instruction simply needs to set the `auth`, `bump` and -`rewards_claimed` fields on the `user` account data. +Finally, let's implement the instruction handler logic for the two new +instruction handlers. 
The `create_user_secure` instruction handler simply needs
+to set the `auth`, `bump` and `rewards_claimed` fields on the `user` account
+data.

```rust
+// Secure instruction to create a user account
pub fn create_user_secure(ctx: Context<CreateUserSecure>) -> Result<()> {
-    ctx.accounts.user.auth = ctx.accounts.payer.key();
-    ctx.accounts.user.bump = *ctx.bumps.get("user").unwrap();
-    ctx.accounts.user.rewards_claimed = false;
+    ctx.accounts.user.set_inner(UserSecure {
+        auth: ctx.accounts.payer.key(),
+        bump: ctx.bumps.user,
+        rewards_claimed: false,
+    });

    Ok(())
}
```

-The `claim_secure` instruction needs to mint 10 tokens to the user and set the
-`user` account's `rewards_claimed` field to `true`.
+The `claim_secure` instruction handler needs to mint 10 tokens to the user and
+set the `user` account's `rewards_claimed` field to `true`.

```rust
+// Secure instruction to claim rewards
pub fn claim_secure(ctx: Context<SecureClaim>) -> Result<()> {
+    // Mint tokens to the user's associated token account
    token::mint_to(
        CpiContext::new_with_signer(
            ctx.accounts.token_program.to_account_info(),
@@ -489,104 +549,142 @@ pub fn claim_secure(ctx: Context<SecureClaim>) -> Result<()> {
                to: ctx.accounts.user_ata.to_account_info(),
                authority: ctx.accounts.mint_authority.to_account_info(),
            },
-            &[&[
-                b"mint".as_ref(),
-                &[*ctx.bumps.get("mint_authority").unwrap()],
-            ]],
+            &[&[b"mint", &[ctx.bumps.mint_authority]]],
        ),
        10,
    )?;

+    // Mark rewards as claimed
    ctx.accounts.user.rewards_claimed = true;

    Ok(())
}
```

-#### 4. Test secure instructions
+### 4. Test Secure Instruction Handlers

Let's go ahead and write a test to show that the attacker can no longer claim
-more than once using the new instructions.
+more than once using the new instruction handlers.

Notice that if you start to loop through using multiple PDAs like the old test,
-you can't even pass the non-canonical bump to the instructions. However, you can
-still loop through using the various PDAs and at the end check that only 1 claim
-happened for a total of 10 tokens. Your final test will look something like
-this:
+you can't even pass the non-canonical bump to the instruction handlers. However,
+you can still loop through using the various PDAs and at the end check that only
+1 claim happened for a total of 10 tokens.
Your final test will look something +like this: ```typescript -it.only("Attacker can only claim once with secure instructions", async () => { - const attacker = Keypair.generate(); - await safeAirdrop(attacker.publicKey, provider.connection); - const ataKey = await getAssociatedTokenAddress(mint, attacker.publicKey); - const [userPDA] = findProgramAddressSync( - [attacker.publicKey.toBuffer()], - program.programId, - ); - - await program.methods - .createUserSecure() - .accounts({ - payer: attacker.publicKey, - }) - .signers([attacker]) - .rpc(); - - await program.methods - .claimSecure() - .accounts({ - payer: attacker.publicKey, - userAta: ataKey, - mint, - user: userPDA, - }) - .signers([attacker]) - .rpc(); - - let numClaims = 1; - - for (let i = 0; i < 256; i++) { - try { - const pda = createProgramAddressSync( - [attacker.publicKey.toBuffer(), Buffer.from([i])], - program.programId, - ); - await program.methods - .createUserSecure() - .accounts({ - user: pda, - payer: attacker.publicKey, - }) - .signers([attacker]) - .rpc(); - - await program.methods - .claimSecure() - .accounts({ - payer: attacker.publicKey, - userAta: ataKey, - mint, - user: pda, - }) - .signers([attacker]) - .rpc(); - - numClaims += 1; - } catch {} - } +it("allows attacker to claim only once with secure instruction handlers", async () => { + try { + const attacker = Keypair.generate(); + await airdropIfRequired( + connection, + attacker.publicKey, + 1 * LAMPORTS_PER_SOL, + 0.5 * LAMPORTS_PER_SOL, + ); + const ataKey = await getAssociatedTokenAddress(mint, attacker.publicKey); + const [userPDA] = anchor.web3.PublicKey.findProgramAddressSync( + [attacker.publicKey.toBuffer()], + program.programId, + ); + + await program.methods + .createUserSecure() + .accounts({ + payer: attacker.publicKey, + user: userPDA, + systemProgram: anchor.web3.SystemProgram.programId, + }) + .signers([attacker]) + .rpc(); + + await program.methods + .claimSecure() + .accounts({ + payer: attacker.publicKey, + user: userPDA, + userAta: ataKey, + mint, + mintAuthority, + tokenProgram: anchor.utils.token.TOKEN_PROGRAM_ID, + associatedTokenProgram: anchor.utils.token.ASSOCIATED_PROGRAM_ID, + systemProgram: anchor.web3.SystemProgram.programId, + rent: anchor.web3.SYSVAR_RENT_PUBKEY, + }) + .signers([attacker]) + .rpc(); + + let successfulClaimCount = 1; + + for (let i = 0; i < 256; i++) { + try { + const pda = anchor.web3.PublicKey.createProgramAddressSync( + [attacker.publicKey.toBuffer(), Buffer.from([i])], + program.programId, + ); + await program.methods + .createUserSecure() + .accounts({ + user: pda, + payer: attacker.publicKey, + systemProgram: anchor.web3.SystemProgram.programId, + }) + .signers([attacker]) + .rpc(); + + await program.methods + .claimSecure() + .accounts({ + payer: attacker.publicKey, + user: pda, + userAta: ataKey, + mint, + mintAuthority, + tokenProgram: anchor.utils.token.TOKEN_PROGRAM_ID, + associatedTokenProgram: anchor.utils.token.ASSOCIATED_PROGRAM_ID, + systemProgram: anchor.web3.SystemProgram.programId, + rent: anchor.web3.SYSVAR_RENT_PUBKEY, + }) + .signers([attacker]) + .rpc(); + + successfulClaimCount += 1; + } catch (error) { + if ( + error instanceof Error && + !error.message.includes("Error Number: 2006") && + !error.message.includes( + "Invalid seeds, address must fall off the curve", + ) + ) { + // Comment console error logs to see the test outputs properly + console.error(error); + } + } + } + + const ata = await getAccount(connection, ataKey); - const ata = await getAccount(provider.connection, 
ataKey); + console.log( + `Attacker claimed ${successfulClaimCount} times and got ${Number( + ata.amount, + )} tokens`, + ); - expect(Number(ata.amount)).to.equal(10); - expect(numClaims).to.equal(1); + expect(Number(ata.amount)).to.equal(10); + expect(successfulClaimCount).to.equal(1); + } catch (error) { + throw new Error(`Test failed: ${error.message}`); + } }); ``` ```bash - bump-seed-canonicalization + Bump seed canonicalization Attacker claimed 119 times and got 1190 tokens - ✔ Attacker can claim more than reward limit with insecure instructions (128493ms) - ✔ Attacker can only claim once with secure instructions (1448ms) + ✔ allows attacker to claim more than reward limit with insecure instruction handlers (117370ms) +Attacker claimed 1 times and got 10 tokens + ✔ allows attacker to claim only once with secure instruction handlers (16362ms) ``` If you use Anchor for all of the PDA derivations, this particular exploit is @@ -594,8 +692,7 @@ pretty simple to avoid. However, if you end up doing anything "non-standard," be careful to design your program to explicitly use the canonical bump! If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the same repository](https://github.com/Unboxed-Software/solana-bump-seed-canonicalization/tree/solution). +[`solution` branch of the same repository](https://github.com/solana-developers/bump-seed-canonicalization/tree/solution). ## Challenge @@ -609,6 +706,7 @@ Remember, if you find a bug or exploit in somebody else's program, please alert them! If you find one in your own program, be sure to patch it right away. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=d3f6ca7a-11c8-421f-b7a3-d6c08ef1aa8b)! diff --git a/content/courses/program-security/closing-accounts.md b/content/courses/program-security/closing-accounts.md index 2f62f0c9c..d4a9b28de 100644 --- a/content/courses/program-security/closing-accounts.md +++ b/content/courses/program-security/closing-accounts.md @@ -33,7 +33,7 @@ While it sounds simple, closing accounts properly can be tricky. There are a number of ways an attacker could circumvent having the account closed if you don't follow specific steps. -To get a better understanding of these attack vectors, let’s explore each of +To get a better understanding of these attack vectors, let's explore each of these scenarios in depth. ### Insecure account closing @@ -45,10 +45,10 @@ account. This resets the owner from the owning program to the system program. Take a look at the example below. The instruction requires two accounts: 1. `account_to_close` - the account to be closed -2. `destination` - the account that should receive the closed account’s lamports +2. `destination` - the account that should receive the closed account's lamports The program logic is intended to close an account by simply increasing the -`destination` account’s lamports by the amount stored in the `account_to_close` +`destination` account's lamports by the amount stored in the `account_to_close` and setting the `account_to_close` lamports to 0. With this program, after a full transaction is processed, the `account_to_close` will be garbage collected by the runtime. @@ -238,7 +238,7 @@ Fortunately, Anchor makes all of this much simpler with the `#[account(close = )]` constraint. This constraint handles everything required to securely close an account: -1. Transfers the account’s lamports to the given `` +1. 
Transfers the account's lamports to the given `` 2. Zeroes out the account data 3. Sets the account discriminator to the `CLOSED_ACCOUNT_DISCRIMINATOR` variant @@ -258,8 +258,8 @@ pub struct CloseAccount { } ``` -The `force_defund` instruction is an optional addition that you’ll have to -implement on your own if you’d like to utilize it. +The `force_defund` instruction is an optional addition that you'll have to +implement on your own if you'd like to utilize it. ## Lab diff --git a/content/courses/program-security/duplicate-mutable-accounts.md b/content/courses/program-security/duplicate-mutable-accounts.md index 39ee8c079..b52f1e29e 100644 --- a/content/courses/program-security/duplicate-mutable-accounts.md +++ b/content/courses/program-security/duplicate-mutable-accounts.md @@ -3,7 +3,7 @@ title: Duplicate Mutable Accounts objectives: - Explain the security risks associated with instructions that require two mutable accounts of the same type and how to avoid them - - Implement a check for duplicate mutable accounts using long-form Rust + - Implement a check for duplicate mutable accounts using native Rust - Implement a check for duplicate mutable accounts using Anchor constraints description: "Under vulnerabilities that can occur with instruction handlers that handle @@ -13,49 +13,38 @@ description: ## Summary - When an instruction requires two mutable accounts of the same type, an - attacker can pass in the same account twice, causing the account to be mutated - in unintended ways. + attacker can pass in the same account twice, leading to unintended mutations. - To check for duplicate mutable accounts in Rust, simply compare the public keys of the two accounts and throw an error if they are the same. - ```rust - if ctx.accounts.account_one.key() == ctx.accounts.account_two.key() { - return Err(ProgramError::InvalidArgument) - } - ``` +### Checking for Duplicate Mutable Accounts in Rust -- In Anchor, you can use `constraint` to add an explicit constraint to an - account checking that it is not the same as another account. +In Rust, you can simply compare the public keys of the accounts and return an +error if they are identical: + +```rust +if ctx.accounts.account_one.key() == ctx.accounts.account_two.key() { + return Err(ProgramError::InvalidArgument) +} +``` + +### Using Constraints in Anchor + +In Anchor, you can add an explicit `constraint` to an account, ensuring it is +not the same as another account. ## Lesson -Duplicate Mutable Accounts refers to an instruction that requires two mutable -accounts of the same type. When this occurs, you should validate that two -accounts are different to prevent the same account from being passed into the -instruction twice. - -Since the program treats each account as separate, passing in the same account -twice could result in the second account being mutated in unintended ways. This -could result in very minor issues, or catastrophic ones - it really depends on -what data the code changes and how these accounts are used. Regardless, this is -a vulnerability all developers should be aware of. - -#### No check - -For example, imagine a program that updates a `data` field for `user_a` and -`user_b` in a single instruction. The value that the instruction sets for -`user_a` is different from `user_b`. 
Without verifying that `user_a` and
-`user_b` are different, the program would update the `data` field on the
-`user_a` account, then update the `data` field a second time with a different
-value under the assumption that `user_b` is a separate account.
-
-You can see this example in the code below.Tthere is no check to verify that
-`user_a` and `user_b` are not the same account. Passing in the same account for
-`user_a` and `user_b` will result in the `data` field for the account being set
-to `b` even though the intent is to set both values `a` and `b` on separate
-accounts. Depending on what `data` represents, this could be a minor unintended
-side-effect, or it could mean a severe security risk. allowing `user_a` and
-`user_b` to be the same account could result in
+**Duplicate Mutable Accounts** occur when an instruction requires two mutable
+accounts of the same type. If the same account is passed twice, it can be
+mutated in unintended ways, potentially causing security vulnerabilities.
+
+### No check
+
+Consider a program that updates a data field for `user_a` and `user_b` in a
+single instruction. If the same account is passed for both `user_a` and
+`user_b`, the program will overwrite the data field with the second value,
+potentially leading to unintended side effects.

```rust
use anchor_lang::prelude::*;
@@ -67,32 +56,31 @@ pub mod duplicate_mutable_accounts_insecure {
    use super::*;

    pub fn update(ctx: Context<Update>, a: u64, b: u64) -> Result<()> {
-        let user_a = &mut ctx.accounts.user_a;
-        let user_b = &mut ctx.accounts.user_b;
-
-        user_a.data = a;
-        user_b.data = b;
+        ctx.accounts.user_a.data = a;
+        ctx.accounts.user_b.data = b;
        Ok(())
    }
}

#[derive(Accounts)]
pub struct Update<'info> {
-    user_a: Account<'info, User>,
-    user_b: Account<'info, User>,
+    #[account(mut)]
+    pub user_a: Account<'info, User>,
+    #[account(mut)]
+    pub user_b: Account<'info, User>,
}

#[account]
+#[derive(Default)]
pub struct User {
-    data: u64,
+    pub data: u64,
}
```

-#### Add check in instruction
+#### Adding a check in Rust

-To fix this problem with plan Rust, simply add a check in the instruction logic
-to verify that the public key of `user_a` isn't the same as the public key of
-`user_b`, returning an error if they are the same.
+To avoid this, add a check in the instruction logic to ensure the accounts are
+different:

```rust
if ctx.accounts.user_a.key() == ctx.accounts.user_b.key() {
@@ -113,83 +101,84 @@ pub mod duplicate_mutable_accounts_secure {

    pub fn update(ctx: Context<Update>, a: u64, b: u64) -> Result<()> {
        if ctx.accounts.user_a.key() == ctx.accounts.user_b.key() {
            return Err(ProgramError::InvalidArgument.into())
        }
-        let user_a = &mut ctx.accounts.user_a;
-        let user_b = &mut ctx.accounts.user_b;
-
-        user_a.data = a;
-        user_b.data = b;
+        ctx.accounts.user_a.data = a;
+        ctx.accounts.user_b.data = b;
        Ok(())
    }
}

#[derive(Accounts)]
pub struct Update<'info> {
-    user_a: Account<'info, User>,
-    user_b: Account<'info, User>,
+    #[account(mut)]
+    pub user_a: Account<'info, User>,
+    #[account(mut)]
+    pub user_b: Account<'info, User>,
}

#[account]
+#[derive(Default)]
pub struct User {
-    data: u64,
+    pub data: u64,
}
```

-#### Use Anchor `constraint`
+#### Using Anchor Constraint

-An even better solution if you're using Anchor is to add the check to the
-account validation struct instead of the instruction logic.
+An even better solution in Anchor is to use +[the `constraint` keyword](https://www.anchor-lang.com/docs/account-constraints) +in the account validation struct. -You can use the `#[account(..)]` attribute macro and the `constraint` keyword to -add a manual constraint to an account. The `constraint` keyword will check -whether the expression that follows evaluates to true or false, returning an -error if the expression evaluates to false. +You can use the #[account(..)] attribute macro and the constraint keyword to add +a manual constraint to an account. The constraint keyword will check whether the +expression that follows evaluates to true or false, returning an error if the +expression evaluates to false. -The example below moves the check from the instruction logic to the account -validation struct by adding a `constraint` to the `#[account(..)]` attribute. +This ensures the check is performed automatically during account validation: ```rust use anchor_lang::prelude::*; -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); +declare_id!("AjBhRphs24vC1V8zZM25PTuLJhJJXFnYbimsZF8jpJAS"); #[program] pub mod duplicate_mutable_accounts_recommended { use super::*; pub fn update(ctx: Context, a: u64, b: u64) -> Result<()> { - let user_a = &mut ctx.accounts.user_a; - let user_b = &mut ctx.accounts.user_b; - - user_a.data = a; - user_b.data = b; + ctx.accounts.user_a.data = a; + ctx.accounts.user_b.data = b; Ok(()) } } #[derive(Accounts)] pub struct Update<'info> { - #[account(constraint = user_a.key() != user_b.key())] - user_a: Account<'info, User>, - user_b: Account<'info, User>, + #[account( + mut, + constraint = user_a.key() != user_b.key())] + pub user_a: Account<'info, User>, + #[account(mut)] + pub user_b: Account<'info, User>, } #[account] +#[derive(Default)] pub struct User { - data: u64, + pub data: u64, } ``` ## Lab -Let’s practice by creating a simple Rock Paper Scissors program to demonstrate +Let's practice by creating a simple Rock Paper Scissors program to demonstrate how failing to check for duplicate mutable accounts can cause undefined behavior within your program. This program will initialize “player” accounts and have a separate instruction -that requires two player accounts to represent starting a game of rock paper -scissors. +that requires two player accounts to represent starting a game of rock, paper +and scissors. - An `initialize` instruction to initialize a `PlayerState` account - A `rock_paper_scissors_shoot_insecure` instruction that requires two @@ -199,10 +188,10 @@ scissors. `rock_paper_scissors_shoot_insecure` instruction but adds a constraint that ensures the two player accounts are different -#### 1. Starter +### Starter To get started, download the starter code on the `starter` branch -of [this repository](https://github.com/unboxed-software/solana-duplicate-mutable-accounts/tree/starter). +of [this repository](https://github.com/solana-developers/duplicate-mutable-accounts/tree/starter). The starter code includes a program with two instructions and the boilerplate setup for the test file. @@ -215,11 +204,17 @@ player, but does not check that the accounts passed into the instruction are different. This means a single account can be used for both `PlayerState` accounts in the instruction. 
-```rust
+```rust filename="constants.rs"
+pub const DISCRIMINATOR_SIZE: usize = 8;
+```
+
+```rust filename="lib.rs"
use anchor_lang::prelude::*;
-use borsh::{BorshDeserialize, BorshSerialize};

-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
+mod constants;
+use constants::DISCRIMINATOR_SIZE;
+
+declare_id!("Lo5sj2wWy4BHbe8kCSUvgdhzFbv9c6CEERfgAXusBj9");

#[program]
pub mod duplicate_mutable_accounts {
@@ -237,7 +232,7 @@ pub mod duplicate_mutable_accounts {
        player_two_choice: RockPaperScissors,
    ) -> Result<()> {
        ctx.accounts.player_one.choice = Some(player_one_choice);
        ctx.accounts.player_two.choice = Some(player_two_choice);

        Ok(())
    }

@@ -248,7 +242,7 @@ pub struct Initialize<'info> {
    #[account(
        init,
        payer = payer,
-        space = 8 + 32 + 8
+        space = DISCRIMINATOR_SIZE + PlayerState::INIT_SPACE
    )]
    pub new_player: Account<'info, PlayerState>,
    #[account(mut)]
@@ -265,12 +259,13 @@ pub struct RockPaperScissorsInsecure<'info> {
}

#[account]
+#[derive(Default, InitSpace)]
pub struct PlayerState {
-    player: Pubkey,
-    choice: Option<RockPaperScissors>,
+    pub player: Pubkey,
+    pub choice: Option<RockPaperScissors>,
}

-#[derive(Clone, Copy, BorshDeserialize, BorshSerialize)]
+#[derive(AnchorSerialize, AnchorDeserialize, Clone, Copy, PartialEq, Eq, InitSpace)]
pub enum RockPaperScissors {
    Rock,
    Paper,
@@ -278,7 +273,7 @@ pub enum RockPaperScissors {
}
```

-#### 2. Test `rock_paper_scissors_shoot_insecure` instruction
+### Test rock_paper_scissors_shoot_insecure instruction

The test file includes the code to invoke the `initialize` instruction twice to
create two player accounts.

Add a test to invoke the `rock_paper_scissors_shoot_insecure` instruction by
passing in the `playerOne.publicKey` for both `playerOne` and `playerTwo`.

```typescript
describe("duplicate-mutable-accounts", () => {
-  ...
-  it("Invoke insecure instruction", async () => {
+  ...
+  it("Invokes insecure instruction", async () => {
    await program.methods
      .rockPaperScissorsShootInsecure({ rock: {} }, { scissors: {} })
      .accounts({
@@ -305,7 +300,7 @@ describe("duplicate-mutable-accounts", () => {
  })
})
```

-Run `anchor test` to see that the transactions completes successfully, even
+Run `anchor test` to see that the transaction completes successfully, even
though the same account is used as two accounts in the instruction. Since the
`playerOne` account is used as both players in the instruction, note the
`choice` stored on the `playerOne` account is also overridden and set
@@ -318,14 +313,14 @@ duplicate-mutable-accounts
  ✔ Invoke insecure instruction (406ms)

-Not only does allowing duplicate accounts not make a whole lot of sense for the
-game, it also causes undefined behavior. If we were to build out this program
-further, the program only has one chosen option and therefore can't compare
-against a second option. The game would end in a draw every time. It's also
-unclear to a human whether `playerOne`'s choice should be rock or scissors, so
-the program behavior is strange.
+Not only does allowing duplicate accounts make little sense for the game, but
+it also causes undefined behavior. If we were to build out this program
+further, the program would only have one chosen option and therefore couldn't
+compare it against a second option. The game would end in a draw every time.
+It's also unclear to a human whether `playerOne`'s choice should be rock or
+scissors, so the program behavior is strange.

-#### 3. Add `rock_paper_scissors_shoot_secure` instruction
+### Add rock_paper_scissors_shoot_secure instruction

Next, return to `lib.rs` and add a `rock_paper_scissors_shoot_secure`
instruction that uses the `#[account(...)]` macro to add an additional
@@ -335,14 +330,14 @@ instruction that uses the `#[account(...)]` macro to add an additional
#[program]
pub mod duplicate_mutable_accounts {
    use super::*;
-    ...
+    ...
    pub fn rock_paper_scissors_shoot_secure(
        ctx: Context<RockPaperScissorsSecure>,
        player_one_choice: RockPaperScissors,
        player_two_choice: RockPaperScissors,
    ) -> Result<()> {
        ctx.accounts.player_one.choice = Some(player_one_choice);
        ctx.accounts.player_two.choice = Some(player_two_choice);

        Ok(())
    }

@@ -360,18 +354,18 @@ pub struct RockPaperScissorsSecure<'info> {
}

-#### 7. Test `rock_paper_scissors_shoot_secure` instruction
+### Test rock_paper_scissors_shoot_secure instruction

-To test the `rock_paper_scissors_shoot_secure` instruction, we’ll invoke the
-instruction twice. First, we’ll invoke the instruction using two different
-player accounts to check that the instruction works as intended. Then, we’ll
+To test the `rock_paper_scissors_shoot_secure` instruction, we'll invoke the
+instruction twice. First, we'll invoke the instruction using two different
+player accounts to check that the instruction works as intended. Then, we'll
invoke the instruction using the `playerOne.publicKey` as both player accounts,
which we expect to fail.

```typescript
describe("duplicate-mutable-accounts", () => {
-  ...
-  it("Invoke secure instruction", async () => {
+  ...
+  it("Invokes secure instruction", async () => {
    await program.methods
      .rockPaperScissorsShootSecure({ rock: {} }, { scissors: {} })
      .accounts({
@@ -407,11 +401,11 @@ Run `anchor test` to see that the instruction works as intended and using the
`playerOne` account twice returns the expected error.

```bash
-'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS invoke [1]',
+'Program Lo5sj2wWy4BHbe8kCSUvgdhzFbv9c6CEERfgAXusBj9 invoke [1]',
'Program log: Instruction: RockPaperScissorsShootSecure',
'Program log: AnchorError caused by account: player_one. Error Code: ConstraintRaw. Error Number: 2003. Error Message: A raw constraint was violated.',
-'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS consumed 5104 of 200000 compute units',
-'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS failed: custom program error: 0x7d3'
+'Program Lo5sj2wWy4BHbe8kCSUvgdhzFbv9c6CEERfgAXusBj9 consumed 3414 of 200000 compute units',
+'Program Lo5sj2wWy4BHbe8kCSUvgdhzFbv9c6CEERfgAXusBj9 failed: custom program error: 0x7d3'
```

The simple constraint is all it takes to close this loophole. While somewhat
@@ -423,7 +417,7 @@ whether that is explicit.

If you want to take a look at the final solution code you can find it on the
`solution` branch of
-[the repository](https://github.com/Unboxed-Software/solana-duplicate-mutable-accounts/tree/solution).
+[the repository](https://github.com/solana-developers/duplicate-mutable-accounts/tree/solution).
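
If you prefer to keep the duplicate-account check in the instruction logic
rather than in the validation struct, Anchor also provides the
`require_keys_neq!` macro, which expresses the same comparison. Below is a
minimal sketch of that alternative; the `GameError::DuplicatePlayer` variant is
a hypothetical name for illustration and is not part of the lab code:

```rust
use anchor_lang::prelude::*;

// Sketch: the same duplicate-account check, performed in the instruction
// logic with require_keys_neq! instead of a raw `constraint`.
pub fn rock_paper_scissors_shoot_checked(
    ctx: Context<RockPaperScissorsSecure>,
    player_one_choice: RockPaperScissors,
    player_two_choice: RockPaperScissors,
) -> Result<()> {
    // Fails the transaction with the given error if the two pubkeys are equal
    require_keys_neq!(
        ctx.accounts.player_one.key(),
        ctx.accounts.player_two.key(),
        GameError::DuplicatePlayer
    );

    ctx.accounts.player_one.choice = Some(player_one_choice);
    ctx.accounts.player_two.choice = Some(player_two_choice);
    Ok(())
}

// Hypothetical error type for this sketch
#[error_code]
pub enum GameError {
    #[msg("player_one and player_two must be different accounts")]
    DuplicatePlayer,
}
```

Either form fails the transaction before any account data is written, which is
the property that matters.
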
## Challenge diff --git a/content/courses/program-security/owner-checks.md b/content/courses/program-security/owner-checks.md index 1b95f7281..99466ac52 100644 --- a/content/courses/program-security/owner-checks.md +++ b/content/courses/program-security/owner-checks.md @@ -3,11 +3,11 @@ title: Owner Checks objectives: - Explain the security risks associated with not performing appropriate owner checks - - Implement owner checks using long-form Rust - - Use Anchor’s `Account<'info, T>` wrapper and an account type to automate + - Use Anchor's `Account<'info, T>` wrapper and an account type to automate owner checks - - Use Anchor’s `#[account(owner = )]` constraint to explicitly define an + - Use Anchor's `#[account(owner = )]` constraint to explicitly define an external program that should own an account + - Implement owner checks using native Rust description: "Understand the use of account owner checks when processing incoming instructions." @@ -15,11 +15,17 @@ description: ## Summary -- Use **Owner Checks** to verify that accounts are owned by the expected - program. Without appropriate owner checks, accounts owned by unexpected - programs could be used in an instruction. -- To implement an owner check in Rust, simply check that an account’s owner - matches an expected program ID +- **Owner checks** ensure that accounts are owned by the expected program. + Without owner checks, accounts owned by other programs can be used in an + instruction handler. +- Anchor program account types implement the `Owner` trait, allowing + `Account<'info, T>` to automatically verify program ownership. +- You can also use Anchor's + [`#[account(owner = )]`](https://www.anchor-lang.com/docs/account-constraints) + constraint to define an account's owner when it's external to the current + program. +- To implement an owner check in native Rust, verify that the account's owner + matches the expected program ID. ```rust if ctx.accounts.account.owner != ctx.program_id { @@ -27,20 +33,15 @@ if ctx.accounts.account.owner != ctx.program_id { } ``` -- Anchor program account types implement the `Owner` trait which allows the - `Account<'info, T>` wrapper to automatically verify program ownership -- Anchor gives you the option to explicitly define the owner of an account if it - should be anything other than the currently executing program - ## Lesson -Owner checks are used to verify that an account passed into an instruction is -owned by an expected program. This prevents accounts owned by an unexpected -program from being used in an instruction. +Owner checks are used to verify that an account passed into an instruction +handler is owned by the expected program, preventing exploitation by accounts +from different programs. -As a refresher, the `AccountInfo` struct contains the following fields. An owner -check refers to checking that the `owner` field in the `AccountInfo` matches an -expected program ID. +The `AccountInfo` struct contains several fields, including the `owner`, which +represents the **program** that owns the account. Owner checks ensure that this +`owner` field in the `AccountInfo` matches the expected program ID. ```rust /// Account information @@ -65,27 +66,12 @@ pub struct AccountInfo<'a> { } ``` -#### Missing owner check - -The example below shows an `admin_instruction` intended to be accessible only by -an `admin` account stored on an `admin_config` account. 
- -Although the instruction checks the `admin` account signed the transaction and -matches the `admin` field stored on the `admin_config` account, there is no -owner check to verify the `admin_config` account passed into the instruction is -owned by the executing program. - -Since the `admin_config` is unchecked as indicated by the `AccountInfo` type, a -fake `admin_config` account owned by a different program could be used in the -`admin_instruction`. This means that an attacker could create a program with an -`admin_config` whose data structure matches the `admin_config` of your program, -set their public key as the `admin` and pass their `admin_config` account into -your program. This would let them spoof your program into thinking that they are -the authorized admin for your program. +### Missing owner check -This simplified example only prints the `admin` to the program logs. However, -you can imagine how a missing owner check could allow fake accounts to exploit -an instruction. +In the following example, an `admin_instruction` is intended to be restricted to +an `admin` account stored in the `admin_config` account. However, it fails to +check whether the program owns the `admin_config` account. Without this check, +an attacker can spoof the account. ```rust use anchor_lang::prelude::*; @@ -95,7 +81,7 @@ declare_id!("Cft4eTTrt4sJU4Ar35rUQHx6PSXfJju3dixmvApzhWws"); #[program] pub mod owner_check { use super::*; - ... + ... pub fn admin_instruction(ctx: Context) -> Result<()> { let account_data = ctx.accounts.admin_config.try_borrow_data()?; @@ -112,7 +98,8 @@ pub mod owner_check { #[derive(Accounts)] pub struct Unchecked<'info> { - admin_config: AccountInfo<'info>, + /// CHECK: This account will not be checked by Anchor + admin_config: UncheckedAccount<'info>, admin: Signer<'info>, } @@ -122,11 +109,10 @@ pub struct AdminConfig { } ``` -#### Add owner check +### Add owner check -In vanilla Rust, you could solve this problem by comparing the `owner` field on -the account to the program ID. If they do not match, you would return an -`IncorrectProgramId` error. +To resolve this issue in native Rust, compare the `owner` field with the program +ID: ```rust if ctx.accounts.admin_config.owner != ctx.program_id { @@ -134,9 +120,8 @@ if ctx.accounts.admin_config.owner != ctx.program_id { } ``` -Adding an owner check prevents accounts owned by an unexpected program to be -passed in as the `admin_config` account. If a fake `admin_config` account was -used in the `admin_instruction`, then the transaction would fail. +Adding an `owner` check ensures that accounts from other programs cannot be +passed into the instruction handler. ```rust use anchor_lang::prelude::*; @@ -166,7 +151,8 @@ pub mod owner_check { #[derive(Accounts)] pub struct Unchecked<'info> { - admin_config: AccountInfo<'info>, + /// CHECK: This account will not be checked by Anchor + admin_config: UncheckedAccount<'info>, admin: Signer<'info>, } @@ -176,26 +162,14 @@ pub struct AdminConfig { } ``` -#### Use Anchor’s `Account<'info, T>` - -Anchor can make this simpler with the `Account` type. - -`Account<'info, T>` is a wrapper around `AccountInfo` that verifies program -ownership and deserializes underlying data into the specified account type `T`. -This in turn allows you to use `Account<'info, T>` to easily validate ownership. - -For context, the `#[account]` attribute implements various traits for a data -structure representing an account. One of these is the `Owner` trait which -defines an address expected to own an account. 
The owner is set as the program -ID specified in the `declare_id!` macro. +### Use Anchor's `Account<'info, T>` -In the example below, `Account<'info, AdminConfig>` is used to validate the -`admin_config`. This will automatically perform the owner check and deserialize -the account data. Additionally, the `has_one` constraint is used to check that -the `admin` account matches the `admin` field stored on the `admin_config` -account. +Anchor simplifies owner checks with the `Account` type, which wraps +`AccountInfo` and automatically verifies ownership. -This way, you don’t need to clutter your instruction logic with owner checks. +In the following example, `Account<'info, AdminConfig>` validates the +`admin_config` account, and the `has_one` constraint checks that the admin +account matches the `admin` field in `admin_config`. ```rust use anchor_lang::prelude::*; @@ -205,7 +179,7 @@ declare_id!("Cft4eTTrt4sJU4Ar35rUQHx6PSXfJju3dixmvApzhWws"); #[program] pub mod owner_check { use super::*; - ... + ... pub fn admin_instruction(ctx: Context) -> Result<()> { msg!("Admin: {}", ctx.accounts.admin_config.admin.to_string()); Ok(()) @@ -227,19 +201,19 @@ pub struct AdminConfig { } ``` -#### Use Anchor’s `#[account(owner = )]` constraint +### Use Anchor's `#[account(owner = )]` constraint -In addition to the `Account` type, you can use an `owner` constraint. The -`owner` constraint allows you to define the program that should own an account -if it’s different from the currently executing one. This comes in handy if, for -example, you are writing an instruction that expects an account to be a PDA -derived from a different program. You can use the `seeds` and `bump` constraints -and define the `owner` to properly derive and verify the address of the account -passed in. +In addition to the `Account` type, you can use the Anchor's +[`owner` constraint](https://www.anchor-lang.com/docs/account-constraints) to +specify the program that should own an account when it differs from the +executing program. This is particularly useful when an instruction handler +expects an account to be a PDA created by another program. By using the `seeds` +and `bump` constraints along with the `owner`, you can properly derive and +verify the account's address. -To use the `owner` constraint, you’ll have to have access to the public key of -the program you expect to own an account. You can either pass the program in as -an additional account or hard-code the public key somewhere in your program. +To apply the `owner` constraint, you need access to the public key of the +program expected to own the account. This can be provided either as an +additional account or by hard-coding the public key within your program. ```rust use anchor_lang::prelude::*; @@ -280,41 +254,43 @@ pub struct AdminConfig { ## Lab -In this lab we’ll use two programs to demonstrate how a missing owner check -could allow a fake account to drain the tokens from a simplified token “vault” -account (note that this is very similar to the lab from the Signer Authorization -lesson). +In this lab, we'll demonstrate how the absence of an owner check can allow a +malicious actor to drain tokens from a simplified token vault. This is similar +to the lab from the +[Signer Authorization lesson](/content/courses/program-security/signer-auth.md). -To help illustrate this, one program will be missing an account owner check on -the vault account it withdraws tokens to. 
+We'll use two programs to illustrate this: -The second program will be a direct clone of the first program created by a -malicious user to create an account identical to the first program’s vault -account. +1. One program lacks an owner check on the vault account it withdraws tokens + from. +2. The second program is a clone created by a malicious user to mimic the first + program's vault account. -Without the owner check, this malicious user will be able to pass in the vault -account owned by their “faked” program and the original program will still -execute. +Without the owner check, the malicious user can pass in their vault account +owned by a fake program, and the original program will still execute the +withdrawal. -#### 1. Starter +### 1. Starter -To get started, download the starter code from the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-owner-checks/tree/starter). -The starter code includes two programs `clone` and `owner_check` and the -boilerplate setup for the test file. +Begin by downloading the starter code from the +[`starter` branch of this repository](https://github.com/solana-developers/owner-checks/tree/starter). +The starter code includes two programs: `clone` and `owner_check`, and the setup +for the test file. -The `owner_check` program includes two instructions: +The `owner_check` program includes two instruction handlers: -- `initialize_vault` initializes a simplified vault account that stores the - addresses of a token account and an authority account -- `insecure_withdraw` withdraws tokens from the token account, but is missing an - owner check for the vault account +- `initialize_vault`: Initializes a simplified vault account storing the + addresses of a token account and an authority account. +- `insecure_withdraw`: Withdraws tokens from the token account but lacks an + owner check for the vault account. ```rust use anchor_lang::prelude::*; use anchor_spl::token::{self, Mint, Token, TokenAccount}; -declare_id!("HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB"); +declare_id!("3uF3yaymq1YBmDDHpRPwifiaBf4eK8M2jLgaMcCTg9n9"); + +pub const DISCRIMINATOR_SIZE: usize = 8; #[program] pub mod owner_check { @@ -339,7 +315,7 @@ pub mod owner_check { let seeds = &[ b"token".as_ref(), - &[*ctx.bumps.get("token_account").unwrap()], + &[ctx.bumps.token_account], ]; let signer = [&seeds[..]]; @@ -363,7 +339,7 @@ pub struct InitializeVault<'info> { #[account( init, payer = authority, - space = 8 + 32 + 32, + space = DISCRIMINATOR_SIZE + Vault::INIT_SPACE, )] pub vault: Account<'info, Vault>, #[account( @@ -385,7 +361,7 @@ pub struct InitializeVault<'info> { #[derive(Accounts)] pub struct InsecureWithdraw<'info> { - /// CHECK: + /// CHECK: This account will not be checked by anchor pub vault: UncheckedAccount<'info>, #[account( mut, @@ -400,23 +376,26 @@ pub struct InsecureWithdraw<'info> { } #[account] +#[derive(Default, InitSpace)] pub struct Vault { token_account: Pubkey, authority: Pubkey, } ``` -The `clone` program includes a single instruction: +The `clone` program includes a single instruction handler: -- `initialize_vault` initializes a “vault” account that mimics the vault account - of the `owner_check` program. It stores the address of the real vault’s token - account, but allows the malicious user to put their own authority account. +- `initialize_vault`: Initializes a fake vault account that mimics the vault + account of the `owner_check` program, allowing the malicious user to set their + own authority. 
```rust
use anchor_lang::prelude::*;
use anchor_spl::token::TokenAccount;

-declare_id!("DUN7nniuatsMC7ReCh5eJRQExnutppN1tAfjfXFmGDq3");
+declare_id!("2Gn5MFGMvRjd548z6vhreh84UiL7L5TFzV5kKGmk4Fga");
+
+pub const DISCRIMINATOR_SIZE: usize = 8;

#[program]
pub mod clone {
@@ -434,7 +413,7 @@ pub struct InitializeVault<'info> {
    #[account(
        init,
        payer = authority,
-        space = 8 + 32 + 32,
+        space = DISCRIMINATOR_SIZE + Vault::INIT_SPACE,
    )]
    pub vault: Account<'info, Vault>,
    pub token_account: Account<'info, TokenAccount>,
@@ -444,98 +423,92 @@ pub struct InitializeVault<'info> {
}

#[account]
+#[derive(Default, InitSpace)]
pub struct Vault {
    token_account: Pubkey,
    authority: Pubkey,
}
```

-#### 2. Test `insecure_withdraw` instruction
-
-The test file includes a test to invoke the `initialize_vault` instruction on
-the `owner_check` program using the provider wallet as the `authority` and then
-mints 100 tokens to the token account.
-
-The test file also includes a test to invoke the `initialize_vault` instruction
-on the `clone` program to initialize a fake `vault` account storing the same
-`tokenPDA` account, but a different `authority`. Note that no new tokens are
-minted here.
+### 2. Test insecure_withdraw Instruction Handler

-Let’s add a test to invoke the `insecure_withdraw` instruction. This test should
-pass in the cloned vault and the fake authority. Since there is no owner check
-to verify the `vaultClone` account is owned by the `owner_check` program, the
-instruction’s data validation check will pass and show `walletFake` as a valid
-authority. The tokens from the `tokenPDA` account will then be withdrawn to the
-`withdrawDestinationFake` account.
+The test file contains tests that initialize a vault in both programs. We'll add
+a test to invoke the `insecure_withdraw` instruction handler, showing how the
+lack of an owner check allows token withdrawal from the original program's
+vault.

```typescript
-describe("owner-check", () => {
-   ...
-  it("Insecure withdraw", async () => {
-    const tx = await program.methods
+describe("Owner Check", () => {
+  ...
+  it("performs insecure withdraw", async () => {
+    try {
+      const transaction = await program.methods
        .insecureWithdraw()
        .accounts({
-            vault: vaultClone.publicKey,
-            tokenAccount: tokenPDA,
-            withdrawDestination: withdrawDestinationFake,
-            authority: walletFake.publicKey,
+          vault: vaultCloneAccount.publicKey,
+          tokenAccount: tokenPDA,
+          withdrawDestination: unauthorizedWithdrawDestination,
+          authority: unauthorizedWallet.publicKey,
        })
-        .transaction()
-
-    await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake])
+        .transaction();

-    const balance = await connection.getTokenAccountBalance(tokenPDA)
-    expect(balance.value.uiAmount).to.eq(0)
-  })
+      await anchor.web3.sendAndConfirmTransaction(connection, transaction, [
+        unauthorizedWallet,
+      ]);
+      const tokenAccountInfo = await getAccount(connection, tokenPDA);
+      expect(Number(tokenAccountInfo.amount)).to.equal(0);
+    } catch (error) {
+      console.error("Insecure withdraw failed:", error);
+      throw error;
+    }
+  });
})
```

-Run `anchor test` to see that the `insecure_withdraw` completes successfully.
+Run `anchor test` to verify that the `insecure_withdraw` instruction handler
+completes successfully.
```bash owner-check - ✔ Initialize Vault (808ms) - ✔ Initialize Fake Vault (404ms) - ✔ Insecure withdraw (409ms) + ✔ initializes vault (866ms) + ✔ initializes fake vault (443ms) + ✔ performs insecure withdraw (444ms) ``` -Note that `vaultClone` deserializes successfully even though Anchor -automatically initializes new accounts with a unique 8 byte discriminator and -checks the discriminator when deserializing an account. This is because the -discriminator is a hash of the account type name. + + +The `vaultCloneAccount` deserializes successfully due to both programs using the +same discriminator, derived from the identical `Vault` struct name. ```rust #[account] +#[derive(Default, InitSpace)] pub struct Vault { token_account: Pubkey, authority: Pubkey, } ``` -Since both programs initialize identical accounts and both structs are named -`Vault`, the accounts have the same discriminator even though they are owned by -different programs. +### 3. Add secure_withdraw Instruction Handler -#### 3. Add `secure_withdraw` instruction +We'll now close the security loophole by adding a `secure_withdraw` instruction +handler with an `Account<'info, Vault>` type to ensure an owner check is +performed. -Let’s close up this security loophole. - -In the `lib.rs` file of the `owner_check` program add a `secure_withdraw` -instruction and a `SecureWithdraw` accounts struct. - -In the `SecureWithdraw` struct, let’s use `Account<'info, Vault>` to ensure that -an owner check is performed on the `vault` account. We’ll also use the `has_one` -constraint to check that the `token_account` and `authority` passed into the -instruction match the values stored on the `vault` account. +In the `lib.rs` file of the `owner_check` program, add a `secure_withdraw` +instruction handler and a `SecureWithdraw` accounts struct. The `has_one` +constraint will be used to ensure that the `token_account` and `authority` +passed into the instruction handler match the values stored in the `vault` +account. ```rust #[program] pub mod owner_check { use super::*; - ... + ... - pub fn secure_withdraw(ctx: Context) -> Result<()> { + pub fn secure_withdraw(ctx: Context) -> Result<()> { let amount = ctx.accounts.token_account.amount; let seeds = &[ @@ -580,109 +553,116 @@ pub struct SecureWithdraw<'info> { } ``` -#### 4. Test `secure_withdraw` instruction +### 4. Test secure_withdraw Instruction Handler -To test the `secure_withdraw` instruction, we’ll invoke the instruction twice. -First, we’ll invoke the instruction using the `vaultClone` account, which we -expect to fail. Then, we’ll invoke the instruction using the correct `vault` -account to check that the instruction works as intended. +To test the `secure_withdraw` instruction handler, we'll invoke it twice. First, +we'll use the `vaultCloneAccount` account, expecting it to fail. Then, we'll +invoke the instruction handler with the correct `vaultAccount` account to verify +the instruction handler works as intended. ```typescript -describe("owner-check", () => { - ... 
- it("Secure withdraw, expect error", async () => { - try { - const tx = await program.methods - .secureWithdraw() - .accounts({ - vault: vaultClone.publicKey, - tokenAccount: tokenPDA, - withdrawDestination: withdrawDestinationFake, - authority: walletFake.publicKey, - }) - .transaction() - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - } catch (err) { - expect(err) - console.log(err) - } - }) - - it("Secure withdraw", async () => { - await spl.mintTo( - connection, - wallet.payer, - mint, - tokenPDA, - wallet.payer, - 100 - ) - - await program.methods +describe("Owner Check", () => { + ... + it("fails secure withdraw with incorrect authority", async () => { + try { + const transaction = await program.methods + .secureWithdraw() + .accounts({ + vault: vaultCloneAccount.publicKey, + tokenAccount: tokenPDA, + withdrawDestination: unauthorizedWithdrawDestination, + authority: unauthorizedWallet.publicKey, + }) + .transaction(); + + await anchor.web3.sendAndConfirmTransaction(connection, transaction, [ + unauthorizedWallet, + ]); + throw new Error("Expected transaction to fail, but it succeeded"); + } catch (error) { + expect(error).to.be.an("error"); + console.log("Error message:", error.message); + } + }); + + it("performs secure withdraw successfully", async () => { + try { + await mintTo( + connection, + walletAuthority.payer, + tokenMint, + tokenPDA, + walletAuthority.payer, + INITIAL_TOKEN_AMOUNT + ); + + await program.methods .secureWithdraw() .accounts({ - vault: vault.publicKey, - tokenAccount: tokenPDA, - withdrawDestination: withdrawDestination, - authority: wallet.publicKey, + vault: vaultAccount.publicKey, + tokenAccount: tokenPDA, + withdrawDestination: authorizedWithdrawDestination, + authority: walletAuthority.publicKey, }) - .rpc() + .rpc(); - const balance = await connection.getTokenAccountBalance(tokenPDA) - expect(balance.value.uiAmount).to.eq(0) - }) + const tokenAccountInfo = await getAccount(connection, tokenPDA); + expect(Number(tokenAccountInfo.amount)).to.equal(0); + } catch (error) { + console.error("Secure withdraw failed:", error); + throw error; + } + }); }) ``` -Run `anchor test` to see that the transaction using the `vaultClone` account -will now return an Anchor Error while the transaction using the `vault` account -completes successfully. +Running `anchor test` will show that the transaction using the +`vaultCloneAccount` account fails, while the transaction using the +`vaultAccount` account withdraws successfully. ```bash -'Program HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB invoke [1]', -'Program log: Instruction: SecureWithdraw', -'Program log: AnchorError caused by account: vault. Error Code: AccountOwnedByWrongProgram. Error Number: 3007. Error Message: The given account is owned by a different program than expected.', -'Program log: Left:', -'Program log: DUN7nniuatsMC7ReCh5eJRQExnutppN1tAfjfXFmGDq3', -'Program log: Right:', -'Program log: HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB', -'Program HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB consumed 5554 of 200000 compute units', -'Program HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB failed: custom program error: 0xbbf' +"Program 3uF3yaymq1YBmDDHpRPwifiaBf4eK8M2jLgaMcCTg9n9 invoke [1]", +"Program log: Instruction: SecureWithdraw", +"Program log: AnchorError caused by account: vault. Error Code: AccountOwnedByWrongProgram. Error Number: 3007. 
Error Message: The given account is owned by a different program than expected.", +"Program log: Left:", +"Program log: 2Gn5MFGMvRjd548z6vhreh84UiL7L5TFzV5kKGmk4Fga", +"Program log: Right:", +"Program log: 3uF3yaymq1YBmDDHpRPwifiaBf4eK8M2jLgaMcCTg9n9", +"Program 3uF3yaymq1YBmDDHpRPwifiaBf4eK8M2jLgaMcCTg9n9 consumed 4449 of 200000 compute units", +"Program 3uF3yaymq1YBmDDHpRPwifiaBf4eK8M2jLgaMcCTg9n9 failed: custom program error: 0xbbf" ``` -Here we see how using Anchor’s `Account<'info, T>` type can simplify the account -validation process to automate the ownership check. Additionally, note that -Anchor Errors can specify the account that causes the error (e.g. the third line -of the logs above say `AnchorError caused by account: vault`). This can be very -helpful when debugging. +Here we see how using Anchor's `Account<'info, T>` type simplifies the account +validation process by automating ownership checks. Additionally, Anchor errors +provide specific details, such as which account caused the error. For example, +the log indicates `AnchorError caused by account: vault`, which aids in +debugging. ```bash -✔ Secure withdraw, expect error (78ms) -✔ Secure withdraw (10063ms) +✔ fails secure withdraw with incorrect authority +✔ performs secure withdraw successfully (847ms) ``` -That’s all you need to ensure you check the owner on an account! Like some other -exploits, it’s fairly simple to avoid but very important. Be sure to always -think through which accounts should be owned by which programs and ensure that -you add appropriate validation. +Ensuring account ownership checks is critical to avoid security vulnerabilities. +This example demonstrates how simple it is to implement proper validation, but +it's vital to always verify which accounts are owned by specific programs. -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the repository](https://github.com/Unboxed-Software/solana-owner-checks/tree/solution). +If you'd like to review the final solution code, it's available on the +[`solution` branch of the repository](https://github.com/solana-developers/owner-checks/tree/solution). ## Challenge -Just as with other lessons in this unit, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. +As with other lessons in this unit, practice preventing security exploits by +auditing your own or other programs. -Take some time to review at least one program and ensure that proper owner -checks are performed on the accounts passed into each instruction. +Take time to review at least one program to confirm that ownership checks are +properly enforced on all accounts passed into each instruction handler. -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. +If you find a bug or exploit in another program, notify the developer. If you +find one in your own program, patch it immediately. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=e3069010-3038-4984-b9d3-2dc6585147b1)! 
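
One habit that helps during such an audit: remember that the expected owner is
not always your own program. Accounts such as SPL token accounts must be owned
by the Token program before you trust their bytes. Below is a minimal
native-Rust sketch of that kind of check, assuming the `anchor_spl` crate is
available; the helper name `assert_token_program_owner` is ours, for
illustration only:

```rust
use anchor_lang::prelude::*;
use anchor_lang::solana_program::program_error::ProgramError;

// Sketch: refuse to treat `account` as a token account unless it is
// actually owned by the SPL Token program.
pub fn assert_token_program_owner(account: &AccountInfo<'_>) -> Result<()> {
    if *account.owner != anchor_spl::token::ID {
        return Err(ProgramError::IncorrectProgramId.into());
    }
    Ok(())
}
```
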
diff --git a/content/courses/program-security/pda-sharing.md b/content/courses/program-security/pda-sharing.md index 3e9c77201..7b70de4a7 100644 --- a/content/courses/program-security/pda-sharing.md +++ b/content/courses/program-security/pda-sharing.md @@ -3,7 +3,7 @@ title: PDA Sharing objectives: - Explain the security risks associated with PDA sharing - Derive PDAs that have discrete authority domains - - Use Anchor’s `seeds` and `bump` constraints to validate PDA accounts + - Use Anchor's `seeds` and `bump` constraints to validate PDA accounts description: "Understand the potential problems of reusing PDAs by using user and domain specific PDAs." @@ -15,7 +15,7 @@ description: possibility of users accessing data and funds that don't belong to them - Prevent the same PDA from being used for multiple accounts by using seeds that are user and/or domain-specific -- Use Anchor’s `seeds` and `bump` constraints to validate that a PDA is derived +- Use Anchor's `seeds` and `bump` constraints to validate that a PDA is derived using the expected seeds and bump ## Lesson @@ -26,25 +26,25 @@ a global PDA to represent the program. However, this opens up the possibility of account validation passing but a user being able to access funds, transfers, or data not belonging to them. -### Insecure global PDA +### Insecure Global PDA In the example below, the `authority` of the `vault` account is a PDA derived using the `mint` address stored on the `pool` account. This PDA is passed into -the instruction as the `authority` account to sign for the transfer tokens from -the `vault` to the `withdraw_destination`. +the instruction handler as the `authority` account to sign for the transfer of +tokens from the `vault` to the `withdraw_destination`. Using the `mint` address as a seed to derive the PDA to sign for the `vault` is insecure because multiple `pool` accounts could be created for the same `vault` -token account, but a different `withdraw_destination`. By using the `mint` as a -seed derive the PDA to sign for token transfers, any `pool` account could sign -for the transfer of tokens from a `vault` token account to an arbitrary -`withdraw_destination`. +token account, but with different `withdraw_destination` accounts. By using the +`mint` as a `seed` to derive the PDA for signing token transfers, any `pool` +account could sign for the transfer of tokens from a `vault` token account to an +arbitrary `withdraw_destination`. 
```rust
use anchor_lang::prelude::*;
use anchor_spl::token::{self, Token, TokenAccount};

-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
+declare_id!("ABQaKhtpYQUUgZ9m2sAY7ZHxWv6KyNdhUJW8Dh8NQbkf");

#[program]
pub mod pda_sharing_insecure {
@@ -53,7 +53,7 @@ pub mod pda_sharing_insecure {
    pub fn withdraw_tokens(ctx: Context<WithdrawTokens>) -> Result<()> {
        let amount = ctx.accounts.vault.amount;
        let seeds = &[ctx.accounts.pool.mint.as_ref(), &[ctx.accounts.pool.bump]];
-        token::transfer(ctx.accounts.transfer_ctx().with_signer(&[seeds]), amount)
+        token::transfer(get_transfer_ctx(&ctx.accounts).with_signer(&[seeds]), amount)
    }
}

@@ -63,28 +63,31 @@
pub struct WithdrawTokens<'info> {
    pool: Account<'info, TokenPool>,
    vault: Account<'info, TokenAccount>,
    withdraw_destination: Account<'info, TokenAccount>,
-    authority: AccountInfo<'info>,
+    /// CHECK: This is the PDA that signs for the transfer
+    authority: UncheckedAccount<'info>,
    token_program: Program<'info, Token>,
}

-impl<'info> WithdrawTokens<'info> {
-    pub fn transfer_ctx(&self) -> CpiContext<'_, '_, '_, 'info, token::Transfer<'info>> {
-        let program = self.token_program.to_account_info();
-        let accounts = token::Transfer {
-            from: self.vault.to_account_info(),
-            to: self.withdraw_destination.to_account_info(),
-            authority: self.authority.to_account_info(),
-        };
-        CpiContext::new(program, accounts)
-    }
+pub fn get_transfer_ctx<'accounts, 'remaining, 'cpi_code, 'info>(
+    accounts: &'accounts WithdrawTokens<'info>,
+) -> CpiContext<'accounts, 'remaining, 'cpi_code, 'info, token::Transfer<'info>> {
+    CpiContext::new(
+        accounts.token_program.to_account_info(),
+        token::Transfer {
+            from: accounts.vault.to_account_info(),
+            to: accounts.withdraw_destination.to_account_info(),
+            authority: accounts.authority.to_account_info(),
+        },
+    )
}

#[account]
+#[derive(InitSpace)]
pub struct TokenPool {
-    vault: Pubkey,
-    mint: Pubkey,
-    withdraw_destination: Pubkey,
-    bump: u8,
+    pub vault: Pubkey,
+    pub mint: Pubkey,
+    pub withdraw_destination: Pubkey,
+    pub bump: u8,
}
```

@@ -93,7 +96,7 @@ pub struct TokenPool {

One approach to create an account-specific PDA is to use the
`withdraw_destination` as a seed to derive the PDA used as the authority of the
`vault` token account. This ensures the PDA signing for the CPI in the
-`withdraw_tokens` instruction is derived using the intended
+`withdraw_tokens` instruction handler is derived using the intended
`withdraw_destination` token account. In other words, tokens from a `vault`
token account can only be withdrawn to the `withdraw_destination` that was
originally initialized with the `pool` account.
@@ -114,7 +117,7 @@ pub mod pda_sharing_secure { ctx.accounts.pool.withdraw_destination.as_ref(), &[ctx.accounts.pool.bump], ]; - token::transfer(ctx.accounts.transfer_ctx().with_signer(&[seeds]), amount) + token::transfer(get_transfer_ctx(&ctx.accounts).with_signer(&[seeds]), amount) } } @@ -124,53 +127,57 @@ pub struct WithdrawTokens<'info> { pool: Account<'info, TokenPool>, vault: Account<'info, TokenAccount>, withdraw_destination: Account<'info, TokenAccount>, - authority: AccountInfo<'info>, + /// CHECK: This is the PDA that signs for the transfer + authority: UncheckedAccount<'info>, token_program: Program<'info, Token>, } -impl<'info> WithdrawTokens<'info> { - pub fn transfer_ctx(&self) -> CpiContext<'_, '_, '_, 'info, token::Transfer<'info>> { - let program = self.token_program.to_account_info(); - let accounts = token::Transfer { - from: self.vault.to_account_info(), - to: self.withdraw_destination.to_account_info(), - authority: self.authority.to_account_info(), - }; - CpiContext::new(program, accounts) - } +pub fn get_transfer_ctx<'accounts, 'remaining, 'cpi_code, 'info>( + accounts: &'accounts WithdrawTokens<'info>, +) -> CpiContext<'accounts, 'remaining, 'cpi_code, 'info, token::Transfer<'info>> { + CpiContext::new( + accounts.token_program.to_account_info(), + token::Transfer { + from: accounts.vault.to_account_info(), + to: accounts.withdraw_destination.to_account_info(), + authority: accounts.authority.to_account_info(), + }, + ) } #[account] +#[derive(InitSpace)] pub struct TokenPool { - vault: Pubkey, - mint: Pubkey, - withdraw_destination: Pubkey, - bump: u8, + pub vault: Pubkey, + pub mint: Pubkey, + pub withdraw_destination: Pubkey, + pub bump: u8, } ``` -### Anchor’s `seeds` and `bump` constraints +### Anchor's seeds and bump Constraints PDAs can be used as both the address of an account and allow programs to sign for the PDAs they own. The example below uses a PDA derived using the `withdraw_destination` as both -the address of the `pool` account and owner of the `vault` token account. This -means that only the `pool` account associated with correct `vault` and -`withdraw_destination` can be used in the `withdraw_tokens` instruction. +the address of the `pool` account and the owner of the `vault` token account. +This means that only the `pool` account associated with the correct `vault` and +`withdraw_destination` can be used in the `withdraw_tokens` instruction handler. -You can use Anchor’s `seeds` and `bump` constraints with the `#[account(...)]` +You can use Anchor's `seeds` and `bump` constraints with the +[`#[account(...)]`](https://www.anchor-lang.com/docs/account-constraints) attribute to validate the `pool` account PDA. Anchor derives a PDA using the -`seeds` and `bump` specified and compare against the account passed into the -instruction as the `pool` account. The `has_one` constraint is used to further -ensure that only the correct accounts stored on the `pool` account are passed -into the instruction. +`seeds` and `bump` specified and compares it against the account passed into the +instruction handler as the `pool` account. The `has_one` constraint is used to +further ensure that only the correct accounts stored on the `pool` account are +passed into the instruction handler. 
```rust use anchor_lang::prelude::*; use anchor_spl::token::{self, Token, TokenAccount}; -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); +declare_id!("ABQaKhtpYQUUgZ9m2sAY7ZHxWv6KyNdhUJW8Dh8NQbkf"); #[program] pub mod pda_sharing_recommended { @@ -182,154 +189,161 @@ pub mod pda_sharing_recommended { ctx.accounts.pool.withdraw_destination.as_ref(), &[ctx.accounts.pool.bump], ]; - token::transfer(ctx.accounts.transfer_ctx().with_signer(&[seeds]), amount) + token::transfer(get_transfer_ctx(&ctx.accounts).with_signer(&[seeds]), amount) } } #[derive(Accounts)] pub struct WithdrawTokens<'info> { #[account( - has_one = vault, - has_one = withdraw_destination, - seeds = [withdraw_destination.key().as_ref()], - bump = pool.bump, - )] + seeds = [withdraw_destination.key().as_ref()], + bump = pool.bump, + has_one = vault, + has_one = withdraw_destination, + )] pool: Account<'info, TokenPool>, + #[account(mut)] vault: Account<'info, TokenAccount>, + #[account(mut)] withdraw_destination: Account<'info, TokenAccount>, token_program: Program<'info, Token>, } -impl<'info> WithdrawTokens<'info> { - pub fn transfer_ctx(&self) -> CpiContext<'_, '_, '_, 'info, token::Transfer<'info>> { - let program = self.token_program.to_account_info(); - let accounts = token::Transfer { - from: self.vault.to_account_info(), - to: self.withdraw_destination.to_account_info(), - authority: self.pool.to_account_info(), - }; - CpiContext::new(program, accounts) - } +pub fn get_transfer_ctx<'accounts, 'remaining, 'cpi_code, 'info>( + accounts: &'accounts WithdrawTokens<'info>, +) -> CpiContext<'accounts, 'remaining, 'cpi_code, 'info, token::Transfer<'info>> { + CpiContext::new( + accounts.token_program.to_account_info(), + token::Transfer { + from: accounts.vault.to_account_info(), + to: accounts.withdraw_destination.to_account_info(), + authority: accounts.pool.to_account_info(), + }, + ) } #[account] +#[derive(InitSpace)] pub struct TokenPool { - vault: Pubkey, - mint: Pubkey, - withdraw_destination: Pubkey, - bump: u8, + pub vault: Pubkey, + pub mint: Pubkey, + pub withdraw_destination: Pubkey, + pub bump: u8, } ``` ## Lab -Let’s practice by creating a simple program to demonstrate how a PDA sharing can -allow an attacker to withdraw tokens that don’t belong to them. this lab expands -on the examples above by including the instructions to initialize the required -program accounts. +Let's practice by creating a simple program to demonstrate how PDA sharing can +allow an attacker to withdraw tokens that don't belong to them. This lab expands +on the examples above by including the instruction handlers to initialize the +required program accounts. -#### 1. Starter +### 1. Starter -To get started, download the starter code on the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-pda-sharing/tree/starter). -The starter code includes a program with two instructions and the boilerplate -setup for the test file. +To get started, download the starter code on the +[`starter` branch of this repository](https://github.com/solana-developers/pda-sharing/tree/starter). +The starter code includes a program with two instruction handlers and the +boilerplate setup for the test file. -The `initialize_pool` instruction initializes a new `TokenPool` that stores a -`vault`, `mint`, `withdraw_destination`, and `bump`. The `vault` is a token -account where the authority is set as a PDA derived using the `mint` address. 
+The `initialize_pool` instruction handler initializes a new `TokenPool` that
+stores a `vault`, `mint`, `withdraw_destination`, and `bump`. The `vault` is a
+token account where the authority is set as a PDA derived using the `mint`
+address.

-The `withdraw_insecure` instruction will transfer tokens in the `vault` token
-account to a `withdraw_destination` token account.
+The `withdraw_insecure` instruction handler will transfer tokens in the `vault`
+token account to a `withdraw_destination` token account.

However, as written, the seeds used for signing are not specific to the vault's
-withdraw destination, thus opening up the program to security exploits. Take a
+withdrawal destination, thus opening up the program to security exploits. Take a
minute to familiarize yourself with the code before continuing.

-#### 2. Test `withdraw_insecure` instruction
+### 2. Test withdraw_insecure Instruction Handler

-The test file includes the code to invoke the `initialize_pool` instruction and
-then mint 100 tokens to the `vault` token account. It also includes a test to
-invoke the `withdraw_insecure` using the intended `withdraw_destination`. This
-shows that the instructions can be used as intended.
+The test file includes the code to invoke the `initialize_pool` instruction
+handler and then mint 100 tokens to the `vault` token account. It also includes
+a test to invoke the `withdraw_insecure` using the intended
+`withdraw_destination`. This shows that the instruction handlers can be used as
+intended.

-After that, there are two more tests to show how the instructions are vulnerable
-to exploit.
+After that, there are two more tests to show how the instruction handlers are
+vulnerable to exploit.

-The first test invokes the `initialize_pool` instruction to create a "fake"
-`pool` account using the same `vault` token account, but a different
+The first test invokes the `initialize_pool` instruction handler to create a
+"fake" `pool` account using the same `vault` token account, but a different
`withdraw_destination`. The second test withdraws from this pool, stealing funds
from the vault.
```typescript
-it("Insecure initialize allows pool to be initialized with wrong vault", async () => {
-  await program.methods
-    .initializePool(authInsecureBump)
-    .accounts({
-      pool: poolInsecureFake.publicKey,
-      mint: mint,
-      vault: vaultInsecure.address,
-      withdrawDestination: withdrawDestinationFake,
-      payer: walletFake.publicKey,
-    })
-    .signers([walletFake, poolInsecureFake])
-    .rpc();
-
-  await new Promise(x => setTimeout(x, 1000));
-
-  await spl.mintTo(
-    connection,
-    wallet.payer,
-    mint,
-    vaultInsecure.address,
-    wallet.payer,
-    100,
-  );
-
-  const account = await spl.getAccount(connection, vaultInsecure.address);
-  expect(Number(account.amount)).to.equal(100);
+it("allows insecure initialization with incorrect vault", async () => {
+  try {
+    await program.methods
+      .initializePool(insecureAuthorityBump)
+      .accounts({
+        pool: insecurePoolFake.publicKey,
+        mint: tokenMint,
+        vault: insecureVault.address,
+        withdrawDestination: fakeWithdrawDestination,
+      })
+      .signers([insecurePoolFake])
+      .rpc();
+
+    await mintTo(
+      connection,
+      wallet.payer,
+      tokenMint,
+      insecureVault.address,
+      wallet.payer,
+      INITIAL_MINT_AMOUNT,
+    );
+
+    const vaultAccount = await getAccount(connection, insecureVault.address);
+    expect(Number(vaultAccount.amount)).to.equal(INITIAL_MINT_AMOUNT);
+  } catch (error) {
+    throw new Error(`Test failed: ${error.message}`);
+  }
});

-it("Insecure withdraw allows stealing from vault", async () => {
-  await program.methods
-    .withdrawInsecure()
-    .accounts({
-      pool: poolInsecureFake.publicKey,
-      vault: vaultInsecure.address,
-      withdrawDestination: withdrawDestinationFake,
-      authority: authInsecure,
-      signer: walletFake.publicKey,
-    })
-    .signers([walletFake])
-    .rpc();
-
-  const account = await spl.getAccount(connection, vaultInsecure.address);
-  expect(Number(account.amount)).to.equal(0);
+it("allows insecure withdrawal to incorrect destination", async () => {
+  try {
+    await program.methods
+      .withdrawInsecure()
+      .accounts({
+        pool: insecurePoolFake.publicKey,
+        authority: insecureAuthority,
+      })
+      .rpc();
+
+    const vaultAccount = await getAccount(connection, insecureVault.address);
+    expect(Number(vaultAccount.amount)).to.equal(0);
+  } catch (error) {
+    throw new Error(`Test failed: ${error.message}`);
+  }
});
```

Run `anchor test` to see that the transactions complete successfully and the
-`withdraw_insecure` instruction allows the `vault` token account to be drained
-to a fake withdraw destination stored on the fake `pool` account.
+`withdraw_insecure` instruction handler allows the `vault` token account to be
+drained to a fake withdraw destination stored on the fake `pool` account.

-#### 3. Add `initialize_pool_secure` instruction
+### 3. Add initialize_pool_secure Instruction Handler

-Now let's add a new instruction to the program for securely initializing a pool.
+Now let's add a new instruction handler to the program for securely initializing
+a pool.

-This new `initialize_pool_secure` instruction will initialize a `pool` account
-as a PDA derived using the `withdraw_destination`. It will also initialize a
-`vault` token account with the authority set as the `pool` PDA.
+This new `initialize_pool_secure` instruction handler will initialize a `pool`
+account as a PDA derived using the `withdraw_destination`. It will also
+initialize a `vault` token account with the authority set as the `pool` PDA.
```rust
pub fn initialize_pool_secure(ctx: Context<InitializePoolSecure>) -> Result<()> {
    ctx.accounts.pool.vault = ctx.accounts.vault.key();
    ctx.accounts.pool.mint = ctx.accounts.mint.key();
    ctx.accounts.pool.withdraw_destination = ctx.accounts.withdraw_destination.key();
-    ctx.accounts.pool.bump = *ctx.bumps.get("pool").unwrap();
+    ctx.accounts.pool.bump = ctx.bumps.pool;
    Ok(())
}

-
...

#[derive(Accounts)]
@@ -337,7 +351,7 @@ pub struct InitializePoolSecure<'info> {
    #[account(
        init,
        payer = payer,
-        space = 8 + 32 + 32 + 32 + 1,
+        space = DISCRIMINATOR_SIZE + TokenPool::INIT_SPACE,
        seeds = [withdraw_destination.key().as_ref()],
        bump
    )]
@@ -359,22 +373,25 @@
}
```

-#### 4. Add `withdraw_secure` instruction
+### 4. Add withdraw_secure Instruction Handler

-Next, add a `withdraw_secure` instruction. This instruction will withdraw tokens
-from the `vault` token account to the `withdraw_destination`. The `pool` account
-is validated using the `seeds` and `bump` constraints to ensure the correct PDA
-account is provided. The `has_one` constraints check that the correct `vault`
-and `withdraw_destination` token accounts are provided.
+Next, add a `withdraw_secure` instruction handler. This instruction handler will
+withdraw tokens from the `vault` token account to the `withdraw_destination`.
+The `pool` account is validated using the `seeds` and `bump` constraints to
+ensure the correct PDA account is provided. The `has_one` constraints check that
+the correct `vault` and `withdraw_destination` token accounts are provided.

```rust
pub fn withdraw_secure(ctx: Context<WithdrawTokensSecure>) -> Result<()> {
    let amount = ctx.accounts.vault.amount;
    let seeds = &[
-        ctx.accounts.pool.withdraw_destination.as_ref(),
-        &[ctx.accounts.pool.bump],
+        ctx.accounts.pool.withdraw_destination.as_ref(),
+        &[ctx.accounts.pool.bump],
    ];
-    token::transfer(ctx.accounts.transfer_ctx().with_signer(&[seeds]), amount)
+    token::transfer(
+        get_secure_transfer_ctx(&ctx.accounts).with_signer(&[seeds]),
+        amount,
+    )
}

...

@@ -387,83 +404,87 @@ pub struct WithdrawTokensSecure<'info> {
        seeds = [withdraw_destination.key().as_ref()],
        bump = pool.bump,
    )]
-    pool: Account<'info, TokenPool>,
+    pub pool: Account<'info, TokenPool>,
    #[account(mut)]
-    vault: Account<'info, TokenAccount>,
+    pub vault: Account<'info, TokenAccount>,
    #[account(mut)]
-    withdraw_destination: Account<'info, TokenAccount>,
-    token_program: Program<'info, Token>,
+    pub withdraw_destination: Account<'info, TokenAccount>,
+    pub token_program: Program<'info, Token>,
}

-impl<'info> WithdrawTokensSecure<'info> {
-    pub fn transfer_ctx(&self) -> CpiContext<'_, '_, '_, 'info, token::Transfer<'info>> {
-        let program = self.token_program.to_account_info();
-        let accounts = token::Transfer {
-            from: self.vault.to_account_info(),
-            to: self.withdraw_destination.to_account_info(),
-            authority: self.pool.to_account_info(),
-        };
-        CpiContext::new(program, accounts)
-    }
+pub fn get_secure_transfer_ctx<'accounts, 'remaining, 'cpi_code, 'info>(
+    accounts: &'accounts WithdrawTokensSecure<'info>,
+) -> CpiContext<'accounts, 'remaining, 'cpi_code, 'info, token::Transfer<'info>> {
+    CpiContext::new(
+        accounts.token_program.to_account_info(),
+        token::Transfer {
+            from: accounts.vault.to_account_info(),
+            to: accounts.withdraw_destination.to_account_info(),
+            authority: accounts.pool.to_account_info(),
+        },
+    )
}
```

-#### 5. Test `withdraw_secure` instruction
+### 5. Test withdraw_secure Instruction Handler

-Finally, return to the test file to test the `withdraw_secure` instruction and
-show that by narrowing the scope of our PDA signing authority, we've removed the
-vulnerability.
+Finally, return to the test file to test the `withdraw_secure` instruction
+handler and show that by narrowing the scope of our PDA signing authority, we've
+removed the vulnerability.

Before we write a test showing the vulnerability has been patched, let's write a
-test that simply shows that the initialization and withdraw instructions work as
-expected:
+test that simply shows that the initialization and withdraw instruction handlers
+work as expected:

```typescript
-it("Secure pool initialization and withdraw works", async () => {
-  const withdrawDestinationAccount = await getAccount(
-    provider.connection,
-    withdrawDestination,
-  );
-
-  await program.methods
-    .initializePoolSecure()
-    .accounts({
-      pool: authSecure,
-      mint: mint,
-      vault: vaultRecommended.publicKey,
-      withdrawDestination: withdrawDestination,
-    })
-    .signers([vaultRecommended])
-    .rpc();
-
-  await new Promise(x => setTimeout(x, 1000));
-
-  await spl.mintTo(
-    connection,
-    wallet.payer,
-    mint,
-    vaultRecommended.publicKey,
-    wallet.payer,
-    100,
-  );
-
-  await program.methods
-    .withdrawSecure()
-    .accounts({
-      pool: authSecure,
-      vault: vaultRecommended.publicKey,
-      withdrawDestination: withdrawDestination,
-    })
-    .rpc();
-
-  const afterAccount = await getAccount(
-    provider.connection,
-    withdrawDestination,
-  );
-
-  expect(
-    Number(afterAccount.amount) - Number(withdrawDestinationAccount.amount),
-  ).to.equal(100);
+it("performs secure pool initialization and withdrawal correctly", async () => {
+  try {
+    const initialWithdrawBalance = await getAccount(
+      connection,
+      withdrawDestination,
+    );
+
+    await program.methods
+      .initializePoolSecure()
+      .accounts({
+        mint: tokenMint,
+        vault: recommendedVault.publicKey,
+        withdrawDestination: withdrawDestination,
+      })
+      .signers([recommendedVault])
+      .rpc();
+
+    await new Promise(resolve => setTimeout(resolve, 1000));
+
+    await mintTo(
+      connection,
+      wallet.payer,
+      tokenMint,
+      recommendedVault.publicKey,
+      wallet.payer,
+      INITIAL_MINT_AMOUNT,
+    );
+
+    await program.methods
+      .withdrawSecure()
+      .accounts({
+        vault: recommendedVault.publicKey,
+        withdrawDestination: withdrawDestination,
+      })
+      .rpc();
+
+    const finalWithdrawBalance = await getAccount(
+      connection,
+      withdrawDestination,
+    );
+
+    expect(
+      Number(finalWithdrawBalance.amount) -
+        Number(initialWithdrawBalance.amount),
+    ).to.equal(INITIAL_MINT_AMOUNT);
+  } catch (error) {
+    throw new Error(`Test failed: ${error.message}`);
+  }
});
```

@@ -476,68 +497,64 @@
Add a test that shows you can't call `withdraw_secure` with the wrong
withdrawal destination. It can use the pool and vault created in the previous
test.
```typescript
-it("Secure withdraw doesn't allow withdraw to wrong destination", async () => {
+it("prevents secure withdrawal to incorrect destination", async () => {
  try {
    await program.methods
      .withdrawSecure()
      .accounts({
-        pool: authSecure,
-        vault: vaultRecommended.publicKey,
-        withdrawDestination: withdrawDestinationFake,
+        vault: recommendedVault.publicKey,
+        withdrawDestination: fakeWithdrawDestination,
      })
-      .signers([walletFake])
+      .signers([recommendedVault])
      .rpc();
-    assert.fail("expected error");
+    throw new Error("Expected an error but withdrawal succeeded");
  } catch (error) {
-    console.log(error.message);
-    expect(error);
+    // Re-throw the sentinel so a successful withdrawal still fails the test
+    if (error.message === "Expected an error but withdrawal succeeded") {
+      throw error;
+    }
+    expect(error).to.exist;
+    console.log("Error message:", error.message);
  }
});
```

Lastly, since the `pool` account is a PDA derived using the
-`withdraw_destination` token account, we can’t create a fake `pool` account
+`withdraw_destination` token account, we can't create a fake `pool` account
using the same PDA. Add one more test showing that the new
-`initialize_pool_secure` instruction won't let an attacker put in the wrong
-vault.
+`initialize_pool_secure` instruction handler won't let an attacker put in the
+wrong vault.

```typescript
-it("Secure pool initialization doesn't allow wrong vault", async () => {
+it("prevents secure pool initialization with incorrect vault", async () => {
  try {
    await program.methods
      .initializePoolSecure()
      .accounts({
-        pool: authSecure,
-        mint: mint,
-        vault: vaultInsecure.address,
+        mint: tokenMint,
+        vault: insecureVault.address,
        withdrawDestination: withdrawDestination,
      })
-      .signers([vaultRecommended])
+      .signers([recommendedVault])
      .rpc();
-    assert.fail("expected error");
+    throw new Error("Expected an error but initialization succeeded");
  } catch (error) {
-    console.log(error.message);
-    expect(error);
+    // Re-throw the sentinel so a successful initialization still fails the test
+    if (error.message === "Expected an error but initialization succeeded") {
+      throw error;
+    }
+    expect(error).to.exist;
+    console.log("Error message:", error.message);
  }
});
```

-Run `anchor test` and to see that the new instructions don't allow an attacker
-to withdraw from a vault that isn't theirs.
-
-```
-  pda-sharing
-    ✔ Initialize Pool Insecure (981ms)
-    ✔ Withdraw (470ms)
-    ✔ Insecure initialize allows pool to be initialized with wrong vault (10983ms)
-    ✔ Insecure withdraw allows stealing from vault (492ms)
-    ✔ Secure pool initialization and withdraw works (2502ms)
-unknown signer: ARjxAsEPj6YsAPKaBfd1AzUHbNPtAeUsqusAmBchQTfV
-    ✔ Secure withdraw doesn't allow withdraw to wrong destination
-unknown signer: GJcHJLot3whbY1aC9PtCsBYk5jWoZnZRJPy5uUwzktAY
-    ✔ Secure pool initialization doesn't allow wrong vault
+Run `anchor test` to see that the new instruction handlers don't allow an
+attacker to withdraw from a vault that isn't theirs.
+
+```bash
+  PDA sharing
+    ✔ allows insecure initialization with incorrect vault (852ms)
+    ✔ allows insecure withdrawal to incorrect destination (425ms)
+    ✔ performs secure pool initialization and withdrawal correctly (2150ms)
+Error message: unknown signer: BpaG3NbsvLUqyFLZo9kWPwda3iPM8abJYkBfwBsASsgi
+    ✔ prevents secure withdrawal to incorrect destination
+Error message: unknown signer: BpaG3NbsvLUqyFLZo9kWPwda3iPM8abJYkBfwBsASsgi
+    ✔ prevents secure pool initialization with incorrect vault
```

And that's it! Unlike some of the other security vulnerabilities we've
@@ -546,8 +563,7 @@
particular Anchor type. You'll need to think through the architecture of your
program and ensure that you aren't sharing PDAs across different domains.
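Before moving on, it may help to see the insecure and secure derivations side by
side. The helper below is a sketch added for illustration; its name and
signature are assumptions, not part of the lab program:

```rust
use anchor_lang::prelude::*;

// Sketch only: contrasts the two PDA derivations from this lesson.
// `find_program_address` is the standard Solana derivation; the function
// itself and its names are illustrative.
pub fn derive_pool_authorities(
    program_id: &Pubkey,
    mint: &Pubkey,
    withdraw_destination: &Pubkey,
) -> (Pubkey, Pubkey) {
    // Insecure: seeded only by the mint, so every pool sharing this mint
    // derives the SAME signer and can drain any vault tied to it.
    let (shared_authority, _bump) =
        Pubkey::find_program_address(&[mint.as_ref()], program_id);

    // Secure: seeded by the withdraw destination, so each pool's signer
    // is valid only for transfers toward its own destination.
    let (scoped_authority, _bump) =
        Pubkey::find_program_address(&[withdraw_destination.as_ref()], program_id);

    (shared_authority, scoped_authority)
}
```

If two conceptually distinct pools can derive the same authority in a helper
like this, the seeds are too broad; including the user- or domain-specific key
in the seed list is what separates the authority domains.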
If you want to take a look at the final solution code you can find it on the
-`solution` branch of
-[the same repository](https://github.com/Unboxed-Software/solana-pda-sharing/tree/solution).
+[`solution` branch of the same repository](https://github.com/solana-developers/pda-sharing/tree/solution).

## Challenge

@@ -562,6 +578,7 @@
Remember, if you find a bug or exploit in somebody else's program, please alert
them! If you find one in your own program, be sure to patch it right away.

+
Push your code to GitHub and
[tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=5744079f-9473-4485-9a14-9be4d31b40d1)!

diff --git a/content/courses/program-security/reinitialization-attacks.md b/content/courses/program-security/reinitialization-attacks.md
index ae148a74a..715e6cfe8 100644
--- a/content/courses/program-security/reinitialization-attacks.md
+++ b/content/courses/program-security/reinitialization-attacks.md
@@ -2,10 +2,10 @@
title: Reinitialization Attacks
objectives:
  - Explain security risks associated with a reinitialization vulnerability
-  - Use long-form Rust check if an account has already been initialized
-  - Using Anchor’s `init` constraint to initialize accounts, which automatically
+  - Use Anchor's `init` constraint to initialize accounts, which automatically
    sets an account discriminator that is checked to prevent the reinitialization
    of an account
+  - Use native Rust to check if an account has already been initialized
description:
  "Understand the security risks of account reinitialization attacks being used to
  overwrite data, and how to prevent them."

## Summary

-- Use an account discriminator or initialization flag to check whether an
-  account has already been initialized to prevent an account from being
-  reinitialized and overriding existing account data.
-- To prevent account reinitialization in plain Rust, initialize accounts with an
-  `is_initialized` flag and check if it has already been set to true when
-  initializing an account
+- **Prevent Account Reinitialization:** Use an account discriminator or
+  initialization flag to prevent an account from being reinitialized and
+  overwriting existing data.
+- **Anchor Approach:** Simplify this by using Anchor's `init` constraint to
+  create an account via a CPI to the system program, automatically setting its
+  discriminator.
+- **Native Rust Approach:** In native Rust, set an `is_initialized` flag during
+  account initialization and check it before reinitializing:
+
  ```rust
  if account.is_initialized {
      return Err(ProgramError::AccountAlreadyInitialized.into());
  }
  ```
-- To simplify this, use Anchor’s `init` constraint to create an account via a
-  CPI to the system program and sets its discriminator

## Lesson

-Initialization refers to setting the data of a new account for the first time.
-When initializing a new account, you should implement a way to check if the
-account has already been initialized. Without an appropriate check, an existing
-account could be reinitialized and have existing data overwritten.
-
-Note that initializing an account and creating an account are two separate
-instructions. Creating an account requires invoking the `create_account`
-instruction on the System Program which specifies the space required for the
-account, the rent in lamports allocated to the account, and the program owner of
-the account. Initialization is an instruction that sets the data of a newly
-created account. Creating and initializing an account can be combined into a
-single transaction.
+Initialization sets the data of a new account for the first time. It's essential
+to check if an account has already been initialized to prevent overwriting
+existing data. Note that creating and initializing an account are separate
+actions. Creating an account involves invoking the `create_account` instruction
+handler on the System Program, which allocates space, funds the account with
+lamports for rent, and assigns the program owner. Initialization sets the
+account data. These steps can be combined into a single transaction.

-#### Missing Initialization Check
+### Missing Initialization Check

-In the example below, there are no checks on the `user` account. The
-`initialize` instruction deserializes the data of the `user` account as a `User`
-account type, sets the `authority` field, and serializes the updated account
-data to the `user` account.
-
-Without checks on the `user` account, the same account could be passed into the
-`initialize` instruction a second time by another party to overwrite the
-existing `authority` stored on the account data.
+In the example below, there's no check on the `user` account. The `initialize`
+instruction handler sets the `authority` field on the `User` account type and
+serializes the data. Without checks, an attacker could reinitialize the account,
+overwriting the existing `authority`.

```rust
use anchor_lang::prelude::*;
-use borsh::{BorshDeserialize, BorshSerialize};

declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");

#[program]
-pub mod initialization_insecure {
    use super::*;

    pub fn initialize(ctx: Context<Initialize>) -> Result<()> {
-        let mut user = User::try_from_slice(&ctx.accounts.user.data.borrow()).unwrap();
-        user.authority = ctx.accounts.authority.key();
-        user.serialize(&mut *ctx.accounts.user.data.borrow_mut())?;
+        ctx.accounts.user.authority = ctx.accounts.authority.key();
        Ok(())
    }
}

#[derive(Accounts)]
pub struct Initialize<'info> {
-    #[account(mut)]
-    user: AccountInfo<'info>,
    #[account(mut)]
-    authority: Signer<'info>,
+    pub user: Account<'info, User>,
+    #[account(mut)]
+    pub authority: Signer<'info>,
+    pub system_program: Program<'info, System>,
}

-#[derive(BorshSerialize, BorshDeserialize)]
+#[account]
+#[derive(InitSpace)]
pub struct User {
-    authority: Pubkey,
+    pub authority: Pubkey,
}
```

-#### Add `is_initialized` check
+### Add is_initialized Check

-One approach to fix this is to add an additional `is_initialized` field to the
-`User` account type and use it as a flag to check if an account has already been
-initialized.
+To fix this, add an `is_initialized` field to the `User` account type and check
+it before reinitializing:

```rust
if user.is_initialized {
@@ -97,15 +87,13 @@
}
```

-By including a check within the `initialize` instruction, the `user` account
-would only be initialized if the `is_initialized` field has not yet been set to
-true. If the `is_initialized` field was already set, the transaction would fail,
-thereby avoiding the scenario where an attacker could replace the account
-authority with their own public key.
+This ensures the `user` account is only initialized once. If `is_initialized` is
+true, the transaction fails, preventing an attacker from changing the account
+authority.
```rust
use anchor_lang::prelude::*;
-use borsh::{BorshDeserialize, BorshSerialize};
+use anchor_lang::solana_program::program_error::ProgramError;

declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");

@@ -114,140 +102,137 @@ pub mod initialization_secure {
    use super::*;

    pub fn initialize(ctx: Context<Initialize>) -> Result<()> {
-        let mut user = User::try_from_slice(&ctx.accounts.user.data.borrow()).unwrap();
+        let user = &mut ctx.accounts.user;
+
        if user.is_initialized {
            return Err(ProgramError::AccountAlreadyInitialized.into());
        }
-        user.authority = ctx.accounts.authority.key();
        user.is_initialized = true;
+        user.authority = ctx.accounts.authority.key();

-        user.serialize(&mut *ctx.accounts.user.data.borrow_mut())?;
        Ok(())
    }
}

#[derive(Accounts)]
pub struct Initialize<'info> {
-    #[account(mut)]
-    user: AccountInfo<'info>,
    #[account(mut)]
-    authority: Signer<'info>,
+    pub user: Account<'info, User>,
+    #[account(mut)]
+    pub authority: Signer<'info>,
+    pub system_program: Program<'info, System>,
}

-#[derive(BorshSerialize, BorshDeserialize)]
+#[account]
+#[derive(InitSpace)]
pub struct User {
-    is_initialized: bool,
-    authority: Pubkey,
+    pub is_initialized: bool,
+    pub authority: Pubkey,
}
```

-#### Use Anchor’s `init` constraint
-
-Anchor provides an `init` constraint that can be used with the `#[account(...)]`
-attribute to initialize an account. The `init` constraint creates the account
-via a CPI to the system program and sets the account discriminator.
+### Use Anchor's init Constraint

-The `init` constraint must be used in combination with the `payer` and `space`
-constraints. The `payer` specifies the account paying for the initialization of
-the new account. The `space` specifies the amount of space the new account
-requires, which determines the amount of lamports that must be allocated to the
-account. The first 8 bytes of data is set as a discriminator that Anchor
-automatically adds to identify the account type.
-
-Most importantly for this lesson, the `init` constraint ensures that this
-instruction can only be called once per account, so you can set the initial
-state of the account in the instruction logic and not have to worry about an
-attacker trying to reinitialize the account.
+[Anchor's `init` constraint](https://www.anchor-lang.com/docs/account-constraints),
+used with the `#[account(...)]` attribute, initializes an account, sets the
+account discriminator, and ensures that the instruction handler can only be
+called once per account. The `init` constraint must be used with `payer` and
+`space` constraints to specify the account paying for initialization and the
+amount of space required.
```rust
use anchor_lang::prelude::*;

declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");

+const DISCRIMINATOR_SIZE: usize = 8;
+
#[program]
pub mod initialization_recommended {
    use super::*;

-    pub fn initialize(_ctx: Context<Initialize>) -> Result<()> {
+    pub fn initialize(ctx: Context<Initialize>) -> Result<()> {
        msg!("GM");
+        ctx.accounts.user.authority = ctx.accounts.authority.key();
        Ok(())
    }
}

#[derive(Accounts)]
pub struct Initialize<'info> {
-    #[account(init, payer = authority, space = 8+32)]
-    user: Account<'info, User>,
+    #[account(
+        init,
+        payer = authority,
+        space = DISCRIMINATOR_SIZE + User::INIT_SPACE
+    )]
+    pub user: Account<'info, User>,
    #[account(mut)]
-    authority: Signer<'info>,
-    system_program: Program<'info, System>,
+    pub authority: Signer<'info>,
+    pub system_program: Program<'info, System>,
}

#[account]
+#[derive(InitSpace)]
pub struct User {
-    authority: Pubkey,
+    pub authority: Pubkey,
}
```

-#### Anchor’s `init_if_needed` constraint
+#### Anchor's init_if_needed Constraint

-It’s worth noting that Anchor has an `init_if_needed` constraint. This
-constraint should be used very cautiously. In fact, it is blocked behind a
-feature flag so that you are forced to be intentional about using it.
+ 

-The `init_if_needed` constraint does the same thing as the `init` constraint,
-only if the account has already been initialized the instruction will still run.
-
-Given this, it’s \***\*\*\*\***extremely\***\*\*\*\*** important that when you
-use this constraint you include checks to avoid resetting the account to its
-initial state.
-
-For example, if the account stores an `authority` field that gets set in the
-instruction using the `init_if_needed` constraint, you need checks that ensure
-that no attacker could call the instruction after it has already been
-initialized and have the `authority` field set again.
+[Anchor's `init_if_needed` constraint](https://www.anchor-lang.com/docs/account-constraints),
+guarded by a feature flag, should be used with caution. It initializes an
+account only if it hasn't been initialized yet. If the account is already
+initialized, the instruction handler will still execute, so it's **extremely**
+important to include checks in your instruction handler to prevent resetting
+the account to its initial state.
+ 

-In most cases, it’s safer to have a separate instruction for initializing
-account data.
+For example, if the `authority` field is set in the instruction handler, ensure
+that your instruction handler includes checks to prevent an attacker from
+reinitializing it after it's already been set. Typically, it's safer to have a
+separate instruction handler for initializing account data.

## Lab

-For this lab we’ll create a simple program that does nothing but initialize
-accounts. We’ll include two instructions:
+In this lab, we'll create a simple Solana program with two instruction handlers:

-- `insecure_initialization` - initializes an account that can be reinitialized
-- `recommended_initialization` - initialize an account using Anchor’s `init`
-  constraint
+- `insecure_initialization` - Initializes an account without checks, allowing
+  reinitialization.
+- `recommended_initialization` - Initializes an account using Anchor's `init`
+  constraint, preventing reinitialization.

-#### 1. Starter
+### 1. Starter

-To get started, download the starter code from the `starter` branch of
-[this repository](https://github.com/Unboxed-Software/solana-reinitialization-attacks/tree/starter).
-The starter code includes a program with one instruction and the boilerplate
-setup for the test file.
+To get started, download the starter code from the
+[`starter` branch of this repository](https://github.com/solana-developers/reinitialization-attacks/tree/starter).
+The starter code includes a program with one instruction handler and the
+boilerplate setup for the test file.

-The `insecure_initialization` instruction initializes a new `user` account that
-stores the public key of an `authority`. In this instruction, the account is
-expected to be allocated client-side, then passed into the program instruction.
-Once passed into the program, there are no checks to see if the `user` account's
-initial state has already been set. This means the same account can be passed in
-a second time to override the `authority` stored on an existing `user` account.
+The `insecure_initialization` instruction handler initializes a new `user`
+account that stores the public key of an `authority`. The account is expected to
+be allocated client-side and then passed into the program instruction. However,
+there are no checks to verify if the `user` account's initial state has already
+been set. This means the same account can be passed in a second time, allowing
+the `authority` to be overwritten.

```rust
use anchor_lang::prelude::*;
-use borsh::{BorshDeserialize, BorshSerialize};

-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
+declare_id!("HLhxJzFYjtXCET4HxnSzv27SpXg16FWNDi2LvrNmSvzH");

#[program]
pub mod initialization {
    use super::*;

    pub fn insecure_initialization(ctx: Context<Unchecked>) -> Result<()> {
-        let mut user = User::try_from_slice(&ctx.accounts.user.data.borrow()).unwrap();
-        user.authority = ctx.accounts.authority.key();
-        user.serialize(&mut *ctx.accounts.user.data.borrow_mut())?;
+        let user = &mut ctx.accounts.user;
+        let mut user_data = User::try_from_slice(&user.data.borrow())?;
+        user_data.authority = ctx.accounts.authority.key();
+        user_data.serialize(&mut *user.data.borrow_mut())?;
        Ok(())
    }
}

@@ -255,119 +240,153 @@ pub mod initialization {
#[derive(Accounts)]
pub struct Unchecked<'info> {
    #[account(mut)]
-    /// CHECK:
-    user: UncheckedAccount<'info>,
-    authority: Signer<'info>,
+    /// CHECK: This account will be initialized in the instruction
+    pub user: UncheckedAccount<'info>,
+    pub authority: Signer<'info>,
}

-#[derive(BorshSerialize, BorshDeserialize)]
+#[account]
+#[derive(InitSpace)]
pub struct User {
-    authority: Pubkey,
+    pub authority: Pubkey,
}
```

-#### 2. Test `insecure_initialization` instruction
+### 2. Test insecure_initialization Instruction Handler

The test file includes the setup to create an account by invoking the system
-program and then invokes the `insecure_initialization` instruction twice using
-the same account.
+program and then invokes the `insecure_initialization` instruction handler twice
+using the same account.

-Since there are no checks the verify that the account data has not already been
-initialized, the `insecure_initialization` instruction will complete
-successfully both times, despite the second invocation providing a _different_
-authority account.
+Since there are no checks in the `insecure_initialization` instruction handler
+to verify that the account data has not already been initialized, this
+instruction handler will execute successfully both times, even with a
+_different_ authority account.
```typescript
import * as anchor from "@coral-xyz/anchor";
import { Program } from "@coral-xyz/anchor";
-import { expect } from "chai";
import { Initialization } from "../target/types/initialization";
+import {
+  Keypair,
+  LAMPORTS_PER_SOL,
+  SystemProgram,
+  Transaction,
+  SendTransactionError,
+} from "@solana/web3.js";
+import { expect } from "chai";
+import { airdropIfRequired } from "@solana-developers/helpers";

-describe("initialization", () => {
+describe("Initialization", () => {
  const provider = anchor.AnchorProvider.env();
  anchor.setProvider(provider);

  const program = anchor.workspace.Initialization as Program<Initialization>;
-  const wallet = anchor.workspace.Initialization.provider.wallet;
-  const walletTwo = anchor.web3.Keypair.generate();
+  const walletAuthority = provider.wallet as anchor.Wallet;
+  const secondWallet = Keypair.generate();
+
+  const insecureUserAccount = Keypair.generate();
+  const recommendedUserAccount = Keypair.generate();

-  const userInsecure = anchor.web3.Keypair.generate();
-  const userRecommended = anchor.web3.Keypair.generate();
+  const ACCOUNT_SPACE = 32;
+  const AIRDROP_AMOUNT = 1 * LAMPORTS_PER_SOL;
+  const MINIMUM_BALANCE_FOR_RENT_EXEMPTION = 1 * LAMPORTS_PER_SOL;

  before(async () => {
-    const tx = new anchor.web3.Transaction().add(
-      anchor.web3.SystemProgram.createAccount({
-        fromPubkey: wallet.publicKey,
-        newAccountPubkey: userInsecure.publicKey,
-        space: 32,
-        lamports:
-          await provider.connection.getMinimumBalanceForRentExemption(32),
+    try {
+      const rentExemptionAmount =
+        await provider.connection.getMinimumBalanceForRentExemption(
+          ACCOUNT_SPACE,
+        );
+
+      const createAccountInstruction = SystemProgram.createAccount({
+        fromPubkey: walletAuthority.publicKey,
+        newAccountPubkey: insecureUserAccount.publicKey,
+        space: ACCOUNT_SPACE,
+        lamports: rentExemptionAmount,
        programId: program.programId,
-      }),
-    );
-
-    await anchor.web3.sendAndConfirmTransaction(provider.connection, tx, [
-      wallet.payer,
-      userInsecure,
-    ]);
-
-    await provider.connection.confirmTransaction(
-      await provider.connection.requestAirdrop(
-        walletTwo.publicKey,
-        1 * anchor.web3.LAMPORTS_PER_SOL,
-      ),
-      "confirmed",
-    );
+      });
+
+      const transaction = new Transaction().add(createAccountInstruction);
+
+      await anchor.web3.sendAndConfirmTransaction(
+        provider.connection,
+        transaction,
+        [walletAuthority.payer, insecureUserAccount],
+      );
+
+      await airdropIfRequired(
+        provider.connection,
+        secondWallet.publicKey,
+        AIRDROP_AMOUNT,
+        MINIMUM_BALANCE_FOR_RENT_EXEMPTION,
+      );
+    } catch (error) {
+      console.error("Setup failed:", error);
+      throw error;
+    }
  });

-  it("Insecure init", async () => {
-    await program.methods
-      .insecureInitialization()
-      .accounts({
-        user: userInsecure.publicKey,
-      })
-      .rpc();
+  it("performs insecure initialization", async () => {
+    try {
+      await program.methods
+        .insecureInitialization()
+        .accounts({
+          user: insecureUserAccount.publicKey,
+          authority: walletAuthority.publicKey,
+        })
+        .signers([walletAuthority.payer])
+        .rpc();
+    } catch (error) {
+      console.error("Insecure initialization failed:", error);
+      throw error;
+    }
  });

-  it("Re-invoke insecure init with different auth", async () => {
-    const tx = await program.methods
-      .insecureInitialization()
-      .accounts({
-        user: userInsecure.publicKey,
-        authority: walletTwo.publicKey,
-      })
-      .transaction();
-    await anchor.web3.sendAndConfirmTransaction(provider.connection, tx, [
-      walletTwo,
-    ]);
+  it("re-invokes insecure initialization with different authority", async () => {
+    try {
+      const transaction =
await program.methods
+        .insecureInitialization()
+        .accounts({
+          user: insecureUserAccount.publicKey,
+          authority: secondWallet.publicKey,
+        })
+        .signers([secondWallet])
+        .transaction();
+
+      await anchor.web3.sendAndConfirmTransaction(
+        provider.connection,
+        transaction,
+        [secondWallet],
+      );
+    } catch (error) {
+      console.error("Re-invocation of insecure initialization failed:", error);
+      throw error;
+    }
  });
});
```

-Run `anchor test` to see that both transactions will complete successfully.
+Run `anchor test` to verify that the `insecure_initialization` instruction
+handler executes successfully in both invocations.

```bash
-initialization
-  ✔ Insecure init (478ms)
-  ✔ Re-invoke insecure init with different auth (464ms)
+Initialization
+  ✔ performs insecure initialization (420ms)
+  ✔ re-invokes insecure initialization with different authority (419ms)
```

-#### 3. Add `recommended_initialization` instruction
+### 3. Add recommended_initialization Instruction Handler

-Let's create a new instruction called `recommended_initialization` that fixes
-this problem. Unlike the previous insecure instruction, this instruction should
+Now, let's create a new instruction handler called `recommended_initialization`
+that addresses the issue. Unlike the insecure instruction handler, this one will
handle both the creation and initialization of the user's account using Anchor's
`init` constraint.

-This constraint instructs the program to create the account via a CPI to the
-system program, so the account no longer needs to be created client-side. The
-constraint also sets the account discriminator. Your instruction logic can then
-set the account's initial state.
-
-By doing this, you ensure that any subsequent invocation of the same instruction
-with the same user account will fail rather than reset the account's initial
-state.
+This constraint ensures the account is created via a CPI to the system program,
+and the discriminator is set. This way, any subsequent invocation with the same
+user account will fail, preventing reinitialization.

```rust
use anchor_lang::prelude::*;

@@ -378,7 +397,7 @@ declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");

#[program]
pub mod initialization {
    use super::*;
-    ...
+     ...
    pub fn recommended_initialization(ctx: Context<Checked>) -> Result<()> {
        ctx.accounts.user.authority = ctx.accounts.authority.key();
        Ok(())
@@ -387,92 +406,118 @@ pub mod initialization {

#[derive(Accounts)]
pub struct Checked<'info> {
-    #[account(init, payer = authority, space = 8+32)]
+    #[account(
+        init,
+        payer = authority,
+        space = DISCRIMINATOR_SIZE + User::INIT_SPACE
+    )]
    user: Account<'info, User>,
    #[account(mut)]
    authority: Signer<'info>,
    system_program: Program<'info, System>,
}
+
+#[account]
+#[derive(InitSpace)]
+pub struct User {
+    pub authority: Pubkey,
+}
```

-#### 4. Test `recommended_initialization` instruction
+### 4. Test recommended_initialization Instruction Handler

-To test the `recommended_initialization` instruction, we’ll invoke the
-instruction twice just like before. This time, we expect the transaction to fail
-when we try to initialize the same account a second time.
+To test the `recommended_initialization` instruction handler, invoke it twice as
+before. This time, the transaction should fail when attempting to initialize the
+same account a second time.

```typescript
-describe("initialization", () => {
+describe("Initialization", () => {
  ...
- it("Recommended init", async () => { - await program.methods - .recommendedInitialization() - .accounts({ - user: userRecommended.publicKey, - }) - .signers([userRecommended]) - .rpc() - }) - - it("Re-invoke recommended init with different auth, expect error", async () => { + it("performs recommended initialization", async () => { + try { + await program.methods + .recommendedInitialization() + .accounts({ + user: recommendedUserAccount.publicKey, + }) + .signers([recommendedUserAccount]) + .rpc(); + } catch (error) { + console.error("Recommended initialization failed:", error); + throw error; + } + }); + + it("fails to re-invoke recommended initialization with different authority", async () => { try { - // Add your test here. - const tx = await program.methods + const transaction = await program.methods .recommendedInitialization() .accounts({ - user: userRecommended.publicKey, - authority: walletTwo.publicKey, + user: recommendedUserAccount.publicKey, + authority: secondWallet.publicKey, }) - .transaction() - await anchor.web3.sendAndConfirmTransaction(provider.connection, tx, [ - walletTwo, - userRecommended, - ]) - } catch (err) { - expect(err) - console.log(err) + .transaction(); + + await anchor.web3.sendAndConfirmTransaction( + provider.connection, + transaction, + [secondWallet, recommendedUserAccount], + { commitment: "confirmed" } + ); + + throw new Error("Re-invocation succeeded unexpectedly"); + } catch (error) { + if (error.message === "Re-invocation succeeded unexpectedly") { + throw error; + } + + if (error instanceof SendTransactionError) { + console.log("Transaction failed as expected"); + } else { + console.error("Unexpected error:", error); + } + console.log(error) + expect(error).to.exist; } - }) -}) + }); +}); ``` -Run `anchor test` and to see that the second transaction which tries to -initialize the same account twice will now return an error stating the account -address is already in use. +Run `anchor test` to confirm that the second transaction fails with an error +indicating the account is already in use. ```bash -'Program CpozUgSwe9FPLy9BLNhY2LTGqLUk1nirUkMMA5RmDw6t invoke [1]', +'Program HLhxJzFYjtXCET4HxnSzv27SpXg16FWNDi2LvrNmSvzH invoke [1]', 'Program log: Instruction: RecommendedInitialization', 'Program 11111111111111111111111111111111 invoke [2]', -'Allocate: account Address { address: EMvbwzrs4VTR7G1sNUJuQtvRX1EuvLhqs4PFqrtDcCGV, base: None } already in use', +'Allocate: account Address { address: FcW7tG71GKuRgxEbgFuuNQNV3HVSMmVyKATo74iCK4yi, base: None } already in use', 'Program 11111111111111111111111111111111 failed: custom program error: 0x0', -'Program CpozUgSwe9FPLy9BLNhY2LTGqLUk1nirUkMMA5RmDw6t consumed 4018 of 200000 compute units', -'Program CpozUgSwe9FPLy9BLNhY2LTGqLUk1nirUkMMA5RmDw6t failed: custom program error: 0x0' +'Program HLhxJzFYjtXCET4HxnSzv27SpXg16FWNDi2LvrNmSvzH consumed 3330 of 200000 compute units', +'Program HLhxJzFYjtXCET4HxnSzv27SpXg16FWNDi2LvrNmSvzH failed: custom program error: 0x0' ``` -If you use Anchor's `init` constraint, that's usually all you need to protect -against reinitialization attacks! Remember, just because the fix for these -security exploits is simple doesn't mean it isn't important. Every time your -initialize an account, make sure you're either using the `init` constraint or -have some other check in place to avoid resetting an existing account's initial -state. +Using Anchor's `init` constraint is usually sufficient to protect against +reinitialization attacks. 
While the fix for these security exploits is +straightforward, it is crucial. Every time you initialize an account, ensure +that you're either using the `init` constraint or implementing another check to +prevent resetting an existing account's initial state. -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[this repository](https://github.com/Unboxed-Software/solana-reinitialization-attacks/tree/solution). +For the final solution code, refer to the +[`solution` branch of this repository](https://github.com/solana-developers/reinitialization-attacks/tree/solution). ## Challenge -Just as with other lessons in this unit, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. +Your challenge is to audit your own or other programs to practice avoiding this +security exploit. -Take some time to review at least one program and ensure that instructions are -properly protected against reinitialization attacks. +Take some time to review at least one program and confirm that instruction +handlers are adequately protected against reinitialization attacks. -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. +If you find a bug or exploit in another program, alert the developer. If you +find one in your own program, patch it immediately. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=652c68aa-18d9-464c-9522-e531fd8738d5)! diff --git a/content/courses/program-security/security-intro.md b/content/courses/program-security/security-intro.md index bc4a3849a..f3a17c022 100644 --- a/content/courses/program-security/security-intro.md +++ b/content/courses/program-security/security-intro.md @@ -1,48 +1,46 @@ --- -title: How to approach the Program Security module +title: How to Approach the Program Security Course objectives: - - understand how to approach the Program Security Module + - Understand how to approach the Program Security Course description: "Learn how to think intelligently about security for your onchain programs, - whether developing in Anchor or in Native Rust." + whether developing in Anchor or Native Rust." --- ## Overview -The goal of this course is to expose you to a wide variety of common security -exploits that are unique to Solana development. We’ve heavily modeled this -course off Coral's -[Sealevel Attacks](https://github.com/coral-xyz/sealevel-attacks) repo. +This course aims to introduce you to a range of common security exploits unique +to Solana development. We've modeled this course heavily on Coral's +[Sealevel Attacks](https://github.com/coral-xyz/sealevel-attacks) repository. -We've covered program security in our +Program security is covered in our [Anchor](/content/courses/onchain-development.md) and -[native Rust](/content/courses/native-onchain-development.md) development -courses because we wanted to make sure that anyone deploying programs to Mainnet -right out of the gates had at least a basic understanding of security. And if -that’s you then hopefully the fundamental principles you learned in that lesson -have led to you avoiding some common Solana exploits on your own. - -This unit is meant to build on top of that lesson with two goals in mind: - -1. 
To expand your awareness of the Solana programming model and the areas where
-   you need to focus to close up security loopholes in your programs
-2. To show you the array of tools provided by Anchor to help you keep your
-   programs secure
-
-If you went through the Basic Security lesson, the first few lessons should seem
-familiar. They largely cover topics we discussed in that lesson. After that,
-some of the attacks may seem new. We encourage you to go through all of them.
-
-The last thing to call out is that there are a lot more lessons in this course
-than in prior course. And the lessons aren't dependent on each other in the same
-ways, so you can bounce around a bit more if you'd like.
-
-Originally, we were going to have more, shorter lessons in this course. And
-while they might be shorter than average, they aren't much shorter. It turns out
-that even though each of the security vulnerabilities is "simple," there's a lot
-to discuss. So each lesson may have a little bit less prose and more code
-snippets, making it easy for readers to choose how in depth to go. But,
-ultimately, each lesson is still as fully-fledged as they have been before so
-that you can really get a solid grasp on each of the discussed security risks.
-
-As always, we appreciate feedback. Good luck digging in!
+[Native Rust](/content/courses/native-onchain-development.md) development
+courses to ensure that anyone deploying programs to Mainnet has at least a basic
+understanding of security. Those courses should help you avoid some common
+Solana exploits on your own.
+
+This course builds on those courses with two main goals:
+
+1. Expand your awareness of the Solana programming model and highlight areas
+   where you need to focus to close security loopholes.
+2. Introduce you to the tools provided by Anchor to help keep your programs
+   secure, and show native Rust users how to implement similar techniques on
+   their own.
+
+The first few lessons in this course cover topics similar to those in the
+[Anchor course](/content/courses/onchain-development/intro-to-anchor.md) or the
+[Program Security lesson](/content/courses/native-onchain-development/program-security.md)
+in the [Native Course](/content/courses/native-onchain-development.md), but as
+you progress, you'll encounter new types of attacks. We encourage you to explore
+all of them.
+
+Unlike the lessons in other courses, which are meant to be taken in order, you
+are welcome to explore these lessons in whatever order suits you best.
+
+Even though each security vulnerability may seem "simple," there's a lot to
+discuss. These lessons contain less prose and more code, ensuring you gain a
+solid understanding of the security risks discussed.
+
+As always, your feedback is appreciated. Good luck as you delve into the course!
diff --git a/content/courses/program-security/signer-auth.md b/content/courses/program-security/signer-auth.md
index ab0d6a7be..3257e3bdc 100644
--- a/content/courses/program-security/signer-auth.md
+++ b/content/courses/program-security/signer-auth.md
@@ -1,61 +1,61 @@
---
title: Signer Authorization
objectives:
-  - Explain the security risks associated with not performing appropriate signer
-    checks
-  - Implement signer checks using long-form Rust
-  - Implement signer checks using Anchor's `Signer` type
-  - Implement signer checks using Anchor's `#[account(signer)]` constraint
+  - Explain the security risks of not performing appropriate signer checks
+ - Implement signer checks using native Rust + - Implement signer checks using Anchor's `Signer` type + - Implement signer checks using Anchor's `#[account(signer)]` constraint description: - "Ensure instructions are only ran by authorized accounts by implmementing - Signer checks." + "Ensure instructions are only executed by authorized accounts by implementing + signer checks." --- ## Summary -- Use **Signer Checks** to verify that specific accounts have signed a - transaction. Without appropriate signer checks, accounts may be able to - execute instructions they shouldn’t be authorized to perform. -- To implement a signer check in Rust, simply check that an account’s - `is_signer` property is `true` +- **Signer Checks** are essential to verify that specific accounts have signed a + transaction. Without proper signer checks, unauthorized accounts may execute + instructions they shouldn't be allowed to perform. +- In Anchor, you can use the `Signer` account type in your account validation + struct to automatically perform a signer check on a given account. +- Anchor also provides the + [`#[account(signer)]`](https://www.anchor-lang.com/docs/account-constraints) + constraint, which automatically verifies that a specified account has signed + the transaction. +- In native Rust, implement a signer check by verifying that an account's + `is_signer` property is `true`: + ```rust if !ctx.accounts.authority.is_signer { - return Err(ProgramError::MissingRequiredSignature.into()); + return Err(ProgramError::MissingRequiredSignature.into()); } ``` -- In Anchor, you can use the **`Signer`** account type in your account - validation struct to have Anchor automatically perform a signer check on a - given account -- Anchor also has an account constraint that will automatically verify that a - given account has signed a transaction ## Lesson -Signer checks are used to verify that a given account’s owner has authorized a -transaction. Without a signer check, operations whose execution should be -limited to only specific accounts can potentially be performed by any account. -In the worst case scenario, this could result in wallets being completely -drained by attackers passing in whatever account they want to an instruction. - -#### Missing Signer Check +**Signer checks** ensure that only authorized accounts can execute specific +instructions. Without these checks, any account might perform operations that +should be restricted, potentially leading to severe security vulnerabilities, +such as unauthorized access and control over program accounts. -The example below shows an oversimplified version of an instruction that updates -the `authority` field stored on a program account. +### Missing Signer Check -Notice that the `authority` field on the `UpdateAuthority` account validation -struct is of type `AccountInfo`. In Anchor, the `AccountInfo` account type -indicates that no checks are performed on the account prior to instruction -execution. +Below is an oversimplified instruction handler that updates the `authority` +field on a program account. Notice that the `authority` field in the +`UpdateAuthority` account validation struct is of type `UncheckedAccount`. In +Anchor, the +[`UncheckedAccount`](https://docs.rs/anchor-lang/latest/anchor_lang/accounts/unchecked_account/struct.UncheckedAccount.html) +type indicates that no checks are performed on the account before executing the +instruction handler. 
-Although the `has_one` constraint is used to validate the `authority` account -passed into the instruction matches the `authority` field stored on the `vault` -account, there is no check to verify the `authority` account authorized the +Although the `has_one` constraint ensures that the `authority` account passed to +the instruction handler matches the `authority` field on the `vault` account, +there is no verification that the `authority` account actually authorized the transaction. -This means an attacker can simply pass in the public key of the `authority` -account and their own public key as the `new_authority` account to reassign -themselves as the new authority of the `vault` account. At that point, they can -interact with the program as the new authority. +This omission allows an attacker to pass in the `authority` account's public key +and their own public key as the `new_authority` account, effectively reassigning +themselves as the new authority of the `vault` account. Once they have control, +they can interact with the program as the new authority. ```rust use anchor_lang::prelude::*; @@ -79,8 +79,10 @@ pub struct UpdateAuthority<'info> { has_one = authority )] pub vault: Account<'info, Vault>, - pub new_authority: AccountInfo<'info>, - pub authority: AccountInfo<'info>, + /// CHECK: This account will not be checked by Anchor + pub new_authority: UncheckedAccount<'info>, + /// CHECK: This account will not be checked by Anchor + pub authority: UncheckedAccount<'info>, } #[account] @@ -90,23 +92,20 @@ pub struct Vault { } ``` -#### Add signer authorization checks +### Adding Signer Authorization Checks -All you need to do to validate that the `authority` account signed is to add a -signer check within the instruction. That simply means checking that -`authority.is_signer` is `true`, and returning a `MissingRequiredSignature` -error if `false`. +To validate that the `authority` account signed the transaction, add a signer +check within the instruction handler: -```typescript +```rust if !ctx.accounts.authority.is_signer { return Err(ProgramError::MissingRequiredSignature.into()); } ``` -By adding a signer check, the instruction would only process if the account -passed in as the `authority` account also signed the transaction. If the -transaction was not signed by the account passed in as the `authority` account, -then the transaction would fail. +By adding this check, the instruction handler will only proceed if the +`authority` account has signed the transaction. If the account is not signed, +the transaction will fail. ```rust use anchor_lang::prelude::*; @@ -134,8 +133,10 @@ pub struct UpdateAuthority<'info> { has_one = authority )] pub vault: Account<'info, Vault>, - pub new_authority: AccountInfo<'info>, - pub authority: AccountInfo<'info>, + /// CHECK: This account will not be checked by Anchor + pub new_authority: UncheckedAccount<'info>, + /// CHECK: This account will not be checked by Anchor + pub authority: UncheckedAccount<'info>, } #[account] @@ -145,20 +146,15 @@ pub struct Vault { } ``` -#### Use Anchor’s `Signer` account type - -However, putting this check into the instruction function muddles the separation -between account validation and instruction logic. +### Use Anchor's Signer Account Type -Fortunately, Anchor makes it easy to perform signer checks by providing the -`Signer` account type. 
Simply change the `authority` account’s type in the -account validation struct to be of type `Signer`, and Anchor will check at -runtime that the specified account is a signer on the transaction. This is the -approach we generally recommend since it allows you to separate the signer check -from instruction logic. - -In the example below, if the `authority` account does not sign the transaction, -then the transaction will fail before even reaching the instruction logic. +Incorporating the +[`signer`](https://docs.rs/anchor-lang/latest/anchor_lang/accounts/signer/struct.Signer.html) +check directly within the instruction handler logic can blur the separation +between account validation and instruction handler execution. To maintain this +separation, use Anchor's `Signer` account type. By changing the `authority` +account's type to `Signer` in the validation struct, Anchor automatically checks +at runtime that the specified account signed the transaction. ```rust use anchor_lang::prelude::*; @@ -182,7 +178,8 @@ pub struct UpdateAuthority<'info> { has_one = authority )] pub vault: Account<'info, Vault>, - pub new_authority: AccountInfo<'info>, + /// CHECK: This account will not be checked by Anchor + pub new_authority: UncheckedAccount<'info>, pub authority: Signer<'info>, } @@ -193,33 +190,27 @@ pub struct Vault { } ``` -Note that when you use the `Signer` type, no other ownership or type checks are + +When you use the `Signer` type, no other ownership or type checks are performed. + -#### Use Anchor’s `#[account(signer)]` constraint - -While in most cases, the `Signer` account type will suffice to ensure an account -has signed a transaction, the fact that no other ownership or type checks are -performed means that this account can’t really be used for anything else in the -instruction. - -This is where the `signer` _constraint_ comes in handy. The `#[account(signer)]` -constraint allows you to verify the account signed the transaction, while also -getting the benefits of using the `Account` type if you wanted access to it’s -underlying data as well. - -As an example of when this would be useful, imagine writing an instruction that -you expect to be invoked via CPI that expects one of the passed in accounts to -be both a **\*\***signer**\*\*** on the transaciton and a \***\*\*\*\*\*\***data -source\***\*\*\*\*\*\***. Using the `Signer` account type here removes the -automatic deserialization and type checking you would get with the `Account` -type. This is both inconvenient, as you need to manually deserialize the account -data in the instruction logic, and may make your program vulnerable by not -getting the ownership and type checking performed by the `Account` type. - -In the example below, you can safely write logic to interact with the data -stored in the `authority` account while also verifying that it signed the -transaction. +### Using Anchor's `#[account(signer)]` Constraint + +While the `Signer` account type is useful, it doesn't perform other ownership or +type checks, limiting its use in instruction handler logic. +[Anchor's `#[account(signer)]`](https://www.anchor-lang.com/docs/account-constraints) +constraint addresses this by verifying that the account signed the transaction +while allowing access to its underlying data. + +For example, if you expect an account to be both a signer and a data source, +using the `Signer` type would require manual deserialization, and you wouldn't +benefit from automatic ownership and type checking. 
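+A rough sketch of that limitation, using illustrative names: with only
+`Signer`, reading state off the signing account means deserializing it
+yourself, and Anchor performs no owner or discriminator check along the way.
+
+```rust
+use anchor_lang::prelude::*;
+
+pub fn read_auth_state(ctx: Context<ReadAuthState>) -> Result<()> {
+    // Manual deserialization: `Signer` proves authorization but exposes only
+    // raw account data, so no owner or type check happens here.
+    let info = ctx.accounts.authority.to_account_info();
+    let data = info.try_borrow_data()?;
+    let mut slice: &[u8] = &data;
+    let auth_state = AuthState::try_deserialize(&mut slice)?;
+    msg!("amount: {}", auth_state.amount);
+    Ok(())
+}
+
+#[derive(Accounts)]
+pub struct ReadAuthState<'info> {
+    pub authority: Signer<'info>,
+}
+
+#[account]
+pub struct AuthState {
+    pub amount: u64,
+}
+```
+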
Instead, the +`#[account(signer)]` constraint allows you to access the data and ensure the +account signed the transaction. + +In this example, you can safely interact with the data stored in the `authority` +account while ensuring that it signed the transaction. ```rust use anchor_lang::prelude::*; @@ -246,7 +237,8 @@ pub struct UpdateAuthority<'info> { has_one = authority )] pub vault: Account<'info, Vault>, - pub new_authority: AccountInfo<'info>, + /// CHECK: This account will not be checked by Anchor + pub new_authority: UncheckedAccount<'info>, #[account(signer)] pub authority: Account<'info, AuthState> } @@ -258,52 +250,53 @@ pub struct Vault { } #[account] pub struct AuthState{ - amount: u64, - num_depositors: u64, - num_vaults: u64 + amount: u64, + num_depositors: u64, + num_vaults: u64 } ``` ## Lab -Let’s practice by creating a simple program to demonstrate how a missing signer -check can allow an attacker to withdraw tokens that don’t belong to them. +In this lab, we'll create a simple program to demonstrate how a missing signer +check can allow an attacker to withdraw tokens that don't belong to them. This +program initializes a simplified token `vault` account and shows how the absence +of a signer check could result in the vault being drained. -This program initializes a simplified token “vault” account and demonstrates how -a missing signer check could allow the vault to be drained. +### 1. Starter -#### 1. Starter +To get started, download the starter code from the +[`starter` branch of this repository](https://github.com/solana-developers/signer-auth/tree/starter). +The starter code includes a program with two instruction handlers and the +boilerplate setup for the test file. -To get started, download the starter code from the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-signer-auth/tree/starter). The -starter code includes a program with two instructions and the boilerplate setup -for the test file. +The `initialize_vault` instruction handler sets up two new accounts: `Vault` and +`TokenAccount`. The `Vault` account is initialized using a Program Derived +Address (PDA) and stores the address of a token account and the vault's +authority. The `vault` PDA will be the authority of the token account, enabling +the program to sign off on token transfers. -The `initialize_vault` instruction initializes two new accounts: `Vault` and -`TokenAccount`. The `Vault` account will be initialized using a Program Derived -Address (PDA) and store the address of a token account and the authority of the -vault. The authority of the token account will be the `vault` PDA which enables -the program to sign for the transfer of tokens. +The `insecure_withdraw` instruction handler transfers tokens from the `vault` +account's token account to a `withdraw_destination` token account. However, the +`authority` account in the `InsecureWithdraw` struct is of type +`UncheckedAccount`, a wrapper around `AccountInfo` that explicitly indicates the +account is unchecked. -The `insecure_withdraw` instruction will transfer tokens in the `vault` -account’s token account to a `withdraw_destination` token account. However, the -`authority` account in the `InsecureWithdraw` struct has a type of -`UncheckedAccount`. This is a wrapper around `AccountInfo` to explicitly -indicate the account is unchecked. 
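+As an aside, the `vault` PDA used throughout these tests can be derived client
+side from the same seed the program uses; a sketch, assuming the starter
+code's program ID:
+
+```typescript
+import { PublicKey } from "@solana/web3.js";
+
+// Matches the program's `seeds = [b"vault"]` constraint.
+const programId = new PublicKey("FeKh59XMh6BcN6UdekHnaFHsNH9NVE121GgDzSyYPKKS");
+
+const [vaultPDA, vaultBump] = PublicKey.findProgramAddressSync(
+  [Buffer.from("vault")],
+  programId,
+);
+
+console.log(`Vault PDA: ${vaultPDA.toBase58()}, bump: ${vaultBump}`);
+```
+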
+Without a signer check, anyone can provide the public key of the `authority` +account that matches the `authority` stored on the `vault` account, and the +`insecure_withdraw` instruction handler will continue processing. -Without a signer check, anyone can simply provide the public key of the -`authority` account that matches `authority` stored on the `vault` account and -the `insecure_withdraw` instruction would continue to process. - -While this is somewhat contrived in that any DeFi program with a vault would be -more sophisticated than this, it will show how the lack of a signer check can -result in tokens being withdrawn by the wrong party. +Although this example is somewhat contrived, as any DeFi program with a vault +would be more sophisticated, it effectively illustrates how the lack of a signer +check can lead to unauthorized token withdrawals. ```rust use anchor_lang::prelude::*; use anchor_spl::token::{self, Mint, Token, TokenAccount}; -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); +declare_id!("FeKh59XMh6BcN6UdekHnaFHsNH9NVE121GgDzSyYPKKS"); + +pub const DISCRIMINATOR_SIZE: usize = 8; #[program] pub mod signer_authorization { @@ -318,7 +311,7 @@ pub mod signer_authorization { pub fn insecure_withdraw(ctx: Context) -> Result<()> { let amount = ctx.accounts.token_account.amount; - let seeds = &[b"vault".as_ref(), &[*ctx.bumps.get("vault").unwrap()]]; + let seeds = &[b"vault".as_ref(), &[ctx.bumps.vault]]; let signer = [&seeds[..]]; let cpi_ctx = CpiContext::new_with_signer( @@ -341,7 +334,7 @@ pub struct InitializeVault<'info> { #[account( init, payer = authority, - space = 8 + 32 + 32, + space = DISCRIMINATOR_SIZE + Vault::INIT_SPACE, seeds = [b"vault"], bump )] @@ -380,73 +373,91 @@ pub struct InsecureWithdraw<'info> { } #[account] +#[derive(Default, InitSpace)] pub struct Vault { token_account: Pubkey, authority: Pubkey, } ``` -#### 2. Test `insecure_withdraw` instruction +### 2. Test insecure_withdraw Instruction Handler -The test file includes the code to invoke the `initialize_vault` instruction -using `wallet` as the `authority` on the vault. The code then mints 100 tokens -to the `vault` token account. Theoretically, the `wallet` key should be the only -one that can withdraw the 100 tokens from the vault. +The test file includes code to invoke the `initialize_vault` instruction +handler, using `walletAuthority` as the `authority` on the vault. The code then +mints 100 tokens to the `vaultTokenAccount` token account. Ideally, only the +`walletAuthority` key should be able to withdraw these 100 tokens from the +vault. -Now, let’s add a test to invoke `insecure_withdraw` on the program to show that -the current version of the program allows a third party to in fact withdraw -those 100 tokens. +Next, we'll add a test to invoke `insecure_withdraw` on the program to +demonstrate that the current version allows a third party to withdraw those 100 +tokens. -In the test, we’ll still use the public key of `wallet` as the `authority` -account, but we’ll use a different keypair to sign and send the transaction. +In the test, we'll use the `walletAuthority` public key as the `authority` +account but sign and send the transaction with a different keypair. ```typescript -describe("signer-authorization", () => { +describe("Signer Authorization", () => { ... 
- it("Insecure withdraw", async () => { - const tx = await program.methods - .insecureWithdraw() - .accounts({ - vault: vaultPDA, - tokenAccount: tokenAccount.publicKey, - withdrawDestination: withdrawDestinationFake, - authority: wallet.publicKey, - }) - .transaction() - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - - const balance = await connection.getTokenAccountBalance( - tokenAccount.publicKey - ) - expect(balance.value.uiAmount).to.eq(0) - }) + it("performs insecure withdraw", async () => { + try { + const transaction = await program.methods + .insecureWithdraw() + .accounts({ + vault: vaultPDA, + tokenAccount: vaultTokenAccount.publicKey, + withdrawDestination: unauthorizedWithdrawDestination, + authority: walletAuthority.publicKey, + }) + .transaction(); + + await anchor.web3.sendAndConfirmTransaction(connection, transaction, [ + unauthorizedWallet, + ]); + + const tokenAccountInfo = await getAccount( + connection, + vaultTokenAccount.publicKey + ); + expect(Number(tokenAccountInfo.amount)).to.equal(0); + } catch (error) { + console.error("Insecure withdraw failed:", error); + throw error; + } + }); }) ``` -Run `anchor test` to see that both transactions will complete successfully. +Run `anchor test` to confirm that both transactions will be completed +successfully. ```bash -signer-authorization - ✔ Initialize Vault (810ms) - ✔ Insecure withdraw (405ms) +Signer Authorization + ✔ initializes vault and mints tokens (882ms) + ✔ performs insecure withdraw (435ms) ``` -Since there is no signer check for the `authority` account, the -`insecure_withdraw` instruction will transfer tokens from the `vault` token -account to the `withdrawDestinationFake` token account as long as the public key -of the`authority` account matches the public key stored on the authority field -of the `vault` account. Clearly, the `insecure_withdraw` instruction is as -insecure as the name suggests. - -#### 3. Add `secure_withdraw` instruction - -Let’s fix the problem in a new instruction called `secure_withdraw`. This -instruction will be identical to the `insecure_withdraw` instruction, except -we’ll use the `Signer` type in the Accounts struct to validate the `authority` -account in the `SecureWithdraw` struct. If the `authority` account is not a -signer on the transaction, then we expect the transaction to fail and return an -error. +The `insecure_withdraw` instruction handler demonstrates a security +vulnerability. Since there is no signer check for the `authority` account, this +handler will transfer tokens from the `vaultTokenAccount` to the +`unauthorizedWithdrawDestination`, as long as the public key of the `authority` +account matches the `walletAuthority.publicKey` stored in the `vault` account's +`authority` field. + +In the test, we use the `unauthorizedWallet` to sign the transaction, while +still specifying the `walletAuthority.publicKey` as the authority in the +instruction accounts. This mismatch between the signer and the specified +`authority` would normally cause a transaction to fail. However, due to the lack +of a proper signer check in the `insecure_withdraw` handler, the transaction +succeeds. + +### 3. Add secure_withdraw Instruction Handler + +To fix this issue, we'll create a new instruction handler called +`secure_withdraw`. This instruction handler will be identical to +`insecure_withdraw`, but we'll use the `Signer` type in the Accounts struct to +validate the authority account in the `SecureWithdraw` struct. 
If the +`authority` account isn't a signer on the transaction, the transaction should +fail with an error. ```rust use anchor_lang::prelude::*; @@ -461,7 +472,7 @@ pub mod signer_authorization { pub fn secure_withdraw(ctx: Context) -> Result<()> { let amount = ctx.accounts.token_account.amount; - let seeds = &[b"vault".as_ref(), &[*ctx.bumps.get("vault").unwrap()]]; + let seeds = &[b"vault".as_ref(), &[ctx.bumps.vault]]; let signer = [&seeds[..]]; let cpi_ctx = CpiContext::new_with_signer( @@ -497,73 +508,101 @@ pub struct SecureWithdraw<'info> { } ``` -#### 4. Test `secure_withdraw` instruction +### 4. Test secure_withdraw Instruction Handler + +With the new instruction handler in place, return to the test file to test the +`secureWithdraw` instruction handler. Invoke the `secureWithdraw` instruction +handler, using the `walletAuthority.publicKey` as the `authority` account, and +use the `unauthorizedWallet` keypair as the signer. Set the +`unauthorizedWithdrawDestination` as the withdraw destination. -With the instruction in place, return to the test file to test the -`secure_withdraw` instruction. Invoke the `secure_withdraw` instruction, again -using the public key of `wallet` as the `authority` account and the -`withdrawDestinationFake` keypair as the signer and withdraw destination. Since -the `authority` account is validated using the `Signer` type, we expect the -transaction to fail the signer check and return an error. +Since the `authority` account is validated using the `Signer` type, the +transaction should fail with a signature verification error. This is because the +`unauthorizedWallet` is attempting to sign the transaction, but it doesn't match +the `authority` specified in the instruction (which is +`walletAuthority.publicKey`). + +The test expects this transaction to fail, demonstrating that the secure +withdraw function properly validates the signer. If the transaction unexpectedly +succeeds, the test will throw an error indicating that the expected security +check did not occur. ```typescript -describe("signer-authorization", () => { +describe("Signer Authorization", () => { ... - it("Secure withdraw", async () => { + it("fails to perform secure withdraw with incorrect signer", async () => { try { - const tx = await program.methods + const transaction = await program.methods .secureWithdraw() .accounts({ vault: vaultPDA, - tokenAccount: tokenAccount.publicKey, - withdrawDestination: withdrawDestinationFake, - authority: wallet.publicKey, + tokenAccount: vaultTokenAccount.publicKey, + withdrawDestination: unauthorizedWithdrawDestination, + authority: walletAuthority.publicKey, }) - .transaction() - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - } catch (err) { - expect(err) - console.log(err) + .transaction(); + + await anchor.web3.sendAndConfirmTransaction(connection, transaction, [ + unauthorizedWallet, + ]); + throw new Error("Expected transaction to fail, but it succeeded"); + } catch (error) { + expect(error).to.be.an("error"); + console.log("Error message:", error.message); } - }) + }); }) ``` -Run `anchor test` to see that the transaction will now return a signature +Run `anchor test` to see that the transaction now returns a signature verification error. ```bash -Error: Signature verification failed +signer-authorization +Error message: Signature verification failed. +Missing signature for public key [`GprrWv9r8BMxQiWea9MrbCyK7ig7Mj8CcseEbJhDDZXM`]. + ✔ fails to perform secure withdraw with incorrect signer ``` -That’s it! 
This is a fairly simple thing to avoid, but incredibly important. -Make sure to always think through who should who should be authorizing -instructions and make sure that each is a signer on the transaction. +This example shows how important it is to think through who should authorize +instructions and ensure that each is a signer on the transaction. -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the repository](https://github.com/Unboxed-Software/solana-signer-auth/tree/solution). +To review the final solution code, you can find it on the +[`solution` branch of the repository](https://github.com/solana-developers/signer-auth/tree/solution). ## Challenge -At this point in the course, we hope you've started to work on programs and -projects outside the labs and Challenges provided in these lessons. For this and -the remainder of the lessons on security vulnerabilities, the Challenge for each -lesson will be to audit your own code for the security vulnerability discussed -in the lesson. +Now that you've worked through the labs and challenges in this course, it's time +to apply your knowledge in a practical setting. For this challenge and those +that follow on security vulnerabilities, audit your own programs for the +specific vulnerability discussed in each lesson. + +### Steps -Alternatively, you can find open source programs to audit. There are plenty of -programs you can look at. A good start if you don't mind diving into native Rust -would be the -[SPL programs](https://github.com/solana-labs/solana-program-library). +1. **Audit Your Program or Find an Open Source Project**: -So for this lesson, take a look at a program (whether yours or one you've found -online) and audit it for signer checks. If you find a bug in somebody else's -program, please alert them! If you find a bug in your own program, be sure to -patch it right away. + - Begin by auditing your own code for missing signer checks, or find an open + source Solana program to audit. A great place to start is with the + [program examples](https://github.com/solana-developers/program-examples) + repository. + +2. **Look for Signer Check Issues**: + + - Focus on instruction handlers where signer authorization is crucial, + especially those that transfer tokens or modify sensitive account data. + - Review the program for any `UncheckedAccount` types where signer validation + should be enforced. + - Ensure that any accounts that should require user authorization are defined + as `Signer` in the instruction handler. + +3. **Patch or Report**: + - If you find a bug in your own code, fix it by using the `Signer` type for + accounts that require signer validation. + - If the issue exists in an open source project, notify the project + maintainers or submit a pull request. -Push your code to GitHub and + +After completing the challenge, push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=26b3f41e-8241-416b-9cfa-05c5ab519d80)! 
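+To recap the pattern that step 2 asks you to hunt for, here is a minimal
+side-by-side sketch; the struct and field names are placeholders, not taken
+from any specific program:
+
+```rust
+use anchor_lang::prelude::*;
+
+#[derive(Accounts)]
+pub struct AuditExample<'info> {
+    // Red flag: an authority-style account with no signature requirement.
+    /// CHECK: No validation is performed on this account
+    pub unchecked_authority: UncheckedAccount<'info>,
+    // What you want to see instead: Anchor rejects the transaction unless
+    // this account actually signed it.
+    pub checked_authority: Signer<'info>,
+}
+```
+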
diff --git a/content/courses/program-security/type-cosplay.md b/content/courses/program-security/type-cosplay.md index 79dbe227c..f52c7d25d 100644 --- a/content/courses/program-security/type-cosplay.md +++ b/content/courses/program-security/type-cosplay.md @@ -2,19 +2,21 @@ title: Type Cosplay objectives: - Explain the security risks associated with not checking account types - - Implement an account type discriminator using long-form Rust + - Implement an account type discriminator using native Rust - Use Anchor's `init` constraint to initialize accounts - Use Anchor's `Account` type for account validation description: - "Under the risks of accounts of the wrong type being used in instruction, and - use account type checks to mitigate them." + "Understand the risks of using incorrect account types in instructions and how + to mitigate them with account type checks." --- ## Summary -- Use discriminators to distinguish between different account types -- To implement a discriminator in Rust, include a field in the account struct to - represent the account type +- **Discriminators** are 8-byte identifiers written to accounts that distinguish + between different account types, ensuring programs interact with the correct + data. +- **Implement a discriminator** in Rust by including a field in the account + struct to represent the account type. ```rust #[derive(BorshSerialize, BorshDeserialize)] @@ -30,41 +32,40 @@ description: } ``` -- To implement a discriminator check in Rust, verify that the discriminator of - the deserialized account data matches the expected value +- **Check the discriminator** in Rust to verify that the deserialized account + data matches the expected value. ```rust if user.discriminant != AccountDiscriminant::User { - return Err(ProgramError::InvalidAccountData.into()); + return Err(ProgramError::InvalidAccountData.into()); } ``` -- In Anchor, program account types automatically implement the `Discriminator` - trait which creates an 8 byte unique identifier for a type -- Use Anchor’s `Account<'info, T>` type to automatically check the discriminator - of the account when deserializing the account data +- **In Anchor**, program account types automatically implement the + `Discriminator` trait, which creates an 8-byte unique identifier for a type. +- Use Anchor's `Account<'info, T>` type to automatically check the discriminator + when deserializing the account data. ## Lesson -“Type cosplay” refers to an unexpected account type being used in place of an -expected account type. Under the hood, account data is simply stored as an array -of bytes that a program deserializes into a custom account type. Without -implementing a way to explicitly distinguish between account types, account data -from an unexpected account could result in an instruction being used in -unintended ways. +"Type cosplay" refers to using an unexpected account type in place of an +expected one. Under the hood, account data is stored as an array of bytes that a +program deserializes into a custom account type. Without a method to distinguish +between account types explicitly, data from an unexpected account could result +in instructions being used in unintended ways. -#### Unchecked account +### Unchecked Account In the example below, both the `AdminConfig` and `UserConfig` account types -store a single public key. The `admin_instruction` instruction deserializes the -`admin_config` account as an `AdminConfig` type and then performs a owner check +store a single public key. 
The `admin_instruction` deserializes the
+`admin_config` account as an `AdminConfig` type and then performs an owner check
and data validation check.

-However, the `AdminConfig` and `UserConfig` account types have the same data
-structure. This means a `UserConfig` account type could be passed in as the
-`admin_config` account. As long as the public key stored on the account data
-matches the `admin` signing the transaction, the `admin_instruction` instruction
-would continue to process, even if the signer isn't actually an admin.
+However, since the `AdminConfig` and `UserConfig` account types have the same
+data structure, a `UserConfig` account type could be passed as the
+`admin_config` account. As long as the public key stored on the account matches
+the `admin` signing the transaction, the `admin_instruction` would process, even
+if the signer isn't actually an admin.

Note that the names of the fields stored on the account types (`admin` and
`user`) make no difference when deserializing account data. The data is
@@ -73,7 +74,6 @@ names.

```rust
use anchor_lang::prelude::*;
-use borsh::{BorshDeserialize, BorshSerialize};

declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");

@@ -97,29 +97,51 @@ pub mod type_cosplay_insecure {

#[derive(Accounts)]
pub struct AdminInstruction<'info> {
+    /// CHECK: This account is not being validated by Anchor
    admin_config: UncheckedAccount<'info>,
    admin: Signer<'info>,
}

-#[derive(BorshSerialize, BorshDeserialize)]
+#[derive(AnchorSerialize, AnchorDeserialize, InitSpace)]
pub struct AdminConfig {
    admin: Pubkey,
}

-#[derive(BorshSerialize, BorshDeserialize)]
+#[derive(AnchorSerialize, AnchorDeserialize, InitSpace)]
pub struct UserConfig {
    user: Pubkey,
}
```

-#### Add account discriminator
+### Add Account Discriminator
+
+To resolve this, add a discriminant field for each account type and set the
+discriminant when initializing an account.
+
+
+
+While they sound similar, a
+[Rust **discriminant**](https://doc.rust-lang.org/std/mem/fn.discriminant.html)
+isn't the same thing as an
+[Anchor **discriminator**](https://book.anchor-lang.com/anchor_bts/discriminator.html)!
+
+- **Rust discriminant**: This is an internal value that Rust uses to keep track
+  of which variant an enum currently represents. It's like a behind-the-scenes
+  label for enum variants.
+
+- **Anchor discriminator**: This is a unique 8-byte identifier that Anchor adds
+  to the beginning of each account's data. It helps Solana programs quickly
+  recognize what type of account they're dealing with.
+
+In simple terms:

-To solve this, you can add a discriminant field for each account type and set
-the discriminant when initializing an account.
+- Discriminants are Rust's way of organizing enum variants.
+- Discriminators are Anchor's way of labeling different account types in Solana.
+

The example below updates the `AdminConfig` and `UserConfig` account types with
-a `discriminant` field. The `admin_instruction` instruction includes an
-additional data validation check for the `discriminant` field.
+a `discriminant` field. The `admin_instruction` now includes an additional data
+validation check for the `discriminant` field.

```rust
if account_data.discriminant != AccountDiscriminant::Admin {
@@ -128,14 +150,13 @@ if account_data.discriminant != AccountDiscriminant::Admin {
```

If the `discriminant` field of the account passed into the instruction as the
-`admin_config` account does not match the expected `AccountDiscriminant`, then
-the transaction will fail.
Simply make sure to set the appropriate value for -`discriminant` when you initialize each account (not shown in the example), and -then you can include these discriminant checks in every subsequent instruction. +`admin_config` account does not match the expected `AccountDiscriminant`, the +transaction will fail. Ensure that the appropriate value for `discriminant` is +set when initializing each account, and then include these checks in every +subsequent instruction. ```rust use anchor_lang::prelude::*; -use borsh::{BorshDeserialize, BorshSerialize}; declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); @@ -162,42 +183,43 @@ pub mod type_cosplay_secure { #[derive(Accounts)] pub struct AdminInstruction<'info> { + /// CHECK: This account is not being validated by Anchor admin_config: UncheckedAccount<'info>, admin: Signer<'info>, } -#[derive(BorshSerialize, BorshDeserialize)] +#[derive(AnchorSerialize, AnchorDeserialize, InitSpace)] pub struct AdminConfig { discriminant: AccountDiscriminant, admin: Pubkey, } -#[derive(BorshSerialize, BorshDeserialize)] +#[derive(AnchorSerialize, AnchorDeserialize, InitSpace)] pub struct UserConfig { discriminant: AccountDiscriminant, user: Pubkey, } -#[derive(BorshSerialize, BorshDeserialize, PartialEq)] +#[derive(AnchorSerialize, AnchorDeserialize, PartialEq, InitSpace)] pub enum AccountDiscriminant { Admin, User, } ``` -#### Use Anchor’s `Account` wrapper +### Use Anchor's Account Wrapper -Implementing these checks for every account needed for every instruction can be -tedious. Fortunately, Anchor provides a `#[account]` attribute macro for -automatically implementing traits that every account should have. +Implementing these checks for every account in every instruction can be tedious. +Fortunately, Anchor provides a `#[account]` attribute macro for automatically +implementing traits that every account should have. Structs marked with `#[account]` can then be used with `Account` to validate -that the passed in account is indeed the type you expect it to be. When -initializing an account whose struct representation has the `#[account]` -attribute, the first 8 bytes are automatically reserved for a discriminator -unique to the account type. When deserializing the account data, Anchor will -automatically check if the discriminator on the account matches the expected -account type and throw and error if it does not match. +that the passed-in account is indeed the type you expect. When initializing an +account whose struct representation has the `#[account]` attribute, the first 8 +bytes are automatically reserved for a discriminator unique to the account type. +When deserializing the account data, Anchor will automatically check if the +discriminator matches the expected account type and throw an error if it does +not. In the example below, `Account<'info, AdminConfig>` specifies that the `admin_config` account should be of type `AdminConfig`. Anchor then @@ -207,14 +229,13 @@ discriminator of the `AdminConfig` type. The data validation check for the `admin` field is also moved from the instruction logic to the account validation struct using the `has_one` constraint. `#[account(has_one = admin)]` specifies that the `admin_config` -account’s `admin` field must match the `admin` account passed into the +account's `admin` field must match the `admin` account passed into the instruction. 
Note that for the `has_one` constraint to work, the naming of the -account in the struct must match the naming of field on the account you are +account in the struct must match the naming of the field on the account you are validating. ```rust use anchor_lang::prelude::*; -use borsh::{BorshDeserialize, BorshSerialize}; declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); @@ -236,99 +257,97 @@ pub struct AdminInstruction<'info> { } #[account] +#[derive(InitSpace)] pub struct AdminConfig { admin: Pubkey, } #[account] +#[derive(InitSpace)] pub struct UserConfig { user: Pubkey, } ``` -It’s important to note that this is a vulnerability you don’t really have to -worry about when using Anchor - that’s the whole point of it in the first place! -After going through how this can be exploited if not handled properly in a -native rust program, hopefully you have a much better understanding of what the -purpose of the account discriminator is in an Anchor account. The fact that -Anchor sets and checks this discriminator automatically means that developers -can spend more time focusing on their product, but it’s still very important to -understand what Anchor is doing behind the scenes to develop robust Solana -programs. +This vulnerability is something you generally don't have to worry about when +using Anchor—that's the whole point! However, after exploring how this issue can +arise in native Rust programs, you should now have a better understanding of the +importance of the account discriminator in an Anchor account. Anchor's automatic +discriminator checks mean that developers can focus more on their product, but +it's still crucial to understand what Anchor is doing behind the scenes to build +robust Solana programs. ## Lab -For this lab we’ll create two programs to demonstrate a type cosplay -vulnerability. +In this lab, you'll create two programs to demonstrate a type cosplay +vulnerability: -- The first program will initialize program accounts without a discriminator -- The second program will initialize program accounts using Anchor’s `init` - constraint which automatically sets an account discriminator +- The first program initializes accounts without a discriminator. +- The second program initializes accounts using Anchor's `init` constraint, + which automatically sets an account discriminator. -#### 1. Starter +### 1. Starter -To get started, download the starter code from the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-type-cosplay/tree/starter). The -starter code includes a program with three instructions and some tests. +To get started, download the starter code from the starter branch of +[this repository](https://github.com/solana-developers/type-cosplay/tree/starter). +The starter code includes a program with three instructions and some tests. The three instructions are: -1. `initialize_admin` - initializes an admin account and sets the admin - authority of the program -2. `initialize_user` - intializes a standard user account -3. `update_admin` - allows the existing admin to update the admin authority of - the program +1. `initialize_admin`- Initializes an admin account and sets the admin authority + of the program. +2. `initialize_user` - Initializes a standard user account. +3. `update_admin` - Allows the existing admin to update the admin authority of + the program. -Take a look at these three instructions in the `lib.rs` file. 
The last -instruction should only be callable by the account matching the `admin` field on -the admin account initialized using the `initialize_admin` instruction. +Review the instructions in the `lib.rs` file. The last instruction should only +be callable by the account matching the `admin` field on the admin account +initialized using the `initialize_admin` instruction. -#### 2. Test insecure `update_admin` instruction +### 2. Test Insecure update_admin Instruction -However, both accounts have the same fields and field types: +Both the `AdminConfig` and `User` account types have the same fields and field +types: ```rust -#[derive(BorshSerialize, BorshDeserialize)] +#[derive(AnchorSerialize, AnchorDeserialize)] pub struct AdminConfig { admin: Pubkey, } -#[derive(BorshSerialize, BorshDeserialize)] +#[derive(AnchorSerialize, AnchorDeserialize)] pub struct User { user: Pubkey, } ``` -Because of this, it's possible to pass in a `User` account in place of the -`admin` account in the `update_admin` instruction, thereby bypassing the -requirement that one be an admin to call this instruction. +Because of this, it's possible to pass a `User` account in place of the `admin` +account in the `update_admin` instruction, bypassing the requirement that only +an admin can call this instruction. Take a look at the `solana-type-cosplay.ts` file in the `tests` directory. It -contains some basic setup and two tests. One test initializes a user account, -and the other invokes `update_admin` and passes in the user account in place of -an admin account. +contains a basic setup and two tests: one initializes a user account, and the +other invokes `update_admin` with the user account instead of an admin account. -Run `anchor test` to see that invoking `update_admin` will complete -successfully. +Run `anchor test` to see that invoking `update_admin` completes successfully: ```bash type-cosplay - ✔ Initialize User Account (233ms) - ✔ Invoke update admin instruction with user account (487ms) + ✔ Initialize User Account (223ms) + ✔ Invoke update admin instruction with user account (442ms) ``` -#### 3. Create `type-checked` program +### 3. Create type-checked Program -Now we'll create a new program called `type-checked` by running +Next, create a new program called `type-checked` by running `anchor new type-checked` from the root of the existing anchor program. -Now in your `programs` folder you will have two programs. Run `anchor keys list` -and you should see the program ID for the new program. Add it to the `lib.rs` -file of the `type-checked` program and to the `type_checked` program in the -`Anchor.toml` file. +Now, in your `programs` folder, you will have two programs. Run +`anchor keys list` to see the program ID for the new program. Add it to the +`lib.rs` file of the `type-checked` program and to the `Anchor.toml` file. -Next, update the test file's setup to include the new program and two new -keypairs for the accounts we'll be initializing for the new program. +Update the test file's setup to include the new program and two new keypairs for +the accounts to be initialized: ```typescript import * as anchor from "@coral-xyz/anchor"; @@ -352,23 +371,24 @@ describe("type-cosplay", () => { }); ``` -#### 4. Implement the `type-checked` program +### 4. Implement the type-checked Program In the `type_checked` program, add two instructions using the `init` constraint -to initialize an `AdminConfig` account and a `User` account. 
When using the -`init` constraint to initialize new program accounts, Anchor will automatically -set the first 8 bytes of account data as a unique discriminator for the account -type. +to initialize an `AdminConfig` account and a `User` account. Anchor will +automatically set the first 8 bytes of account data as a unique discriminator +for the account type. -We’ll also add an `update_admin` instruction that validates the `admin_config` -account as a `AdminConfig` account type using Anchor’s `Account` wrapper. For -any account passed in as the `admin_config` account, Anchor will automatically -check that the account discriminator matches the expected account type. +Add an `update_admin` instruction that validates the `admin_config` account as +an `AdminConfig` account type using Anchor's `Account` wrapper. Anchor will +automatically check that the account discriminator matches the expected account +type: ```rust use anchor_lang::prelude::*; -declare_id!("FZLRa6vX64QL6Vj2JkqY1Uzyzjgi2PYjCABcDabMo8U7"); +declare_id!("G36iNpB591wxFeaeq55qgTwHKJspBrETmgok94oyqgcc"); + +const DISCRIMINATOR_SIZE: usize = 8; #[program] pub mod type_checked { @@ -395,7 +415,7 @@ pub struct InitializeAdmin<'info> { #[account( init, payer = admin, - space = 8 + 32 + space = DISCRIMINATOR_SIZE + AdminConfig::INIT_SPACE )] pub admin_config: Account<'info, AdminConfig>, #[account(mut)] @@ -408,7 +428,7 @@ pub struct InitializeUser<'info> { #[account( init, payer = user, - space = 8 + 32 + space = DISCRIMINATOR_SIZE + User::INIT_SPACE )] pub user_account: Account<'info, User>, #[account(mut)] @@ -429,111 +449,131 @@ pub struct UpdateAdmin<'info> { } #[account] +#[derive(InitSpace)] pub struct AdminConfig { admin: Pubkey, } #[account] +#[derive(InitSpace)] pub struct User { user: Pubkey, } ``` -#### 5. Test secure `update_admin` instruction +### 5. Test Secure update_admin Instruction -In the test file, we’ll initialize an `AdminConfig` account and a `User` account -from the `type_checked` program. Then we’ll invoke the `updateAdmin` instruction -twice passing in the newly created accounts. +In the test file, initialize an `AdminConfig` account and a `User` account from +the `type_checked` program. Then, invoke the `updateAdmin` instruction twice, +passing in the newly created accounts: ```typescript describe("type-cosplay", () => { - ... + ... 
it("Initialize type checked AdminConfig Account", async () => { - await programChecked.methods - .initializeAdmin() - .accounts({ - adminConfig: adminAccountType.publicKey, - }) - .signers([adminAccountType]) - .rpc() - }) + try { + await programChecked.methods + .initializeAdmin() + .accounts({ + adminConfig: adminAccountChecked.publicKey, + }) + .signers([adminAccountChecked]) + .rpc(); + } catch (error) { + throw new Error( + `Initializing type checked AdminConfig Account failed: ${error.message}` + ); + } + }); it("Initialize type checked User Account", async () => { - await programChecked.methods - .initializeUser() - .accounts({ - userAccount: userAccountType.publicKey, - user: provider.wallet.publicKey, - }) - .signers([userAccountType]) - .rpc() - }) + try { + await programChecked.methods + .initializeUser() + .accounts({ + userAccount: userAccountChecked.publicKey, + user: provider.wallet.publicKey, + }) + .signers([userAccountChecked]) + .rpc(); + } catch (error) { + throw new Error( + `Initializing type checked User Account failed: ${error.message}` + ); + } + }); it("Invoke update instruction using User Account", async () => { try { await programChecked.methods .updateAdmin() .accounts({ - adminConfig: userAccountType.publicKey, + adminConfig: userAccountChecked.publicKey, newAdmin: newAdmin.publicKey, admin: provider.wallet.publicKey, }) - .rpc() - } catch (err) { - expect(err) - console.log(err) + .rpc(); + } catch (error) { + expect(error); + console.log(error); } - }) + }); it("Invoke update instruction using AdminConfig Account", async () => { - await programChecked.methods - .updateAdmin() - .accounts({ - adminConfig: adminAccountType.publicKey, - newAdmin: newAdmin.publicKey, - admin: provider.wallet.publicKey, - }) - .rpc() - }) + try { + await programChecked.methods + .updateAdmin() + .accounts({ + adminConfig: adminAccountChecked.publicKey, + newAdmin: newAdmin.publicKey, + admin: provider.wallet.publicKey, + }) + .rpc(); + } catch (error) { + throw new Error( + `Invoking update instruction using AdminConfig Account failed: ${error.message}` + ); + } + }); }) ``` -Run `anchor test`. For the transaction where we pass in the `User` account type, -we expect the instruction and return an Anchor Error for the account not being -of type `AdminConfig`. +Run `anchor test`. For the transaction where we pass in the User account type, +we expect the instruction to return an Anchor Error due to the account not being +of type AdminConfig: ```bash -'Program EU66XDppFCf2Bg7QQr59nyykj9ejWaoW93TSkk1ufXh3 invoke [1]', +'Program G36iNpB591wxFeaeq55qgTwHKJspBrETmgok94oyqgcc invoke [1]', 'Program log: Instruction: UpdateAdmin', 'Program log: AnchorError caused by account: admin_config. Error Code: AccountDiscriminatorMismatch. Error Number: 3002. Error Message: 8 byte discriminator did not match what was expected.', -'Program EU66XDppFCf2Bg7QQr59nyykj9ejWaoW93TSkk1ufXh3 consumed 4765 of 200000 compute units', -'Program EU66XDppFCf2Bg7QQr59nyykj9ejWaoW93TSkk1ufXh3 failed: custom program error: 0xbba' +'Program G36iNpB591wxFeaeq55qgTwHKJspBrETmgok94oyqgcc consumed 3506 of 200000 compute units', +'Program G36iNpB591wxFeaeq55qgTwHKJspBrETmgok94oyqgcc failed: custom program error: 0xbba' ``` -Following Anchor best practices and using Anchor types will ensure that your -programs avoid this vulnerability. Always use the `#[account]` attribute when -creating account structs, use the `init` constraint when initializing accounts, -and use the `Account` type in your account validation structs. 
+Following Anchor's best practices ensures that your programs avoid this +vulnerability. Always use the `#[account]` attribute when creating account +structs, use the `init` constraint when initializing accounts, and use the +`Account` type in your account validation structs. -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the repository](https://github.com/Unboxed-Software/solana-type-cosplay/tree/solution). +For the final solution code, you can find it on the `solution` branch of +[the repository](https://github.com/solana-developers/type-cosplay/tree/solution). ## Challenge -Just as with other lessons in this unit, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. +As with other lessons in this unit, practice avoiding this security exploit by +auditing your own or other programs. -Take some time to review at least one program and ensure that account types have -a discriminator and that those are checked for each account and instruction. -Since standard Anchor types handle this check automatically, you're more likely -to find a vulnerability in a native program. +Review at least one program and ensure that account types have a discriminator +and that these are checked for each account and instruction. Since standard +Anchor types handle this check automatically, you're more likely to find a +vulnerability in a native program. Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. +them. If you find one in your own program, patch it immediately. + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=37ebccab-b19a-43c6-a96a-29fa7e80fdec)! diff --git a/content/courses/solana-pay/solana-pay.md b/content/courses/solana-pay/solana-pay.md index 7cd0baebe..5d08a75e4 100644 --- a/content/courses/solana-pay/solana-pay.md +++ b/content/courses/solana-pay/solana-pay.md @@ -3,10 +3,9 @@ title: Solana Pay objectives: - Use the Solana Pay specification to build payment requests and initiate transactions using URLs encoded as QR codes - - Use the `@solana/pay` library to help with the creation of Solana Pay - transaction requests + - Use the `@solana/pay` library to create Solana Pay transaction requests - Partially sign transactions and implement transaction gating based on - certain conditions + specific conditions description: "How to create Solana Pay payment requests using links and QR codes." --- @@ -15,19 +14,19 @@ description: - **Solana Pay** is a specification for encoding Solana transaction requests within URLs, enabling standardized transaction requests across different - Solana apps and wallets -- **Partial signing** of transactions allows for the creation of transactions - that require multiple signatures before they are submitted to the network + Solana apps and wallets. +- **Partial signing** of transactions allows the creation of transactions that + require multiple signatures before they are submitted to the network. - **Transaction gating** involves implementing rules that determine whether - certain transactions are allowed to be processed or not, based on certain - conditions or the presence of specific data in the transaction + certain transactions are allowed to be processed, based on specific conditions + or the presence of particular data in the transaction. 
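+For a sense of what such a request looks like in practice, here is a sketch
+that builds a transfer request URL with `@solana/pay`; the keys and values are
+illustrative only:
+
+```typescript
+import { encodeURL } from "@solana/pay";
+import { Keypair } from "@solana/web3.js";
+import BigNumber from "bignumber.js";
+
+// Illustrative keys: a real app would use its own recipient wallet and a
+// unique reference key for locating the transaction later.
+const recipient = Keypair.generate().publicKey;
+const reference = Keypair.generate().publicKey;
+
+const url = encodeURL({
+  recipient,
+  amount: new BigNumber(0.001),
+  reference,
+  label: "Store Name",
+  message: "Simple transfer of 0.001 SOL",
+});
+
+// Produces a link like: solana:<recipient>?amount=0.001&reference=...
+console.log(url.toString());
+```
+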
## Lesson The Solana community is continually improving and expanding the network's -functionality. But that doesn't always mean developing brand new technology. +functionality. But that doesn't always mean developing brand-new technology. Sometimes it means leveraging the network's existing features in new and -interesting ways. +innovative ways. Solana Pay is a great example of this. Rather than adding new functionality to the network, Solana Pay uses the network's existing signing features in a unique @@ -38,17 +37,17 @@ Throughout this lesson, you'll learn how to use Solana Pay to create transfer and transaction requests, encode these requests as a QR code, partially sign transactions, and gate transactions based on conditions you choose. Rather than leaving it at that, we hope you'll see this as an example of leveraging existing -features in new and interesting ways, using it as a launching pad for your own +features in new and innovative ways, using it as a launching pad for your own unique client-side network interactions. ### Solana Pay The [Solana Pay specification](https://docs.solanapay.com/spec) is a set of standards that allow users to request payments and initiate transactions using -URLs in a uniform way across various Solana apps and wallets. +URLs uniformly across various Solana apps and wallets. Request URLs are prefixed with `solana:` so that platforms can direct the link -to the appropriate application. For example, on mobile a URL that starts with +to the appropriate application. For example, on mobile, a URL that starts with `solana:` will be directed to wallet applications that support the Solana Pay specification. From there, the wallet can use the remainder of the URL to appropriately handle the request. @@ -141,8 +140,8 @@ a function that handles the request and response. 
import { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse,
+  request: NextApiRequest,
+  response: NextApiResponse,
) {
  // Handle the request
}

@@ -163,18 +162,17 @@ Building on the empty endpoint from before, that may look like this:

import { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse,
+  request: NextApiRequest,
+  response: NextApiResponse,
) {
-  if (req.method === "GET") {
-    return get(res);
-  } else {
-    return res.status(405).json({ error: "Method not allowed" });
+  if (request.method === "GET") {
+    return get(response);
  }
+  return response.status(405).json({ error: "Method not allowed" });
}

-function get(res: NextApiResponse) {
-  res.status(200).json({
+function get(response: NextApiResponse) {
+  response.status(200).json({
    label: "Store Name",
    icon: "https://solana.com/src/img/branding/solanaLogoMark.svg",
  });
@@ -205,26 +203,26 @@ transaction and return it to the wallet for signing by:

import { NextApiRequest, NextApiResponse } from "next";

export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse,
+  request: NextApiRequest,
+  response: NextApiResponse,
) {
-  if (req.method === "GET") {
-    return get(res);
-  } else if (req.method === "POST") {
-    return post(req, res);
-  } else {
-    return res.status(405).json({ error: "Method not allowed" });
+  if (request.method === "GET") {
+    return get(response);
  }
+  if (request.method === "POST") {
+    return post(request, response);
+  }
+  return response.status(405).json({ error: "Method not allowed" });
}

-function get(res: NextApiResponse) {
-  res.status(200).json({
+function get(response: NextApiResponse) {
+  response.status(200).json({
    label: "Store Name",
    icon: "https://solana.com/src/img/branding/solanaLogoMark.svg",
  });
}

-async function post(req: PublicKey, res: PublicKey) {
-  const { account, reference } = req.body;
+async function post(request: NextApiRequest, response: NextApiResponse) {
+  const { account, reference } = request.body;

  const connection = new Connection(clusterApiUrl("devnet"));

@@ -236,19 +234,19 @@ async function post(req: PublicKey, res: PublicKey) {
  });

  const instruction = SystemProgram.transfer({
-    fromPubkey: account,
+    fromPubkey: new PublicKey(account),
    toPubkey: Keypair.generate().publicKey,
    lamports: 0.001 * LAMPORTS_PER_SOL,
  });

-  transaction.add(instruction);
-
-  transaction.keys.push({
-    pubkey: reference,
+  instruction.keys.push({
+    pubkey: new PublicKey(reference),
    isSigner: false,
    isWritable: false,
  });

+  transaction.add(instruction);
+
  const serializedTransaction = transaction.serialize({
    requireAllSignatures: false,
  });

@@ -256,7 +254,7 @@

  const message = "Simple transfer of 0.001 SOL";

-  res.send(200).json({
+  response.status(200).json({
    transaction: base64,
    message,
  });
@@ -495,21 +493,21 @@ variable.
The first thing we'll do in this file is the following: import { NextApiRequest, NextApiResponse } from "next"; export default async function handler( - req: NextApiRequest, - res: NextApiResponse, + request: NextApiRequest, + response: NextApiResponse, ) { - if (req.method === "GET") { - return get(res); - } else if (req.method === "POST") { - return await post(req, res); - } else { - return res.status(405).json({ error: "Method not allowed" }); + if (request.method === "GET") { + return get(response); } + if (request.method === "POST") { + return await post(request, response); + } + return response.status(405).json({ error: "Method not allowed" }); } -function get(res: NextApiResponse) {} +function get(response: NextApiResponse) {} -async function post(req: NextApiRequest, res: NextApiResponse) {} +async function post(request: NextApiRequest, response: NextApiResponse) {} ``` #### 4. Update `get` function @@ -519,8 +517,8 @@ endpoint to return a label and icon. Update the `get` function to send a response with a "Scavenger Hunt!" label and a Solana logo icon. ```jsx -function get(res: NextApiResponse) { - res.status(200).json({ +function get(response: NextApiResponse) { + response.status(200).json({ label: "Scavenger Hunt!", icon: "https://solana.com/src/img/branding/solanaLogoMark.svg", }); @@ -561,35 +559,31 @@ import { NextApiRequest, NextApiResponse } from "next" import { PublicKey, Transaction } from "@solana/web3.js" ... -async function post(req: NextApiRequest, res: NextApiResponse) { - const { account } = req.body - const { reference, id } = req.query - - if (!account || !reference || !id) { - res.status(400).json({ error: "Missing required parameter(s)" }) - return - } - - try { - const transaction = await buildTransaction( - new PublicKey(account), - new PublicKey(reference), - id.toString() - ) - - res.status(200).json({ - transaction: transaction, - message: `You've found location ${id}!`, - }) - } catch (err) { - console.log(err) - let error = err as any - if (error.message) { - res.status(200).json({ transaction: "", message: error.message }) - } else { - res.status(500).json({ error: "error creating transaction" }) - } - } +async function post(request: NextApiRequest, response: NextApiResponse) { + const { account } = request.body; + const { reference, id } = request.query; + + if (!account || !reference || !id) { + response.status(400).json({ error: "Missing required parameter(s)" }); + return; + } + + try { + const transaction = await buildTransaction( + new PublicKey(account), + new PublicKey(reference), + id.toString(), + ); + + response.status(200).json({ + transaction: transaction, + message: `You've found location ${id}!`, + }); + } catch (error) { + console.log(error); + response.status(500).json({ transaction: "", message: error.message }); + return; + } } async function buildTransaction( @@ -603,7 +597,7 @@ async function buildTransaction( #### 6. Implement the `buildTransaction` function -Next, let’s implement the `buildTransaction` function. It should build, +Next, let's implement the `buildTransaction` function. It should build, partially sign, and return the check-in transaction. The sequence of items it needs to perform is: @@ -743,7 +737,7 @@ async function fetchUserState(account: PublicKey): Promise { #### 8. Implement `verifyCorrectLocation` function -Next, let’s implement the `verifyCorrectLocation` helper function. This function +Next, let's implement the `verifyCorrectLocation` helper function. 
This function is used to verify that a user is at the correct location in a scavenger hunt game. @@ -769,9 +763,8 @@ function verifyCorrectLocation( if (!lastLocation || currentLocation.index !== lastLocation.index + 1) { return false; - } else { - return true; } + return true; } ``` diff --git a/content/courses/state-compression/compressed-nfts.md b/content/courses/state-compression/compressed-nfts.md index 0e3372b81..89513bc5f 100644 --- a/content/courses/state-compression/compressed-nfts.md +++ b/content/courses/state-compression/compressed-nfts.md @@ -2,26 +2,26 @@ title: Compressed NFTs objectives: - Create a compressed NFT collection using Metaplex’s Bubblegum program - - Mint compressed NFTs using the Bubblegum TS SDK - - Transfer compressed NFTs using the Bubblegum TS SDK + - Mint compressed NFTs using the Bubblegum program + - Transfer compressed NFTs using the Bubblegum program - Read compressed NFT data using the Read API description: "How to mint, transfer and read large-scale NFT collections using Metaplex's - Bubblegum SDK." + Bubblegum Program." --- ## Summary - **Compressed NFTs (cNFTs)** use **State Compression** to hash NFT data and store the hash onchain in an account using a **concurrent Merkle tree** - structure + structure. - The cNFT data hash can’t be used to infer the cNFT data, but it can be used to - **verify** if the cNFT data you’re seeing is correct -- Supporting RPC providers **index** cNFT data off-chain when the cNFT is minted + **verify** if the cNFT data you’re seeing is correct. +- Supporting RPC providers **index** cNFT data offchain when the cNFT is minted so that you can use the **Read API** to access the data - The **Metaplex Bubblegum program** is an abstraction on top of the **State Compression** program that enables you to more simply create, mint, and manage - cNFT collections + cNFT collections. ## Lesson @@ -30,33 +30,32 @@ structure takes up less account storage than traditional NFTs. Compressed NFTs leverage a concept called **State Compression** to store data in a way that drastically reduces costs. -Solana’s transaction costs are so cheap that most users never think about how +Solana's transaction costs are so cheap that most users never think about how expensive minting NFTs can be at scale. The cost to set up and mint 1 million -traditional NFTs is approximately 24,000 SOL. By comparison, cNFTs can be -structured to where the same setup and mint costs 10 SOL or less. That means -anyone using NFTs at scale could cut costs by more than 1000x by using cNFTs -over traditional NFTs. +traditional NFTs using the Token Metadata Program is approximately 24,000 SOL. +By comparison, cNFTs can be structured to where the same setup and mint costs 10 +SOL or less. That means anyone using NFTs at scale could cut costs by more than +1000x by using cNFTs over traditional NFTs. However, cNFTs can be tricky to work with. Eventually, the tooling required to work with them will be sufficiently abstracted from the underlying technology that the developer experience between traditional NFTs and cNFTs will be -negligible. But for now, you’ll still need to understand the low level puzzle -pieces, so let’s dig in! +negligible. But for now, you'll still need to understand the low level puzzle +pieces, so let's dig in! ### A theoretical overview of cNFTs Most of the costs associated with traditional NFTs come down to account storage space. 
Compressed NFTs use a concept called State Compression to store data in -the blockchain’s cheaper **ledger state**, using more expensive account space -only to store a “fingerprint”, or **hash**, of the data. This hash allows you to +the blockchain’s **ledger state**, only using the account state to store a +“fingerprint”, or **hash**, of the data. This hash allows you to cryptographically verify that data has not been tampered with. To both store hashes and enable verification, we use a special binary tree structure known as a **concurrent Merkle tree**. This tree structure lets us hash data together in a deterministic way to compute a single, final hash that -gets stored onchain. This final hash is significantly smaller in size than all -the original data combined, hence the “compression.” The steps to this process -are: +gets stored onchain. This final hash is significantly smaller than all the +original data combined, hence the “compression.” The steps to this process are: 1. Take any piece of data 2. Create a hash of this data @@ -71,20 +70,20 @@ are: truth” can go through the same process and compare the final hash without having to store all the data onchain -One problem not addressed in the above is how to make data available if it can’t +One problem not addressed in the above is how to make data available if it can't be fetched from an account. Since this hashing process occurs onchain, all the data exists in the ledger state and could theoretically be retrieved from the original transaction by replaying the entire chain state from origin. However, -it’s much more straightforward (though still complicated) to have an **indexer** +it's much more straightforward (though still complicated) to have an **indexer** track and index this data as the transactions occur. This ensures there is an -off-chain “cache” of the data that anyone can access and subsequently verify +offchain “cache” of the data that anyone can access and subsequently verify against the onchain root hash. -This process is _very complex_. We’ll cover some of the key concepts below but -don’t worry if you don’t understand it right away. We’ll talk more theory in the -state compression lesson and focus primarily on application to NFTs in this -lesson. You’ll be able to work with cNFTs by the end of this lesson even if you -don’t fully understand every piece of the state compression puzzle. +This process is _very complex_. We’ll cover some key concepts below but don’t +worry if you don’t understand it right away. We’ll talk more theory in the state +compression lesson and focus primarily on application to NFTs in this lesson. +You’ll be able to work with cNFTs by the end of this lesson even if you don’t +fully understand every piece of the state compression puzzle. #### Concurrent Merkle trees @@ -130,9 +129,8 @@ node adds 32 bytes to a transaction, so large trees would quickly exceed the maximum transaction size limit without caching proof nodes onchain. Each of these three values, max depth, max buffer size, and canopy depth, comes -with a tradeoff. Increasing the value of any of these values increases the size -of the account used to store the tree, thus increasing the cost to create the -tree. +with a tradeoff. Increasing any of these values increases the size of the +account used to store the tree, thus increasing the cost to create the tree. Choosing the max depth is fairly straightforward as it directly relates to the number of leafs and therefore the amount of data you can store. 
If you need @@ -168,7 +166,7 @@ forever exist on the ledger state. #### Index data for easy lookup Under normal conditions, you would typically access onchain data by fetching the -appropriate account. When using state compression, however, it’s not so +appropriate account. When using state compression, however, it's not so straightforward. As mentioned above, the data now exists in the ledger state rather than in an @@ -177,90 +175,89 @@ instruction, but while this data will in a sense exist in the ledger state forever, it will likely be inaccessible through validators after a certain period of time. -To save space and be more performant, validators don’t retain every transaction -back to the genesis block. The specific amount of time you’ll be able to access +To save space and be more performant, validators don't retain every transaction +back to the genesis block. The specific amount of time you'll be able to access the Noop instruction logs related to your data will vary based on the validator, -but eventually you’ll lose access to it if you’re relying directly on +but eventually you'll lose access to it if you're relying directly on instruction logs. -Technically, you _can_ replay transaction state back to the genesis block but +Technically, you _can_ replay transaction state back to the genesis block, but the average team isn’t going to do that, and it certainly won’t be performant. + Instead, you should use an indexer that will observe the events sent to the Noop -program and store the relevant data off chain. That way you don’t need to worry +program and store the relevant data off chain. That way you don't need to worry about old data becoming inaccessible. ### Create a cNFT Collection -With the theoretical background out of the way, let’s turn our attention to the +With the theoretical background out of the way, let's turn our attention to the main point of this lesson: how to create a cNFT collection. Fortunately, you can use tools created by Solana Foundation, the Solana -developer community, and Metaplex to simplify the process. Specifically, we’ll +developer community, and Metaplex to simplify the process. Specifically, we'll be using the `@solana/spl-account-compression` SDK, the Metaplex Bubblegum -program, and the Bubblegum program’s corresponding TS SDK -`@metaplex-foundation/mpl-bugglegum`. - - +program `@metaplex-foundation/mpl-bubblegum` through the Umi library from +Metaplex. #### Prepare metadata -Prior to starting, you’ll prepare your NFT metadata similarly to how you would +Prior to starting, you'll prepare your NFT metadata similarly to how you would if you were using a Candy Machine. At its core, an NFT is simply a token with metadata that follows the NFT standard. 
In other words, it should be shaped something like this:

```json
{
-  "name": "12_217_47",
-  "symbol": "RGB",
-  "description": "Random RGB Color",
-  "seller_fee_basis_points": 0,
-  "image": "https://raw.githubusercontent.com/ZYJLiu/rgb-png-generator/master/assets/12_217_47/12_217_47.png",
+  "name": "My Collection",
+  "symbol": "MC",
+  "description": "My Collection description",
+  "image": "https://lvvg33dqzykc2mbfa4ifua75t73tchjnfjbcspp3n3baabugh6qq.arweave.net/XWpt7HDOFC0wJQcQWgP9n_cxHS0qQik9-27CAAaGP6E",
  "attributes": [
    {
-      "trait_type": "R",
-      "value": "12"
+      "trait_type": "Background",
+      "value": "transparent"
    },
    {
-      "trait_type": "G",
-      "value": "217"
+      "trait_type": "Shape",
+      "value": "sphere"
    },
    {
-      "trait_type": "B",
-      "value": "47"
+      "trait_type": "Resolution",
+      "value": "1920x1920"
    }
  ]
}
```

-Depending on your use case, you may be able to generate this dynamically or you
+Depending on your use case, you may be able to generate this dynamically, or you
might want to have a JSON file prepared for each cNFT beforehand. You’ll also
-need any other assets referenced by the JSON, such as the `image` url shown in
+need any other assets referenced by the JSON, such as the `image` URL shown in
the example above.

#### Create Collection NFT

-If you want your cNFTs to be part of a collection, you’ll need to create a
-Collection NFT **before** you start minting cNFTs. This is a traditional NFT
-that acts as the reference binding your cNFTs together into a single collection.
-You can create this NFT using the `@metaplex-foundation/js` library. Just make
-sure you set `isCollection` to `true`.
+NFTs are intrinsically unique, unlike fungible tokens, which have a supply.
+However, it is important to bind NFTs produced in the same series together,
+using a Collection. Collections allow people to discover other NFTs in the same
+collection, and verify that individual NFTs are actually members of the
+Collection (and not look-alikes produced by someone else).
+
+For your cNFTs to be part of a collection, you’ll need to create a
+Collection NFT **before** you start minting cNFTs. This is a traditional Token
+Metadata Program NFT that acts as the reference binding your cNFTs together into
+a single collection. The procedure to create this NFT is outlined in our
+[NFTs with Metaplex lesson](https://solana.com/developers/courses/tokens-and-nfts/nfts-with-metaplex#add-the-nft-to-a-collection).

```typescript
-const collectionNft = await metaplex.nfts().create({
-  uri: someUri,
-  name: "Collection NFT",
-  sellerFeeBasisPoints: 0,
-  updateAuthority: somePublicKey,
-  mintAuthority: somePublicKey,
-  tokenStandard: 0,
-  symbol: "Collection",
-  isMutable: true,
-  isCollection: true,
-});
+const collectionMint = generateSigner(umi);
+
+await createNft(umi, {
+  mint: collectionMint,
+  name: `My Collection`,
+  uri,
+  sellerFeeBasisPoints: percentAmount(0),
+  isCollection: true, // mint as collection NFT
+}).sendAndConfirm(umi);
```

#### Create Merkle tree Account

@@ -315,254 +312,199 @@ the max depth, while the buffer size will determine the number of concurrent
changes (mints, transfers, etc.) within the same slot that can occur to the
tree. In other words, choose the max depth that corresponds to the number of
NFTs you need the tree to hold, then choose one of the options for max buffer
-size based on the traffic you expect you’ll need to support.
+size based on the traffic you expect you'll need to support.

Next, choose the canopy depth. Increasing the canopy depth increases the
-composability of your cNFTs. Any time your or another developer’s code attempts
+composability of your cNFTs. Any time your or another developer's code attempts
to verify a cNFT down the road, the code will have to pass in as many proof
-nodes as there are “layers” in your tree. So for a max depth of 20, you’ll need
+nodes as there are “layers” in your tree. So for a max depth of 20, you'll need
to pass in 20 proof nodes. Not only is this tedious, but since each proof node
-is 32 bytes it’s possible to max out transaction sizes very quickly.
+is 32 bytes it's possible to max out transaction sizes very quickly.

For example, if your tree has a very low canopy depth, an NFT marketplace may
-only be able to support simple NFTs transfers rather than support an onchain
-bidding system for your cNFTs. The canopy effectively caches proof nodes onchain
-so you don’t have to pass all of them into the transaction, allowing for more
-complex transactions.
+only be able to support simple NFT transfers rather than an onchain
+bidding system for your cNFTs. The canopy effectively caches proof nodes
+onchain, so you don’t have to pass all of them into the transaction, allowing
+for more complex transactions.

Increasing any of these three values increases the size of the account, thereby
increasing the cost associated with creating it. Weigh the benefits accordingly
when choosing the values.

-Once you know these values, you can use the `createAllocTreeIx` helper function
-from the `@solana/spl-account-compression` TS SDK to create the instruction for
-creating the empty account.
+Once you know these values, you can use the `createTree` method from the
+`@metaplex-foundation/mpl-bubblegum` package to create your tree. This
+instruction creates and initializes two accounts:

-```typescript
-import { createAllocTreeIx } from "@solana/spl-account-compression"
+1. A `Merkle Tree` account - this holds the Merkle root hash and is used to
+   verify the authenticity of the stored data.
+
+2. A `Tree Config` account - this holds additional data specific to compressed
+   NFTs such as the tree creator, whether the tree is public, and
+   [other fields - see the Bubblegum program source](https://github.com/metaplex-foundation/mpl-bubblegum/blob/42ffed35da6b2a673efacd63030a360eac3ae64e/programs/bubblegum/program/src/state/mod.rs#L17).

-const treeKeypair = Keypair.generate()
+#### Setting up Umi

-const allocTreeIx = await createAllocTreeIx(
-  connection,
-  treeKeypair.publicKey,
-  payer.publicKey,
-  { maxDepth: 20; maxBufferSize: 256 },
-  canopyDepth
-)
+The `mpl-bubblegum` package is a plugin and cannot be used without the Umi
+library from Metaplex. Umi is a framework for making JS/TS clients for onchain
+programs that was created by Metaplex.
+
+Note that Umi implements many concepts differently from web3.js, including
+Keypairs, PublicKeys, and Connections. However, it is easy to convert from
+web3.js versions of these items to the Umi equivalents.
+
+To get started, we need to create an Umi instance:
+
+```typescript
+import { createUmi } from "@metaplex-foundation/umi-bundle-defaults";
+import { clusterApiUrl } from "@solana/web3.js";
+
+const umi = createUmi(clusterApiUrl("devnet"));
```

-Note that this is simply a helper function for calculating the size required by
-the account and creating the instruction to send to the System Program for
-allocating the account. This function doesn’t interact with any
-compression-specific programs yet.
+The above code initializes an empty Umi instance without any signer or plugin
+attached to it.
You can find the exhaustive list of the plugins available +[on this Metaplex docs page](https://developers.metaplex.com/umi/metaplex-umi-plugins) -#### Use Bubblegum to Initialize Your Tree +The next part is to add in our imports and attach a signer to our Umi instance. -With the empty tree account created, you then use the Bubblegum program to -initialize the tree. In addition to the Merkle tree account, Bubblegum creates a -tree config account to add cNFT-specific tracking and functionality. +```typescript +import { dasApi } from "@metaplex-foundation/digital-asset-standard-api"; +import { createTree, mplBubblegum } from "@metaplex-foundation/mpl-bubblegum"; +import { keypairIdentity } from "@metaplex-foundation/umi"; +import { createUmi } from "@metaplex-foundation/umi-bundle-defaults"; +import { getKeypairFromFile } from "@solana-developers/helpers"; +import { clusterApiUrl } from "@solana/web3.js"; + +const umi = createUmi(clusterApiUrl("devnet")); + +// load keypair from local file system +// See https://github.com/solana-developers/helpers?tab=readme-ov-file#get-a-keypair-from-a-keypair-file +const localKeypair = await getKeypairFromFile(); + +// convert to Umi compatible keypair +const umiKeypair = umi.eddsa.createKeypairFromSecretKey(localKeypair.secretKey); + +// load the MPL Bubblegum program, dasApi plugin and assign a signer to our umi instance +umi.use(keypairIdentity(umiKeypair)).use(mplBubblegum()).use(dasApi()); + +console.log("Loaded UMI with Bubblegum"); +``` + +#### Use Bubblegum to Initialize Your Tree -Version 0.7 of the `@metaplex-foundation/mpl-bubblegum` TS SDK provides the -helper function `createCreateTreeInstruction` for calling the `create_tree` -instruction on the Bubblegum program. As part of the call, you’ll need to derive -the `treeAuthority` PDA expected by the program. This PDA uses the tree’s -address as a seed. +With Umi instantiated, we are ready to call the `createTree` method to +instantiate the Merkle tree and tree config accounts. ```typescript -import { - createAllocTreeIx, - SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, - SPL_NOOP_PROGRAM_ID, -} from "@solana/spl-account-compression" -import { - PROGRAM_ID as BUBBLEGUM_PROGRAM_ID, - createCreateTreeInstruction, -} from "@metaplex-foundation/mpl-bubblegum" - -... - -const [treeAuthority, _bump] = PublicKey.findProgramAddressSync( - [treeKeypair.publicKey.toBuffer()], - BUBBLEGUM_PROGRAM_ID -) - -const createTreeIx = createCreateTreeInstruction( - { - treeAuthority, - merkleTree: treeKeypair.publicKey, - payer: payer.publicKey, - treeCreator: payer.publicKey, - logWrapper: SPL_NOOP_PROGRAM_ID, - compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, - }, - { - maxBufferSize: 256, - maxDepth: 20, - public: false, - }, - BUBBLEGUM_PROGRAM_ID -) +const merkleTree = generateSigner(umi); +const builder = await createTree(umi, { + merkleTree, + maxDepth: 14, + maxBufferSize: 64, +}); +await builder.sendAndConfirm(umi); ``` -The list below shows the required input for this helper function: - -- `accounts` - An object representing the accounts required by the instruction. - This includes: - - `treeAuthority` - Bubblegum expects this to be a PDA derived using the - Merkle tree address as a seed - - `merkleTree` - The Merkle tree account - - `payer` - The address paying for transaction fees, rent, etc. 
- `treeCreator` - The address to list as the tree creator
-  - `logWrapper` - The program to use to expose the data to indexers through
-    logs; this should be the address of the SPL Noop program unless you have
-    some other custom implementation
-  - `compressionProgram` - The compression program to use for initializing the
-    Merkle tree; this should be the address of the SPL State Compression program
-    unless you have some other custom implementation
-- `args` - An object representing additional arguments required by the
-  instruction. This includes:
-  - `maxBufferSize` - The max buffer size of the Merkle tree
-  - `maxDepth` - The max depth of the Merkle tree
-  - `public` - When set to `true`, anyone will be able to mint cNFTs from the
-    tree; when set to `false`, only the tree creator or tree delegate will be
-    able to min cNFTs from the tree
-
-When submitted, this will invoke the `create_tree` instruction on the Bubblegum
-program. This instruction does three things:
-
-1. Creates the tree config PDA account
-2. Initializes the tree config account with appropriate initial values
-3. Issues a CPI to the State Compression program to initialize the empty Merkle
-   tree account
-
-Feel free to take a look at the program code
-[here](https://github.com/metaplex-foundation/mpl-bubblegum/blob/main/programs/bubblegum/program/src/lib.rs#L887).
+The three values supplied (`merkleTree`, `maxDepth`, and `maxBufferSize`) are
+required to create the tree, while the rest are optional. For example, the tree
+creator defaults to the Umi instance identity, and the `public` field defaults
+to `false`.
+
+When `public` is set to `true`, anyone can mint from the initialized tree; when
+`false`, only the tree creator can mint from it.
+
+Feel free to look at the code for the
+[create_tree instruction handler](https://github.com/metaplex-foundation/mpl-bubblegum/blob/42ffed35da6b2a673efacd63030a360eac3ae64e/programs/bubblegum/program/src/processor/create_tree.rs#L40)
+and
+[create_tree's expected accounts](https://github.com/metaplex-foundation/mpl-bubblegum/blob/42ffed35da6b2a673efacd63030a360eac3ae64e/programs/bubblegum/program/src/processor/create_tree.rs#L20).

#### Mint cNFTs

With the Merkle tree account and its corresponding Bubblegum tree config account
-initialized, it’s possible to mint cNFTs to the tree. The Bubblegum instruction
-to use will be either `mint_v1` or `mint_to_collection_v1`, depending on whether
-or not you want to the minted cNFT to be part of a collection.
+initialized, it’s possible to mint cNFTs to the tree. The Bubblegum library
+provides two instructions we can use, depending on whether the minted asset
+will belong to a collection.

-Version 0.7 of the `@metaplex-foundation/mpl-bubblegum` TS SDK provides helper
-functions `createMintV1Instruction` and `createMintToCollectionV1Instruction` to
-make it easier for you to create the instructions.
+The two instructions are:

-Both functions will require you to pass in the NFT metadata and a list of
-accounts required to mint the cNFT. Below is an example of minting to a
-collection:
+1. **mintV1**

```typescript
-const mintWithCollectionIx = createMintToCollectionV1Instruction(
-  {
-    payer: payer.publicKey,
-    merkleTree: treeAddress,
-    treeAuthority,
-    treeDelegate: payer.publicKey,
-    leafOwner: destination,
-    leafDelegate: destination,
-    collectionAuthority: payer.publicKey,
-    collectionAuthorityRecordPda: BUBBLEGUM_PROGRAM_ID,
-    collectionMint: collectionDetails.mint,
-    collectionMetadata: collectionDetails.metadata,
-    editionAccount: collectionDetails.masterEditionAccount,
-    compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
-    logWrapper: SPL_NOOP_PROGRAM_ID,
-    bubblegumSigner,
-    tokenMetadataProgram: TOKEN_METADATA_PROGRAM_ID,
-  },
-  {
-    metadataArgs: Object.assign(nftMetadata, {
-      collection: { key: collectionDetails.mint, verified: false },
-    }),
+await mintV1(umi, {
+  leafOwner,
+  merkleTree,
+  metadata: {
+    name: "My Compressed NFT",
+    uri: "https://example.com/my-cnft.json",
+    sellerFeeBasisPoints: 0, // 0%
+    collection: none(),
+    creators: [
+      { address: umi.identity.publicKey, verified: false, share: 100 },
+    ],
  },
-);
+}).sendAndConfirm(umi);
```

-Notice that there are two arguments for the helper function: `accounts` and
-`args`. The `args` parameter is simply the NFT metadata, while `accounts` is an
-object listing the accounts required by the instruction. There are admittedly a
-lot of them:
-
-- `payer` - the account that will pay for the transaction fees, rent, etc.
-- `merkleTree` - the Merkle tree account
-- `treeAuthority` - the tree authority; should be the same PDA you derived
-  previously
-- `treeDelegate` - the tree delegate; this is usually the same as the tree
-  creator
-- `leafOwner` - the desired owner of the compressed NFT being minted
-- `leafDelegate` - the desired delegate of the compressed NFT being minted; this
-  is usually the same as the leaf owner
-- `collectionAuthority` - the authority of the collection NFT
-- `collectionAuthorityRecordPda` - optional collection authority record PDA;
-  there typically is none, in which case you should put the Bubblegum program
-  address
-- `collectionMint` - the mint account for the collection NFT
-- `collectionMetadata` - the metadata account for the collection NFT
-- `editionAccount` - the master edition account of the collection NFT
-- `compressionProgram` - the compression program to use; this should be the
-  address of the SPL State Compression program unless you have some other custom
-  implementation
-- `logWrapper` - the program to use to expose the data to indexers through logs;
-  this should be the address of the SPL Noop program unless you have some other
-  custom implementation
-- `bubblegumSigner` - a PDA used by the Bubblegrum program to handle collection
-  verification
-- `tokenMetadataProgram` - the token metadata program that was used for the
-  collection NFT; this is usually always the Metaplex Token Metadata program
-
-Minting without a collection requires fewer accounts, none of which are
-exclusive to minting without a collection. You can take a look at the example
-below.
+2. **mintToCollectionV1**

```typescript
-const mintWithoutCollectionIx = createMintV1Instruction(
-  {
-    payer: payer.publicKey,
-    merkleTree: treeAddress,
-    treeAuthority,
-    treeDelegate: payer.publicKey,
-    leafOwner: destination,
-    leafDelegate: destination,
-    compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
-    logWrapper: SPL_NOOP_PROGRAM_ID,
-  },
-  {
-    message: nftMetadata,
+await mintToCollectionV1(umi, {
+  leafOwner,
+  merkleTree,
+  collectionMint,
+  metadata: {
+    name: "My Compressed NFT",
+    uri: "https://example.com/my-cnft.json",
+    sellerFeeBasisPoints: 0, // 0%
+    collection: { key: collectionMint, verified: false },
+    creators: [
+      { address: umi.identity.publicKey, verified: false, share: 100 },
+    ],
  },
-);
+}).sendAndConfirm(umi);
```

+Both functions require you to pass in the NFT metadata and the accounts needed
+to mint the cNFT, such as `leafOwner` and `merkleTree`.
+
### Interact with cNFTs

-It’s important to note that cNFTs _are not_ SPL tokens. That means your code
+It's important to note that cNFTs _are not_ SPL tokens. That means your code
needs to follow different conventions to handle cNFT functionality like
fetching, querying, transferring, etc.

#### Fetch cNFT data

The simplest way to fetch data from an existing cNFT is to use the
-[Digital Asset Standard Read API](https://solana.com/developers/guides/javascript/compressed-nfts#reading-compressed-nfts-metadata)
-(Read API). Note that this is separate from the standard JSON RPC. To use the
-Read API, you’ll need to use a supporting RPC Provider. Metaplex maintains a
-(likely non-exhaustive)
-[list of RPC providers](https://developers.metaplex.com/bubblegum/rpcs) that
-support the Read API. In this lesson we’ll be using
+[Digital Asset Standard Read API](https://developers.metaplex.com/das-api) (Read
+API). Note that this is separate from the standard JSON RPC. To use the Read
+API, you’ll need to use a supporting RPC Provider. Metaplex maintains a (likely
+non-exhaustive)
+[list of RPC providers that support the DAS Read API](https://developers.metaplex.com/rpc-providers#rpcs-with-das-support).
+
+In this lesson, we’ll be using
[Helius](https://docs.helius.dev/compression-and-das-api/digital-asset-standard-das-api)
as they have free support for Devnet.

+You might need to update your RPC connection endpoint in the Umi instantiation:
+
+```typescript
+const umi = createUmi(
+  "https://devnet.helius-rpc.com/?api-key=YOUR-HELIUS-API-KEY",
+);
+```
+
To use the Read API to fetch a specific cNFT, you need to have the cNFT’s asset
ID. However, after minting cNFTs, you’ll have at most two pieces of information:

1. The transaction signature
2. The leaf index (possibly)

-The only real guarantee is that you’ll have the transaction signature. It is
+The only real guarantee is that you'll have the transaction signature. It is
**possible** to locate the leaf index from there, but it involves some fairly
complex parsing. The short story is you must retrieve the relevant instruction
-logs from the Noop program and parse them to find the leaf index. We’ll cover
+logs from the `Noop program` and parse them to find the leaf index. We’ll cover
this more in depth in a future lesson. For now, we’ll assume you know the leaf
index.

@@ -571,7 +513,7 @@ controlled by your code and can be set up sequentially so that your code can
track which index is going to be used for each mint. I.e. the first mint will
use index 0, the second index 1, etc.
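To make that bookkeeping concrete, here is a minimal sketch of a sequential mint
loop that records the leaf index each mint consumes. It assumes the `umi` and
`merkleTree` values from the earlier setup; the `metadataUris` array and the
`mintedLeafIndexes` list are hypothetical names used purely for illustration:

```typescript
import { mintV1 } from "@metaplex-foundation/mpl-bubblegum";
import { none } from "@metaplex-foundation/umi";

// hypothetical list of prepared offchain metadata URIs
const metadataUris = [
  "https://example.com/1.json",
  "https://example.com/2.json",
];

// the tree starts empty, so the first mint lands at leaf index 0
let nextLeafIndex = 0;
const mintedLeafIndexes: number[] = [];

for (const uri of metadataUris) {
  // mints are awaited one at a time so the index order stays deterministic
  await mintV1(umi, {
    leafOwner: umi.identity.publicKey,
    merkleTree,
    metadata: {
      name: "My Compressed NFT",
      uri,
      sellerFeeBasisPoints: 0,
      collection: none(),
      creators: [
        { address: umi.identity.publicKey, verified: false, share: 100 },
      ],
    },
  }).sendAndConfirm(umi);

  // record the index this mint consumed, then advance the counter
  mintedLeafIndexes.push(nextLeafIndex);
  nextLeafIndex += 1;
}
```

Each recorded index can then be fed directly into the asset ID derivation
described next.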
-Once you have the leaf index, you can derive the cNFT’s corresponding asset ID. +Once you have the leaf index, you can derive the cNFT's corresponding asset ID. When using Bubblegum, the asset ID is a PDA derived using the Bubblegum program ID and the following seeds: @@ -579,36 +521,36 @@ ID and the following seeds: 2. The Merkle tree address 3. The leaf index -The indexer essentially observes transaction logs from the Noop program as they -happen and stores the cNFT metadata that was hashed and stored in the Merkle -tree. This enables them to surface that data when requested. This asset id is -what the indexer uses to identify the particular asset. +The indexer essentially observes transaction logs from the `Noop program` as +they happen and stores the cNFT metadata that was hashed and stored in the +Merkle tree. This enables them to surface that data when requested. This asset +ID is what the indexer uses to identify the particular asset. -For simplicity, you can just use the `getLeafAssetId` helper function from the -Bubblegum SDK. With the asset ID, fetching the cNFT is fairly straightforward. -Simply use the `getAsset` method provided by the supporting RPC provider: +For simplicity, you can just use the `findLeafAssetIdPda` helper function from +the Bubblegum library. ```typescript -const assetId = await getLeafAssetId(treeAddress, new BN(leafIndex)); -const response = await fetch(process.env.RPC_URL, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - jsonrpc: "2.0", - id: "my-id", - method: "getAsset", - params: { - id: assetId, - }, - }), +const [assetId, bump] = await findLeafAssetIdPda(umi, { + merkleTree, + leafIndex, +}); +``` + +With the asset ID, fetching the cNFT is fairly straightforward. Simply use the +`getAsset` method provided by the supporting RPC provider and the `dasApi` +library: + +```typescript +const [assetId, bump] = await findLeafAssetIdPda(umi, { + merkleTree, + leafIndex, }); -const { result } = await response.json(); -console.log(JSON.stringify(result, null, 2)); +const rpcAsset = await umi.rpc.getAsset(assetId); ``` -This will return a JSON object that is comprehensive of what a traditional NFT’s -on- and off-chain metadata would look like combined. For example, you can find +This will return a JSON object that is comprehensive of what a traditional NFT's +on- and offchain metadata would look like combined. For example, you can find the cNFT attributes at `content.metadata.attributes` or the image at `content.files.uri`. @@ -626,30 +568,30 @@ and more. For example, Helius supports the following methods: - `getAssetsByCreator` - `getAssetsByGroup` -We won’t go over most of these directly, but be sure to look through the +We won't go over most of these directly, but be sure to look through the [Helius docs](https://docs.helius.dev/compression-and-das-api/digital-asset-standard-das-api) to learn how to use them correctly. #### Transfer cNFTs Just as with a standard SPL token transfer, security is paramount. An SPL token -transfer, however, makes verifying transfer authority very easy. It’s built into -the SPL Token program and standard signing. A compressed token’s ownership is +transfer, however, makes verifying transfer authority very easy. It's built into +the SPL Token program and standard signing. A compressed token's ownership is more difficult to verify. The actual verification will happen program-side, but your client-side code needs to provide additional information to make it possible. 
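Concretely, that additional information is the cNFT's hashed data plus its
Merkle proof, both of which a DAS-enabled RPC can supply. As a rough sketch,
assuming the `dasApi` plugin is attached to `umi` and `assetId` was derived as
shown above, fetching both looks like this:

```typescript
// fetch the indexed asset record and its Merkle proof from a DAS-enabled RPC
const rpcAsset = await umi.rpc.getAsset(assetId);
const rpcAssetProof = await umi.rpc.getAssetProof(assetId);

// the data hash and proof nodes are what the program checks against the
// onchain root before allowing a transfer
console.log("data hash:", rpcAsset.compression.data_hash);
console.log("proof nodes:", rpcAssetProof.proof.length);
```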
While there is a Bubblegum `createTransferInstruction` helper function, there is more assembly required than usual. Specifically, the Bubblegum program needs to -verify that the entirety of the cNFT’s data is what the client asserts before a +verify that the entirety of the cNFT's data is what the client asserts before a transfer can occur. The entirety of the cNFT data has been hashed and stored as a single leaf on the Merkle tree, and the Merkle tree is simply a hash of all -the tree’s leafs and branches. Because of this, you can’t simply tell the -program what account to look at and have it compare that account’s `authority` +the tree's leafs and branches. Because of this, you can't simply tell the +program what account to look at and have it compare that account's `authority` or `owner` field to the transaction signer. Instead, you need to provide the entirety of the cNFT data and any of the Merkle -tree’s proof information that isn’t stored in the canopy. That way, the program +tree's proof information that isn't stored in the canopy. That way, the program can independently prove that the provided cNFT data, and therefore the cNFT owner, is accurate. Only then can the program safely determine if the transaction signer should, in fact, be allowed to transfer the cNFT. @@ -662,974 +604,516 @@ In broad terms, this involves a five step process: 4. Prepare the asset proof as a list of `AccountMeta` objects 5. Build and send the Bubblegum transfer instruction -The first two steps are very similar. Using your supporting RPC provider, use -the `getAsset` and `getAssetProof` methods to fetch the asset data and proof, -respectively. +Fortunately, we can make use of the `transfer` method which takes care of all +these steps. ```typescript -const assetDataResponse = await fetch(process.env.RPC_URL, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - jsonrpc: "2.0", - id: "my-id", - method: "getAsset", - params: { - id: assetId, - }, - }), -}); -const assetData = (await assetDataResponse.json()).result; - -const assetProofResponse = await fetch(process.env.RPC_URL, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - jsonrpc: "2.0", - id: "my-id", - method: "getAssetProof", - params: { - id: assetId, - }, - }), -}); -const assetProof = (await assetProofResponse.json()).result; +const assetWithProof = await getAssetWithProof(umi, assetId); + +await transfer(umi, { + ...assetWithProof, + leafOwner: currentLeafOwner, + newLeafOwner: newLeafOwner.publicKey, +}).sendAndConfirm(umi); ``` -The third step is to fetch the Merkle tree account. The simplest way to do this -is using the `ConcurrentMerkleTreeAccount` type from -`@solana/spl-account-compression`: +### Conclusion -```typescript -const treePublicKey = new PublicKey(assetData.compression.tree); +We've covered the primary skills needed to interact with cNFTs, but haven't been +fully comprehensive. You can also use Bubblegum to do things like burn, verify, +delegate, and more. We won't go through these, but these instructions are +similar to the mint and transfer process. If you need this additional +functionality, take a look at the +[Bubblegum docs](https://developers.metaplex.com/bubblegum) on how to leverage +the helper functions it provides. -const treeAccount = await ConcurrentMerkleTreeAccount.fromAccountAddress( - connection, - treePublicKey, -); +## Lab + +Let's jump in and practice creating and working with cNFTs. 
Together, we'll +build as simple a script as possible that will let us mint a cNFT collection +from a Merkle tree. + +#### 1. Create a new project + +To begin create and initialize an empty NPM project and change directory into +it. + +```bash +mkdir cnft-demo +npm init -y +cd cnft-demo ``` -Step four is the most conceptually challenging step. Using the three pieces of -information gathered, you’ll need to assemble the proof path for the cNFT’s -corresponding leaf. The proof path is represented as accounts passed to the -program instruction. The program uses each of the account addresses as proof -nodes to prove the leaf data is what you say it is. +Install all the required dependencies -The full proof is provided by the indexer as shown above in `assetProof`. -However, you can exclude the same number of tail-end accounts from the proof as -the depth of the canopy. +```bash +npm i @solana/web3.js@1 @solana-developers/helpers@2.5.2 @metaplex-foundation/mpl-token-metadata @metaplex-foundation/mpl-bubblegum @metaplex-foundation/digital-asset-standard-api @metaplex-foundation/umi-bundle-defaults -```typescript -const canopyDepth = treeAccount.getCanopyDepth() || 0; - -const proofPath: AccountMeta[] = assetProof.proof - .map((node: string) => ({ - pubkey: new PublicKey(node), - isSigner: false, - isWritable: false, - })) - .slice(0, assetProof.proof.length - canopyDepth); +npm i --save-dev esrun ``` -Finally, you can assemble the transfer instruction. The instruction helper -function, `createTransferInstruction`, requires the following arguments: - -- `accounts` - a list of instruction accounts, as expected; they are as follows: - - `merkleTree` - the Merkle tree account - - `treeAuthority` - the Merkle tree authority - - `leafOwner` - the owner of the leaf (cNFT) in question - - `leafDelegate` - the delegate of the leaf (cNFT) in question; if no delegate - has been added then this should be the same as `leafOwner` - - `newLeafOwner` - the address of the new owner post-transfer - - `logWrapper` - the program to use to expose the data to indexers through - logs; this should be the address of the SPL Noop program unless you have - some other custom implementation - - `compressionProgram` - the compression program to use; this should be the - address of the SPL State Compression program unless you have some other - custom implementation - - `anchorRemainingAccounts` - this is where you add the proof path -- `args` - additional arguments required by the instruction; they are: - - `root` - the root Merkle tree node from the asset proof; this is provided by - the indexer as a string and must be converted to bytes first - - `dataHash` - the hash of the asset data retrieved from the indexer; this is - provided by the indexer as a string and must be converted to bytes first - - `creatorHash` - the hash of the cNFT creator as retrieved from the indexer; - this is provided by the indexer as a string and must be converted to bytes - first - - `nonce` - used to ensure that no two leafs have the same hash; this value - should be the same as `index` - - `index` - the index where the cNFT’s leaf is located on the Merkle tree - -An example of this is shown below. Note that the first 3 lines of code grab -additional information nested in the objects shown previously so they are ready -to go when assembling the instruction itself. 
+In this first script, we will learn about creating a tree, hence let's create +the file `create-tree.ts` -```typescript -const treeAuthority = treeAccount.getAuthority(); -const leafOwner = new PublicKey(assetData.ownership.owner); -const leafDelegate = assetData.ownership.delegate - ? new PublicKey(assetData.ownership.delegate) - : leafOwner; - -const transferIx = createTransferInstruction( - { - merkleTree: treePublicKey, - treeAuthority, - leafOwner, - leafDelegate, - newLeafOwner: receiver, - logWrapper: SPL_NOOP_PROGRAM_ID, - compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, - anchorRemainingAccounts: proofPath, - }, - { - root: [...new PublicKey(assetProof.root.trim()).toBytes()], - dataHash: [ - ...new PublicKey(assetData.compression.data_hash.trim()).toBytes(), - ], - creatorHash: [ - ...new PublicKey(assetData.compression.creator_hash.trim()).toBytes(), - ], - nonce: assetData.compression.leaf_id, - index: assetData.compression.leaf_id, - }, -); +```bash +mkdir src && touch src/create-tree.ts ``` -### Conclusion +This Umi instantiation code will be repeated in a lot of files, so feel free to +create a wrapper file to instantiate it: -We’ve covered the primary skills needed to interact with cNFTs, but haven’t been -fully comprehensive. You can also use Bubblegum to do things like burn, verify, -delegate, and more. We won’t go through these, but these instructions are -similar to the mint and transfer process. If you need this additional -functionality, take a look at the -[Bubblegum client source code](https://github.com/metaplex-foundation/mpl-bubblegum/tree/main/clients/js-solita) -and leverage the helper functions it provides. +```typescript filename="create-tree.ts" +import { dasApi } from "@metaplex-foundation/digital-asset-standard-api"; +import { createTree, mplBubblegum } from "@metaplex-foundation/mpl-bubblegum"; +import { generateSigner, keypairIdentity } from "@metaplex-foundation/umi"; +import { createUmi } from "@metaplex-foundation/umi-bundle-defaults"; +import { + getExplorerLink, + getKeypairFromFile, +} from "@solana-developers/helpers"; +import { clusterApiUrl } from "@solana/web3.js"; -Keep in mind that compression is fairly new. Available tooling will evolve -rapidly but the principles you’ve learned in this lesson will likely remain the -same. These principles can also be broadened to arbitrary state compression, so -be sure to master them here so you’re ready for more fun stuff in future -lessons! +const umi = createUmi(clusterApiUrl("devnet")); -## Lab +// load keypair from local file system +// See https://github.com/solana-developers/helpers?tab=readme-ov-file#get-a-keypair-from-a-keypair-file +const localKeypair = await getKeypairFromFile(); -Let’s jump in and practice creating and working with cNFTs. Together, we’ll -build as simple a script as possible that will let us mint a cNFT collection -from a Merkle tree. +// convert to Umi compatible keypair +const umiKeypair = umi.eddsa.createKeypairFromSecretKey(localKeypair.secretKey); + +// load the MPL Bubblegum program, dasApi plugin and assign a signer to our umi instance +umi.use(keypairIdentity(umiKeypair)).use(mplBubblegum()).use(dasApi()); +``` -#### 1. Get the starter code +In the code above, we load the user's keypair wallet from the system wallet +located at `.config/solana/id.json`, instantiate a new Umi instance and assign +the keypair to it. We also assign the Bubblegum and dasApi plugins to it as +well. 
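One practical note before moving on: creating the tree account in the next step
costs devnet SOL, so the loaded keypair needs a balance. As a hedged sketch,
assuming the `airdropIfRequired` helper from `@solana-developers/helpers`, you
could top up the wallet first:

```typescript
import { airdropIfRequired } from "@solana-developers/helpers";
import { clusterApiUrl, Connection, LAMPORTS_PER_SOL } from "@solana/web3.js";

// airdrop 1 SOL on devnet if the wallet balance is below 0.5 SOL
const connection = new Connection(clusterApiUrl("devnet"));
await airdropIfRequired(
  connection,
  localKeypair.publicKey,
  1 * LAMPORTS_PER_SOL,
  0.5 * LAMPORTS_PER_SOL,
);
```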
-First things first, clone the starter code from the `starter` branch of our -[cNFT lab repository](https://github.com/Unboxed-Software/solana-cnft-demo). +#### 2. Create the Merkle tree account -`git clone https://github.com/Unboxed-Software/solana-cnft-demo.git` +We’ll start by creating the Merkle tree account. To do this we will use the +`createTree` method from Metaplex Bubblegum program. -`cd solana-cnft-demo` +This function takes in three default values -`npm install` +- `merkleTree` - The Merkle tree account address +- `maxDepth` - Determines the max number of leaves the tree will hold and + therefore the max number of cNFTs that the tree can contain. +- `maxBufferSize` - Determines how many concurrent changes can occur in the tree + in parallel. -Take some time to familiarize yourself with the starter code provided. Most -important are the helper functions provided in `utils.ts` and the URIs provided -in `uri.ts`. +You can also supply in optional fields such as -The `uri.ts` file provides 10k URIs that you can use for the off-chain portion -of your NFT metadata. You can, of course, create your own metadata. But this -lesson isn’t explicitly about preparing metadata so we’ve provided some for you. +- `treeCreator` - The address of the tree authority, defaults to current + `umi.identity` instance. +- `public` - Determines whether anyone else apart from the tree creator will be + able to mint cNFTs from the tree. -The `utils.ts` file has a few helper functions to keep you from writing more -unnecessary boilerplate than you need to. They are as follows: +```typescript filename="create-tree.ts" +const merkleTree = generateSigner(umi); +const builder = await createTree(umi, { + merkleTree, + maxDepth: 14, + maxBufferSize: 64, +}); +await builder.sendAndConfirm(umi); -- `getOrCreateKeypair` will create a new keypair for you and save it to a `.env` - file, or if there’s already a private key in the `.env` file it will - initialize a keypair from that. -- `airdropSolIfNeeded` will airdrop some Devnet SOL to a specified address if - that address’s balance is below 1 SOL. -- `createNftMetadata` will create the NFT metadata for a given creator public - key and index. The metadata it’s getting is just dummy metadata using the URI - corresponding to the provided index from the `uri.ts` list of URIs. -- `getOrCreateCollectionNFT` will fetch the collection NFT from the address - specified in `.env` or if there is none it will create a new one and add the - address to `.env`. +let explorerLink = getExplorerLink("address", merkleTree.publicKey, "devnet"); +console.log(`Explorer link: ${explorerLink}`); +console.log("Merkle tree address is :", merkleTree.publicKey); +console.log("✅ Finished successfully!"); +``` -Finally, there’s some boilerplate in `index.ts` that calls creates a new Devnet -connection, calls `getOrCreateKeypair` to initialize a “wallet,” and calls -`airdropSolIfNeeded` to fund the wallet if its balance is low. +Run the `create-tree.ts` script using esrun -We will be writing all of our code in the `index.ts`. +```bash +npx esrun create-tree.ts +``` -#### 2. Create the Merkle tree account +Make sure to remember the Merkle tree address as we will be using it in the next +step when minting compressed NFTs. -We’ll start by creating the Merkle tree account. Let’s encapsulate this in a -function that will eventually create _and_ initialize the account. We’ll put it -below our `main` function in `index.ts`. Let’s call it -`createAndInitializeTree`. 
For this function to work, it will need the following -parameters: - -- `connection` - a `Connection` to use for interacting with the network. -- `payer` - a `Keypair` that will pay for transactions. -- `maxDepthSizePair` - a `ValidDepthSizePair`. This type comes from - `@solana/spl-account-compression`. It’s a simple object with properties - `maxDepth` and `maxBufferSize` that enforces a valid combination of the two - values. -- `canopyDepth` - a number for the canopy depth In the body of the function, - we’ll generate a new address for the tree, then create the instruction for - allocating a new Merkle tree account by calling `createAllocTreeIx` from - `@solana/spl-account-compression`. +Your output will be similar to this -```typescript -async function createAndInitializeTree( - connection: Connection, - payer: Keypair, - maxDepthSizePair: ValidDepthSizePair, - canopyDepth: number, -) { - const treeKeypair = Keypair.generate(); - - const allocTreeIx = await createAllocTreeIx( - connection, - treeKeypair.publicKey, - payer.publicKey, - maxDepthSizePair, - canopyDepth, - ); -} +```bash +Explorer link: https://explorer.solana.com/address/ZwzNxXw83PUmWSypXmqRH669gD3hF9rEjHWPpVghr5h?cluster=devnet +Merkle tree address is : ZwzNxXw83PUmWSypXmqRH669gD3hF9rEjHWPpVghr5h +✅ Finished successfully! ``` -#### 3. Use Bubblegum to initialize the Merkle tree and create the tree config account - -With the instruction for creating the tree ready to go, we can create an -instruction for invoking `create_tree` on the Bubblegum program. This will -initialize the Merkle tree account _and_ create a new tree config account on the -Bubblegum program. - -This instruction needs us to provide the following: - -- `accounts` - an object of required accounts; this includes: - - `treeAuthority` - this should be a PDA derived with the Merkle tree address - and the Bubblegum program - - `merkleTree` - the address of the Merkle tree - - `payer` - the transaction fee payer - - `treeCreator` - the address of the tree creator; we’ll make this the same as - `payer` - - `logWrapper` - make this the `SPL_NOOP_PROGRAM_ID` - - `compressionProgram` - make this the `SPL_ACCOUNT_COMPRESSION_PROGRAM_ID` -- `args` - a list of instruction arguments; this includes: - - `maxBufferSize` - the buffer size from our function’s `maxDepthSizePair` - parameter - - `maxDepth` - the max depth from our function’s `maxDepthSizePair` parameter - - `public` - whether or no the tree should be public; we’ll set this to - `false` - -Finally, we can add both instructions to a transaction and submit the -transaction. Keep in mind that the transaction needs to be signed by both the -`payer` and the `treeKeypair`. +Congratulations! You've created a Bubblegum tree. Follow the Explorer link to +make sure that the process finished successfully, -```typescript -async function createAndInitializeTree( - connection: Connection, - payer: Keypair, - maxDepthSizePair: ValidDepthSizePair, - canopyDepth: number, -) { - const treeKeypair = Keypair.generate(); - - const allocTreeIx = await createAllocTreeIx( - connection, - treeKeypair.publicKey, - payer.publicKey, - maxDepthSizePair, - canopyDepth, - ); - - const [treeAuthority, _bump] = PublicKey.findProgramAddressSync( - [treeKeypair.publicKey.toBuffer()], - BUBBLEGUM_PROGRAM_ID, - ); - - const createTreeIx = createCreateTreeInstruction( +![Solana Explorer with details about created Merkle tree](/public/assets/courses/unboxed/solana-explorer-create-tree.png) + +#### 3. 
Mint cNFTs to your tree
+
+Believe it or not, that's all you needed to do to set up your tree to mint
+compressed NFTs! Now let's turn our attention to minting.
+
+First, let's create a new file called `mint-compressed-nft-to-collection.ts`,
+add our imports, and instantiate Umi:
+
+```typescript filename="mint-compressed-nft-to-collection.ts"
+import { dasApi } from "@metaplex-foundation/digital-asset-standard-api";
+import {
+  findLeafAssetIdPda,
+  LeafSchema,
+  mintToCollectionV1,
+  mplBubblegum,
+  parseLeafFromMintToCollectionV1Transaction,
+} from "@metaplex-foundation/mpl-bubblegum";
+import {
+  keypairIdentity,
+  publicKey as UMIPublicKey,
+} from "@metaplex-foundation/umi";
+import { createUmi } from "@metaplex-foundation/umi-bundle-defaults";
+import { getKeypairFromFile } from "@solana-developers/helpers";
+import { clusterApiUrl } from "@solana/web3.js";
+
+const umi = createUmi(clusterApiUrl("devnet"));
+
+// load keypair from local file system
+// See https://github.com/solana-developers/helpers?tab=readme-ov-file#get-a-keypair-from-a-keypair-file
+const localKeypair = await getKeypairFromFile();
+
+// convert to Umi compatible keypair
+const umiKeypair = umi.eddsa.createKeypairFromSecretKey(localKeypair.secretKey);
+
+// load the MPL Bubblegum program, dasApi plugin and assign a signer to our umi instance
+umi.use(keypairIdentity(umiKeypair)).use(mplBubblegum()).use(dasApi());
+```
+
+I am going to be
+[recycling a Collection NFT](https://explorer.solana.com/address/D2zi1QQmtZR5fk7wpA1Fmf6hTY2xy8xVMyNgfq6LsKy1?cluster=devnet)
+I already created in the NFTs with Metaplex lesson, but if you'd like to create
+a new collection for this lesson, check out the code
+[on this repo](https://github.com/solana-developers/professional-education/blob/main/labs/metaplex-umi/create-collection.ts).
+
+Find the code to create a Metaplex Collection NFT in our [NFTs with Metaplex lesson](https://solana.com/developers/courses/tokens-and-nfts/nfts-with-metaplex#add-the-nft-to-a-collection).
+
+To mint a compressed NFT to a collection, we will need:
+
+- `leafOwner` - The recipient of the compressed NFT
+
+- `merkleTree` - The Merkle tree address we created in the previous step
+
+- `collection` - The collection our cNFT will belong to. This is not required,
+  and you can leave it out if your cNFT doesn't belong to a collection.
+
+- `metadata` - Your offchain metadata. This lesson won't focus on how to
+  prepare your metadata, but you can check out the
+  [recommended structure from Metaplex](https://developers.metaplex.com/token-metadata/token-standard#the-non-fungible-standard).
+
+Our cNFT will use this structure we already prepared earlier. 
+
+```json filename="nft.json"
+{
+  "name": "My NFT",
+  "symbol": "MN",
+  "description": "My NFT Description",
+  "image": "https://lycozm33rkk5ozjqldiuzc6drazmdp5d5g3g7foh3gz6rz5zp7va.arweave.net/XgTss3uKlddlMFjRTIvDiDLBv6Pptm-Vx9mz6Oe5f-o",
+  "attributes": [
     {
-      treeAuthority,
-      merkleTree: treeKeypair.publicKey,
-      payer: payer.publicKey,
-      treeCreator: payer.publicKey,
-      logWrapper: SPL_NOOP_PROGRAM_ID,
-      compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+      "trait_type": "Background",
+      "value": "transparent"
     },
     {
-      maxBufferSize: maxDepthSizePair.maxBufferSize,
-      maxDepth: maxDepthSizePair.maxDepth,
-      public: false,
-    },
-  );
-
-  const tx = new Transaction().add(allocTreeIx, createTreeIx);
-  tx.feePayer = payer.publicKey;
-
-  try {
-    const txSignature = await sendAndConfirmTransaction(
-      connection,
-      tx,
-      [treeKeypair, payer],
-      {
-        commitment: "confirmed",
-        skipPreflight: true,
-      },
-    );
-
-    console.log(`https://explorer.solana.com/tx/${txSignature}?cluster=devnet`);
-
-    console.log("Tree Address:", treeKeypair.publicKey.toBase58());
-
-    return treeKeypair.publicKey;
-  } catch (err: any) {
-    console.error("\nFailed to create Merkle tree:", err);
-    throw err;
-  }
+      "trait_type": "Shape",
+      "value": "sphere"
+    }
+  ]
 }
 ```
 
-If you want to test what you have so far, feel free to call
-`createAndInitializeTree` from `main` and provide small values for the max depth
-and max buffer size.
+Putting it all into code, we will have:
 
-```typescript
-async function main() {
-  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
-  const wallet = await getOrCreateKeypair("Wallet_1");
-  await airdropSolIfNeeded(wallet.publicKey);
-
-  const maxDepthSizePair: ValidDepthSizePair = {
-    maxDepth: 3,
-    maxBufferSize: 8,
-  };
-
-  const canopyDepth = 0;
-
-  const treeAddress = await createAndInitializeTree(
-    connection,
-    wallet,
-    maxDepthSizePair,
-    canopyDepth,
-  );
-}
+```typescript filename="mint-compressed-nft-to-collection.ts"
+const merkleTree = UMIPublicKey("ZwzNxXw83PUmWSypXmqRH669gD3hF9rEjHWPpVghr5h");
+
+const collectionMint = UMIPublicKey(
+  "D2zi1QQmtZR5fk7wpA1Fmf6hTY2xy8xVMyNgfq6LsKy1",
+);
+
+const uintSig = (
+  await mintToCollectionV1(umi, {
+    leafOwner: umi.identity.publicKey,
+    merkleTree,
+    collectionMint,
+    metadata: {
+      name: "My NFT",
+      uri: "https://chocolate-wet-narwhal-846.mypinata.cloud/ipfs/QmeBRVEmASS3pyK9YZDkRUtAham74JBUZQE3WD4u4Hibv9",
+      sellerFeeBasisPoints: 0, // 0%
+      collection: { key: collectionMint, verified: false },
+      creators: [
+        {
+          address: umi.identity.publicKey,
+          verified: false,
+          share: 100,
+        },
+      ],
+    },
+  }).sendAndConfirm(umi)
+).signature;
+
+// base58.deserialize returns a [value, bytesRead] tuple - take the string
+const b58Sig = base58.deserialize(uintSig)[0];
+console.log(b58Sig);
 ```
 
-Keep in mind that Devnet SOL is throttled so if you test too many times you
-might run out of Devnet SOL before we get to minting. To test, in your terminal
-run the following:
-
-`npm run start`
-
-#### 4. Mint cNFTs to your tree
-
-Believe it or not, that’s all you needed to do to set up your tree to compressed
-NFTs! Now let’s turn our attention to minting.
-
-First, let’s declare a function called `mintCompressedNftToCollection`. It will
-need the following parameters:
-
-- `connection` - a `Connection` to use for interacting with the network.
-- `payer` - a `Keypair` that will pay for transactions. 
-- `treeAddress` - the Merkle tree’s address
-- `collectionDetails` - the details of the collection as type
-  `CollectionDetails` from `utils.ts`
-- `amount` - the number of cNFTs to mint
-
-The body of this function will do the following:
-
-1. Derive the tree authority just like before. Again, this is a PDA derived from
-   the Merkle tree address and the Bubblegum program.
-2. Derive the `bubblegumSigner`. This is a PDA derived from the string
-   `"collection_cpi"` and the Bubblegum program and is essential for minting to
-   a collection.
-3. Create the cNFT metadata by calling `createNftMetadata` from our `utils.ts`
-   file.
-4. Create the mint instruction by calling `createMintToCollectionV1Instruction`
-   from the Bubblegum SDK.
-5. Build and send a transaction with the mint instruction
-6. Repeat steps 3-6 `amount` number of times
-
-The `createMintToCollectionV1Instruction` takes two arguments: `accounts` and
-`args`. The latter is simply the NFT metadata. As with all complex instructions,
-the primary hurdle is knowing which accounts to provide. So let’s go through
-them real quick:
-
-- `payer` - the account that will pay for the transaction fees, rent, etc.
-- `merkleTree` - the Merkle tree account
-- `treeAuthority` - the tree authority; should be the same PDA you derived
-  previously
-- `treeDelegate` - the tree delegate; this is usually the same as the tree
-  creator
-- `leafOwner` - the desired owner of the compressed NFT being minted
-- `leafDelegate` - the desired delegate of the compressed NFT being minted; this
-  is usually the same as the leaf owner
-- `collectionAuthority` - the authority of the collection NFT
-- `collectionAuthorityRecordPda` - optional collection authority record PDA;
-  there typically is none, in which case you should put the Bubblegum program
-  address
-- `collectionMint` - the mint account for the collection NFT
-- `collectionMetadata` - the metadata account for the collection NFT
-- `editionAccount` - the master edition account of the collection NFT
-- `compressionProgram` - the compression program to use; this should be the
-  address of the SPL State Compression program unless you have some other custom
-  implementation
-- `logWrapper` - the program to use to expose the data to indexers through logs;
-  this should be the address of the SPL Noop program unless you have some other
-  custom implementation
-- `bubblegumSigner` - a PDA used by the Bubblegrum program to handle collection
-  verification
-- `tokenMetadataProgram` - the token metadata program that was used for the
-  collection NFT; this is usually always the Metaplex Token Metadata program
-
-When you put it all together, this is what it’ll look like:
+The difference from the first script is that this time we hold on to the byte
+array representing the transaction signature. 
-```typescript
-async function mintCompressedNftToCollection(
-  connection: Connection,
-  payer: Keypair,
-  treeAddress: PublicKey,
-  collectionDetails: CollectionDetails,
-  amount: number,
-) {
-  // Derive the tree authority PDA ('TreeConfig' account for the tree account)
-  const [treeAuthority] = PublicKey.findProgramAddressSync(
-    [treeAddress.toBuffer()],
-    BUBBLEGUM_PROGRAM_ID,
-  );
-
-  // Derive the bubblegum signer, used by the Bubblegum program to handle "collection verification"
-  // Only used for `createMintToCollectionV1` instruction
-  const [bubblegumSigner] = PublicKey.findProgramAddressSync(
-    [Buffer.from("collection_cpi", "utf8")],
-    BUBBLEGUM_PROGRAM_ID,
-  );
-
-  for (let i = 0; i < amount; i++) {
-    // Compressed NFT Metadata
-    const compressedNFTMetadata = createNftMetadata(payer.publicKey, i);
-
-    // Create the instruction to "mint" the compressed NFT to the tree
-    const mintIx = createMintToCollectionV1Instruction(
-      {
-        payer: payer.publicKey, // The account that will pay for the transaction
-        merkleTree: treeAddress, // The address of the tree account
-        treeAuthority, // The authority of the tree account, should be a PDA derived from the tree account address
-        treeDelegate: payer.publicKey, // The delegate of the tree account, should be the same as the tree creator by default
-        leafOwner: payer.publicKey, // The owner of the compressed NFT being minted to the tree
-        leafDelegate: payer.publicKey, // The delegate of the compressed NFT being minted to the tree
-        collectionAuthority: payer.publicKey, // The authority of the "collection" NFT
-        collectionAuthorityRecordPda: BUBBLEGUM_PROGRAM_ID, // Must be the Bubblegum program id
-        collectionMint: collectionDetails.mint, // The mint of the "collection" NFT
-        collectionMetadata: collectionDetails.metadata, // The metadata of the "collection" NFT
-        editionAccount: collectionDetails.masterEditionAccount, // The master edition of the "collection" NFT
-        compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
-        logWrapper: SPL_NOOP_PROGRAM_ID,
-        bubblegumSigner,
-        tokenMetadataProgram: TOKEN_METADATA_PROGRAM_ID,
-      },
-      {
-        metadataArgs: Object.assign(compressedNFTMetadata, {
-          collection: { key: collectionDetails.mint, verified: false },
-        }),
-      },
-    );
-
-    try {
-      // Create new transaction and add the instruction
-      const tx = new Transaction().add(mintIx);
-
-      // Set the fee payer for the transaction
-      tx.feePayer = payer.publicKey;
-
-      // Send the transaction
-      const txSignature = await sendAndConfirmTransaction(
-        connection,
-        tx,
-        [payer],
-        { commitment: "confirmed", skipPreflight: true },
-      );
-
-      console.log(
-        `https://explorer.solana.com/tx/${txSignature}?cluster=devnet`,
-      );
-    } catch (err) {
-      console.error("\nFailed to mint compressed NFT:", err);
-      throw err;
-    }
-  }
-}
+We need this signature in order to get the leaf schema, and with this schema
+derive the asset ID.
+
+```typescript filename="mint-compressed-nft-to-collection.ts"
+const leaf: LeafSchema = await parseLeafFromMintToCollectionV1Transaction(
+  umi,
+  uintSig,
+);
+const assetId = findLeafAssetIdPda(umi, {
+  merkleTree,
+  leafIndex: leaf.nonce,
+})[0];
 ```
 
-This is a great point to test with a small tree. 
Simply update `main` to call
-`getOrCreateCollectionNFT` then `mintCompressedNftToCollection`:
+With everything in place, we can now run our script
+`mint-compressed-nft-to-collection.ts`:

-```typescript
-async function main() {
-  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
-  const wallet = await getOrCreateKeypair("Wallet_1");
-  await airdropSolIfNeeded(wallet.publicKey);
-
-  const maxDepthSizePair: ValidDepthSizePair = {
-    maxDepth: 3,
-    maxBufferSize: 8,
-  };
-
-  const canopyDepth = 0;
-
-  const treeAddress = await createAndInitializeTree(
-    connection,
-    wallet,
-    maxDepthSizePair,
-    canopyDepth,
-  );
-
-  const collectionNft = await getOrCreateCollectionNFT(connection, wallet);
-
-  await mintCompressedNftToCollection(
-    connection,
-    wallet,
-    treeAddress,
-    collectionNft,
-    2 ** maxDepthSizePair.maxDepth,
-  );
-}
+```bash
+npx esrun mint-compressed-nft-to-collection.ts
```

-Again, to run, in your terminal type: `npm run start`
+Your output should resemble:

-#### 5. Read existing cNFT data
+```bash
+asset id: D4A8TYkKE5NzkqBQ4mPybgFbAUDN53fwJ64b8HwEEuUS
+✅ Finished successfully!
+```

-Now that we’ve written code to mint cNFTs, let’s see if we can actually fetch
-their data. This is tricky because the onchain data is just the Merkle tree
-account, the data from which can be used to verify existing information as
-accurate but is useless in conveying what the information is.
+We aren't returning the Explorer link because this address doesn't exist in
+Solana's onchain state but is indexed by RPCs that support the DAS API.

-Let’s start by declaring a function `logNftDetails` that takes as parameters
-`treeAddress` and `nftsMinted`.
+In the next step we will query this address to fetch our cNFT's details.

-At this point we don’t actually have a direct identifier of any kind that points
-to our cNFT. To get that, we’ll need to know the leaf index that was used when
-we minted our cNFT. We can then use that to derive the asset ID used by the Read
-API and subsequently use the Read API to fetch our cNFT data.
+#### 4. Read existing cNFT data

-In our case, we created a non-public tree and minted 8 cNFTs, so we know that
-the leaf indexes used were 0-7. With this, we can use the `getLeafAssetId`
-function from `@metaplex-foundation/mpl-bubblegum` to get the asset ID.
+Now that we’ve written code to mint cNFTs, let’s see if we can actually fetch
+their data.

-Finally, we can use an RPC that supports the
-[Read API](https://solana.com/developers/guides/javascript/compressed-nfts) to
-fetch the asset. We’ll be using
-[Helius](https://docs.helius.dev/compression-and-das-api/digital-asset-standard-das-api),
-but feel free to choose your own RPC provider. To use Helius, you’ll need to get
-a free API Key from [the Helius website](https://dev.helius.xyz/). Then add your
-`RPC_URL` to your `.env` file. For example:
+Create a new file `fetch-cnft-details.ts`:

```bash
-## Add this
-RPC_URL=https://devnet.helius-rpc.com/?api-key=YOUR_API_KEY
+touch fetch-cnft-details.ts
```

-Then simply issue a POST request to your provided RPC URL and put the `getAsset`
-information in the body:
+Import our packages and instantiate Umi. Here we will finally make use of the
+`umi.use(dasApi())` we've been importing. 
-```typescript
-async function logNftDetails(treeAddress: PublicKey, nftsMinted: number) {
-  for (let i = 0; i < nftsMinted; i++) {
-    const assetId = await getLeafAssetId(treeAddress, new BN(i));
-    console.log("Asset ID:", assetId.toBase58());
-    const response = await fetch(process.env.RPC_URL, {
-      method: "POST",
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({
-        jsonrpc: "2.0",
-        id: "my-id",
-        method: "getAsset",
-        params: {
-          id: assetId,
-        },
-      }),
-    });
-    const { result } = await response.json();
-    console.log(JSON.stringify(result, null, 2));
-  }
-}
+In the instantiation of Umi, we are going to make a change to our connection
+endpoint and use an RPC that supports the DAS API.
+
+Be sure to update this with your Helius API key, which you can get from the
+[developer dashboard page](https://dashboard.helius.dev/signup?redirectTo=onboarding).
+
+```typescript filename="fetch-cnft-details.ts"
+import { dasApi } from "@metaplex-foundation/digital-asset-standard-api";
+import { mplBubblegum } from "@metaplex-foundation/mpl-bubblegum";
+import {
+  keypairIdentity,
+  publicKey as UMIPublicKey,
+} from "@metaplex-foundation/umi";
+import { createUmi } from "@metaplex-foundation/umi-bundle-defaults";
+import { getKeypairFromFile } from "@solana-developers/helpers";
+
+const umi = createUmi(
+  "https://devnet.helius-rpc.com/?api-key=YOUR-HELIUS-API-KEY",
+);
+
+// load keypair from local file system
+// See https://github.com/solana-developers/helpers?tab=readme-ov-file#get-a-keypair-from-a-keypair-file
+const localKeypair = await getKeypairFromFile();
+
+// convert to Umi compatible keypair
+const umiKeypair = umi.eddsa.createKeypairFromSecretKey(localKeypair.secretKey);
+
+// load the MPL Bubblegum program, dasApi plugin and assign a signer to our umi instance
+umi.use(keypairIdentity(umiKeypair)).use(mplBubblegum()).use(dasApi());
```

-Helius essentially observes transaction logs as they happen and stores the NFT
-metadata that was hashed and stored in the Merkle tree. This enables them to
-surface that data when requested.
+Fetching a compressed NFT's details is as simple as calling the `getAsset`
+method with the `assetId` from the previous step.
+
+```typescript filename="fetch-cnft-details.ts"
+const assetId = UMIPublicKey("D4A8TYkKE5NzkqBQ4mPybgFbAUDN53fwJ64b8HwEEuUS");

-If we add a call to this function at the end of `main` and re-run your script,
-the data we get back in the console is very comprehensive. It includes all of
-the data you’d expect in both the onchain and off-chain portion of a traditional
-NFT. You can find the cNFT’s attributes, files, ownership and creator
-information, and more.
+// @ts-ignore
+const rpcAsset = await umi.rpc.getAsset(assetId);
+console.log(rpcAsset);
+```
+ +The output of our console.log would output ```json { - "interface": "V1_NFT", - "id": "48Bw561h1fGFK4JGPXnmksHp2fpniEL7hefEc6uLZPWN", - "content": { - "$schema": "https://schema.metaplex.com/nft1.0.json", - "json_uri": "https://raw.githubusercontent.com/Unboxed-Software/rgb-png-generator/master/assets/183_89_78/183_89_78.json", - "files": [ - { - "uri": "https://raw.githubusercontent.com/Unboxed-Software/rgb-png-generator/master/assets/183_89_78/183_89_78.png", - "cdn_uri": "https://cdn.helius-rpc.com/cdn-cgi/image//https://raw.githubusercontent.com/Unboxed-Software/rgb-png-generator/master/assets/183_89_78/183_89_78.png", - "mime": "image/png" - } - ], - "metadata": { - "attributes": [ - { - "value": "183", - "trait_type": "R" - }, - { - "value": "89", - "trait_type": "G" - }, - { - "value": "78", - "trait_type": "B" - } - ], - "description": "Random RGB Color", - "name": "CNFT", - "symbol": "CNFT" + interface: 'V1_NFT', + id: 'D4A8TYkKE5NzkqBQ4mPybgFbAUDN53fwJ64b8HwEEuUS', + content: { + '$schema': 'https://schema.metaplex.com/nft1.0.json', + json_uri: 'https://chocolate-wet-narwhal-846.mypinata.cloud/ipfs/QmeBRVEmASS3pyK9YZDkRUtAham74JBUZQE3WD4u4Hibv9', + files: [ [Object] ], + metadata: { + attributes: [Array], + description: 'My NFT Description', + name: 'My NFT', + symbol: '', + token_standard: 'NonFungible' }, - "links": { - "image": "https://raw.githubusercontent.com/Unboxed-Software/rgb-png-generator/master/assets/183_89_78/183_89_78.png" + links: { + image: 'https://lycozm33rkk5ozjqldiuzc6drazmdp5d5g3g7foh3gz6rz5zp7va.arweave.net/XgTss3uKlddlMFjRTIvDiDLBv6Pptm-Vx9mz6Oe5f-o' } }, - "authorities": [ + authorities: [ { - "address": "DeogHav5T2UV1zf5XuH4DTwwE5fZZt7Z4evytUUtDtHd", - "scopes": ["full"] + address: '4sk8Ds1T4bYnN4j23sMbVyHYABBXQ53NoyzVrXGd3ja4', + scopes: [Array] } ], - "compression": { - "eligible": false, - "compressed": true, - "data_hash": "3RsXHMBDpUPojPLZuMyKgZ1kbhW81YSY3PYmPZhbAx8K", - "creator_hash": "Di6ufEixhht76sxutC9528H7PaWuPz9hqTaCiQxoFdr", - "asset_hash": "2TwWjQPdGc5oVripPRCazGBpAyC5Ar1cia8YKUERDepE", - "tree": "7Ge8nhDv2FcmnpyfvuWPnawxquS6gSidum38oq91Q7vE", - "seq": 8, - "leaf_id": 7 + compression: { + eligible: false, + compressed: true, + data_hash: '2UgKwnTkguefRg3P5J33UPkNebunNMFLZTuqvnBErqhr', + creator_hash: '4zKvSQgcRhJFqjQTeCjxuGjWydmWTBVfCB5eK4YkRTfm', + asset_hash: '2DwKkMFYJHDSgTECiycuBApMt65f3N1ZwEbRugRZymwJ', + tree: 'ZwzNxXw83PUmWSypXmqRH669gD3hF9rEjHWPpVghr5h', + seq: 4, + leaf_id: 3 }, - "grouping": [ + grouping: [ { - "group_key": "collection", - "group_value": "9p2RqBUAadMznAFiBEawMJnKR9EkFV98wKgwAz8nxLmj" + group_key: 'collection', + group_value: 'D2zi1QQmtZR5fk7wpA1Fmf6hTY2xy8xVMyNgfq6LsKy1' } ], - "royalty": { - "royalty_model": "creators", - "target": null, - "percent": 0, - "basis_points": 0, - "primary_sale_happened": false, - "locked": false + royalty: { + royalty_model: 'creators', + target: null, + percent: 0, + basis_points: 0, + primary_sale_happened: false, + locked: false }, - "creators": [ + creators: [ { - "address": "HASk3AoTPAvC1KnXSo6Qm73zpkEtEhbmjLpXLgvyKBkR", - "share": 100, - "verified": false + address: '4kg8oh3jdNtn7j2wcS7TrUua31AgbLzDVkBZgTAe44aF', + share: 100, + verified: false } ], - "ownership": { - "frozen": false, - "delegated": false, - "delegate": null, - "ownership_model": "single", - "owner": "HASk3AoTPAvC1KnXSo6Qm73zpkEtEhbmjLpXLgvyKBkR" + ownership: { + frozen: false, + delegated: false, + delegate: null, + ownership_model: 'single', + owner: '4kg8oh3jdNtn7j2wcS7TrUua31AgbLzDVkBZgTAe44aF' }, - 
"supply": { - "print_max_supply": 0, - "print_current_supply": 0, - "edition_nonce": 0 - }, - "mutable": false, - "burnt": false + supply: { print_max_supply: 0, print_current_supply: 0, edition_nonce: null }, + mutable: true, + burnt: false } ``` Remember, the Read API also includes ways to get multiple assets, query by owner, creator, etc., and more. Be sure to look through the [Helius docs](https://docs.helius.dev/compression-and-das-api/digital-asset-standard-das-api) -to see what’s available. +to see what's available. -#### 6. Transfer a cNFT +#### 5. Transfer a cNFT -The last thing we’re going to add to our script is a cNFT transfer. Just as with +The last thing we're going to add to our script is a cNFT transfer. Just as with a standard SPL token transfer, security is paramount. Unlike with a standard SPL token transfer, however, to build a secure transfer with state compression of any kind, the program performing the transfer needs the entire asset data. -The program, Bubblegum in this case, needs to be provided with the entire data -that was hashed and stored on the corresponding leaf _and_ needs to be given the -“proof path” for the leaf in question. That makes cNFT transfers a bit trickier -than SPL token transfers. +Fortunately for us can get the asset data with the `getAssetWithProof` method. -Remember, the general steps are: +Le't first create a new file `transfer-asset.ts`, and populate it with the code +for instantiating a new Umi client. -1. Fetch the cNFT's asset data from the indexer -2. Fetch the cNFT's proof from the indexer -3. Fetch the Merkle tree account from the Solana blockchain -4. Prepare the asset proof as a list of `AccountMeta` objects -5. Build and send the Bubblegum transfer instruction +```typescript filename="transfer-asset.ts" +import { dasApi } from "@metaplex-foundation/digital-asset-standard-api"; +import { + getAssetWithProof, + mplBubblegum, + transfer, +} from "@metaplex-foundation/mpl-bubblegum"; +import { + keypairIdentity, + publicKey as UMIPublicKey, +} from "@metaplex-foundation/umi"; +import { createUmi } from "@metaplex-foundation/umi-bundle-defaults"; +import { base58 } from "@metaplex-foundation/umi/serializers"; +import { + getExplorerLink, + getKeypairFromFile, +} from "@solana-developers/helpers"; +import { clusterApiUrl } from "@solana/web3.js"; -Let’s start by declaring a `transferNft` function that takes the following: +const umi = createUmi(clusterApiUrl("devnet")); -- `connection` - a `Connection` object -- `assetId` - a `PublicKey` object -- `sender` - a `Keypair` object so we can sign the transaction -- `receiver` - a `PublicKey` object representing the new owner +// load keypair from local file system +// See https://github.com/solana-developers/helpers?tab=readme-ov-file#get-a-keypair-from-a-keypair-file +const localKeypair = await getKeypairFromFile(); -Inside that function, let’s fetch the asset data again then also fetch the asset -proof. For good measure, let’s wrap everything in a `try catch`. 
+// convert to Umi compatible keypair
+const umiKeypair = umi.eddsa.createKeypairFromSecretKey(localKeypair.secretKey);

-```typescript
-async function transferNft(
-  connection: Connection,
-  assetId: PublicKey,
-  sender: Keypair,
-  receiver: PublicKey,
-) {
-  try {
-    const assetDataResponse = await fetch(process.env.RPC_URL, {
-      method: "POST",
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({
-        jsonrpc: "2.0",
-        id: "my-id",
-        method: "getAsset",
-        params: {
-          id: assetId,
-        },
-      }),
-    });
-    const assetData = (await assetDataResponse.json()).result;
-
-    const assetProofResponse = await fetch(process.env.RPC_URL, {
-      method: "POST",
-      headers: { "Content-Type": "application/json" },
-      body: JSON.stringify({
-        jsonrpc: "2.0",
-        id: "my-id",
-        method: "getAssetProof",
-        params: {
-          id: assetId,
-        },
-      }),
-    });
-    const assetProof = (await assetProofResponse.json()).result;
-  } catch (err: any) {
-    console.error("\nFailed to transfer nft:", err);
-    throw err;
-  }
-}
+// load the MPL Bubblegum program, dasApi plugin and assign a signer to our umi instance
+umi.use(keypairIdentity(umiKeypair)).use(mplBubblegum()).use(dasApi());
 ```

-Next, let’s fetch the Merkle tree account from the chain, get the canopy depth,
-and assemble the proof path. We do this by mapping the asset proof we got from
-Helius to a list of `AccountMeta` objects, then removing any proof nodes at the
-end that are already cached onchain in the canopy.
+We are now ready to transfer our asset. Using the `assetId` for our cNFT, we can
+call the `transfer` method from the Bubblegum library:

-```typescript
-async function transferNft(
-  connection: Connection,
-  assetId: PublicKey,
-  sender: Keypair,
-  receiver: PublicKey
-) {
-  try {
-    ...
-
-    const treePublicKey = new PublicKey(assetData.compression.tree)
-
-    const treeAccount = await ConcurrentMerkleTreeAccount.fromAccountAddress(
-      connection,
-      treePublicKey
-    )
-
-    const canopyDepth = treeAccount.getCanopyDepth() || 0
-
-    const proofPath: AccountMeta[] = assetProof.proof
-      .map((node: string) => ({
-        pubkey: new PublicKey(node),
-        isSigner: false,
-        isWritable: false,
-      }))
-      .slice(0, assetProof.proof.length - canopyDepth)
-  } catch (err: any) {
-    console.error("\nFailed to transfer nft:", err)
-    throw err
-  }
-}
-```
+```typescript filename="transfer-asset.ts"
+const assetId = UMIPublicKey("D4A8TYkKE5NzkqBQ4mPybgFbAUDN53fwJ64b8HwEEuUS");
+
+//@ts-ignore
+const assetWithProof = await getAssetWithProof(umi, assetId);
+
+const uintSig = (
+  await transfer(umi, {
+    ...assetWithProof,
+    leafOwner: umi.identity.publicKey,
+    newLeafOwner: UMIPublicKey("J63YroB8AwjDVjKuxjcYFKypVM3aBeQrfrVmNBxfmThB"),
+  }).sendAndConfirm(umi)
+).signature;
+
+// base58.deserialize returns a [value, bytesRead] tuple - take the string
+const b58Sig = base58.deserialize(uintSig)[0];
+
+const explorerLink = getExplorerLink("transaction", b58Sig, "devnet");
+console.log(`Explorer link: ${explorerLink}`);
+console.log("✅ Finished successfully!");
+```

-Finally, we build the instruction using `createTransferInstruction`, add it to a
-transaction, then sign and send the transaction. 
This is what the entire -`transferNft` function looks like when finished: +//@ts-ignore +const assetWithProof = await getAssetWithProof(umi, assetId); -```typescript -async function transferNft( - connection: Connection, - assetId: PublicKey, - sender: Keypair, - receiver: PublicKey, -) { - try { - const assetDataResponse = await fetch(process.env.RPC_URL, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - jsonrpc: "2.0", - id: "my-id", - method: "getAsset", - params: { - id: assetId, - }, - }), - }); - const assetData = (await assetDataResponse.json()).result; - - const assetProofResponse = await fetch(process.env.RPC_URL, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - jsonrpc: "2.0", - id: "my-id", - method: "getAssetProof", - params: { - id: assetId, - }, - }), - }); - const assetProof = (await assetProofResponse.json()).result; - - const treePublicKey = new PublicKey(assetData.compression.tree); - - const treeAccount = await ConcurrentMerkleTreeAccount.fromAccountAddress( - connection, - treePublicKey, - ); - - const canopyDepth = treeAccount.getCanopyDepth() || 0; - - const proofPath: AccountMeta[] = assetProof.proof - .map((node: string) => ({ - pubkey: new PublicKey(node), - isSigner: false, - isWritable: false, - })) - .slice(0, assetProof.proof.length - canopyDepth); - - const treeAuthority = treeAccount.getAuthority(); - const leafOwner = new PublicKey(assetData.ownership.owner); - const leafDelegate = assetData.ownership.delegate - ? new PublicKey(assetData.ownership.delegate) - : leafOwner; - - const transferIx = createTransferInstruction( - { - merkleTree: treePublicKey, - treeAuthority, - leafOwner, - leafDelegate, - newLeafOwner: receiver, - logWrapper: SPL_NOOP_PROGRAM_ID, - compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, - anchorRemainingAccounts: proofPath, - }, - { - root: [...new PublicKey(assetProof.root.trim()).toBytes()], - dataHash: [ - ...new PublicKey(assetData.compression.data_hash.trim()).toBytes(), - ], - creatorHash: [ - ...new PublicKey(assetData.compression.creator_hash.trim()).toBytes(), - ], - nonce: assetData.compression.leaf_id, - index: assetData.compression.leaf_id, - }, - ); - - const tx = new Transaction().add(transferIx); - tx.feePayer = sender.publicKey; - const txSignature = await sendAndConfirmTransaction( - connection, - tx, - [sender], - { - commitment: "confirmed", - skipPreflight: true, - }, - ); - console.log(`https://explorer.solana.com/tx/${txSignature}?cluster=devnet`); - } catch (err: any) { - console.error("\nFailed to transfer nft:", err); - throw err; - } -} +let uintSig = await( + await transfer(umi, { + ...assetWithProof, + leafOwner: umi.identity.publicKey, + newLeafOwner: UMIPublicKey("J63YroB8AwjDVjKuxjcYFKypVM3aBeQrfrVmNBxfmThB"), + }).sendAndConfirm(umi), +).signature; + +const b64sig = base58.deserialize(uintSig); + +let explorerLink = getExplorerLink("transaction", b64sig, "devnet"); +console.log(`Explorer link: ${explorerLink}`); +console.log("✅ Finished successfully!"); ``` -Lets transfer our first compressed NFT at index 0 to someone else. First we’ll -need to spin up another wallet with some funds, then grab the assetID at index 0 -using `getLeafAssetId`. Then we’ll do the transfer. Finally, we’ll print out the -entire collection using our function `logNftDetails`. You’ll note that the NFT -at index zero will now belong to our new wallet in the `ownership` field. 
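+The spread `...assetWithProof` does the heavy lifting here: it passes the
+current root, the leaf's data and creator hashes, its nonce and index, and the
+proof path to the `transfer` method. As an optional sanity check (this snippet
+is not part of the original lab; the field names come from mpl-bubblegum's
+`AssetWithProof` type), you can inspect those values before sending:
+
+```typescript
+// Illustrative only: peek at the proof data fetched via the DAS API
+console.log("current leaf owner:", assetWithProof.leafOwner);
+console.log("leaf index:", assetWithProof.index);
+console.log("proof path length:", assetWithProof.proof.length);
+```
+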
+Running our script with `npx esrun transfer-asset.ts` should output something
+similar to this if successful:

-```typescript
-async function main() {
-  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
-  const wallet = await getOrCreateKeypair("Wallet_1");
-  await airdropSolIfNeeded(wallet.publicKey);
-
-  const maxDepthSizePair: ValidDepthSizePair = {
-    maxDepth: 3,
-    maxBufferSize: 8,
-  };
-
-  const canopyDepth = 0;
-
-  const treeAddress = await createAndInitializeTree(
-    connection,
-    wallet,
-    maxDepthSizePair,
-    canopyDepth,
-  );
-
-  const collectionNft = await getOrCreateCollectionNFT(connection, wallet);
-
-  await mintCompressedNftToCollection(
-    connection,
-    wallet,
-    treeAddress,
-    collectionNft,
-    2 ** maxDepthSizePair.maxDepth,
-  );
-
-  const recieverWallet = await getOrCreateKeypair("Wallet_2");
-  const assetId = await getLeafAssetId(treeAddress, new BN(0));
-  await airdropSolIfNeeded(recieverWallet.publicKey);
-
-  console.log(
-    `Transfering ${assetId.toString()} from ${wallet.publicKey.toString()} to ${recieverWallet.publicKey.toString()}`,
-  );
-
-  await transferNft(connection, assetId, wallet, recieverWallet.publicKey);
-
-  await logNftDetails(treeAddress, 8);
-}
+```bash
+Explorer link: https://explorer.solana.com/tx/3sNgN7Gnh5FqcJ7ZuUEXFDw5WeojpwkDjdfvTNWy68YCEJUF8frpnUJdHhHFXAtoopsytzkKewh39Rf7phFQ2hCF?cluster=devnet
+✅ Finished successfully!
 ```

-Go ahead and run your script. The whole thing should execute without failing,
-and all for close to 0.01 SOL!
+Open the Explorer link and scroll to the bottom to observe your transaction
+logs:
+
+![Solana Explorer showing logs of the transfer cnft instruction](/public/assets/courses/unboxed/solana-explorer-showing-cnft-transfer-logs.png)

 Congratulations! Now you know how to mint, read, and transfer cNFTs. If you
 wanted, you could update the max depth, max buffer size, and canopy depth to
@@ -1637,21 +1121,19 @@ larger values and as long as you have enough Devnet SOL, this script will let
 you mint up to 10k cNFTs for a small fraction of what it would cost to mint 10k
 traditional NFTs.

-If you plan to mint a large amount of NFTs you might want
-to try and batch these instructions for fewer total transactions.
-
-If you need more time with this lab, feel free to go through it again and/or
-take a look at the solution code on the `solution` branch of the
-[lab repo](https://github.com/Unboxed-Software/solana-cnft-demo/tree/solution).
+Inspect the cNFT on Solana Explorer! As before, if you run into any issues, you
+should try to fix them yourself, but if needed the
+[solution code](https://github.com/solana-foundation/compressed-nfts) is
+available.

 ### Challenge

-It’s your turn to take these concepts for a spin on your own! We’re not going to
+It's your turn to take these concepts for a spin on your own! We're not going to
 be overly prescriptive at this point, but here are some ideas:

 1. Create your own production cNFT collection
-2. Build a UI for this lesson’s lab that will let you mint a cNFT and display it
-3. See if you can replicate some of the lab script’s functionality in an onchain
+2. Build a UI for this lesson's lab that will let you mint a cNFT and display it
+3. See if you can replicate some of the lab script's functionality in an onchain
   program, i.e. 
write a program that can mint cNFTs

diff --git a/content/courses/state-compression/generalized-state-compression.md b/content/courses/state-compression/generalized-state-compression.md
index 00252dbf0..6a74e7c70 100644
--- a/content/courses/state-compression/generalized-state-compression.md
+++ b/content/courses/state-compression/generalized-state-compression.md
@@ -1,309 +1,374 @@
 ---
-title: Generalized State Compression
+title: Generalized State Compression
+
 objectives:
-  - Explain the logic flow behind Solana state compression
+  - Explain the flow of Solana’s state compression logic.
   - Explain the difference between a Merkle tree and a concurrent Merkle tree
-  - Implement generic state compression in basic Solana programs
+  - Implement generic state compression in a basic Solana program
+
 description:
-  "How state compression - the tech behind compressed NFTs - works, and how to
-  implement it in your own Solana programs."
+  Understand how state compression - the technology behind compressed NFTs -
+  works, and learn how to apply it in your Solana programs.
 ---

 ## Summary

-- State Compression on Solana is most commonly used for compressed NFTs, but
-  it's possible to use it for arbitrary data
-- State Compression lowers the amount of data you have to store onchain by
-  leveraging Merkle trees.
-- Merkle trees store a single hash that represents an entire binary tree of
-  hashes. Each leaf on a Merkle tree is a hash of that leaf's data.
-- Concurrent Merkle trees are a specialized version of Merkle trees that allow
-  concurrent updates.
-- Because data in a state-compressed program is not stored onchain, you have to
-  user indexers to keep an off-chain cache of the data and then verify that data
-  against the onchain Merkle tree.
+- State compression on Solana is primarily used for compressed NFTs (cNFTs), but
+  it can be applied to any data type
+- State Compression lowers the amount of data you have to store onchain using
+  Merkle trees.
+- A Merkle tree compresses data by hashing pairs of data repeatedly until a
+  single root hash is produced. This root hash is then stored onchain.
+- Each leaf on a Merkle tree is a hash of that leaf’s data.
+- A concurrent Merkle tree is a specialized version of a Merkle tree. Unlike a
+  standard Merkle tree, it allows multiple updates simultaneously without
+  affecting transaction validity.
+- Data in a state-compressed program is not stored onchain, so you have to use
+  indexers to keep an offchain cache of the data. This offchain cache is then
+  verified against the onchain Merkle tree.

 ## Lesson

-Previously, we discussed state compression in the context of compressed NFTs. At
-the time of writing, compressed NFTs represent the most common use case for
-state compression, but it’s possible to use state compression within any
-program. In this lesson, we’ll discuss state compression in more generalized
-terms so that you can apply it to any of your programs.
+Previously, we talked about state compression in the context of compressed NFTs.
+
+While compressed NFTs are the main use case for state compression, you can apply
+state compression to any Solana program. In this lesson, we’ll discuss state
+compression in general terms so you can use it across your Solana projects.

 ### A theoretical overview of state compression

-In traditional programs, data is serialized (typically using borsh) and then
-stored directly in an account. This allows the data to be easily read and
-written through Solana programs. 
You can “trust” the data stored in the accounts
-because it can’t be modified except through the mechanisms surfaced by the
-program.
-
-State compression effectively asserts that the most important piece of this
-equation is how “trustworthy” the data is. If all we care about is the ability
-to trust that data is what it claims to be, then we can actually get away with
-**_not_** storing the data in an account onchain. Instead, we can store hashes
-of the data where the hashes can be used to prove or verify the data. The data
-hash takes up significantly less storage space than the data itself. We can then
-store the actual data somewhere much cheaper and worry about verifying it
-against the onchain hash when the data is accessed.
-
-The specific data structure used by the Solana State Compression program is a
-special binary tree structure known as a **concurrent Merkle tree**. This tree
-structure hashes pieces of data together in a deterministic way to compute a
-single, final hash that gets stored onchain. This final hash is significantly
-smaller in size than all the original data combined, hence the “compression.”
-The steps to this process are:
-
-1. Take any piece of data
-2. Create a hash of this data
-3. Store this hash as a “leaf” at the bottom of the tree
-4. Each leaf pair is then hashed together, creating a “branch”
-5. Each branch is then hashed together
-6. Continually climb the tree and hash adjacent branches together
-7. Once at the top of the tree, a final ”root hash” is produced
-8. Store the root hash onchain as verifiable proof of the data within each leaf
-9. Anyone wanting to verify that the data they have matches the “source of
-   truth” can go through the same process and compare the final hash without
-   having to store all the data onchain
-
-This involves a few rather serious development tradeoffs:
-
-1. Since the data is no longer stored in an account onchain, it is more
-   difficult to access.
-2. Once the data has been accessed, developers must decide how often their
-   applications will verify the data against the onchain hash.
-3. Any changes to the data will require sending the entirety of the previously
-   hashed data _and_ the new data into an instruction. Developer may also have
-   to provide additional data relevant to the proofs required to verify the
-   original data against the hash.
-
-Each of these will be a consideration when determining **if**, **when**, and
-**how** to implement state compression for your program.
+Normally, data in Solana programs is serialized (usually with borsh) and stored
+directly in an account. This makes it easy to read and write the data through
+the program. The account data is trustworthy because only the program can modify
+it.
+
+However, if all we need is to verify the integrity of the data, then we don’t
+need to store the actual data onchain. Instead, we can store hashes of the data,
+which can be used to prove or verify its accuracy. This is called _state
+compression_.
+
+These hashes take up far less storage space than the original data. The full
+data can be stored in a cheaper, offchain location, and only needs to be
+verified against the onchain hash when accessed.
+
+The Solana State Compression program uses a data structure known as a
+**concurrent Merkle tree**. A concurrent Merkle tree is a special kind of binary
+tree that deterministically hashes data, i.e. the same inputs will always
+produce the same Merkle root.
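+
+To make that determinism concrete, here is a minimal offchain sketch of how
+leaves are hashed pairwise up to a single root. It uses SHA-256 via Node's
+built-in `crypto` module purely for illustration (the onchain program uses its
+own hashing scheme), and assumes an even number of leaves at every level:
+
+```typescript
+import { createHash } from "crypto";
+
+const hash = (data: Buffer): Buffer =>
+  createHash("sha256").update(data).digest();
+
+// Hash each piece of raw data to form the leaves
+const leaves = ["data A", "data B", "data C", "data D"].map(item =>
+  hash(Buffer.from(item)),
+);
+
+// Repeatedly hash adjacent pairs until a single root hash remains
+let level = leaves;
+while (level.length > 1) {
+  const next: Buffer[] = [];
+  for (let i = 0; i < level.length; i += 2) {
+    next.push(hash(Buffer.concat([level[i], level[i + 1]])));
+  }
+  level = next;
+}
+
+// The same four inputs will always produce this exact root
+console.log("Merkle root:", level[0].toString("hex"));
+```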
+ +The final hash, called a _Merkle root_, is significantly smaller in size than +all the original full data sets combined. This is why it’s called "compression". +And it’s this hash that’s stored onchain. + +**Outlined below are the steps to this process, in order:** + +1. Take a piece of data. +2. Create a hash of that data. +3. Store the hash as a "leaf" at the bottom of the tree. +4. Hash pairs of leaves together to create branches. +5. Hash pairs of branches together. +6. Repeat this process until you reach the top of the tree. +7. The top of the tree contains a final "root hash." +8. Store this root hash onchain as proof of the data. +9. To verify the data, recompute the hashes and compare the final hash to the + onchain root hash. + +This method comes with some trade-offs: + +1. The data isn’t stored onchain, so it’s harder to access. +2. Developers must decide how often to verify the data against the onchain hash. +3. If the data changes, the entire data set must be sent to the program, along + with the new data. You’ll also need proof that the data matches the hash. + +These considerations will guide you when deciding whether, when, and how to +implement state compression in your programs. With that quick overview, let’s go +into more technical detail. #### Concurrent Merkle trees -A **Merkle tree** is a binary tree structure represented by a single hash. Every -leaf node in the structure is a hash of its inner data while every branch is a -hash of its child leaf hashes. In turn, branches are also hashed together until, -eventually, one final root hash remains. - -Since the Merkle tree is represented as a single hash, any modification to leaf -data changes the root hash. This causes an issue when multiple transactions in -the same slot are attempting to modify leaf data. Since these transactions must -execute in series, all but the first will fail since the root hash and proof -passed in will have been invalidated by the first transaction to be executed. In -other words, a standard Merkle tree can only modify a single leaf per slot. In a -hypothetical state-compressed program that relies on a single Merkle tree for -its state, this severely limits throughput. - -This can be solved with a **concurrent Merkle tree**. A concurrent Merkle -tree is a Merkle tree that stores a secure changelog of the most recent changes -along with their root hash and the proof to derive it. When multiple -transactions in the same slot try to modify leaf data, the changelog can be used -as a source of truth to allow for concurrent changes to be made to the tree. - -In other words, while an account storing a Merkle tree would have only the root -hash, a concurrent Merkle tree will also contain additional data that allows -subsequent writes to successfully occur. This includes: - -1. The root hash - The same root hash that a standard Merkle tree has. -2. A changelog buffer - This buffer contains proof data pertinent to recent root - hash changes so that subsequent writes in the same slot can still be - successful. -3. A canopy - When performing an update action on any given leaf, you need the - entire proof path from that leaf to the root hash. The canopy stores - intermediate proof nodes along that path so they don’t all have to be passed - into the program from the client. - -As a program architect, you control three values directly related to these three -items. 
Your choice determines the size of the tree, the cost to create the tree, -and the number of concurrent changes that can be made to the tree: - -1. Max depth -2. Max buffer size -3. Canopy depth - -The **max depth** is the maximum number of hops to get from any leaf to the root -of the tree. Since Merkle trees are binary trees, every leaf is connected only -to one other leaf. Max depth can then logically be used to calculate the number -of nodes for the tree with `2 ^ maxDepth`. - -The **max buffer size** is effectively the maximum number of concurrent changes -that you can make to a tree within a single slot with the root hash still being -valid. When multiple transactions are submitted in the same slot, each of which -is competing to update leafs on a standard Merkle tree, only the first to run -will be valid. This is because that “write” operation will modify the hash -stored in the account. Subsequent transactions in the same slot will be trying -to validate their data against a now-outdated hash. A concurrent Merkle tree has -a buffer so that the buffer can keep a running log of these modifications. This -allows the State Compression Program to validate multiple data writes in the -same slot because it can look up what the previous hashes were in the buffer and -compare against the appropriate hash. - -The **canopy depth** is the number of proof nodes that are stored onchain for -any given proof path. Verifying any leaf requires the complete proof path for -the tree. The complete proof path is made up of one proof node for every “layer” -of the tree, i.e. a max depth of 14 means there are 14 proof nodes. Every proof -node passed into the program adds 32 bytes to a transaction, so large trees -would quickly exceed the maximum transaction size limit. Caching proof nodes -onchain in the canopy helps improve program composability. - -Each of these three values, max depth, max buffer size, and canopy depth, comes -with a tradeoff. Increasing the value of any of these values increases the size -of the account used to store the tree, thus increasing the cost of creating the -tree. - -Choosing the max depth is fairly straightforward as it directly relates to the -number of leafs and therefore the amount of data you can store. If you need 1 -million cNFTs on a single tree where each cNFT is a leaf of the tree, find the -max depth that makes the following expression true: `2^maxDepth > 1 million`. -The answer is 20. - -Choosing a max buffer size is effectively a question of throughput: how many -concurrent writes do you need? The larger the buffer, the higher the throughput. - -Lastly, the canopy depth will determine your program’s composability. State -compression pioneers have made it clear that omitting a canopy is a bad idea. -Program A can’t call your state-compressed program B if doing so maxes out the -transaction size limits. Remember, program A also has required accounts and data -in addition to required proof paths, each of which take up transaction space. - -#### Data access on a state-compressed program - -A state-compressed account doesn’t store the data itself. Rather, it stores the -concurrent Merkle tree structure discussed above. The raw data itself lives only -in the blockchain’s cheaper **ledger state.** This makes data access somewhat -more difficult, but not impossible. - -The Solana ledger is a list of entries containing signed transactions. In -theory, this can be traced back to the genesis block. 
This effectively means any
-data that has ever been put into a transaction exists in the ledger.
-
-Since the state compression hashing process occurs onchain, all the data exists
-in the ledger state and could theoretically be retrieved from the original
-transaction by replaying the entire chain state from the beginning. However,
-it’s much more straightforward (though still complicated) to have
-an **indexer** track and index this data as the transactions occur. This ensures
-there is an off-chain “cache” of the data that anyone can access and
-subsequently verify against the onchain root hash.
-
-This process is complex, but it will make sense after some practice.
-
-### State compression tooling
-
-The theory described above is essential to properly understanding state
-compression. But you don’t have to implement any of it from scratch. Brilliant
-engineers have laid most of the groundwork for you in the form of the SPL State
-Compression Program and the Noop Program.
+Since a Merkle tree is represented as a single hash, any change to a leaf node
+alters the root hash. This becomes problematic when multiple transactions in the
+same slot try to update leaf data. Because transactions are executed serially,
+i.e. one after the other, all but the first will fail, since the root hash and
+proof passed in will have been invalidated by the first transaction executed.
+
+In short, a standard Merkle tree can only handle one leaf update per
+[slot](https://solana.com/docs/terminology#slot). This significantly limits the
+throughput in a state-compressed program that depends on a single Merkle tree
+for its state.
+
+Thankfully, this issue can be addressed using a _concurrent_ Merkle tree. Unlike
+a regular Merkle tree, a concurrent Merkle tree keeps a secure changelog of
+recent updates, along with their root hash and the proof needed to derive it.
+When multiple transactions in the same slot attempt to modify leaf data, the
+changelog serves as a reference, enabling concurrent updates to the tree.
+
+How does the concurrent Merkle tree achieve this? In a standard Merkle tree,
+only the root hash is stored. However, a concurrent Merkle tree includes extra
+data that ensures subsequent writes can succeed.
+
+This includes:
+
+1. The root hash - The same root hash found in a regular Merkle tree.
+2. A changelog buffer - A buffer containing proof data for recent root hash
+   changes, allowing further writes in the same slot to succeed.
+3. A canopy - To update a specific leaf, you need the entire proof path from the
+   leaf to the root hash. The canopy stores intermediate proof nodes along this
+   path so that not all of them need to be sent from the client to the program.
+
+### Key Parameters for Configuring a Concurrent Merkle Tree
+
+As a developer, you are responsible for controlling three key parameters that
+directly affect the tree’s size, cost, and the number of concurrent changes it
+can handle:
+
+1. **Max Depth**
+2. **Max Buffer Size**
+3. **Canopy Depth**
+
+Let’s take a brief look at each parameter.
+
+#### Max Depth
+
+The **max depth** determines how many levels or "hops" are required to reach the
+root of the tree from any leaf. Since Merkle trees are structured as binary
+trees, where each leaf is paired with only one other leaf, the max depth can be
+used to calculate the number of leaves the tree can hold with the formula:
+`2^maxDepth`.
+
+Here’s a quick TypeScript function for illustration:
+
+```typescript
+const getMaxDepth = (itemCount: number) => {
+  if (itemCount === 0) {
+    return 0;
+  }
+  return Math.ceil(Math.log2(itemCount));
+};
+```
+
+A max depth of 20 would allow for over one million leaves, making it suitable
+for storing large datasets like NFTs.
+
+#### Max Buffer Size
+
+The **max buffer size** controls how many concurrent updates can be made to the
+tree within a single slot while keeping the root hash valid. In a standard
+Merkle tree, only the first transaction in a slot would be successful since it
+updates the root hash, causing all subsequent transactions to fail due to hash
+mismatches. However, in a concurrent Merkle tree, the buffer maintains a log of
+changes, allowing multiple transactions to update the tree simultaneously by
+checking the appropriate root hash from the buffer. A larger buffer size
+increases throughput by enabling more concurrent changes.
+
+#### Canopy Depth
+
+The **canopy depth** specifies how many proof nodes are stored onchain for any
+given proof path. To verify any leaf in the tree, you need a complete proof
+path, which includes one proof node for every layer of the tree. For a tree with
+a max depth of 14, there will be 14 proof nodes in total. Each proof node adds
+32 bytes to the transaction, and without careful management, large trees could
+exceed the transaction size limit.
+
+Storing more proof nodes onchain (i.e., having a deeper canopy) allows other
+programs to interact with your tree without exceeding transaction limits, but it
+also uses more onchain storage. Consider the complexity of interactions with
+your tree when deciding on an appropriate canopy depth.
+
+### Balancing Trade-offs
+
+These three values—max depth, max buffer size, and canopy depth—all come with
+trade-offs. Increasing any of them will enlarge the account used to store the
+tree, raising the cost of creating the tree.
+
+- **Max Depth:** This is straightforward to determine based on how much data
+  needs to be stored. For example, if you need to store 1 million compressed
+  NFTs (cNFTs), where each cNFT is a leaf, you would need a max depth of 20
+  (`2^maxDepth > 1 million`).
+- **Max Buffer Size:** The choice of buffer size is mainly a question of
+  throughput—how many concurrent updates are required? A larger buffer allows
+  for more updates in the same slot.
+- **Canopy Depth:** A deeper canopy improves composability, enabling other
+  programs to interact with your state-compressed program without exceeding
+  transaction size limits. Omitting the canopy is discouraged, as it could cause
+  issues with transaction size, especially when other programs are involved.
+
+### Data Access in a State-Compressed Program
+
+In a state-compressed program, the actual data isn’t stored directly onchain.
+Instead, the concurrent Merkle tree structure is stored, while the raw data
+resides in the blockchain’s more affordable ledger state. This makes accessing
+the data more challenging, but not impossible.
+
+The Solana ledger is essentially a list of entries containing signed
+transactions, which, in theory, can be traced back to the Genesis block. This
+means any data that has ever been included in a transaction is stored in the
+ledger.
+
+Since the state compression process happens onchain, all the data is still in
+the ledger state. In theory, you could retrieve the original data by replaying
+the entire chain state from the start. 
However, it’s far more practical (though
+still somewhat complex) to use an indexer to track and index the data as the
+transactions happen. This creates an offchain "cache" of the data that can be
+easily accessed and verified against the onchain root hash.
+
+While this process may seem complex at first, it becomes clearer with practice.
+
+### State Compression Tooling
+
+While understanding the theory behind state compression is crucial, you don’t
+have to build it all from scratch. Talented engineers have already developed
+essential tools like the SPL State Compression Program and the Noop Program to
+simplify the process.

#### SPL State Compression and Noop Programs

-The SPL State Compression Program exists to make the process of creating and
-updating concurrent Merkle trees repeatable and composable throughout the Solana
-ecosystem. It provides instructions for initializing Merkle trees, managing tree
-leafs (i.e. add, update, remove data), and verifying leaf data.
-
-The State Compression Program also leverages a separate “no op” program whose
-primary purpose is to make leaf data easier to index by logging it to the ledger
-state. When you want to store compressed data, you pass it to the State
-Compression program where it gets hashed and emitted as an “event” to the Noop
-program. The hash gets stored in the corresponding concurrent Merkle tree, but
-the raw data remains accessible through the Noop program’s transaction logs.
-
-#### Index data for easy lookup
-
-Under normal conditions, you would typically access onchain data by fetching the
-appropriate account. When using state compression, however, it’s not so
-straightforward.
-
-As mentioned above, the data now exists in the ledger state rather than in an
-account. The easiest place to find the full data is in the logs of the Noop
-instruction. Unfortunately, while this data will in a sense exist in the ledger
-state forever, it will likely be inaccessible through validators after a certain
-period of time.
-
-To save space and be more performant, validators don’t retain every transaction
-back to the genesis block. The specific amount of time you’ll be able to access
-the Noop instruction logs related to your data will vary based on the validator.
-Eventually, you’ll lose access to it if you’re relying directly on instruction
-logs.
-
-Technically, you *can* replay the transaction state back to the genesis block
-but the average team isn’t going to do that, and it certainly won’t be
-performant. The
+The SPL State Compression Program is designed to streamline and standardize the
+creation and management of concurrent Merkle trees across the Solana ecosystem.
+It provides instruction handlers for initializing Merkle trees, handling tree
+leaves (such as adding, updating, or removing data), and verifying the integrity
+of leaf data.
+
+Additionally, the State Compression Program works in conjunction with a separate
+"Noop" program. A no-op program
+does nothing - literally 'no operation.' The Solana Noop Program only logs data
+to the ledger state; however, that logging is essential to state compression:
+
+When you store compressed data, it’s passed to the State Compression Program,
+which hashes the data and emits it as an "event" to the Noop Program. While the
+hash is stored in the concurrent Merkle tree, the raw data can still be accessed
+via the Noop Program’s transaction logs.
+
+### Indexing Data for Easy Lookup
+
+Typically, accessing onchain data is as simple as fetching the relevant account. 
+However, with state compression, it’s not that straightforward.
+
+As mentioned earlier, the data now resides in the ledger state rather than in an
+account. The most accessible place to find the complete data is in the logs of
+the Noop instruction. While this data remains in the ledger state indefinitely,
+it may become inaccessible through validators after a certain period.
+
+Validators don’t store all transactions back to the genesis block, to save space
+and improve performance. The length of time you can access Noop instruction logs
+varies depending on the validator. Eventually, the logs will become unavailable
+if you’re relying on direct access to them.
+
+In theory, it’s possible to replay transaction states back to the genesis block,
+but this approach is impractical for most teams and isn’t efficient. Some RPC
+providers have adopted the
 [Digital Asset Standard (DAS)](https://docs.helius.dev/compression-and-das-api/digital-asset-standard-das-api)
-has been adopted by many RPC providers to enable efficient queries of compressed
-NFTs and other assets. However, at the time of writing, it doesn’t support
-arbitrary state compression. Instead, you have two primary options:
+to enable efficient querying of compressed NFTs and other assets. However, as of
+now, DAS does not support arbitrary state compression.
+
+You essentially have two main options:
 
-1. Use an indexing provider that will build a custom indexing solution for your
-   program that observes the events sent to the Noop program and stores the
-   relevant data off-chain.
-2. Create your own pseudo-indexing solution that stores transaction data
-   off-chain.
+1. Use an indexing provider to create a custom indexing solution for your
+   program, which will monitor the events sent to the Noop program and store the
+   relevant data offchain.
+2. Build your own indexing solution that stores transaction data offchain.
 
-For many dApps, option 2 makes plenty of sense. Larger-scale applications may
-need to rely on infrastructure providers to handle their indexing.
+For many dApps, option 2 can be a practical choice. Larger-scale applications,
+however, may need to rely on infrastructure providers to manage their indexing
+needs.
 
-### State compression development process
+### State Compression Development Process
 
-#### Create Rust types
+#### Create Rust Types
 
-As with a typical Anchor program, one of the first things you should do is
-define your program’s Rust types. However, Rust types in a traditional Anchor
-program often represent accounts. In a state-compressed program, your account
-state will only store the Merkle tree. The more “usable” data schema will just
-be serialized and logged to the Noop program.
+In a typical Anchor program, developers often start by defining the Rust types
+that represent accounts. For a state-compressed program, however, the focus
+shifts to defining types that align with the Merkle tree structure.
 
-This type should include all the data stored in the leaf node and any contextual
-information needed to make sense of the data. For example, if you were to create
-a simple messaging program, your `Message` struct might look as follows:
+In state compression, your onchain account will primarily store the Merkle tree.
+The actual, usable data is serialized and logged to the Noop program for easier
+access and management. Your Rust types should encompass all data stored in the
+leaf nodes and any contextual information necessary for interpreting that
+data.
For instance, if you’re developing a simple messaging program, your
+`MessageLog` struct might look something like this:
 
 ```rust
-#[derive(AnchorSerialize)]
+/// Size of a keccak hash, in bytes.
+const HASH_SIZE: usize = 32;
+
+/// A log entry for messages sent between two public keys.
+#[derive(AnchorSerialize, AnchorDeserialize)]
 pub struct MessageLog {
-    leaf_node: [u8; 32], // The leaf node hash
-    from: Pubkey, // Pubkey of the message sender
-    to: Pubkey, // Pubkey of the message recipient
-    message: String, // The message to send
+    /// The leaf node hash for message logging.
+    pub leaf_node: [u8; HASH_SIZE],
+    /// The public key of the message sender.
+    pub from: Pubkey,
+    /// The public key of the message recipient.
+    pub to: Pubkey,
+    /// The actual message content.
+    pub message: String,
 }
 
-impl MessageLog {
-    // Constructs a new message log from given leaf node and message
-    pub fn new(leaf_node: [u8; 32], from: Pubkey, to: Pubkey, message: String) -> Self {
-        Self { leaf_node, from, to, message }
-    }
+/// Constructs a new `MessageLog`.
+///
+/// # Arguments
+///
+/// * `leaf_node` - A 32-byte array representing the leaf node hash.
+/// * `from` - The public key of the message sender.
+/// * `to` - The public key of the message recipient.
+/// * `message` - The message to be sent.
+///
+/// # Returns
+///
+/// Returns a new `MessageLog` instance.
+pub fn new_message_log(leaf_node: [u8; HASH_SIZE], from: Pubkey, to: Pubkey, message: String) -> MessageLog {
+    MessageLog { leaf_node, from, to, message }
 }
 ```
 
-To be abundantly clear, **this is not an account that you will be able to read
-from**. Your program will be creating an instance of this type from instruction
-inputs, not constructing an instance of this type from account data that it
-reads. We’ll discuss how to read data in a later section.
+To be absolutely clear, **`MessageLog` is not an account you will read
+from**. Instead, your program will create an instance of `MessageLog` from
+instruction handler inputs, rather than constructing it from data read from an
+account. We will cover how to read data from compressed accounts later.
 
-#### Initialize a new tree
+#### Initialize a New Tree
 
-Clients will create and initialize the Merkle tree account in two separate
-instructions. The first is simply allocating the account by calling System
-Program. The second will be an instruction that you create on a custom program
-that initializes the new account. This initialization is effectively just
-recording what the max depth and buffer size for the Merkle tree should be.
+To set up a new Merkle tree, clients need to perform two distinct steps.
 
-All this instruction needs to do is build a CPI to invoke the
-`init_empty_merkle_tree` instruction on the State Compression Program. Since
-this requires the max depth and max buffer size, these will need to be passed in
-as arguments to the instruction.
+1. First, they allocate the account by calling the System Program.
+2. Next, they use a custom program to initialize the new account. This
+   initialization involves setting the maximum depth and buffer size for the
+   Merkle tree.
 
-Remember, the max depth refers to the maximum number of hops to get from any
-leaf to the root of the tree. Max buffer size refers to the amount of space
-reserved for storing a changelog of tree updates. This changelog is used to
-ensure that your tree can support concurrent updates within the same block.
+The initialization instruction handler must create a CPI (Cross-Program
+Invocation) to call the `init_empty_merkle_tree` instruction from the State
+Compression Program. You’ll need to provide the maximum depth and buffer size as
+arguments to this instruction handler.
 
-For example, if we were initializing a tree for storing messages between users,
-the instruction might look like this:
+- **Max depth**: Defines the maximum number of hops needed to travel from any
+  leaf to the root of the tree.
+- **Max buffer size**: Specifies the space allocated for storing a changelog of
+  tree updates. This changelog is essential for supporting concurrent updates
+  within the same block.
+
+For instance, if you are initializing a tree to store messages between users,
+your instruction handler might look like this:
 
 ```rust
+/// Initializes an empty Merkle tree for storing messages with a specified depth and buffer size.
+///
+/// This function creates a CPI (Cross-Program Invocation) call to initialize the Merkle tree account
+/// using the provided authority and compression program. The PDA (Program Derived Address) seeds are used for
+/// signing the transaction.
+///
+/// # Arguments
+///
+/// * `ctx` - The context containing the accounts required for Merkle tree initialization.
+/// * `max_depth` - The maximum depth of the Merkle tree.
+/// * `max_buffer_size` - The maximum buffer size of the Merkle tree.
+///
+/// # Returns
+///
+/// This function returns a `Result<()>`, indicating success or failure.
+///
+/// # Errors
+///
+/// This function will return an error if the CPI call to `init_empty_merkle_tree` fails.
 pub fn create_messages_tree(
     ctx: Context,
     max_depth: u32, // Max depth of the Merkle tree
@@ -311,130 +376,185 @@ pub fn create_messages_tree(
 ) -> Result<()> {
     // Get the address for the Merkle tree account
     let merkle_tree = ctx.accounts.merkle_tree.key();
-    // Define the seeds for pda signing
-    let signer_seeds: &[&[&[u8]]] = &[
+
+    // The seeds for PDAs signing
+    let signers_seeds: &[&[&[u8]]] = &[
         &[
-            merkle_tree.as_ref(), // The address of the Merkle tree account as a seed
-            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the pda
+            merkle_tree.as_ref(), // The address of the Merkle tree account
+            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the PDA
         ],
     ];
 
-    // Create cpi context for init_empty_merkle_tree instruction.
+    // Create CPI context for `init_empty_merkle_tree` instruction handler
     let cpi_ctx = CpiContext::new_with_signer(
-        ctx.accounts.compression_program.to_account_info(), // The spl account compression program
+        ctx.accounts.compression_program.to_account_info(), // The SPL account compression program
         Initialize {
             authority: ctx.accounts.tree_authority.to_account_info(), // The authority for the Merkle tree, using a PDA
            merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be initialized
            noop: ctx.accounts.log_wrapper.to_account_info(), // The noop program to log data
        },
-        signer_seeds // The seeds for pda signing
+        signers_seeds // The seeds for PDAs signing
    );
 
-    // CPI to initialize an empty Merkle tree with given max depth and buffer size
+    // CPI to initialize an empty Merkle tree with the given max depth and buffer size
    init_empty_merkle_tree(cpi_ctx, max_depth, max_buffer_size)?;
 
    Ok(())
}
```
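+The other half of the setup - allocating the Merkle tree account itself - is
+normally done by the client before this instruction handler runs. As a rough
+sketch (not the definitive flow), the `@solana/spl-account-compression` JS
+package provides a helper for building that allocation instruction:
+
+```typescript
+import { Connection, Keypair, PublicKey } from "@solana/web3.js";
+import {
+  createAllocTreeIx,
+  ValidDepthSizePair,
+} from "@solana/spl-account-compression";
+
+// Step 1 of tree setup: allocate the tree account via the System Program.
+// The depth/buffer pair must be a combination the program supports.
+async function buildAllocInstruction(
+  connection: Connection,
+  payer: PublicKey,
+  treeKeypair: Keypair,
+) {
+  const depthSizePair: ValidDepthSizePair = { maxDepth: 14, maxBufferSize: 64 };
+  const canopyDepth = 0; // illustrative; see the canopy discussion above
+
+  return createAllocTreeIx(
+    connection,
+    treeKeypair.publicKey,
+    payer,
+    depthSizePair,
+    canopyDepth,
+  );
+}
+```
+
+Step 2 is then a transaction that calls the `create_messages_tree` instruction
+handler shown above.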
-#### Add hashes to the tree
-
-With an initialized Merkle tree, it’s possible to start adding data hashes. This
-involves passing the uncompressed data to an instruction on your program that
-will hash the data, log it to the Noop program, and use the State Compression
-Program’s `append` instruction to add the hash to the tree. The following
-discuss what your instruction needs to do in depth:
-
-1. Use the `hashv` function from the `keccak` crate to hash the data. In most
-   cases, you’ll want to also hash the owner or authority of the data as well to
-   ensure that it can only be modified by the proper authority.
-2. Create a log object representing the data you wish to log to the Noop
-   Program, then call `wrap_application_data_v1` to issue a CPI to the Noop
-   program with this object. This ensures that the uncompressed data is readily
-   available to any client looking for it. For broad use cases like cNFTs, that
-   would be indexers. You might also create your own observing client to
-   simulate what indexers are doing but specific to your application.
-3. Build and issue a CPI to the State Compression Program’s `append`
-   instruction. This takes the hash computed in step 1 and adds it to the next
-   available leaf on your Merkle tree. Just as before, this requires the Merkle
-   tree address and the tree authority bump as signature seeds.
-
-When all this is put together using the messaging example, it looks something
-like this:
+#### Adding Hashes to the Tree
+
+Once the Merkle tree is initialized, you can begin adding data hashes to it.
+This process involves passing the uncompressed data to an instruction handler
+within your program, which will hash the data, log it to the Noop Program, and
+then use the State Compression Program’s `append` instruction to add the hash to
+the tree. Here’s what the instruction handler needs to do, in detail:
+
+1. **Hash the Data**: Use the `hashv` function from the `keccak` crate to hash
+   the data. It’s recommended to include the data owner or authority in the hash
+   to ensure that only the proper authority can modify it.
+
+2. **Log the Data**: Create a log object representing the data you want to log
+   to the Noop Program. Then, call `wrap_application_data_v1` to issue a CPI
+   (Cross-Program Invocation) to the Noop Program with this object. This makes
+   the uncompressed data easily accessible to any client, such as indexers, that
+   may need it. You could also develop a custom client to observe and index data
+   for your application specifically.
+
+3. **Append the Hash**: Construct and issue a CPI to the State Compression
+   Program’s `append` instruction. This will take the hash generated in step 1
+   and append it to the next available leaf on the Merkle tree. As with previous
+   steps, this requires the Merkle tree address and tree authority bump as
+   signature seeds.
+
+When applied to a messaging system, the resulting implementation might look like
+this:
 
 ```rust
-// Instruction for appending a message to a tree.
+/// Appends a message to the Merkle tree.
+///
+/// This function hashes the message and the sender’s public key to create a leaf node,
+/// logs the message using the noop program, and appends the leaf node to the Merkle tree.
+///
+/// # Arguments
+///
+/// * `ctx` - The context containing the accounts required for appending the message.
+/// * `message` - The message to append to the Merkle tree.
+///
+/// # Returns
+///
+/// This function returns a `Result<()>`, indicating success or failure.
+///
+/// # Errors
+///
+/// This function will return an error if any of the CPI calls (logging or appending) fail.
 pub fn append_message(ctx: Context, message: String) -> Result<()> {
-    // Hash the message + whatever key should have update authority
+    // Hash the message + sender’s public key to create a leaf node
     let leaf_node = keccak::hashv(&[message.as_bytes(), ctx.accounts.sender.key().as_ref()]).to_bytes();
-    // Create a new "message log" using the leaf node hash, sender, receipient, and message
-    let message_log = MessageLog::new(leaf_node.clone(), ctx.accounts.sender.key().clone(), ctx.accounts.receipient.key().clone(), message);
-    // Log the "message log" data using noop program
+
+    // Create a new "MessageLog" using the leaf node hash, sender, recipient, and message
+    let message_log = new_message_log(
+        leaf_node.clone(),
+        ctx.accounts.sender.key().clone(),
+        ctx.accounts.recipient.key().clone(),
+        message,
+    );
+
+    // Log the "MessageLog" data using the noop program
     wrap_application_data_v1(message_log.try_to_vec()?, &ctx.accounts.log_wrapper)?;
-    // Get the address for the Merkle tree account
+
+    // Get the Merkle tree account address
     let merkle_tree = ctx.accounts.merkle_tree.key();
-    // Define the seeds for pda signing
-    let signer_seeds: &[&[&[u8]]] = &[
+
+    // The seeds for PDAs signing
+    let signers_seeds: &[&[&[u8]]] = &[
         &[
             merkle_tree.as_ref(), // The address of the Merkle tree account as a seed
-            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the pda
+            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the PDA
         ],
     ];
-    // Create a new cpi context and append the leaf node to the Merkle tree.
+
+    // Create a CPI context and append the leaf node to the Merkle tree
     let cpi_ctx = CpiContext::new_with_signer(
-        ctx.accounts.compression_program.to_account_info(), // The spl account compression program
+        ctx.accounts.compression_program.to_account_info(), // The SPL account compression program
         Modify {
-            authority: ctx.accounts.tree_authority.to_account_info(), // The authority for the Merkle tree, using a PDA
+            authority: ctx.accounts.tree_authority.to_account_info(), // Authority for the Merkle tree, using a PDA
             merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be modified
             noop: ctx.accounts.log_wrapper.to_account_info(), // The noop program to log data
         },
-        signer_seeds // The seeds for pda signing
+        signers_seeds, // The seeds for PDAs signing
     );
 
-    // CPI to append the leaf node to the Merkle tree
+    // CPI call to append the leaf node to the Merkle tree
     append(cpi_ctx, leaf_node)?;
 
     Ok(())
 }
 ```
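+From the client, appending a message is then an ordinary transaction against
+your program. The sketch below assumes the program above plus Anchor’s
+generated TypeScript client, so the method and account names are illustrative:
+
+```typescript
+import * as anchor from "@coral-xyz/anchor";
+import { PublicKey } from "@solana/web3.js";
+
+// Assumes `program` is an anchor.Program instance for the messaging program
+// and `merkleTree` is the address of an initialized tree.
+async function sendCompressedMessage(
+  program: anchor.Program,
+  merkleTree: PublicKey,
+  recipient: PublicKey,
+  message: string,
+) {
+  // The tree authority PDA mirrors the seeds used on the program side:
+  // the Merkle tree address itself.
+  const [treeAuthority] = PublicKey.findProgramAddressSync(
+    [merkleTree.toBuffer()],
+    program.programId,
+  );
+
+  return program.methods
+    .appendMessage(message)
+    .accounts({
+      merkleTree,
+      treeAuthority,
+      recipient,
+      // plus the sender, Noop program, and compression program accounts,
+      // matching your accounts struct
+    })
+    .rpc();
+}
+```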
-#### Update hashes
+#### Updating Hashes
 
-To update data, you need to create a new hash to replace the hash at the
-relevant leaf on the Merkle tree. To do this, your program needs access to four
-things:
+To update a leaf in a Merkle tree, you’ll need to generate a new hash to replace
+the existing one. This process requires four key inputs:
 
-1. The index of the leaf to update
+1. The index of the leaf you wish to update
 2. The root hash of the Merkle tree
-3. The original data you wish to modify
+3. The original data you want to modify
 4. The updated data
 
-Given access to this data, a program instruction can follow very similar steps
-as those used to append the initial data to the tree:
-
-1. **Verify update authority** - The first step is new. In most cases, you want
-   to verify update authority. This typically involves proving that the signer
-   of the `update` transaction is the true owner or authority of the leaf at the
-   given index. Since the data is compressed as a hash on the leaf, we can’t
-   simply compare the `authority` public key to a stored value. Instead, we need
-   to compute the previous hash using the old data and the `authority` listed in
-   the account validation struct. We then build and issue a CPI to the State
-   Compression Program’s `verify_leaf` instruction using our computed hash.
-2. **Hash the new data** - This step is the same as the first step from
-   appending initial data. Use the `hashv` function from the `keccak` crate to
-   hash the new data and the update authority, each as their corresponding byte
+Using these inputs, you can follow a series of steps similar to those used when
+initially appending data to the tree:
+
+1. **Verify Update Authority**: The first step, unique to updates, is to verify
+   the authority of the entity making the update. This generally involves
+   checking that the signer of the `update` transaction is indeed the owner or
+   authority of the leaf at the specified index. Since the data in the leaf is
+   hashed, you can’t directly compare the authority’s public key to a stored
+   value. Instead, compute the previous hash using the old data and the
+   `authority` listed in the account validation struct. Then, invoke a CPI to
+   the State Compression Program’s `verify_leaf` instruction to confirm the hash
+   matches.
+
+2. **Hash the New Data**: This step mirrors the hashing process for appending
+   data. Use the `hashv` function from the `keccak` crate to hash the new data
+   and the update authority, converting each to its corresponding byte
    representation.
-3. **Log the new data** - This step is the same as the second step from
-   appending initial data. Create an instance of the log struct and call
-   `wrap_application_data_v1` to issue a CPI to the Noop program.
-4. **Replace the existing leaf hash** - This step is slightly different than the
-   last step of appending initial data. Build and issue a CPI to the State
-   Compression Program’s `replace_leaf` instruction. This uses the old hash, the
-   new hash, and the leaf index to replace the data of the leaf at the given
-   index with the new hash. Just as before, this requires the Merkle tree
-   address and the tree authority bump as signature seeds.
-
-Combined into a single instruction, this process looks as follows:
+
+3. **Log the New Data**: As with the initial append operation, create a log
+   object to represent the new data, and use `wrap_application_data_v1` to
+   invoke the Noop Program via CPI. This ensures that the new uncompressed data
+   is logged and accessible offchain.
+
+4. **Replace the Existing Leaf Hash**: This step is slightly different from
+   appending new data. Here, you’ll need to invoke a CPI to the State
+   Compression Program’s `replace_leaf` instruction. This operation will replace
+   the existing hash at the specified leaf index with the new hash. You’ll need
+   to provide the old hash, the new hash, and the leaf index. As usual, the
+   Merkle tree address and tree authority bump are required as signature seeds.
+
+When combined, the update instruction handler might look like this:
 
 ```rust
+/// Updates a message in the Merkle tree.
+///
+/// This function verifies the old message in the Merkle tree by checking its leaf node,
+/// and then replaces it with a new message by modifying the Merkle tree’s leaf node.
+///
+/// # Arguments
+///
+/// * `ctx` - The context containing the accounts required for updating the message.
+/// * `index` - The index of the leaf node to update.
+/// * `root` - The root hash of the Merkle tree.
+/// * `old_message` - The old message that is currently in the Merkle tree.
+/// * `new_message` - The new message to replace the old message.
+///
+/// # Returns
+///
+/// This function returns a `Result<()>`, indicating success or failure.
+///
+/// # Errors
+///
+/// This function will return an error if verification or replacement of the Merkle tree leaf fails.
 pub fn update_message(
     ctx: Context,
     index: u32,
@@ -442,59 +562,67 @@ pub fn update_message(
     old_message: String,
     new_message: String
 ) -> Result<()> {
-    let old_leaf = keccak
-        ::hashv(&[old_message.as_bytes(), ctx.accounts.sender.key().as_ref()])
-        .to_bytes();
+    // Hash the old message + sender’s public key to create the old leaf node
+    let old_leaf = keccak::hashv(&[old_message.as_bytes(), ctx.accounts.sender.key().as_ref()]).to_bytes();
 
+    // Get the Merkle tree account address
     let merkle_tree = ctx.accounts.merkle_tree.key();
 
-    // Define the seeds for pda signing
-    let signer_seeds: &[&[&[u8]]] = &[
+    // The seeds for PDAs signing
+    let signers_seeds: &[&[&[u8]]] = &[
         &[
             merkle_tree.as_ref(), // The address of the Merkle tree account as a seed
-            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the pda
+            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the PDA
         ],
     ];
 
-    // Verify Leaf
+    // Verify the old leaf node in the Merkle tree
     {
+        // If the old and new messages are the same, no update is needed
         if old_message == new_message {
             msg!("Messages are the same!");
             return Ok(());
         }
 
+        // Create CPI context for verifying the leaf node
         let cpi_ctx = CpiContext::new_with_signer(
-            ctx.accounts.compression_program.to_account_info(), // The spl account compression program
+            ctx.accounts.compression_program.to_account_info(), // The SPL account compression program
             VerifyLeaf {
-                merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be modified
+                merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be verified
             },
-            signer_seeds // The seeds for pda signing
+            signers_seeds, // The seeds for PDAs signing
         );
-        // Verify or Fails
+
+        // Verify the old leaf node, or fail if it doesn’t match
        verify_leaf(cpi_ctx, root, old_leaf, index)?;
    }
 
-    let new_leaf = keccak
-        ::hashv(&[new_message.as_bytes(), ctx.accounts.sender.key().as_ref()])
-        .to_bytes();
+    // Hash the new message + sender’s public key to create the new leaf node
+    let new_leaf = keccak::hashv(&[new_message.as_bytes(), ctx.accounts.sender.key().as_ref()]).to_bytes();
 
-    // Log out for indexers
-    let message_log = MessageLog::new(new_leaf.clone(), ctx.accounts.sender.key().clone(), ctx.accounts.recipient.key().clone(), new_message);
-    // Log the "message log" data using noop program
+    // Log the new message for indexers using the noop program
+    let message_log = new_message_log(
+        new_leaf.clone(),
+        ctx.accounts.sender.key().clone(),
+        ctx.accounts.recipient.key().clone(),
+        new_message,
+    );
    wrap_application_data_v1(message_log.try_to_vec()?, &ctx.accounts.log_wrapper)?;
 
-    // replace leaf
+    // Replace the old leaf with the new leaf in the Merkle tree
    {
+        // Create CPI context for replacing the leaf node
        let cpi_ctx = CpiContext::new_with_signer(
-            ctx.accounts.compression_program.to_account_info(), // The spl account compression program
+            ctx.accounts.compression_program.to_account_info(), // The SPL account compression program
            Modify {
                authority: ctx.accounts.tree_authority.to_account_info(), // The authority for the Merkle tree, using a PDA
                merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be modified
                noop: ctx.accounts.log_wrapper.to_account_info(), // The noop program to log data
            },
-            signer_seeds // The seeds for pda signing
+            signers_seeds, // The seeds for PDAs signing
        );
-        // CPI to append the leaf node to the Merkle tree
+
+        // Replace the old leaf node with the new one in the Merkle tree
        replace_leaf(cpi_ctx, root, old_leaf, new_leaf, index)?;
    }
 
@@ -502,55 +630,63 @@ pub fn update_message(
 }
 ```
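+Note that the caller has to supply the tree’s current root. A client can read
+it straight from the tree account before sending the update - a sketch,
+assuming the `@solana/spl-account-compression` JS package:
+
+```typescript
+import { Connection, PublicKey } from "@solana/web3.js";
+import { ConcurrentMerkleTreeAccount } from "@solana/spl-account-compression";
+
+// Fetch the tree account and pull the current root to pass into the
+// update instruction.
+async function fetchCurrentRoot(connection: Connection, treeAddress: PublicKey) {
+  const treeAccount = await ConcurrentMerkleTreeAccount.fromAccountAddress(
+    connection,
+    treeAddress,
+  );
+
+  // Returned as a Buffer; the program expects a 32-byte array
+  return Array.from(treeAccount.getCurrentRoot());
+}
+```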
-#### Delete hashes
+#### Deleting Hashes
 
-At the time of writing, the State Compression Program doesn’t provide an
-explicit `delete` instruction. Instead, you’ll want to update leaf data with
-data that indicates the data as “deleted.” The specific data will depend on your
-use case and security concerns. Some may opt to set all data to 0, whereas
-others might store a static string that all “deleted” items will have in common.
+As of now, the State Compression Program does not have a dedicated `delete`
+instruction.
 
-#### Access data from a client
+Instead, you can simulate deletion by updating the leaf data with a value that
+signals it has been "deleted."
 
-The discussion so far has covered 3 of the 4 standard CRUD procedures: Create,
-Update, and Delete. What’s left is one of the more difficult concepts in state
-compression: reading data.
+The exact value you choose will depend on your specific use case and security
+requirements. For some, this may involve setting all data fields to zero, while
+others might prefer storing a predefined static string that marks the leaf as
+deleted. This approach allows you to handle deletions in a way that suits your
+application’s needs without compromising data integrity.
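+For example, a client could "delete" a message by reusing the update flow with
+a tombstone value. A minimal sketch - both the marker and the `updateMessage`
+helper below are purely illustrative stand-ins for your own client code:
+
+```typescript
+// An illustrative tombstone marker; choose a value that fits your
+// application's security requirements.
+const DELETED_MARKER = "__DELETED__";
+
+// "Deleting" is just an update that replaces the old content with the marker.
+async function deleteMessage(
+  updateMessage: (oldMessage: string, newMessage: string) => Promise<string>,
+  oldMessage: string,
+) {
+  return updateMessage(oldMessage, DELETED_MARKER);
+}
+```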
-Accessing data from a client is tricky primarily because the data isn’t stored
-in a format that is easy to access. The data hashes stored in the Merkle tree
-account can’t be used to reconstruct the initial data, and the data logged to
-the Noop program isn’t available indefinitely.
+#### Accessing Data from a Client
 
-Your best bet is one of two options:
+We’ve covered creating, updating, and deleting data in state compression, but
+reading data presents its own unique challenges.
 
-1. Work with an indexing provider to create a custom indexing solution for your
-   program, then write client-side code based on how the indexer gives you
-   access to the data.
-2. Create your own pseudo-indexer as a lighter-weight solution.
+Accessing compressed data from a client can be tricky because the Merkle tree
+stores only data hashes, which cannot be used to recover the original data.
+Additionally, the uncompressed data logged to the Noop program is not retained
+indefinitely.
+
+To access this data, you generally have two options:
+
+1. **Work with an indexing provider** to develop a custom solution tailored to
+   your program. This allows you to write client-side code to retrieve and
+   access the data based on how the indexer provides it.
+2. **Create your own pseudo-indexer** to store and retrieve the data, offering a
+   lighter-weight solution.
 
-If your project is truly decentralized such that many participants will interact
-with your program through means other than your own frontend, then option 2
-might not be sufficient. However, depending on the scale of the project or
-whether or not you’ll have control over most program access, it can be a viable
-approach.
+If your project is decentralized and expects widespread interaction beyond your
+frontend, option 2 might not be sufficient. However, if you have control over
+most program interactions, this approach can work.
 
-There is no “right” way to do this. Two potential approaches are:
+There’s no one-size-fits-all solution here. Two potential strategies include:
 
-1. Store the raw data in a database at the same time as sending it to the
-   program, along with the leaf that the data is hashed and stored to.
-2. Create a server that observes your program’s transactions, looks up the
-   associated Noop logs, decodes the logs, and stores them.
+1. **Store raw data**: One approach is to store the raw data in a database at
+   the same time as you send it to the program. This allows you to keep a record
+   of the data, along with the Merkle tree leaf where the data was hashed and
+   stored.
+
+2. **Create a transaction observer**: Another approach is to create a server
+   that observes the transactions your program executes. This server would fetch
+   transactions, look up the related Noop logs, decode them, and store the data.
 
-We’ll do a little bit of both when writing tests in this lesson’s lab (though we
-won’t persist data in a db - it will only live in memory for the duration of the
-tests).
-
-The setup for this is somewhat tedious. Given a particular transaction, you can
-fetch the transaction from the RPC provider, get the inner instructions
-associated with the Noop program, use the `deserializeApplicationDataEvent`
-function from the `@solana/spl-account-compression` JS package to get the logs,
-then deserialize them using Borsh. Below is an example based on the messaging
-program used above.
+When writing tests in the lab, we’ll simulate both of these approaches, although
+instead of using a database, the data will be stored in memory for the test’s
+duration.
+
+The process of setting this up can be a bit complex. For a given transaction,
+you’ll retrieve it from the RPC provider, extract the inner instructions related
+to the Noop program, and use the `deserializeApplicationDataEvent` function from
+the `@solana/spl-account-compression` JS package to decode the logs. Then,
+you’ll use Borsh to deserialize the data. Here’s an example from the messaging
+program to illustrate the process:
 
 ```typescript
 export async function getMessageLog(
@@ -612,18 +748,24 @@ export async function getMessageLog(
 
 ### Conclusion
 
-Generalized state compression can be difficult but is absolutely possible to
-implement with the available tools. Additionally, the tools and programs will
-only get better over time. If you come up with solutions that improve your
-development experience, please share with the community!
+Implementing generalized state compression may be challenging, but it is
+entirely achievable using the available tools. As the ecosystem evolves, these
+tools and programs will continue to improve, making the process more
+streamlined. If you discover solutions that enhance your development experience,
+please don’t hesitate to share them with the community!
+
+Remember to write comprehensive tests for your state compression implementation.
+This ensures your program behaves correctly and helps catch potential issues
+early in the development process.
 
-## Lab
+## Lab: Building a Note-Taking App with Generalized State Compression
 
-Let’s practice generalized state compression by creating a new Anchor program.
-This program will use custom state compression to power a simple note-taking
-app.
+In this lab, we’ll walk through the process of developing an Anchor program that
+uses custom state compression to power a basic note-taking app. This will give
+you hands-on experience in working with compressed data and help reinforce key
+concepts around state compression on Solana.
 
-#### 1. Project setup
+#### 1. Set up the Project
 
 Start by initializing an Anchor program:
 
@@ -631,8 +773,9 @@ Start by initializing an Anchor program:
 anchor init compressed-notes
 ```
 
-We’ll be using the `spl-account-compression` crate with the `cpi` feature
-enabled. Let’s add it as a dependency in `programs/compressed-notes/Cargo.toml`.
+Next, we’ll add the `spl-account-compression` crate with the `cpi` feature
+enabled. To do this, update the `Cargo.toml` file located in
+`programs/compressed-notes` by adding the following dependency:
 
 ```toml
 [dependencies]
@@ -641,9 +784,12 @@
 spl-account-compression = { version="0.2.0", features = ["cpi"] }
 solana-program = "1.16.0"
 ```
 
-We’ll be testing locally but we need both the Compression program and the Noop
-program from Mainnet. We’ll need to add these to the `Anchor.toml` in the root
-directory so they get cloned to our local cluster.
+We’ll be running tests locally, but we’ll need both the State Compression
+Program and the Noop Program from Mainnet to do so. To make sure these programs
+are available on our local cluster, include them in the `Anchor.toml` file in
+the root directory so they get cloned to the local validator:
 
 ```toml
 [test.validator]
@@ -656,15 +802,15 @@
 address = "noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV"
 address = "cmtDvXumGCrqC1Age74AVPhSRVXJMd8PJS91L8KbNCK"
 ```
 
-Lastly, let’s prepare the `lib.rs` file for the rest of the Demo. Remove the
-`initialize` instruction and the `Initialize` accounts struct, then add the
-imports shown in the code snippet below (be sure to put in **_your_** program
-id):
+Finally, let’s set up the `lib.rs` file for the remainder of the demo. Start by
+removing the `initialize` instruction and the `Initialize` accounts struct.
+Next, add the necessary imports as indicated in the code snippet, making sure to
+include **_your_** program ID.
 
 ```rust
 use anchor_lang::{
     prelude::*,
-    solana_program::keccak
+    solana_program::keccak,
 };
 use spl_account_compression::{
     Noop,
@@ -676,125 +822,216 @@ use spl_account_compression::{
     wrap_application_data_v1,
 };
 
-declare_id!("YOUR_KEY_GOES_HERE");
-
-// STRUCTS GO HERE
+// Replace with your program ID
+declare_id!("PROGRAM_PUBLIC_KEY_GOES_HERE");
 
+/// A program that manages compressed notes using a Merkle tree for efficient storage and verification.
 #[program]
 pub mod compressed_notes {
     use super::*;
 
-    // FUNCTIONS GO HERE
+    // Define your program instructions here.
+
+    /// Initializes a new Merkle tree for storing notes.
+    ///
+    /// This function creates a Merkle tree with the specified maximum depth and buffer size.
+    ///
+    /// # Arguments
+    ///
+    /// * `ctx` - The context containing the accounts required for initializing the tree.
+    /// * `max_depth` - The maximum depth of the Merkle tree.
+    /// * `max_buffer_size` - The maximum buffer size of the Merkle tree.
+    pub fn create_note_tree(
+        ctx: Context,
+        max_depth: u32,
+        max_buffer_size: u32,
+    ) -> Result<()> {
+        // Tree creation logic here
+        Ok(())
+    }
+
+    /// Appends a new note to the Merkle tree.
+    ///
+    /// This function hashes the note and adds it as a leaf node to the tree.
+    ///
+    /// # Arguments
+    ///
+    /// * `ctx` - The context containing the accounts required for appending the note.
+    /// * `note` - The note to append to the Merkle tree.
+    pub fn append_note(ctx: Context, note: String) -> Result<()> {
+        // Note appending logic here
+        Ok(())
+    }
+
+    /// Updates an existing note in the Merkle tree.
+    ///
+    /// This function verifies the old note and replaces it with the new note in the tree.
+    ///
+    /// # Arguments
+    ///
+    /// * `ctx` - The context containing the accounts required for updating the note.
+    /// * `index` - The index of the note in the tree.
+    /// * `root` - The root of the Merkle tree.
+    /// * `old_note` - The old note to be replaced.
+    /// * `new_note` - The new note to replace the old note.
+    pub fn update_note(
+        ctx: Context,
+        index: u32,
+        root: [u8; 32],
+        old_note: String,
+        new_note: String,
+    ) -> Result<()> {
+        // Note updating logic here
+        Ok(())
+    }
+
+    // Add more functions as needed
+}
+
+// Add structs for accounts, state, etc., here
+/// Struct for holding the account information required for note operations.
+#[derive(Accounts)]
+pub struct NoteAccounts<'info> {
+    /// The Merkle tree account.
+    #[account(mut)]
+    pub merkle_tree: AccountInfo<'info>,
+    /// The authority for the Merkle tree.
+    pub tree_authority: AccountInfo<'info>,
+    /// The owner of the note.
+    pub owner: Signer<'info>,
+    /// The SPL Account Compression program.
+    pub compression_program: Program<'info, SplAccountCompression>,
+    /// The Noop program used for logging leaf data.
+    pub log_wrapper: Program<'info, Noop>,
 }
 ```
 
-For the rest of this Demo, we’ll be making updates to the program code directly
-in the `lib.rs` file. This simplifies the explanations a bit. You’re welcome to
-modify the structure as you will.
+For the remainder of this demo, we’ll be making updates directly in the `lib.rs`
+file. This approach simplifies the explanations. You can modify the structure as
+needed.
 
-Feel free to build before continuing. This ensures your environment is working
-properly and shortens future build times.
+It’s a good idea to build your project now to confirm that your environment is
+set up correctly and to reduce build times in the future.
 
 #### 2. Define `Note` schema
 
-Next, we’re going to define what a note looks like within our program. Notes
-should have the following properties:
+Next, we’ll define the structure of a note within our program. Each note should
+have the following attributes:
 
-- `leaf_node` - this should be a 32-byte array representing the hash stored on
-  the leaf node
-- `owner` - the public key of the note owner
-- `note` - the string representation of the note
+- `leaf_node` - a 32-byte array representing the hash stored on the leaf node.
+- `owner` - the public key of the note’s owner.
+- `note` - a string containing the text of the note.
 
 ```rust
-#[derive(AnchorSerialize)]
+#[derive(AnchorSerialize, AnchorDeserialize, Clone)]
+/// A struct representing a log entry in the Merkle tree for a note.
 pub struct NoteLog {
-    leaf_node: [u8; 32], // The leaf node hash
-    owner: Pubkey, // Pubkey of the note owner
-    note: String, // The note message
+    /// The leaf node hash generated from the note data.
+    pub leaf_node: [u8; 32],
+    /// The public key of the note’s owner.
+    pub owner: Pubkey,
+    /// The content of the note.
+    pub note: String,
+}
 
-impl NoteLog {
-    // Constructs a new note from given leaf node and message
-    pub fn new(leaf_node: [u8; 32], owner: Pubkey, note: String) -> Self {
-        Self { leaf_node, owner, note }
-    }
+impl NoteLog {
+    /// Constructs a new note log from a given leaf node, owner, and note message.
+    ///
+    /// # Arguments
+    ///
+    /// * `leaf_node` - A 32-byte array representing the hash of the note.
+    /// * `owner` - The public key of the note’s owner.
+    /// * `note` - The note message content.
+    ///
+    /// # Returns
+    ///
+    /// A new `NoteLog` struct containing the provided data.
+    pub fn new(leaf_node: [u8; 32], owner: Pubkey, note: String) -> Self {
+        Self { leaf_node, owner, note }
+    }
 }
 ```
 
-In a traditional Anchor program, this would be an account struct, but since
-we’re using state compression, our accounts won’t be mirroring our native
-structures. Since we don’t need all the functionality of an account, we can just
-use the `AnchorSerialize` derive macro rather than the `account` macro.
+In a traditional Anchor program, a note would typically be represented by a
+`Note` struct using the `account` macro. However, because we’re using state
+compression, we use `NoteLog`, a struct with the `AnchorSerialize` derive macro
+applied instead.
 
-#### 3. Define input accounts and constraints
+#### 3. Define Account Constraints
 
-As luck would have it, every one of our instructions will be using the same
-accounts. We’ll create a single `NoteAccounts` struct for our account
-validation. It’ll need the following accounts:
+All our instruction handlers will use the same
+[account constraints](https://www.anchor-lang.com/docs/account-constraints):
 
-- `owner` - this is the creator and owner of the note; should be a signer on the
-  transaction
-- `tree_authority` - the authority for the Merkle tree; used for signing
-  compression-related CPIs
-- `merkle_tree` - the address of the Merkle tree used to store the note hashes;
-  will be unchecked since it is validated by the State Compression Program
-- `log_wrapper` - the address of the Noop Program
-- `compression_program` - the address of the State Compression Program
+- `owner` - The creator and owner of the note, who must sign the transaction.
+- `tree_authority` - The authority for the Merkle tree, used for signing
+  compression-related CPIs.
+- `merkle_tree` - The address of the Merkle tree where note hashes are stored;
+  this will be unchecked as it’s validated by the State Compression Program.
+- `log_wrapper` - The address of the Noop Program.
+- `compression_program` - The address of the State Compression Program.
 
 ```rust
 #[derive(Accounts)]
+/// Accounts required for interacting with the Merkle tree for note management.
 pub struct NoteAccounts<'info> {
-    // The payer for the transaction
+    /// The payer for the transaction, who also owns the note.
     #[account(mut)]
    pub owner: Signer<'info>,
 
-    // The pda authority for the Merkle tree, only used for signing
+    /// The PDA (Program Derived Address) authority for the Merkle tree.
+    /// This account is only used for signing and is derived from the Merkle tree address.
    #[account(
        seeds = [merkle_tree.key().as_ref()],
        bump,
    )]
    pub tree_authority: SystemAccount<'info>,
 
-    // The Merkle tree account
-    /// CHECK: This account is validated by the spl account compression program
+    /// The Merkle tree account, where the notes are stored.
+    /// This account is validated by the SPL Account Compression program.
+    ///
+    /// The `UncheckedAccount` type is used since the account’s validation is deferred to the CPI.
    #[account(mut)]
    pub merkle_tree: UncheckedAccount<'info>,
 
-    // The noop program to log data
+    /// The Noop program used for logging data.
+    /// This is part of the SPL Account Compression stack and logs the note operations.
    pub log_wrapper: Program<'info, Noop>,
 
-    // The spl account compression program
+    /// The SPL Account Compression program used for Merkle tree operations.
    pub compression_program: Program<'info, SplAccountCompression>,
}
```
 
-#### 4. Create `create_note_tree` instruction
-
-Next, let’s create our `create_note_tree` instruction. Remember, clients will
-have already allocated the Merkle tree account but will use this instruction to
-initialize it.
+#### 4. Create `create_note_tree` Instruction Handler
 
-All this instruction needs to do is build a CPI to invoke the
-`init_empty_merkle_tree` instruction on the State Compression Program. To do
-this, it needs the accounts listed in the `NoteAccounts` account validation
-struct. It also needs two additional arguments:
+Next, we’ll create the `create_note_tree` instruction handler to initialize the
+already-allocated Merkle tree account.
 
-1. `max_depth` - the max depth of the Merkle tree
-2. `max_buffer_size` - the max buffer size of the Merkle tree
+To implement this, you’ll need to build a CPI to invoke the
+`init_empty_merkle_tree` instruction from the State Compression Program. The
+`NoteAccounts` struct will provide the necessary accounts, but you’ll also need
+to include two additional arguments:
 
-These values are required for initializing the data on the Merkle tree account.
-Remember, the max depth refers to the maximum number of hops to get from any
-leaf to the root of the tree. Max buffer size refers to the amount of space
-reserved for storing a changelog of tree updates. This changelog is used to
-ensure that your tree can support concurrent updates within the same block.
+1. **`max_depth`** - Specifies the maximum depth of the Merkle tree, indicating
+   the longest path from any leaf to the root.
+2. **`max_buffer_size`** - Defines the maximum buffer size for the Merkle tree,
+   which determines the space allocated for recording tree updates. This buffer
+   is crucial for supporting concurrent updates within the same block.
 
 ```rust
 #[program]
 pub mod compressed_notes {
    use super::*;
 
-    // Instruction for creating a new note tree.
+    /// Instruction to create a new note tree (Merkle tree) for storing compressed notes.
+    ///
+    /// # Arguments
+    /// * `ctx` - The context that includes the accounts required for this transaction.
+    /// * `max_depth` - The maximum depth of the Merkle tree.
+    /// * `max_buffer_size` - The maximum buffer size of the Merkle tree.
+    ///
+    /// # Returns
+    /// * `Result<()>` - Returns a success or error result.
    pub fn create_note_tree(
        ctx: Context,
        max_depth: u32, // Max depth of the Merkle tree
@@ -803,59 +1040,61 @@ pub mod compressed_notes {
        // Get the address for the Merkle tree account
        let merkle_tree = ctx.accounts.merkle_tree.key();
 
-        // Define the seeds for pda signing
-        let signer_seeds: &[&[&[u8]]] = &[&[
-            merkle_tree.as_ref(), // The address of the Merkle tree account as a seed
-            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the pda
+        // The seeds for PDAs signing
+        let signers_seeds: &[&[&[u8]]] = &[&[
+            merkle_tree.as_ref(), // The Merkle tree account address as the seed
+            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the tree authority PDA
        ]];
 
-        // Create cpi context for init_empty_merkle_tree instruction.
+        // Create a CPI (Cross-Program Invocation) context for initializing the empty Merkle tree.
        let cpi_ctx = CpiContext::new_with_signer(
-            ctx.accounts.compression_program.to_account_info(), // The spl account compression program
+            ctx.accounts.compression_program.to_account_info(), // The SPL Account Compression program
            Initialize {
-                authority: ctx.accounts.tree_authority.to_account_info(), // The authority for the Merkle tree, using a PDA
-                merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be initialized
-                noop: ctx.accounts.log_wrapper.to_account_info(), // The noop program to log data
+                authority: ctx.accounts.tree_authority.to_account_info(), // PDA authority for the Merkle tree
+                merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account
+                noop: ctx.accounts.log_wrapper.to_account_info(), // The Noop program for logging data
            },
-            signer_seeds, // The seeds for pda signing
+            signers_seeds, // The seeds for PDAs signing
        );
 
-        // CPI to initialize an empty Merkle tree with given max depth and buffer size
+        // CPI call to initialize an empty Merkle tree with the specified depth and buffer size.
        init_empty_merkle_tree(cpi_ctx, max_depth, max_buffer_size)?;
+
        Ok(())
    }
 
-    //...
+    // Additional functions for the program can go here...
}
```
 
-Ensure that your signer seeds on the CPI include both the Merkle tree address
-and the tree authority bump.
-
-#### 5. Create `append_note` instruction
-
-Now, let’s create our `append_note` instruction. This instruction needs to take
-the raw note as a String and compress it into a hash that we’ll store on the
-Merkle tree. We’ll also log the note to the Noop program so the entirety of the
-data exists within the chain’s state.
-
-The steps here are as follows:
-
-1. Use the `hashv` function from the `keccak` crate to hash the note and owner,
-   each as their corresponding byte representation. It’s **_crucial_** that you
-   hash the owner as well as the note. This is how we’ll verify note ownership
-   before updates in the update instruction.
-2. Create an instance of the `NoteLog` struct using the hash from step 1, the
-   owner’s public key, and the raw note as a String. Then call
-   `wrap_application_data_v1` to issue a CPI to the Noop program, passing the
-   instance of `NoteLog`. This ensures the entirety of the note (not just the
-   hash) is readily available to any client looking for it. For broad use cases
-   like cNFTs, that would be indexers. You might create your observing client to
-   simulate what indexers are doing but for your own application.
-3. Build and issue a CPI to the State Compression Program’s `append`
-   instruction. This takes the hash computed in step 1 and adds it to the next
-   available leaf on your Merkle tree. Just as before, this requires the Merkle
-   tree address and the tree authority bump as signature seeds.
+Make sure that when setting up your CPI, you include both the Merkle tree
+address and the tree authority bump in the signer seeds.
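+Before moving on, it’s worth seeing the client side of this flow. In the lab’s
+TypeScript tests you’ll allocate the tree and then call the instruction handler
+above. The sketch below is illustrative - it assumes an Anchor `program` client
+for `compressed-notes` and the `@solana/spl-account-compression` package:
+
+```typescript
+import * as anchor from "@coral-xyz/anchor";
+import { Keypair, Transaction } from "@solana/web3.js";
+import {
+  createAllocTreeIx,
+  ValidDepthSizePair,
+} from "@solana/spl-account-compression";
+
+async function createNoteTree(program: anchor.Program, payer: Keypair) {
+  const connection = program.provider.connection;
+  const merkleTree = Keypair.generate();
+  const pair: ValidDepthSizePair = { maxDepth: 14, maxBufferSize: 64 };
+
+  // Step 1: allocate the tree account via the System Program
+  const allocIx = await createAllocTreeIx(
+    connection,
+    merkleTree.publicKey,
+    payer.publicKey,
+    pair,
+    0, // canopy depth (illustrative)
+  );
+
+  // Step 2: initialize the account through our program
+  const initIx = await program.methods
+    .createNoteTree(pair.maxDepth, pair.maxBufferSize)
+    .accounts({ merkleTree: merkleTree.publicKey })
+    .instruction();
+
+  const tx = new Transaction().add(allocIx, initIx);
+  await anchor.web3.sendAndConfirmTransaction(connection, tx, [payer, merkleTree]);
+  return merkleTree.publicKey;
+}
+```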
+#### 5. Create `append_note` Instruction Handler
+
+Let’s create the `append_note` instruction handler. This will compress a raw
+note into a hash and store it on the Merkle tree, while also logging the note to
+the Noop program so the full note remains available in the ledger.
+
+Here’s how to accomplish this:
+
+1. **Hash the Data**: Utilize the `hashv` function from the `keccak` crate to
+   compute a hash of the note and the owner’s public key. Both should be
+   converted to their byte representations. It’s essential to hash the owner
+   along with the note to facilitate ownership verification during updates.
+
+2. **Log the Data**: Create a `NoteLog` instance with the hash from step 1, the
+   owner’s public key, and the note as a `String`. Then, use
+   `wrap_application_data_v1` to issue a CPI to the Noop program with this
+   `NoteLog` instance. This ensures the complete note (not just the hash) is
+   available to clients, similar to how indexers manage cNFTs. You might also
+   develop an observing client to simulate indexer functionality specific to
+   your application.
+
+3. **Append to the Merkle Tree**: Build and issue a CPI to the State Compression
+   Program’s `append` instruction. This will add the hash from step 1 to the
+   next available leaf on your Merkle tree. Ensure that the Merkle tree address
+   and the tree authority bump are included as signature seeds.
 
 ```rust
 #[program]
@@ -864,34 +1103,47 @@ pub mod compressed_notes {
 
    //...
 
-    // Instruction for appending a note to a tree.
+    /// Instruction to append a note to the Merkle tree.
+    ///
+    /// # Arguments
+    /// * `ctx` - The context containing accounts needed for this transaction.
+    /// * `note` - The note message to append as a leaf node in the Merkle tree.
+    ///
+    /// # Returns
+    /// * `Result<()>` - Returns a success or error result.
    pub fn append_note(ctx: Context, note: String) -> Result<()> {
-        // Hash the "note message" which will be stored as leaf node in the Merkle tree
-        let leaf_node =
-            keccak::hashv(&[note.as_bytes(), ctx.accounts.owner.key().as_ref()]).to_bytes();
-        // Create a new "note log" using the leaf node hash and note.
+        // Step 1: Hash the note message to create a leaf node for the Merkle tree
+        let leaf_node = keccak::hashv(&[note.as_bytes(), ctx.accounts.owner.key().as_ref()]).to_bytes();
+
+        // Step 2: Create a new NoteLog instance containing the leaf node, owner, and note
        let note_log = NoteLog::new(leaf_node.clone(), ctx.accounts.owner.key().clone(), note);
-        // Log the "note log" data using noop program
+
+        // Step 3: Log the NoteLog data using the Noop program
        wrap_application_data_v1(note_log.try_to_vec()?, &ctx.accounts.log_wrapper)?;
-        // Get the address for the Merkle tree account
+
+        // Step 4: Get the Merkle tree account key (address)
        let merkle_tree = ctx.accounts.merkle_tree.key();
-        // Define the seeds for pda signing
-        let signer_seeds: &[&[&[u8]]] = &[&[
+
+        // Step 5: The seeds for PDAs signing
+        let signers_seeds: &[&[&[u8]]] = &[&[
            merkle_tree.as_ref(), // The address of the Merkle tree account as a seed
-            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the pda
+            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the PDA
        ]];
-        // Create a new cpi context and append the leaf node to the Merkle tree.
+
+        // Step 6: Create a CPI (Cross-Program Invocation) context to modify the Merkle tree
        let cpi_ctx = CpiContext::new_with_signer(
-            ctx.accounts.compression_program.to_account_info(), // The spl account compression program
+            ctx.accounts.compression_program.to_account_info(), // SPL Account Compression program
            Modify {
-                authority: ctx.accounts.tree_authority.to_account_info(), // The authority for the Merkle tree, using a PDA
-                merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be modified
-                noop: ctx.accounts.log_wrapper.to_account_info(), // The noop program to log data
+                authority: ctx.accounts.tree_authority.to_account_info(), // The PDA authority for the Merkle tree
+                merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to modify
+                noop: ctx.accounts.log_wrapper.to_account_info(), // The Noop program for logging data
            },
-            signer_seeds, // The seeds for pda signing
+            signers_seeds, // Seeds for the PDAs that will sign the transaction
        );
-        // CPI to append the leaf node to the Merkle tree
+
+        // Step 7: Append the leaf node to the Merkle tree using CPI
        append(cpi_ctx, leaf_node)?;
+
        Ok(())
    }
 
@@ -899,42 +1151,42 @@
}
```
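+Once notes are being appended, your tests can pull them back out of the Noop
+instruction data. The rough sketch below assumes `@solana/web3.js`, `bs58`, and
+the `deserializeApplicationDataEvent` helper from
+`@solana/spl-account-compression`; the final Borsh step into a `NoteLog` shape
+is left as a comment since its schema is up to you:
+
+```typescript
+import { Connection, PublicKey } from "@solana/web3.js";
+import { deserializeApplicationDataEvent } from "@solana/spl-account-compression";
+import bs58 from "bs58";
+
+const NOOP_PROGRAM_ID = new PublicKey(
+  "noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV",
+);
+
+// Fetch a transaction and decode the application data logged to the Noop program.
+async function getNoopEvents(connection: Connection, signature: string) {
+  const tx = await connection.getTransaction(signature, {
+    maxSupportedTransactionVersion: 0,
+  });
+  if (!tx?.meta?.innerInstructions) return [];
+
+  const accountKeys = tx.transaction.message.getAccountKeys();
+  const events = [];
+
+  for (const inner of tx.meta.innerInstructions) {
+    for (const ix of inner.instructions) {
+      const programId = accountKeys.get(ix.programIdIndex);
+      if (programId?.equals(NOOP_PROGRAM_ID)) {
+        // Instruction data is base58-encoded; decode it, then unwrap the
+        // event emitted by wrap_application_data_v1
+        const event = deserializeApplicationDataEvent(
+          Buffer.from(bs58.decode(ix.data)),
+        );
+        // The event's applicationData payload holds the Borsh-serialized
+        // NoteLog; deserialize it with your NoteLog schema (not shown here)
+        events.push(event);
+      }
+    }
+  }
+  return events;
+}
+```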
-#### 6. Create `update_note` instruction
-
-The last instruction we’ll make is the `update_note` instruction. This should
-replace an existing leaf with a new hash representing the new updated note data.
-
-For this to work, we’ll need the following parameters:
-
-1. `index` - the index of the leaf we are going to update
-2. `root` - the root hash of the Merkle tree
-3. `old_note` - the string representation of the old note we’re updating
-4. `new_note` - the string representation of the new note we want to update to
-
-Remember, the steps here are similar to `append_note`, but with some minor
-additions and modifications:
-
-1. The first step is new. We need to first prove that the `owner` calling this
-   function is the true owner of the leaf at the given index. Since the data is
-   compressed as a hash on the leaf, we can’t simply compare the `owner` public
-   key to a stored value. Instead, we need to compute the previous hash using
-   the old note data and the `owner` listed in the account validation struct. We
-   then build and issue a CPI to the State Compression Program’s `verify_leaf`
-   instruction using our computed hash.
-2. This step is the same as the first step from creating the `append_note`
-   instruction. Use the `hashv` function from the `keccak` crate to hash the new
-   note and its owner, each as their corresponding byte representation.
-3. This step is the same as the second step from creating the `append_note`
-   instruction. Create an instance of the `NoteLog` struct using the hash from
-   step 2, the owner’s public key, and the new note as a string. Then call
-   `wrap_application_data_v1` to issue a CPI to the Noop program, passing the
-   instance of `NoteLog`
-4. This step is slightly different than the last step from creating the
-   `append_note` instruction. Build and issue a CPI to the State Compression
-   Program’s `replace_leaf` instruction. This uses the old hash, the new hash,
-   and the leaf index to replace the data of the leaf at the given index with
-   the new hash. Just as before, this requires the Merkle tree address and the
-   tree authority bump as signature seeds.
+#### 6. Create `update_note` Instruction Handler
+
+The final instruction we’ll implement is `update_note`, which will replace an
+existing leaf with a new hash that represents the updated note data.
+
+To perform this update, you’ll need the following parameters:
+
+1. **Index**: The index of the leaf to be updated.
+2. **Root**: The root hash of the Merkle tree.
+3. **Old Note**: The string representation of the note that is being updated.
+4. **New Note**: The string representation of the updated note.
+
+The process for this instruction is similar to `append_note`, with some
+additional steps:
+
+1. **Verify Ownership**: Before updating, prove that the `owner` executing this
+   instruction is the rightful owner of the leaf at the specified index. Since
+   the leaf data is compressed as a hash, you can’t directly compare the
+   `owner`'s public key to a stored value. Instead, compute the previous hash
+   using the old note data and the `owner` from the account validation struct.
+   Then, use this computed hash to build and issue a CPI to the State
+   Compression Program’s `verify_leaf` instruction.
+
+2. **Hash the New Data**: Hash the new note and the owner’s public key using the
+   `hashv` function from the `keccak` crate, converting each to its byte
+   representation.
+
+3. **Log the New Data**: Create a `NoteLog` instance with the new hash from step
+   2, the owner’s public key, and the new note. Call `wrap_application_data_v1`
+   to issue a CPI to the Noop program with this `NoteLog` instance, ensuring the
+   updated note data is available to clients.
+
+4. **Replace the Leaf**: Build and issue a CPI to the State Compression
+   Program’s `replace_leaf` instruction. This will replace the old hash with the
+   new hash at the specified leaf index. Ensure the Merkle tree address and the
+   tree authority bump are included as signature seeds.
 
 ```rust
 #[program]
@@ -943,85 +1195,97 @@ pub mod compressed_notes {
 
    //...
 
-    pub fn update_note(
+    /// Instruction to update a note in the Merkle tree.
+    ///
+    /// # Arguments
+    /// * `ctx` - The context containing accounts needed for this transaction.
+    /// * `index` - The index of the note to update in the Merkle tree.
+    /// * `root` - The root hash of the Merkle tree for verification.
+    /// * `old_note` - The current note to be updated.
+    /// * `new_note` - The new note that will replace the old one.
+    ///
+    /// # Returns
+    /// * `Result<()>` - Returns a success or error result.
+    pub fn update_note(
         ctx: Context<NoteAccounts>,
         index: u32,
         root: [u8; 32],
         old_note: String,
         new_note: String,
     ) -> Result<()> {
-        let old_leaf =
-            keccak::hashv(&[old_note.as_bytes(), ctx.accounts.owner.key().as_ref()]).to_bytes();
+        // Step 1: Hash the old note to generate the corresponding leaf node
+        let old_leaf = keccak::hashv(&[old_note.as_bytes(), ctx.accounts.owner.key().as_ref()]).to_bytes();
 
+        // Step 2: Get the address of the Merkle tree account
        let merkle_tree = ctx.accounts.merkle_tree.key();
 
-        // Define the seeds for pda signing
-        let signer_seeds: &[&[&[u8]]] = &[&[
+        // Step 3: Define the seeds for PDA signing
+        let signers_seeds: &[&[&[u8]]] = &[&[
            merkle_tree.as_ref(), // The address of the Merkle tree account as a seed
-            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the pda
+            &[*ctx.bumps.get("tree_authority").unwrap()], // The bump seed for the PDA
        ]];
 
-        // Verify Leaf
-        {
-            if old_note == new_note {
-                msg!("Notes are the same!");
-                return Ok(());
-            }
-
-            let cpi_ctx = CpiContext::new_with_signer(
-                ctx.accounts.compression_program.to_account_info(), // The spl account compression program
-                VerifyLeaf {
-                    merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be modified
-                },
-                signer_seeds, // The seeds for pda signing
-            );
-            // Verify or Fails
-            verify_leaf(cpi_ctx, root, old_leaf, index)?;
+        // Step 4: Check if the old note and new note are the same
+        if old_note == new_note {
+            msg!("Notes are the same!");
+            return Ok(());
        }
 
-        let new_leaf =
-            keccak::hashv(&[new_note.as_bytes(), ctx.accounts.owner.key().as_ref()]).to_bytes();
+        // Step 5: Verify the leaf node in the Merkle tree
+        let verify_cpi_ctx = CpiContext::new_with_signer(
+            ctx.accounts.compression_program.to_account_info(), // The SPL account compression program
+            VerifyLeaf {
+                merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be modified
+            },
+            signers_seeds, // The seeds for PDA signing
+        );
+        // Verify or fail
+        verify_leaf(verify_cpi_ctx, root, old_leaf, index)?;
+
+        // Step 6: Hash the new note to create the new leaf node
+        let new_leaf = keccak::hashv(&[new_note.as_bytes(), ctx.accounts.owner.key().as_ref()]).to_bytes();
 
-        // Log out for indexers
+        // Step 7: Create a NoteLog entry for the new note
        let note_log = NoteLog::new(new_leaf.clone(), ctx.accounts.owner.key().clone(), new_note);
-        // Log the "note log" data using noop program
+
+        // Step 8: Log the NoteLog data using the Noop program
        wrap_application_data_v1(note_log.try_to_vec()?, &ctx.accounts.log_wrapper)?;
 
-        // replace leaf
-        {
-            let cpi_ctx = CpiContext::new_with_signer(
-                ctx.accounts.compression_program.to_account_info(), // The spl account compression program
-                Modify {
-                    authority: ctx.accounts.tree_authority.to_account_info(), // The authority for the Merkle tree, using a PDA
-                    merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be modified
-                    noop: ctx.accounts.log_wrapper.to_account_info(), // The noop program to log data
-                },
-                signer_seeds, // The seeds for pda signing
-            );
-            // CPI to append the leaf node to the Merkle tree
-            replace_leaf(cpi_ctx, root, old_leaf, new_leaf, index)?;
-        }
+        // Step 9: Prepare to replace the old leaf node with the new one in the Merkle tree
+        let modify_cpi_ctx = CpiContext::new_with_signer(
+            ctx.accounts.compression_program.to_account_info(), // The SPL account compression program
+            Modify {
+                authority: ctx.accounts.tree_authority.to_account_info(), // The PDA authority for the Merkle tree
+                merkle_tree: ctx.accounts.merkle_tree.to_account_info(), // The Merkle tree account to be modified
+                noop: ctx.accounts.log_wrapper.to_account_info(), // The Noop program to log data
+            },
+            signers_seeds, // The seeds for PDA signing
+        );
+
+        // Step 10: Replace the old leaf node with the new leaf node in the Merkle tree
+        replace_leaf(modify_cpi_ctx, root, old_leaf, new_leaf, index)?;
 
        Ok(())
    }
 }
 ```
 
-#### 7. Client test setup
+#### 7. Client Test Setup
 
-We’re going to write a few tests to ensure that our program works as expected.
-First, let’s do some setup.
+To ensure our program functions correctly, we’ll set up and write some tests.
+Here’s what you need to do for the setup:
 
-We’ll be using the `@solana/spl-account-compression` package. Go ahead and
-install it:
+1. **Install Dependencies**: We’ll be using the
+   `@solana/spl-account-compression` package for our tests. Install it using the
+   following command:
 
 ```bash
 yarn add @solana/spl-account-compression
 ```
 
-Next, we’re going to give you the contents of a utility file we’ve created to
-make testing easier. Create a `utils.ts` file in the `tests` directory, add in
-the below, then we’ll explain it.
+2. **Create Utility File**: To simplify testing, we’ve provided a utility file.
+   Create a `utils.ts` file in the `tests` directory and add the provided
+   contents. We’ll go over the details of this file shortly.
 
 ```typescript
 import {
@@ -1130,36 +1394,41 @@ export async function getNoteLog(connection: Connection, txSignature: string) {
 }
 ```
 
-There are 3 main things in the above file:
+The `utils.ts` file contains three key components:
+
+1. **`NoteLog` Class**: This class represents the note log that we’ll extract
+   from the Noop program logs. It also includes the Borsh schema, named
+   `NoteLogBorshSchema`, which is used for deserialization.
 
-1. `NoteLog` - a class representing the note log we’ll find in the Noop program
-   logs. We’ve also added the borsh schema as `NoteLogBorshSchema` for
-   deserialization.
-2. `getHash` - a function that creates a hash of the note and note owner so we
-   can compare it to what we find on the Merkle tree
-3. `getNoteLog` - a function that looks through the provided transaction’s logs,
-   finds the Noop program logs, then deserializes and returns the corresponding
-   Note log.
+2. **`getHash` Function**: This function generates a hash from the note and its
+   owner, allowing us to compare it against the data in the Merkle tree (see
+   the sketch just after this list).
 
-#### 8. Write client tests
+3. **`getNoteLog` Function**: This function searches through the transaction
+   logs to locate the Noop program logs, then deserializes and retrieves the
+   corresponding `NoteLog`.
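+
+On the `getHash` point, the client must hash notes exactly the way the program
+does onchain (`keccak::hashv(&[note.as_bytes(), owner.as_ref()])`, which hashes
+the concatenation of the two byte slices). Here is a minimal sketch of such a
+helper, assuming the `js-sha3` package; the actual `getHash` in the provided
+`utils.ts` may differ in its details:
+
+```typescript
+import { keccak256 } from "js-sha3";
+import { PublicKey } from "@solana/web3.js";
+
+// Hash a note and its owner the same way the program does onchain, returning
+// a hex string that can be compared against the logged leaf node bytes.
+export function getHash(note: string, owner: PublicKey): string {
+  return keccak256(Buffer.concat([Buffer.from(note), owner.toBuffer()]));
+}
+```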
-Now that we’ve got our packages installed and utility file ready, let’s dig into
-the tests themselves. We’re going to create four of them:
+#### 8. Write Client Tests
 
-1. Create Note Tree - this will create the Merkle tree we’ll be using to store
-   note hashes
-2. Add Note - this will call our `append_note` instruction
-3. Add Max Size Note - this will call our `append_note` instruction with a note
-   that maxes out the 1232 bytes allowed in a single transaction
-4. Update First Note - this will call our `update_note` instruction to modify
-   the first note we added
+With our packages and utility file set up, we’re ready to dive into writing the
+tests. We will create four tests for our program:
 
-The first test is mostly just for setup. In the last three tests, we’ll be
-asserting each time that the note hash on the tree matches what we would expect
-given the note text and signer.
+1. **Create Note Tree**: This test will initialize the Merkle tree for storing
+   note hashes.
+2. **Add Note**: This test will invoke the `append_note` instruction to add a
+   note to the tree.
+3. **Add Max Size Note**: This test will also use the `append_note`
+   instruction, but with a note that reaches the maximum allowable size of 1232
+   bytes in a single transaction.
+4. **Update First Note**: This test will use the `update_note` instruction to
+   modify the first note that was added.
 
-Let’s start with our imports. There are quite a few from Anchor,
-`@solana/web3.js`, `@solana/spl-account-compression`, and our own utils file.
+The first test is mainly for setup purposes. For the remaining three tests, we
+will check that the note hash in the Merkle tree matches the expected value
+based on the note content and the signer.
+
+We’ll start by setting up our imports. This includes a variety of components
+from Anchor, `@solana/web3.js`, `@solana/spl-account-compression`, and our own
+utility functions.
 
 ```typescript
 import * as anchor from "@coral-xyz/anchor";
@@ -1183,9 +1452,13 @@ import { getHash, getNoteLog } from "./utils";
 import { assert } from "chai";
 ```
 
-Next, we’ll want to set up the state variables we’ll be using throughout our
-tests. This includes the default Anchor setup as well as generating a Merkle
-tree keypair, the tree authority, and some notes.
+Next, we’ll set up the state variables needed for our tests. This setup will
+include:
+
+1. **Default Anchor Setup**: Configure the basic environment for Anchor testing.
+2. **Merkle Tree Keypair**: Generate a keypair for the Merkle tree.
+3. **Tree Authority**: Derive the PDA that serves as the authority of the
+   Merkle tree.
+4. **Notes**: Define some sample notes to use in the tests.
 
 ```typescript
 describe("compressed-notes", () => {
@@ -1193,7 +1466,7 @@ describe("compressed-notes", () => {
   anchor.setProvider(provider);
   const connection = new Connection(
     provider.connection.rpcEndpoint,
-    "confirmed", // has to be confirmed for some of the methods below
+    "confirmed",
   );
   const wallet = provider.wallet as anchor.Wallet;
 
@@ -1203,7 +1476,6 @@ describe("compressed-notes", () => {
   const merkleTree = Keypair.generate();
 
   // Derive the PDA to use as the tree authority for the Merkle tree account
-  // This is a PDA derived from the Note program, which allows the program to sign for appends instructions to the tree
   const [treeAuthority] = PublicKey.findProgramAddressSync(
     [merkleTree.publicKey.toBuffer()],
     program.programId,
@@ -1213,19 +1485,23 @@ describe("compressed-notes", () => {
   const secondNote = "0".repeat(917);
   const updatedNote = "updated note";
 
-  // TESTS GO HERE
+  describe("Merkle Tree Operations", () => {
+    // Tests will go here
+  });
 });
 ```
 
-Finally, let’s start with the tests themselves. First the `Create Note Tree`
-test. This test will do two things:
+Now, let’s dive into the `Create Note Tree` test. This test will accomplish two
+key tasks:
 
-1. Allocate a new account for the Merkle tree with a max depth of 3, max buffer
-   size of 8, and canopy depth of 0
-2. Initialize this new account using our program’s `createNoteTree` instruction
+1. **Allocate a New Merkle Tree Account**: Create a new account for the Merkle
+   tree, specifying a max depth of 3, a max buffer size of 8, and a canopy depth
+   of 0.
+2. 
**Initialize the Account**: Use our program’s `createNoteTree` instruction to + set up the newly allocated Merkle tree account. ```typescript -it("Create Note Tree", async () => { +it("creates a new note tree", async () => { const maxDepthSizePair: ValidDepthSizePair = { maxDepth: 3, maxBufferSize: 8, @@ -1233,7 +1509,7 @@ it("Create Note Tree", async () => { const canopyDepth = 0; - // instruction to create new account with required space for tree + // Instruction to create a new account with the required space for the tree const allocTreeIx = await createAllocTreeIx( connection, merkleTree.publicKey, @@ -1242,12 +1518,13 @@ it("Create Note Tree", async () => { canopyDepth, ); - // instruction to initialize the tree through the Note program + // Instruction to initialize the tree through the Note program const ix = await program.methods .createNoteTree(maxDepthSizePair.maxDepth, maxDepthSizePair.maxBufferSize) .accounts({ + owner: wallet.publicKey, merkleTree: merkleTree.publicKey, - treeAuthority: treeAuthority, + treeAuthority, logWrapper: SPL_NOOP_PROGRAM_ID, compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, }) @@ -1255,114 +1532,118 @@ it("Create Note Tree", async () => { const tx = new Transaction().add(allocTreeIx, ix); await sendAndConfirmTransaction(connection, tx, [wallet.payer, merkleTree]); -}); -``` -Next, we’ll create the `Add Note` test. It should call `append_note` with -`firstNote`, then check that the onchain hash matches our computed hash and that -the note log matches the text of the note we passed into the instruction. + // Fetch the Merkle tree account to confirm it’s initialized + const merkleTreeAccount = + await ConcurrentMerkleTreeAccount.fromAccountAddress( + connection, + merkleTree.publicKey, + ); + assert(merkleTreeAccount, "Merkle tree should be initialized"); +}); -```typescript -it("Add Note", async () => { +it("adds a note to the Merkle tree", async () => { const txSignature = await program.methods .appendNote(firstNote) .accounts({ + owner: wallet.publicKey, merkleTree: merkleTree.publicKey, - treeAuthority: treeAuthority, + treeAuthority, logWrapper: SPL_NOOP_PROGRAM_ID, compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, }) .rpc(); const noteLog = await getNoteLog(connection, txSignature); - const hash = getHash(firstNote, provider.publicKey); + const hash = getHash(firstNote, wallet.publicKey); - assert(hash === Buffer.from(noteLog.leafNode).toString("hex")); - assert(firstNote === noteLog.note); + assert( + hash === Buffer.from(noteLog.leafNode).toString("hex"), + "Leaf node hash should match", + ); + assert(firstNote === noteLog.note, "Note should match the appended note"); }); -``` - -Next, we’ll create the `Add Max Size Note` test. It is the same as the previous -test, but with the second note. 
-```typescript -it("Add Max Size Note", async () => { - // Size of note is limited by max transaction size of 1232 bytes, minus additional data required for the instruction +it("adds max size note to the Merkle tree", async () => { const txSignature = await program.methods .appendNote(secondNote) .accounts({ + owner: wallet.publicKey, merkleTree: merkleTree.publicKey, - treeAuthority: treeAuthority, + treeAuthority, logWrapper: SPL_NOOP_PROGRAM_ID, compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, }) .rpc(); const noteLog = await getNoteLog(connection, txSignature); - const hash = getHash(secondNote, provider.publicKey); + const hash = getHash(secondNote, wallet.publicKey); - assert(hash === Buffer.from(noteLog.leafNode).toString("hex")); - assert(secondNote === noteLog.note); + assert( + hash === Buffer.from(noteLog.leafNode).toString("hex"), + "Leaf node hash should match", + ); + assert( + secondNote === noteLog.note, + "Note should match the appended max size note", + ); }); -``` - -Lastly, we’ll create the `Update First Note` test. This is slightly more complex -than adding a note. We’ll do the following: -1. Get the Merkle tree root as it’s required by the instruction. -2. Call the `update_note` instruction of our program, passing in the index 0 - (for the first note), the Merkle tree root, the first note, and the updated - data. Remember, it needs the first note and the root because the program must - verify the entire proof path for the note’s leaf before it can be updated. - -```typescript -it("Update First Note", async () => { +it("updates the first note in the Merkle tree", async () => { const merkleTreeAccount = await ConcurrentMerkleTreeAccount.fromAccountAddress( connection, merkleTree.publicKey, ); - - const rootKey = merkleTreeAccount.tree.changeLogs[0].root; - const root = Array.from(rootKey.toBuffer()); + const root = merkleTreeAccount.getCurrentRoot(); const txSignature = await program.methods .updateNote(0, root, firstNote, updatedNote) .accounts({ + owner: wallet.publicKey, merkleTree: merkleTree.publicKey, - treeAuthority: treeAuthority, + treeAuthority, logWrapper: SPL_NOOP_PROGRAM_ID, compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, }) .rpc(); const noteLog = await getNoteLog(connection, txSignature); - const hash = getHash(updatedNote, provider.publicKey); + const hash = getHash(updatedNote, wallet.publicKey); - assert(hash === Buffer.from(noteLog.leafNode).toString("hex")); - assert(updatedNote === noteLog.note); + assert( + hash === Buffer.from(noteLog.leafNode).toString("hex"), + "Leaf node hash should match after update", + ); + assert( + updatedNote === noteLog.note, + "Updated note should match the logged note", + ); }); ``` -That’s it, congrats! Go ahead and run `anchor test` and you should get four -passing tests. +That’s a wrap—congratulations! Run `anchor test`, and you should see all four +tests passing. -If you’re running into issues, feel free to go back through some of the demo or -look at the full solution code in the +If you encounter any issues, don’t hesitate to revisit the demo or check out the +complete solution code in the [Compressed Notes repository](https://github.com/unboxed-software/anchor-compressed-notes). -## Challenge +### Challenge -Now that you’ve practiced the basics of state compression, add a new instruction -to the Compressed Notes program. This new instruction should allow users to -delete an existing note. 
keep in mind that you can’t remove a leaf from the
-tree, so you’ll need to decide what “deleted” looks like for your program. Good
-luck!
+Now that you’ve got the hang of state compression, it’s time to add a new
+feature to the Compressed Notes program. Your task is to implement an
+instruction that allows users to delete an existing note. Keep in mind that you
+can’t physically remove a leaf from the Merkle tree, so you’ll need to come up
+with a method to signify that a note has been deleted.
 
-If you'd like a very simple example of a delete function, check out the
-[`solution` branch on GitHub](https://github.com/Unboxed-Software/anchor-compressed-notes/tree/solution).
+Good luck, and happy coding!
+
+For a straightforward example of how to implement a delete function, check out
+the
+[`main` branch on GitHub](https://github.com/Unboxed-Software/anchor-compressed-notes/tree/main).
 
-Push your code to GitHub and
-[tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=60f6b072-eaeb-469c-b32e-5fea4b72d1d1)!
+Push your code to GitHub and [let us know what you think of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=60f6b072-eaeb-469c-b32e-5fea4b72d1d1)!
diff --git a/content/courses/state-compression/metadata.yml b/content/courses/state-compression/metadata.yml
index a95b504a7..3a86b52a2 100644
--- a/content/courses/state-compression/metadata.yml
+++ b/content/courses/state-compression/metadata.yml
@@ -6,7 +6,3 @@ lessons:
   - compressed-nfts
   - generalized-state-compression
 priority: 65
-# Uses deprecated @metaplex-foundation/js library for NFTs
-# which also uses old RPC methods and no longer functions.
-# TODO: Superteam to update
-isHidden: true
diff --git a/content/courses/token-extensions/close-mint.md b/content/courses/token-extensions/close-mint.md
index f2751b852..08ecd4ca6 100644
--- a/content/courses/token-extensions/close-mint.md
+++ b/content/courses/token-extensions/close-mint.md
@@ -198,10 +198,10 @@
 
 ## Lab
 
-In this lab, we'll create a mint with the `close mint` extension. We will then
-mint some of the tokens and see what happens when we try to close it with a
-non-zero supply (hint, the close transaction will fail). Lastly, we will burn
-the supply and close the account.
+In this lab, we'll create a token mint account with the `close mint` extension.
+We will then mint some of the tokens and see what happens when we try to close
+it with a non-zero supply (hint, the close transaction will fail). Lastly, we
+will burn the supply and close the account.
 
 ### 1. Getting Started
 
@@ -213,7 +213,7 @@ Next, we'll need to add our dependencies. Run the following to install the
 required packages:
 
 ```bash
-npm i @solana-developers/helpers @solana/spl-token @solana/web3.js esrun dotenv typescript
+npm i @solana-developers/helpers@2 @solana/spl-token @solana/web3.js@1 esrun dotenv typescript
 ```
 
 Create a directory named `src`. In this directory, create a file named
@@ -290,7 +290,7 @@ the local RPC URL.
 
 const connection = new Connection("http://127.0.0.1:8899", "confirmed");
 ```
 
-Alternatively, if you’d like to use testnet or devnet, import the
+Alternatively, if you'd like to use testnet or devnet, import the
 `clusterApiUrl` from `@solana/web3.js` and pass it to the connection as such:
 
 ```typescript
@@ -304,6 +304,14 @@ running `solana config get` in your terminal. And then go to
 address. You can get your address from running `solana address` in your
 terminal.
+For example, assuming `keypairPath` is `/home/.config/solana/id.json`:
+
+```typescript
+const payer = await initializeKeypair(connection, {
+  keypairPath: "/home/.config/solana/id.json",
+});
+```
+
 ### 3. Create a mint with close authority
 
 Let's create a closable mint by creating the function `createClosableMint` in a
@@ -399,7 +407,13 @@ export async function createClosableMint(
 ```
 
 Now let's call this function in `src/index.ts`. First you'll need to import our
-new function. Then paste the following under the right comment section:
+new function by uncommenting the 3rd line.
+
+```ts
+import { createClosableMint } from "./create-mint";
+```
+
+Then paste the following under the right comment section:
 
 ```ts
 // CREATE A MINT WITH CLOSE AUTHORITY
@@ -470,7 +484,7 @@ Underneath the minting functions, add the following code block:
 /**
  * Get mint information to verify supply
  */
-const mintInfo = await getMint(
+let mintInfo = await getMint(
   connection,
   mintKeypair.publicKey,
   "finalized",
@@ -591,7 +605,7 @@ Putting this all together we get:
 
 ```ts
 // CLOSE MINT
-const mintInfo = await getMint(
+mintInfo = await getMint(
   connection,
   mintKeypair.publicKey,
   "finalized",
diff --git a/content/courses/token-extensions/default-account-state.md b/content/courses/token-extensions/default-account-state.md
index baff06b7a..2286a04bd 100644
--- a/content/courses/token-extensions/default-account-state.md
+++ b/content/courses/token-extensions/default-account-state.md
@@ -223,7 +223,7 @@ Next, we'll need to add our dependencies. Run the following to install the
 required packages:
 
 ```bash
-npm i @solana-developers/helpers @solana/spl-token @solana/web3.js esrun dotenv typescript
+npm i @solana-developers/helpers@2 @solana/spl-token @solana/web3.js@1 esrun dotenv typescript
 ```
 
 Create a directory named `src`. In this directory, create a file named
@@ -296,7 +296,7 @@ the local RPC URL.
 
 const connection = new Connection("http://127.0.0.1:8899", "confirmed");
 ```
 
-Alternatively, if you’d like to use testnet or devnet, import the
+Alternatively, if you'd like to use testnet or devnet, import the
 `clusterApiUrl` from `@solana/web3.js` and pass it to the connection as such:
 
 ```typescript
@@ -647,7 +647,7 @@ esrun src/index.ts
 
 #### 7.3 Transferring without thawing the recipient's account
 
-Now that we’ve tested minting, we can test transferring our tokens frozen and
+Now that we've tested minting, we can test transferring our tokens frozen and
 not. First lets test a transfer without thawing the recipient's token account.
 Remember, by default, the `otherTokenAccountKeypair` is frozen due to the
 extension.
@@ -746,7 +746,7 @@ Remember the key takeaways:
   accounts.
 - Frozen account's balance cannot change.
 
-Congratulations! We’ve just created and tested a mint using the default account
+Congratulations! We've just created and tested a mint using the default account
 extension!
## Challenge diff --git a/content/courses/token-extensions/group-member.md b/content/courses/token-extensions/group-member.md index 6953444d9..ca12d49d3 100644 --- a/content/courses/token-extensions/group-member.md +++ b/content/courses/token-extensions/group-member.md @@ -537,7 +537,7 @@ const groupMetadata: LabNFTMetadata = { tokenUri: "", }; -// Upload off-chain metadata +// Upload offchain metadata groupMetadata.tokenUri = await uploadOffChainMetadata(payer, groupMetadata); // Format group token metadata diff --git a/content/courses/token-extensions/immutable-owner.md b/content/courses/token-extensions/immutable-owner.md index b5342a53a..bfd83f4f3 100644 --- a/content/courses/token-extensions/immutable-owner.md +++ b/content/courses/token-extensions/immutable-owner.md @@ -139,7 +139,7 @@ Next, we'll need to add our dependencies. Run the following to install the required packages: ```bash -npm i @solana-developers/helpers @solana/spl-token @solana/web3.js esrun dotenv typescript +npm i @solana-developers/helpers@2 @solana/spl-token @solana/web3.js@1 esrun dotenv typescript ``` Create a directory named `src`. In this directory, create a file named @@ -182,7 +182,7 @@ the local RPC URL. const connection = new Connection("http://127.0.0.1:8899", "confirmed"); ``` -Alternatively, if you’d like to use testnet or devnet, import the +Alternatively, if you'd like to use testnet or devnet, import the `clusterApiUrl` from `@solana/web3.js` and pass it to the connection as such: ```typescript @@ -360,7 +360,7 @@ const signature = await sendAndConfirmTransaction(connection, transaction, [ return signature; ``` -Now that we’ve added the functionality for `token-helper`, we can create our +Now that we've added the functionality for `token-helper`, we can create our test token accounts. One of the two test token accounts will be created by calling `createTokenAccountWithImmutableOwner`. The other will be created with the baked-in SPL helper function `createAssociatedTokenAccount`. This helper @@ -475,7 +475,7 @@ Now we can run `npx esrun src/index.ts`. This test should log a failure message similar to the one from the previous test. This means that both of our token accounts are in fact immutable and working as intended. -Congratulations! We’ve just created token accounts and tested the immutable +Congratulations! We've just created token accounts and tested the immutable owner extension! If you are stuck at any point, you can find the working code on the `solution` branch of [this repository](https://github.com/Unboxed-Software/solana-lab-immutable-owner/tree/solution). diff --git a/content/courses/token-extensions/interest-bearing-token.md b/content/courses/token-extensions/interest-bearing-token.md index ba44e30ad..316d440b6 100644 --- a/content/courses/token-extensions/interest-bearing-token.md +++ b/content/courses/token-extensions/interest-bearing-token.md @@ -213,7 +213,7 @@ Next, we'll need to add our dependencies. Run the following to install the required packages: ```bash -npm i @solana-developers/helpers @solana/spl-token @solana/web3.js esrun dotenv typescript +npm i @solana-developers/helpers@2 @solana/spl-token @solana/web3.js@1 esrun dotenv typescript ``` Create a directory named `src`. In this directory, create a file named @@ -542,7 +542,7 @@ Now run `npx esrun src/index.ts`. This is expected to fail and log out **Mint tokens and read interest rate** -So we’ve tested updating the interest rate. How do we check that the accrued +So we've tested updating the interest rate. 
How do we check that the accrued
 interest increases when an account mints more tokens? We can use the
 `amountToUiAmount` and `getAccount` helpers from the SPL library to help us
 achieve this.
@@ -690,7 +690,7 @@ try {
 
 This is expected to work and the new interest rate should be 10.
 
-Thats it! We’ve just created an interest bearing token, updated the interest
+That's it! We've just created an interest-bearing token, updated the interest
 rate and logged the updated state of the token!
 
 ## Challenge
diff --git a/content/courses/token-extensions/non-transferable-token.md b/content/courses/token-extensions/non-transferable-token.md
index f9f2d4c43..34bc4dfe7 100644
--- a/content/courses/token-extensions/non-transferable-token.md
+++ b/content/courses/token-extensions/non-transferable-token.md
@@ -127,7 +127,7 @@ Next, we'll need to add our dependencies. Run the following to install the
 required packages:
 
 ```bash
-npm i @solana-developers/helpers @solana/spl-token @solana/web3.js esrun dotenv typescript
+npm i @solana-developers/helpers@2 @solana/spl-token @solana/web3.js@1 esrun dotenv typescript
 ```
 
 Create a directory named `src`. In this directory, create a file named
@@ -303,7 +303,7 @@ esrun src/index.ts
 ```
 
 The non-transferable mint has been set up correctly and will be created when we
-run `npm start`. Let’s move on to the next step and create a source account and
+run `npm start`. Let's move on to the next step and create a source account and
 mint a token to it.
 
 #### 4. Mint token
diff --git a/content/courses/token-extensions/permanent-delegate.md b/content/courses/token-extensions/permanent-delegate.md
index bbc26b9a1..2474c1cfe 100644
--- a/content/courses/token-extensions/permanent-delegate.md
+++ b/content/courses/token-extensions/permanent-delegate.md
@@ -250,7 +250,7 @@ Next, we'll need to add our dependencies. Run the following to install the
 required packages:
 
 ```bash
-npm i @solana-developers/helpers @solana/spl-token @solana/web3.js esrun
+npm i @solana-developers/helpers@2 @solana/spl-token @solana/web3.js@1 esrun
 ```
 
 Create a directory named `src`. In this directory, create a file named
@@ -338,7 +338,7 @@ the local RPC URL.
 
 const connection = new Connection("http://127.0.0.1:8899", "confirmed");
 ```
 
-Alternatively, if you’d like to use testnet or devnet, import the
+Alternatively, if you'd like to use testnet or devnet, import the
 `clusterApiUrl` from `@solana/web3.js` and pass it to the connection as such:
 
 ```typescript
diff --git a/content/courses/token-extensions/required-memo.md b/content/courses/token-extensions/required-memo.md
index c5f5decf3..54baeaeb6 100644
--- a/content/courses/token-extensions/required-memo.md
+++ b/content/courses/token-extensions/required-memo.md
@@ -177,7 +177,7 @@ Next, we'll need to add our dependencies. Run the following to install the
 required packages:
 
 ```bash
-npm i @solana-developers/helpers @solana/spl-token @solana/web3.js esrun dotenv typescript
+npm i @solana-developers/helpers@2 @solana/spl-token @solana/web3.js@1 esrun dotenv typescript
 ```
 
 Create a directory named `src`. In this directory, create a file named
@@ -242,7 +242,7 @@ the local RPC URL.
 
 `const connection = new Connection("http://127.0.0.1:8899", "confirmed");`
 
-Alternatively, if you’d like to use testnet or devnet, import the
+Alternatively, if you'd like to use testnet or devnet, import the
 `clusterApiUrl` from `@solana/web3.js` and pass it to the connection as such:
 
 ```typescript
@@ -600,7 +600,7 @@ extension.
 
 npx esrun src/index.ts
 ```
 
-Congratulations!
We’ve just tested the required memo extension! +Congratulations! We've just tested the required memo extension! ## Challenge diff --git a/content/courses/token-extensions/token-extensions-metadata.md b/content/courses/token-extensions/token-extensions-metadata.md index 081def3fa..8baf0fa57 100644 --- a/content/courses/token-extensions/token-extensions-metadata.md +++ b/content/courses/token-extensions/token-extensions-metadata.md @@ -389,7 +389,7 @@ export function createUpdateAuthorityInstruction( The function `createEmitInstruction` "emits" or logs out token-metadata in the expected TokenMetadata state format. This is a required function for metadata programs that want to follow the TokenMetadata interface. The emit instruction -allows indexers and other off-chain users to call to get metadata. This also +allows indexers and other offchain users to call to get metadata. This also allows custom metadata programs to store [metadata in a different format while maintaining compatibility with the Interface standards](https://solana.com/developers/guides/token-extensions/metadata-pointer#metadata-interface-instructions). @@ -643,10 +643,10 @@ at 100 KiB. **`helpers.ts`** file provides us with a useful helper function `uploadOffChainMetadata`. -`uploadOffChainMetadata` is a helper to store the off-chain metadata on Arweave +`uploadOffChainMetadata` is a helper to store the offchain metadata on Arweave using Irys (formerly Bundlr). In this lab we will be more focused on the Token Extensions Program interaction, so this uploader function is provided. It is -important to note that an NFT or any off-chain metadata can be stored anywhere +important to note that an NFT or any offchain metadata can be stored anywhere with any storage provider like [NFT.storage](https://nft.storage/), Solana's native [ShadowDrive](https://www.shdwdrive.com/), or [Irys (formerly Bundlr)](https://irys.xyz/). At the end of the day, all you need @@ -695,12 +695,12 @@ you are running into airdropping problems: - Copy the address and airdrop some devnet sol from [faucet.solana](https://faucet.solana.com/). -### 1. Uploading the off-chain metadata +### 1. Uploading the offchain metadata In this section we will decide on our NFT metadata and upload our files to NFT.Storage using the helper functions provided in the starting code. -To upload our off-chain metadata, we need to first prepare an image that will +To upload our offchain metadata, we need to first prepare an image that will represent our NFT. We've provided `cat.png`, but feel free to replace it with your own. Most image types are supported by most wallets. (Again devenet Irys allows up to 100KiB per file) @@ -763,7 +763,7 @@ console.log("Token URI:", tokenUri); Now run `npm run start` in your terminal and test your code. You should see the URI logged once the uploading is done. If you visit the link you should see a -JSON object that holds all of our off-chain metadata. +JSON object that holds all of our offchain metadata. ### 2. 
Create NFT function @@ -1081,12 +1081,15 @@ const onChainMetadata = await getTokenMetadata(connection, mint.publicKey); // Now we can see the metadata coming with the mint console.log("onchain metadata =====>", onChainMetadata); -// And we can even get the off-chain json now -if (onChainMetadata && onChainMetadata.uri) { - const offChainMetadata = await fetch(onChainMetadata.uri).then(res => - res.json(), - ); - console.log("Mint off-chain metadata =====>", offChainMetadata); +// And we can even get the offchain json now +if (onChainMetadata?.uri) { + try { + const response = await fetch(onChainMetadata.uri); + const offChainMetadata = await response.json(); + console.log("Mint offchain metadata =====>", offChainMetadata); + } catch (error) { + console.error("Error fetching or parsing offchain metadata:", error); + } } ``` @@ -1289,12 +1292,15 @@ export default async function createNFTWithEmbeddedMetadata( // Now we can see the metadata coming with the mint console.log("onchain metadata =====>", onChainMetadata); - // And we can even get the off-chain JSON now - if (onChainMetadata && onChainMetadata.uri) { - const offChainMetadata = await fetch(onChainMetadata.uri).then(res => - res.json(), - ); - console.log("Mint off-chain metadata =====>", offChainMetadata); + // And we can even get the offchain JSON now + if (onChainMetadata?.uri) { + try { + const response = await fetch(onChainMetadata.uri); + const offChainMetadata = await response.json(); + console.log("Mint offchain metadata =====>", offChainMetadata); + } catch (error) { + console.error("Error fetching or parsing offchain metadata:", error); + } } } ``` diff --git a/content/courses/token-extensions/token-extensions-onchain.md b/content/courses/token-extensions/token-extensions-onchain.md index 4d46e33bf..1cbf6c2ce 100644 --- a/content/courses/token-extensions/token-extensions-onchain.md +++ b/content/courses/token-extensions/token-extensions-onchain.md @@ -328,11 +328,10 @@ solana --version If the version printed out after running `solana --version` is less than `1.18.0` then you can update the -[cli version manually](https://docs.solanalabs.com/cli/install). Note, at the -time of writing this, you cannot simply run the `solana-install update` command. -This command will not update the CLI to the correct version for us, so we have -to explicitly download version `1.18.0`. You can do so with the following -command: +[cli version manually](/docs/intro/installation.md). Note, at the time of +writing this, you cannot simply run the `solana-install update` command. This +command will not update the CLI to the correct version for us, so we have to +explicitly download version `1.18.0`. You can do so with the following command: ```bash solana-install init 1.18.0 diff --git a/content/courses/token-extensions/transfer-fee.md b/content/courses/token-extensions/transfer-fee.md index f981c5e98..6b150d899 100644 --- a/content/courses/token-extensions/transfer-fee.md +++ b/content/courses/token-extensions/transfer-fee.md @@ -159,7 +159,7 @@ There are a couple of notes when transferring tokens with the `transfer fee` extension. First, the recipient is the one who "pays" for the fee. If I send 100 tokens -with basis points of 50 (5%), the recipient will receive 95 tokens (five +with basis points of 500 (5%), the recipient will receive 95 tokens (five withheld) Second, the fee is calculated not by the tokens sent, but the smallest unit of @@ -167,9 +167,9 @@ said token. 
In Solana programming, we always specify amounts to be transferred, minted or
 burned in their smallest unit. To send one SOL to someone, we actually send
 `1 * 10 ^ 9` lamports. Another way to look at it is if you wanted to send one US
 dollar, you're actually sending 100 pennies. Let's make this dollar a
-token with a 50 basis points (5%) transfer fee. Sending one dollar, would result
-in a five cent fee. Now let's say we have a max fee of 10 cents, this will
-always be the highest fee, even if we send $10,000.
+token with a 500 basis point (5%) transfer fee. Sending one dollar would
+result in a five-cent fee. Now let's say we have a max fee of 10 cents; this
+will always be the highest fee, even if we send $10,000.
 
 The calculation can be summed up like this:
 
@@ -459,7 +459,7 @@ Next, we'll need to add our dependencies. Run the following to install the
 required packages:
 
 ```bash
-npm i @solana-developers/helpers @solana/spl-token @solana/web3.js esrun dotenv typescript
+npm i @solana-developers/helpers@2 @solana/spl-token @solana/web3.js@1 esrun dotenv typescript
 ```
 
 Create a directory named `src`. In this directory, create a file named
@@ -535,7 +535,7 @@ the local RPC URL.
 
 const connection = new Connection("http://127.0.0.1:8899", "confirmed");
 ```
 
-Alternatively, if you’d like to use testnet or devnet, import the
+Alternatively, if you'd like to use testnet or devnet, import the
 `clusterApiUrl` from `@solana/web3.js` and pass it to the connection as such:
 
 ```typescript
diff --git a/content/courses/token-extensions/transfer-hook.md b/content/courses/token-extensions/transfer-hook.md
index e389268ea..f27cead37 100644
--- a/content/courses/token-extensions/transfer-hook.md
+++ b/content/courses/token-extensions/transfer-hook.md
@@ -95,7 +95,7 @@ const [pda] = PublicKey.findProgramAddressSync(
 
 By storing the extra accounts required by the `Execute` instruction in the
 `extra_account_meta_list` PDA, these accounts can be automatically added to a
 token transfer instruction from the client. We'll see how to do that in the
-off-chain section.
+offchain section.
 
 #### 1. `initialize_extra_account_meta_list` instruction:
diff --git a/content/courses/tokens-and-nfts/metadata.yml b/content/courses/tokens-and-nfts/metadata.yml
index 07e368033..ec3629722 100644
--- a/content/courses/tokens-and-nfts/metadata.yml
+++ b/content/courses/tokens-and-nfts/metadata.yml
@@ -6,7 +6,3 @@ lessons:
   - token-program-advanced
   - nfts-with-metaplex
 priority: 10
-# Uses deprecated @metaplex-foundation/js library for NFTs
-# which also uses old RPC methods and no longer functions.
-# TODO: Superteam to update
-isHidden: true
diff --git a/content/courses/tokens-and-nfts/nfts-with-metaplex.md b/content/courses/tokens-and-nfts/nfts-with-metaplex.md
index 4d9b09167..f5b929b5c 100644
--- a/content/courses/tokens-and-nfts/nfts-with-metaplex.md
+++ b/content/courses/tokens-and-nfts/nfts-with-metaplex.md
@@ -19,8 +19,9 @@ description:
   files, any special traits the NFT has, and more.
 - The **Metaplex Token Metadata** program is an onchain program that attaches
   metadata to a token mint. We can interact with the Token Metadata program
-  using the `metaplex-foundation/js` npm module, also known as the
-  [Metaplex JavaScript SDK](https://github.com/metaplex-foundation/js).
+  using the
+  [Token Metadata package](https://developers.metaplex.com/token-metadata) via
+  Umi, a tool made by Metaplex for working with onchain programs.
 
 ### Lesson
 
@@ -69,75 +70,108 @@ program.
 offchain component of NFT metadata.
In the following sections, we'll cover the basics of using the
-`metaplex-foundation/js` npm module (also known as the Metaplex JavaScript SDK)
-to prepare assets, create NFTs, update NFTs, and associate an NFT with a broader
-collection. For more information on `metaplex-foundation/js` see the
-[Metaplex JavaScript SDK README](https://github.com/metaplex-foundation/js) and
-the
-[Metaplex JS SDK Examples](https://github.com/metaplex-foundation/js-examples).
-
-The Metaplex Foundation has newer mechanisms to create
-NFTs, including [Umi](https://github.com/metaplex-foundation/umi) and
-[MPL Core](https://github.com/metaplex-foundation/mpl-core/blob/main/clients/js/README.md)
-but [Metaplex JavaScript SDK](https://github.com/metaplex-foundation/js) remains
-the simplest and most popular JS/TS client to create NFTs with broad wallet
-support.
-
-#### Metaplex instance
-
-A `Metaplex` instance serves as the entry point for accessing the Metaplex SDK.
-This instance accepts a connection used to communicate with the cluster.
-Additionally, developers can customize the SDK's interactions by specifying an
-"Identity Driver" and a "Storage Driver".
-
-The Identity Driver is a keypair that can be used to sign transactions, which is
-a requirement when creating an NFT. The Storage Driver is used to specify the
-storage service you want to use for uploading assets. The `irysStorage` driver
-is the default option, and it uploads assets to Irys, a permanent and
-decentralized storage service.
-
-Below is an example of how you can set up the `Metaplex` instance for devnet.
+`metaplex-foundation/token-metadata` plugin with Umi to prepare assets, create
+NFTs, update NFTs, and associate an NFT with a broader collection. For more
+information on `metaplex-foundation/token-metadata` see the
+[developer docs for Token Metadata](https://developers.metaplex.com/token-metadata).
+
+[Metaplex Core](https://developers.metaplex.com/core) is an NFT standard from
+Metaplex where asset details such as the owner, name, and URI are stored on a
+single account. However, the most common style of NFT is still made by creating
+a Solana SPL token with metadata attached via the Metaplex Token Metadata
+program, so that's what we'll be using in this tutorial.
+
+#### Umi instance
+
+Umi is a framework created by Metaplex for building JS/TS clients for onchain
+programs. Umi can create JS/TS clients for many programs, but in practice, it's
+most commonly used to communicate with the Token Metadata program.
+
+Note that Umi implements many concepts differently than web3.js, including
+Keypairs, PublicKeys, and Connections. However, it is easy to convert from
+web3.js versions of these items to the Umi equivalents.
+
+#### Installation and setting up Umi
+
+First, we create a new Umi instance. We can do this by either providing our own
+RPC endpoint or using the public-facing Solana endpoints provided by the
+`clusterApiUrl` helper.
+
+```typescript
+import { createUmi } from "@metaplex-foundation/umi-bundle-defaults";
+import { clusterApiUrl } from "@solana/web3.js";
+
+const umi = createUmi(clusterApiUrl("devnet"));
+```
+
+Finally, we pass in the identity for our Umi instance (this is the keypair that
+will be used to sign transactions) and the plugins that we will use; in our
+case, this is `@metaplex-foundation/mpl-token-metadata`.
 ```typescript
-const metaplex = Metaplex.make(connection)
-  .use(keypairIdentity(user))
-  .use(
-    irysStorage({
-      address: "https://devnet.irys.xyz",
-      providerUrl: "https://api.devnet.solana.com",
-      timeout: 60000,
-    }),
-  );
+import { mplTokenMetadata } from "@metaplex-foundation/mpl-token-metadata";
+import { keypairIdentity } from "@metaplex-foundation/umi";
+import { createUmi } from "@metaplex-foundation/umi-bundle-defaults";
+import { getKeypairFromFile } from "@solana-developers/helpers";
+import { promises as fs } from "fs";
+import { clusterApiUrl } from "@solana/web3.js";
+
+const umi = createUmi(clusterApiUrl("devnet"));
+
+// load keypair from local file system
+// See https://github.com/solana-developers/helpers?tab=readme-ov-file#get-a-keypair-from-a-keypair-file
+const localKeypair = await getKeypairFromFile();
+
+// convert to Umi compatible keypair
+const umiKeypair = umi.eddsa.createKeypairFromSecretKey(localKeypair.secretKey);
+
+// load the MPL metadata program plugin and assign a signer to our umi instance
+umi.use(keypairIdentity(umiKeypair)).use(mplTokenMetadata());
 ```
 
-#### Upload assets
+#### Uploading assets
 
 Before creating an NFT, you must prepare and upload any assets you plan to
 associate with the NFT. While this doesn't have to be an image, most NFTs have
 an image associated with them.
 
 Preparing and uploading an image involves converting the image to a buffer,
-converting it to the Metaplex format using the `toMetaplexFile` function, and
-finally uploading it to the designated Storage Driver.
+converting the file to a
+[generic file](https://developers.metaplex.com/umi/storage#generic-files) using
+the `createGenericFile()` function and finally uploading it to the designated
+Storage Driver.
+
+The `GenericFile` type allows Umi to support different file variations despite
+the differences between browser files and local file system files (i.e., those
+on your computer).
+
+In action, uploading an image named `random-image.png` from your computer would
+take the following steps:
 
-The Metaplex SDK supports the creation of a new Metaplex file from either files
-present on your local computer or those uploaded by a user through a browser.
-You can do the former using `readFileSync()` to read the image file, then
-convert it into a Metaplex file using `toMetaplexFile()`. Finally, use your
-`Metaplex` instance to call `storage().upload(file)` to upload the file. The
-function's return value will be the URI where the image was stored.
+1. Reading the file using `readFile` into a buffer.
+
+2. Creating a generic file type with the file's MIME type from the buffer and
+   file path.
+
+3. Uploading the file to the designated storage provider.
 
 ```typescript
-const buffer = readFileSync("src/" + nftData.imageFile);
-const file = toMetaplexFile(buffer, nftData.imageFile);
-const imageUri = await metaplex.storage().upload(file);
+let filePath = "random-image.png";
+
+const buffer = await fs.readFile(filePath);
+let file = createGenericFile(buffer, filePath, {
+  // choose the correct file MIME type https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types
+  contentType: "image/png",
+});
+const [image] = await umi.uploader.upload([file]);
 ```
 
+The function's return value will be the URI where the image was stored.
+
 #### Upload metadata
 
 After uploading an image, it's time to upload the offchain JSON metadata using
-the `nfts().uploadMetadata()` function. This will return a URI where the JSON
-metadata is stored.
+the `uploadJson()` method. This will return a URI where the JSON metadata is
+stored.
 
 Remember, the offchain portion of the metadata includes things like the image
 URI as well as additional information like the name and description of the NFT.
@@ -146,47 +180,57 @@ most cases, you should follow the
 [NFT standard](https://developers.metaplex.com/token-metadata/token-standard#the-non-fungible-standard)
 to ensure compatibility with wallets, programs, and applications.
 
-To create the metadata, use the `uploadMetadata` method provided by the SDK.
-This method accepts a metadata object and returns a URI that points to the
-uploaded metadata.
+To create the metadata, use the `uploadJson()` method provided by the SDK. This
+method accepts a metadata object and returns a URI that points to the uploaded
+metadata.
 
 ```typescript
-const { uri } = await metaplex.nfts().uploadMetadata({
-  name: "My NFT",
-  description: "My description",
-  image: imageUri,
+const uri = await umi.uploader.uploadJson({
+  name,
+  description,
+  image,
 });
 ```
 
 #### Create the NFT
 
 After uploading the NFT's metadata, you can finally create the NFT on the
-network. The Metaplex SDK's `create()` method allows you to create a new NFT
-with minimal configuration. This method will create the mint account, token
-account, metadata account, and master edition account for you. The data provided
-to this method will represent the onchain portion of the NFT metadata. You can
-explore the SDK to see all the other input optionally supplied to this method.
+network. The `mplTokenMetadata` plugin we added earlier provides the required
+helpers to create an NFT or any other token with minimal configuration. The
+helper `createNft` method will create the mint account, token account, metadata
+account, and master edition account for you. The data provided to this method
+will represent the onchain portion of the NFT metadata. You can explore the SDK
+to see all the other input optionally supplied to this method.
 
 ```typescript
-const { nft } = await metaplex.nfts().create(
-  {
-    uri: uri,
-    name: "My NFT",
-    sellerFeeBasisPoints: 0,
-  },
-  { commitment: "finalized" },
-);
+const { signature, result } = await createNft(umi, {
+  mint,
+  name: "My NFT",
+  uri,
+  updateAuthority: umi.identity.publicKey,
+  sellerFeeBasisPoints: percentAmount(0),
+}).sendAndConfirm(umi, { send: { commitment: "finalized" } });
 ```
 
-This method returns an object containing information about the newly created
-NFT. By default, the SDK sets the `isMutable` property to true, allowing for
-updates to be made to the NFT's metadata. However, you can choose to set
-`isMutable` to false, making the NFT's metadata immutable.
+The `sendAndConfirm` method takes care of signing and sending our transaction.
+It also provides other options to set pre-flight checks and our desired
+commitment for the transaction, which defaults to `confirmed` if not provided.
+
+This method returns an object containing the transaction signature and a
+result. The result object contains the outcome of our transaction. If
+successful, the `err` field inside it will be set to `null`; otherwise, it will
+contain the error for the failed transaction.
+
+By default, the SDK sets the `isMutable` property to true, allowing for updates
+to be made to the NFT's metadata. However, you can choose to set `isMutable` to
+false, making the NFT's metadata immutable.
 
 #### Update the NFT
 
-If you've left `isMutable` as true, you may update your NFT's metadata. The
-SDK's `update` method allows you to update both the onchain and offchain
+If you've left `isMutable` as true, you may update your NFT's metadata.
+
+The SDK's `updateV1` method allows you to update both the onchain and offchain
 portions of the NFT's metadata. To update the offchain metadata, you'll need to
 repeat the steps of uploading a new image and metadata URI (as outlined in the
 previous steps), then provide the new metadata URI to this method. This will
@@ -194,20 +238,22 @@ change the URI that the onchain metadata points to, effectively updating the
 offchain metadata as well.
 
 ```typescript
-const nft = await metaplex.nfts().findByMint({ mintAddress });
+const nft = await fetchMetadataFromSeeds(umi, { mint });
 
-const { response } = await metaplex.nfts().update(
-  {
-    nftOrSft: nft,
+await updateV1(umi, {
+  mint,
+  authority: umi.identity,
+  data: {
+    ...nft,
+    sellerFeeBasisPoints: 0,
     name: "Updated Name",
-    uri: uri,
-    sellerFeeBasisPoints: 100,
   },
-  { commitment: "finalized" },
-);
+  primarySaleHappened: true,
+  isMutable: true,
+}).sendAndConfirm(umi);
 ```
 
-Note that any fields you don't include in the call to `update` will stay the
+Note that any fields you don't include in the call to `updateV1` will stay the
 same, by design.
 
 #### Add the NFT to a collection
@@ -232,30 +278,31 @@ NFT Metadata: `isCollection`. This field tells the token program that this NFT
 is a Collection NFT.
 
 ```typescript
-const { collectionNft } = await metaplex.nfts().create(
-  {
-    uri: uri,
-    name: "My NFT Collection",
-    sellerFeeBasisPoints: 0,
-    isCollection: true,
-  },
-  { commitment: "finalized" },
-);
+const collectionMint = generateSigner(umi);
+
+await createNft(umi, {
+  mint: collectionMint,
+  name: `My Collection`,
+  uri,
+  sellerFeeBasisPoints: percentAmount(0),
+  isCollection: true,
+}).sendAndConfirm(umi);
 ```
 
-You then list the collection's Mint Address as the reference for the
-`collection` field in our new Nft.
+To mint an NFT into this collection, pass a
+[Collection type](https://mpl-token-metadata-js-docs.vercel.app/types/Collection.html)
+in the `collection` field. It has two fields: `key`, the address of the
+`collectionMint` generated above, and `verified`.
 
 ```typescript
-const { nft } = await metaplex.nfts().create(
-  {
-    uri: uri,
-    name: "My NFT",
-    sellerFeeBasisPoints: 0,
-    collection: collectionNft.mintAddress,
-  },
-  { commitment: "finalized" },
-);
+const { signature, result } = await createNft(umi, {
+  mint,
+  name: "My NFT",
+  uri,
+  updateAuthority: umi.identity.publicKey,
+  sellerFeeBasisPoints: percentAmount(0),
+  collection: { key: collectionMint.publicKey, verified: false },
+}).sendAndConfirm(umi, { send: { commitment: "finalized" } });
 ```
 
 When you checkout the metadata on your newly created NFT, you should now see a
@@ -270,62 +317,71 @@ When you checkout the metadata on your newly created NFT, you should now see a
 
 The last thing you need to do is verify the NFT. This effectively just flips the
 `verified` field above to true, but it's incredibly important. This is what lets
-consuming programs and apps know that your NFT is in fact part of the
-collection. You can do this using the `verifyCollection` function:
+consuming programs and apps, including wallets and art marketplaces, know that
+your NFT is in fact part of the collection, because the Collection's owner has
+signed a transaction making the NFT a member of that collection.
You can do this +using the `verifyCollectionV1` function: ```typescript -await metaplex.nfts().verifyCollection({ - mintAddress: nft.address, - collectionMintAddress: collectionNft.address, - isSizedCollection: true, -}); +const metadata = findMetadataPda(umi, { mint: mint.publicKey }); + +await verifyCollectionV1(umi, { + metadata, + collectionMint, + authority: umi.identity, +}).sendAndConfirm(umi); ``` ### Lab -In this lab, we'll go through the steps to create an NFT using the Metaplex SDK, -update the NFT's metadata after the fact, and then associate the NFT with a -collection. By the end, you will have a basic understanding of how to use the -Metaplex SDK to interact with NFTs on Solana. +In this lab, we'll go through the steps to create an NFT using the Metaplex Umi +framework, update the NFT's metadata after the fact, and then associate the NFT +with a collection. By the end, you will have a basic understanding of how to use +the Metaplex Umi and the mplTokenMetadata library to interact with NFTs on +Solana. #### Part 1: Creating an NFT collection To begin, make a new folder and install the relevant dependencies: +```bash +npm i @solana/web3.js@1 @solana-developers/helpers@2 @metaplex-foundation/mpl-token-metadata @metaplex-foundation/umi-bundle-defaults @metaplex-foundation/umi-uploader-irys esrun ``` -npm i "@solana/web3.js" "@solana-developers/helpers "@metaplex-foundation/js" -``` - -As of July 15 2024 Metaplex shows a deprecation warning -when installing this package. However, as mentioned above, the -[Metaplex JavaScript SDK](https://github.com/metaplex-foundation/js) remains the -simplest and most popular JS/TS client to create NFTs with broad wallet -support. -Then create a file called `create-metaplex-collection.ts`, and add our imports: +Then create a file called `create-metaplex-nft-collection.ts`, and add our +imports: ```typescript -import { Connection, clusterApiUrl, LAMPORTS_PER_SOL } from "@solana/web3.js"; import { - getKeypairFromFile, - airdropIfRequired, -} from "@solana-developers/helpers"; + createNft, + mplTokenMetadata, +} from "@metaplex-foundation/mpl-token-metadata"; import { - Metaplex, + createGenericFile, + generateSigner, keypairIdentity, - irysStorage, - toMetaplexFile, -} from "@metaplex-foundation/js"; -import { readFileSync } from "fs"; + percentAmount, +} from "@metaplex-foundation/umi"; +import { createUmi } from "@metaplex-foundation/umi-bundle-defaults"; +import { irysUploader } from "@metaplex-foundation/umi-uploader-irys"; +import { + airdropIfRequired, + getExplorerLink, + getKeypairFromFile, +} from "@solana-developers/helpers"; +import { clusterApiUrl, Connection, LAMPORTS_PER_SOL } from "@solana/web3.js"; +import { promises as fs } from "fs"; +import * as path from "path"; ``` Connect to devnet, load a user and Airdrop some SOL if needed: ```typescript -// create a new connection to the cluster's API +// create a new connection to Solana's devnet cluster const connection = new Connection(clusterApiUrl("devnet")); -// initialize a keypair for the user +// load keypair from local file system +// assumes that the keypair is already generated using `solana-keygen new` const user = await getKeypairFromFile(); await airdropIfRequired( @@ -338,85 +394,96 @@ await airdropIfRequired( console.log("Loaded user:", user.publicKey.toBase58()); ``` -Connect to Metaplex and Irys: +Create a new Umi instance, assign it the loaded keypair, load the +`mplTokenMetadata` to interact with the metadata program and `irysUploader` to +upload our files. 
```typescript
-// metaplex set up
-const metaplex = Metaplex.make(connection)
-  .use(keypairIdentity(user))
-  .use(
-    irysStorage({
-      address: "https://devnet.irys.xyz",
-      providerUrl: "https://api.devnet.solana.com",
-      timeout: 60000,
-    }),
-  );
-```
+const umi = createUmi(connection);

-Add the data we want in for our Collection:
+// convert the `user` keypair we loaded earlier into a umi compatible keypair
+// See https://github.com/solana-developers/helpers?tab=readme-ov-file#get-a-keypair-from-a-keypair-file
+const umiKeypair = umi.eddsa.createKeypairFromSecretKey(user.secretKey);

-```typescript
-const collectionNftData = {
-  name: "TestCollectionNFT",
-  symbol: "TEST",
-  description: "Test Description Collection",
-  sellerFeeBasisPoints: 100,
-  imageFile: "success.png",
-  isCollection: true,
-  collectionAuthority: user,
-};
+
+// assigns a signer to our umi instance, and loads the MPL metadata program and Irys uploader plugins.
+umi
+  .use(keypairIdentity(umiKeypair))
+  .use(mplTokenMetadata())
+  .use(irysUploader());
```

-Upload the offchain metadata to irys:
+Download the collection and NFT image assets from the links below and save them
+inside your working directory:
+
+1. collection image:
+   https://github.com/solana-developers/professional-education/blob/main/labs/metaplex-umi/collection.png
+
+2. NFT image:
+   https://github.com/solana-developers/professional-education/blob/main/labs/metaplex-umi/nft.png
+
+We will use these images as our collection and NFT cover images respectively.
+
+We will use Irys as our storage provider, and Metaplex conveniently ships the
+`umi-uploader-irys` plugin we can use to upload our files. The plugin also
+takes care of storage fees, so we don't have to worry about handling this on
+our own.
+ +Upload the offchain metadata to Irys: ```typescript -// Load file into Metaplex -const buffer = readFileSync(collectionNftData.imageFile); -const file = toMetaplexFile(buffer, collectionNftData.imageFile); +const collectionImagePath = path.resolve(__dirname, "collection.png"); -// upload image and get image uri -const imageUri = await metaplex.storage().upload(file); -console.log("image uri:", imageUri); - -// upload metadata and get metadata uri (off chain metadata) -const uploadMetadataOutput = await metaplex.nfts().uploadMetadata({ - name: collectionNftData.name, - symbol: collectionNftData.symbol, - description: collectionNftData.description, - image: imageUri, +const buffer = await fs.readFile(collectionImagePath); +let file = createGenericFile(buffer, collectionImagePath, { + contentType: "image/png", }); - -const collectionUri = uploadMetadataOutput.uri; -console.log("Collection offchain metadata URI:", collectionUri); +const [image] = await umi.uploader.upload([file]); +console.log("image uri:", image); + +// upload offchain json to Arweave using irys +const uri = await umi.uploader.uploadJson({ + name: "My Collection", + symbol: "MC", + description: "My Collection description", + image, +}); +console.log("Collection offchain metadata URI:", uri); ``` Then actually make the collection: -``` -// create a collection NFT using the URI from the metadata -const createNftOutput = await metaplex.nfts().create( - { - uri: collectionUri, - name: collectionNftData.name, - sellerFeeBasisPoints: collectionNftData.sellerFeeBasisPoints, - symbol: collectionNftData.symbol, - isCollection: true, - }, - { commitment: "finalized" } -); - -const collectionNft = createNftOutput.nft; +```typescript +// generate mint keypair +const collectionMint = generateSigner(umi); + +// create and mint NFT +await createNft(umi, { + mint: collectionMint, + name: "My Collection", + uri, + updateAuthority: umi.identity.publicKey, + sellerFeeBasisPoints: percentAmount(0), + isCollection: true, +}).sendAndConfirm(umi, { send: { commitment: "finalized" } }); -console.log( - `Collection NFT: https://explorer.solana.com/address/${collectionNft.address.toString()}?cluster=devnet` +let explorerLink = getExplorerLink( + "address", + collectionMint.publicKey, + "devnet", ); - -console.log(`Collection NFT address is`, collectionNft.address.toString()); - +console.log(`Collection NFT: ${explorerLink}`); +console.log(`Collection NFT address is:`, collectionMint.publicKey); console.log("✅ Finished successfully!"); ``` -Run the file with: +We advise using [esrun](https://www.npmjs.com/package/esrun) to run the scripts +because it allows you to use top level await without having to wrap your code +inside asynchronous function. 
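If you'd rather not depend on esrun, a minimal sketch of that wrapper (the `main` function name here is just an illustration, not part of the lab code):

```typescript
// Alternative to esrun: wrap the script body in an async function so `await`
// works without top-level await support, then invoke it.
async function main() {
  // ... the upload and createNft steps from above go here ...
}

main().catch(error => {
  console.error(error);
  process.exit(1);
});
```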
+
+Run the `create-metaplex-nft-collection.ts` script:

```bash
npx esrun create-metaplex-nft-collection.ts
```

The output should look like this:

```
% npx esrun create-metaplex-nft-collection.ts
-Loaded user: 4DRi8LxWhAWf9LwUuyb3Rz4Y4USEKzjdvQBWTyrkJtwg
-image uri: https://arweave.net/_3vbKzFO7zcfMkFaVdhorntkPHDqz4YNTyf_9lrGw1c
-Collection offchain metadata URI: https://arweave.net/Jw29lMkHp-PIxddEuLFkavdOTP1bJ9MeXTu-QqY3qTM
-Collection NFT: https://explorer.solana.com/address/8dP75EWHnYAc6pgeiToXtE66qEaHx1PcoetZpmnAQeAp?cluster=devnet
-Collection NFT address is 8dP75EWHnYAc6pgeiToXtE66qEaHx1PcoetZpmnAQeAp
+
+Loaded user: 4kg8oh3jdNtn7j2wcS7TrUua31AgbLzDVkBZgTAe44aF
+image uri: https://arweave.net/XWpt7HDOFC0wJQcQWgP9n_cxHS0qQik9-27CAAaGP6E
+Collection offchain metadata URI: https://arweave.net/atIf58t3FHa3heoOtNqPkVvEGC_9WzAduY0GQE-LnFI
+Collection NFT: https://explorer.solana.com/address/D2zi1QQmtZR5fk7wpA1Fmf6hTY2xy8xVMyNgfq6LsKy1?cluster=devnet
+Collection NFT address is: D2zi1QQmtZR5fk7wpA1Fmf6hTY2xy8xVMyNgfq6LsKy1
✅ Finished successfully!
```

-Congratulations! You've created a Metaplex Verified Collection. Check this out
-on Solana Explorer using the URL above. If you have any trouble, try and fix it
-yourself, but if you need to you can also check out the
-[solution code](https://github.com/solana-developers/professional-education/blob/main/labs/create-metaplex-nft-collection.ts).
+Congratulations! You've created a Metaplex Collection. Check this out on Solana
+Explorer using the URL above. It should resemble:
+
+![Solana Explorer with details about created collection](/public/assets/courses/unboxed/solana-explorer-metaplex-collection.png)
+
+If you have any trouble, try and fix it yourself, but if you need to you can
+also check out the
+[solution code](https://github.com/solana-developers/professional-education/blob/main/labs/metaplex-umi/create-collection.ts).

We'll use the collection NFT address in the next step.
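Before moving on, if you'd like to sanity-check the collection from code rather than in Explorer, here is a minimal sketch (not part of the lab itself) that could be appended to the same script, using `fetchDigitalAsset` from the same `@metaplex-foundation/mpl-token-metadata` package:

```typescript
import { fetchDigitalAsset } from "@metaplex-foundation/mpl-token-metadata";

// Read the collection NFT back from devnet to confirm it exists on-chain
const collectionAsset = await fetchDigitalAsset(umi, collectionMint.publicKey);

console.log("name:", collectionAsset.metadata.name);
// For a Collection NFT, collectionDetails holds a value (a V1 size record)
console.log("collectionDetails:", collectionAsset.metadata.collectionDetails);
```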
@@ -449,28 +521,35 @@ the same as the previous file, with slightly different imports: ```typescript import { - Connection, - clusterApiUrl, - PublicKey, - LAMPORTS_PER_SOL, -} from "@solana/web3.js"; + createNft, + findMetadataPda, + mplTokenMetadata, + verifyCollectionV1, +} from "@metaplex-foundation/mpl-token-metadata"; +import { + createGenericFile, + generateSigner, + keypairIdentity, + percentAmount, + publicKey as UMIPublicKey, +} from "@metaplex-foundation/umi"; +import { createUmi } from "@metaplex-foundation/umi-bundle-defaults"; +import { irysUploader } from "@metaplex-foundation/umi-uploader-irys"; import { - getKeypairFromFile, airdropIfRequired, + getExplorerLink, + getKeypairFromFile, } from "@solana-developers/helpers"; -import { - Metaplex, - keypairIdentity, - irysStorage, - toMetaplexFile, -} from "@metaplex-foundation/js"; -import { readFileSync } from "fs"; - -// create a new connection to the cluster's API +import { clusterApiUrl, Connection, LAMPORTS_PER_SOL } from "@solana/web3.js"; +import { promises as fs } from "fs"; +import * as path from "path"; +// create a new connection to Solana's devnet cluster const connection = new Connection(clusterApiUrl("devnet")); -// initialize a keypair for the user +// load keypair from local file system +// assumes that the keypair is already generated using `solana-keygen new` const user = await getKeypairFromFile(); +console.log("Loaded user:", user.publicKey.toBase58()); await airdropIfRequired( connection, @@ -479,145 +558,234 @@ await airdropIfRequired( 0.1 * LAMPORTS_PER_SOL, ); -console.log("Loaded user:", user.publicKey.toBase58()); +const umi = createUmi(connection); + +// convert to umi compatible keypair +const umiKeypair = umi.eddsa.createKeypairFromSecretKey(user.secretKey); -// metaplex set up -const metaplex = Metaplex.make(connection) - .use(keypairIdentity(user)) - .use( - irysStorage({ - address: "https://devnet.irys.xyz", - providerUrl: "https://api.devnet.solana.com", - timeout: 60000, - }), - ); +// load our plugins and signer +umi + .use(keypairIdentity(umiKeypair)) + .use(mplTokenMetadata()) + .use(irysUploader()); ``` Now let's tell Metaplex our collection, and the NFT we want to make: ```typescript // Substitute in your collection NFT address from create-metaplex-nft-collection.ts -const collectionNftAddress = new PublicKey("YOUR_COLLECTION_NFT_ADDRESS_HERE"); +const collectionNftAddress = UMIPublicKey("YOUR_COLLECTION_NFT_ADDRESS_HERE"); -// example data for a new NFT +// example data and metadata for our NFT const nftData = { - name: "Name", - symbol: "SYMBOL", - description: "Description", + name: "My NFT", + symbol: "MN", + description: "My NFT Description", sellerFeeBasisPoints: 0, - imageFile: "solana.png", + imageFile: "nft.png", }; ``` We can then put out files into Irys: ```typescript -// Load the file into Metaplex -const buffer = readFileSync(nftData.imageFile); -const file = toMetaplexFile(buffer, nftData.imageFile); +const NFTImagePath = path.resolve(__dirname, "nft.png"); -// upload image and get image uri -const imageUri = await metaplex.storage().upload(file); -console.log("image uri:", imageUri); - -// upload metadata and get metadata uri (off chain metadata) -const uploadMetadataOutput = await metaplex.nfts().uploadMetadata({ - name: nftData.name, - symbol: nftData.symbol, - description: nftData.description, - image: imageUri, +const buffer = await fs.readFile(NFTImagePath); +let file = createGenericFile(buffer, NFTImagePath, { + contentType: "image/png", }); -const metadataUri = 
uploadMetadataOutput.uri;
+// upload image and get image uri
+const [image] = await umi.uploader.upload([file]);
+console.log("image uri:", image);
+
+// upload offchain json using irys and get metadata uri
+const uri = await umi.uploader.uploadJson({
+  name: "My NFT",
+  symbol: "MN",
+  description: "My NFT Description",
+  image,
+});
+console.log("NFT offchain metadata URI:", uri);
```

And then create an NFT using the URI from the metadata:

```typescript
-const createNftOutput = await metaplex.nfts().create(
-  {
-    uri: metadataUri, // metadata URI
-    name: nftData.name,
-    sellerFeeBasisPoints: nftData.sellerFeeBasisPoints,
-    symbol: nftData.symbol,
-    collection: collectionNftAddress,
+// generate mint keypair
+const mint = generateSigner(umi);
+
+// create and mint NFT
+await createNft(umi, {
+  mint,
+  name: "My NFT",
+  symbol: "MN",
+  uri,
+  updateAuthority: umi.identity.publicKey,
+  sellerFeeBasisPoints: percentAmount(0),
+  collection: {
+    key: collectionNftAddress,
+    verified: false,
   },
-  { commitment: "finalized" },
-);
-const nft = createNftOutput.nft;
+}).sendAndConfirm(umi, { send: { commitment: "finalized" } });

-console.log(
-  `Token Mint: https://explorer.solana.com/address/${nft.address.toString()}?cluster=devnet`,
-);
+let explorerLink = getExplorerLink("address", mint.publicKey, "devnet");
+console.log(`Token Mint: ${explorerLink}`);
```

-Finally let's verify our mint as being part of our collection. This makes it so
+Run `npx esrun create-metaplex-nft.ts`. If all goes well, you will see the
+following:
+
+```
+% npx esrun create-metaplex-nft.ts
+
+Loaded user: 4kg8oh3jdNtn7j2wcS7TrUua31AgbLzDVkBZgTAe44aF
+image uri: https://arweave.net/XgTss3uKlddlMFjRTIvDiDLBv6Pptm-Vx9mz6Oe5f-o
+NFT offchain metadata URI: https://arweave.net/PK3Url31k4BYNvYOgTuYgWuCLrNjl5BrrF5lbY9miR8
+Token Mint: https://explorer.solana.com/address/CymscdAwuTRjCz1ezsNZa15MnwGNrxhGUEToLFcyijMT?cluster=devnet
+Created NFT address is CymscdAwuTRjCz1ezsNZa15MnwGNrxhGUEToLFcyijMT
+✅ Finished successfully!
+```
+
+Inspect your NFT at the address given! If you have any trouble, try and fix it
+yourself, but if you need to you can also check out the
+[solution code](https://github.com/solana-developers/professional-education/blob/main/labs/metaplex-umi/create-nft.ts).
+
+You should see something similar to this image on your Explorer page:
+![Solana Explorer with details about created NFT](/public/assets/courses/unboxed/solana-explorer-metaplex-nft.png)
+
+Finally, let's verify our mint as being part of our collection. This makes it so
the `verified` field in the onchain metadata is set to `true`, so consuming
programs and apps can know for sure that the NFT in fact belongs to the
-collection:
+collection.
+
+Create a new file `verify-metaplex-nft.ts`, import the required libraries, and
+instantiate a new Umi instance.
```typescript
-await metaplex.nfts().verifyCollection({
-  // Verify our collection as a Certified Collection
-  // See https://developers.metaplex.com/token-metadata/collections
-  mintAddress: nft.mint.address,
-  collectionMintAddress: collectionNftAddress,
-  isSizedCollection: true,
-});
+import {
+  findMetadataPda,
+  mplTokenMetadata,
+  verifyCollectionV1,
+} from "@metaplex-foundation/mpl-token-metadata";
+import {
+  keypairIdentity,
+  publicKey as UMIPublicKey,
+} from "@metaplex-foundation/umi";
+import { createUmi } from "@metaplex-foundation/umi-bundle-defaults";
+import { irysUploader } from "@metaplex-foundation/umi-uploader-irys";
+import {
+  airdropIfRequired,
+  getExplorerLink,
+  getKeypairFromFile,
+} from "@solana-developers/helpers";
+import { clusterApiUrl, Connection, LAMPORTS_PER_SOL } from "@solana/web3.js";
+
+// create a new connection to Solana's devnet cluster
+const connection = new Connection(clusterApiUrl("devnet"));

-console.log(`Created NFT address is`, nft.address.toString());
+// load keypair from local file system
+// assumes that the keypair is already generated using `solana-keygen new`
+const user = await getKeypairFromFile();
+console.log("Loaded user:", user.publicKey.toBase58());

+await airdropIfRequired(
+  connection,
+  user.publicKey,
+  1 * LAMPORTS_PER_SOL,
+  0.1 * LAMPORTS_PER_SOL,
+);
+
+const umi = createUmi(connection);
+
+// convert to umi compatible keypair and load our plugins and signer
+const umiKeypair = umi.eddsa.createKeypairFromSecretKey(user.secretKey);
+umi
+  .use(keypairIdentity(umiKeypair))
+  .use(mplTokenMetadata())
+  .use(irysUploader());
+
+// Substitute in your collection NFT address from create-metaplex-nft-collection.ts
+const collectionAddress = UMIPublicKey("");
+
+// Substitute in your NFT address from create-metaplex-nft.ts
+const nftAddress = UMIPublicKey("");
+```
+
+Verifying an NFT requires the `collectionAddress` you created in the collection
+creation stage; we will use the `verifyCollectionV1` method.
+
+```typescript
+// Verify our collection as a Certified Collection
+// See https://developers.metaplex.com/token-metadata/collections
+const metadata = findMetadataPda(umi, { mint: nftAddress });
+await verifyCollectionV1(umi, {
+  metadata,
+  collectionMint: collectionAddress,
+  authority: umi.identity,
+}).sendAndConfirm(umi);
+
+let explorerLink = getExplorerLink("address", nftAddress, "devnet");
+console.log(`verified collection: ${explorerLink}`);
console.log("✅ Finished successfully!");
```

-Run `npx esrun create-metaplex-nft.ts`. If all goes well, you will see the
+Run `npx esrun verify-metaplex-nft.ts`. If all goes well, you will see the
following:

```
% npx esrun verify-metaplex-nft.ts
-Loaded user: 4DRi8LxWhAWf9LwUuyb3Rz4Y4USEKzjdvQBWTyrkJtwg
-image uri: https://arweave.net/Z2JoDr7W4A3mWE9aTq8ex13IoPx1v1QewjVqPKPItuE
-Token Mint: https://explorer.solana.com/address/HCq8ERk1PSLCuBJeWx7du4dom3YbyfKpaCWDQ3Eft6aM?cluster=devnet
-Created NFT address is HCq8ERk1PSLCuBJeWx7du4dom3YbyfKpaCWDQ3Eft6aM
+
+Loaded user: 4kg8oh3jdNtn7j2wcS7TrUua31AgbLzDVkBZgTAe44aF
+verified collection: https://explorer.solana.com/address/CymscdAwuTRjCz1ezsNZa15MnwGNrxhGUEToLFcyijMT?cluster=devnet
✅ Finished successfully!
```

-Inspect your NFT at the address given! If you have any trouble, try and fix it
-yourself, but if you need to you can also check out the
-[solution code](https://github.com/solana-developers/professional-education/blob/main/labs/create-metaplex-nft.ts).
+Inspect your verified NFT at the address given! If you have any trouble, try and
+fix it yourself, but if you need to you can also check out the
+[solution code](https://github.com/solana-developers/professional-education/blob/main/labs/metaplex-umi/verify-nft.ts).
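As an optional extra check, you can read the flag back in code too. A minimal sketch, assuming the same `umi` and `nftAddress` from the verify script:

```typescript
import { fetchMetadataFromSeeds } from "@metaplex-foundation/mpl-token-metadata";
import { unwrapOption } from "@metaplex-foundation/umi";

// Re-fetch the NFT's metadata account and inspect the collection field
const verifiedMetadata = await fetchMetadataFromSeeds(umi, { mint: nftAddress });
const collection = unwrapOption(verifiedMetadata.collection);

console.log("collection key:", collection?.key);
console.log("verified:", collection?.verified); // should now be true
```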
+ +The verified flag on your NFT should now be set to `1` -> `true` showing that +it's verified. To confirm this, look under the metadata tab on the Solana +Explorer to confirm that your NFT is verified as part of the collection. + +![Solana Explorer with details about created NFT](/public/assets/courses/unboxed/solana-explorer-verified-nft.png) Remember the NFT address, we'll use it in the next step. #### 3. Update the NFT -Create a new file, called `update-metaplex-nft.ts`. The imports will be simila +Create a new file, called `update-metaplex-nft.ts`. The imports will be similar to our previous files: ```typescript import { - Connection, - clusterApiUrl, - PublicKey, - LAMPORTS_PER_SOL, -} from "@solana/web3.js"; + createNft, + fetchMetadataFromSeeds, + updateV1, + findMetadataPda, + mplTokenMetadata, +} from "@metaplex-foundation/mpl-token-metadata"; +import { + createGenericFile, + generateSigner, + keypairIdentity, + percentAmount, + publicKey as UMIPublicKey, +} from "@metaplex-foundation/umi"; +import { createUmi } from "@metaplex-foundation/umi-bundle-defaults"; +import { irysUploader } from "@metaplex-foundation/umi-uploader-irys"; import { - getKeypairFromFile, airdropIfRequired, getExplorerLink, + getKeypairFromFile, } from "@solana-developers/helpers"; -import { - Metaplex, - keypairIdentity, - irysStorage, - toMetaplexFile, -} from "@metaplex-foundation/js"; -import { readFileSync } from "fs"; +import { clusterApiUrl, Connection, LAMPORTS_PER_SOL } from "@solana/web3.js"; +import { promises as fs } from "fs"; +import * as path from "path"; -// create a new connection to the cluster's API +// create a new connection to Solana's devnet cluster const connection = new Connection(clusterApiUrl("devnet")); -// initialize a keypair for the user +// load keypair from local file system +// assumes that the keypair is already generated using `solana-keygen new` const user = await getKeypairFromFile(); +console.log("Loaded user:", user.publicKey.toBase58()); await airdropIfRequired( connection, @@ -626,18 +794,16 @@ await airdropIfRequired( 0.1 * LAMPORTS_PER_SOL, ); -console.log("Loaded user:", user.publicKey.toBase58()); +const umi = createUmi(connection); + +// convert to umi compatible keypair +const umiKeypair = umi.eddsa.createKeypairFromSecretKey(user.secretKey); -// metaplex set up -const metaplex = Metaplex.make(connection) - .use(keypairIdentity(user)) - .use( - irysStorage({ - address: "https://devnet.irys.xyz", - providerUrl: "https://api.devnet.solana.com", - timeout: 60000, - }), - ); +// load our plugins and signer +umi + .use(keypairIdentity(umiKeypair)) + .use(mplTokenMetadata()) + .use(irysUploader()); ``` Let's load our NFT, specifying the address from the previous example, and set up @@ -645,75 +811,83 @@ what we'd like to update: ```typescript // Load the NFT using the mint address -const nftAddress: PublicKey = new PublicKey("YOUR_NFT_ADDRESS_HERE"); -const nft = await metaplex.nfts().findByMint({ mintAddress: nftAddress }); +const mint = UMIPublicKey("YOUR_NFT_ADDRESS_HERE"); +const asset = await fetchDigitalAsset(umi, mint); // example data for updating an existing NFT const updatedNftData = { - name: "Updated", + name: "Updated Asset", symbol: "UPDATED", description: "Updated Description", - sellerFeeBasisPoints: 100, - imageFile: "success.png", + sellerFeeBasisPoints: 0, + imageFile: "nft.png", }; ``` We can then use Metaplex to update our NFT: ```typescript -// Load the image file into Metaplex -const buffer = readFileSync(updatedNftData.imageFile); -const 
file = toMetaplexFile(buffer, updatedNftData.imageFile); - -// Upload the new image and get image URI -const imageUri = await metaplex.storage().upload(file); -console.log("image uri:", imageUri); - -// Upload new offchain metadata -const uploadMetadataOutput = await metaplex.nfts().uploadMetadata({ - name: updatedNftData.name, - symbol: updatedNftData.symbol, - description: updatedNftData.description, - image: imageUri, +const NFTImagePath = path.resolve(__dirname, "nft.png"); + +const buffer = await fs.readFile(NFTImagePath); +let file = createGenericFile(buffer, NFTImagePath, { + contentType: "image/png", }); -const updatedUri = uploadMetadataOutput.uri; +// upload new image and get image uri +const [image] = await umi.uploader.upload([file]); +console.log("image uri:", image); -// update the NFT metadata -const { response } = await metaplex.nfts().update( - { - nftOrSft: nft, - uri: updatedUri, +// upload updated offchain json using irys and get metadata uri +const uri = await umi.uploader.uploadJson({ + name: "Updated ", + symbol: "UPDATED", + description: "Updated Description", + image, +}); +console.log("NFT offchain metadata URI:", uri); + +// Load the NFT using the mint address +const mint = UMIPublicKey("Zxd9TmtBHQNti6tJxtx1AKYJFykNUwJL4rth441CjRd"); +const nft = await fetchMetadataFromSeeds(umi, { mint }); + +await updateV1(umi, { + mint, + authority: umi.identity, + data: { + ...nft, + sellerFeeBasisPoints: 0, + name: "Updated Asset", }, - { commitment: "finalized" }, -); + primarySaleHappened: true, + isMutable: true, +}).sendAndConfirm(umi); -console.log( - `NFT updated with new metadata URI: ${getExplorerLink( - "transaction", - response.signature, - "devnet", - )}`, -); +let explorerLink = getExplorerLink("address", mint, "devnet"); +console.log(`NFT updated with new metadata URI: ${explorerLink}`); console.log("✅ Finished successfully!"); ``` Run `npx esrun update-metaplex-nft.ts`. You should see something like: -```typescript +```bash % npx esrun update-metaplex-nft.ts -Loaded user: 4DRi8LxWhAWf9LwUuyb3Rz4Y4USEKzjdvQBWTyrkJtwg -image uri: https://arweave.net/nd6rxpoGfPmB2gTutEgnH5B76CFlDxgM6cZ7BcUmFis -NFT updated with new metadata URI: https://explorer.solana.com/tx/4eiuMFLeku7ArV5xkRTPKxBXjfjQsSVGFZimMkMaMxK1d21qptQE5kgKiuob6J852m4GuoMqcjz1ee4oy7fXVEWU?cluster=devnet + +Loaded user: 4kg8oh3jdNtn7j2wcS7TrUua31AgbLzDVkBZgTAe44aF +image uri: https://arweave.net/dboiAebucLGhprtknDQnp-yMj348cpJF4aQul406odg +NFT offchain metadata URI: https://arweave.net/XEjo-44GHRFNOEtPUdDsQlW5z1Gtpk2Wv0HvR8ll1Bw +NFT updated with new metadata URI: https://explorer.solana.com/address/Zxd9TmtBHQNti6tJxtx1AKYJFykNUwJL4rth441CjRd?cluster=devnet ✅ Finished successfully! ``` Inspect the updated NFT on Solana Explorer! Just like previously, if you have any issues, you should fix them yourself, but if needed the -[solution code](https://github.com/solana-developers/professional-education/blob/main/labs/update-metaplex-nft.ts) +[solution code](https://github.com/solana-developers/professional-education/blob/main/labs/metaplex-umi/update-nft.ts) is available. +![Solana Explorer with details about the updated NFT](/public/assets/courses/unboxed/solana-explorer-with-updated-NFT.png) + Congratulations! You've successfully learned how to use the Metaplex SDK to create, update, and verify NFTs as part of a collection. That's everything you need to build out your own collection for just about any use case. 
You could diff --git a/content/courses/tokens-and-nfts/token-program-advanced.md b/content/courses/tokens-and-nfts/token-program-advanced.md index fd9472e45..b979dd803 100644 --- a/content/courses/tokens-and-nfts/token-program-advanced.md +++ b/content/courses/tokens-and-nfts/token-program-advanced.md @@ -1,5 +1,5 @@ --- -title: Token burning and Delegation +title: Token Burning and Delegation objectives: - Understand why and how to burn tokens - Allow a token holder to allocate a limited amount of tokens to another @@ -8,19 +8,34 @@ description: "How to burn tokens, and approve/revoke token delegations on Solana." --- +### Summary + +- **Burning tokens** reduces the total supply of a token by removing them from + circulation. +- **Approving a delegate**, allows another account to transfer or burn a + specified amount of tokens from a token account while retaining original + account ownership. +- **Revoking a delegate**, removes their authority to act on behalf of the token + account owner. +- Each of these operations is facilitated through the `spl-token` library, + utilizing specific functions for each action. + ### Lesson -Finally, we'll cover burning tokens, and delegation. You may not use these in -your own application, so if you're really excited about NFTs, feel free to skip -to [creating NFTs with Metaplex](/content/courses/tokens/nfts-with-metaplex)! +In this lesson, we'll cover burning tokens and delegation. You may not have a +need for these in your own application, so if you're more interested in NFTs, +feel free to skip ahead to +[creating NFTs with Metaplex](/content/courses/tokens-and-nfts/nfts-with-metaplex.md)! -### Burn Tokens +#### Burn Tokens Burning tokens is the process of decreasing the token supply of a given token mint. Burning tokens removes the tokens from the given token account and from broader circulation. -To burn tokens using the `spl-token` library, use the `burn` function. +To burn tokens using the `spl-token` library, use the +[`burn()`](https://solana-labs.github.io/solana-program-library/token/js/functions/burn.html#burn) +function. ```typescript import { burn } from "@solana/spl-token"; @@ -37,20 +52,22 @@ const transactionSignature = await burn( ); ``` -The `burn` function requires the following arguments: +The `burn()` function requires the following arguments: -- `connection` - the JSON-RPC connection to the cluster -- `payer` - the account of the payer for the transaction -- `account` - the token account to burn tokens from -- `mint` - the token mint associated with the token account -- `owner` - the account of the owner of the token account -- `amount` - the amount of tokens to burn +- `connection`: JSON-RPC connection to the cluster. +- `payer`: The account responsible for paying transaction fees. +- `account`: The token account from which tokens will be burned. +- `mint`: The token mint associated with the token account. +- `owner`: The owner of the token account. +- `amount`: The number of tokens to burn. -Under the hood, the `burn` function creates a transaction with instructions -obtained from the `createBurnInstruction` function: +Under the hood, the `burn()` function creates a transaction using the +instruction obtained from +[`createBurnInstruction()`](https://solana-labs.github.io/solana-program-library/token/js/functions/createBurnInstruction.html#createBurnInstruction) +function. 
```typescript -import { PublicKey, Transaction } from "@solana/web3"; +import { PublicKey, Transaction } from "@solana/web3.js"; import { createBurnInstruction } from "@solana/spl-token"; async function buildBurnTransaction( @@ -67,16 +84,16 @@ async function buildBurnTransaction( } ``` -### Approve Delegate +#### Approve Delegate Approving a delegate is the process of authorizing another account to transfer -or burn tokens from a token account. When using a delegate, the authority over -the token account remains with the original owner. The maximum amount of tokens -a delegate may transfer or burn is specified at the time the owner of the token -account approves the delegate. Note that there can only be one delegate account -associated with a token account at any given time. +or burn tokens from a token account. The authority over the token account +remains with the original owner. The maximum number of tokens a delegate can +transfer or burn is defined when the owner approves the delegate. Only one +delegate can be associated with a token account at a time. -To approve a delegate using the `spl-token` library, you use the `approve` +To approve a delegate using the `spl-token` library, use the +[`approve()`](https://solana-labs.github.io/solana-program-library/token/js/functions/approve.html#approve) function. ```typescript @@ -90,21 +107,23 @@ const transactionSignature = await approve( ); ``` -The `approve` function returns a `TransactionSignature` that can be viewed on -Solana Explorer. The `approve` function requires the following arguments: +The `approve()` function returns a `TransactionSignature` that can be viewed on +Solana Explorer. It requires the following arguments: -- `connection` - the JSON-RPC connection to the cluster -- `payer` - the account of the payer for the transaction -- `account` - the token account to delegate tokens from -- `delegate` - the account the owner is authorizing to transfer or burn tokens -- `owner` - the account of the owner of the token account -- `amount` - the maximum number of tokens the delegate may transfer or burn +- `connection`: The JSON-RPC connection to the cluster. +- `payer`: The account of the payer for the transaction. +- `account`: The token account to delegate tokens from. +- `delegate`: The account authorized to transfer or burn tokens. +- `owner`: The account of the owner of the token account. +- `amount`: The maximum number of tokens the delegate can transfer or burn. -Under the hood, the `approve` function creates a transaction with instructions -obtained from the `createApproveInstruction` function: +Under the hood, the `approve()` function creates a transaction with instructions +obtained from the +[`createApproveInstruction()`](https://solana-labs.github.io/solana-program-library/token/js/functions/createApproveInstruction.html#createApproveInstruction) +function. ```typescript -import { PublicKey, Transaction } from "@solana/web3"; +import { PublicKey, Transaction } from "@solana/web3.js"; import { createApproveInstruction } from "@solana/spl-token"; async function buildApproveTransaction( @@ -121,14 +140,15 @@ async function buildApproveTransaction( } ``` -### Revoke Delegate +#### Revoke Delegate -A previously approved delegate for a token account can be later revoked. Once a -delegate is revoked, the delegate can no longer transfer tokens from the owner's -token account. Any remaining amount left untransferred from the previously -approved amount can no longer be transferred by the delegate. 
+A previously approved delegate for a token account can be revoked. Once revoked, +the delegate can no longer transfer tokens from the owner's token account. Any +untransferred amount from the previously approved tokens will no longer be +accessible by the delegate. -To revoke a delegate using the `spl-token` library, you use the `revoke` +To revoke a delegate using the `spl-token` library, use the +[`revoke()`](https://solana-labs.github.io/solana-program-library/token/js/functions/revoke.html#revoke) function. ```typescript @@ -137,20 +157,22 @@ import { revoke } from "@solana/spl-token"; const transactionSignature = await revoke(connection, payer, account, owner); ``` -The `revoke` function returns a `TransactionSignature` that can be viewed on -Solana Explorer. The `revoke` function requires the following arguments: +The `revoke()` function returns a `TransactionSignature` that can be viewed on +Solana Explorer. This function requires the following arguments: -- `connection` - the JSON-RPC connection to the cluster -- `payer` - the account of the payer for the transaction -- `account` - the token account to revoke the delegate authority from -- `owner` - the account of the owner of the token account +- `connection`: The JSON-RPC connection to the cluster. +- `payer`: The account responsible for paying the transaction fees. +- `account`: The token account from which to revoke the delegate authority. +- `owner`: The account of the owner of the token account. -Under the hood, the `revoke` function creates a transaction with instructions -obtained from the `createRevokeInstruction` function: +Under the hood, the `revoke()` function generates a transaction using the +instructions from the +[`createRevokeInstruction()`](https://solana-labs.github.io/solana-program-library/token/js/functions/createRevokeInstruction.html#createRevokeInstruction) +function: ```typescript -import { PublicKey, Transaction } from "@solana/web3"; -import { revoke } from "@solana/spl-token"; +import { PublicKey, Transaction } from "@solana/web3.js"; +import { createRevokeInstruction } from "@solana/spl-token"; async function buildRevokeTransaction( account: PublicKey, @@ -166,113 +188,187 @@ async function buildRevokeTransaction( ### Lab -This lab extends the lab from the -[previous chapter](/content/courses/tokens/token-program). +This lab extends the concepts covered in the previous lesson on the +[Token Program](/content/courses/tokens-and-nfts/token-program.md). -#### 1. Delegating tokens +#### 1. Delegating Tokens -Let's use `approve` from `spl-token` to authorize a delegate to transfer or burn -up to 50 tokens from our token account. +We will use the `approve()` function from the `spl-token` library to authorize a +delegate to transfer or burn up to 50 tokens from our token account. -Just like [Transferring Tokens](/content/courses/tokens/token-program) in the -previous lab, you can -[add a second account on devnet](/developers/courses/intro-to-solana/intro-to-cryptography) -if you like, or find a friend who has a devnet account! +Similar to the process of +[Transferring Tokens](/content/courses/tokens-and-nfts/token-program.md#transferring-tokens) +in the previous lab, you can +[add a second account on Devnet](/content/courses/intro-to-solana/intro-to-cryptography.md) +if desired or collaborate with a friend who has a Devnet account. -Create a new file `delegate-tokens.ts` +Create a new file named `delegate-tokens.ts`. 
For this example, we are using the +System Program ID as a delegate for demonstration, but you can use an actual +address that you want to delegate. -```typescript +```typescript filename="delegate-tokens.ts" import "dotenv/config"; import { getExplorerLink, getKeypairFromEnvironment, } from "@solana-developers/helpers"; -import { Connection, PublicKey, clusterApiUrl } from "@solana/web3.js"; import { - approve, - getOrCreateAssociatedTokenAccount, - revoke, -} from "@solana/spl-token"; - -const connection = new Connection(clusterApiUrl("devnet")); - + Connection, + PublicKey, + clusterApiUrl, + SystemProgram, +} from "@solana/web3.js"; +import { approve, getOrCreateAssociatedTokenAccount } from "@solana/spl-token"; + +const DEVNET_URL = clusterApiUrl("devnet"); +const TOKEN_DECIMALS = 2; +const DELEGATE_AMOUNT = 50; +const MINOR_UNITS_PER_MAJOR_UNITS = 10 ** TOKEN_DECIMALS; + +// Initialize connection and load user keypair +const connection = new Connection(DEVNET_URL); const user = getKeypairFromEnvironment("SECRET_KEY"); -console.log( - `🔑 Loaded our keypair securely, using an env file! Our public key is: ${user.publicKey.toBase58()}`, -); +console.log(`🔑 Loaded keypair. Public key: ${user.publicKey.toBase58()}`); -// Add the delegate public key here. -const delegate = new PublicKey("YOUR_DELEGATE_HERE"); +// Replace this with your actual address +// For this example, we will be using System Program's ID as a delegate +const delegatePublicKey = new PublicKey(SystemProgram.programId); -// Substitute in your token mint account -const tokenMintAccount = new PublicKey("YOUR_TOKEN_MINT_ADDRESS_HERE"); +// Substitute your token mint address +const tokenMintAddress = new PublicKey("YOUR_TOKEN_MINT_ADDRESS_HERE"); -// Get or create the source and destination token accounts to store this token -const sourceTokenAccount = await getOrCreateAssociatedTokenAccount( - connection, - user, - tokenMintAccount, - user.publicKey, -); - -// Our token has two decimal places -const MINOR_UNITS_PER_MAJOR_UNITS = Math.pow(10, 2); +try { + // Get or create the user's token account + const userTokenAccount = await getOrCreateAssociatedTokenAccount( + connection, + user, + tokenMintAddress, + user.publicKey, + ); -const approveTransactionSignature = await approve( - connection, - user, - sourceTokenAccount.address, - delegate, - user.publicKey, - 50 * MINOR_UNITS_PER_MAJOR_UNITS, -); + // Approve the delegate + const approveTransactionSignature = await approve( + connection, + user, + userTokenAccount.address, + delegatePublicKey, + user.publicKey, + DELEGATE_AMOUNT * MINOR_UNITS_PER_MAJOR_UNITS, + ); -console.log( - `Approve Delegate Transaction: ${getExplorerLink( + const explorerLink = getExplorerLink( "transaction", approveTransactionSignature, "devnet", - )}`, -); + ); + + console.log(`✅ Delegate approved. Transaction: ${explorerLink}`); +} catch (error) { + console.error( + `Error: ${error instanceof Error ? error.message : String(error)}`, + ); +} +``` + +Replace `YOUR_TOKEN_MINT_ADDRESS_HERE` with your token mint address obtained +from the previous lesson +[Token Program](/content/courses/tokens-and-nfts/token-program.md#create-the-token-mint). + +Run the script using `npx esrun delegate-tokens.ts`. You should see: + +```bash +🔑 Loaded keypair. Public key: GprrWv9r8BMxQiWea9MrbCyK7ig7Mj8CcseEbJhDDZXM +✅ Delegate approved. 
Transaction: https://explorer.solana.com/tx/21tX6L7zk5tkHeoD7V1JYYW25VAWRfQrJPnxDcMXw94yuFbHxX4UZEgS6k6co9dBWe7PqFoMoWEVfbVA92Dk4xsQ?cluster=devnet
```

+Open the Explorer link and you will see the approval information.
+
+![Delegate Tokens](/public/assets/courses/unboxed/delegate-token.png)
+
#### 2. Revoke Delegate

-Lets revoke the `delegate` using the `spl-token` library's `revoke` function.
+Let's revoke the `delegate` using the `spl-token` library's `revoke()` function.

-Revoke will set delegate for the token account to null and reset the delegated
-amount to 0.
+Revoke will set the delegate for the token account to null and reset the
+delegated amount to 0.

-All we will need for this function is the token account and user. After the
+Create a new file `revoke-approve-tokens.ts`.

-```typescript
-const revokeTransactionSignature = await revoke(
-  connection,
-  user,
-  delegate,
-  user.publicKey,
-);
+```typescript filename="revoke-approve-tokens.ts"
+import "dotenv/config";
+import {
+  getExplorerLink,
+  getKeypairFromEnvironment,
+} from "@solana-developers/helpers";
+import { Connection, PublicKey, clusterApiUrl } from "@solana/web3.js";
+import { revoke, getOrCreateAssociatedTokenAccount } from "@solana/spl-token";

-console.log(
-  `Revoke Delegate Transaction: ${getExplorerLink(
+const DEVNET_URL = clusterApiUrl("devnet");
+// Substitute your token mint address
+const TOKEN_MINT_ADDRESS = "YOUR_TOKEN_MINT_ADDRESS_HERE";
+
+const connection = new Connection(DEVNET_URL);
+const user = getKeypairFromEnvironment("SECRET_KEY");
+
+console.log(`🔑 Loaded keypair. Public key: ${user.publicKey.toBase58()}`);
+
+try {
+  const tokenMintAddress = new PublicKey(TOKEN_MINT_ADDRESS);
+
+  const userTokenAccount = await getOrCreateAssociatedTokenAccount(
+    connection,
+    user,
+    tokenMintAddress,
+    user.publicKey,
+  );
+
+  const revokeTransactionSignature = await revoke(
+    connection,
+    user,
+    userTokenAccount.address,
+    user.publicKey,
+  );
+
+  const explorerLink = getExplorerLink(
    "transaction",
    revokeTransactionSignature,
    "devnet",
-  )}`,
-);
+  );
+
+  console.log(`✅ Revoke Delegate Transaction: ${explorerLink}`);
+} catch (error) {
+  console.error(
+    `Error: ${error instanceof Error ? error.message : String(error)}`,
+  );
+}
+```
+
+Replace `YOUR_TOKEN_MINT_ADDRESS_HERE` with your token mint address obtained
+from the previous lesson
+[Token Program](/content/courses/tokens-and-nfts/token-program.md#create-the-token-mint).
+
+Run the script using `npx esrun revoke-approve-tokens.ts`. You should see:
+
+```bash
+🔑 Loaded keypair. Public key: GprrWv9r8BMxQiWea9MrbCyK7ig7Mj8CcseEbJhDDZXM
+✅ Revoke Delegate Transaction: https://explorer.solana.com/tx/YTc2Vd41SiGiHf3iEPkBH3y164fMbV2TSH2hbe7WypT6K6Q2b3f31ryFWhypmBK2tXmvGYjXeYbuwxHeJvnZZX8?cluster=devnet
+```
+
+Open the Explorer link and you will see the revoke information.
+
+![Revoke Approve Tokens](/public/assets/courses/unboxed/revoke-approve-tokens.png)
+
#### 3. Burn Tokens

Finally, let's remove some tokens from circulation by burning them.

-Use the `spl-token` library's `burn` function to remove half of your tokens from
-circulation.
+Use the `spl-token` library's `burn()` function to remove some of your tokens
+from circulation. Now, call this function to burn 5 of the user's tokens.

-Now call this new function in `main` to burn 25 of the user's tokens.
+Create a new file `burn-tokens.ts`.
-```typescript +```typescript filename="burn-tokens.ts" import "dotenv/config"; import { getExplorerLink, @@ -281,51 +377,70 @@ import { import { Connection, PublicKey, clusterApiUrl } from "@solana/web3.js"; import { getOrCreateAssociatedTokenAccount, burn } from "@solana/spl-token"; -const connection = new Connection(clusterApiUrl("devnet")); +const DEVNET_URL = clusterApiUrl("devnet"); +const TOKEN_DECIMALS = 2; +const BURN_AMOUNT = 5; +// Substitute your token mint address +const TOKEN_MINT_ADDRESS = "YOUR_TOKEN_MINT_ADDRESS_HERE"; +const connection = new Connection(DEVNET_URL); const user = getKeypairFromEnvironment("SECRET_KEY"); -console.log( - `🔑 Loaded our keypair securely, using an env file! Our public key is: ${user.publicKey.toBase58()}`, -); +console.log(`🔑 Loaded keypair. Public key: ${user.publicKey.toBase58()}`); -// Substitute in your token mint account -const tokenMintAccount = new PublicKey("YOUR_TOKEN_MINT_ADDRESS_HERE"); +try { + const tokenMintAccount = new PublicKey(TOKEN_MINT_ADDRESS); -// Get the account where the user stores these tokens -const sourceTokenAccount = await getOrCreateAssociatedTokenAccount( - connection, - user, - tokenMintAccount, - user.publicKey, -); + const userTokenAccount = await getOrCreateAssociatedTokenAccount( + connection, + user, + tokenMintAccount, + user.publicKey, + ); -// Our token has two decimal places -const MINOR_UNITS_PER_MAJOR_UNITS = Math.pow(10, 2); + const burnAmount = BURN_AMOUNT * 10 ** TOKEN_DECIMALS; -const transactionSignature = await burn( - connection, - user, - sourceTokenAccount.address, - tokenMintAccount, - user, - 25 * MINOR_UNITS_PER_MAJOR_UNITS, -); + const transactionSignature = await burn( + connection, + user, + userTokenAccount.address, + tokenMintAccount, + user, + burnAmount, + ); -console.log( - `Burn Transaction: ${getExplorerLink( + const explorerLink = getExplorerLink( "transaction", transactionSignature, "devnet", - )}`, -); + ); + + console.log(`✅ Burn Transaction: ${explorerLink}`); +} catch (error) { + console.error( + `Error: ${error instanceof Error ? error.message : String(error)}`, + ); +} ``` -Well done! You've now +Replace `YOUR_TOKEN_MINT_ADDRESS_HERE` with your mint token address obtained +from the previous chapter +[Token Program](/content/courses/tokens-and-nfts/token-program.md#create-the-token-mint). + +Run the script using `npx esrun burn-tokens.ts`. You should see: + +```bash +🔑 Loaded keypair. Public key: GprrWv9r8BMxQiWea9MrbCyK7ig7Mj8CcseEbJhDDZXM +✅ Burn Transaction: https://explorer.solana.com/tx/5Ufipgvsi5aLzzcr8QQ7mLXHyCwBDqsPxGTPinvFpjSiARnEDgFiPbD2ZiaDkkmwKDMoQ94bf5uqF2M7wjFWcKuv?cluster=devnet +``` + +Open the Explorer link, you will see the burn information. + +![Burn Tokens](/public/assets/courses/unboxed/burn-tokens.png) - +Well done! You've now completed the lab. -### Completed the lab? + Push your code to GitHub and [tell us what you thought of this lesson](https://form.typeform.com/to/IPH0UGz7#answers-lesson=72cab3b8-984b-4b09-a341-86800167cfc7)! diff --git a/content/courses/tokens-and-nfts/token-program.md b/content/courses/tokens-and-nfts/token-program.md index f06d0e69c..6c05a868f 100644 --- a/content/courses/tokens-and-nfts/token-program.md +++ b/content/courses/tokens-and-nfts/token-program.md @@ -28,7 +28,7 @@ description: - Creating Token Mints and Token Accounts requires allocating **rent** in SOL. The rent for a Token Account can be refunded when the account is closed. 
Additionally, tokens created with the - [Token Extensions Program](/developers/courses/token-extensions-for-mints/close-mint) + [Token Extensions Program](/content/courses/token-extensions/close-mint.md) can also close Token Mints. ### Lesson @@ -402,7 +402,7 @@ async function buildMintToTransaction( SPL Token transfers require both the sender and receiver to have token accounts for the mint of the tokens being transferred. The tokens are transferred from -the sender’s token account to the receiver’s token account. +the sender's token account to the receiver's token account. You can use `getOrCreateAssociatedTokenAccount` when obtaining the receiver's associated token account to ensure their token account exists before the @@ -457,14 +457,14 @@ async function buildTransferTransaction( ### Lab -We’re going to use the Token Token Program to create a Token Mint, create an +We're going to use the Token Program to create a Token Mint, create an Associated Token Account, mint tokens, transfer tokens, and burn tokens. Assuming you already have a `.env` file with a `SECRET_KEY` setup per -[Cryptography fundamentals](/developers/courses/intro-to-solana/intro-to-cryptography). +[Cryptography fundamentals](/content/courses/intro-to-solana/intro-to-cryptography.md). ```bash -npm i @solana/web3.js @solana/spl-token @solana-developers/helpers esrun +npm i @solana/web3.js@1 @solana/spl-token @solana-developers/helpers@2 esrun ``` #### Create the Token Mint @@ -476,7 +476,7 @@ and `freezeAuthority`. Think of the token mint as the factory that makes tokens. Our `user`, as the `mintAuthority` is the person that runs the factory. -```typescript +```typescript filename="create-token-mint.ts" import { createMint } from "@solana/spl-token"; import "dotenv/config"; import { @@ -530,7 +530,7 @@ npm i @metaplex-foundation/mpl-token-metadata@2 Create a new file called `create-token-metadata.ts` -```typescript +```typescript filename="create-token-metadata.ts" // This uses "@metaplex-foundation/mpl-token-metadata@2" to create tokens import "dotenv/config"; import { @@ -617,7 +617,7 @@ const transactionLink = getExplorerLink( "devnet", ); -console.log(`✅ Transaction confirmed, explorer link is: ${transactionLink}!`); +console.log(`✅ Transaction confirmed, explorer link is: ${transactionLink}`); const tokenMintLink = getExplorerLink( "address", @@ -625,9 +625,12 @@ const tokenMintLink = getExplorerLink( "devnet", ); -console.log(`✅ Look at the token mint again: ${tokenMintLink}!`); +console.log(`✅ Look at the token mint again: ${tokenMintLink}`); ``` +Replace `YOUR_TOKEN_MINT_ADDRESS_HERE` with your address of the mint and run the +script using `npx esrun create-token-metadata.ts`. + You'll now see Solana Explorer is updated, showing the token's name and symbol on the mint! @@ -655,7 +658,7 @@ on a wallet and our mint address, making the account if it needs to. Remember to substitute in your token mint address below! -```typescript +```typescript filename="create-token-account.ts" import { getOrCreateAssociatedTokenAccount } from "@solana/spl-token"; import "dotenv/config"; import { @@ -697,7 +700,7 @@ const link = getExplorerLink( console.log(`✅ Created token Account: ${link}`); ``` -Run the script using `npx esrun create-token-mint.ts`. You should see: +Run the script using `npx esrun create-token-account.ts`. You should see: ```bash ✅ Success! 
Created token account: https://explorer.solana.com/address/CTjoLdEeK8rk4YWYW9ZqACyjHexbYKH3hEoagHxLVEFs?cluster=devnet @@ -707,16 +710,19 @@ Open the token account in Solana Explorer. Look at the owner - it's the account you made the ATA for! The balance will be zero, as we haven't sent any tokens there yet. Let's mint some tokens there and fix that! +Remember the address of your token account ! We'll use it to mint tokens. + #### Mint Tokens Now that we have a token mint and a token account, let's mint tokens to the token account. Recall that we set the `user` as the `mintAuthority` for the `mint` we created. -Create a function `mintTokens` that uses the `spl-token` function `mintTo` to -mint tokens: +Create an empty file called `mint-tokens.ts`. Then uses the `spl-token` function +`mintTo()` to mint tokens. Remember to substitute in your token mint address and +token account address below! -```typescript +```typescript filename="mint-tokens.ts" import { mintTo } from "@solana/spl-token"; import "dotenv/config"; import { @@ -766,7 +772,7 @@ account! Next, let's transfer some of the tokens we just minted using the `spl-token` library's `transfer` function. You can -[add a second account on devnet](/developers/courses/intro-to-solana/intro-to-cryptography) +[add a second account on devnet](/content/courses/intro-to-solana/intro-to-cryptography.md) if you like, or find a friend who has a devnet account and send them your token! As you saw in Explorer, the tokens currently reside in an Associated Token @@ -776,7 +782,11 @@ associated token account - we can just look it up using mint of the token we want to send. Likewise, we can find (or make) an ATA for our recipient to hold this token too. -```typescript +Create an empty file called `transfer-tokens.ts`. Then replace +`YOUR_RECIPIENT_HERE` with your recipient public key and replace +`YOUR_TOKEN_MINT_ADDRESS_HERE` with your token mint address. + +```typescript filename="transfer-tokens.ts" import "dotenv/config"; import { getExplorerLink, @@ -803,7 +813,7 @@ const MINOR_UNITS_PER_MAJOR_UNITS = Math.pow(10, 2); console.log(`💸 Attempting to send 1 token to ${recipient.toBase58()}...`); -// Get or create the source and destination token accounts to store this token +// Get or create the source token account to store this token const sourceTokenAccount = await getOrCreateAssociatedTokenAccount( connection, sender, @@ -811,6 +821,7 @@ const sourceTokenAccount = await getOrCreateAssociatedTokenAccount( sender.publicKey, ); +// Get or create the destination token account to store this token const destinationTokenAccount = await getOrCreateAssociatedTokenAccount( connection, sender, @@ -830,7 +841,13 @@ const signature = await transfer( const explorerLink = getExplorerLink("transaction", signature, "devnet"); -console.log(`✅ Transaction confirmed, explorer link is: ${explorerLink}!`); +console.log(`✅ Transaction confirmed, explorer link is: ${explorerLink}`); +``` + +Run the script using `npx esrun transfer-tokens.ts`. You should see: + +```bash +✅ Transaction confirmed, explorer link is: https://explorer.solana.com/tx/SgV2j2DkaErYf7ERiB11USoZzGqAk8HPEqVJLP8HWdz9M61FSFgyEMXJycHQtfCooCAPBom7Vi3akEAwSUHQUsu?cluster=devnet ``` Open the Explorer link. You see your balance go down, and the recipient's @@ -838,7 +855,7 @@ balance go up! ### Challenge -Now it’s your turn to build something independently. Create an application that +Now it's your turn to build something independently. 
Create an application that allows a user to create a new mint, create a token account, and mint tokens. To interact with the Token Program using the wallet adapter, you will have to @@ -852,7 +869,7 @@ approval. 2. Create a new Token Mint in the `CreateMint` component. If you need a refresher on how to send transactions to a wallet for approval, have a look at the - [Wallets lesson](/developers/courses/intro-to-solana/interact-with-wallets). + [Wallets lesson](/content/courses/intro-to-solana/interact-with-wallets.md). When creating a new mint, the newly generated `Keypair` will also have to sign the transaction. When additional signers are required in addition to the diff --git a/content/guides/advanced/introduction-to-durable-nonces.md b/content/guides/advanced/introduction-to-durable-nonces.md index b8a604e4d..cdabdad89 100644 --- a/content/guides/advanced/introduction-to-durable-nonces.md +++ b/content/guides/advanced/introduction-to-durable-nonces.md @@ -1,6 +1,6 @@ --- date: 2024-06-29T00:00:00Z -difficulty: intermediate +difficulty: advanced title: "Durable & Offline Transaction Signing using Nonces" description: "One-stop shop for Solana's Durable Nonces: an easy way to power your Solana @@ -156,8 +156,8 @@ Now that we know what Durable Nonces are, it's time to use them to send durable transactions. > If you do not have the Solana CLI installed, please go through -> [this](https://docs.solana.com/cli/install-solana-cli-tools) tutorial and set -> up the CLI and a keypair with some airdropped SOL on devnet +> [this tutorial](/docs/intro/installation.md) and set up the Solana CLI and +> create a keypair with some airdropped SOL on devnet ### Create Nonce Authority @@ -219,7 +219,7 @@ AkrQn5QWLACSP5EMT2R1ZHyKaGWVFrDHJ6NL89HKtwjQ This is the base58 encoded hash that will be used in place of recent blockhashes while signing a transaction. -### Displace Nonce Account +### Display Nonce Account We can inspect the details of a Nonce Account in a prettier formatted version @@ -477,21 +477,23 @@ as we discussed before. This is done to avoid using the same nonce again. Voila, we've gone through a very real-life use case of Durable Nonces. Now let's see how to use them in transactions using JavaScript and the -[`@solana/web3.js`](https://solana-labs.github.io/solana-web3.js/) package. +[`@solana/web3.js`](https://solana-labs.github.io/solana-web3.js/v1.x/) package. -## Durable Nonces with Solana `web3.js` +## Durable Nonces with Solana Web3.js We'll use a similar example of making a simple transfer to demonstrate how to send transactions using durable nonces. -### Create Nonce Authority +### Create Nonce Authority (Web3.js) ```ts const nonceAuthKP = Keypair.generate(); -// airdrop some SOL into this account from https://solfaucet.com/ ``` -### Create Nonce Accounts +_If you need SOL, you can use the +[faucet.solana.com](https://faucet.solana.com/)_ to get some. + +### Create Nonce Accounts (Web3.js) ```ts const nonceKeypair = Keypair.generate(); @@ -533,7 +535,7 @@ const sig = await sendAndConfirmRawTransaction( console.log("Nonce initiated: ", sig); ``` -### Fetch Initialised Nonce Account +### Fetch Nonce Account (Web3.js) ```ts const accountInfo = await connection.getAccountInfo(nonceKeypair.publicKey); @@ -617,5 +619,5 @@ change will happen on-chain, and the winner can be decided. 
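To tie the Web3.js snippets above together, here is a minimal sketch (not taken verbatim from the guide's code) of how the fetched nonce is used in place of a recent blockhash, assuming `accountInfo`, `nonceKeypair`, and `nonceAuthKP` from the previous steps:

```typescript
import {
  Keypair,
  LAMPORTS_PER_SOL,
  NonceAccount,
  SystemProgram,
  Transaction,
} from "@solana/web3.js";

// Parse the raw account data to read the stored nonce value
const nonceAccount = NonceAccount.fromAccountData(accountInfo!.data);

const recipient = Keypair.generate().publicKey; // placeholder recipient

const transferTx = new Transaction();
transferTx.add(
  // A durable transaction must advance the nonce as its first instruction
  SystemProgram.nonceAdvance({
    noncePubkey: nonceKeypair.publicKey,
    authorizedPubkey: nonceAuthKP.publicKey,
  }),
  SystemProgram.transfer({
    fromPubkey: nonceAuthKP.publicKey,
    toPubkey: recipient,
    lamports: 0.01 * LAMPORTS_PER_SOL,
  }),
);

// Use the stored nonce instead of a recent blockhash; the signed transaction
// can now be held offline and submitted much later
transferTx.recentBlockhash = nonceAccount.nonce;
transferTx.feePayer = nonceAuthKP.publicKey;
transferTx.sign(nonceAuthKP);
```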
## References

- [Neodyme Blog: Nonce Upon a Time, or a Total Loss of Funds](https://neodyme.io/blog/nonce-upon-a-time/)
-- [Solana Durable Nonces CLI](https://docs.solana.com/offline-signing/durable-nonce)
-- [Solana Durable Transaction Nonces Proposal](https://docs.solana.com/implemented-proposals/durable-tx-nonces)
+- [Solana Durable Nonces CLI](https://docs.solanalabs.com/cli/examples/durable-nonce)
+- [Solana Durable Transaction Nonces Proposal](https://docs.solanalabs.com/implemented-proposals/durable-tx-nonces)
diff --git a/content/guides/advanced/stake-weighted-qos.md b/content/guides/advanced/stake-weighted-qos.md
index 7fe090140..149b5cca1 100644
--- a/content/guides/advanced/stake-weighted-qos.md
+++ b/content/guides/advanced/stake-weighted-qos.md
@@ -106,7 +106,7 @@ Stake-weighted QoS will not work unless BOTH sides are properly configured.

### Configuring the Validator node

-On the validator, you’ll have to enable
+On the validator, you'll have to enable
`--staked-nodes-overrides /path/to/overrides.yml`. The
`--staked-nodes-overrides` flag helps the validator prioritize transactions
being sent from known sources to apply stake to their transactions. This can
@@ -114,7 +114,7 @@ help a validator prioritize certain transactions from known hosts over others,
enabling the usage of Stake-weighted QoS with RPCs. RPCs should not be staked
in any way.

-Today, Stake-weighted QoS gives a stake-weighted priority to 80% of a leader’s
+Today, Stake-weighted QoS gives a stake-weighted priority to 80% of a leader's
TPU capacity. However, there are configuration options which can be used to
virtually assign different stake-weights to TPU peers, including assigning
unstaked peers virtual stake.
@@ -130,7 +130,7 @@ staked_map_id:

`staked_map_id` contains a map of identity public key to the stake amount in
lamports to apply to each RPC. When set, the validator will prioritize QUIC
connections with the RPC found at that identity publicKey, assigning an amount
-of stake to their transactions. The 80% of the leader’s TPU capacity will be
+of stake to their transactions. The 80% of the leader's TPU capacity will be
split proportionally based on the lamport amounts specified in the
`staked-nodes-overrides` file and existing cluster stake.
@@ -140,9 +140,8 @@

On the RPC you will have to use `--rpc-send-transaction-tpu-peer` to forward
transactions to a specific leader. The exact usage would be
`--rpc-send-transaction-tpu-peer HOST:PORT`. The Host is the IP address of the
leader you have the `staked-nodes-overrides` enabled on and the Port is the QUIC
-TPU port of that host. The QUIC TPU port number is the lowest value of your
-`--dynamic-port-range` plus 9. For example, if the flag is
-`--dynamic-port-range 8000-8100`, the QUIC TPU port is `8009`.
+TPU port of that host. The QUIC TPU port for a leader can be identified by
+making an RPC call to [getClusterNodes](/docs/rpc/http/getClusterNodes.mdx).

The peering would look like the following:
diff --git a/content/guides/advanced/verified-builds.md b/content/guides/advanced/verified-builds.md
new file mode 100644
index 000000000..f87473775
--- /dev/null
+++ b/content/guides/advanced/verified-builds.md
@@ -0,0 +1,553 @@
+---
+date: 2024-09-26T00:00:00Z
+difficulty: intermediate
+title: "How to Verify a Program"
+description:
+  "Verified builds are a way to link your program to its source code and let
+  everyone independently verify that the program was indeed built from that
+  provided source code."
+tags:
+  - web3js
+keywords:
+  - tutorial
+  - verified builds
+  - security.txt
+  - verified source code
+  - find a program's source code
+  - security
+  - blockchain tutorial
+---
+
+This guide is meant to be a reference for developers who want to implement
+verified builds for their programs on Solana. We will cover what verified builds
+are, how to use them, special considerations, and best practices to ensure the
+authenticity of your program onchain.
+
+# What are verified builds?
+
+Verified builds ensure that the executable program you deploy to Solana's
+network matches the source code in your repository. By doing this, developers
+and users can have confidence that the program running onchain corresponds
+exactly to the public codebase, promoting transparency and security.
+
+The verification process involves comparing the hash of the onchain program with
+the hash of the locally built program from the source code. This ensures no
+discrepancies between the two versions.
+
+> While a verified build should not be considered more secure than an unverified
+> build, it enables developers to self-verify that the source code matches what
+> is deployed onchain. Using the source code, a developer can then validate what
+> the code executes when sending a transaction.
+
+The verified builds pipeline was designed and is maintained by
+[Ellipsis Labs](https://ellipsislabs.xyz/) and [OtterSec](https://osec.io/). For
+more details, follow the guide in the
+[original verified builds](https://github.com/Ellipsis-Labs/solana-verifiable-build)
+repository, and look out for the verify build process moving directly into the
+[Anza](https://www.anza.xyz/) tool suite once it is supported there.
+
+# How does it work?
+
+The verification process is done by comparing the hash of the onchain program
+with the hash of the locally built program from the source code. You build your
+program in a controlled environment using the Solana Verify CLI and Docker. This
+ensures that the build process is deterministic and consistent across different
+systems. Once you have the executable, you can deploy it to the Solana network.
+During the build process, a
+[PDA](https://explorer.solana.com/address/63XDCHrwZu3mXsw2RUFb4tbNpChuUHx4eA5aJMnHkpQQ/anchor-account)
+of the [verify program](https://github.com/otter-sec/otter-verify) will be
+created. This PDA contains all the data necessary to verify the program: the
+program address, the git URL, the commit hash, and the arguments used to build
+the program.
+
+Using the data in this PDA, everyone can run the verify command locally and
+check whether the program was built from the provided source code. Everyone can
+then verify for themselves completely trustlessly, or run their own instance of
+the [verify API](https://github.com/otter-sec/solana-verified-programs-api)
+(maintained by [OtterSec](https://github.com/otter-sec)) to provide an easy
+access point for users to check the verification. You can already see these
+[API calls](https://verify.osec.io/status/PhoeNiXZ8ByJGLkxNfZRnkUfjvmuYqLR89jjFHGqdXY)
+being used in the
+[Solana Explorer](https://explorer.solana.com/address/E1fcPKuV1UJEsTJPpab2Jr8y87ZN73i4CHTPLbWQE6CA/verified-build)
+and
+[SolanaFM](https://solana.fm/address/E1fcPKuV1UJEsTJPpab2Jr8y87ZN73i4CHTPLbWQE6CA/transactions?cluster=mainnet-alpha),
+among other places.
+
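To get a feel for what is stored onchain, you can fetch the example verification
PDA linked above with plain `@solana/web3.js`. This is a minimal sketch that only
reads the raw account; decoding the account layout is left to the verify tooling:

```ts
import { clusterApiUrl, Connection, PublicKey } from "@solana/web3.js";

const connection = new Connection(clusterApiUrl("mainnet-beta"), "confirmed");

// The example verification PDA of the verify program linked above
const verifyPda = new PublicKey("63XDCHrwZu3mXsw2RUFb4tbNpChuUHx4eA5aJMnHkpQQ");

const accountInfo = await connection.getAccountInfo(verifyPda);
if (accountInfo) {
  // The raw account data holds the repository URL, commit hash,
  // and build arguments used for verification
  console.log("Owner (verify program):", accountInfo.owner.toBase58());
  console.log("Data length (bytes):", accountInfo.data.length);
}
```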
+# Why should I use verified builds?
+
+Using verified builds provides the following benefits:
+
+- Security: Guarantees that the program running onchain matches the source
+  code, preventing malicious alterations.
+
+- Transparency: Allows other users and developers to validate that the onchain
+  program is trustworthy by comparing it with the public codebase.
+
+- Trust: Increases user confidence, as verified builds demonstrate that your
+  program's onchain behavior is aligned with your public code. When building
+  verifiable programs, you minimize risks associated with running unauthorized
+  or malicious code. It also ensures you comply with best practices and gives
+  security researchers an easy way to contact you. Wallets and other tools can
+  also allow transactions from your program more easily as long as it is
+  verified.
+
+- Discoverability: When you provide a verified build of your program, everyone
+  can find your source code, docs, program SDK, or IDL, and they can also
+  easily contact you via GitHub in case there is an issue.
+
+# How do I create verified builds?
+
+To create verified builds, you'll need to follow these steps:
+
+Summary:
+
+- Commit your code to a public repository
+- Build a verified build in Docker
+- Deploy the verified build
+- Verify the deployed program against the public API
+
+If you try to verify a program that was not built in a Docker container, it will
+most likely fail because Solana program builds are not deterministic across
+different systems.
+
+<Steps>
+
+### Install Docker and Cargo
+
+Install the necessary tools: ensure you have Docker and Cargo installed. Docker
+provides a controlled build environment to ensure consistency, and Cargo is used
+for managing Rust packages.
+
+- Docker: Follow the steps on the
+  [Docker website](https://docs.docker.com/engine/install/) to install Docker
+  for your platform. Once installed, ensure the Docker service is running before
+  following this guide further.
+- Cargo: If you don't already have Cargo installed, you can install it by
+  running the following command:
+
+```bash
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+```
+
+### Install the Solana Verify CLI
+
+The Solana Verify CLI is the primary tool used to verify builds. Solana Verify
+CLI is currently maintained by [Ellipsis Labs](https://ellipsislabs.xyz/) and
+can be installed using Cargo.
+
+> The verify process will soon move into the [Anza](https://www.anza.xyz/) tool
+> suite. The general way of verifying builds will stay very similar though.
+
+You can install it by running:
+
+```bash
+cargo install solana-verify
+```
+
+If you need a specific version of the CLI, you can pin the version with:
+
+```bash
+cargo install solana-verify --version $VERSION
+```
+
+If desired, you can install a version directly from a specific commit:
+
+```bash
+cargo install solana-verify --git https://github.com/Ellipsis-Labs/solana-verifiable-build --rev 13a1db2
+```
+
+### Prepare project
+
+To verify against a repository, it needs to have a `Cargo.lock` file in its root
+directory. If you only have one program in your repository and a `Cargo.lock`
+file in your root, you can go directly to the next step and build your program.
+
+If your program is in a subfolder and you have a Rust workspace, you need to
+create a workspace `Cargo.toml` file in the root directory of your repository.
+
+You can use this `Cargo.toml` example as a preset:
+
+```toml filename="Cargo.toml"
+[workspace]
+members = ["program/programs/*"]
+resolver = "2"
+
+[profile.release]
+overflow-checks = true
+lto = "fat"
+codegen-units = 1
+
+[profile.release.build-override]
+opt-level = 3
+incremental = false
+codegen-units = 1
+```
+
+Make sure that your program is in the workspace `members` array and that the
+`Cargo.toml` of your program has the correct `lib` name configured.
+
+> Important: it is the `lib` name that counts, not the package name!
+
+Something like this:
+
+```toml filename="waffle/Cargo.toml"
+[package]
+name = "waffle"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+name = "waffle"
+crate-type = ["cdylib", "lib"]
+
+[dependencies]
+solana-program = "2.1.0"
+```
+
+In this [repository](https://github.com/solana-developers/verified-program) you
+can see an example of a workspace with a program in a subfolder. Notice also
+that when the program is in a subfolder, you later need to add this folder as
+`--mount-path` to the `verify-from-repo` command.
+
+In this [repository](https://github.com/solana-developers/solana-game-preset)
+you can find an Anchor example. In this
+[repository](https://github.com/solana-developers/verified-program-root) you can
+find a native Rust example.
+
+With this `Cargo.toml` file in place, you can then run `cargo generate-lockfile`
+to create a lock file and continue with building your program.
+
+### Building Verifiable Programs
+
+To verifiably build your Solana program, navigate to the directory containing
+your workspace's `Cargo.toml` file and run:
+
+```bash
+solana-verify build
+```
+
+This will copy your environment into a Docker container and build it in a
+deterministic way.
+
+> Make sure that you actually deploy the verified build and don't accidentally
+> overwrite it with `anchor build` or `cargo build-sbf`, since these will most
+> likely not result in the same hash, and thus your verification will fail.
+
+For projects with multiple programs, you can build a specific program by using
+the library name (not the package name):
+
+```bash
+solana-verify build --library-name $PROGRAM_LIB_NAME
+```
+
+This process ensures deterministic builds and can take some time, especially on
+certain systems (e.g., M1 MacBook) because it is running within a Docker
+container. For faster builds, using a Linux machine running x86 architecture is
+recommended.
+
+Once the build completes, you can retrieve the hash of the executable using the
+following command:
+
+```bash
+solana-verify get-executable-hash target/deploy/$PROGRAM_LIB_NAME.so
+```
+
+### Deploying Verifiable Programs
+
+Once you have built your program and retrieved its hash, you can deploy it to
+the Solana network. It is recommended to use a multi-signature or governance
+solution like [Squads Protocol](https://squads.so/protocol) for safe
+deployments, but you can also directly deploy with:
+
+```bash
+solana program deploy -u $NETWORK_URL target/deploy/$PROGRAM_LIB_NAME.so --program-id $PROGRAM_ID --with-compute-unit-price 50000 --max-sign-attempts 100 --use-rpc
+```
+
+You can request a currently appropriate priority fee from your RPC provider, for
+example [QuickNode](https://www.quicknode.com/gas-tracker/solana).
+
+To verify that the deployed program matches the built executable, run:
+
+```bash
+solana-verify get-program-hash -u $NETWORK_URL $PROGRAM_ID
+```
+
+> You may have different versions deployed on different
+> [Solana clusters](/docs/core/clusters.md) (i.e. devnet, testnet, mainnet).
+> Ensure you use the correct network URL for the desired Solana cluster you want
+> to verify a program against. Remote verification will only work on mainnet.
+
+Now you can already get the hash of your program and compare it to the binary
+hash from earlier if you want:
+
+```bash
+solana-verify get-program-hash $PROGRAM_ID
+```
+
+### Verifying against repositories
+
+To verify a program against its public repository, use:
+
+```bash
+solana-verify verify-from-repo -u $NETWORK_URL --program-id $PROGRAM_ID https://github.com/$REPO_PATH --commit-hash $COMMIT_HASH --library-name $PROGRAM_LIB_NAME --mount-path $MOUNT_PATH
+```
+
+> While you run the verified build in your program directory, when running
+> `verify-from-repo` you need to add the `--mount-path` flag. This will be the
+> path to the folder containing the `Cargo.toml` that contains your program's
+> library name.
+
+This command compares the onchain program hash with the executable hash built
+from the source at the specified commit hash.
+
+At the end, the command will ask you if you want to upload your verification
+data onchain. If you do, the Solana Explorer will immediately show your
+program's verification data. Until it has been verified by a remote build, it
+will show as unverified. Learn how you can verify your program against a public
+API in the next step.
+
+If you want to lock the verification to a certain release, you can append the
+`--commit-hash` flag to the command.
+
+### Verify against public API
+
+Finally, you can also verify the program directly against anyone that is running
+the verify API:
+
+```bash
+solana-verify verify-from-repo --remote -um --program-id PhoeNiXZ8ByJGLkxNfZRnkUfjvmuYqLR89jjFHGqdXY https://github.com/Ellipsis-Labs/phoenix-v1
+```
+
+> It is recommended to use a paid RPC URL, because otherwise you may run into
+> the rate limits of the free RPCs. So instead of `-um` you should use
+> `--url yourRpcUrl` for a more reliable verification.
+
+The `--remote` flag sends a build request to the OtterSec API, which triggers a
+remote build of your program. Once the build is complete, the system verifies
+that the onchain hash of your program matches the hash of the generated build
+artifact from your repository.
+
+The default is the
+[OtterSec API](https://github.com/otter-sec/solana-verified-programs-api).
+
+Once the build is done (which takes a while) and was successful, you will be
+able to see your program as verified in the
+[OtterSec API for single programs](https://verify.osec.io/status/PhoeNiXZ8ByJGLkxNfZRnkUfjvmuYqLR89jjFHGqdXY)
+and in the
+[Solana Explorer](https://explorer.solana.com/address/PhoeNiXZ8ByJGLkxNfZRnkUfjvmuYqLR89jjFHGqdXY/verified-build),
+[SolanaFM](https://solana.fm/address/PhoeNiXZ8ByJGLkxNfZRnkUfjvmuYqLR89jjFHGqdXY?cluster=mainnet-alpha),
+[SolScan](https://solscan.io/account/PhoeNiXZ8ByJGLkxNfZRnkUfjvmuYqLR89jjFHGqdXY#programVerification),
+and eventually also on the community-run website
+[SolanaVerify.org](https://www.solanaverify.org/) maintained by
+[0xDeep](https://x.com/0xDeep), in the
+[OtterSec verified programs API](https://verify.osec.io/verified-programs), and
+at last in the
+[Verified Programs Dune Dashboard](https://dune.com/jonashahn/verified-programs/dedf21e1-9b71-42c8-89f9-02ed94628657),
+contributing to a healthier Solana ecosystem.
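You can also check a program's verification status programmatically. A minimal
sketch, assuming a runtime with a global `fetch` (such as Node.js 18+), using
the same status endpoint and Phoenix program ID shown above:

```ts
const programId = "PhoeNiXZ8ByJGLkxNfZRnkUfjvmuYqLR89jjFHGqdXY";

// Query the OtterSec verify API for this program's verification status
const response = await fetch(`https://verify.osec.io/status/${programId}`);
const status = await response.json();
console.log(status);
```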
+
+</Steps>
+
+## Verify from docker image
+
+You can also verify your program against a Docker image by running the following
+command:
+
+```bash
+solana-verify verify-from-image -e \
+  examples/hello_world/target/deploy/hello_world.so -i \
+  ellipsislabs/hello_world_verifiable_build:latest -p \
+  2ZrriTQSVekoj414Ynysd48jyn4AX6ZF4TTJRqHfbJfn
+```
+
+This command loads up the image stored at
+`ellipsislabs/hello_world_verifiable_build:latest`, and verifies that the hash
+of the executable path in the container is the same as the hash of the on-chain
+program supplied to the command. Because the build was already uploaded to an
+image, there is no need for a full rebuild of the executable, which can take a
+long time.
+
+The Dockerfile that creates the image
+`ellipsislabs/hello_world_verifiable_build:latest` can be found in the Ellipsis
+Labs repository under
+[/examples/hello_world](https://github.com/Ellipsis-Labs/solana-verifiable-build/tree/master/examples/hello_world).
+
+Below is the expected output:
+
+```bash
+Verifying image: "ellipsislabs/hello_world_verifiable_build:latest", on network
+"https://api.mainnet-beta.solana.com" against program ID
+2ZrriTQSVekoj414Ynysd48jyn4AX6ZF4TTJRqHfbJfn
+Executable path in container: "examples/hello_world/target/deploy/hello_world.so"
+
+Executable hash:
+08d91368d349c2b56c712422f6d274a1e8f1946ff2ecd1dc3efc3ebace52a760
+Program hash:
+08d91368d349c2b56c712422f6d274a1e8f1946ff2ecd1dc3efc3ebace52a760
+Executable matches on-chain program data ✅
+```
+
+## Example verified build
+
+Here's an example of verifying an example program with the ID
+`FWEYpBAf9WsemQiNbAewhyESfR38GBBHLrCaU3MpEKWv` using the source code from this
+[repository](https://github.com/solana-developers/verified-program):
+
+```bash
+solana-verify verify-from-repo https://github.com/solana-developers/verified-program --url YOUR-RPC-URL --program-id FWEYpBAf9WsemQiNbAewhyESfR38GBBHLrCaU3MpEKWv --mount-path waffle --library-name waffle --commit-hash 5b82b86f02afbde330dff3e1847bed2d42069f4e
+```
+
+By default, the `verify-from-repo` command takes the last commit on the main
+branch. You can also pin the verification to a certain commit, in case you want
+to continue working on the repository, by using the `--commit-hash` parameter:
+`--commit-hash 5b82b86f02afbde330dff3e1847bed2d42069f4e`
+
+Finally, you can also verify the program directly against the OtterSec API:
+
+```bash
+solana-verify verify-from-repo https://github.com/solana-developers/verified-program --url YOUR-RPC-URL --remote --program-id FWEYpBAf9WsemQiNbAewhyESfR38GBBHLrCaU3MpEKWv --mount-path waffle --library-name waffle --commit-hash 5b82b86f02afbde330dff3e1847bed2d42069f4e
+```
+
+The `--remote` flag sends a build request to the OtterSec API, which triggers a
+remote build of your program. Once the build is complete, the system verifies
+that the onchain hash of your program matches the hash of the generated build
+artifact from your repository.
+
+# Conclusion
+
+Using [verified builds on Solana](/content/guides/advanced/verified-builds.md)
+ensures the integrity and trustworthiness of your programs on the network and
+allows developers to find your SDKs directly from a Solana Explorer. By
+leveraging tools like the Solana Verify CLI and Docker, you can maintain
+verifiable and secure builds that align with your source code. Always take the
+necessary precautions to use consistent environments, and consider governance
+solutions for safe upgrades and deployments.
+
+## Security + Disclaimer
+
+While verified builds are a powerful tool for ensuring the integrity of your
+Solana programs, the default setup is not completely trustless. The Docker
+images are built and hosted by the Solana Foundation.
+
+Be aware that you are building your project in a downloaded Docker image and
+that your whole setup gets copied into that Docker image for building, including
+potentially sensitive information.
+
+If you want to have a completely trustless setup, you can build the Docker
+images yourself and host them on your own infrastructure. This way you can be
+sure that the Docker images are not tampered with. You can find the scripts to
+create your own Docker images in the
+[Verified builds repository](https://github.com/Ellipsis-Labs/solana-verifiable-build),
+and you can fork it and run the GitHub Actions yourself or validate that they
+are correct.
+
+Furthermore, for the remote verification you are trusting the OtterSec API and
+the
+[Solana Explorer](https://explorer.solana.com/address/PhoeNiXZ8ByJGLkxNfZRnkUfjvmuYqLR89jjFHGqdXY)
+to a certain degree.
+
+The API or Solana Explorer may potentially display incorrect information if
+compromised.
+
+If you want a completely trustless setup, you can run the
+[Verify API](https://github.com/otter-sec/solana-verified-programs-api)
+yourself, or run the program verification locally using the `verify-from-repo`
+command with the onchain verification data that is saved in a
+[PDA](https://explorer.solana.com/address/63XDCHrwZu3mXsw2RUFb4tbNpChuUHx4eA5aJMnHkpQQ/anchor-account)
+derived from the program's deploy authority and the
+[verify program](https://explorer.solana.com/address/verifycLy8mB96wd9wqq3WDXQwM4oU6r42Th37Db9fC).
+
+The verify program is deployed by the [OtterSec team](https://osec.io/) and is
+not yet frozen, so it can be upgraded at any time.
+
+The Solana Foundation, OtterSec, and the Ellipsis Labs team are not responsible
+for any losses or damages that may occur from using the verified builds
+pipeline.
+
+# Security.txt for Solana programs
+
+In addition to verified builds, you can also add a `security.txt` file to your
+program. In the future, once implemented, the `security.txt` will hold the
+verifier public key for easy access to the verification data stored in the
+verification PDA. The PDA containing all the information needed to build and
+verify a program is derived from the program's address and the verifier pubkey.
+By default, this is the same pubkey that built and deployed the program, but it
+can also be another pubkey specified in the `security.txt`.
+
+The `security.txt` feature allows developers to embed contact and security
+information directly within their Solana smart contracts. Inspired by
+[securitytxt.org](https://securitytxt.org), this approach provides a
+standardized way for security researchers to reach out to project maintainers,
+even if they only know the contract's address.
+
+## Why use security.txt?
+
+For many projects, especially smaller or private ones, identifying the
+developers from just the contract address can be difficult and time-consuming.
+Embedding a `security.txt` file within the program ensures that security
+researchers can easily contact the correct people, potentially preventing
+exploits and ensuring timely bug reports.
+
+## How to implement security.txt
+
+To add a `security.txt` to your Solana program, follow these steps:
+
+Add the `solana-security-txt` dependency to your `Cargo.toml`:
+
+```toml filename="Cargo.toml"
+[dependencies]
+solana-security-txt = "1.1.1"
+```
+
+Use the `security_txt!` macro in your contract to define your security
+information. You can include contact details, project URLs, and even a security
+policy. Here's an example:
+
+```rust
+#[cfg(not(feature = "no-entrypoint"))]
+use {default_env::default_env, solana_security_txt::security_txt};
+
+#[cfg(not(feature = "no-entrypoint"))]
+security_txt! {
+    name: "MyProject",
+    project_url: "https://myproject.com",
+    contacts: "email:security@myproject.com,discord:security#1234",
+    policy: "https://myproject.com/security-policy",
+
+    // Optional Fields
+    preferred_languages: "en,de",
+    source_code: "https://github.com/solana-developers/solana-game-preset",
+    source_revision: "5vJwnLeyjV8uNJSp1zn7VLW8GwiQbcsQbGaVSwRmkE4r",
+    source_release: "",
+    encryption: "",
+    auditors: "Verifier pubkey: 5vJwnLeyjV8uNJSp1zn7VLW8GwiQbcsQbGaVSwRmkE4r",
+    acknowledgements: "Thank you to our bug bounty hunters!"
+}
+```
+
+Once the `security.txt` information is embedded in your program, it can be
+easily queried via tools like the Solana Explorer, ensuring that your contact
+and security details are available to anyone looking to report potential issues.
+
+## Best practices
+
+- Use Links: For information likely to change (e.g., contact details), it's
+  recommended to link to a web page rather than hard-coding it into the
+  contract. This avoids the need for frequent program upgrades.
+
+- Verification: Before deploying, verify the format and content using the
+  `query-security-txt` tool, which can validate both onchain programs and local
+  binaries:
+
+```bash
+query-security-txt target/bpfel-unknown-unknown/release/my_contract.so
+```
+
+By embedding security contact information directly into your contract, you make
+it easier for researchers to reach you, fostering better security and
+communication within the Solana ecosystem.
+
+This is
+[an example of how security.txt looks in the Solana Explorer](https://explorer.solana.com/address/HPxKXnBN4vJ8RjpdqDCU7gvNQHeeyGnSviYTJ4fBrDt4/security?cluster=devnet).
+
+The `security.txt` project is maintained by
+[Neodyme Labs](https://github.com/neodyme-labs).
diff --git a/content/guides/dapps/cash-app.md b/content/guides/dapps/cash-app.md
index a15b1a8f5..4a495f82d 100644
--- a/content/guides/dapps/cash-app.md
+++ b/content/guides/dapps/cash-app.md
@@ -400,7 +400,7 @@ Since we are directly manipulating the lamports in an account, we want to
ensure that the signer of the instruction is the same as the owner of the
account so that only the owner can call this instruction. This is why the
following validation check was implemented:
-`require!(cash_account.owner = ctx.accounts.signer, ErrorCode::InvalidSigner)`.
+`require!(cash_account.owner == ctx.accounts.signer, ErrorCode::InvalidSigner)`.

For error handling,
the `#[error_code]` Anchor macro is used, which generates
`Error` and `type Result<T> = Result<T, Error>` types to be used as return
@@ -1494,7 +1494,7 @@ const withdrawInstruction = await program.methods

**Additional documentation:**

- [Transactions and Instructions](https://solana.com/docs/core/transactions)
-- [Connection Class](https://solana-labs.github.io/solana-web3.js/classes/Connection.html)
+- [Connection Class](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html)
- Library for
  [wallets](https://github.com/solana-mobile/mobile-wallet-adapter/tree/main/android/walletlib)
  to provide the Mobile Wallet Adapter transaction signing services to dapps
diff --git a/content/guides/dapps/journal.md b/content/guides/dapps/journal.md
index 63275e353..4ddde0ede 100644
--- a/content/guides/dapps/journal.md
+++ b/content/guides/dapps/journal.md
@@ -212,10 +212,9 @@ In the above code, we used the following macros:

- `#[account(...)]` attribute macro then specifies additional constraints on
  the accounts

-Each journal entry is a Program Derived Address (
-[PDA](https://solanacookbook.com/core-concepts/pdas.html#facts)) that stores the
-entries state on-chain. Since we are creating a new journal entry here, it needs
-to be initialized using the `init_if_needed` constraint.
+Each journal entry is a Program Derived Address ([PDA](/docs/core/pda.md)) that
+stores the entry's state on-chain. Since we are creating a new journal entry
+here, it needs to be initialized using the `init_if_needed` constraint.

With Anchor, a PDA is initialized with the `seeds`, `bumps`, and
`init_if_needed` constraints. The `init_if_needed` constraint also requires the
diff --git a/content/guides/games/energy-system.md b/content/guides/games/energy-system.md
index 17ac844bf..ed1d8ec0c 100644
--- a/content/guides/games/energy-system.md
+++ b/content/guides/games/energy-system.md
@@ -227,24 +227,43 @@ directly use it in the game.

```js
useEffect(() => {
  if (!publicKey) {
+    console.log("Missing public key");
    return;
  }
+
  const [pda] = PublicKey.findProgramAddressSync(
    [Buffer.from("player", "utf8"), publicKey.toBuffer()],
    new PublicKey(LUMBERJACK_PROGRAM_ID),
  );
-  try {
-    program.account.playerData.fetch(pda).then(data => {
+
+  const fetchPlayerData = async () => {
+    try {
+      const data = await program.account.playerData.fetch(pda);
      setGameState(data);
-    });
-  } catch (e) {
-    window.alert("No player data found, please init!");
-  }
+    } catch (error) {
+      console.error("Error fetching player data:", error);
+      window.alert("No player data found, please init!");
+    }
+  };
+
+  fetchPlayerData();

-  connection.onAccountChange(pda, account => {
-    setGameState(program.coder.accounts.decode("playerData", account.data));
-  });
+  const handleAccountChange = (account: AccountInfo<Buffer>) => {
+    try {
+      const decodedData = program.coder.accounts.decode("playerData", account.data);
+      setGameState(decodedData);
+    } catch (error) {
+      console.error("Error decoding account data:", error);
+    }
+  };
+
+  const subscriptionId = connection.onAccountChange(pda, handleAccountChange);
+
+  return () => {
+    connection.removeAccountChangeListener(subscriptionId);
+  };
}, [publicKey]);
+
```

### Calculate energy and show count down
diff --git a/content/guides/games/hello-world.md b/content/guides/games/hello-world.md
index 9befc65a6..cff9a9c9f 100644
--- a/content/guides/games/hello-world.md
+++ b/content/guides/games/hello-world.md
@@ -25,9 +25,9 @@ keywords:

In this development guide, we will walk through a simple on-chain game using the
Solana blockchain.
This game, lovingly called _Tiny Adventure_, is a beginner-friendly Solana
program created using the
-[Anchor framework](/content/guides/getstarted/intro-to-anchor.md). The goal of
-this program is to show you how to create a simple game that allows players to
-track their position and move left or right.
+[Anchor framework](/docs/programs/anchor). The goal of this program is to show
+you how to create a simple game that allows players to track their position and
+move left or right.

> You can find the complete source code, available to deploy from your browser,
> in this
@@ -195,7 +195,7 @@ Alternatively, you can use the signer's address as an extra seed in the

### Move Left Instruction

-Now that we can initialize a `GameDataAccount` account, let’s implement the
+Now that we can initialize a `GameDataAccount` account, let's implement the
`move_left` instruction which allows a player to update their `player_position`.

In this example, moving left simply means decrementing the `player_position`
@@ -233,8 +233,8 @@ pub struct MoveLeft<'info> {

### Move Right Instruction

-Lastly, let’s implement the `move_right` instruction. Similarly, moving right
-will simply mean incrementing the `player_position` by 1. We’ll also limit the
+Lastly, let's implement the `move_right` instruction. Similarly, moving right
+will simply mean incrementing the `player_position` by 1. We'll also limit the
maximum position to 3.

Just like before, the only account needed for this instruction is the
@@ -381,7 +381,7 @@ file and add the code snippets from the following sections.

### Derive the GameDataAccount Account Address

-First, let’s derive the PDA for the `GameDataAccount` using the
+First, let's derive the PDA for the `GameDataAccount` using the
`findProgramAddress` function.

> A [Program Derived Address (PDA)](/docs/core/pda.md) is a unique address in the
@@ -398,7 +398,7 @@ const [globalLevel1GameDataAccount, bump] =

### Initialize the Game State

-Next, let’s try to fetch the game data account using the PDA from the previous
+Next, let's try to fetch the game data account using the PDA from the previous
step. If the account doesn't exist, we'll create it by invoking the `initialize`
instruction from our program.

@@ -460,8 +460,8 @@ console.log("Player position is:", gameDateAccount.playerPosition.toString());

### Logging the Player's Position

-Lastly, let’s use a `switch` statement to log the character's position based on
-the `playerPosition` value stored in the `gameDateAccount`. We’ll use this as a
+Lastly, let's use a `switch` statement to log the character's position based on
+the `playerPosition` value stored in the `gameDateAccount`. We'll use this as a
visual representation of the character's movement in the game.

```ts filename="client.ts"
diff --git a/content/guides/games/interact-with-tokens.md b/content/guides/games/interact-with-tokens.md
index 4273f3cdc..d36375972 100644
--- a/content/guides/games/interact-with-tokens.md
+++ b/content/guides/games/interact-with-tokens.md
@@ -1,7 +1,7 @@
---
date: 2024-04-25T00:00:00Z
difficulty: intermediate
-title: How interact with tokens in programs
+title: How to interact with tokens in programs
description: Learn how to use tokens in Solana games with an on-chain tutorial
tags:
  - games
@@ -88,13 +88,13 @@ pub mod anchor_token {
```

Here we are simply bringing into scope the crates and corresponding modules we
-will be using for this program.
We'll be using the `anchor_spl` and `mpl_token_metadata` crates to help us interact with the SPL Token program and Metaplex's Token Metadata program. ## Create Mint instruction -First, let’s implement an instruction to create a new token mint and its +First, let's implement an instruction to create a new token mint and its metadata account. The on-chain token metadata, including the name, symbol, and URI, will be provided as parameters to the instruction. @@ -107,7 +107,7 @@ The `create_mint` instruction requires the following accounts: - `admin` - the `ADMIN_PUBKEY` that signs the transaction and pays for the initialization of the accounts - `reward_token_mint` - the new token mint we are initializing, using a PDA as - both the mint account’s address and its mint authority + both the mint account's address and its mint authority - `metadata_account` - the metadata account we are initializing for the token mint - `token_program` - required for interacting with instructions on the Token @@ -211,7 +211,7 @@ pub struct CreateMint<'info> { The `create_mint` instruction creates a new token mint, using a Program Derived Address (PDA) as both the address of the token mint and its mint authority. The -instruction takes a URI (off-chain metadata), name, and symbol as parameters. +instruction takes a URI (offchain metadata), name, and symbol as parameters. This instruction then creates a metadata account for the token mint through a [Cross-Program Invocation (CPI)](/docs/core/cpi.md) calling the @@ -295,7 +295,7 @@ health by 10 and mints 1 token to the player's token account as a reward. The `kill_enemy` instruction requires the following accounts: - `player` - the player receiving the token -- `player_data` - the player data account storing the player’s current health +- `player_data` - the player data account storing the player's current health - `player_token_account` - the player's associated token account where tokens will be minted - `reward_token_mint` - the token mint account, specifying the type of token @@ -389,7 +389,7 @@ pub enum ErrorCode { ``` The player's health is reduced by 10 to represent the “battle with the enemy”. -We’ll also check the player's current health and return a custom Anchor error if +We'll also check the player's current health and return a custom Anchor error if the player has 0 health. The instruction then uses a cross-program invocation (CPI) to call the `mint_to` @@ -409,7 +409,7 @@ token and restore their health to its maximum value. The `heal` instruction requires the following accounts: - `player` - the player executing the healing action -- `player_data` - the player data account storing the player’s current health +- `player_data` - the player data account storing the player's current health - `player_token_account` - the player's associated token account where the tokens will be burned - `reward_token_mint` - the token mint account, specifying the type of token diff --git a/content/guides/games/nfts-in-games.md b/content/guides/games/nfts-in-games.md index 5aa0ab570..4e7e2924c 100644 --- a/content/guides/games/nfts-in-games.md +++ b/content/guides/games/nfts-in-games.md @@ -56,10 +56,9 @@ other assets for your game. GameShift offers these features: Using NFTs, you can conditionally gate access to a particular part of a game based on owning the NFT. This can form a more tight-knit community within your -game. 
In -[JavaScript](https://docs.solana.com/de/developing/clients/javascript-api) using -the [Metaplex SDK](https://github.com/metaplex-foundation/js#readme) this would -look like this: +game. In [JavaScript](/docs/clients/javascript.md) using the +[Metaplex SDK](https://github.com/metaplex-foundation/js#readme) this would look +like this: ```js JSON.parse( @@ -234,7 +233,7 @@ let uri = 'https://raw.githubusercontent.com/KhronosGroup/glTF-Sample-Models/mas let asset: Asset = await loader.load(uri); let gltf: GlTf = asset.gltf; console.log(gltf); -// -> {asset: {…}, scene: 0, scenes: Array(1), nodes: Array(2), meshes: Array(1), …} +// -> {asset: {...}, scene: 0, scenes: Array(1), nodes: Array(2), meshes: Array(1), ...} let data = await asset.accessorData(0); // fetches BoxTextured0.bin let image: Image = await asset.imageData.get(0) // fetches CesiumLogoFlat.png diff --git a/content/guides/games/store-sol-in-pda.md b/content/guides/games/store-sol-in-pda.md index a024af0f7..d472df7f7 100644 --- a/content/guides/games/store-sol-in-pda.md +++ b/content/guides/games/store-sol-in-pda.md @@ -180,7 +180,7 @@ displays the starting message. The `initialize_level_one` instruction requires 4 accounts: - `new_game_data_account` - the `GameDataAccount` we are initializing to store - the player’s position + the player's position - `chest_vault` - the `ChestVaultAccount` we are initializing to store the SOL reward - `signer` - the player paying for the initialization of the accounts diff --git a/content/guides/getstarted/cosmwasm-to-solana.md b/content/guides/getstarted/cosmwasm-to-solana.md index 5709298fc..04666aa67 100644 --- a/content/guides/getstarted/cosmwasm-to-solana.md +++ b/content/guides/getstarted/cosmwasm-to-solana.md @@ -408,7 +408,7 @@ pub fn process_reset( ## Solana Program Advantages 1. Performance Efficiency: - - Solana’s binary instruction data and direct account manipulation provide + - Solana's binary instruction data and direct account manipulation provide high performance and low latency. - This is critical for high-throughput applications like decentralized exchanges (DEXes) and other performance-sensitive use cases. @@ -424,6 +424,6 @@ pub fn process_reset( specialized logic. In conclusion, Solana is ideal for applications that require high performance, -low latency, and fine-grained control over execution. It’s better suited for +low latency, and fine-grained control over execution. It's better suited for developers comfortable with lower-level programming and those who need to optimize for specific use cases. diff --git a/content/guides/getstarted/full-stack-solana-development.md b/content/guides/getstarted/full-stack-solana-development.md index f5361a6f7..a6050f6b3 100644 --- a/content/guides/getstarted/full-stack-solana-development.md +++ b/content/guides/getstarted/full-stack-solana-development.md @@ -489,10 +489,10 @@ pub struct Counter { Make sure you go over the comments! -The `initialize` instruction instruction does only one this: it creates a new -account of the `Counter` type. To do this, we need to know who's paying, details -of the account we're creating like the space and the address, and which program -to use to create the account. +The `initialize` instruction does only one thing: it creates a new account of +the `Counter` type. To do this, we need to know who's paying, details of the +account we're creating like the space and the address, and which program to use +to create the account. 
Let's go line by line: @@ -1085,7 +1085,7 @@ Throw this command into your terminal to install all the `wallet-adapter` stuff we need: ```shell -yarn add react @solana/web3.js \ +yarn add react @solana/web3.js@1 \ @solana/wallet-adapter-base @solana/wallet-adapter-react \ @solana/wallet-adapter-react-ui @solana/wallet-adapter-wallets ``` @@ -1227,20 +1227,33 @@ export default function CounterState() { const [counterData, setCounterData] = useState(null); useEffect(() => { - // Fetch initial account data - program.account.counter.fetch(counterPDA).then(data => { - setCounterData(data); - }); + const fetchCounterData = async () => { + try { + // Fetch initial account data + const data = await program.account.counter.fetch(counterPDA); + setCounterData(data); + } catch (error) { + console.error("Error fetching counter data:", error); + } + }; + + fetchCounterData(); // Subscribe to account change const subscriptionId = connection.onAccountChange( // The address of the account we want to watch counterPDA, - // callback for when the account changes + // Callback for when the account changes accountInfo => { - setCounterData( - program.coder.accounts.decode("counter", accountInfo.data), - ); + try { + const decodedData = program.coder.accounts.decode( + "counter", + accountInfo.data, + ); + setCounterData(decodedData); + } catch (error) { + console.error("Error decoding account data:", error); + } }, ); @@ -1248,8 +1261,9 @@ export default function CounterState() { // Unsubscribe from account change connection.removeAccountChangeListener(subscriptionId); }; + // eslint-disable-next-line react-hooks/exhaustive-deps - }, [program]); + }, [program, counterPDA, connection]); // Render the value of the counter return
<p>Count: {counterData?.count?.toString()}</p>
; diff --git a/content/guides/getstarted/hello-world-in-your-browser.md b/content/guides/getstarted/hello-world-in-your-browser.md index 6c3d94d32..1fece1426 100644 --- a/content/guides/getstarted/hello-world-in-your-browser.md +++ b/content/guides/getstarted/hello-world-in-your-browser.md @@ -28,6 +28,8 @@ keywords: altRoutes: - /developers/guides/hello-world-in-your-browser - /developers/guides/solana-playground + - /developers/guides/solang/solang-getting-started + - /developers/guides/solang-getting-started --- For this "hello world" quickstart guide, we will use @@ -115,9 +117,8 @@ use solana_program::{ ### Write your program logic Every Solana program must define an `entrypoint` that tells the Solana runtime -where to start executing your onchain code. Your program's -[entrypoint](/docs/programs/lang-rust.md#program-entrypoint) should provide a -public function named `process_instruction`: +where to start executing your onchain code. Your program's entrypoint should +provide a public function named `process_instruction`: ```rust // declare and export the program's entrypoint @@ -141,8 +142,7 @@ Every onchain program should return the `Ok` [result enum](https://doc.rust-lang.org/std/result/) with a value of `()`. This tells the Solana runtime that your program executed successfully without errors. -Our program above will simply -[log a message](/docs/programs/debugging.md#logging) of "_Hello, world!_" to the +Our program above will simply log a message of "_Hello, world!_" to the blockchain cluster, then gracefully exit with `Ok(())`. ### Build your program @@ -322,4 +322,3 @@ your local development environment: - [Interacting with Tokens and NFTs](/developers/courses/tokens.md) - [Developer Guides](/developers/guides/) - [Developing Games](/content/guides/games/getting-started-with-game-development.md) -- [Learn more about developing Solana programs with Rust](/docs/programs/lang-rust.md) diff --git a/content/guides/getstarted/how-to-cpi-with-signer.md b/content/guides/getstarted/how-to-cpi-with-signer.md index 063559faa..5908d17f9 100644 --- a/content/guides/getstarted/how-to-cpi-with-signer.md +++ b/content/guides/getstarted/how-to-cpi-with-signer.md @@ -14,9 +14,8 @@ keywords: - tutorial --- -This guide uses the -[Anchor framework](/content/guides/getstarted/intro-to-anchor.md) to demonstrate -how to transfer SOL using a [Cross-Program Invocation (CPI)](/docs/core/cpi.md) +This guide uses the [Anchor framework](/docs/programs/anchor) to demonstrate how +to transfer SOL using a [Cross-Program Invocation (CPI)](/docs/core/cpi.md) where the sender is a PDA that the program must sign for. A typical use case for this scenario is a program that manages diff --git a/content/guides/getstarted/how-to-cpi.md b/content/guides/getstarted/how-to-cpi.md index e21105cc1..1a1b49283 100644 --- a/content/guides/getstarted/how-to-cpi.md +++ b/content/guides/getstarted/how-to-cpi.md @@ -13,9 +13,8 @@ keywords: - tutorial --- -This guide uses the -[Anchor framework](/content/guides/getstarted/intro-to-anchor.md) to demonstrate -how to transfer SOL using a [Cross Program Invocation (CPI)](/docs/core/cpi.md). +This guide uses the [Anchor framework](/docs/programs/anchor) to demonstrate how +to transfer SOL using a [Cross Program Invocation (CPI)](/docs/core/cpi.md). Included below are three different, but functionally equivalent implementations that you may come across when reading or writing Solana programs. 
Here is a final reference program on
diff --git a/content/guides/getstarted/how-to-create-a-token.md b/content/guides/getstarted/how-to-create-a-token.md
index 7a217e1a8..5a6b2543d 100644
--- a/content/guides/getstarted/how-to-create-a-token.md
+++ b/content/guides/getstarted/how-to-create-a-token.md
@@ -49,7 +49,7 @@ instead.

## Installing Solana Tools

First we need to download Solana tools to our system. Follow this guide to
-[install the Solana CLI](https://docs.solanalabs.com/cli/install).
+[install the Solana CLI](/docs/intro/installation.md).

## Create folder

@@ -264,15 +264,9 @@ created and uploaded.
spl-token initialize-metadata mntTymSqMU4e1NEDdxJ9XoPN4MitCgQ7xxGW6AuRAWQ 'Example token' 'EXMPL' https://raw.githubusercontent.com/solana-developers/opos-asset/main/assets/CompressedCoil/metadata.json
```

-Congratulations, you created a token with metadata! Look at your token in one of
-the Solana block explorers:
-
-- [Solana Explorer](https://explorer.solana.com/address/mntTymSqMU4e1NEDdxJ9XoPN4MitCgQ7xxGW6AuRAWQ?cluster=devnet)
-- [SolanaFM](https://solana.fm/?cluster=devnet-solana)
-- [SolScan](https://solscan.io/token/mntTymSqMU4e1NEDdxJ9XoPN4MitCgQ7xxGW6AuRAWQ?cluster=devnet)
-
-Make sure your explorer is set to devnet (if you are working on devnet) and
-replace the address to your mint address starting with `mnt`.
+Congratulations, you created a token with metadata! Look at your token's mint
+address (starting with `mnt`) in Solana Explorer - making sure to use devnet
+(if you are working on devnet).

![Token with metadata](/assets/guides/make-a-token/token-with-metadata.png)

diff --git a/content/guides/getstarted/intro-to-anchor.md b/content/guides/getstarted/intro-to-anchor.md
deleted file mode 100644
index 3012c23cb..000000000
--- a/content/guides/getstarted/intro-to-anchor.md
+++ /dev/null
@@ -1,750 +0,0 @@
----
-date: 2024-04-24T00:00:00Z
-difficulty: beginner
-title: "Getting Started with the Anchor Framework"
-description:
-  "This guide provides a basic overview of the Anchor framework. Anchor is a
-  very popular Rust framework for building Solana programs (known as 'smart
-  contracts' on other chains) that removes boilerplate, provides secure
-  defaults, and builds client programs automatically."
-tags:
-  - rust
-  - anchor
-keywords:
-  - tutorial
----
-
-The [Anchor framework](https://www.anchor-lang.com/) uses
-[Rust macros](https://doc.rust-lang.org/book/ch19-06-macros.html) to reduce
-boilerplate code and simplify the implementation of common security checks
-required for writing Solana programs.
-
-Think of Anchor as a framework for Solana programs much like Next.js is for web
-development. Just as Next.js allows developers to create websites using React
-instead of relying solely on HTML and TypeScript, Anchor provides a set of tools
-and abstractions that make building Solana programs more intuitive and secure.
-
-The main macros found in an Anchor program include:
-
-- [`declare_id`](#declare_id-macro): Specifies the program's on-chain address
-- [`#[program]`](#program-macro): Specifies the module containing the program’s
-  instruction logic
-- [`#[derive(Accounts)]`](#derive-accounts-macro): Applied to structs to
-  indicate a list of accounts required for an instruction
-- [`#[account]`](#account-macro): Applied to structs to create custom account
-  types specific to the program
-
-## Anchor Program
-
-Below is a simple Anchor program with a single instruction that creates a new
-account.
We'll walk through it to explain the basic structure of an Anchor
-program. Here is the program on
-[Solana Playground](https://beta.solpg.io/660f3a86cffcf4b13384d022).
-
-```rust filename="lib.rs"
-use anchor_lang::prelude::*;
-
-declare_id!("11111111111111111111111111111111");
-
-#[program]
-mod hello_anchor {
-    use super::*;
-    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
-        ctx.accounts.new_account.data = data;
-        msg!("Changed data to: {}!", data);
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(init, payer = signer, space = 8 + 8)]
-    pub new_account: Account<'info, NewAccount>,
-    #[account(mut)]
-    pub signer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-
-#[account]
-pub struct NewAccount {
-    data: u64,
-}
-```
-
-### declare_id macro
-
-The
-[`declare_id`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/attribute/account/src/lib.rs#L430)
-macro is used to specify the on-chain address of the program (program ID).
-
-```rust filename="lib.rs" {3}
-use anchor_lang::prelude::*;
-
-declare_id!("11111111111111111111111111111111");
-```
-
-When you build an Anchor program for the first time, the framework generates a
-new keypair used to deploy the program (unless specified otherwise). The public
-key from this keypair should be used as the program ID in the `declare_id`
-macro.
-
-- When using [Solana Playground](https://beta.solpg.io/), the program ID is
-  updated automatically for you and can be exported using the UI.
-- When building locally, the program keypair can be found in
-  `/target/deploy/your_program_name.json`
-
-### program macro
-
-The
-[`#[program]`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/attribute/program/src/lib.rs#L12)
-macro specifies the module containing all of your program's instructions. Each
-public function in the module represents a separate instruction for the program.
-
-In every function, the first parameter is always a `Context` type. Subsequent
-parameters, which are optional, define any additional `data` required by the
-instruction.
-
-```rust filename="lib.rs" {5, 8-12}
-use anchor_lang::prelude::*;
-
-declare_id!("11111111111111111111111111111111");
-
-#[program]
-mod hello_anchor {
-    use super::*;
-    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
-        ctx.accounts.new_account.data = data;
-        msg!("Changed data to: {}!", data);
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(init, payer = signer, space = 8 + 8)]
-    pub new_account: Account<'info, NewAccount>,
-    #[account(mut)]
-    pub signer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-
-#[account]
-pub struct NewAccount {
-    data: u64,
-}
-```
-
-The
-[`Context`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/src/context.rs#L24)
-type provides the instruction with access to the following non-argument inputs:
-
-```rust
-pub struct Context<'a, 'b, 'c, 'info, T> {
-    /// Currently executing program id.
-    pub program_id: &'a Pubkey,
-    /// Deserialized accounts.
-    pub accounts: &'b mut T,
-    /// Remaining accounts given but not deserialized or validated.
-    /// Be very careful when using this directly.
-    pub remaining_accounts: &'c [AccountInfo<'info>],
-    /// Bump seeds found during constraint validation. This is provided as a
-    /// convenience so that handlers don't have to recalculate bump seeds or
-    /// pass them in as arguments.
- pub bumps: BTreeMap<String, u8>,
-}
-```
-
-`Context` is a generic type where `T` represents the set of accounts required by
-an instruction. When defining the instruction's `Context`, the `T` type is a
-struct that implements the `Accounts` trait (`Context<T>`).
-
-This context parameter allows the instruction to access:
-
-- `ctx.accounts`: The instruction's accounts
-- `ctx.program_id`: The address of the program itself
-- `ctx.remaining_accounts`: All remaining accounts provided to the instruction
-  but not specified in the `Accounts` struct
-- `ctx.bumps`: Bump seeds for any
-  [Program Derived Address (PDA)](/docs/core/pda.md) accounts specified in the
-  `Accounts` struct
-
-### derive(Accounts) macro
-
-The
-[`#[derive(Accounts)]`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/derive/accounts/src/lib.rs#L630)
-macro is applied to a struct and implements the
-[`Accounts`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/src/lib.rs#L105)
-trait. This is used to specify and validate a set of accounts required for a
-particular instruction.
-
-```rust /Accounts/ {1}
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(init, payer = signer, space = 8 + 8)]
-    pub new_account: Account<'info, NewAccount>,
-    #[account(mut)]
-    pub signer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-```
-
-Each field in the struct represents an account that is required by an
-instruction. The naming of each field is arbitrary, but it is recommended to use
-a descriptive name that indicates the purpose of the account.
-
-```rust /signer/2 /new_account/ /system_program/
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(init, payer = signer, space = 8 + 8)]
-    pub new_account: Account<'info, NewAccount>,
-    #[account(mut)]
-    pub signer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-```
-
-When building Solana programs, it's essential to validate the accounts provided
-by the client. This validation is achieved in Anchor through account constraints
-and specifying appropriate account types:
-
-- [Account Constraints](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/syn/src/parser/accounts/constraints.rs):
-  Constraints define additional conditions that an account must satisfy to be
-  considered valid for the instruction. Constraints are applied using the
-  `#[account(..)]` attribute, which is placed above an account field in the
-  `Accounts` struct.
-
-  ```rust {3, 5}
-  #[derive(Accounts)]
-  pub struct Initialize<'info> {
-      #[account(init, payer = signer, space = 8 + 8)]
-      pub new_account: Account<'info, NewAccount>,
-      #[account(mut)]
-      pub signer: Signer<'info>,
-      pub system_program: Program<'info, System>,
-  }
-  ```
-
-- [Account Types](https://github.com/coral-xyz/anchor/tree/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/src/accounts):
-  Anchor provides various account types to help ensure that the account provided
-  by the client matches what the program expects.
-
-  ```rust /Account/2 /Signer/ /Program/
-  #[derive(Accounts)]
-  pub struct Initialize<'info> {
-      #[account(init, payer = signer, space = 8 + 8)]
-      pub new_account: Account<'info, NewAccount>,
-      #[account(mut)]
-      pub signer: Signer<'info>,
-      pub system_program: Program<'info, System>,
-  }
-  ```
-
-Accounts within the `Accounts` struct are accessible in an instruction through
-the `Context`, using the `ctx.accounts` syntax.
-
-```rust filename="lib.rs" /ctx.accounts.new_account/ /new_account/ /Initialize/ {15-22}
-use anchor_lang::prelude::*;
-
-declare_id!("11111111111111111111111111111111");
-
-#[program]
-mod hello_anchor {
-    use super::*;
-    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
-        ctx.accounts.new_account.data = data;
-        msg!("Changed data to: {}!", data);
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(init, payer = signer, space = 8 + 8)]
-    pub new_account: Account<'info, NewAccount>,
-    #[account(mut)]
-    pub signer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-
-#[account]
-pub struct NewAccount {
-    data: u64,
-}
-```
-
-When an instruction in an Anchor program is invoked, the program performs the
-following checks as specified in the `Accounts` struct:
-
-- Account Type Verification: It verifies that the accounts passed into the
-  instruction correspond to the account types defined in the instruction
-  Context.
-
-- Constraint Checks: It checks the accounts against any additional constraints
-  specified.
-
-This helps ensure that the accounts passed to the instruction from the client
-are valid. If any checks fail, then the instruction fails with an error before
-reaching the main logic of the instruction handler function.
-
-For more detailed examples, refer to the
-[constraints](https://www.anchor-lang.com/docs/account-constraints) and
-[account types](https://www.anchor-lang.com/docs/account-types) sections in the
-Anchor documentation.
-
-### account macro
-
-The
-[`#[account]`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/attribute/account/src/lib.rs#L66)
-macro is applied to structs to define the format of a custom data account type
-for a program. Each field in the struct represents a field that will be stored
-in the account data.
-
-```rust {3}
-#[account]
-pub struct NewAccount {
-    data: u64,
-}
-```
-
-This macro implements various traits
-[detailed here](https://docs.rs/anchor-lang/latest/anchor_lang/attr.account.html).
-The key functionalities of the `#[account]` macro include:
-
-- [Assign Ownership](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/attribute/account/src/lib.rs#L119-L132):
-  When creating an account, the ownership of the account is automatically
-  assigned to the program specified in the `declare_id`.
-- [Set Discriminator](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/attribute/account/src/lib.rs#L101-L117):
-  A unique 8-byte discriminator, specific to the account type, is added as the
-  first 8 bytes of account data during its initialization. This helps in
-  differentiating account types and account validation.
-- [Data Serialization and Deserialization](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/lang/attribute/account/src/lib.rs#L202-L246):
-  The account data corresponding to the account type is automatically serialized
-  and deserialized.
-
-```rust filename="lib.rs" /data/2,6 /NewAccount/ {24-27}
-use anchor_lang::prelude::*;
-
-declare_id!("11111111111111111111111111111111");
-
-#[program]
-mod hello_anchor {
-    use super::*;
-    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
-        ctx.accounts.new_account.data = data;
-        msg!("Changed data to: {}!", data);
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(init, payer = signer, space = 8 + 8)]
-    pub new_account: Account<'info, NewAccount>,
-    #[account(mut)]
-    pub signer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-
-#[account]
-pub struct NewAccount {
-    data: u64,
-}
-```
-
-In Anchor, an account discriminator is an 8-byte identifier, unique to each
-account type. This identifier is derived from the first 8 bytes of the SHA256
-hash of the account type's name. The first 8 bytes in an account's data are
-specifically reserved for this discriminator.
-
-```rust /8/1
-#[account(init, payer = signer, space = 8 + 8)]
-pub new_account: Account<'info, NewAccount>,
-```
-
-The discriminator is used during the following two scenarios:
-
-- Initialization: During the initialization of an account, the discriminator is
-  set with the account type's discriminator.
-- Deserialization: When account data is deserialized, the discriminator within
-  the data is checked against the expected discriminator of the account type.
-
-If there's a mismatch, it indicates that the client has provided an unexpected
-account. This mechanism serves as an account validation check in Anchor
-programs, ensuring the correct and expected accounts are used.
-
-## IDL File
-
-When an Anchor program is built, Anchor generates an interface description
-language (IDL) file representing the structure of the program. This IDL file
-provides a standardized JSON-based format for building program instructions and
-fetching program accounts.
-
-Below are examples of how an IDL file relates to the program code.
-
-### Instructions
-
-The `instructions` array in the IDL corresponds with the instructions on the
-program and specifies the required accounts and parameters for each instruction.
-
-```json filename="IDL.json" {6,8-10, 12}
-{
-  "version": "0.1.0",
-  "name": "hello_anchor",
-  "instructions": [
-    {
-      "name": "initialize",
-      "accounts": [
-        { "name": "newAccount", "isMut": true, "isSigner": true },
-        { "name": "signer", "isMut": true, "isSigner": true },
-        { "name": "systemProgram", "isMut": false, "isSigner": false }
-      ],
-      "args": [{ "name": "data", "type": "u64" }]
-    }
-  ],
-  "accounts": [
-    {
-      "name": "NewAccount",
-      "type": {
-        "kind": "struct",
-        "fields": [{ "name": "data", "type": "u64" }]
-      }
-    }
-  ]
-}
-```
-
-```rust filename="lib.rs" {8, 18, 20, 21}
-use anchor_lang::prelude::*;
-
-declare_id!("11111111111111111111111111111111");
-
-#[program]
-mod hello_anchor {
-    use super::*;
-    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
-        ctx.accounts.new_account.data = data;
-        msg!("Changed data to: {}!", data);
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(init, payer = signer, space = 8 + 8)]
-    pub new_account: Account<'info, NewAccount>,
-    #[account(mut)]
-    pub signer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-
-#[account]
-pub struct NewAccount {
-    data: u64,
-}
-```
-
-### Accounts
-
-The `accounts` array in the IDL corresponds with structs in the program
-annotated with the `#[account]` macro, which specifies the structure of the
-program's data accounts.
-
-```json filename="IDL.json" {16-22}
-{
-  "version": "0.1.0",
-  "name": "hello_anchor",
-  "instructions": [
-    {
-      "name": "initialize",
-      "accounts": [
-        { "name": "newAccount", "isMut": true, "isSigner": true },
-        { "name": "signer", "isMut": true, "isSigner": true },
-        { "name": "systemProgram", "isMut": false, "isSigner": false }
-      ],
-      "args": [{ "name": "data", "type": "u64" }]
-    }
-  ],
-  "accounts": [
-    {
-      "name": "NewAccount",
-      "type": {
-        "kind": "struct",
-        "fields": [{ "name": "data", "type": "u64" }]
-      }
-    }
-  ]
-}
-```
-
-```rust filename="lib.rs" {24-27}
-use anchor_lang::prelude::*;
-
-declare_id!("11111111111111111111111111111111");
-
-#[program]
-mod hello_anchor {
-    use super::*;
-    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
-        ctx.accounts.new_account.data = data;
-        msg!("Changed data to: {}!", data);
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(init, payer = signer, space = 8 + 8)]
-    pub new_account: Account<'info, NewAccount>,
-    #[account(mut)]
-    pub signer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-
-#[account]
-pub struct NewAccount {
-    data: u64,
-}
-
-```
-
-## Client
-
-Anchor provides a TypeScript client library
-([`@coral-xyz/anchor`](https://github.com/coral-xyz/anchor/tree/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor))
-that simplifies the process of interacting with Solana programs from the client.
-
-To use the client library, you first need to set up an instance of a
-[`Program`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/index.ts#L58)
-using the IDL file generated by Anchor.
-
-### Client Program
-
-Creating an instance of the `Program` requires the program's IDL, its on-chain
-address (`programId`), and an
-[`AnchorProvider`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/provider.ts#L55).
-An `AnchorProvider` combines two things:
-
-- `Connection` - the connection to a [Solana cluster](/docs/core/clusters.md)
-  (e.g. localhost, devnet, mainnet)
-- `Wallet` - (optional) a default wallet used to pay and sign transactions
-
-When building an Anchor program locally, the setup for creating an instance of
-the `Program` is done automatically in the test file. The IDL file can be found
-in the `/target` folder.
-
-```typescript showLineNumbers
-import * as anchor from "@coral-xyz/anchor";
-import { Program, BN } from "@coral-xyz/anchor";
-import { HelloAnchor } from "../target/types/hello_anchor";
-
-const provider = anchor.AnchorProvider.env();
-anchor.setProvider(provider);
-const program = anchor.workspace.HelloAnchor as Program<HelloAnchor>;
-```
-
-When integrating with a frontend using the
-[wallet adapter](https://solana.com/developers/guides/wallets/add-solana-wallet-adapter-to-nextjs),
-you'll need to manually set up the `AnchorProvider` and `Program`.
-
-```ts {9-10, 13}
-import { Program, Idl, AnchorProvider, setProvider } from "@coral-xyz/anchor";
-import { useAnchorWallet, useConnection } from "@solana/wallet-adapter-react";
-import { PublicKey } from "@solana/web3.js";
-import { IDL, HelloAnchor } from "./idl";
-
-const { connection } = useConnection();
-const wallet = useAnchorWallet();
-
-const provider = new AnchorProvider(connection, wallet, {});
-setProvider(provider);
-
-const programId = new PublicKey("...");
-const program = new Program<HelloAnchor>(IDL, programId);
-```
-
-Alternatively, you can create an instance of the `Program` using only the IDL
-and the `Connection` to a Solana cluster. 
This means there is no default
-`Wallet`, but it still allows you to use the `Program` to fetch accounts before
-a wallet is connected.
-
-```ts {8-10}
-import { Program } from "@coral-xyz/anchor";
-import { clusterApiUrl, Connection, PublicKey } from "@solana/web3.js";
-import { IDL, HelloAnchor } from "./idl";
-
-const programId = new PublicKey("...");
-const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
-
-const program = new Program<HelloAnchor>(IDL, programId, {
-  connection,
-});
-```
-
-### Invoke Instructions
-
-Once the `Program` is set up, you can use the Anchor
-[`MethodsBuilder`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/methods.ts#L155)
-to build an instruction, a transaction, or build and send a transaction. The
-basic format looks like this:
-
-- `program.methods` - This is the builder API for creating instruction calls
-  related to the program's IDL
-- `.instructionName` - Specific instruction from the program IDL, passing in any
-  instruction data as comma-separated values
-- `.accounts` - Pass in the address of each account required by the instruction
-  as specified in the IDL
-- `.signers` - Optionally pass in an array of keypairs required as additional
-  signers by the instruction
-
-```ts
-await program.methods
-  .instructionName(instructionData1, instructionData2)
-  .accounts({})
-  .signers([])
-  .rpc();
-```
-
-Below are examples of how to invoke an instruction using the methods builder.
-
-#### rpc()
-
-The
-[`rpc()`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/methods.ts#L283)
-method
-[sends a signed transaction](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/rpc.ts#L29)
-with the specified instruction and returns a `TransactionSignature`. When using
-`.rpc`, the `Wallet` from the `Provider` is automatically included as a signer.
-
-```ts {13}
-// Generate keypair for the new account
-const newAccountKp = new Keypair();
-
-const data = new BN(42);
-const transactionSignature = await program.methods
-  .initialize(data)
-  .accounts({
-    newAccount: newAccountKp.publicKey,
-    signer: wallet.publicKey,
-    systemProgram: SystemProgram.programId,
-  })
-  .signers([newAccountKp])
-  .rpc();
-```
-
-#### transaction()
-
-The
-[`transaction()`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/methods.ts#L382)
-method
-[builds a `Transaction`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/transaction.ts#L18-L26)
-and adds the specified instruction to the transaction (without automatically
-sending).
-
-```ts {12} /transaction/1,2,4
-// Generate keypair for the new account
-const newAccountKp = new Keypair();
-
-const data = new BN(42);
-const transaction = await program.methods
-  .initialize(data)
-  .accounts({
-    newAccount: newAccountKp.publicKey,
-    signer: wallet.publicKey,
-    systemProgram: SystemProgram.programId,
-  })
-  .transaction();
-
-const transactionSignature = await connection.sendTransaction(transaction, [
-  wallet.payer,
-  newAccountKp,
-]);
-```
-
-#### instruction()
-
-The
-[`instruction()`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/methods.ts#L348)
-method
-[builds a `TransactionInstruction`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/instruction.ts#L57-L61)
-using the specified instruction. This is useful if you want to manually add the
-instruction to a transaction and combine it with other instructions.
-
-```ts {12} /instruction/
-// Generate keypair for the new account
-const newAccountKp = new Keypair();
-
-const data = new BN(42);
-const instruction = await program.methods
-  .initialize(data)
-  .accounts({
-    newAccount: newAccountKp.publicKey,
-    signer: wallet.publicKey,
-    systemProgram: SystemProgram.programId,
-  })
-  .instruction();
-
-const transaction = new Transaction().add(instruction);
-
-const transactionSignature = await connection.sendTransaction(transaction, [
-  wallet.payer,
-  newAccountKp,
-]);
-```
-
-### Fetch Accounts
-
-The client `Program` also allows you to easily fetch and filter program
-accounts. Simply use `program.account` and then specify the name of the account
-type in the IDL. Anchor then deserializes and returns all accounts as specified.
-
-#### all()
-
-Use
-[`all()`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/account.ts#L251)
-to fetch all existing accounts for a specific account type.
-
-```ts /all/
-const accounts = await program.account.newAccount.all();
-```
-
-#### memcmp
-
-Use `memcmp` to filter for accounts storing data that matches a specific value
-at a specific offset. When calculating the offset, remember that the first 8
-bytes are reserved for the account discriminator in accounts created through an
-Anchor program. Using `memcmp` requires you to understand the byte layout of the
-data field for the account type you are fetching.
-
-```ts /memcmp/
-const accounts = await program.account.newAccount.all([
-  {
-    memcmp: {
-      offset: 8,
-      bytes: "",
-    },
-  },
-]);
-```
-
-#### fetch()
-
-Use
-[`fetch()`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/account.ts#L165)
-to get the account data for a specific account by passing in the account
-address:
-
-```ts /fetch/
-const account = await program.account.newAccount.fetch(ACCOUNT_ADDRESS);
-```
-
-#### fetchMultiple()
-
-Use
-[`fetchMultiple()`](https://github.com/coral-xyz/anchor/blob/852fcc77beb6302474a11e0f8e6f1e688021be36/ts/packages/anchor/src/program/namespace/account.ts#L200)
-to get the account data for multiple accounts by passing in an array of account
-addresses:
-
-```ts /fetchMultiple/
-const accounts = await program.account.newAccount.fetchMultiple([
-  ACCOUNT_ADDRESS_ONE,
-  ACCOUNT_ADDRESS_TWO,
-]);
-```
diff --git a/content/guides/getstarted/intro-to-native-rust.md b/content/guides/getstarted/intro-to-native-rust.md
index 27e508e67..ed8d95b7b 100644
--- a/content/guides/getstarted/intro-to-native-rust.md
+++ b/content/guides/getstarted/intro-to-native-rust.md
@@ -16,7 +16,7 @@ To write Solana programs without leveraging the Anchor framework, we use the
 This is the base library for writing onchain programs in Rust.
 
 For beginners, it is recommended to start with the
-[Anchor framework](/content/guides/getstarted/intro-to-anchor.md).
+[Anchor framework](/docs/programs/anchor).
 
 ## Program
 
@@ -326,7 +326,7 @@ pub struct NewAccount {
 
 Interacting with Solana programs written in native Rust involves directly
 building the
-[`TransactionInstruction`](https://solana-labs.github.io/solana-web3.js/classes/TransactionInstruction.html).
+[`TransactionInstruction`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/TransactionInstruction.html).
 Similarly, fetching and deserializing account data requires creating a schema
 compatible with the on-chain program's data structures.
 
diff --git a/content/guides/getstarted/local-rust-hello-world.md b/content/guides/getstarted/local-rust-hello-world.md
index 7b065d4a9..56da11d31 100644
--- a/content/guides/getstarted/local-rust-hello-world.md
+++ b/content/guides/getstarted/local-rust-hello-world.md
@@ -57,9 +57,9 @@ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
 
 ## Run your localhost validator
 
 The Solana CLI comes with the
-[test validator](https://docs.solana.com/developing/test-validator) built in.
-This command line tool will allow you to run a full blockchain cluster on your
-machine.
+[test validator](https://docs.solanalabs.com/cli/examples/test-validator) built
+in. This command line tool will allow you to run a full blockchain cluster on
+your machine.
 
 ```shell
 solana-test-validator
```

@@ -81,8 +81,8 @@ solana config set --url localhost
 
 ## Create a new Rust library with Cargo
 
 Solana programs written in Rust are _libraries_ which are compiled to
-[BPF bytecode](https://docs.solana.com/developing/on-chain-programs/faq#berkeley-packet-filter-bpf)
-and saved in the `.so` format.
+[BPF bytecode](/docs/programs/faq.md#berkeley-packet-filter-bpf) and saved in
+the `.so` format.
 
 Initialize a new Rust library named `hello_world` via the Cargo command line:
 
@@ -143,8 +143,8 @@ use solana_program::{
 
 Every Solana program must define an `entrypoint` that tells the Solana runtime
 where to start executing your onchain code. 
Your program's
-[entrypoint](https://docs.solana.com/developing/on-chain-programs/developing-rust#program-entrypoint)
-should provide a public function named `process_instruction`:
+[entrypoint](/docs/programs/lang-rust.md#program-entrypoint) should provide a
+public function named `process_instruction`:
 
 ```rust
 // declare and export the program's entrypoint
@@ -169,9 +169,8 @@ Every onchain program should return the `Ok`
 tells the Solana runtime that your program executed successfully without
 errors.
 
 This program above will simply
-[log a message](https://docs.solana.com/developing/on-chain-programs/debugging#logging)
-of "_Hello, world!_" to the blockchain cluster, then gracefully exit with
-`Ok(())`.
+[log a message](/docs/programs/debugging.md#logging) of "_Hello, world!_" to the
+blockchain cluster, then gracefully exit with `Ok(())`.
 
 ## Build your Rust program
 
@@ -202,8 +201,8 @@ solana program deploy ./target/deploy/hello_world.so
 ```
 
 Once your Solana program has been deployed (and the transaction
-[finalized](https://docs.solana.com/cluster/commitments)), the above command
-will output your program's public address (aka its "program id").
+[finalized](https://docs.solanalabs.com/consensus/commitments)), the above
+command will output your program's public address (aka its "program id").
 
 ```shell
 # example output
@@ -229,8 +228,8 @@ library.
 
 ### Install Node.js
 
 To use node in WSL2 on Windows, please follow this
-[guide to installing node in WSL2](https://learn.microsoft.com/en-us/windows/dev-environment/javascript/nodejs-on-wsl)
-to install node.
+[guide to installing node in WSL2](https://learn.microsoft.com/en-us/windows/dev-environment/javascript/nodejs-on-wsl) to
+install node.
 
 ```shell
 sudo apt-get install curl
@@ -252,7 +251,7 @@ For macOS you can
 
 Install the Solana web3.js library and the Solana helpers library:
 
 ```shell
-npm install @solana/web3.js @solana-developers/helpers
+npm install @solana/web3.js@1 @solana-developers/helpers@2
 ```
 
 Create a new file called `client.mjs` and add the following code:
@@ -307,7 +306,7 @@ await connection.confirmTransaction({
 });
 
 console.log(
-  `Congratulations! Look at your ‘Hello World’ transaction in the Solana Explorer:
+  `Congratulations! Look at your 'Hello World' transaction in the Solana Explorer:
   https://explorer.solana.com/tx/${txHash}?cluster=custom`,
 );
 ```
@@ -326,7 +325,7 @@ node client.mjs
 
 You should see the following output:
 
 ```shell
-Congratulations! Look at your ‘Hello World’ transaction in the Solana Explorer:
+Congratulations! Look at your 'Hello World' transaction in the Solana Explorer:
 https://explorer.solana.com/tx/2fTcQ74z4DVi8WRuf2oNZ36z7k9tGRThaRPXBMYgjMUNUbUSKLrP6djpRUZ8msuTXvZHFe3UXi31dfgytG2aJZbv?cluster=custom
 ```
 
@@ -363,7 +362,7 @@ transaction on the Solana blockchain. 
See the links below to learn more about writing Rust based Solana programs:
 
-- [Overview of writing Solana programs](https://docs.solana.com/developing/on-chain-programs/overview)
+- [Overview of writing Solana programs](/docs/programs/overview.md)
 - [Solana Quick Start Guide](https://solana.com/docs/intro/quick-start)
-- [Learn more about developing Solana programs with Rust](https://docs.solana.com/developing/on-chain-programs/developing-rust)
-- [Debugging onchain programs](https://docs.solana.com/developing/on-chain-programs/debugging)
+- [Learn more about developing Solana programs with Rust](/docs/programs/lang-rust.md)
+- [Debugging onchain programs](/docs/programs/debugging.md)
diff --git a/content/guides/getstarted/rust-to-solana.md b/content/guides/getstarted/rust-to-solana.md
index 3d6995bf5..c1d9d75a1 100644
--- a/content/guides/getstarted/rust-to-solana.md
+++ b/content/guides/getstarted/rust-to-solana.md
@@ -35,13 +35,13 @@ need to know to start their Solana journeys.
 ## Understanding the Core Differences
 
 First, note that this guide aims at understanding the differences in using Rust
-as a language when working with Solana. It won’t cover
+as a language when working with Solana. It won't cover
 [Blockchain or Solana basics](https://solana.com/learn/blockchain-basics).
 
-It also won’t cover core Solana concepts that must be understood in order to
+It also won't cover core Solana concepts that must be understood in order to
 program in Solana, such as:
 
-- [Programs](https://solana.com/docs/core/programs) - Solana’s version of smart
+- [Programs](https://solana.com/docs/core/programs) - Solana's version of smart
   contracts
 - [Accounts](https://solana.com/docs/core/accounts) - A record in the Solana
   ledger that either holds data (a data account) or is an executable program
@@ -53,7 +53,7 @@ program in Solana, such as:
 
 For more information on those core concepts, check out the
 [Solana developer documentation](https://solana.com/docs).
 
-Let’s now look at the differences in **project setup**.
+Let's now look at the differences in **project setup**.
 
 ## Key Setup Details
 
@@ -152,7 +152,7 @@ Using an additional crate that depends on `rand` will also cause compile
 errors.
 
 However, if the crate used simply depends on `rand` but does not actually
 generate random numbers, then it is possible to work around this by adding the
-following to the program’s Cargo.toml:
+following to the program's Cargo.toml:
 
 ```toml
 [dependencies]
@@ -222,7 +222,7 @@ allows developers to develop and deploy Solana programs.
 
 ![Solana Playground](/assets/guides/rust-to-solana/solana-playground.png)
 
-It’s the easiest way to begin developing with Solana, and it supports building,
+It's the easiest way to begin developing with Solana, and it supports building,
 testing, and deploying Solana Rust programs. Additionally, a number of built-in
 tutorials are available to guide learning.
 
@@ -237,17 +237,16 @@ boilerplate code, speeding up the development cycle. Additionally, it provides
 some security checks by default, making Solana programs more secure.
 
 To create a new program, simply
-[create a new Anchor project](https://solana.com/developers/guides/getstarted/intro-to-anchor)
-in the Solana playground.
+[create a new Anchor project](/docs/programs/anchor) in the Solana playground.
 
 Alternatively,
 [install the Anchor CLI](https://www.anchor-lang.com/docs/installation)
 locally, and then use `anchor init <project-name>` to create a new Anchor
 project.
-## Creating Off-chain Programs
+## Creating Offchain Programs
 
 So far, this guide has covered the key details of developing **onchain Solana
-programs** in Rust. However, it’s also possible to develop **off-chain Solana
+programs** in Rust. However, it's also possible to develop **offchain Solana
 clients** in Rust. This can be done by using the
 [solana_sdk crate](https://docs.rs/solana-sdk/latest/solana_sdk/). This contains
 the [solana_client crate](https://docs.rs/solana-client/latest/solana_client/)
@@ -257,7 +256,7 @@ that allows Rust programs to interact with a Solana node via the
 
 Another option is to use the
 [anchor_client crate](https://docs.rs/anchor-client/latest/anchor_client/)
 which interacts with Solana programs written in Anchor via RPC. Alternatively,
-consider writing onchain programs in Rust, and off-chain
+consider writing onchain programs in Rust, and offchain
 [clients in JS/TS](https://solana.com/de/docs/clients/javascript-reference).
 
 ## Wrap Up
 
@@ -265,9 +264,6 @@ This guide has covered the basics of developing for Solana with Rust, from
 setup details and restrictions to development environments and frameworks.
 
-For more Rust-related Solana resources, check out the
-[Developing with Rust page](https://solana.com/docs/programs/lang-rust).
-
 For other Solana program examples written with Rust, check out these
 [examples on GitHub](https://github.com/solana-labs/solana-program-library/tree/master/examples/rust).
 
diff --git a/content/guides/getstarted/scaffold-nextjs-anchor.md b/content/guides/getstarted/scaffold-nextjs-anchor.md
index 5adfef9a4..6af833062 100644
--- a/content/guides/getstarted/scaffold-nextjs-anchor.md
+++ b/content/guides/getstarted/scaffold-nextjs-anchor.md
@@ -57,7 +57,7 @@ If you haven't installed Solana CLI, Rust, or Anchor before, you can easily do
 so by
 [following our helpful installation guide](https://solana.com/docs/intro/installation)
 
-> This scaffolds only supports TypeScript for now, but don’t worry, TypeScript
+> This scaffold only supports TypeScript for now, but don't worry, TypeScript
 > simply extends on the JavaScript you already know to add helpful type
 > definitions.
 
@@ -255,7 +255,7 @@ supports shared layouts, nested routing, loading states, and error handling.
 
 The `solana-provider.tsx` already has all the wallet features you need. It
 handles auto connects of Solana wallets easily, and you can move to multiple
 components of your web application with the wallet states managed. This NextJS
-app is using `[@tanstack/react-query](`https://tanstack.com/query/latest)` to
+app is using [`@tanstack/react-query`](https://tanstack.com/query/latest) to
 fetch, cache, synchronize, and update server state in your web applications
 easily. React-Query here is used for all the data fetching needs, like a hook
 for `useGetBalance` to get the balance of your wallet, `useTransferSol` to
diff --git a/content/guides/getstarted/solana-test-validator.md b/content/guides/getstarted/solana-test-validator.md
index c69046e2c..a2dd52058 100644
--- a/content/guides/getstarted/solana-test-validator.md
+++ b/content/guides/getstarted/solana-test-validator.md
@@ -76,7 +76,7 @@ Once you have the `solana-test-validator` up and running, you can interact with
 it using various Solana CLI (Command Line Interface) commands. These commands
 let you [deploy programs](/docs/programs/deploying.md), manage
 [accounts](/docs/core/accounts.md), send
-[transactions](/docs/core/transactions.md), and much more. 
Here’s a detailed +[transactions](/docs/core/transactions.md), and much more. Here's a detailed guide on the key commands you will use. ### Checking the Status of the Test Validator diff --git a/content/guides/getstarted/solana-token-airdrop-and-faucets.md b/content/guides/getstarted/solana-token-airdrop-and-faucets.md index ee5dbf884..ec9f527db 100644 --- a/content/guides/getstarted/solana-token-airdrop-and-faucets.md +++ b/content/guides/getstarted/solana-token-airdrop-and-faucets.md @@ -43,7 +43,7 @@ connection.requestAirdrop(); ``` See more: -[`requestAirdrop()`](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#requestAirdrop) +[`requestAirdrop()`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html#requestAirdrop) documentation inside web3.js. ## 2. Web Faucet @@ -86,8 +86,7 @@ Currently supported: ### Using the Solana CLI -Specify your [Cluster](https://docs.solana.com/clusters) to be your RPC -provider's URL: +Specify your [Cluster](/docs/core/clusters.md) to be your RPC provider's URL: ```shell solana config set --url diff --git a/content/guides/intro/wallets-explained.md b/content/guides/intro/wallets-explained.md index f837ea241..acd9af700 100644 --- a/content/guides/intro/wallets-explained.md +++ b/content/guides/intro/wallets-explained.md @@ -270,8 +270,8 @@ signature pens! ✍️.** Do you want to start building on Solana? - A gentle introduction to Solana: - [https://solana.com/news/solana-scaffold-part-1-wallet-adapter](https://solana.com/news/solana-scaffold-part-1-wallet-adapter) -- Tutorials: [https://soldev.app/](https://soldev.app/) + [Quick Start guides](/docs/intro/quick-start/index.md) +- [Install the Solana CLI](/docs/intro/installation.md) ### Blockchain and blockchain diff --git a/content/guides/javascript/compressed-nfts.md b/content/guides/javascript/compressed-nfts.md index 08e391ca3..b2b899d85 100644 --- a/content/guides/javascript/compressed-nfts.md +++ b/content/guides/javascript/compressed-nfts.md @@ -18,7 +18,7 @@ altRoutes: Compressed NFTs on Solana use the [Bubblegum](https://docs.metaplex.com/programs/compression/) program from Metaplex to cheaply and securely store NFT metadata using -[State Compression](https://docs.solana.com/learn/state-compression). +[State Compression](/docs/advanced/state-compression.md). This developer guide will use JavaScript/TypeScript to demonstrate: @@ -30,10 +30,9 @@ This developer guide will use JavaScript/TypeScript to demonstrate: ## Intro to Compressed NFTs -Compressed NFTs use -[State Compression](https://docs.solana.com/learn/state-compression) and -[merkle trees](https://docs.solana.com/learn/state-compression#what-is-a-merkle-tree) -to drastically reduce the storage cost for NFTs. Instead of storing an NFT's +Compressed NFTs use [State Compression](/docs/advanced/state-compression.md) and +[merkle trees](/docs/advanced/state-compression.md#what-is-a-merkle-tree) to +drastically reduce the storage cost for NFTs. Instead of storing an NFT's metadata in a typical Solana account, compressed NFTs store the metadata within the ledger. This allows compressed NFTs to still inherit the security and speed of the Solana blockchain, while at the same time reducing the overall storage @@ -53,10 +52,9 @@ transfer. More on this below. 
### Compressed NFTs and indexers
 
 Since compressed NFTs store all of their metadata in the
-[ledger](https://docs.solana.com/terminology#ledger), instead of in traditional
-[accounts](https://docs.solana.com/terminology#account) like uncompressed NFTs,
-we will need to help of indexing services to quickly fetch our compressed NFT's
-metadata.
+[ledger](/docs/terminology.md#ledger), instead of in traditional
+[accounts](/docs/terminology.md#account) like uncompressed NFTs, we will need
+the help of indexing services to quickly fetch our compressed NFT's metadata.
 
 Supporting RPC providers are using the Digital Asset Standard Read API (or
 "Read API" for short) to add additional RPC methods that developers can call.
 These
@@ -90,7 +88,7 @@ happen in 3 primary steps:
 
 - create an NFT collection (or use an existing one)
 - create a
-  [concurrent merkle tree](https://docs.solana.com/learn/state-compression#what-is-a-concurrent-merkle-tree)
+  [concurrent merkle tree](/docs/advanced/state-compression.md#what-is-a-concurrent-merkle-tree)
   (using the `@solana/spl-account-compression` SDK)
 - mint compressed NFTs into your tree (to any owner's address you want)
 
@@ -141,7 +139,7 @@ Using your preferred package manager (e.g. npm, yarn, pnpm, etc), install these
 packages into your project:
 
 ```shell
-yarn add @solana/web3.js @solana/spl-token @solana/spl-account-compression
+yarn add @solana/web3.js@1 @solana/spl-token @solana/spl-account-compression
 ```
 
 ```shell
@@ -168,7 +166,7 @@ actually create them following the same process of creating an
 
 - store the collection's metadata in an Account on-chain
 
 Since NFT Collections have nothing special to do with
-[State Compression](https://docs.solana.com/learn/state-compression) or
+[State Compression](/docs/advanced/state-compression.md) or
 [compressed NFTs](/content/guides/javascript/compressed-nfts.md), we will not
 cover creating one in this guide.
 
@@ -186,7 +184,7 @@ various addresses for your Collection, including:
 
 ## Create a tree
 
 One of the most important decisions to make when creating compressed NFTs is
-[how to setup your tree](https://docs.solana.com/learn/state-compression#sizing-a-concurrent-merkle-tree).
+[how to setup your tree](/docs/advanced/state-compression.md#sizing-a-concurrent-merkle-tree).
 Especially since the values used to size your tree will determine the overall
 cost of creation, and **CANNOT** be changed after creation.
 
@@ -218,9 +216,8 @@ Your tree size is set by 3 values, each serving a very specific purpose:
 
   a large of cost and composability of your compressed NFT collection
 
 > Read more about the details of
-> [State Compression](https://docs.solana.com/learn/state-compression),
-> including
-> [how to size a tree](https://docs.solana.com/learn/state-compression#sizing-a-concurrent-merkle-tree)
+> [State Compression](/docs/advanced/state-compression.md), including
+> [how to size a tree](/docs/advanced/state-compression.md#sizing-a-concurrent-merkle-tree)
 > and potential composability concerns.
 
 Let's assume we are going to create a compressed NFT collection with 10k NFTs in
@@ -254,8 +251,8 @@ node hashes" on-chain. 
Thus requiring us to always include `4` proof node values
 
 ### Generate addresses for the tree
 
 When creating a new tree, we need to generate a new
-[Keypair](https://docs.solana.com/terminology#keypair) address for the tree to
-have:
+[Keypair](/docs/terminology.md#keypair) address for the tree to have:
 
 ```ts
 const treeKeypair = Keypair.generate();
 ```
@@ -363,8 +359,8 @@ Allowing us to cryptographically verify that our original metadata has not
 changed (unless we want it to).
 
 > Learn more about how State Compression uses
-> [concurrent merkle trees](https://docs.solana.com/learn/state-compression#what-is-a-concurrent-merkle-tree)
-> to cryptographically secure off-chain data using the Solana ledger.
+> [concurrent merkle trees](/docs/advanced/state-compression.md#what-is-a-concurrent-merkle-tree)
+> to cryptographically secure offchain data using the Solana ledger.
 
 ### Define our NFT's metadata
 
@@ -401,8 +397,8 @@ In this demo, the key pieces of our NFT's metadata to note are:
 
 ### Derive the Bubblegum signer
 
 When minting new compressed NFTs, the Bubblegum program needs a PDA to perform a
-[cross-program invocation](https://docs.solana.com/developing/programming-model/calling-between-programs#cross-program-invocations)
-(`cpi`) to the SPL compression program.
+[cross-program invocation](/docs/core/cpi.md) (`cpi`) to the SPL compression
+program.
 
 > This `bubblegumSigner` PDA is derived using a hard coded seed string of
 > `collection_cpi` and owned by the Bubblegum program. If this hard coded value
@@ -626,8 +622,8 @@ together in a deterministic way to compute the "root hash". Therefore, allowing
 for cryptographic validation of an asset within the merkle tree.
 
 > **NOTE:** While each of these hash values resembles a Solana Account's
-> [address/public key](https://docs.solana.com/terminology#public-key-pubkey),
-> they are not addresses.
+> [address/public key](/docs/terminology.md#public-key-pubkey), they are not
+> addresses.
 
 Transferring ownership of a compressed NFT happens in 5 broad steps:
 
@@ -745,8 +741,8 @@ Since we will use the `createTransferInstruction` helper function from the
 Bubblegum SDK to actually build our transfer instruction, we need to:
 
 - remove the proof values that are already stored on-chain in the
-  [tree's canopy](https://docs.solana.com/learn/state-compression#canopy-depth),
-  and
+  [tree's canopy](/docs/advanced/state-compression.md#canopy-depth), and
 - convert the remaining proof values into the valid `AccountMeta` structure that
   the instruction builder function accepts
 
@@ -813,7 +808,7 @@ helper function.
 
 Since each of these hash values resembles and is formatted similarly to a
 PublicKey, we can use the
-[`PublicKey`](https://solana-labs.github.io/solana-web3.js/classes/PublicKey.html)
+[`PublicKey`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/PublicKey.html)
 class in web3.js to convert them into an accepted byte array format.
 
 #### Send the transaction
diff --git a/content/guides/javascript/get-program-accounts.md b/content/guides/javascript/get-program-accounts.md
index d38050295..fdad11bb4 100644
--- a/content/guides/javascript/get-program-accounts.md
+++ b/content/guides/javascript/get-program-accounts.md
@@ -17,22 +17,24 @@ altRoutes:
 ---
 
 An RPC method that returns all accounts owned by a program. Currently pagination
-is not supported. Requests to `getProgramAccounts` should include the
-`dataSlice` and/or `filters` parameters to improve response time and return only
-intended results.
+is not supported. 
Requests to
+[`getProgramAccounts`](/docs/rpc/http/getProgramAccounts.mdx) should include
+the `dataSlice` and/or `filters` parameters to improve response time and return
+only intended results.
 
-## getProgramAccounts has the following syntax:
+## getProgramAccounts RPC Method
+
+The `getProgramAccounts` RPC method has the following syntax:
 
-```md
 - `programId`: `string` - Pubkey of the program to query, provided as a base58
   encoded string
 - (optional) `configOrCommitment`: `object` - Configuration parameters
   containing the following optional fields:
   - (optional) `commitment`: `string` -
-    [State commitment](https://docs.solana.com/developing/clients/jsonrpc-api#configuring-state-commitment)
+    [State commitment](/docs/rpc/index.mdx#configuring-state-commitment)
   - (optional) `encoding`: `string` - Encoding for account data, either:
     `base58`, `base64`, or `jsonParsed`. Note, web3js users should instead use
-    [getParsedProgramAccounts](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#getParsedProgramAccounts)
+    [`getParsedProgramAccounts`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html#getParsedProgramAccounts)
   - (optional) `dataSlice`: `object` - Limit the returned account data based on:
     - `offset`: `number` - Number of bytes into account data to begin returning
     - `length`: `number` - Number of bytes of account data to return
@@ -46,8 +48,7 @@ intended results.
   - `dataSize`: `number` - Compares the account data length with the provided
     data size
 - (optional) `withContext`: `boolean` - Wrap the result in an
-  [RpcResponse JSON object](https://docs.solana.com/developing/clients/jsonrpc-api#rpcresponse-structure)
-```
+  [RpcResponse JSON object](/docs/rpc/index.mdx#rpcresponse-structure)
 
 ### Response
 
@@ -143,7 +144,7 @@ token accounts that are owned by our wallet address. When looking at a token
 account, we can see the first two fields stored on a token account are both
 pubkeys, and that each pubkey is 32 bytes in length. Given that `owner` is the
 second field, we should begin our `memcmp` at an `offset` of 32 bytes. From
-here, we’ll be looking for accounts whose owner field matches our wallet
+here, we'll be looking for accounts whose owner field matches our wallet
 address.
 
 ![Account Size](/public/assets/guides/get-program-accounts/memcmp.png)
@@ -192,7 +193,7 @@ import { clusterApiUrl, Connection } from "@solana/web3.js";
 
 The output should look like:
 
-```text
+```shell
 Found 2 token account(s) for wallet FriELggez2Dy3phZeHHAdpcoEXkKQVkv6tx3zDtCVP8T:
 -- Token Account Address 0: H12yCcKLHFJFfohkeKiN8v3zgaLnUMwRcnJTyB4igAsy
 -- Mint: CKKDsBT6KiT4GDKs3e39Ue9tDkhuGUKM3cC2a7pmV9YK
@@ -216,7 +217,7 @@ Much like `memcmp`, `dataSlice` accepts two arguments:
 
 - `offset`: The number of bytes into the account data at which to begin
   returning
 - `length`: The number of bytes which should be returned
 
 `dataSlice` is particularly useful when we run queries on a large dataset but
-don’t actually care about the account data itself. An example of this would be
+don't actually care about the account data itself. An example of this would be
 if we wanted to find the number of token accounts (i.e. number of token
 holders) for a particular token mint. 
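To make that counting idea concrete, here is a minimal sketch using web3.js v1 (the mint address is reused from the example output further below and stands in for any SPL Token mint). By combining `dataSlice: { offset: 0, length: 0 }` with the filters, zero bytes of account data are returned per match, so only the number of matching accounts travels over the wire:

```ts
import { clusterApiUrl, Connection, PublicKey } from "@solana/web3.js";

(async () => {
  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");

  // SPL Token program id (well-known address)
  const TOKEN_PROGRAM_ID = new PublicKey(
    "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA",
  );
  // stand-in mint, reused from the example output below
  const mint = new PublicKey("BUGuuhPsHpk8YZrL2GctsCtXGneL1gmT5zYb7eMHZDWf");

  const accounts = await connection.getProgramAccounts(TOKEN_PROGRAM_ID, {
    // return zero bytes of data per account; we only need the count
    dataSlice: { offset: 0, length: 0 },
    filters: [
      // token accounts are exactly 165 bytes
      { dataSize: 165 },
      // the mint is the first field (offset 0) of a token account
      { memcmp: { offset: 0, bytes: mint.toBase58() } },
    ],
  });

  console.log(`Found ${accounts.length} token account(s) for the mint`);
})();
```

Note that this counts token accounts, which only approximates holders: one owner may have several accounts, and some accounts may hold a zero balance.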
@@ -255,7 +256,7 @@ import { clusterApiUrl, Connection } from "@solana/web3.js";
 })();
 ```
 
-The output should look like (notice the empty at acccount.data):
+The output should look like (notice the empty `<Buffer >` at `account.data`):
 
 ```text
 Found 3 token account(s) for mint BUGuuhPsHpk8YZrL2GctsCtXGneL1gmT5zYb7eMHZDWf
@@ -300,11 +301,11 @@ Found 3 token account(s) for mint BUGuuhPsHpk8YZrL2GctsCtXGneL1gmT5zYb7eMHZDWf
 ```
 
 By combining all three parameters (`dataSlice`, `dataSize`, and `memcmp`) we can
-limit the scope of our query and efficiently return only the data we’re
+limit the scope of our query and efficiently return only the data we're
 interested in.
 
 ## Other Resources
 
 - [RPC API Documentation](https://solana.com/docs/rpc/http/getprogramaccounts)
-- [web3.js documentation](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#getProgramAccounts)
-- [getParsedProgramAccounts documentation](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#getParsedProgramAccounts)
+- [web3.js documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html#getProgramAccounts)
+- [getParsedProgramAccounts documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html#getParsedProgramAccounts)
diff --git a/content/guides/solang/getting-started.md b/content/guides/solang/getting-started.md
deleted file mode 100644
index 44b716680..000000000
--- a/content/guides/solang/getting-started.md
+++ /dev/null
@@ -1,329 +0,0 @@
----
-featured: false
-date: 2023-07-17T00:00:00Z
-difficulty: intro
-title: "Getting started with Solang"
-description:
-  "Quickstart guide on how to build your first Solana program with Solidity
-  using Solang"
-tags:
-  - quickstart
-  - solang
-  - solidity
-keywords:
-  - solang
-  - solidity
-  - tutorial
-  - intro to solana development
-  - blockchain developer
-  - blockchain tutorial
-altRoutes:
-  - /developers/guides/solang/solang-getting-started
-  - /developers/guides/solang-getting-started
----
-
-# Getting Started with Solang
-
-Welcome to this beginner’s guide on getting started with Solang!
-[Solang](https://solang.readthedocs.io/) is a Solidity Compiler that allows you
-to write Solana programs - referred to in other blockchains as 'smart
-contracts' - using the Solidity programming language.
-
-If you’re an EVM developer that’s interested in leveraging the high speed and
-low fees of the Solana network, then Solang is the perfect tool for you. With
-Solang, you can leverage your existing knowledge of Solidity to start building
-on Solana!
-
-## Installation
-
-In this section, we'll help you set up your development environment for Solang.
-Just follow the steps outlined below:
-
-1. **Check Your Prerequisites**: Before diving in, ensure that you have
-   [Rust](https://www.rust-lang.org/tools/install) and
-   [Node.js](https://nodejs.org/en) installed on your system. Windows users will
-   also need to have [Windows Subsystem for Linux](/docs/intro/installation) set
-   up.
-2. **Solana Tool Suite Installation**: Begin by installing the
-   [Solana Tool Suite](https://docs.solana.com/cli/install-solana-cli-tools),
-   which includes the Solana Command Line Interface (CLI) and the latest version
-   of Solang.
-3. **Anchor Framework Installation**: Next, install the
-   [Anchor Framework](https://www.anchor-lang.com/docs/installation). Anchor is
-   a widely used framework in the Solana ecosystem and simplifies the process of
-   building Solana programs. 
With version 0.28, you can start building with
-   Solang directly through Anchor.
-
-   As of the time of this writing, please install Anchor using the following
-   command for compatibility with Solang version 0.3.1:
-
-   ```
-   cargo install --git https://github.com/coral-xyz/anchor anchor-cli --locked --force
-   ```
-
-4. **Solang Extension for VSCode**: If you're a Visual Studio Code (VSCode)
-   user, it's recommended to install the Solang
-   [extension](https://marketplace.visualstudio.com/items?itemName=solang.solang)
-   to assist with syntax highlighting. Remember to disable any active Solidity
-   extensions to ensure that the Solang extension works correctly.
-
-## Creating a new project
-
-Once you’ve installed the Solana CLI and Anchor, you can create a new project
-with the following command:
-
-```
-anchor init project_name --solidity
-```
-
-This command generates a new project with a basic Solang on-chain program
-(equivalent to a smart contract on EVM) and a test file that demonstrates how to
-interact with the program from the client.
-
-## On-chain program Overview
-
-Next, let’s go over the starter code beginning with the on-chain program itself.
-Within your project's `./solidity` directory, you’ll find the following
-contract, which includes:
-
-- A `constructor` to initialize a state variable
-- A `print` function to print messages to the program logs
-- A `flip` function to update the state variable
-- A `get` function to return the current value of the state variable
-
-```solidity
-@program_id("F1ipperKF9EfD821ZbbYjS319LXYiBmjhzkkf5a26rC")
-contract starter {
-    bool private value = true;
-
-    @payer(payer)
-    constructor(address payer) {
-        print("Hello, World!");
-    }
-
-    /// A message that can be called on instantiated contracts.
-    /// This one flips the value of the stored `bool` from `true`
-    /// to `false` and vice versa.
-    function flip() public {
-        value = !value;
-    }
-
-    /// Simply returns the current value of our `bool`.
-    function get() public view returns (bool) {
-        return value;
-    }
-}
-```
-
-### Important Differences
-
-Compared to an EVM smart contract, there are two important differences you might
-notice:
-
-1. The `@program_id` annotation:
-
-   On Solana, smart contracts are referred to as “programs”. The `@program_id`
-   annotation is used to specify the on-chain address of the program.
-
-```solidity
-@program_id("F1ipperKF9EfD821ZbbYjS319LXYiBmjhzkkf5a26rC") // on-chain program address
-```
-
-2. The `@payer` annotation:
-
-   When storing data on-chain, a certain amount of SOL needs to be allocated to
-   cover the storage costs. The `@payer` annotation specifies the user that will
-   pay the SOL required to create the account for storing the state variable.
-
-```solidity
-@payer(payer) // payer for the "data account"
-constructor(address payer) {
-    print("Hello, World!");
-}
-```
-
-### Storing of state data
-
-An important distinction between EVM smart contracts and Solana programs is how
-each stores "state" variables/data:
-
-- EVM smart contracts can directly store state variables.
-- Solana onchain programs, on the other hand, create separate accounts to hold
-  state data. These are often referred to as "data accounts" and are "owned" by
-  a program.
-
-In this example, when the contract is deployed, it is deployed to the address
-specified in `@program_id`. When the `constructor` is called after the program
-is deployed, a separate account with its own address is created to store the
-state variable, instead of being stored within the contract itself. 
-
-This may sound a bit different than what you’re used to, but don't worry! Let’s
-go over the test file to shed more light on this concept.
-
-## Test File Overview
-
-The starter test file can be found in the `./tests` directory. This file
-provides an example of how to interact with the program from the client.
-
-Anchor sets up the `provider` and `program` to help us connect to the contract
-from the client. This is done using an IDL file which lays out the program's
-public interface, similar to the ABI files used in EVM smart contracts. If you
-run `anchor build`, the IDL file is generated and can be found at
-`./target/idl`.
-
-```jsx
-import * as anchor from "@coral-xyz/anchor"
-import { Program } from "@coral-xyz/anchor"
-import { Starter } from "../target/types/starter"
-
-describe("starter", () => {
-  // Configure the client to use the local cluster.
-  const provider = anchor.AnchorProvider.env()
-  anchor.setProvider(provider)
-
-  const dataAccount = anchor.web3.Keypair.generate()
-  const wallet = provider.wallet
-
-  const program = anchor.workspace.Starter as Program<Starter>
-
-  it("Is initialized!", async () => {
-    // Add your test here.
-    const tx = await program.methods
-      .new(wallet.publicKey)
-      .accounts({ dataAccount: dataAccount.publicKey })
-      .signers([dataAccount])
-      .rpc()
-    console.log("Your transaction signature", tx)
-
-    const val1 = await program.methods
-      .get()
-      .accounts({ dataAccount: dataAccount.publicKey })
-      .view()
-
-    console.log("state", val1)
-
-    await program.methods
-      .flip()
-      .accounts({ dataAccount: dataAccount.publicKey })
-      .rpc()
-
-    const val2 = await program.methods
-      .get()
-      .accounts({ dataAccount: dataAccount.publicKey })
-      .view()
-
-    console.log("state", val2)
-  })
-})
-```
-
-In the test file, we first generate a new Keypair which will be used to create
-the “data account” that stores the contract’s state.
-
-```jsx
-const dataAccount = anchor.web3.Keypair.generate();
-```
-
-Next, we use the `new` instruction to create a new data account. This
-instruction corresponds with the contract's `constructor`. The newly created
-data account will be initialized to store the state variable defined in the
-contract.
-
-Here, the `payer` is specified as `wallet.publicKey`, and the address of the
-`dataAccount` that we plan to create is provided. The generated `dataAccount`
-Keypair is included as an additional signer on the transaction, as it's being
-used to create a new account. Essentially, this verifies that we hold the secret
-key corresponding to the address of the new account we're creating.
-
-```jsx
-// Client
-const tx = await program.methods
-  .new(wallet.publicKey)
-  .accounts({ dataAccount: dataAccount.publicKey })
-  .signers([dataAccount])
-  .rpc()
-
-// on-chain program
-@payer(payer)
-constructor(address payer) {
-    print("Hello, World!");
-}
-```
-
-The contract's `get` function is then invoked to fetch the value stored in the
-specified `dataAccount`.
-
-```jsx
-// Client
-const val1 = await program.methods
-  .get()
-  .accounts({ dataAccount: dataAccount.publicKey })
-  .view()
-
-// on-chain program
-function get() public view returns (bool) {
-    return value;
-}
-```
-
-Next, the contract’s `flip` function is used to modify the state of the
-specified `dataAccount`.
-
-```jsx
-// Client
-await program.methods
-  .flip()
-  .accounts({ dataAccount: dataAccount.publicKey })
-  .rpc()
-
-// on-chain program
-function flip() public {
-    value = !value;
-}
-```
-
-To run the test, use the `anchor test` command in the terminal. 
-
-The `anchor test` command performs the following tasks:
-
-- Start a local Solana validator
-- Build and deploy your on-chain program to the local validator
-- Run the test file
-
-The following output should then be displayed in the console:
-
-```
-Your transaction signature 2x7jh3yka9LU6ZeJLUZNNDJSzq6vdUAXk3mUKuP1MYwr6ArYMHDGw6i15jJnMtnC7BP7zKactStHhTekjq2vh6hP
-state true
-state false
-    ✔ Is initialized! (782ms)
-```
-
-You can then inspect the program logs in `./.anchor/program-logs` where you'll
-find the "Hello, World!" message:
-
-```
-Program F1ipperKF9EfD821ZbbYjS319LXYiBmjhzkkf5a26rC invoke [1]
-Program 11111111111111111111111111111111 invoke [2]
-Program 11111111111111111111111111111111 success
-Program log: Hello, World!
-```
-
-Congratulations! You've successfully built your first Solana program with
-Solang! While there might be differences compared to what you're accustomed to
-with standard Solidity smart contracts, Solang provides an excellent bridge to
-help leverage your existing Solidity skills and experience to build on Solana.
-
-## Next Steps
-
-Interested in diving deeper? Check out the `solana-developers/program-examples`
-[repository](https://github.com/solana-developers/program-examples). You'll find
-Solang implementations for common Solana use cases in the `basics` and `tokens`
-sections.
-
-If you have questions, feel free to post them on
-[Solana Stack exchange](https://solana.stackexchange.com/). If you have
-questions for the Solang maintainers, you can reach out to them directly in the
-[Hyperledger Foundation discord](https://discord.com/invite/hyperledger).
-
-Have fun building!
diff --git a/content/guides/solang/index.md b/content/guides/solang/index.md
deleted file mode 100644
index fb281e0f3..000000000
--- a/content/guides/solang/index.md
+++ /dev/null
@@ -1,4 +0,0 @@
----
-metaOnly: true
-title: Solang
---- 
diff --git a/content/guides/token-extensions/dynamic-meta-data-nft.md b/content/guides/token-extensions/dynamic-meta-data-nft.md
index ad68dd621..b52b6d83a 100644
--- a/content/guides/token-extensions/dynamic-meta-data-nft.md
+++ b/content/guides/token-extensions/dynamic-meta-data-nft.md
@@ -46,16 +46,16 @@ character's stats or inventory).
 
 ## Building the on-chain program
 
 In this developer guide, we will demonstrate how to build these Token Extension
-based NFTs and custom metadata using an
-[Anchor program](/content/guides/getstarted/intro-to-anchor.md). This program
-will save the level and the collected resources of a game player within an NFT.
+based NFTs and custom metadata using an [Anchor program](/docs/programs/anchor).
+This program will save the level and the collected resources of a game player
+within an NFT.
 
 This NFT will be created by the Anchor program so it is very easy to mint from
 the JavaScript client. 
Each NFT will have
 some basic structure provided via the Token Metadata interface:
 
 - default on-chain fields
   - `name`, `symbol` and `uri`
-  - the `uri` is a link to an off-chain json file which contains the off chain
+  - the `uri` is a link to an offchain json file which contains the offchain
     metadata of the NFT
 - we will also have custom "additional fields" that we define
 
diff --git a/content/guides/token-extensions/getting-started.md b/content/guides/token-extensions/getting-started.md
index a843fcd9d..5e43e6a42 100644
--- a/content/guides/token-extensions/getting-started.md
+++ b/content/guides/token-extensions/getting-started.md
@@ -41,10 +41,9 @@ You can find a dedicated video tutorial for each Token Extension on this
 ## How do I create a token with token extensions?
 
 To get started creating tokens with token extensions, you can use the
-[Solana Tool Suite](https://solana.com/docs/intro/installation) to create tokens
-with a CLI. Based on the extension you want to create, your command flags may be
-different. Below are the flags to add to create tokens with each type of
-extension.
+[Solana Tool Suite](/docs/intro/installation.md) to create tokens with a CLI.
+Based on the extension you want to create, your command flags may be different.
+Below are the flags to add to create tokens with each type of extension.
 
 | Extension | CLI Flag |
 | -------------------------------------------------------------------------------------------------- | ----------------------------------------- |
@@ -106,7 +105,7 @@ make sense to combine:
 
 - Confidential transfer + permanent delegate
 
 Other than these, you have the option to customize with any combination of token
-extensions that suit your project’s needs.
+extensions that suit your project's needs.
 
 ## How do I add custom logic to my tokens with token extensions?
 
@@ -126,7 +125,7 @@ It is important to note that while transfer hooks give the capability to insert
 custom logic within a transfer, all accounts from the initial transfer are
 converted to read-only accounts. This means that the signer privileges of the
 sender do not extend to the Transfer Hook program. This is to avoid potential
-unexpected logic executing on someone’s wallet who interacts with a token with
+unexpected logic executing on the wallet of someone who interacts with a token with
 transfer hooks, protecting the users.
 
 You can
diff --git a/content/guides/token-extensions/transfer-hook.md b/content/guides/token-extensions/transfer-hook.md
index b5b70559b..685850004 100644
--- a/content/guides/token-extensions/transfer-hook.md
+++ b/content/guides/token-extensions/transfer-hook.md
@@ -790,7 +790,7 @@ create_account(
 )?;
 ```
 
-Once we’ve created the account, we initialize the account data to store the list
+Once we've created the account, we initialize the account data to store the list
 of ExtraAccountMetas.
 
 ```rust
@@ -810,7 +810,7 @@ ExtraAccountMetas account.
 
 ### Custom Transfer Hook Instruction
 
-Next, let’s implement the custom `transfer_hook` instruction. This is the
+Next, let's implement the custom `transfer_hook` instruction. This is the
 instruction the Token Extension program will invoke on every token transfer. In
 this example, we will require a fee paid in wSOL for every token transfer. 
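For context on what this looks like from the client side, here is an illustrative sketch only: it assumes the `createTransferCheckedWithTransferHookInstruction` helper exported by `@solana/spl-token`, and every address below is a placeholder. The helper is meant to resolve the extra accounts listed in the mint's ExtraAccountMetaList account and append them to the transfer instruction, so the hook program receives everything it expects:

```ts
import { clusterApiUrl, Connection, PublicKey } from "@solana/web3.js";
import {
  TOKEN_2022_PROGRAM_ID,
  createTransferCheckedWithTransferHookInstruction,
} from "@solana/spl-token";

(async () => {
  const connection = new Connection(clusterApiUrl("devnet"), "confirmed");

  // placeholder addresses, for illustration only
  const mint = new PublicKey("...");
  const sourceTokenAccount = new PublicKey("...");
  const destinationTokenAccount = new PublicKey("...");
  const owner = new PublicKey("...");

  // builds a transfer_checked instruction and appends the extra accounts
  // (resolved from the mint's ExtraAccountMetaList account) that the
  // transfer hook program expects to receive
  const instruction = await createTransferCheckedWithTransferHookInstruction(
    connection,
    sourceTokenAccount,
    mint,
    destinationTokenAccount,
    owner,
    1n, // amount, in base units (placeholder)
    2, // mint decimals (placeholder)
    [], // additional multisig signers, if any
    "confirmed",
    TOKEN_2022_PROGRAM_ID,
  );
})();
```

The design point is that callers never hand-assemble the hook's account list; it lives on-chain next to the mint, which keeps wallets and dapps compatible with any hook program.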
diff --git a/content/guides/wallets/add-solana-wallet-adapter-to-nextjs.md b/content/guides/wallets/add-solana-wallet-adapter-to-nextjs.md
index f360fd035..867e1d038 100644
--- a/content/guides/wallets/add-solana-wallet-adapter-to-nextjs.md
+++ b/content/guides/wallets/add-solana-wallet-adapter-to-nextjs.md
@@ -85,7 +85,7 @@ Install these packages to your Next.js project using your preferred node
 package manager:
 
 ```shell
-npm install @solana/web3.js \
+npm install @solana/web3.js@1 \
   @solana/wallet-adapter-base \
   @solana/wallet-adapter-react \
   @solana/wallet-adapter-react-ui \
@@ -148,7 +148,7 @@ import the provided standard CSS styles required for these react components to
 be displayed properly in our application. Each of these styles can be easily
 overridden to customize the look.
 
-Let’s import these dependencies and use them further in the context/provider
+Let's import these dependencies and use them further in the context/provider
 component we are building:
 
 ```tsx filename=AppWalletProvider.tsx
@@ -340,7 +340,7 @@ side of your app that is a child of your `AppWalletAdapter` context. In this
 example guide, it will be your entire application.
 
 - the `useWallet` hook has details like `publicKey` and state of the wallet,
-  whether it’s `connecting` or it’s `connected`.
+  whether it's `connecting` or it's `connected`.
 - the `useConnection` hook will facilitate your application's connection to the
   Solana blockchain, via your RPC endpoint
 
@@ -397,7 +397,7 @@ const getAirdropOnClick = async () => {
 
 ### Getting a wallet balance
 
-Here’s an example of getting the SOL balance of the wallet connected using the
+Here's an example of getting the SOL balance of the wallet connected using the
 `useConnection` and `useWallet` hooks.
 
 [`getBalance`](https://solana.com/docs/rpc/http/getbalance#parameters) is an RPC
@@ -425,7 +425,7 @@ With functions like these and the ones provided within the wallet adapter
 packages, you can detect whether the user's wallet is connected or not, create a
 button to get an airdrop of devnet or SOL in the network defined, and more.
 
-Let’s make another page now to demonstrate how we can use each of these hooks to
+Let's make another page now to demonstrate how we can use each of these hooks to
 actually access the `connection` object and your user's wallet state to
 send or sign transactions, read the wallet balance, and test functionality.
 
diff --git a/content/resources/info/docs.md b/content/resources/info/docs.md
index 14e5fd94f..2f491a254 100644
--- a/content/resources/info/docs.md
+++ b/content/resources/info/docs.md
@@ -1,12 +1,12 @@
 ---
 category: documentation
-title: Core Documentation
+title: Solana Documentation
 description:
   "The official Solana documentation on developing, validators, SPL tokens,
   wallets and more." 
tags: - docs isExternal: true -href: https://docs.solana.com/ -repoUrl: https://github.com/solana-labs/solana/tree/master/docs +href: https://solana.com/docs +repoUrl: https://github.com/solana-foundation/developer-content/tree/main/docs --- diff --git a/content/workshops/solana-101.md b/content/workshops/solana-101.md index 1210c2ed8..72850d2b4 100644 --- a/content/workshops/solana-101.md +++ b/content/workshops/solana-101.md @@ -9,7 +9,7 @@ repoUrl: https://github.com/Solana-Workshops/solana-101 duration: "2 hours" objectives: - The Solana Network - - Solana’s Programming Model + - Solana's Programming Model - Tokens & NFTs tags: - Introduction @@ -36,7 +36,7 @@ authorGithubUsername: buffalojoec - Technical Advantages - Network Overview -#### Solana’s Programming Model +#### Solana's Programming Model - Accounts @@ -64,19 +64,19 @@ authorGithubUsername: buffalojoec ### Why Solana? -Let’s talk about the main technological advantages to building a decentralized +Let's talk about the main technological advantages to building a decentralized application on Solana. -Solana has extremely fast block confirmation times, so users don’t have to wait +Solana has extremely fast block confirmation times, so users don't have to wait to make sure their action worked. -Solana’s transaction fees are exceptionally low, so developers can build more +Solana's transaction fees are exceptionally low, so developers can build more robust user experiences that cost less. -Let’s take a brief look at how Solana’s network creates blocks and processes +Let's take a brief look at how Solana's network creates blocks and processes transactions. Like most proof-of-stake networks, Solana elects a leader for each block -creation cycle, who’s responsible for creating a new block. +creation cycle, who's responsible for creating a new block. Unlike Ethereum - Solana does not use a mempool. Instead, it forwards new transactions to the next leader in the block creation cycle, which means when @@ -86,12 +86,12 @@ into a new block. Next, Solana leverages a high-throughput engine called Turbine that disseminates information about a new block to the rest of the network. -When a block’s transactions are executed, Solana’s runtime actually allows the +When a block's transactions are executed, Solana's runtime actually allows the operations within each transaction to run in parallel wherever possible. The combination of these 3 innovations leads to greatly increased speed and throughput for the network. -Solana’s most popular innovation is Proof-of-History, which leverages a +Solana's most popular innovation is Proof-of-History, which leverages a Verifiable-Delay Function (VDF) to allow all nodes in the network to agree on the passage of time. @@ -100,23 +100,23 @@ Weighted QoS, makes it perfect for high-performance applications. ### Programming on Solana -Now let’s dive into the concepts you’ll need to know when programming on Solana. -The first thing we’ll want to understand is the concept of an account. +Now let's dive into the concepts you'll need to know when programming on Solana. +The first thing we'll want to understand is the concept of an account. #### Account An account on Solana is a slice of data from the blockchain. Everything on Solana is an account! You can kind of think of it like a -computer’s file system - where everything is a file! +computer's file system - where everything is a file! Every account has a unique address, holds some balance of SOL, and can store arbitrary data. 
Based on the size of that arbitrary data, a user is required to
-pay some value of SOL for what’s called “Rent”.
+pay some value of SOL for what's called “Rent”.

 Since this is blockchain data, anyone can read from an account. Also, anyone can
-credit SOL or tokens to an account. However, only an account’s owner can modify
-its data - which includes debiting it’s SOL balance.
+credit SOL or tokens to an account. However, only an account's owner can modify
+its data - which includes debiting its SOL balance.

 ```
 {
@@ -134,9 +134,9 @@ its data - which includes debiting it’s SOL balance.

 If we take a look at what an actual account looks like in raw form, we can see
 some of the fields present on all accounts shown here.

-The “key” field is just that account’s address.
+The “key” field is just that account's address.

-The “lamports” field simply tracks that account’s current balance of SOL.
+The “lamports” field simply tracks that account's current balance of SOL.
 Lamports are the smaller denomination of SOL.

 “Data” is where the arbitrary data is stored inside of an account.
@@ -145,7 +145,7 @@ If that arbitrary data stored in this account is actually an executable
 program, the “is_executable” boolean will be set to true.

 Lastly, the “owner” field determines which Solana program has the authority to
-perform changes to this account’s data, including its balance of Lamports.
+perform changes to this account's data, including its balance of Lamports.

 #### Programs

@@ -157,9 +157,9 @@ we mentioned before.

 Right now, Solana programs can be written in Rust, C/C++ or Python. Soon, we may
 be able to write programs in other languages - such as TypeScript and GoLang.

-Unlike Ethereum’s “smart contracts”, programs don’t actually have state of their
+Unlike Ethereum's “smart contracts”, programs don't actually have state of their
 own. Instead, they perform reads and writes on accounts from the blockchain. To
-perform a write, this program must be the designated owner of the account it’s
+perform a write, this program must be the designated owner of the account it's
 attempting to modify. Programs are designed to process what are called
 “instructions”, and they can also send these instructions to other programs on
 the network.
diff --git a/docs/advanced/actions.md b/docs/advanced/actions.md
index 2a9d188bf..40703a55b 100644
--- a/docs/advanced/actions.md
+++ b/docs/advanced/actions.md
@@ -88,7 +88,7 @@ interface.
    an optional list of related actions.
 2. the [POST request](#post-request) returns a signable transaction or message
    that the client then prompts the user's wallet to sign and execute on the
-   blockchain or in another off-chain service.
+   blockchain or in another offchain service.

 ### Action Execution and Lifecycle

@@ -510,8 +510,8 @@ export interface ActionParameter {

 The `pattern` should be a string equivalent of a valid regular expression. This
-regular expression pattern should by used by blink-clients to validate user
-input before before making the POST request. If the `pattern` is not a valid
-regular expression, it should be ignored by clients.
+regular expression pattern should be used by blink-clients to validate user
+input before making the POST request. If the `pattern` is not a valid regular
+expression, it should be ignored by clients.

 The `patternDescription` is a human readable description of the expected input
 requests from the user.
If `pattern` is provided, the `patternDescription` is @@ -592,7 +592,7 @@ user input elements are also supported: - `textarea` - equivalent of HTML [textarea element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/textarea). - Allowing the user provide multi-line input. + Allowing the user to provide multi-line input. - `select` - equivalent of HTML [select element](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/select), allowing the user to experience a “dropdown” style field. The Action API @@ -816,9 +816,9 @@ export interface ActionPostResponse { ``` - `transaction` - The value must be a base64-encoded - [serialized transaction](https://solana-labs.github.io/solana-web3.js/classes/Transaction.html#serialize). + [serialized transaction](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Transaction.html#serialize). The client must base64-decode the transaction and - [deserialize it](https://solana-labs.github.io/solana-web3.js/classes/Transaction.html#from). + [deserialize it](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Transaction.html#from). - `message` - The value must be a UTF-8 string that describes the nature of the transaction included in the response. The client should display this value to @@ -839,16 +839,16 @@ export interface ActionPostResponse { #### POST Response - Transaction If the transaction -[`signatures`](https://solana-labs.github.io/solana-web3.js/classes/Transaction.html#signatures) +[`signatures`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Transaction.html#signatures) are empty or the transaction has NOT been partially signed: - The client must ignore the - [`feePayer`](https://solana-labs.github.io/solana-web3.js/classes/Transaction.html#feePayer) + [`feePayer`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Transaction.html#feePayer) in the transaction and set the `feePayer` to the `account` in the request. - The client must ignore the - [`recentBlockhash`](https://solana-labs.github.io/solana-web3.js/classes/Transaction.html#recentBlockhash) + [`recentBlockhash`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Transaction.html#recentBlockhash) in the transaction and set the `recentBlockhash` to the - [latest blockhash](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#getLatestBlockhash). + [latest blockhash](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html#getLatestBlockhash). - The client must serialize and deserialize the transaction before signing it. This ensures consistent ordering of the account keys, as a workaround for [this issue](https://github.com/solana-labs/solana/issues/21722). @@ -856,9 +856,9 @@ are empty or the transaction has NOT been partially signed: If the transaction has been partially signed: - The client must NOT alter the - [`feePayer`](https://solana-labs.github.io/solana-web3.js/classes/Transaction.html#feePayer) + [`feePayer`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Transaction.html#feePayer) or - [`recentBlockhash`](https://solana-labs.github.io/solana-web3.js/classes/Transaction.html#recentBlockhash) + [`recentBlockhash`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Transaction.html#recentBlockhash) as this would invalidate any existing signatures. - The client must verify existing signatures, and if any are invalid, the client must reject the transaction as **malformed**. 
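To make those client-side rules concrete, here is a minimal sketch of how a blink client might prepare the `transaction` field of an `ActionPostResponse` before prompting the user's wallet. It assumes `@solana/web3.js` v1 and a Node-style `Buffer`; the function name and parameters are illustrative, not part of the specification:

```ts
import { Connection, PublicKey, Transaction } from "@solana/web3.js";

// Prepare the base64-encoded `transaction` from an ActionPostResponse.
// `account` is the user's address from the original POST request body.
async function prepareActionTransaction(
  connection: Connection,
  base64Transaction: string,
  account: PublicKey,
): Promise<Transaction> {
  // base64-decode and deserialize the transaction
  let transaction = Transaction.from(Buffer.from(base64Transaction, "base64"));

  const isPartiallySigned = transaction.signatures.some(
    (pair) => pair.signature !== null,
  );

  if (!isPartiallySigned) {
    // per the rules above: ignore the provided `feePayer` and
    // `recentBlockhash`, substituting the user's account and a fresh blockhash
    transaction.feePayer = account;
    const { blockhash } = await connection.getLatestBlockhash();
    transaction.recentBlockhash = blockhash;

    // serialize and deserialize before signing to ensure consistent ordering
    // of the account keys
    transaction = Transaction.from(
      transaction.serialize({
        requireAllSignatures: false,
        verifySignatures: false,
      }),
    );
  }
  // if partially signed, leave `feePayer` and `recentBlockhash` untouched and
  // verify the existing signatures before requesting the user's signature
  return transaction;
}
```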
@@ -971,9 +971,9 @@ clients in one of the following ways:
   UI with the included Action metadata, but will not allow the user to execute
   further actions.

-If no `links.next` is not provided, blink clients should assume the current
-action is final action in the chain, presenting their "completed" UI state after
-the transaction is confirmed.
+If `links.next` is not provided, blink clients should assume the current
+action is the final action in the chain, presenting their "completed" UI state
+after the transaction is confirmed.

 ## actions.json

@@ -1070,9 +1070,8 @@ The following table outlines the syntax for path matching patterns:

 ### Rules Examples

-The following example demonstrates an exact match rule to map requests requests
-to `/buy` from your site's root to the exact path `/api/buy` relative to your
-site's root:
+The following example demonstrates an exact match rule to map requests to `/buy`
+from your site's root to the exact path `/api/buy` relative to your site's root:

 ```json filename="actions.json"
 {
@@ -1189,7 +1188,7 @@ string with each separated by a semi-colon.

 No other data should be included with Identifier Message's Memo instruction.

 The `identity` and the `reference` should be included as read-only, non-signer
-[keys](https://solana-labs.github.io/solana-web3.js/classes/TransactionInstruction.html#keys)
+[keys](https://solana-labs.github.io/solana-web3.js/v1.x/classes/TransactionInstruction.html#keys)
 in the transaction on an instruction that is NOT the Identifier Message Memo
 instruction.

@@ -1210,7 +1209,7 @@ associated with the Action Provider in a multi-step process:

 3. Verify the specific transaction is the first on-chain occurrence of the
    `reference` on-chain:
    - If this transaction is the first occurrence, the transaction is considered
-     verified and can a safely attributed to the Action Provider.
+     verified and can be safely attributed to the Action Provider.
    - If this transaction is NOT the first occurrence, it is considered invalid
      and therefore not attributed to the Action Provider.
diff --git a/docs/advanced/confirmation.md b/docs/advanced/confirmation.md
index 98e56cb57..35d4df0bb 100644
--- a/docs/advanced/confirmation.md
+++ b/docs/advanced/confirmation.md
@@ -40,7 +40,7 @@ where the magic happens and at a high level it consists of four components:
 - a **list of accounts** to load, and
 - a **“recent blockhash.”**

-In this article, we’re going to be focusing a lot on a transaction’s
+In this article, we're going to be focusing a lot on a transaction's
 [recent blockhash](/docs/terminology.md#blockhash) because it plays a big role
 in transaction confirmation.

@@ -65,14 +65,14 @@ touch on everything except steps 1 and 4.

 A [“blockhash”](/docs/terminology.md#blockhash) refers to the last Proof of
 History (PoH) hash for a [“slot”](/docs/terminology.md#slot) (description
-below). Since Solana uses PoH as a trusted clock, a transaction’s recent
+below). Since Solana uses PoH as a trusted clock, a transaction's recent
 blockhash can be thought of as a **timestamp**.

 ### Proof of History refresher

-Solana’s Proof of History mechanism uses a very long chain of recursive SHA-256
+Solana's Proof of History mechanism uses a very long chain of recursive SHA-256
 hashes to build a trusted clock. The “history” part of the name comes from the
-fact that block producers hash transaction id’s into the stream to record which
+fact that block producers hash transaction IDs into the stream to record which
 transactions were processed in their block.
[PoH hash calculation](https://github.com/anza-xyz/agave/blob/aa0922d6845e119ba466f88497e8209d1c82febc/entry/src/poh.rs#L79):
@@ -123,7 +123,7 @@ the runtime.

 ### Example of transaction expiration

-Let’s walk through a quick example:
+Let's walk through a quick example:

 1. A validator is actively producing a new block for the current slot
 2. The validator receives a transaction from a user with the recent blockhash
@@ -138,26 +138,26 @@ Let’s walk through a quick example:
    then starts producing the block for the next slot (validators get to produce
    blocks for 4 consecutive slots)
 6. The validator checks that same transaction again and finds it is now 152
-   blockhashes old and rejects it because it’s too old :(
+   blockhashes old and rejects it because it's too old :(

 ## Why do transactions expire?

-There’s a very good reason for this actually, it’s to help validators avoid
+There's a very good reason for this actually: it's to help validators avoid
 processing the same transaction twice.

 A naive brute force approach to prevent double processing could be to check
-every new transaction against the blockchain’s entire transaction history. But
+every new transaction against the blockchain's entire transaction history. But
 by having transactions expire after a short amount of time, validators only need
 to check if a new transaction is in a relatively small set of _recently_
 processed transactions.

 ### Other blockchains

-Solana’s approach of prevent double processing is quite different from other
+Solana's approach to preventing double processing is quite different from other
 blockchains. For example, Ethereum tracks a counter (nonce) for each transaction
 sender and will only process transactions that use the next valid nonce.

-Ethereum’s approach is simple for validators to implement, but it can be
+Ethereum's approach is simple for validators to implement, but it can be
 problematic for users. Many people have encountered situations when their
 Ethereum transactions got stuck in a _pending_ state for a long time and all the
 later transactions, which used higher nonce values, were blocked from
@@ -165,12 +165,12 @@ processing.

 ### Advantages on Solana

-There are a few advantages to Solana’s approach:
+There are a few advantages to Solana's approach:

 1. A single fee payer can submit multiple transactions at the same time that are
-   allowed to be processed in any order. This might happen if you’re using
+   allowed to be processed in any order. This might happen if you're using
    multiple applications at the same time.
-2. If a transaction doesn’t get committed to a block and expires, users can try
+2. If a transaction doesn't get committed to a block and expires, users can try
    again knowing that their previous transaction will NOT ever be processed.

 By not using counters, the Solana wallet experience may be easier for users to
@@ -181,7 +181,7 @@ quickly and avoid annoying pending states.

 Of course there are some disadvantages too:

-1. Validators have to actively track a set of all processed transaction id’s to
+1. Validators have to actively track a set of all processed transaction IDs to
    prevent double processing.
 2. If the expiration time period is too short, users might not be able to submit
    their transaction before it expires.
@@ -189,7 +189,7 @@ Of course there are some disadvantages too:

 These disadvantages highlight a tradeoff in how transaction expiration is
 configured. If the expiration time of a transaction is increased, validators
 need to use more memory to track more transactions.
If expiration time is -decreased, users don’t have enough time to submit their transaction. +decreased, users don't have enough time to submit their transaction. Currently, Solana clusters require that transactions use blockhashes that are no more than 151 blocks old. @@ -208,27 +208,27 @@ target time of 400ms. One minute is not a lot of time considering that a client needs to fetch a recent blockhash, wait for the user to sign, and finally hope that the -broadcasted transaction reaches a leader that is willing to accept it. Let’s go +broadcasted transaction reaches a leader that is willing to accept it. Let's go through some tips to help avoid confirmation failures due to transaction expiration! ### Fetch blockhashes with the appropriate commitment level -Given the short expiration time frame, it’s imperative that clients and +Given the short expiration time frame, it's imperative that clients and applications help users create transactions with a blockhash that is as recent as possible. When fetching blockhashes, the current recommended RPC API is called [`getLatestBlockhash`](/docs/rpc/http/getLatestBlockhash.mdx). By default, this API uses the `finalized` commitment level to return the most recently finalized -block’s blockhash. However, you can override this behavior by +block's blockhash. However, you can override this behavior by [setting the `commitment` parameter](/docs/rpc/index.mdx#configuring-state-commitment) to a different commitment level. **Recommendation** The `confirmed` commitment level should almost always be used for RPC requests -because it’s usually only a few slots behind the `processed` commitment and has +because it's usually only a few slots behind the `processed` commitment and has a very low chance of belonging to a dropped [fork](https://docs.solanalabs.com/consensus/fork-generation). @@ -237,10 +237,10 @@ But feel free to consider the other options: - Choosing `processed` will let you fetch the most recent blockhash compared to other commitment levels and therefore gives you the most time to prepare and process a transaction. But due to the prevalence of forking in the Solana - blockchain, roughly 5% of blocks don’t end up being finalized by the cluster - so there’s a real chance that your transaction uses a blockhash that belongs + blockchain, roughly 5% of blocks don't end up being finalized by the cluster + so there's a real chance that your transaction uses a blockhash that belongs to a dropped fork. Transactions that use blockhashes for abandoned blocks - won’t ever be considered recent by any blocks that are in the finalized + won't ever be considered recent by any blocks that are in the finalized blockchain. - Using the [default commitment](/docs/rpc#default-commitment) level `finalized` will eliminate any risk that the blockhash you choose will belong to a dropped @@ -259,22 +259,22 @@ into issues due to one node lagging behind the other. When RPC nodes receive a `sendTransaction` request, they will attempt to determine the expiration block of your transaction using the most recent finalized block or with the block selected by the `preflightCommitment` -parameter. A **VERY** common issue is that a received transaction’s blockhash +parameter. A **VERY** common issue is that a received transaction's blockhash was produced after the block used to calculate the expiration for that -transaction. If an RPC node can’t determine when your transaction expires, it +transaction. 
If an RPC node can't determine when your transaction expires, it will only forward your transaction **one time** and afterwards will then **drop** the transaction. Similarly, when RPC nodes receive a `simulateTransaction` request, they will simulate your transaction using the most recent finalized block or with the block selected by the `preflightCommitment` parameter. If the block chosen for -simulation is older than the block used for your transaction’s blockhash, the +simulation is older than the block used for your transaction's blockhash, the simulation will fail with the dreaded “blockhash not found” error. **Recommendation** Even if you use `skipPreflight`, **ALWAYS** set the `preflightCommitment` -parameter to the same commitment level used to fetch your transaction’s +parameter to the same commitment level used to fetch your transaction's blockhash for both `sendTransaction` and `simulateTransaction` requests. ### Be wary of lagging RPC nodes when sending transactions @@ -290,18 +290,18 @@ lagging behind the first. For `sendTransaction` requests, clients should keep resending a transaction to a RPC node on a frequent interval so that if an RPC node is slightly lagging -behind the cluster, it will eventually catch up and detect your transaction’s +behind the cluster, it will eventually catch up and detect your transaction's expiration properly. For `simulateTransaction` requests, clients should use the [`replaceRecentBlockhash`](/docs/rpc/http/simulateTransaction.mdx) parameter to -tell the RPC node to replace the simulated transaction’s blockhash with a +tell the RPC node to replace the simulated transaction's blockhash with a blockhash that will always be valid for simulation. ### Avoid reusing stale blockhashes Even if your application has fetched a very recent blockhash, be sure that -you’re not reusing that blockhash in transactions for too long. The ideal +you're not reusing that blockhash in transactions for too long. The ideal scenario is that a recent blockhash is fetched right before a user signs their transaction. @@ -309,19 +309,19 @@ transaction. Poll for new recent blockhashes on a frequent basis to ensure that whenever a user triggers an action that creates a transaction, your application already has -a fresh blockhash that’s ready to go. +a fresh blockhash that's ready to go. **Recommendation for wallets** -Poll for new recent blockhashes on a frequent basis and replace a transaction’s +Poll for new recent blockhashes on a frequent basis and replace a transaction's recent blockhash right before they sign the transaction to ensure the blockhash is as fresh as possible. ### Use healthy RPC nodes when fetching blockhashes By fetching the latest blockhash with the `confirmed` commitment level from an -RPC node, it’s going to respond with the blockhash for the latest confirmed -block that it’s aware of. Solana’s block propagation protocol prioritizes +RPC node, it's going to respond with the blockhash for the latest confirmed +block that it's aware of. Solana's block propagation protocol prioritizes sending blocks to staked nodes so RPC nodes naturally lag about a block behind the rest of the cluster. They also have to do more work to handle application requests and can lag a lot more under heavy user traffic. @@ -338,11 +338,11 @@ still return a blockhash that is just about to expire. Monitor the health of your RPC nodes to ensure that they have an up-to-date view of the cluster state with one of the following methods: -1. 
Fetch your RPC node’s highest processed slot by using the +1. Fetch your RPC node's highest processed slot by using the [`getSlot`](/docs/rpc/http/getSlot.mdx) RPC API with the `processed` commitment level and then call the - [`getMaxShredInsertSlot](/docs/rpc/http/getMaxShredInsertSlot.mdx) RPC API to - get the highest slot that your RPC node has received a “shred” of a block + [`getMaxShredInsertSlot`](/docs/rpc/http/getMaxShredInsertSlot.mdx) RPC API + to get the highest slot that your RPC node has received a “shred” of a block for. If the difference between these responses is very large, the cluster is producing blocks far ahead of what the RPC node has processed. 2. Call the `getLatestBlockhash` RPC API with the `confirmed` commitment level @@ -373,25 +373,25 @@ To start using durable transactions, a user first needs to submit a transaction that [invokes instructions that create a special on-chain “nonce” account](https://docs.rs/solana-program/latest/solana_program/system_instruction/fn.create_nonce_account.html) and stores a “durable blockhash” inside of it. At any point in the future (as -long as the nonce account hasn’t been used yet), the user can create a durable +long as the nonce account hasn't been used yet), the user can create a durable transaction by following these 2 rules: 1. The instruction list must start with an [“advance nonce” system instruction](https://docs.rs/solana-program/latest/solana_program/system_instruction/fn.advance_nonce_account.html) which loads their on-chain nonce account -2. The transaction’s blockhash must be equal to the durable blockhash stored by +2. The transaction's blockhash must be equal to the durable blockhash stored by the on-chain nonce account -Here’s how these durable transactions are processed by the Solana runtime: +Here's how these durable transactions are processed by the Solana runtime: -1. If the transaction’s blockhash is no longer “recent”, the runtime checks if - the transaction’s instruction list begins with an “advance nonce” system +1. If the transaction's blockhash is no longer “recent”, the runtime checks if + the transaction's instruction list begins with an “advance nonce” system instruction 2. If so, it then loads the nonce account specified by the “advance nonce” instruction -3. Then it checks that the stored durable blockhash matches the transaction’s +3. Then it checks that the stored durable blockhash matches the transaction's blockhash -4. Lastly it makes sure to advance the nonce account’s stored blockhash to the +4. Lastly it makes sure to advance the nonce account's stored blockhash to the latest recent blockhash to ensure that the same transaction can never be processed again diff --git a/docs/advanced/index.md b/docs/advanced/index.md index 933c4c67f..11ae9d7f4 100644 --- a/docs/advanced/index.md +++ b/docs/advanced/index.md @@ -1,5 +1,5 @@ --- metaOnly: true -title: Advanced Concepts +title: Advanced Topics sidebarSortOrder: 3 --- diff --git a/docs/advanced/lookup-tables.md b/docs/advanced/lookup-tables.md index 4cc2bed60..e88f26e52 100644 --- a/docs/advanced/lookup-tables.md +++ b/docs/advanced/lookup-tables.md @@ -1,7 +1,10 @@ --- sidebarSortOrder: 4 title: Address Lookup Tables -description: "" +description: + Learn how to use Solana Address Lookup Tables (ALTs) to efficiently handle up + to 64 addresses per transaction. Create, extend, and utilize lookup tables + using web3.js. 
--- Address Lookup Tables, commonly referred to as "_lookup tables_" or "_ALTs_" for @@ -36,7 +39,7 @@ Creating a new lookup table with the `@solana/web3.js` library is similar to the older `legacy` transactions, but with some differences. Using the `@solana/web3.js` library, you can use the -[`createLookupTable`](https://solana-labs.github.io/solana-web3.js/classes/AddressLookupTableProgram.html#createLookupTable) +[`createLookupTable`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/AddressLookupTableProgram.html#createLookupTable) function to construct the instruction needed to create a new lookup table, as well as determine its address: @@ -72,7 +75,7 @@ console.log("lookup table address:", lookupTableAddress.toBase58()); Adding addresses to a lookup table is known as "_extending_". Using the `@solana/web3.js` library, you can create a new _extend_ instruction using the -[`extendLookupTable`](https://solana-labs.github.io/solana-web3.js/classes/AddressLookupTableProgram.html#extendLookupTable) +[`extendLookupTable`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/AddressLookupTableProgram.html#extendLookupTable) method: ```js @@ -106,7 +109,7 @@ Enabling up to 64 addresses in those future transactions. Similar to requesting another account (or PDA) from the cluster, you can fetch a complete Address Lookup Table with the -[`getAddressLookupTable`](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#getAddressLookupTable) +[`getAddressLookupTable`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html#getAddressLookupTable) method: ```js @@ -144,7 +147,7 @@ utilize the onchain lookup capabilities. Just like older `legacy` transactions, you can create all the [instructions](/docs/terminology.md#instruction) your transaction will execute onchain. You can then provide an array of these instructions to the -[Message](/docs/terminology.md#message) used in the `v0 transaction. +[Message](/docs/terminology.md#message) used in the `v0` transaction. > NOTE: The instructions used inside a `v0` transaction can be constructed using > the same methods and functions used to create the instructions in the past. diff --git a/docs/advanced/retry.md b/docs/advanced/retry.md index 6949e2ad2..dfea6dac3 100644 --- a/docs/advanced/retry.md +++ b/docs/advanced/retry.md @@ -3,6 +3,11 @@ sidebarSortOrder: 2 title: Retrying Transactions altRoutes: - /docs/core/transactions/retry +description: + Learn how to handle dropped transactions and implement custom retry logic on + Solana. This guide covers transaction rebroadcasting, preflight checks, and + best practices for managing transaction retries to ensure reliable transaction + processing on the Solana blockchain. --- # Retrying Transactions @@ -24,7 +29,7 @@ their own custom rebroadcasting logic. - Developers should enable preflight checks to raise errors before transactions are submitted - Before re-signing any transaction, it is **very important** to ensure that the - initial transaction’s blockhash has expired + initial transaction's blockhash has expired ## The Journey of a Transaction @@ -59,13 +64,13 @@ forwarding it to the relevant leaders. UDP allows validators to quickly communicate with one another, but does not provide any guarantees regarding transaction delivery. 
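To ground that handoff in code, the sketch below shows what submission looks like from the client's side. It assumes `@solana/web3.js` v1 and an already-signed, wire-format `rawTransaction` (both assumptions, not shown in this guide); the RPC node relays the bytes onward with no guarantee of delivery or confirmation:

```ts
import { Connection, clusterApiUrl } from "@solana/web3.js";

const connection = new Connection(clusterApiUrl("devnet"), "confirmed");

// `rawTransaction` is assumed to be a fully signed, serialized transaction.
declare const rawTransaction: Buffer;

// The returned signature is the transaction id; it exists as soon as the RPC
// node accepts the request, before any validator has processed the transaction.
const signature = await connection.sendRawTransaction(rawTransaction, {
  skipPreflight: false,
  preflightCommitment: "confirmed",
});
console.log("relayed to the cluster:", signature);
```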
-Because Solana’s leader schedule is known in advance of every +Because Solana's leader schedule is known in advance of every [epoch](/docs/terminology.md#epoch) (~2 days), an RPC node will broadcast its transaction directly to the current and next leaders. This is in contrast to other gossip protocols such as Ethereum that propagate transactions randomly and broadly across the entire network. By default, RPC nodes will try to forward transactions to leaders every two seconds until either the transaction is -finalized or the transaction’s blockhash expires (150 blocks or ~1 minute 19 +finalized or the transaction's blockhash expires (150 blocks or ~1 minute 19 seconds as of the time of this writing). If the outstanding rebroadcast queue size is greater than [10,000 transactions](https://github.com/solana-labs/solana/blob/bfbbc53dac93b3a5c6be9b4b65f679fdb13e41d9/send-transaction-service/src/send_transaction_service.rs#L20), @@ -75,7 +80,7 @@ that RPC operators can adjust to change the default behavior of this retry logic. When an RPC node broadcasts a transaction, it will attempt to forward the -transaction to a leader’s +transaction to a leader's [Transaction Processing Unit (TPU)](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/core/src/validator.rs#L867). The TPU processes transactions in five distinct phases: @@ -105,7 +110,7 @@ For more information on the TPU, please refer to ## How Transactions Get Dropped -Throughout a transaction’s journey, there are a few scenarios in which the +Throughout a transaction's journey, there are a few scenarios in which the transaction can be unintentionally dropped from the network. ### Before a transaction is processed @@ -113,7 +118,7 @@ transaction can be unintentionally dropped from the network. If the network drops a transaction, it will most likely do so before the transaction is processed by a leader. UDP [packet loss](https://en.wikipedia.org/wiki/Packet_loss) is the simplest reason -why this might occur. During times of intense network load, it’s also possible +why this might occur. During times of intense network load, it's also possible for validators to become overwhelmed by the sheer number of transactions required for processing. While validators are equipped to forward surplus transactions via `tpu_forwards`, there is a limit to the amount of data that can @@ -127,7 +132,7 @@ There are also two lesser known reasons why a transaction may be dropped before it is processed. The first scenario involves transactions that are submitted via an RPC pool. Occasionally, part of the RPC pool can be sufficiently ahead of the rest of the pool. This can cause issues when nodes within the pool are required -to work together. In this example, the transaction’s +to work together. In this example, the transaction's [recentBlockhash](/docs/core/transactions.md#recent-blockhash) is queried from the advanced part of the pool (Backend A). When the transaction is submitted to the lagging part of the pool (Backend B), the nodes will not recognize the @@ -137,20 +142,20 @@ transaction submission if developers enable ![Transaction dropped via an RPC Pool](/assets/docs/rt-dropped-via-rpc-pool.png) -Temporarily network forks can also result in dropped transactions. If a -validator is slow to replay its blocks within the Banking Stage, it may end up -creating a minority fork. When a client builds a transaction, it’s possible for -the transaction to reference a `recentBlockhash` that only exists on the -minority fork. 
After the transaction is submitted, the cluster can then switch -away from its minority fork before the transaction is processed. In this -scenario, the transaction is dropped due to the blockhash not being found. +Temporary network forks can also result in dropped transactions. If a validator +is slow to replay its blocks within the Banking Stage, it may end up creating a +minority fork. When a client builds a transaction, it's possible for the +transaction to reference a `recentBlockhash` that only exists on the minority +fork. After the transaction is submitted, the cluster can then switch away from +its minority fork before the transaction is processed. In this scenario, the +transaction is dropped due to the blockhash not being found. ![Transaction dropped due to minority fork (before processed)](/assets/docs/rt-dropped-minority-fork-pre-process.png) ### After a transaction is processed and before it is finalized In the event a transaction references a `recentBlockhash` from a minority fork, -it’s still possible for the transaction to be processed. In this case, however, +it's still possible for the transaction to be processed. In this case, however, it would be processed by the leader on the minority fork. When this leader attempts to share its processed transactions with the rest of the network, it would fail to reach consensus with the majority of validators that do not @@ -201,8 +206,8 @@ the transaction will be processed or finalized by the cluster. ## Customizing Rebroadcast Logic In order to develop their own rebroadcasting logic, developers should take -advantage of `sendTransaction`’s `maxRetries` parameter. If provided, -`maxRetries` will override an RPC node’s default retry logic, allowing +advantage of `sendTransaction`'s `maxRetries` parameter. If provided, +`maxRetries` will override an RPC node's default retry logic, allowing developers to manually control the retry process [within reasonable bounds](https://github.com/solana-labs/solana/blob/98707baec2385a4f7114d2167ef6dfb1406f954f/validator/src/main.rs#L1258-L1274). @@ -210,9 +215,9 @@ A common pattern for manually retrying transactions involves temporarily storing the `lastValidBlockHeight` that comes from [getLatestBlockhash](/docs/rpc/http/getLatestBlockhash.mdx). Once stashed, an application can then -[poll the cluster’s blockheight](/docs/rpc/http/getBlockHeight.mdx) and manually +[poll the cluster's blockheight](/docs/rpc/http/getBlockHeight.mdx) and manually retry the transaction at an appropriate interval. In times of network -congestion, it’s advantageous to set `maxRetries` to 0 and manually rebroadcast +congestion, it's advantageous to set `maxRetries` to 0 and manually rebroadcast via a custom algorithm. While some applications may employ an [exponential backoff](https://en.wikipedia.org/wiki/Exponential_backoff) algorithm, others such as [Mango](https://www.mango.markets/) opt to @@ -285,10 +290,10 @@ fork. If an application has access to RPC nodes behind a load balancer, it can also choose to divide its workload amongst specific nodes. RPC nodes that serve data-intensive requests such as -[getProgramAccounts](https://solanacookbook.com/guides/get-program-accounts.html) -may be prone to falling behind and can be ill-suited for also forwarding -transactions. For applications that handle time-sensitive transactions, it may -be prudent to have dedicated nodes that only handle `sendTransaction`. 
+[getProgramAccounts](/content/guides/javascript/get-program-accounts.md) may be
+prone to falling behind and can be ill-suited for also forwarding transactions.
+For applications that handle time-sensitive transactions, it may be prudent to
+have dedicated nodes that only handle `sendTransaction`.

 ### The Cost of Skipping Preflight

@@ -310,7 +315,7 @@ for, it is recommended that developers keep `skipPreflight` set to `false`.

 Despite all attempts to rebroadcast, there may be times in which a client is
 required to re-sign a transaction. Before re-signing any transaction, it is
-**very important** to ensure that the initial transaction’s blockhash has
+**very important** to ensure that the initial transaction's blockhash has
 expired. If the initial blockhash is still valid, it is possible for both
 transactions to be accepted by the network. To an end-user, this would appear as
 if they unintentionally sent the same transaction twice.
diff --git a/docs/advanced/state-compression.md b/docs/advanced/state-compression.md
index a91eee2b8..5e01a602e 100644
--- a/docs/advanced/state-compression.md
+++ b/docs/advanced/state-compression.md
@@ -3,14 +3,14 @@
 sidebarSortOrder: 4
 title: State Compression
 description:
   'State Compression is the method of cheaply and securely storing
-  "fingerprints" of off-chain data in the Solana leger, instead of expensive
+  "fingerprints" of offchain data in the Solana ledger, instead of expensive
   accounts.'
 ---

 On Solana, [State Compression](/docs/advanced/state-compression.md) is the
-method of creating a "fingerprint" (or hash) of off-chain data and storing this
+method of creating a "fingerprint" (or hash) of offchain data and storing this
 fingerprint on-chain for secure verification. Effectively using the security of
-the Solana ledger to securely validate off-chain data, verifying it has not been
+the Solana ledger to securely validate offchain data, verifying it has not been
 tampered with.

 This method of "compression" allows Solana programs and dApps to use cheap
@@ -25,22 +25,22 @@ this final hash on-chain.

 ## What is State Compression?

 In simple terms, state compression uses "**_tree_**" structures to
-cryptographically hash off-chain data together, in a deterministic way, to
+cryptographically hash offchain data together, in a deterministic way, to
 compute a single final hash that gets stored on-chain.

 These _trees_ are created in this "_deterministic_" process by:

 - taking any piece of data
 - creating a hash of this data
-- storing this hash as a `leaf` the bottom of the tree
-- each `leaf` pair is then hash together, creating a `branch`
-- each `branch` is then hash together
+- storing this hash as a `leaf` at the bottom of the tree
+- each `leaf` pair is then hashed together, creating a `branch`
+- each `branch` is then hashed together
 - continually climbing the tree and hashing adjacent branches together
 - once at the top of the tree, a final `root hash` is produced

 This `root hash` is then stored onchain, as a verifiable **_proof_** of all of
 the data within every leaf. Allowing anyone to cryptographically verify all the
-off-chain data within the tree, while only actually storing a **minimal** amount
+offchain data within the tree, while only actually storing a **minimal** amount
 of data on-chain. Therefore, significantly reducing the cost to store/prove
 large amounts of data due to this "state compression".
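As a toy illustration of that deterministic process (a sketch only; actual state compression uses Solana's onchain concurrent merkle tree program, not this code), the snippet below hashes leaves pairwise up to a single root using Node's built-in `crypto` module:

```ts
import { createHash } from "node:crypto";

const sha256 = (data: Buffer): Buffer =>
  createHash("sha256").update(data).digest();

// Hash each piece of data into a `leaf`, then repeatedly hash adjacent pairs
// together (leaf pairs into branches, branch pairs upward) until a single
// `root hash` remains.
function merkleRoot(items: string[]): Buffer {
  let level = items.map((item) => sha256(Buffer.from(item)));
  while (level.length > 1) {
    const next: Buffer[] = [];
    for (let i = 0; i < level.length; i += 2) {
      // duplicate the last node when a level has an odd number of nodes
      const right = level[i + 1] ?? level[i];
      next.push(sha256(Buffer.concat([level[i], right])));
    }
    level = next;
  }
  return level[0];
}

// Any change to any item produces a completely different root hash.
console.log(merkleRoot(["data a", "data b", "data c", "data d"]).toString("hex"));
```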
@@ -94,9 +94,9 @@ In high throughput applications, like within the
 [Solana runtime](/docs/core/fees.md), requests to change an on-chain
 _traditional merkle tree_ could be received by validators in relatively rapid
 succession (e.g. within the same slot). Each leaf data change would still be
-required to performed in series. Resulting in each subsequent request for change
-to fail, due to the root hash and proof being invalidated by the previous change
-request in the slot.
+required to be performed in series. Resulting in each subsequent request for
+change to fail, due to the root hash and proof being invalidated by the previous
+change request in the slot.

 Enter, Concurrent merkle trees.

@@ -175,8 +175,8 @@ We must use a `maxDepth` of `14` to ensure we can store all of our data.

 The `maxDepth` value will be one of the primary drivers of cost when creating a
 tree since you will pay this cost upfront at tree creation. The higher the max
-tree depth depth, the more data fingerprints (aka hashes) you can store, the
-higher the cost.
+tree depth, the more data fingerprints (aka hashes) you can store, the higher
+the cost.

 ### Max buffer size

@@ -193,14 +193,14 @@ sized and set at tree creation via this `maxBufferSize` value.

 ### Canopy depth

-The "canopy depth", sometimes called the canopy size, is the number of proof
-nodes that are cached/stored on-chain for any given proof path.
+The "canopy depth," also known as the canopy size, refers to the number of proof
+node levels that are cached or stored onchain for a given proof path.

 When performing an update action on a `leaf`, like transferring ownership (e.g.
 selling a compressed NFT), the **complete** proof path must be used to verify
 original ownership of the leaf and therefore allow for the update action. This
 verification is performed using the **complete** proof path to correctly compute
-the current `root hash` (or any cached `root hash` via the on-chain "concurrent
+the current `root hash` (or any cached `root hash` via the onchain "concurrent
 buffer").

 The larger a tree's max depth is, the more proof nodes are required to perform
@@ -222,6 +222,18 @@ For example, a tree with a max depth of `14` would require `14` total proof
 nodes. With a canopy of `10`, only `4` proof nodes are required to be submitted
 per update transaction.

+![Canopy depth of 1 for a Concurrent Merkle Tree of max depth of 3](/assets/docs/compression/canopy-depth-1.png)
+
+Consider another example, this time with a tree of max depth `3`. If we want to
+apply an action to one of the tree's leaves—such as updating `R4`—we need to
+provide proofs for `L4` and `R2`. However, we can omit `R1` since it is already
+cached/stored onchain due to our canopy depth of `1`, which ensures that all
+nodes at level 1 (`L1` and `R1`) are stored onchain. This results in a total of
+2 required proofs.
+
+Therefore, the number of proofs required to update a leaf is equal to the max
+depth minus the canopy depth. In this example, `3 - 1 = 2`.
+
 #### The larger the canopy depth value, the higher the cost

 The `canopyDepth` value is also a primary factor of cost when creating a tree
@@ -267,7 +279,7 @@ package, developers can use the
 function to calculate the required space for a given tree size parameters.
Then using the
-[`getMinimumBalanceForRentExemption`](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#getMinimumBalanceForRentExemption)
+[`getMinimumBalanceForRentExemption`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html#getMinimumBalanceForRentExemption)
 function to get the final cost (in lamports) to allocate the required space for
 the tree on-chain.
diff --git a/docs/advanced/versions.md b/docs/advanced/versions.md
index c86528558..501b4aa84 100644
--- a/docs/advanced/versions.md
+++ b/docs/advanced/versions.md
@@ -13,10 +13,9 @@ Versioned Transactions are the new transaction format that allow for additional
 functionality in the Solana runtime, including
 [Address Lookup Tables](/docs/advanced/lookup-tables.md).

-While changes to [onchain](/docs/programs/index.md) programs are **NOT**
-required to support the new functionality of versioned transactions (or for
-backwards compatibility), developers **WILL** need update their client side code
-to prevent
+While changes to onchain programs are **NOT** required to support the new
+functionality of versioned transactions (or for backwards compatibility),
+developers **WILL** need to update their client side code to prevent
 [errors due to different transaction versions](#max-supported-transaction-version).

 ## Current Transaction Versions
@@ -46,13 +45,14 @@ transaction is returned when `legacy` is selected)

 ## How to set max supported version

 You can set the `maxSupportedTransactionVersion` using both the
-[`@solana/web3.js`](https://solana-labs.github.io/solana-web3.js/) library and
-JSON formatted requests directly to an RPC endpoint.
+[`@solana/web3.js`](https://solana-labs.github.io/solana-web3.js/v1.x/) library
+and JSON formatted requests directly to an RPC endpoint.

 ### Using web3.js

-Using the [`@solana/web3.js`](https://solana-labs.github.io/solana-web3.js/)
-library, you can retrieve the most recent block or get a specific transaction:
+Using the
+[`@solana/web3.js`](https://solana-labs.github.io/solana-web3.js/v1.x/) library,
+you can retrieve the most recent block or get a specific transaction:

 ```js
 // connect to the `devnet` cluster and get the current `slot`
diff --git a/docs/clients/javascript-reference.md b/docs/clients/javascript-reference.md
index 92b104638..cfa80f396 100644
--- a/docs/clients/javascript-reference.md
+++ b/docs/clients/javascript-reference.md
@@ -1,5 +1,8 @@
 ---
 title: Web3.js API Examples
+description:
+  Learn how to interact with the Solana blockchain using the @solana/web3.js
+  library through practical code samples and explanations.
 ---

 ## Web3 API Reference Guide

@@ -8,13 +11,13 @@ The `@solana/web3.js` library is a package that has coverage over the
 [Solana JSON RPC API](/docs/rpc).

 You can find the full documentation for the `@solana/web3.js` library
-[here](https://solana-labs.github.io/solana-web3.js/).
+[here](https://solana-labs.github.io/solana-web3.js/v1.x/).

 ## General

 ### Connection

-[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Connection.html)
+[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html)

 Connection is used to interact with the [Solana JSON RPC](/docs/rpc). You can
 use Connection to confirm transactions, get account info, and more.
@@ -59,12 +62,12 @@ console.log(slotLeader);
 ```

 The above example shows only a few of the methods on Connection.
Please see the -[source generated docs](https://solana-labs.github.io/solana-web3.js/classes/Connection.html) +[source generated docs](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Connection.html) for the full list. ### Transaction -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Transaction.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Transaction.html) A transaction is used to interact with programs on the Solana blockchain. These transactions are constructed with TransactionInstructions, containing all the @@ -139,7 +142,7 @@ await web3.sendAndConfirmRawTransaction(connection, rawTransaction); ### Keypair -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Keypair.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Keypair.html) The keypair is used to create an account with a public key and secret key within Solana. You can either generate, generate from a seed, or create from a secret @@ -211,7 +214,7 @@ entropy. Do not share your seed. Treat the seed like you would a private key. ### PublicKey -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/PublicKey.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/PublicKey.html) PublicKey is used throughout `@solana/web3.js` in transactions, keypairs, and programs. You require publickey when listing each account in a transaction and @@ -257,7 +260,7 @@ console.log(`Valid Program Address: ${validProgramAddress}`); ### SystemProgram -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/SystemProgram.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/SystemProgram.html) The SystemProgram grants the ability to create accounts, allocate account data, assign an account to programs, work with nonce accounts, and transfer lamports. @@ -355,7 +358,7 @@ await web3.sendAndConfirmTransaction(connection, assignTransaction, [ ### Secp256k1Program -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Secp256k1Program.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Secp256k1Program.html) The Secp256k1Program is used to verify Secp256k1 signatures, which are used by both Bitcoin and Ethereum. @@ -418,7 +421,7 @@ await web3.sendAndConfirmTransaction(connection, transaction, [fromPublicKey]); ### Message -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Message.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Message.html) Message is used as another way to construct transactions. You can construct a message using the accounts, header, instructions, and recentBlockhash that are a @@ -484,7 +487,7 @@ await web3.sendAndConfirmTransaction(connection, transaction, [fromPublicKey]); ### Struct -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Struct.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Struct.html) The struct class is used to create Rust compatible structs in javascript. This class is only compatible with Borsh encoded Rust structs. 
@@ -514,7 +517,7 @@ export class Fee extends Struct { ### Enum -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Enum.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Enum.html) The Enum class is used to represent a Rust compatible Enum in javascript. The enum will just be a string representation if logged but can be properly @@ -544,7 +547,7 @@ export class AccountType extends Enum {} ### NonceAccount -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/NonceAccount.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/NonceAccount.html) Normally a transaction is rejected if a transaction's `recentBlockhash` field is too old. To provide for certain custodial services, Nonce Accounts are used. @@ -634,7 +637,7 @@ offline with the nonce in place of the `recentBlockhash`. ### VoteAccount -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/VoteAccount.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/VoteAccount.html) Vote account is an object that grants the capability of decoding vote accounts from the native vote account program on the network. @@ -708,7 +711,7 @@ VoteAccount { ### StakeProgram -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/StakeProgram.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/StakeProgram.html) The StakeProgram facilitates staking SOL and delegating them to any validators on the network. You can use StakeProgram to create a stake account, stake some @@ -808,7 +811,7 @@ await web3.sendAndConfirmTransaction(connection, withdrawTransaction, [ ### Authorized -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Authorized.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Authorized.html) Authorized is an object used when creating an authorized account for staking within Solana. You can designate a `staker` and `withdrawer` separately, @@ -819,7 +822,7 @@ You can find more usage of the `Authorized` object under ### Lockup -[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Lockup.html) +[Source Documentation](https://solana-labs.github.io/solana-web3.js/v1.x/classes/Lockup.html) Lockup is used in conjunction with the [StakeProgram](/docs/clients/javascript.md#StakeProgram) to create an account. diff --git a/docs/clients/javascript.md b/docs/clients/javascript.md index 1d1d678bc..6d4932896 100644 --- a/docs/clients/javascript.md +++ b/docs/clients/javascript.md @@ -2,6 +2,10 @@ sidebarLabel: JavaScript / TypeScript title: JavaScript Client for Solana sidebarSortOrder: 2 +description: + Learn how to interact with Solana using the JavaScript/TypeScript client + library (@solana/web3.js). This guide covers wallet connections, transactions, + and custom program interactions with code examples. --- ## What is Solana-Web3.js? @@ -10,7 +14,7 @@ The Solana-Web3.js library aims to provide complete coverage of Solana. The library was built on top of the [Solana JSON RPC API](/docs/rpc). You can find the full documentation for the `@solana/web3.js` library -[here](https://solana-labs.github.io/solana-web3.js/). +[here](https://solana-labs.github.io/solana-web3.js/v1.x/). 
## Common Terminology
@@ -30,13 +34,13 @@ For the full list of terms, see

 #### yarn

 ```shell
-yarn add @solana/web3.js
+yarn add @solana/web3.js@1
 ```

 #### npm

 ```shell
-npm install --save @solana/web3.js
+npm install --save @solana/web3.js@1
 ```

 #### Bundle
diff --git a/docs/clients/rust.md b/docs/clients/rust.md
index 6f8d93d0e..7dd221995 100644
--- a/docs/clients/rust.md
+++ b/docs/clients/rust.md
@@ -2,6 +2,7 @@
 sidebarLabel: Rust
 title: Rust Client for Solana
 sidebarSortOrder: 1
+description: Learn how to use Solana's Rust crates for development.
 ---

 Solana's Rust crates are
@@ -30,7 +31,7 @@ development:
   SBF. This crate contains many fundamental data types and is re-exported from
   [`solana-sdk`], which cannot be imported from a Solana program.

-- [`solana-sdk`] — The basic off-chain SDK, it re-exports
+- [`solana-sdk`] — The basic offchain SDK; it re-exports
   [`solana-program`] and adds more APIs on top of that. Most Solana programs
   that do not run on-chain will import this.
diff --git a/docs/core/accounts.md b/docs/core/accounts.md
index eb689c6b0..c43d7984f 100644
--- a/docs/core/accounts.md
+++ b/docs/core/accounts.md
@@ -2,8 +2,11 @@
 sidebarSortOrder: 1
 sidebarLabel: Solana Account Model
 title: Solana Account Model
-altRoutes:
-  - /docs/core
+description:
+  Learn about Solana's account model, including how accounts store data and
+  programs, rent mechanics, account ownership, and the relationship between
+  programs and data accounts. Understand the core concepts of Solana's key-value
+  storage system.
 ---

 On Solana, all data is stored in what are referred to as "accounts”. The way
diff --git a/docs/core/clusters.md b/docs/core/clusters.md
index 28c16e285..c7b59ee60 100644
--- a/docs/core/clusters.md
+++ b/docs/core/clusters.md
@@ -2,6 +2,10 @@
 sidebarLabel: Clusters & Endpoints
 title: Clusters and Public RPC Endpoints
 sidebarSortOrder: 8
+description:
+  Learn about Solana's network clusters (Devnet, Testnet, and Mainnet Beta),
+  their public RPC endpoints, rate limits, and use cases. Learn how to connect
+  to different Solana networks for development, testing, and production.
 ---

 The Solana blockchain has several different groups of validators, known as
@@ -35,6 +39,22 @@ An example of some of these Solana blockchain explorers include:

 - [http://solanabeach.io/](http://solanabeach.io/).
 - [http://validators.app/](http://validators.app/).

+## At a high level
+
+- Mainnet: Live production environment for deployed applications.
+- Devnet: Testing with public accessibility for developers experimenting with
+  their applications.
+- Testnet: Stress-testing for network upgrades and validator performance.
+
+**Example use cases**: You may want to debug a new program on Devnet or verify
+performance metrics on Testnet before Mainnet deployment.
+
+| **Cluster** | **Endpoint**                          | **Purpose**                    | **Notes**                      |
+| ----------- | ------------------------------------- | ------------------------------ | ------------------------------ |
+| Mainnet     | `https://api.mainnet-beta.solana.com` | Live production environment    | Requires SOL for transactions  |
+| Devnet      | `https://api.devnet.solana.com`       | Public testing and development | Free SOL airdrop for testing   |
+| Testnet     | `https://api.testnet.solana.com`      | Validator and stress testing   | May have intermittent downtime |
+
 ## Devnet

 Devnet serves as a playground for anyone who wants to take Solana for a test
diff --git a/docs/core/cpi.md b/docs/core/cpi.md
index e330d7724..6a6add62b 100644
--- a/docs/core/cpi.md
+++ b/docs/core/cpi.md
@@ -2,6 +2,10 @@
 title: Cross Program Invocation (CPI)
 sidebarLabel: Cross Program Invocation
 sidebarSortOrder: 6
+description:
+  Learn about Cross Program Invocation (CPI) on Solana - how programs can call
+  instructions on other programs, handle PDA signers, and compose functionality
+  across the Solana network.
 ---

 A Cross Program Invocation (CPI) refers to when one program invokes the
diff --git a/docs/core/fees.md b/docs/core/fees.md
index 8467f23d1..d4d098cb7 100644
--- a/docs/core/fees.md
+++ b/docs/core/fees.md
@@ -2,9 +2,9 @@
 title: Fees on Solana
 sidebarSortOrder: 3
 description:
-  "Your guide to transaction fees on Solana -- small fees paid to process
-  instructions on the network, based on computation and an optional
-  prioritization fee."
+  Learn about Solana's fee structure including transaction fees, prioritization
+  fees, and rent costs. Understand how fees are calculated, collected and
+  distributed across the network.
 keywords:
   - instruction fee
   - processing fee
@@ -197,9 +197,8 @@ syscalls, etc), each may consume a
 of compute units.

 > A program can log details about its compute usage, including how much remains
-> in its alloted compute budget. See
-> [program debugging](/docs/programs/debugging.md#monitoring-compute-budget-consumption)
-> for more information. You can also find more information in this guide for
+> in its allotted compute budget. You can also find more information in this
+> guide for
 > [optimizing your compute usage](/content/guides/advanced/how-to-optimize-compute.md).

-Each transaction is alloted a [compute unit limit](#compute-unit-limit), either
+Each transaction is allotted a [compute unit limit](#compute-unit-limit), either
@@ -339,7 +338,7 @@ let instruction = ComputeBudgetInstruction::set_compute_unit_price(1);

 #### Javascript

 The `@solana/web3.js` library includes functions within the
-[`ComputeBudgetProgram`](https://solana-labs.github.io/solana-web3.js/classes/ComputeBudgetProgram.html)
+[`ComputeBudgetProgram`](https://solana-labs.github.io/solana-web3.js/v1.x/classes/ComputeBudgetProgram.html)
 class to craft instructions for setting the _compute unit limit_ and _compute
 unit price_:
@@ -423,9 +422,9 @@ allocates, the higher the withheld rent deposit will be.

 ### Rent exempt

-Accounts must maintain a lamport balance greater the minimum required to store
-its respective data on-chain. This is called "_rent exempt_" and that balance is
-called the "_minimum balance for rent exemption_".
+Accounts must maintain a lamport balance greater than the minimum required to
+store their respective data on-chain. This is called "_rent exempt_" and that
+balance is called the "_minimum balance for rent exemption_".

 > New accounts (and programs) on Solana are **REQUIRED** to be initialized with
 > enough lamports to become _rent exempt_. This was not always the case.
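As a quick sketch of checking that minimum (assuming `@solana/web3.js` v1 and a devnet connection; the 200-byte size is an arbitrary example), the required deposit scales with how much space the account allocates:

```ts
import { Connection, clusterApiUrl } from "@solana/web3.js";

const connection = new Connection(clusterApiUrl("devnet"), "confirmed");

// Minimum balance for a hypothetical account holding 200 bytes of data.
// Funding a new account below this amount will cause creation to fail.
const space = 200;
const lamports = await connection.getMinimumBalanceForRentExemption(space);
console.log(`${space}-byte account: ${lamports} lamports to be rent exempt`);
```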
diff --git a/docs/core/index.md b/docs/core/index.md
index 519926a25..83c414bda 100644
--- a/docs/core/index.md
+++ b/docs/core/index.md
@@ -1,5 +1,117 @@
---
-metaOnly: true
title: Core Concepts
sidebarSortOrder: 2
+description:
+  Learn essential Solana blockchain concepts including accounts, transactions,
+  programs, program derived addresses, cross program invocations, and how
+  tokens work on Solana.
---
+
+Build a strong understanding of the core concepts that make Solana different
+from other blockchains. Understanding the "Solana programming model" through
+these core concepts is very important to maximize your success as a Solana
+blockchain developer.
+
+## Solana Account Model
+
+On Solana, all data is stored in what are referred to as "accounts". The way
+data is organized on the Solana blockchain resembles a
+[key-value store](https://en.wikipedia.org/wiki/Key%E2%80%93value_database),
+where each entry in the database is called an "account".
+
+Learn more about [Accounts](/docs/core/accounts.md) here.
+
+## Transactions and Instructions
+
+On Solana, we send [transactions](/docs/core/transactions#transaction) to
+interact with the network. Transactions include one or more
+[instructions](/docs/core/transactions#instruction), each representing a
+specific operation to be processed. The execution logic for instructions is
+stored on [programs](/docs/core/programs) deployed to the Solana network, where
+each program stores its own set of instructions.
+
+Learn more about [Transactions](/docs/core/transactions.md) and
+[Instructions](/docs/core/transactions.md#instruction) here.
+
+## Fees on Solana
+
+The Solana blockchain has a few different types of fees and costs that are
+incurred to use the permissionless network. These can be segmented into a few
+specific types:
+
+- [Transaction Fees](/docs/core/fees.md#transaction-fees) - A fee to have
+  validators process transactions/instructions
+- [Prioritization Fees](/docs/core/fees.md#prioritization-fees) - An optional
+  fee to boost transaction processing order
+- [Rent](/docs/core/fees.md#rent) - A withheld balance to keep data stored
+  on-chain
+
+Learn more about [Fees on Solana](/docs/core/fees.md) here.
+
+## Programs on Solana
+
+In the Solana ecosystem, "smart contracts" are called programs. Each program is
+an on-chain account that stores executable logic, organized into specific
+functions referred to as _instructions_ and called via _instruction handler_
+functions within the respective deployed program.
+
+Learn more about [Programs on Solana](/docs/core/programs.md) here.
+
+## Program Derived Address
+
+Program Derived Addresses (PDAs) provide developers on Solana with two main use
+cases:
+
+- **Deterministic Account Addresses**: PDAs provide a mechanism to
+  deterministically derive an address using a combination of optional "seeds"
+  (predefined inputs) and a specific program ID.
+- **Enable Program Signing**: The Solana runtime enables programs to "sign" for
+  PDAs which are derived from their program ID.
+
+You can think of PDAs as a way to create hashmap-like structures on-chain from a
+predefined set of inputs (e.g. strings, numbers, and other account addresses).
+
+Learn more about [Program Derived Address](/docs/core/pda.md) here.
+
+## Cross Program Invocation
+
+A Cross Program Invocation (CPI) refers to when one program invokes the
+instructions of another program. This mechanism allows for the composability of
+Solana programs.
+
+You can think of instructions as API endpoints that a program exposes to the
+network and a CPI as one API internally invoking another API.
+
+Learn more about [Cross Program Invocation](/docs/core/cpi.md) here.
+
+## Tokens on Solana
+
+Tokens are digital assets that represent ownership over diverse categories of
+assets. Tokenization enables the digitalization of property rights, serving as a
+fundamental component for managing both fungible and non-fungible assets.
+
+- Fungible Tokens represent interchangeable and divisible assets of the same
+  type and value (e.g. USDC).
+- Non-fungible Tokens (NFT) represent ownership of indivisible assets (e.g.
+  artwork).
+
+Learn more about [Tokens on Solana](/docs/core/tokens.md) here.
+
+## Clusters and Endpoints
+
+The Solana blockchain has several different groups of validators, known as
+[Clusters](/docs/core/clusters.md). Each serves a different purpose within the
+overall ecosystem and contains dedicated API nodes to fulfill
+[JSON-RPC](/docs/rpc/index.mdx) requests for its respective cluster.
+
+The individual nodes within a Cluster are owned and operated by third parties,
+with a public endpoint available for each.
+
+There are three primary clusters on the Solana network, each with a different
+public endpoint:
+
+- Mainnet - `https://api.mainnet-beta.solana.com`
+- Devnet - `https://api.devnet.solana.com`
+- Testnet - `https://api.testnet.solana.com`
+
+Learn more about [Clusters and Endpoints](/docs/core/clusters.md) here.
diff --git a/docs/core/pda.md b/docs/core/pda.md
index 2e155eb2e..2d0330554 100644
--- a/docs/core/pda.md
+++ b/docs/core/pda.md
@@ -2,6 +2,10 @@ title: Program Derived Address (PDA)
sidebarLabel: Program Derived Address
sidebarSortOrder: 5
+description:
+  Learn about Program Derived Addresses (PDAs) on Solana - deterministic account
+  addresses that enable secure program signing. Understand PDA derivation,
+  canonical bumps, and how to create PDA accounts.
---

Program Derived Addresses (PDAs) provide developers on Solana with two main use
@@ -42,8 +46,8 @@ anything built at that location.

- PDAs are addresses that fall off the Ed25519 curve and have no corresponding
  private key.
-- Solana programs can programmatically "sign" for PDAs that are derived using
-  its program ID.
+- Solana programs can programmatically "sign" on behalf of PDAs that are derived
+  using their program ID.

- Deriving a PDA does not automatically create an on-chain account.

diff --git a/docs/core/programs.md b/docs/core/programs.md
index 330fb3d0c..89e34cdad 100644
--- a/docs/core/programs.md
+++ b/docs/core/programs.md
@@ -2,6 +2,10 @@ title: Programs
sidebarLabel: Programs on Solana
sidebarSortOrder: 4
+description:
+  Learn about Solana programs (smart contracts) and how to develop them using
+  Rust or the Anchor framework. Understand program deployment, upgrades, and
+  verification on the Solana network.
---

In the Solana ecosystem, "smart contracts" are called programs. Each
@@ -9,10 +13,6 @@ In the Solana ecosystem, "smart contracts" are called programs. Each
stores executable logic, organized into specific functions referred to as
[instructions](/docs/core/transactions.md#instruction).

-For additional topics related to Solana programs, refer to the pages included
-under the [Deploying Programs](/docs/programs/index.md) section of this
-documentation.
-
## Key Points

- Programs are on-chain accounts that contain executable code.
This code is @@ -33,10 +33,10 @@ Solana programs are predominantly written in the [Rust](https://doc.rust-lang.org/book/) programming language, with two common approaches for development: -- [Anchor](/content/guides/getstarted/intro-to-anchor.md): A framework designed - for Solana program development. It provides a faster and simpler way to write - programs, using Rust macros to significantly reduce boilerplate code. For - beginners, it is recommended to start with the Anchor framework. +- [Anchor](/docs/programs/anchor): A framework designed for Solana program + development. It provides a faster and simpler way to write programs, using + Rust macros to significantly reduce boilerplate code. For beginners, it is + recommended to start with the Anchor framework. - [Native Rust](/content/guides/getstarted/intro-to-native-rust.md): This approach involves writing Solana programs in Rust without leveraging any diff --git a/docs/core/tokens.md b/docs/core/tokens.md index 36092eb10..8122b119f 100644 --- a/docs/core/tokens.md +++ b/docs/core/tokens.md @@ -1,6 +1,10 @@ --- title: "Tokens on Solana" sidebarSortOrder: 7 +description: + Learn about Solana tokens (SPL Tokens) including fungible and non-fungible + tokens, Token Program, Token Extensions Program, mint accounts, token + accounts, and practical examples for creating and managing tokens on Solana. --- Tokens are digital assets that represent ownership over diverse categories of @@ -551,8 +555,8 @@ spl-token initialize-metadata ``` -The token URI is normally a link to off-chain metadata you want to associate -with the token. You can find an example of the JSON format +The token URI is normally a link to offchain metadata you want to associate with +the token. You can find an example of the JSON format [here](https://raw.githubusercontent.com/solana-developers/opos-asset/main/assets/DeveloperPortal/metadata.json). For example, running the following command will store the additional metadata diff --git a/docs/core/transactions.md b/docs/core/transactions.md index 560d771dd..edfd6d5f8 100644 --- a/docs/core/transactions.md +++ b/docs/core/transactions.md @@ -1,6 +1,10 @@ --- title: "Transactions and Instructions" sidebarSortOrder: 2 +description: + Learn about Solana transactions and instructions - the fundamental building + blocks for interacting with the Solana blockchain. Understand transaction + structure and instruction composition with practical examples. --- On Solana, we send [transactions](/docs/core/transactions#transaction) to diff --git a/docs/economics/index.md b/docs/economics/index.md index baf26baea..55444a801 100644 --- a/docs/economics/index.md +++ b/docs/economics/index.md @@ -8,7 +8,7 @@ sidebarSortOrder: 5 **Subject to change.** -Solana’s crypto-economic system is designed to promote a healthy, long term +Solana's crypto-economic system is designed to promote a healthy, long term self-sustaining economy with participant incentives aligned to the security and decentralization of the network. The main participants in this economy are validation-clients. Their contributions to the network, state validation, and diff --git a/docs/economics/inflation/terminology.md b/docs/economics/inflation/terminology.md index 56b1c8154..c6d6503f9 100644 --- a/docs/economics/inflation/terminology.md +++ b/docs/economics/inflation/terminology.md @@ -14,7 +14,7 @@ genesis block or protocol inflation) minus any tokens that have been burnt (via transaction fees or other mechanism) or slashed. 
At network launch, 500,000,000 SOL were instantiated in the genesis block. Since then the Total Current Supply has been reduced by the burning of transaction fees and a planned token -reduction event. Solana’s _Total Current Supply_ can be found at +reduction event. Solana's _Total Current Supply_ can be found at https://explorer.solana.com/supply ### Inflation Rate [%] @@ -48,26 +48,26 @@ _Inflation Schedule_. - While the _Inflation Schedule_ determines how the protocol issues SOL, this neglects the concurrent elimination of tokens in the ecosystem due to various factors. The primary token burning mechanism is the burning of a portion of - each transaction fee. $50\%$ of each transaction fee is burned, with the + each transaction fee. 50% of each transaction fee is burned, with the remaining fee retained by the validator that processes the transaction. - Additional factors such as loss of private keys and slashing events should also be considered in a holistic analysis of the _Effective Inflation Rate_. - For example, it’s estimated that $10-20\%$ of all BTC have been lost and are + For example, it's estimated that 10-20% of all BTC have been lost and are unrecoverable and that networks may experience similar yearly losses at the - rate of $1-2\%$. + rate of 1-2%. ### Staking Yield [%] The rate of return (aka _interest_) earned on SOL staked on the network. It is often quoted as an annualized rate (e.g. "the network _staking yield_ is -currently $10\%$ per year"). +currently 10% per year"). - _Staking yield_ is of great interest to validators and token holders who wish to delegate their tokens to avoid token dilution due to inflation (the extent of which is discussed below). -- $100\%$ of inflationary issuances are to be distributed to staked - token-holders in proportion to their staked SOL and to validators who charge a - commission on the rewards earned by their delegated SOL. +- 100% of inflationary issuances are to be distributed to staked token-holders + in proportion to their staked SOL and to validators who charge a commission on + the rewards earned by their delegated SOL. - There may be future consideration for an additional split of inflation issuance with the introduction of _Archivers_ into the economy. _Archivers_ are network participants who provide a decentralized storage service and diff --git a/docs/economics/staking/stake-programming.md b/docs/economics/staking/stake-programming.md index afac9e315..ed9852168 100644 --- a/docs/economics/staking/stake-programming.md +++ b/docs/economics/staking/stake-programming.md @@ -4,12 +4,12 @@ title: Stake Programming To maximize stake distribution, decentralization, and censorship resistance on the Solana network, staking can be performed programmatically. The team and -community have developed several on-chain and off-chain programs to make stakes +community have developed several on-chain and offchain programs to make stakes easier to manage. #### Stake-o-matic aka Auto-delegation Bots -This off-chain program manages a large population of validators staked by a +This offchain program manages a large population of validators staked by a central authority. The Solana Foundation uses an auto-delegation bot to regularly delegate its stake to "non-delinquent" validators that meet specified performance requirements. diff --git a/docs/intro/dev.md b/docs/intro/dev.md index ff65650cf..24dcec050 100644 --- a/docs/intro/dev.md +++ b/docs/intro/dev.md @@ -55,7 +55,7 @@ interact with your application onchain instead of just on the frontend. 
Developing on Solana still has CI/CD pipelines, testing, debugging tools, a
frontend and backend, and anything you'd find in a normal development flow.

-## What You'll Need Get Started
+## What You'll Need to Get Started

To get started with Solana development, you'll need different tools based on
whether you are developing for client-side, onchain programs, or both.
@@ -68,16 +68,18 @@ If you're developing on the client-side, you can work with any programming
language you're comfortable with. Solana has community-contributed SDKs to help
developers interact with the Solana network in most popular languages:

-| Language   | SDK                                                                                          |
-| ---------- | -------------------------------------------------------------------------------------------- |
-| RUST       | [solana_sdk](https://docs.rs/solana-sdk/latest/solana_sdk/)                                  |
-| Typescript | [@solana/web3.js](https://github.com/solana-labs/solana-web3.js)                             |
-| Python     | [solders](https://github.com/kevinheavey/solders)                                            |
-| Java       | [solanaj](https://github.com/skynetcap/solanaj)                                              |
-| C++        | [solcpp](https://github.com/mschneider/solcpp)                                               |
-| Go         | [solana-go](https://github.com/gagliardetto/solana-go)                                       |
-| Kotlin     | [solanaKT](https://github.com/metaplex-foundation/SolanaKT)                                  |
-| Dart       | [solana](https://github.com/espresso-cash/espresso-cash-public/tree/master/packages/solana)  |
+| Language   | SDK                                                                                                        |
+| ---------- | ---------------------------------------------------------------------------------------------------------- |
+| Rust       | [solana_sdk](https://docs.rs/solana-sdk/latest/solana_sdk/)                                                |
+| TypeScript | [@solana/web3.js](https://github.com/solana-labs/solana-web3.js)                                           |
+| Python     | [solders](https://github.com/kevinheavey/solders)                                                          |
+| Java       | [solanaj](https://github.com/skynetcap/solanaj) or [solana4j](https://github.com/LMAX-Exchange/solana4j)   |
+| C++        | [solcpp](https://github.com/mschneider/solcpp)                                                             |
+| Go         | [solana-go](https://github.com/gagliardetto/solana-go)                                                     |
+| Kotlin     | [solanaKT](https://github.com/metaplex-foundation/SolanaKT) or [sol4k](https://github.com/sol4k/sol4k)     |
+| Dart       | [solana](https://github.com/espresso-cash/espresso-cash-public/tree/master/packages/solana)                |
+| C#         | [solnet](https://github.com/bmresearch/Solnet)                                                             |
+| GDScript   | [godot](https://github.com/Virus-Axel/godot-solana-sdk/)                                                   |

You'll also need a connection with an RPC to interact with the network. You can
either work with an [RPC infrastructure provider](https://solana.com/rpc) or
@@ -107,10 +109,9 @@ can do this with the following command:

curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
```

-You'll then need to have the
-[Solana CLI installed](https://docs.solanalabs.com/cli/install) to compile and
-deploy your programs. You can install the Solana CLI by running the following
-command:
+You'll then need to have the [Solana CLI installed](/docs/intro/installation.md)
+to compile and deploy your programs. You can install the Solana CLI by running
+the following command:

```bash
sh -c "$(curl -sSfL https://release.anza.xyz/stable/install)"
```
@@ -126,7 +127,7 @@ solana-test-validator

This will start a local validator on your machine that you can use to test your
programs. You can
-[read more about local development in this guide](https://solana.com/docs/intro/installation).
+[read more about local development in this guide](/docs/intro/installation.md).

When building onchain programs, you have a choice to either build with native
Rust (i.e., without a framework) or use the Anchor framework.
Anchor is a
@@ -150,8 +151,7 @@ your program based on your language preference:

If you do not want to develop your programs locally, there's also the
[online IDE Solana Playground](https://beta.solpg.io). Solana Playground allows
you to write, test, and deploy programs on Solana. You can get started with
-Solana Playground by
-[following our guide](https://solana.com/developers/guides/getstarted/hello-world-in-your-browser).
+Solana Playground by [following our quick start guide](/docs/intro/quick-start).

### Developer Environments

@@ -192,8 +192,4 @@ problem can find your question!

## Next steps

-You're now ready to get started building on Solana!
-
-- [Deploy your first Solana program in the browser](/content/guides/getstarted/hello-world-in-your-browser.md)
-- [Get started building programs locally with Rust](/content/guides/getstarted/local-rust-hello-world.md)
-- [Overview of writing Solana programs](/docs/programs/index.md)
+[You're now ready to get started building on Solana!](/docs/intro/quick-start)
diff --git a/docs/intro/installation.md b/docs/intro/installation.md
index 9632b4271..0f155d59c 100644
--- a/docs/intro/installation.md
+++ b/docs/intro/installation.md
@@ -1,11 +1,17 @@
---
title: Installation
+seoTitle: Install the Solana CLI and Anchor
sidebarSortOrder: 1
description:
-  "Easily setup your local development environment for Solana development on
-  Linux, Mac or Windows. Including installing Rust, the Solana CLI, and Anchor."
+  A comprehensive guide to setting up your local Solana development environment.
+  Learn how to install Rust, the Solana CLI, and Anchor Framework on Windows
+  (WSL), Linux, and Mac. Includes step-by-step instructions for creating
+  wallets, requesting airdrops, and running a local validator.
altRoutes:
  - /developers/guides/getstarted/setup-local-development
+  - /docs/install
+  - /install
+  - /setup
---

This section covers the steps to set up your local environment for Solana
@@ -190,8 +196,7 @@ Close and reopen your terminal to apply the PATH changes or run the following in

```
export PATH="/Users/test/.local/share/solana/install/active_release/bin:$PATH"
```

-
-
+

If you are using a Linux or WSL terminal, you can add the PATH environment
@@ -260,12 +265,66 @@ agave-install update
programs. The Anchor framework leverages Rust macros to simplify the process of
writing Solana programs.

-Install the Anchor CLI with the following command:
+There are two ways to install the Anchor CLI and tooling:
+
+1. Using Anchor Version Manager (AVM) - the **recommended installation**
+   method, since it simplifies updating Anchor versions in the future
+2. Without AVM - this requires a more manual process to update Anchor versions
+   later
+
+
+
+The Anchor version manager (AVM) allows you to install and manage different
+Anchor versions on your system, including more easily updating Anchor versions
+in the future.
+
+Install AVM with the following command:
+
+```shell
+cargo install --git https://github.com/coral-xyz/anchor avm --force
+```
+
+Test to ensure AVM was installed and is accessible:
+
+```shell
+avm --version
+```
+
+Install the latest version of Anchor CLI using AVM:
+
+```shell
+avm install latest
+avm use latest
+```
+
+Or install a specific version of the Anchor CLI by declaring which version you
+want to install:
+
+```shell
+avm install 0.30.1
+avm use 0.30.1
+```
+
+> Don't forget to run the `avm use` command to declare which Anchor CLI version
+> should be used on your system.
+>
+> - If you installed the `latest` version, run `avm use latest`.
+> - If you installed the version `0.30.1`, run `avm use 0.30.1`.
+
+
+
+
+Install a specific version of the Anchor CLI with the following command:

```shell
cargo install --git https://github.com/coral-xyz/anchor --tag v0.30.1 anchor-cli
```
+
+
+
You may see the following warning during installation. However, it does not
affect the installation process.
@@ -294,7 +353,7 @@ warning: `anchor-cli` (lib) generated 1 warning

-To verify that the installation was successful, check the Ancor CLI version:
+To verify that the installation was successful, check the Anchor CLI version:

```shell
anchor --version
```
@@ -383,6 +442,29 @@ You should the following output:

+When running `anchor build`, if you encounter an `error: not a directory` error
+similar to the following:
+
+```
+error: not a directory: '.../solana-release/bin/sdk/sbf/dependencies/platform-tools/rust/lib'
+```
+
+Try these solutions:
+
+1. Force install using the following command:
+
+```shell
+cargo build-sbf --force-tools-install
+```
+
+2. If the above doesn't work, clear the Solana cache:
+
+```shell
+rm -rf ~/.cache/solana/*
+```
+
+After applying either solution, attempt to run `anchor build` again.
+
If you are on Linux or WSL and encounter the following errors when running
`anchor test` after creating a new Anchor project, it may be due to missing
Node.js or Yarn:
@@ -422,7 +504,7 @@ Keypair Path: /Users/test/.config/solana/id.json
Commitment: confirmed
```

-The RPC URL and Websocket URL specific the Solana cluster the CLI will make
+The RPC URL and Websocket URL specify the Solana cluster the CLI will make
requests to. By default this will be mainnet-beta.

You can update the Solana CLI cluster using the following commands:
diff --git a/docs/intro/quick-start/cross-program-invocation.md b/docs/intro/quick-start/cross-program-invocation.md
index 94eb78b60..f3aafb4c9 100644
--- a/docs/intro/quick-start/cross-program-invocation.md
+++ b/docs/intro/quick-start/cross-program-invocation.md
@@ -2,6 +2,12 @@ sidebarLabel: Cross Program Invocation
title: Cross Program Invocation
sidebarSortOrder: 5
+description:
+  Learn how to implement Cross Program Invocations (CPIs) in Solana programs
+  using the Anchor framework. This tutorial demonstrates how to transfer SOL
+  between accounts, interact with the System Program, and handle Program Derived
+  Addresses (PDAs) in CPIs. Perfect for developers looking to build composable
+  Solana programs.
---

In this section, we'll update the CRUD program from the previous PDA section to
@@ -562,7 +568,7 @@ Running tests...

You can then inspect the SolanaFM links to view the transaction details, where
-you’ll find the CPIs for the transfer instructions within the update and delete
+you'll find the CPIs for the transfer instructions within the update and delete
instructions.

![Update CPI](/assets/docs/intro/quickstart/cpi-update.png)
diff --git a/docs/intro/quick-start/deploying-programs.md b/docs/intro/quick-start/deploying-programs.md
index 0d5dec835..57c78a01d 100644
--- a/docs/intro/quick-start/deploying-programs.md
+++ b/docs/intro/quick-start/deploying-programs.md
@@ -2,6 +2,11 @@ sidebarLabel: Deploying Programs
title: Deploying Your First Solana Program
sidebarSortOrder: 3
+description:
+  Learn how to build, deploy, and test your first Solana program using the
+  Anchor framework and Solana Playground. This beginner-friendly guide walks
+  through creating a simple program, deploying it to devnet, running tests, and
+  closing the program.
--- In this section, we'll build, deploy, and test a simple Solana program using the diff --git a/docs/intro/quick-start/index.md b/docs/intro/quick-start/index.md index fa361a2d7..b582d3258 100644 --- a/docs/intro/quick-start/index.md +++ b/docs/intro/quick-start/index.md @@ -2,6 +2,10 @@ sidebarLabel: Quick Start title: Solana Quick Start Guide sidebarSortOrder: 0 +description: + Learn Solana development basics. Create your first program, understand + accounts, send transactions, and explore PDAs and CPIs using Solana Playground + - no installation required. --- Welcome to the Solana Quick Start Guide! This hands-on guide will introduce you @@ -69,6 +73,19 @@ You should now see your wallet's address, SOL balance, and connected cluster your browser cache will remove your saved wallet.
+Some definitions you may find helpful: + +- _wallet address_: a unique identifier for a digital wallet, used to send or + receive crypto assets on a blockchain. Each wallet address is a string of + alphanumeric characters that represents a specific destination on the network. + Think of it like an email address or bank account number—if someone wants to + send you cryptocurrency, they need your wallet address to direct the funds. +- _connected cluster_: a set of network nodes that work together to maintain a + synchronized copy of the blockchain. These clusters are essential for + providing a decentralized, distributed ledger and powering the Solana network + by validating transactions, securing the chain, and executing programs (smart + contracts). + ### Get Devnet SOL Before we start building, we first need some devnet SOL. diff --git a/docs/intro/quick-start/program-derived-address.md b/docs/intro/quick-start/program-derived-address.md index 49ff4ac33..c5d4ed2cb 100644 --- a/docs/intro/quick-start/program-derived-address.md +++ b/docs/intro/quick-start/program-derived-address.md @@ -2,6 +2,12 @@ sidebarLabel: Program Derived Address title: Program Derived Address sidebarSortOrder: 4 +description: + Learn how to build a CRUD (Create, Read, Update, Delete) Solana program using + Program Derived Addresses (PDAs) and the Anchor framework. This step-by-step + guide demonstrates how to create, update, and delete on-chain message accounts + using PDAs, implement account validation, and write tests. Perfect for + developers looking to understand how to use PDAs in Solana programs. --- In this section, we'll walk through how to build a basic CRUD (Create, Read, @@ -525,7 +531,7 @@ The body of the function then: -Rebuld the program +Rebuild the program ```shell filename="Terminal" build @@ -599,7 +605,7 @@ The `Delete` struct defines the accounts required for the `delete` instruction: -Next, implement the logic for the `update` instruction. +Next, implement the logic for the `delete` instruction. ```rs filename="lib.rs" pub fn delete(_ctx: Context) -> Result<()> { diff --git a/docs/intro/quick-start/reading-from-network.md b/docs/intro/quick-start/reading-from-network.md index 6111d89c2..e65d0abdb 100644 --- a/docs/intro/quick-start/reading-from-network.md +++ b/docs/intro/quick-start/reading-from-network.md @@ -2,6 +2,11 @@ sidebarLabel: Reading from Network title: Reading from Network sidebarSortOrder: 1 +description: + Learn how to read data from the Solana blockchain network. This guide covers + fetching wallet accounts, program accounts, and token mint accounts using + JavaScript/TypeScript, with practical examples using the Solana web3.js + library. --- Now, let's explore how to read data from the Solana network. We'll fetch a few @@ -30,12 +35,12 @@ Account Model. For more details, refer to the ## Fetch Playground Wallet - - Let's start by looking at a familiar account - your own Playground Wallet! We'll fetch this account and examine its structure to understand what a basic Solana account looks like. + + ### Open Example 1 Click this [link](https://beta.solpg.io/6671c5e5cffcf4b13384d198) to open the @@ -124,6 +129,11 @@ Let's break down the fields in the output: - `data` - This field contains what we generally refer to as the account "data". For a wallet, it's empty (0 bytes), but other accounts use this field to store any arbitrary data as a serialized buffer of bytes. 
+ +> When data is "buffered" in this way, it maintains its integrity and can be +> later deserialized back into its original form for use in applications. This +> process is widely used in blockchain for efficient data handling. + - `executable` - A flag that indicates whether the account is an executable program. For wallets and any accounts that store state, this is `false`. - `owner` - This field shows which program controls the account. For wallets, @@ -313,6 +323,14 @@ To read the `data` field from any account, you need to deserialize the data buffer into the expected data type. This is often done using helper functions from client libraries for a particular program. +**Deserialization** is the process of converting data from a stored format (like +raw bytes or JSON) back into a usable, structured format in a program. In +blockchain, it involves taking raw, encoded data from the network and +transforming it back into objects, classes, or readable structures so developers +can access and manipulate specific information within a program. Deserialization +is essential for interpreting account or transaction data received from a +network in a form that a program can process and display meaningfully. + Open this next [example](https://beta.solpg.io/6671cd8acffcf4b13384d19b) in Solana Playground. You'll see this code: diff --git a/docs/intro/quick-start/writing-to-network.md b/docs/intro/quick-start/writing-to-network.md index 4d28602dd..8d3e55154 100644 --- a/docs/intro/quick-start/writing-to-network.md +++ b/docs/intro/quick-start/writing-to-network.md @@ -2,6 +2,10 @@ sidebarLabel: Writing to Network title: Writing to Network sidebarSortOrder: 2 +description: + Learn how to interact with the Solana network by sending transactions and + instructions. Follow step-by-step examples to transfer SOL tokens and create + new tokens using the System Program and Token Extensions Program. --- Now that we've explored reading from the Solana network, let's learn how to diff --git a/docs/intro/wallets.md b/docs/intro/wallets.md index b2f29a098..0973a6de5 100644 --- a/docs/intro/wallets.md +++ b/docs/intro/wallets.md @@ -60,8 +60,8 @@ first will need to create a wallet.** ## Supported Wallets Several browser and mobile app based wallets support Solana. Find some options -that might be right for you on the -[Solana Ecosystem](https://solana.com/ecosystem/explore?categories=wallet) page. +that might be right for you on the [Solana Wallets](https://solana.com/wallets) +page. For advanced users or developers, the [command-line wallets](https://docs.solanalabs.com/cli/wallets) may be more diff --git a/docs/more/exchange.md b/docs/more/exchange.md index 6db344ca8..2aae6ecff 100644 --- a/docs/more/exchange.md +++ b/docs/more/exchange.md @@ -24,7 +24,7 @@ and high TPS. For specific requirements, please see To run an api node: -1. [Install the Solana command-line tool suite](https://docs.solanalabs.com/cli/install) +1. [Install the Solana command-line tool suite](/docs/intro/installation.md) 2. Start the validator with at least the following parameters: ```shell @@ -767,7 +767,7 @@ curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" - ## Prioritization Fees and Compute Units -In periods of high demand, it’s possible for a transaction to expire before a +In periods of high demand, it's possible for a transaction to expire before a validator has included such transactions in their block because they chose other transactions with higher economic value. 
Valid Transactions on Solana may be
delayed or dropped if Prioritization Fees are not implemented properly.
@@ -817,7 +817,7 @@ may only return the lowest fee for each block. This will often be zero, which
is not a fully useful approximation of what Prioritization Fee to use in order
to avoid being rejected by validator nodes.

-The `getRecentPrioritizationFees` API takes accounts’ pubkeys as parameters, and
+The `getRecentPrioritizationFees` API takes accounts' pubkeys as parameters, and
then returns the highest of the minimum prioritization fees for these accounts.
When no account is specified, the API will return the lowest fee to land in a
block, which is usually zero (unless the block is full).
diff --git a/docs/programs/anchor/client-typescript.md b/docs/programs/anchor/client-typescript.md
new file mode 100644
index 000000000..ba5c096ec
--- /dev/null
+++ b/docs/programs/anchor/client-typescript.md
@@ -0,0 +1,354 @@
+---
+title: JS/TS Client
+description:
+  Learn how to use Anchor's TypeScript client library to interact with Solana
+  programs
+sidebarLabel: JS/TS Client
+sidebarSortOrder: 3
+---
+
+Anchor provides a TypeScript client library
+([@coral-xyz/anchor](https://github.com/coral-xyz/anchor/tree/v0.30.1/ts/packages/anchor))
+that simplifies the process of interacting with Solana programs from the client
+in JavaScript or TypeScript.
+
+## Client Program
+
+To use the client library, first create an instance of a
+[`Program`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/index.ts#L58)
+using the [IDL file](/docs/programs/anchor/idl) generated by Anchor.
+
+Creating an instance of the `Program` requires the program's IDL and an
+[`AnchorProvider`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/provider.ts#L55).
+An `AnchorProvider` is an abstraction that combines two things:
+
+- `Connection` - the connection to a [Solana cluster](/docs/core/clusters.md)
+  (i.e. localhost, devnet, mainnet)
+- `Wallet` - (optional) a default wallet used to pay and sign transactions
+
+
+
+
+When integrating with a frontend using the
+[wallet adapter](https://solana.com/developers/guides/wallets/add-solana-wallet-adapter-to-nextjs),
+you'll need to set up the `AnchorProvider` and `Program`.
+
+```ts {9-10, 12-14}
+import { Program, AnchorProvider, setProvider } from "@coral-xyz/anchor";
+import { useAnchorWallet, useConnection } from "@solana/wallet-adapter-react";
+import type { HelloAnchor } from "./idlType";
+import idl from "./idl.json";
+
+const { connection } = useConnection();
+const wallet = useAnchorWallet();
+
+const provider = new AnchorProvider(connection, wallet, {});
+setProvider(provider);
+
+export const program = new Program(idl as HelloAnchor, {
+  connection,
+});
+```
+
+In the code snippet above:
+
+- `idl.json` is the IDL file generated by Anchor, found at
+  `/target/idl/.json` in an Anchor project.
+- `idlType.ts` is the IDL type (for use with TS), found at
+  `/target/types/.ts` in an Anchor project.
+
+Alternatively, you can create an instance of the `Program` using only the IDL
+and the `Connection` to a Solana cluster. This means there is no default
+`Wallet`, but allows you to use the `Program` to fetch accounts or build
+instructions without a connected wallet.
+
+```ts {8-10}
+import { clusterApiUrl, Connection, PublicKey } from "@solana/web3.js";
+import { Program } from "@coral-xyz/anchor";
+import type { HelloAnchor } from "./idlType";
+import idl from "./idl.json";
+
+const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
+
+export const program = new Program(idl as HelloAnchor, {
+  connection,
+});
+```
+
+
+
+Anchor automatically sets up a `Program` instance in the default test file of
+new projects. However, this setup differs from how you'd initialize a `Program`
+outside the Anchor workspace, such as in React or Node.js applications.
+
+```typescript
+import * as anchor from "@coral-xyz/anchor";
+import { Program } from "@coral-xyz/anchor";
+import { HelloAnchor } from "../target/types/hello_anchor";
+
+describe("hello_anchor", () => {
+  // Configure the client to use the local cluster.
+  anchor.setProvider(anchor.AnchorProvider.env());
+
+  const program = anchor.workspace.HelloAnchor as Program<HelloAnchor>;
+
+  it("Is initialized!", async () => {
+    // Add your test here.
+    const tx = await program.methods.initialize().rpc();
+    console.log("Your transaction signature", tx);
+  });
+});
+```
+
+
+
+## Invoke Instructions
+
+Once the `Program` is set up using a program IDL, you can use the Anchor
+[`MethodsBuilder`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/methods.ts#L155)
+to:
+
+- Build individual instructions
+- Build transactions
+- Build and send transactions
+
+The basic format looks like the following:
+
+
+
+
+`program.methods` - This is the builder API for creating instruction calls from
+the program's IDL.
+
+```ts /methods/ {1}
+await program.methods
+  .instructionName(instructionData)
+  .accounts({})
+  .signers([])
+  .rpc();
+```
+
+
+
+Following `.methods`, specify the name of an instruction from the program IDL,
+passing in any required arguments as comma-separated values.
+
+```ts /instructionName/ /instructionData1/ /instructionData2/ {2}
+await program.methods
+  .instructionName(instructionData1, instructionData2)
+  .accounts({})
+  .signers([])
+  .rpc();
+```
+
+
+
+`.accounts` - Pass in the addresses of the accounts required by the instruction,
+as specified in the IDL.
+
+```ts /accounts/ {3}
+await program.methods
+  .instructionName(instructionData)
+  .accounts({})
+  .signers([])
+  .rpc();
+```
+
+Note that certain account addresses don't need to be explicitly provided, as the
+Anchor client can automatically resolve them. These typically include:
+
+- Common accounts (ex. the System Program)
+- Accounts where the address is a PDA (Program Derived Address)
+
+
+
+`.signers` - Optionally pass in an array of keypairs required as additional
+signers by the instruction. This is commonly used when creating new accounts
+where the account address is the public key of a newly generated keypair.
+
+```ts /signers/ {4}
+await program.methods
+  .instructionName(instructionData)
+  .accounts({})
+  .signers([])
+  .rpc();
+```
+
+Note that `.signers` should only be used when also using `.rpc()`. When using
+`.transaction()` or `.instruction()`, signers should be added to the transaction
+before sending.
+ + + + +Anchor provides multiple methods for building program instructions: + + + + + +The +[`rpc()`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/methods.ts#L283) +method +[sends a signed transaction](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/rpc.ts#L29) +with the specified instruction and returns a `TransactionSignature`. + +When using `.rpc`, the `Wallet` from the `Provider` is automatically included as +a signer. + +```ts {13} +// Generate keypair for the new account +const newAccountKp = new Keypair(); + +const data = new BN(42); +const transactionSignature = await program.methods + .initialize(data) + .accounts({ + newAccount: newAccountKp.publicKey, + signer: wallet.publicKey, + systemProgram: SystemProgram.programId, + }) + .signers([newAccountKp]) + .rpc(); +``` + + + + +The +[`transaction()`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/methods.ts#L382) +method +[builds a `Transaction`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/transaction.ts#L18-L26) +with the specified instruction without sending the transaction. + +```ts {12} /transaction()/1,2,4 +// Generate keypair for the new account +const newAccountKp = new Keypair(); + +const data = new BN(42); +const transaction = await program.methods + .initialize(data) + .accounts({ + newAccount: newAccountKp.publicKey, + signer: wallet.publicKey, + systemProgram: SystemProgram.programId, + }) + .transaction(); + +const transactionSignature = await connection.sendTransaction(transaction, [ + wallet.payer, + newAccountKp, +]); +``` + + + + +The +[`instruction()`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/methods.ts#L348) +method +[builds a `TransactionInstruction`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/instruction.ts#L57-L61) +using the specified instruction. This is useful if you want to manually add the +instruction to a transaction and combine it with other instructions. + +```ts {12} /instruction()/ +// Generate keypair for the new account +const newAccountKp = new Keypair(); + +const data = new BN(42); +const instruction = await program.methods + .initialize(data) + .accounts({ + newAccount: newAccountKp.publicKey, + signer: wallet.publicKey, + systemProgram: SystemProgram.programId, + }) + .instruction(); + +const transaction = new Transaction().add(instruction); + +const transactionSignature = await connection.sendTransaction(transaction, [ + wallet.payer, + newAccountKp, +]); +``` + + + + +## Fetch Accounts + +The `Program` client simplifies the process of fetching and deserializing +accounts created by your Anchor program. + +Use `program.account` followed by the name of the account type defined in the +IDL. Anchor provides multiple methods for fetching accounts. + + + + + +Use +[`all()`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/account.ts#L251) +to fetch all existing accounts for a specific account type. + +```ts /all/ +const accounts = await program.account.newAccount.all(); +``` + + + + +Use `memcmp` (memory compare) to filter for account data that matches a specific +value at a specific offset. Using `memcmp` requires you to understand the byte +layout of the data field for the account type you are fetching. 
+
+When calculating the offset, remember that the first 8 bytes in accounts created
+by an Anchor program are reserved for the account discriminator.
+
+```ts /memcmp/
+const accounts = await program.account.newAccount.all([
+  {
+    memcmp: {
+      offset: 8,
+      bytes: "",
+    },
+  },
+]);
+```
+
+
+
+Use
+[`fetch()`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/account.ts#L165)
+to fetch the account data for a single account.
+
+```ts /fetch/
+const account = await program.account.newAccount.fetch(ACCOUNT_ADDRESS);
+```
+
+
+
+Use
+[`fetchMultiple()`](https://github.com/coral-xyz/anchor/blob/v0.30.1/ts/packages/anchor/src/program/namespace/account.ts#L200)
+to fetch the account data for multiple accounts by passing in an array of
+account addresses.
+
+```ts /fetchMultiple/
+const accounts = await program.account.newAccount.fetchMultiple([
+  ACCOUNT_ADDRESS_ONE,
+  ACCOUNT_ADDRESS_TWO,
+]);
+```
+
+
diff --git a/docs/programs/anchor/cpi.md b/docs/programs/anchor/cpi.md
new file mode 100644
index 000000000..092b9481f
--- /dev/null
+++ b/docs/programs/anchor/cpi.md
@@ -0,0 +1,551 @@
+---
+title: CPIs with Anchor
+description:
+  Learn how to implement Cross Program Invocations (CPIs) in Anchor programs,
+  enabling interaction between different programs on Solana
+sidebarLabel: CPIs with Anchor
+sidebarSortOrder: 5
+---
+
+[Cross Program Invocations (CPI)](/docs/core/cpi.md) refer to the process of one
+program invoking instructions of another program, which enables the
+composability of programs on Solana.
+
+This section will cover the basics of implementing CPIs in an Anchor program,
+using a simple SOL transfer instruction as a practical example. Once you
+understand the basics of how to implement a CPI, you can apply the same concepts
+to any instruction.
+
+## Cross Program Invocations
+
+Let's examine a program that implements a CPI to the System Program's transfer
+instruction. Here is the example program on
+[Solana Playground](https://beta.solpg.io/66df2751cffcf4b13384d35a).
+
+The `lib.rs` file includes a single `sol_transfer` instruction. When the
+`sol_transfer` instruction on the Anchor program is invoked, the program
+internally invokes the transfer instruction of the System Program.
+
+```rs filename="lib.rs" /sol_transfer/ /transfer/ {23}
+use anchor_lang::prelude::*;
+use anchor_lang::system_program::{transfer, Transfer};
+
+declare_id!("9AvUNHjxscdkiKQ8tUn12QCMXtcnbR9BVGq3ULNzFMRi");
+
+#[program]
+pub mod cpi {
+    use super::*;
+
+    pub fn sol_transfer(ctx: Context<SolTransfer>, amount: u64) -> Result<()> {
+        let from_pubkey = ctx.accounts.sender.to_account_info();
+        let to_pubkey = ctx.accounts.recipient.to_account_info();
+        let program_id = ctx.accounts.system_program.to_account_info();
+
+        let cpi_context = CpiContext::new(
+            program_id,
+            Transfer {
+                from: from_pubkey,
+                to: to_pubkey,
+            },
+        );
+
+        transfer(cpi_context, amount)?;
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct SolTransfer<'info> {
+    #[account(mut)]
+    sender: Signer<'info>,
+    #[account(mut)]
+    recipient: SystemAccount<'info>,
+    system_program: Program<'info, System>,
+}
+```
+
+The `cpi.test.ts` file shows how to invoke the Anchor program's `sol_transfer`
+instruction and logs a link to the transaction details on SolanaFM.
+ +```ts filename="cpi.test.ts" +it("SOL Transfer Anchor", async () => { + const transactionSignature = await program.methods + .solTransfer(new BN(transferAmount)) + .accounts({ + sender: sender.publicKey, + recipient: recipient.publicKey, + }) + .rpc(); + + console.log( + `\nTransaction Signature:` + + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, + ); +}); +``` + +You can build, deploy, and run the test for this example on Playground to view +the transaction details on the [SolanaFM explorer](https://solana.fm/). + +The transaction details will show that the Anchor program was first invoked +(instruction 1), which then invokes the System Program (instruction 1.1), +resulting in a successful SOL transfer. + +![Transaction Details](/assets/docs/core/cpi/transaction-details.png) + +### Example 1 Explanation + +Implementing a CPI follows the same pattern as building an instruction to add to +a transaction. When implementing a CPI, we must specify the program ID, +accounts, and instruction data for the instruction being called. + +The System Program's transfer instruction requires two accounts: + +- `from`: The account sending SOL. +- `to`: The account receiving SOL. + +In the example program, the `SolTransfer` struct specifies the accounts required +by the transfer instruction. The System Program is also included because the CPI +invokes the System Program. + +```rust /sender/ /recipient/ /system_program/ +#[derive(Accounts)] +pub struct SolTransfer<'info> { + #[account(mut)] + sender: Signer<'info>, // from account + #[account(mut)] + recipient: SystemAccount<'info>, // to account + system_program: Program<'info, System>, // program ID +} +``` + +The following tabs present three approaches to implementing Cross Program +Invocations (CPIs), each at a different level of abstraction. All examples are +functionally equivalent. The main purpose is to illustrate the implementation +details of the CPI. + + + + + +The `sol_transfer` instruction included in the example code shows a typical +approach for constructing CPIs using the Anchor framework. + +This approach involves creating a +[`CpiContext`](https://docs.rs/anchor-lang/latest/anchor_lang/context/struct.CpiContext.html), +which includes the `program_id` and accounts required for the instruction being +called, followed by a helper function (`transfer`) to invoke a specific +instruction. + +```rust +use anchor_lang::system_program::{transfer, Transfer}; +``` + +```rust /cpi_context/ {14} +pub fn sol_transfer(ctx: Context, amount: u64) -> Result<()> { + let from_pubkey = ctx.accounts.sender.to_account_info(); + let to_pubkey = ctx.accounts.recipient.to_account_info(); + let program_id = ctx.accounts.system_program.to_account_info(); + + let cpi_context = CpiContext::new( + program_id, + Transfer { + from: from_pubkey, + to: to_pubkey, + }, + ); + + transfer(cpi_context, amount)?; + Ok(()) +} +``` + +The `cpi_context` variable specifies the program ID (System Program) and +accounts (sender and recipient) required by the transfer instruction. + +```rust /program_id/ /from_pubkey/ /to_pubkey/ +let cpi_context = CpiContext::new( + program_id, + Transfer { + from: from_pubkey, + to: to_pubkey, + }, +); +``` + +The `cpi_context` and `amount` are then passed into the `transfer` function to +execute the CPI invoking the transfer instruction of the System Program. 
+
+```rust
+transfer(cpi_context, amount)?;
+```
+
+
+
+This example shows a different approach to implementing a CPI using the `invoke`
+function and
+[`system_instruction::transfer`](https://github.com/solana-labs/solana/blob/27eff8408b7223bb3c4ab70523f8a8dca3ca6645/sdk/program/src/system_instruction.rs#L881),
+which is generally seen in native Rust programs.
+
+Under the hood, the previous example is an abstraction of this implementation.
+The example below is functionally equivalent to the previous example.
+
+```rust
+use anchor_lang::solana_program::{program::invoke, system_instruction};
+```
+
+```rust /instruction/1,3 {9}
+pub fn sol_transfer(ctx: Context<SolTransfer>, amount: u64) -> Result<()> {
+    let from_pubkey = ctx.accounts.sender.to_account_info();
+    let to_pubkey = ctx.accounts.recipient.to_account_info();
+    let program_id = ctx.accounts.system_program.to_account_info();
+
+    let instruction =
+        &system_instruction::transfer(&from_pubkey.key(), &to_pubkey.key(), amount);
+
+    invoke(instruction, &[from_pubkey, to_pubkey, program_id])?;
+    Ok(())
+}
+```
+
+
+
+You can also manually build the instruction to pass into the `invoke()`
+function. This is useful when there is no crate available to help build the
+instruction you want to invoke. This approach requires you to specify the
+`AccountMeta`s for the instruction and correctly create the instruction data
+buffer.
+
+The `sol_transfer` instruction below is a manual implementation of a CPI to the
+System Program's transfer instruction.
+
+```rust /instruction/10,13 {28}
+pub fn sol_transfer(ctx: Context<SolTransfer>, amount: u64) -> Result<()> {
+    let from_pubkey = ctx.accounts.sender.to_account_info();
+    let to_pubkey = ctx.accounts.recipient.to_account_info();
+    let program_id = ctx.accounts.system_program.to_account_info();
+
+    // Prepare instruction AccountMetas
+    let account_metas = vec![
+        AccountMeta::new(from_pubkey.key(), true),
+        AccountMeta::new(to_pubkey.key(), false),
+    ];
+
+    // SOL transfer instruction discriminator
+    let instruction_discriminator: u32 = 2;
+
+    // Prepare instruction data
+    let mut instruction_data = Vec::with_capacity(4 + 8);
+    instruction_data.extend_from_slice(&instruction_discriminator.to_le_bytes());
+    instruction_data.extend_from_slice(&amount.to_le_bytes());
+
+    // Create instruction
+    let instruction = Instruction {
+        program_id: program_id.key(),
+        accounts: account_metas,
+        data: instruction_data,
+    };
+
+    // Invoke instruction
+    invoke(&instruction, &[from_pubkey, to_pubkey, program_id])?;
+    Ok(())
+}
+```
+
+The `sol_transfer` instruction above replicates this
+[example](/docs/core/transactions.md#manual-sol-transfer) of manually building a
+SOL transfer instruction. It follows the same pattern as building an
+[instruction](/docs/core/transactions.md#instruction) to add to a transaction.
+
+When building an instruction in Rust, use the following syntax to specify the
+`AccountMeta` for each account:
+
+```rust
+AccountMeta::new(account1_pubkey, true),           // writable, signer
+AccountMeta::new(account2_pubkey, false),          // writable, not signer
+AccountMeta::new_readonly(account3_pubkey, false), // not writable, not signer
+AccountMeta::new_readonly(account4_pubkey, true),  // not writable, signer
+```
+
+
+
+Here is a reference program on
+[Solana Playground](https://beta.solpg.io/github.com/ZYJLiu/doc-examples/tree/main/cpi)
+which includes all 3 examples.
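+
+For comparison, the same byte layout can be built client-side. The following is
+a hedged TypeScript sketch (assuming `@solana/web3.js` in a Node.js environment,
+with freshly generated placeholder keypairs) of manually constructing the raw
+System Program transfer instruction:
+
+```ts
+import {
+  Keypair,
+  SystemProgram,
+  TransactionInstruction,
+} from "@solana/web3.js";
+
+// Placeholder addresses for illustration only
+const fromPubkey = Keypair.generate().publicKey;
+const toPubkey = Keypair.generate().publicKey;
+
+// Instruction data: u32 index (2 = transfer) followed by the u64 lamport
+// amount, both little-endian, mirroring the Rust example above
+const data = Buffer.alloc(4 + 8);
+data.writeUInt32LE(2, 0);
+data.writeBigUInt64LE(1_000_000n, 4); // 1_000_000 lamports = 0.001 SOL
+
+const instruction = new TransactionInstruction({
+  programId: SystemProgram.programId,
+  keys: [
+    { pubkey: fromPubkey, isSigner: true, isWritable: true },
+    { pubkey: toPubkey, isSigner: false, isWritable: true },
+  ],
+  data,
+});
+```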
+ +## Cross Program Invocations with PDA Signers + +Next, let's examine a program that implements a CPI to the System Program's +transfer instruction where the sender is a Program Derived Address (PDA) that +must be "signed" for by the program. Here is the example program on +[Solana Playground](https://beta.solpg.io/66df2bd2cffcf4b13384d35b). + +The `lib.rs` file includes the following program with a single `sol_transfer` +instruction. + +```rust filename="lib.rs" +use anchor_lang::prelude::*; +use anchor_lang::system_program::{transfer, Transfer}; + +declare_id!("3455LkCS85a4aYmSeNbRrJsduNQfYRY82A7eCD3yQfyR"); + +#[program] +pub mod cpi { + use super::*; + + pub fn sol_transfer(ctx: Context, amount: u64) -> Result<()> { + let from_pubkey = ctx.accounts.pda_account.to_account_info(); + let to_pubkey = ctx.accounts.recipient.to_account_info(); + let program_id = ctx.accounts.system_program.to_account_info(); + + let seed = to_pubkey.key(); + let bump_seed = ctx.bumps.pda_account; + let signer_seeds: &[&[&[u8]]] = &[&[b"pda", seed.as_ref(), &[bump_seed]]]; + + let cpi_context = CpiContext::new( + program_id, + Transfer { + from: from_pubkey, + to: to_pubkey, + }, + ) + .with_signer(signer_seeds); + + transfer(cpi_context, amount)?; + Ok(()) + } +} + +#[derive(Accounts)] +pub struct SolTransfer<'info> { + #[account( + mut, + seeds = [b"pda", recipient.key().as_ref()], + bump, + )] + pda_account: SystemAccount<'info>, + #[account(mut)] + recipient: SystemAccount<'info>, + system_program: Program<'info, System>, +} +``` + +The `cpi.test.ts` file shows how to invoke the Anchor program's `sol_transfer` +instruction and logs a link to the transaction details on SolanaFM. + +It shows how to derive the PDA using the seeds specified in the program: + +```ts /pda/ /wallet.publicKey/ +const [PDA] = PublicKey.findProgramAddressSync( + [Buffer.from("pda"), wallet.publicKey.toBuffer()], + program.programId, +); +``` + +The first step in this example is to fund the PDA account with a basic SOL +transfer from the Playground wallet. + +```ts filename="cpi.test.ts" +it("Fund PDA with SOL", async () => { + const transferInstruction = SystemProgram.transfer({ + fromPubkey: wallet.publicKey, + toPubkey: PDA, + lamports: transferAmount, + }); + + const transaction = new Transaction().add(transferInstruction); + + const transactionSignature = await sendAndConfirmTransaction( + connection, + transaction, + [wallet.payer], // signer + ); + + console.log( + `\nTransaction Signature:` + + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, + ); +}); +``` + +Once the PDA is funded with SOL, invoke the `sol_transfer` instruction. This +instruction transfers SOL from the PDA account back to the `wallet` account via +a CPI to the System Program, which is "signed" for by the program. + +```ts +it("SOL Transfer with PDA signer", async () => { + const transactionSignature = await program.methods + .solTransfer(new BN(transferAmount)) + .accounts({ + pdaAccount: PDA, + recipient: wallet.publicKey, + }) + .rpc(); + + console.log( + `\nTransaction Signature: https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, + ); +}); +``` + +You can build, deploy, and run the test to view the transaction details on the +[SolanaFM explorer](https://solana.fm/). + +The transaction details will show that the custom program was first invoked +(instruction 1), which then invokes the System Program (instruction 1.1), +resulting in a successful SOL transfer. 
+ +![Transaction Details](/assets/docs/core/cpi/transaction-details-pda.png) + +### Example 2 Explanation + +In the example code, the `SolTransfer` struct specifies the accounts required by +the transfer instruction. + +The sender is a PDA that the program must sign for. The `seeds` to derive the +address for the `pda_account` include the hardcoded string "pda" and the address +of the `recipient` account. This means the address for the `pda_account` is +unique for each `recipient`. + +```rust /pda_account/ /recipient/2 /system_program/ +#[derive(Accounts)] +pub struct SolTransfer<'info> { + #[account( + mut, + seeds = [b"pda", recipient.key().as_ref()], + bump, + )] + pda_account: SystemAccount<'info>, + #[account(mut)] + recipient: SystemAccount<'info>, + system_program: Program<'info, System>, +} +``` + +The Javascript equivalent to derive the PDA is included in the test file. + +```ts /pda/ /wallet.publicKey/ +const [PDA] = PublicKey.findProgramAddressSync( + [Buffer.from("pda"), wallet.publicKey.toBuffer()], + program.programId, +); +``` + +The following tabs present two approaches to implementing Cross Program +Invocations (CPIs), each at a different level of abstraction. Both examples are +functionally equivalent. The main purpose is to illustrate the implementation +details of the CPI. + + + + + +The `sol_transfer` instruction included in the example code shows a typical +approach for constructing CPIs using the Anchor framework. + +This approach involves creating a +[`CpiContext`](https://docs.rs/anchor-lang/latest/anchor_lang/context/struct.CpiContext.html), +which includes the `program_id` and accounts required for the instruction being +called, followed by a helper function (`transfer`) to invoke a specific +instruction. + +```rust /cpi_context/ {19} +pub fn sol_transfer(ctx: Context, amount: u64) -> Result<()> { + let from_pubkey = ctx.accounts.pda_account.to_account_info(); + let to_pubkey = ctx.accounts.recipient.to_account_info(); + let program_id = ctx.accounts.system_program.to_account_info(); + + let seed = to_pubkey.key(); + let bump_seed = ctx.bumps.pda_account; + let signer_seeds: &[&[&[u8]]] = &[&[b"pda", seed.as_ref(), &[bump_seed]]]; + + let cpi_context = CpiContext::new( + program_id, + Transfer { + from: from_pubkey, + to: to_pubkey, + }, + ) + .with_signer(signer_seeds); + + transfer(cpi_context, amount)?; + Ok(()) +} +``` + +When signing with PDAs, the seeds and bump seed are included in the +`cpi_context` as `signer_seeds` using `with_signer()`. The bump seed for a PDA +can be accessed using `ctx.bumps` followed by the name of the PDA account. + +```rust /signer_seeds/ /bump_seed/ {3} +let seed = to_pubkey.key(); +let bump_seed = ctx.bumps.pda_account; +let signer_seeds: &[&[&[u8]]] = &[&[b"pda", seed.as_ref(), &[bump_seed]]]; + +let cpi_context = CpiContext::new( + program_id, + Transfer { + from: from_pubkey, + to: to_pubkey, + }, +) +.with_signer(signer_seeds); +``` + +The `cpi_context` and `amount` are then passed into the `transfer` function to +execute the CPI. + +```rust +transfer(cpi_context, amount)?; +``` + +When the CPI is processed, the Solana runtime will validate that the provided +seeds and caller program ID derive a valid PDA. The PDA is then added as a +signer on the invocation. This mechanism allows for programs to sign for PDAs +that are derived from their program ID. 
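+
+As a client-side illustration of this check (a sketch reusing the `wallet` and
+`program` values from the test setup above), you can re-derive the same PDA
+with the explicit bump:
+
+```ts
+import { PublicKey } from "@solana/web3.js";
+
+// Find the canonical PDA and bump from the same seeds used on-chain
+const [pda, bump] = PublicKey.findProgramAddressSync(
+  [Buffer.from("pda"), wallet.publicKey.toBuffer()],
+  program.programId,
+);
+
+// Re-deriving with the explicit bump mirrors the check the runtime performs
+// against the seeds the program passes to `with_signer`
+const rederived = PublicKey.createProgramAddressSync(
+  [Buffer.from("pda"), wallet.publicKey.toBuffer(), Buffer.from([bump])],
+  program.programId,
+);
+
+console.log(rederived.equals(pda)); // true
+```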
+
+
+
+
+Under the hood, the previous example is a wrapper around the `invoke_signed()`
+function which uses
+[`system_instruction::transfer`](https://github.com/solana-labs/solana/blob/27eff8408b7223bb3c4ab70523f8a8dca3ca6645/sdk/program/src/system_instruction.rs#L881)
+to build the instruction.
+
+This example shows how to use the `invoke_signed()` function to make a CPI
+signed for by a PDA.
+
+```rust
+use anchor_lang::solana_program::{program::invoke_signed, system_instruction};
+```
+
+```rust /instruction/1,3 {13}
+pub fn sol_transfer(ctx: Context<SolTransfer>, amount: u64) -> Result<()> {
+    let from_pubkey = ctx.accounts.pda_account.to_account_info();
+    let to_pubkey = ctx.accounts.recipient.to_account_info();
+    let program_id = ctx.accounts.system_program.to_account_info();
+
+    let seed = to_pubkey.key();
+    let bump_seed = ctx.bumps.pda_account;
+    let signer_seeds: &[&[&[u8]]] = &[&[b"pda", seed.as_ref(), &[bump_seed]]];
+
+    let instruction =
+        &system_instruction::transfer(&from_pubkey.key(), &to_pubkey.key(), amount);
+
+    invoke_signed(instruction, &[from_pubkey, to_pubkey, program_id], signer_seeds)?;
+    Ok(())
+}
+```
+
+This implementation is functionally equivalent to the previous example. The
+`signer_seeds` are passed into the `invoke_signed` function.
+
+
+
+
+Here is a reference program on
+[Solana Playground](https://beta.solpg.io/github.com/ZYJLiu/doc-examples/tree/main/cpi-pda)
+which includes both examples.
diff --git a/docs/programs/anchor/idl.md b/docs/programs/anchor/idl.md
new file mode 100644
index 000000000..79e42c8f7
--- /dev/null
+++ b/docs/programs/anchor/idl.md
@@ -0,0 +1,516 @@
+---
+title: IDL File
+description:
+  Learn about the Interface Definition Language (IDL) file in Anchor, its
+  purpose, benefits, and how it simplifies program-client interactions
+sidebarLabel: IDL File
+sidebarSortOrder: 2
+---
+
+An Interface Definition Language (IDL) file provides a standardized JSON
+description of a program's instructions and accounts. This file simplifies the
+process of integrating your on-chain program with client applications.
+
+Key Benefits of the IDL:
+
+- Standardization: Provides a consistent format for describing the program's
+  instructions and accounts
+- Client Generation: Used to generate client code to interact with the program
+
+The `anchor build` command generates an IDL file located at
+`/target/idl/<program-name>.json`.
+
+The code snippets below highlight how the program, IDL, and client relate to
+each other.
+
+## Program Instructions
+
+The `instructions` array in the IDL corresponds directly to the instructions
+defined in your program. It specifies the required accounts and parameters for
+each instruction.
+
+
+
+
+
+The program below includes an `initialize` instruction, specifying the accounts
+and parameters it requires.
+
+```rust {8-12, 15-22}
+use anchor_lang::prelude::*;
+
+declare_id!("BYFW1vhC1ohxwRbYoLbAWs86STa25i9sD5uEusVjTYNd");
+
+#[program]
+mod hello_anchor {
+    use super::*;
+    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
+        ctx.accounts.new_account.data = data;
+        msg!("Changed data to: {}!", data);
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(init, payer = signer, space = 8 + 8)]
+    pub new_account: Account<'info, NewAccount>,
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
+
+#[account]
+pub struct NewAccount {
+    data: u64,
+}
+```
+
+
+
+
+The generated IDL file includes the instruction in a standardized JSON format,
+including its name, accounts, arguments, and discriminator.
+
+```json filename="JSON" {11-12, 14-27, 30-33}
+{
+  "address": "BYFW1vhC1ohxwRbYoLbAWs86STa25i9sD5uEusVjTYNd",
+  "metadata": {
+    "name": "hello_anchor",
+    "version": "0.1.0",
+    "spec": "0.1.0",
+    "description": "Created with Anchor"
+  },
+  "instructions": [
+    {
+      "name": "initialize",
+      "discriminator": [175, 175, 109, 31, 13, 152, 155, 237],
+      "accounts": [
+        {
+          "name": "new_account",
+          "writable": true,
+          "signer": true
+        },
+        {
+          "name": "signer",
+          "writable": true,
+          "signer": true
+        },
+        {
+          "name": "system_program",
+          "address": "11111111111111111111111111111111"
+        }
+      ],
+      "args": [
+        {
+          "name": "data",
+          "type": "u64"
+        }
+      ]
+    }
+  ],
+  "accounts": [
+    {
+      "name": "NewAccount",
+      "discriminator": [176, 95, 4, 118, 91, 177, 125, 232]
+    }
+  ],
+  "types": [
+    {
+      "name": "NewAccount",
+      "type": {
+        "kind": "struct",
+        "fields": [
+          {
+            "name": "data",
+            "type": "u64"
+          }
+        ]
+      }
+    }
+  ]
+}
+```
+
+
+
+
+The IDL file is then used to generate a client for interacting with the program,
+simplifying the process of invoking the program instruction.
+
+```ts {19-26}
+import * as anchor from "@coral-xyz/anchor";
+import { Program, BN } from "@coral-xyz/anchor";
+import { HelloAnchor } from "../target/types/hello_anchor";
+import { Keypair } from "@solana/web3.js";
+import assert from "assert";
+
+describe("hello_anchor", () => {
+  const provider = anchor.AnchorProvider.env();
+  anchor.setProvider(provider);
+  const wallet = provider.wallet as anchor.Wallet;
+  const program = anchor.workspace.HelloAnchor as Program<HelloAnchor>;
+
+  it("initialize", async () => {
+    // Generate keypair for the new account
+    const newAccountKp = new Keypair();
+
+    // Send transaction
+    const data = new BN(42);
+    const transactionSignature = await program.methods
+      .initialize(data)
+      .accounts({
+        newAccount: newAccountKp.publicKey,
+        signer: wallet.publicKey,
+      })
+      .signers([newAccountKp])
+      .rpc();
+
+    // Fetch the created account
+    const newAccount = await program.account.newAccount.fetch(
+      newAccountKp.publicKey,
+    );
+
+    console.log("Transaction signature: ", transactionSignature);
+    console.log("On-chain data is:", newAccount.data.toString());
+    assert(data.eq(newAccount.data));
+  });
+});
+```
+
+
+
+
+## Program Accounts
+
+The `accounts` array in the IDL corresponds to the structs in a program
+annotated with the `#[account]` macro. These structs define the data stored in
+accounts created by the program.
+
+
+
+
+
+The program below defines a `NewAccount` struct with a single `data` field of
+type `u64`.
+
+```rust {24-27}
+use anchor_lang::prelude::*;
+
+declare_id!("BYFW1vhC1ohxwRbYoLbAWs86STa25i9sD5uEusVjTYNd");
+
+#[program]
+mod hello_anchor {
+    use super::*;
+    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
+        ctx.accounts.new_account.data = data;
+        msg!("Changed data to: {}!", data);
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(init, payer = signer, space = 8 + 8)]
+    pub new_account: Account<'info, NewAccount>,
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
+
+#[account]
+pub struct NewAccount {
+    data: u64,
+}
+```
+
+
+
+
+The generated IDL file includes the account in a standardized JSON format,
+including its name, discriminator, and fields.
+
+```json filename="JSON" {39-40, 45-54}
+{
+  "address": "BYFW1vhC1ohxwRbYoLbAWs86STa25i9sD5uEusVjTYNd",
+  "metadata": {
+    "name": "hello_anchor",
+    "version": "0.1.0",
+    "spec": "0.1.0",
+    "description": "Created with Anchor"
+  },
+  "instructions": [
+    {
+      "name": "initialize",
+      "discriminator": [175, 175, 109, 31, 13, 152, 155, 237],
+      "accounts": [
+        {
+          "name": "new_account",
+          "writable": true,
+          "signer": true
+        },
+        {
+          "name": "signer",
+          "writable": true,
+          "signer": true
+        },
+        {
+          "name": "system_program",
+          "address": "11111111111111111111111111111111"
+        }
+      ],
+      "args": [
+        {
+          "name": "data",
+          "type": "u64"
+        }
+      ]
+    }
+  ],
+  "accounts": [
+    {
+      "name": "NewAccount",
+      "discriminator": [176, 95, 4, 118, 91, 177, 125, 232]
+    }
+  ],
+  "types": [
+    {
+      "name": "NewAccount",
+      "type": {
+        "kind": "struct",
+        "fields": [
+          {
+            "name": "data",
+            "type": "u64"
+          }
+        ]
+      }
+    }
+  ]
+}
+```
+
+
+
+
+The IDL file is then used to generate a client for interacting with the program,
+simplifying the process of fetching and deserializing account data.
+
+```ts {29-31}
+import * as anchor from "@coral-xyz/anchor";
+import { Program, BN } from "@coral-xyz/anchor";
+import { HelloAnchor } from "../target/types/hello_anchor";
+import { Keypair } from "@solana/web3.js";
+import assert from "assert";
+
+describe("hello_anchor", () => {
+  const provider = anchor.AnchorProvider.env();
+  anchor.setProvider(provider);
+  const wallet = provider.wallet as anchor.Wallet;
+  const program = anchor.workspace.HelloAnchor as Program<HelloAnchor>;
+
+  it("initialize", async () => {
+    // Generate keypair for the new account
+    const newAccountKp = new Keypair();
+
+    // Send transaction
+    const data = new BN(42);
+    const transactionSignature = await program.methods
+      .initialize(data)
+      .accounts({
+        newAccount: newAccountKp.publicKey,
+        signer: wallet.publicKey,
+      })
+      .signers([newAccountKp])
+      .rpc();
+
+    // Fetch the created account
+    const newAccount = await program.account.newAccount.fetch(
+      newAccountKp.publicKey,
+    );
+
+    console.log("Transaction signature: ", transactionSignature);
+    console.log("On-chain data is:", newAccount.data.toString());
+    assert(data.eq(newAccount.data));
+  });
+});
+```
+
+
+
+
+## Discriminators
+
+Anchor assigns a unique 8 byte discriminator to each instruction and account
+type in a program. These discriminators serve as identifiers to distinguish
+between different instructions or account types.
+
+The discriminator is generated using the first 8 bytes of the Sha256 hash of a
+prefix combined with the instruction or account name. As of Anchor v0.30, these
+discriminators are included in the IDL file.
+
+Note that when working with Anchor, you typically won't need to interact
+directly with these discriminators.
This section is primarily to provide context
+on how the discriminator is generated and used.
+
+
+
+
+
+The instruction discriminator is used by the program to determine which specific
+instruction to execute when called.
+
+When an Anchor program instruction is invoked, the discriminator is included as
+the first 8 bytes of the instruction data. This is done automatically by the
+Anchor client.
+
+```json filename="IDL" {4}
+  "instructions": [
+    {
+      "name": "initialize",
+      "discriminator": [175, 175, 109, 31, 13, 152, 155, 237],
+      ...
+    }
+  ]
+```
+
+The discriminator for an instruction is the first 8 bytes of the Sha256 hash of
+the prefix `global` plus the instruction name.
+
+For example:
+
+```
+sha256("global:initialize")
+```
+
+Hexadecimal output:
+
+```
+af af 6d 1f 0d 98 9b ed d4 6a 95 07 32 81 ad c2 1b b5 e0 e1 d7 73 b2 fb bd 7a b5 04 cd d4 aa 30
+```
+
+The first 8 bytes are used as the discriminator for the instruction.
+
+```
+af = 175
+af = 175
+6d = 109
+1f = 31
+0d = 13
+98 = 152
+9b = 155
+ed = 237
+```
+
+You can find the implementation of the discriminator generation in the Anchor
+codebase
+[here](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/syn/src/codegen/program/common.rs#L5-L19),
+which is used
+[here](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/syn/src/codegen/program/instruction.rs#L27).
+
+
+
+
+The account discriminator is used to identify the specific account type when
+deserializing on-chain data and is set when the account is created.
+
+```json filename="IDL" {4}
+  "accounts": [
+    {
+      "name": "NewAccount",
+      "discriminator": [176, 95, 4, 118, 91, 177, 125, 232]
+    }
+  ]
+```
+
+The discriminator for an account is the first 8 bytes of the Sha256 hash of the
+prefix `account` plus the account name.
+
+For example:
+
+```
+sha256("account:NewAccount")
+```
+
+Hexadecimal output:
+
+```
+b0 5f 04 76 5b b1 7d e8 a1 93 57 2a d3 5e b1 ae e5 f0 69 e2 09 7e 5c d2 64 56 55 2a cb 4a e9 57
+```
+
+The first 8 bytes are used as the discriminator for the account.
+
+```
+b0 = 176
+5f = 95
+04 = 4
+76 = 118
+5b = 91
+b1 = 177
+7d = 125
+e8 = 232
+```
+
+You can find the implementation of the discriminator generation in the Anchor
+codebase
+[here](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/attribute/account/src/lib.rs#L101-L117).
+
+Note that different programs using identical account names will generate the
+same discriminator. When deserializing account data, Anchor programs will also
+check an account is owned by the expected program for a specified account type.
+
+
+
+
+The event discriminator is used to identify the specific event type when
+deserializing on-chain data on event emission.
+
+```json filename="IDL" {4}
+  "events": [
+    {
+      "name": "NewEvent",
+      "discriminator": [113, 21, 185, 70, 164, 99, 232, 201]
+    }
+  ]
+```
+
+The discriminator for an event is the first 8 bytes of the Sha256 hash of the
+prefix `event` plus the event name.
+
+For example:
+
+```
+sha256("event:NewEvent")
+```
+
+Hexadecimal output:
+
+```
+71 15 b9 46 a4 63 e8 c9 2a 3c 4d 83 87 16 cd 9b 66 28 cb e2 cb 7c 5d 70 59 f3 42 2b dc 35 03 53
+```
+
+The first 8 bytes are used as the discriminator for the event.
+
+Hex to decimal gives us:
+
+```
+71 = 113
+15 = 21
+b9 = 185
+46 = 70
+a4 = 164
+63 = 99
+e8 = 232
+c9 = 201
+```
+
+You can find the implementation of the discriminator generation in the Anchor
+codebase
+[here](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/attribute/event/src/lib.rs#L23-L27).
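+
+As an illustrative sketch (using Node's built-in `crypto` module, which is an
+assumption here and not part of the Anchor tooling shown above), the same
+derivation can be reproduced in a few lines:
+
+```ts
+import { createHash } from "crypto";
+
+// First 8 bytes of sha256("event:NewEvent") give the event discriminator.
+const discriminator = createHash("sha256")
+  .update("event:NewEvent")
+  .digest()
+  .subarray(0, 8);
+
+console.log([...discriminator]); // [113, 21, 185, 70, 164, 99, 232, 201]
+```
+
+The same recipe applies to instructions (`global:<name>`) and accounts
+(`account:<name>`).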
+
+Note that different programs using identical event names will generate the same
+discriminator. When deserializing event data, Anchor programs will also check an
+event is owned by the expected program for a specified event type.
+
+
+
diff --git a/docs/programs/anchor/index.md b/docs/programs/anchor/index.md
new file mode 100644
index 000000000..2f66ecd0e
--- /dev/null
+++ b/docs/programs/anchor/index.md
@@ -0,0 +1,384 @@
+---
+title: Getting Started with Anchor
+description:
+  Learn how to build Solana programs using the Anchor framework. This
+  comprehensive guide covers creating, building, testing, and deploying Solana
+  smart contracts with Anchor.
+sidebarLabel: Anchor Framework
+sidebarSortOrder: 0
+altRoutes:
+  - /docs/programs/debugging
+  - /docs/programs/lang-c
+  - /docs/programs/overview
+---
+
+The Anchor framework is a tool that simplifies the process of building Solana
+programs. Whether you're new to blockchain development or an experienced
+programmer, Anchor streamlines writing, testing, and deploying Solana programs.
+
+In this section, we'll walk through:
+
+- Creating a new Anchor project
+- Building and testing your program
+- Deploying to Solana clusters
+- Understanding the project file structure
+
+## Prerequisites
+
+For detailed installation instructions, visit the
+[installation](/docs/intro/installation) page.
+
+Before you begin, ensure you have the following installed:
+
+- Rust: The programming language for building Solana programs.
+- Solana CLI: Command-line tool for Solana development.
+- Anchor CLI: Command-line tool for the Anchor framework.
+
+To verify Anchor CLI installation, open your terminal and run:
+
+```shell filename="Terminal"
+anchor --version
+```
+
+Expected output:
+
+```shell filename="Terminal"
+anchor-cli 0.30.1
+```
+
+## Getting Started
+
+This section covers the basic steps to create, build, and test your first local
+Anchor program.
+
+
+
+### Create a new Project
+
+To start a new project, use the `anchor init` command followed by your project's
+name. This command creates a new directory with the specified name and sets up a
+default program and test file.
+
+```shell filename="Terminal"
+anchor init my-project
+```
+
+Navigate to the new project directory and open it in your code editor.
+
+```shell filename="Terminal" copy
+cd my-project
+```
+
+The default Anchor program is located at `/programs/my-project/src/lib.rs`.
+
+
+
+
+The value in the `declare_id!` macro is the program ID, a unique identifier for
+your program.
+
+By default, it is the public key of the keypair generated in
+`/target/deploy/my_project-keypair.json`.
+
+```rs filename="lib.rs"
+use anchor_lang::prelude::*;
+
+declare_id!("3ynNB373Q3VAzKp7m4x238po36hjAGFXFJB4ybN2iTyg");
+
+#[program]
+pub mod my_project {
+    use super::*;
+
+    pub fn initialize(ctx: Context<Initialize>) -> Result<()> {
+        msg!("Greetings from: {:?}", ctx.program_id);
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct Initialize {}
+```
+
+
+
+
+The default TypeScript test file is located at `/tests/my-project.ts`.
+
+
+
+
+This file demonstrates how to invoke the default program's `initialize`
+instruction in TypeScript.
+
+```ts filename="my-project.ts"
+import * as anchor from "@coral-xyz/anchor";
+import { Program } from "@coral-xyz/anchor";
+import { MyProject } from "../target/types/my_project";
+
+describe("my-project", () => {
+  // Configure the client to use the local cluster.
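+  // AnchorProvider.env() reads the ANCHOR_PROVIDER_URL and ANCHOR_WALLET
+  // environment variables, which `anchor test` sets automatically.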
+  anchor.setProvider(anchor.AnchorProvider.env());
+
+  const program = anchor.workspace.MyProject as Program<MyProject>;
+
+  it("Is initialized!", async () => {
+    // Add your test here.
+    const tx = await program.methods.initialize().rpc();
+    console.log("Your transaction signature", tx);
+  });
+});
+```
+
+
+
+
+If you prefer Rust for testing, initialize your project with the
+`--test-template rust` flag.
+
+```shell
+anchor init --test-template rust my-project
+```
+
+The Rust test file will be at `/tests/src/test_initialize.rs`.
+
+
+
+
+```rust filename="test_initialize.rs"
+use std::str::FromStr;
+
+use anchor_client::{
+    solana_sdk::{
+        commitment_config::CommitmentConfig, pubkey::Pubkey, signature::read_keypair_file,
+    },
+    Client, Cluster,
+};
+
+#[test]
+fn test_initialize() {
+    let program_id = "3ynNB373Q3VAzKp7m4x238po36hjAGFXFJB4ybN2iTyg";
+    let anchor_wallet = std::env::var("ANCHOR_WALLET").unwrap();
+    let payer = read_keypair_file(&anchor_wallet).unwrap();
+
+    let client = Client::new_with_options(Cluster::Localnet, &payer, CommitmentConfig::confirmed());
+    let program_id = Pubkey::from_str(program_id).unwrap();
+    let program = client.program(program_id).unwrap();
+
+    let tx = program
+        .request()
+        .accounts(my_project::accounts::Initialize {})
+        .args(my_project::instruction::Initialize {})
+        .send()
+        .expect("");
+
+    println!("Your transaction signature {}", tx);
+}
+```
+
+
+
+
+### Build the Program
+
+Build the program by running `anchor build`.
+
+```shell filename="Terminal" copy
+anchor build
+```
+
+The compiled program will be at `/target/deploy/my_project.so`. The content of
+this file is what gets stored on the Solana network (as an executable account)
+when you deploy your program.
+
+### Test the Program
+
+To test the program, run `anchor test`.
+
+```shell filename="Terminal" copy
+anchor test
+```
+
+By default, the `Anchor.toml` config file specifies the `localnet` cluster. When
+developing on `localnet`, `anchor test` will automatically:
+
+1. Start a local Solana validator
+2. Build and deploy your program to the local cluster
+3. Run the tests in the `tests` folder
+4. Stop the local Solana validator
+
+Alternatively, you can manually start a local Solana validator and run tests
+against it. This is useful if you want to keep the validator running while you
+iterate on your program. It allows you to inspect accounts and transaction logs
+on the [Solana Explorer](https://explorer.solana.com/?cluster=custom) while
+developing locally.
+
+Open a new terminal and start a local Solana validator by running the
+`solana-test-validator` command.
+
+```shell filename="Terminal" copy
+solana-test-validator
+```
+
+In a separate terminal, run the tests against the local cluster. Use the
+`--skip-local-validator` flag to skip starting the local validator since it's
+already running.
+
+```shell filename="Terminal" copy
+anchor test --skip-local-validator
+```
+
+### Deploy to Devnet
+
+By default, the `Anchor.toml` config file in an Anchor project specifies the
+localnet cluster.
+
+```toml filename="Anchor.toml" {14}
+[toolchain]
+
+[features]
+resolution = true
+skip-lint = false
+
+[programs.localnet]
+my_project = "3ynNB373Q3VAzKp7m4x238po36hjAGFXFJB4ybN2iTyg"
+
+[registry]
+url = "https://api.apr.dev"
+
+[provider]
+cluster = "Localnet"
+wallet = "~/.config/solana/id.json"
+
+[scripts]
+test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts"
+```
+
+To deploy your program to devnet, change the `cluster` value to `Devnet`.
Note
+that this requires your wallet to have enough SOL on Devnet to cover the
+deployment cost.
+
+```diff
+-cluster = "Localnet"
++cluster = "Devnet"
+```
+
+```toml filename="Anchor.toml"
+[provider]
+cluster = "Devnet"
+wallet = "~/.config/solana/id.json"
+```
+
+Now when you run `anchor deploy`, your program will be deployed to the devnet
+cluster. The `anchor test` command will also use the cluster specified in the
+`Anchor.toml` file.
+
+```shell
+anchor deploy
+```
+
+To deploy to mainnet, simply update the `Anchor.toml` file to specify the
+mainnet cluster.
+
+```toml filename="Anchor.toml"
+[provider]
+cluster = "Mainnet"
+wallet = "~/.config/solana/id.json"
+```
+
+### Update the Program
+
+Solana programs can be updated by redeploying the program to the same program
+ID.
+
+To update a program, simply make changes to your program's code and run the
+`anchor build` command to generate an updated `.so` file.
+
+```shell
+anchor build
+```
+
+Then run the `anchor deploy` command to redeploy the updated program.
+
+```shell
+anchor deploy
+```
+
+### Close the Program
+
+To reclaim the SOL allocated to a program account, you can close your Solana
+program.
+
+To close a program, use the `solana program close <PROGRAM_ID>` command. For
+example:
+
+```shell
+solana program close 3ynNB373Q3VAzKp7m4x238po36hjAGFXFJB4ybN2iTyg --bypass-warning
+```
+
+Note that once a program is closed, the program ID cannot be reused to deploy a
+new program.
+
+
+
+## Project File Structure
+
+Below is an overview of the default file structure in an Anchor workspace:
+
+```
+.
+├── .anchor
+│   └── program-logs
+├── app
+├── migrations
+├── programs
+│   └── [project-name]
+│       ├── src
+│       │   └── lib.rs
+│       ├── Cargo.toml
+│       └── Xargo.toml
+├── target
+│   ├── deploy
+│   │   └── [project-name]-keypair.json
+│   ├── idl
+│   │   └── [project-name].json
+│   └── types
+│       └── [project-name].ts
+├── tests
+│   └── [project-name].ts
+├── Anchor.toml
+├── Cargo.toml
+└── package.json
+```
+
+### Programs Folder
+
+The `/programs` folder contains your project's Anchor programs. A single
+workspace can contain multiple programs.
+
+### Tests Folder
+
+The `/tests` folder contains test files for your project. A default test file is
+created for you when you create your project.
+
+### Target Folder
+
+The `/target` folder contains build outputs. The main subfolders include:
+
+- `/deploy`: Contains the keypair and program binary for your programs.
+- `/idl`: Contains the JSON IDL for your programs.
+- `/types`: Contains the TypeScript types for the IDL.
+
+### Anchor.toml File
+
+The `Anchor.toml` file configures workspace settings for your project.
+
+### .anchor Folder
+
+Includes a `program-logs` file that contains transaction logs from the last run
+of test files.
+
+### App Folder
+
+The `/app` folder is an empty folder that can be optionally used for your
+frontend code.
diff --git a/docs/programs/anchor/pda.md b/docs/programs/anchor/pda.md
new file mode 100644
index 000000000..bec281733
--- /dev/null
+++ b/docs/programs/anchor/pda.md
@@ -0,0 +1,325 @@
+---
+title: PDAs with Anchor
+description:
+  Learn how to use Program Derived Addresses (PDAs) in Anchor programs, using
+  constraints, and implementing common PDA patterns
+sidebarLabel: PDAs with Anchor
+sidebarSortOrder: 4
+---
+
+[Program Derived Addresses (PDAs)](/docs/core/pda) refer to a feature of Solana
+development that allows you to create a unique address derived deterministically
+from pre-defined inputs (seeds) and a program ID.
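+
+For reference, a PDA can be derived client-side with `findProgramAddressSync`
+from `@solana/web3.js`. A minimal sketch (the program ID below is a
+placeholder, not a deployed program):
+
+```ts
+import { PublicKey } from "@solana/web3.js";
+
+// Derive a PDA from a static seed and a placeholder program ID.
+const programId = new PublicKey("11111111111111111111111111111111");
+const [pda, bump] = PublicKey.findProgramAddressSync(
+  [Buffer.from("hello_world")],
+  programId,
+);
+console.log(`PDA: ${pda}, bump: ${bump}`);
+```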
+ +This section will cover basic examples of how to use PDAs in an Anchor program. + +## Anchor PDA Constraints + +When using PDAs in an Anchor program, you generally use Anchor's account +constraints to define the seeds used to derive the PDA. These constraints serve +as security checks to ensure that the correct address is derived. + +The constraints used to define the PDA seeds include: + +- `seeds`: An array of optional seeds used to derive the PDA. Seeds can be + static values or dynamic references to account data. +- `bump`: The bump seed used to derive the PDA. Used to ensure the address falls + off the Ed25519 curve and is a valid PDA. +- `seeds::program` - (Optional) The program ID used to derive the PDA address. + This constraint is only used to derive a PDA where the program ID is not the + current program. + +The `seeds` and `bump` constraints are required to be used together. + +### Usage Examples + +Below are examples demonstrating how to use PDA constraints in an Anchor +program. + + + + + +The `seeds` constraint specifies the optional values used to derive the PDA. + +#### No Optional Seeds + +- Use an empty array `[]` to define a PDA without optional seeds. + +```rs +#[derive(Accounts)] +pub struct InstructionAccounts<'info> { + #[account( + seeds = [], + bump, + )] + pub pda_account: SystemAccount<'info>, +} +``` + +#### Single Static Seed + +- Specify optional seeds in the `seeds` constraint. + +```rs +#[derive(Accounts)] +pub struct InstructionAccounts<'info> { + #[account( + seeds = [b"hello_world"], + bump, + )] + pub pda_account: SystemAccount<'info>, +} +``` + +#### Multiple Seeds and Account References + +- Multiple seeds can be specified in the `seeds` constraint. The `seeds` + constraint can also reference other account addresses or account data. + +```rs +#[derive(Accounts)] +pub struct InstructionAccounts<'info> { + pub signer: Signer<'info>, + #[account( + seeds = [b"hello_world", signer.key().as_ref()], + bump, + )] + pub pda_account: SystemAccount<'info>, +} +``` + +The example above uses both a static seed (`b"hello_world"`) and a dynamic seed +(the signer's public key). + + + + +The `bump` constraint specifies the bump seed used to derive the PDA. + +#### Automatic Bump Calculation + +When using the `bump` constraint without a value, the bump is automatically +calculated each time the instruction is invoked. + +```rs +#[derive(Accounts)] +pub struct InstructionAccounts<'info> { + #[account( + seeds = [b"hello_world"], + bump, + )] + pub pda_account: SystemAccount<'info>, +} +``` + +#### Specify Bump Value + +You can explicitly provide the bump value, which is useful for optimizing +compute unit usage. This assumes that the PDA account has been created and the +bump seed is stored as a field on an existing account. + +```rs +#[derive(Accounts)] +pub struct InstructionAccounts<'info> { + #[account( + seeds = [b"hello_world"], + bump = pda_account.bump_seed, + )] + pub pda_account: Account<'info, CustomAccount>, +} + +#[account] +pub struct CustomAccount { + pub bump_seed: u8, +} +``` + +By storing the bump value in the account's data, the program doesn't need to +recalculate it, saving compute units. The saved bump value can be stored on the +account itself or another account. + + + + +The `seeds::program` constraint specifies the program ID used to derive the PDA. +This constraint is only used when deriving a PDA from a different program. + +Use this constraint when your instruction needs to interact with PDA accounts +created by another program. 
+
+```rs
+#[derive(Accounts)]
+pub struct InstructionAccounts<'info> {
+    #[account(
+        seeds = [b"hello_world"],
+        bump,
+        seeds::program = other_program.key(),
+    )]
+    pub pda_account: SystemAccount<'info>,
+    pub other_program: Program<'info, OtherProgram>,
+}
+```
+
+
+
+
+The `init` constraint is commonly used with `seeds` and `bump` to create a new
+account with an address that is a PDA. Under the hood, the `init` constraint
+invokes the System Program to create the account.
+
+```rs
+#[derive(Accounts)]
+pub struct InstructionAccounts<'info> {
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    #[account(
+        init,
+        seeds = [b"hello_world", signer.key().as_ref()],
+        bump,
+        payer = signer,
+        space = 8 + 1,
+    )]
+    pub pda_account: Account<'info, CustomAccount>,
+    pub system_program: Program<'info, System>,
+}
+
+#[account]
+pub struct CustomAccount {
+    pub bump_seed: u8,
+}
+```
+
+
+
+
+## PDA seeds in the IDL
+
+Program Derived Address (PDA) seeds defined in the `seeds` constraint are
+included in the program's IDL file. This allows the Anchor client to
+automatically resolve accounts using these seeds when constructing instructions.
+
+The example below shows the relationship between the program, IDL, and client.
+
+
+
+
+
+The program below defines a `pda_account` using a static seed (`b"hello_world"`)
+and the signer's public key as a dynamic seed.
+
+```rs {18} /signer/
+use anchor_lang::prelude::*;
+
+declare_id!("BZLiJ62bzRryYp9mRobz47uA66WDgtfTXhhgM25tJyx5");
+
+#[program]
+mod hello_anchor {
+    use super::*;
+    pub fn test_instruction(ctx: Context<InstructionAccounts>) -> Result<()> {
+        msg!("PDA: {}", ctx.accounts.pda_account.key());
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct InstructionAccounts<'info> {
+    pub signer: Signer<'info>,
+    #[account(
+        seeds = [b"hello_world", signer.key().as_ref()],
+        bump,
+    )]
+    pub pda_account: SystemAccount<'info>,
+}
+```
+
+
+
+
+The program's IDL file includes the PDA seeds defined in the `seeds` constraint.
+
+- The static seed `b"hello_world"` is converted to byte values.
+- The dynamic seed is included as a reference to the signer account.
+
+```json {22-29}
+{
+  "address": "BZLiJ62bzRryYp9mRobz47uA66WDgtfTXhhgM25tJyx5",
+  "metadata": {
+    "name": "hello_anchor",
+    "version": "0.1.0",
+    "spec": "0.1.0",
+    "description": "Created with Anchor"
+  },
+  "instructions": [
+    {
+      "name": "test_instruction",
+      "discriminator": [33, 223, 61, 208, 32, 193, 201, 79],
+      "accounts": [
+        {
+          "name": "signer",
+          "signer": true
+        },
+        {
+          "name": "pda_account",
+          "pda": {
+            "seeds": [
+              {
+                "kind": "const",
+                "value": [104, 101, 108, 108, 111, 95, 119, 111, 114, 108, 100]
+              },
+              {
+                "kind": "account",
+                "path": "signer"
+              }
+            ]
+          }
+        }
+      ],
+      "args": []
+    }
+  ]
+}
+```
+
+
+
+
+The Anchor client can automatically resolve the PDA address using the IDL file.
+
+In the example below, Anchor automatically resolves the PDA address using the
+provider wallet as the signer, and its public key as the dynamic seed for PDA
+derivation. This removes the need to explicitly derive the PDA when building the
+instruction.
+
+```ts {13}
+import * as anchor from "@coral-xyz/anchor";
+import { Program } from "@coral-xyz/anchor";
+import { HelloAnchor } from "../target/types/hello_anchor";
+
+describe("hello_anchor", () => {
+  // Configure the client to use the local cluster.
+  anchor.setProvider(anchor.AnchorProvider.env());
+
+  const program = anchor.workspace.HelloAnchor as Program<HelloAnchor>;
+
+  it("Is initialized!", async () => {
+    // Add your test here.
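+    // Note: no .accounts({...}) call is needed here; Anchor resolves
+    // pda_account from the seeds in the IDL and uses the provider
+    // wallet as the signer.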
+    const tx = await program.methods.testInstruction().rpc();
+    console.log("Your transaction signature", tx);
+  });
+});
+```
+
+When the instruction is invoked, the PDA is printed to program logs as defined
+in the program instruction.
+
+```{3}
+Program BZLiJ62bzRryYp9mRobz47uA66WDgtfTXhhgM25tJyx5 invoke [1]
+Program log: Instruction: TestInstruction
+Program log: PDA: 3Hikt5mpKaSS4UNA5Du1TZJ8tp4o8VC8YWW6X9vtfVnJ
+Program BZLiJ62bzRryYp9mRobz47uA66WDgtfTXhhgM25tJyx5 consumed 18505 of 200000 compute units
+Program BZLiJ62bzRryYp9mRobz47uA66WDgtfTXhhgM25tJyx5 success
+```
+
+
+
diff --git a/docs/programs/anchor/program-structure.md b/docs/programs/anchor/program-structure.md
new file mode 100644
index 000000000..a8b0f2475
--- /dev/null
+++ b/docs/programs/anchor/program-structure.md
@@ -0,0 +1,399 @@
+---
+title: Anchor Program Structure
+description:
+  Learn about the structure of Anchor programs, including key macros and their
+  roles in simplifying Solana program development
+sidebarLabel: Program Structure
+sidebarSortOrder: 1
+---
+
+The [Anchor framework](https://www.anchor-lang.com/) uses
+[Rust macros](https://doc.rust-lang.org/book/ch19-06-macros.html) to reduce
+boilerplate code and simplify the implementation of common security checks
+required for writing Solana programs.
+
+The main macros found in an Anchor program include:
+
+- [`declare_id`](#declare-id-macro): Specifies the program's on-chain address
+- [`#[program]`](#program-macro): Specifies the module containing the program’s
+  instruction logic
+- [`#[derive(Accounts)]`](#derive-accounts-macro): Applied to structs to
+  indicate a list of accounts required by an instruction
+- [`#[account]`](#account-macro): Applied to structs to create custom account
+  types for the program
+
+## Example Program
+
+Let's examine a simple program that demonstrates the usage of the macros
+mentioned above to understand the basic structure of an Anchor program.
+
+The example program below creates a new account (`NewAccount`) that stores a
+`u64` value passed to the `initialize` instruction.
+
+```rust filename="lib.rs"
+use anchor_lang::prelude::*;
+
+declare_id!("11111111111111111111111111111111");
+
+#[program]
+mod hello_anchor {
+    use super::*;
+    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
+        ctx.accounts.new_account.data = data;
+        msg!("Changed data to: {}!", data);
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(init, payer = signer, space = 8 + 8)]
+    pub new_account: Account<'info, NewAccount>,
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
+
+#[account]
+pub struct NewAccount {
+    data: u64,
+}
+```
+
+## declare_id! macro
+
+The
+[`declare_id`](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/attribute/account/src/lib.rs#L430)
+macro specifies the on-chain address of the program, known as the program ID.
+
+```rust filename="lib.rs" {3}
+use anchor_lang::prelude::*;
+
+declare_id!("11111111111111111111111111111111");
+```
+
+By default, the program ID is the public key of the keypair generated at
+`/target/deploy/your_program_name-keypair.json`.
+
+To update the value of the program ID in the `declare_id` macro with the public
+key of the keypair in the `/target/deploy/your_program_name-keypair.json` file,
+run the following command:
+
+```shell filename="Terminal"
+anchor keys sync
+```
+
+The `anchor keys sync` command is useful to run when cloning a repository where
+the value of the program ID in a cloned repo's `declare_id` macro won't match
+the one generated when you run `anchor build` locally.
+
+## #[program] macro
+
+The
+[`#[program]`](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/attribute/program/src/lib.rs#L12)
+macro defines the module that contains all the instruction handlers for your
+program. Each public function within this module corresponds to an instruction
+that can be invoked.
+
+```rust filename="lib.rs" {5, 8-12}
+use anchor_lang::prelude::*;
+
+declare_id!("11111111111111111111111111111111");
+
+#[program]
+mod hello_anchor {
+    use super::*;
+    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
+        ctx.accounts.new_account.data = data;
+        msg!("Changed data to: {}!", data);
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(init, payer = signer, space = 8 + 8)]
+    pub new_account: Account<'info, NewAccount>,
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
+
+#[account]
+pub struct NewAccount {
+    data: u64,
+}
+```
+
+### Instruction Context
+
+Instruction handlers are functions that define the logic executed when an
+instruction is invoked. The first parameter of each handler is a `Context<T>`
+type, where `T` is a struct implementing the `Accounts` trait and specifies the
+accounts the instruction requires.
+
+The
+[`Context`](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/src/context.rs#L24)
+type provides the instruction with access to the following non-argument inputs:
+
+```rust
+pub struct Context<'a, 'b, 'c, 'info, T> {
+    /// Currently executing program id.
+    pub program_id: &'a Pubkey,
+    /// Deserialized accounts.
+    pub accounts: &'b mut T,
+    /// Remaining accounts given but not deserialized or validated.
+    /// Be very careful when using this directly.
+    pub remaining_accounts: &'c [AccountInfo<'info>],
+    /// Bump seeds found during constraint validation. This is provided as a
+    /// convenience so that handlers don't have to recalculate bump seeds or
+    /// pass them in as arguments.
+    pub bumps: BTreeMap<String, u8>,
+}
+```
+
+The `Context` fields can be accessed in an instruction using dot notation:
+
+- `ctx.accounts`: The accounts required for the instruction
+- `ctx.program_id`: The program's public key (address)
+- `ctx.remaining_accounts`: Additional accounts not specified in the `Accounts`
+  struct.
+- `ctx.bumps`: Bump seeds for any
+  [Program Derived Address (PDA)](/docs/core/pda.md) accounts specified in the
+  `Accounts` struct
+
+Additional parameters are optional and can be included to specify arguments that
+must be provided when the instruction is invoked.
+
+```rust filename="lib.rs" /Context/ /data/1
+pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
+    ctx.accounts.new_account.data = data;
+    msg!("Changed data to: {}!", data);
+    Ok(())
+}
+```
+
+In this example, the `Initialize` struct implements the `Accounts` trait where
+each field in the struct represents an account required by the `initialize`
+instruction.
+
+```rust filename="lib.rs" /Initialize/ /Accounts/
+#[program]
+mod hello_anchor {
+    use super::*;
+    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
+        ctx.accounts.new_account.data = data;
+        msg!("Changed data to: {}!", data);
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(init, payer = signer, space = 8 + 8)]
+    pub new_account: Account<'info, NewAccount>,
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
+```
+
+## #[derive(Accounts)] macro
+
+The
+[`#[derive(Accounts)]`](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/derive/accounts/src/lib.rs#L630)
+macro is applied to a struct to specify the accounts that must be provided when
+an instruction is invoked. This macro implements the
+[`Accounts`](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/src/lib.rs#L105)
+trait, which simplifies account validation as well as the serialization and
+deserialization of account data.
+
+```rust /Accounts/ {1}
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(init, payer = signer, space = 8 + 8)]
+    pub new_account: Account<'info, NewAccount>,
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
+```
+
+Each field in the struct represents an account required by an instruction. The
+naming of each field is arbitrary, but it is recommended to use a descriptive
+name that indicates the purpose of the account.
+
+```rust /signer/2 /new_account/ /system_program/
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(init, payer = signer, space = 8 + 8)]
+    pub new_account: Account<'info, NewAccount>,
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
+```
+
+### Account Validation
+
+To prevent security vulnerabilities, it's important to verify that accounts
+provided to an instruction are the expected accounts. Accounts are validated in
+Anchor programs in two ways that are generally used together:
+
+- [Account Constraints](https://www.anchor-lang.com/docs/account-constraints):
+  Constraints define additional conditions that an account must satisfy to be
+  considered valid for the instruction. Constraints are applied using the
+  `#[account(..)]` attribute, which is placed above a field in a struct that
+  implements the `Accounts` trait.
+
+  You can find the implementation of the constraints
+  [here](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/syn/src/parser/accounts/constraints.rs).
+
+  ```rust {3, 5}
+  #[derive(Accounts)]
+  pub struct Initialize<'info> {
+      #[account(init, payer = signer, space = 8 + 8)]
+      pub new_account: Account<'info, NewAccount>,
+      #[account(mut)]
+      pub signer: Signer<'info>,
+      pub system_program: Program<'info, System>,
+  }
+  ```
+
+- [Account Types](https://www.anchor-lang.com/docs/account-types): Anchor
+  provides various account types to help ensure that the account provided by the
+  client matches what the program expects.
+
+  You can find the implementation of the account types
+  [here](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/src/accounts).
+
+  ```rust /Account/2 /Signer/ /Program/
+  #[derive(Accounts)]
+  pub struct Initialize<'info> {
+      #[account(init, payer = signer, space = 8 + 8)]
+      pub new_account: Account<'info, NewAccount>,
+      #[account(mut)]
+      pub signer: Signer<'info>,
+      pub system_program: Program<'info, System>,
+  }
+  ```
+
+When an instruction in an Anchor program is invoked, the program first validates
+the accounts provided before executing the instruction's logic. After
+validation, these accounts can be accessed within the instruction using the
+`ctx.accounts` syntax.
+
+```rust filename="lib.rs" /ctx.accounts.new_account/ /new_account/ /Initialize/
+use anchor_lang::prelude::*;
+
+declare_id!("11111111111111111111111111111111");
+
+#[program]
+mod hello_anchor {
+    use super::*;
+    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
+        ctx.accounts.new_account.data = data;
+        msg!("Changed data to: {}!", data);
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(init, payer = signer, space = 8 + 8)]
+    pub new_account: Account<'info, NewAccount>,
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
+
+#[account]
+pub struct NewAccount {
+    data: u64,
+}
+```
+
+## #[account] macro
+
+The
+[`#[account]`](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/attribute/account/src/lib.rs#L66)
+macro is applied to structs that define the data stored in custom accounts
+created by your program.
+
+```rust
+#[account]
+pub struct NewAccount {
+    data: u64,
+}
+```
+
+This macro implements various traits
+[detailed here](https://docs.rs/anchor-lang/latest/anchor_lang/attr.account.html).
+The key functionalities of the `#[account]` macro include:
+
+- [Assign Program Owner](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/attribute/account/src/lib.rs#L119-L132):
+  When creating an account, the program owner of the account is automatically
+  set to the program specified in `declare_id`.
+- [Set Discriminator](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/attribute/account/src/lib.rs#L101-L117):
+  A unique 8 byte discriminator, specific to the account type, is added as the
+  first 8 bytes of account data during its initialization. This helps in
+  differentiating account types and is used for account validation.
+- [Data Serialization and Deserialization](https://github.com/coral-xyz/anchor/blob/v0.30.1/lang/attribute/account/src/lib.rs#L202-L246):
+  Account data is automatically serialized and deserialized as the account type.
+
+```rust filename="lib.rs" /data/2,6 /NewAccount/ {24-27}
+use anchor_lang::prelude::*;
+
+declare_id!("11111111111111111111111111111111");
+
+#[program]
+mod hello_anchor {
+    use super::*;
+    pub fn initialize(ctx: Context<Initialize>, data: u64) -> Result<()> {
+        ctx.accounts.new_account.data = data;
+        msg!("Changed data to: {}!", data);
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(init, payer = signer, space = 8 + 8)]
+    pub new_account: Account<'info, NewAccount>,
+    #[account(mut)]
+    pub signer: Signer<'info>,
+    pub system_program: Program<'info, System>,
+}
+
+#[account]
+pub struct NewAccount {
+    data: u64,
+}
+```
+
+### Account Discriminator
+
+An account discriminator in an Anchor program refers to an 8 byte identifier
+unique to each account type. It's derived from the first 8 bytes of the SHA256
+hash of the string `account:<AccountName>`. This discriminator is stored as the
+first 8 bytes of account data when an account is created.
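+
+As a client-side sketch of this layout (with hypothetical names: it assumes a
+`connection` and the `newAccountKp` keypair from the earlier IDL examples), the
+discriminator can be read directly off the raw account data:
+
+```ts
+import { createHash } from "crypto";
+
+// The expected discriminator: first 8 bytes of sha256("account:NewAccount").
+const expected = createHash("sha256")
+  .update("account:NewAccount")
+  .digest()
+  .subarray(0, 8);
+
+// The first 8 bytes of the stored account data should match.
+const accountInfo = await connection.getAccountInfo(newAccountKp.publicKey);
+console.log(accountInfo.data.subarray(0, 8).equals(expected)); // true
+```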
+ +When creating an account in an Anchor program, 8 bytes must be allocated for the +discriminator. + +```rust /8/1 +#[account(init, payer = signer, space = 8 + 8)] +pub new_account: Account<'info, NewAccount>, +``` + +The discriminator is used during the following two scenarios: + +- Initialization: When an account is created, the discriminator is set as the + first 8 bytes of the account's data. +- Deserialization: When account data is deserialized, the first 8 bytes of + account data is checked against the discriminator of the expected account + type. + +If there's a mismatch, it indicates that the client has provided an unexpected +account. This mechanism serves as an account validation check in Anchor +programs. diff --git a/docs/programs/debugging.md b/docs/programs/debugging.md deleted file mode 100644 index 1f2ace740..000000000 --- a/docs/programs/debugging.md +++ /dev/null @@ -1,273 +0,0 @@ ---- -title: "Debugging Programs" ---- - -Solana programs run on-chain, so debugging them in the wild can be challenging. -To make debugging programs easier, developers can write unit tests that directly -test their program's execution via the Solana runtime, or run a local cluster -that will allow RPC clients to interact with their program. - -## Running unit tests - -- [Testing with Rust](/docs/programs/lang-rust.md#how-to-test) -- [Testing with C](/docs/programs/lang-c.md#how-to-test) - -## Logging - -During program execution both the runtime and the program log status and error -messages. - -For information about how to log from a program see the language specific -documentation: - -- [Logging from a Rust program](/docs/programs/lang-rust.md#logging) -- [Logging from a C program](/docs/programs/lang-c.md#logging) - -When running a local cluster the logs are written to stdout as long as they are -enabled via the `RUST_LOG` log mask. From the perspective of program development -it is helpful to focus on just the runtime and program logs and not the rest of -the cluster logs. To focus in on program specific information the following log -mask is recommended: - -```shell -export RUST_LOG=solana_runtime::system_instruction_processor=trace,solana_runtime::message_processor=info,solana_bpf_loader=debug,solana_rbpf=debug -``` - -Log messages coming directly from the program (not the runtime) will be -displayed in the form: - -`Program log: ` - -## Error Handling - -The amount of information that can be communicated via a transaction error is -limited but there are many points of possible failures. The following are -possible failure points and information about what errors to expect and where to -get more information: - -- The SBF loader may fail to parse the program, this should not happen since the - loader has already _finalized_ the program's account data. - - `InstructionError::InvalidAccountData` will be returned as part of the - transaction error. -- The SBF loader may fail to setup the program's execution environment - - `InstructionError::Custom(0x0b9f_0001)` will be returned as part of the - transaction error. "0x0b9f_0001" is the hexadecimal representation of - [`VirtualMachineCreationFailed`](https://github.com/solana-labs/solana/blob/bc7133d7526a041d1aaee807b80922baa89b6f90/programs/bpf_loader/src/lib.rs#L44). -- The SBF loader may have detected a fatal error during program executions - (things like panics, memory violations, system call errors, etc...) - - `InstructionError::Custom(0x0b9f_0002)` will be returned as part of the - transaction error. 
"0x0b9f_0002" is the hexadecimal representation of - [`VirtualMachineFailedToRunProgram`](https://github.com/solana-labs/solana/blob/bc7133d7526a041d1aaee807b80922baa89b6f90/programs/bpf_loader/src/lib.rs#L46). -- The program itself may return an error - - `InstructionError::Custom()` will be returned. The "user - defined value" must not conflict with any of the - [builtin runtime program errors](https://github.com/solana-labs/solana/blob/bc7133d7526a041d1aaee807b80922baa89b6f90/sdk/program/src/program_error.rs#L87). - Programs typically use enumeration types to define error codes starting at - zero so they won't conflict. - -In the case of `VirtualMachineFailedToRunProgram` errors, more information about -the specifics of what failed are written to the -[program's execution logs](/docs/programs/debugging.md#logging). - -For example, an access violation involving the stack will look something like -this: - -```text -SBF program 4uQeVj5tqViQh7yWWGStvkEG1Zmhx6uasJtWCJziofM failed: out of bounds memory store (insn #615), addr 0x200001e38/8 -``` - -## Monitoring Compute Budget Consumption - -The program can log the remaining number of compute units it will be allowed -before program execution is halted. Programs can use these logs to wrap -operations they wish to profile. - -- [Log the remaining compute units from a Rust program](/docs/programs/lang-rust.md#compute-budget) -- [Log the remaining compute units from a C program](/docs/programs/lang-c.md#compute-budget) - -See [compute budget](/docs/core/fees.md#compute-budget) for more information. - -## ELF Dump - -The SBF shared object internals can be dumped to a text file to gain more -insight into a program's composition and what it may be doing at runtime. - -- [Create a dump file of a Rust program](/docs/programs/lang-rust.md#elf-dump) -- [Create a dump file of a C program](/docs/programs/lang-c.md#elf-dump) - -## Instruction Tracing - -During execution the runtime SBF interpreter can be configured to log a trace -message for each SBF instruction executed. This can be very helpful for things -like pin-pointing the runtime context leading up to a memory access violation. - -The trace logs together with the [ELF dump](#elf-dump) can provide a lot of -insight (though the traces produce a lot of information). - -To turn on SBF interpreter trace messages in a local cluster configure the -`solana_rbpf` level in `RUST_LOG` to `trace`. For example: - -`export RUST_LOG=solana_rbpf=trace` - -## Source level debugging - -Source level debugging of onchain programs written in Rust or C can be done -using the `program run` subcommand of `solana-ledger-tool`, and lldb, -distributed with Solana Rust and Clang compiler binary package platform-tools. - -The `solana-ledger-tool program run` subcommand loads a compiled on-chain -program, executes it in RBPF virtual machine and runs a gdb server that accepts -incoming connections from LLDB or GDB. Once lldb is connected to -`solana-ledger-tool` gdbserver, it can control execution of an on-chain program. -Run `solana-ledger-tool program run --help` for an example of specifying input -data for parameters of the program entrypoint function. - -To compile a program for debugging use cargo-build-sbf build utility with the -command line option `--debug`. The utility will generate two loadable files, one -a usual loadable module with the extension `.so`, and another the same loadable -module but containing Dwarf debug information, a file with extension `.debug`. 
- -To execute a program in debugger, run `solana-ledger-tool program run` with -`-e debugger` command line option. For example, a crate named 'helloworld' is -compiled and an executable program is built in `target/deploy` directory. There -should be three files in that directory - -- helloworld-keypair.json -- a keypair for deploying the program, -- helloworld.debug -- a binary file containing debug information, -- helloworld.so -- an executable file loadable into the virtual machine. The - command line for running `solana-ledger-tool` would be something like this - -```shell -solana-ledger-tool program run -l test-ledger -e debugger target/deploy/helloworld.so -``` - -Note that `solana-ledger-tool` always loads a ledger database. Most on-chain -programs interact with a ledger in some manner. Even if for debugging purpose a -ledger is not needed, it has to be provided to `solana-ledger-tool`. A minimal -ledger database can be created by running `solana-test-validator`, which creates -a ledger in `test-ledger` subdirectory. - -In debugger mode `solana-ledger-tool program run` loads an `.so` file and starts -listening for an incoming connection from a debugger - -```text -Waiting for a Debugger connection on "127.0.0.1:9001"... -``` - -To connect to `solana-ledger-tool` and execute the program, run lldb. For -debugging rust programs it may be beneficial to run solana-lldb wrapper to lldb, -i.e. at a new shell prompt (other than the one used to start -`solana-ledger-tool`) run the command: - -```shell -solana-lldb -``` - -This script is installed in platform-tools path. If that path is not added to -`PATH` environment variable, it may be necessary to specify the full path, e.g. - -```text -~/.cache/solana/v1.35/platform-tools/llvm/bin/solana-lldb -``` - -After starting the debugger, load the .debug file by entering the following -command at the debugger prompt - -```text -(lldb) file target/deploy/helloworld.debug -``` - -If the debugger finds the file, it will print something like this - -```text -Current executable set to '/path/helloworld.debug' (bpf). -``` - -Now, connect to the gdb server that `solana-ledger-tool` implements, and debug -the program as usual. Enter the following command at lldb prompt - -```text -(lldb) gdb-remote 127.0.0.1:9001 -``` - -If the debugger and the gdb server establish a connection, the execution of the -program will be stopped at the entrypoint function, and lldb should print -several lines of the source code around the entrypoint function signature. From -this point on, normal lldb commands can be used to control execution of the -program being debugged. - -### Debugging in an IDE - -To debug onchain programs in Visual Studio IDE, install the CodeLLDB extension. -Open CodeLLDB Extension Settings. In Advanced settings change the value of -`Lldb: Library` field to the path of `liblldb.so` (or liblldb.dylib on macOS). -For example on Linux a possible path to Solana customized lldb can be -`/home//.cache/solana/v1.33/platform-tools/llvm/lib/liblldb.so.` where -`` is your Linux system username. This can also be added directly to -`~/.config/Code/User/settings.json` file, e.g. 
- -```json -{ - "lldb.library": "/home//.cache/solana/v1.35/platform-tools/llvm/lib/liblldb.so" -} -``` - -In `.vscode` subdirectory of your on-chain project, create two files - -First file is `tasks.json` with the following content - -```json -{ - "version": "2.0.0", - "tasks": [ - { - "label": "build", - "type": "shell", - "command": "cargo build-sbf --debug", - "problemMatcher": [], - "group": { - "kind": "build", - "isDefault": true - } - }, - { - "label": "solana-debugger", - "type": "shell", - "command": "solana-ledger-tool program run -l test-ledger -e debugger ${workspaceFolder}/target/deploy/helloworld.so" - } - ] -} -``` - -The first task is to build the on-chain program using cargo-build-sbf utility. -The second task is to run `solana-ledger-tool program run` in debugger mode. - -Another file is `launch.json` with the following content - -```json -{ - "version": "0.2.0", - "configurations": [ - { - "type": "lldb", - "request": "custom", - "name": "Debug", - "targetCreateCommands": [ - "target create ${workspaceFolder}/target/deploy/helloworld.debug" - ], - "processCreateCommands": ["gdb-remote 127.0.0.1:9001"] - } - ] -} -``` - -This file specifies how to run debugger and to connect it to the gdb server -implemented by `solana-ledger-tool`. - -To start debugging a program, first build it by running the build task. The next -step is to run `solana-debugger` task. The tasks specified in `tasks.json` file -are started from `Terminal >> Run Task...` menu of VSCode. When -`solana-ledger-tool` is running and listening from incoming connections, it's -time to start the debugger. Launch it from VSCode `Run and Debug` menu. If -everything is set up correctly, VSCode will start a debugging session and the -program execution should stop on the entrance into the `entrypoint` function. diff --git a/docs/programs/deploying.md b/docs/programs/deploying.md index 78aa89fc9..8b82817c5 100644 --- a/docs/programs/deploying.md +++ b/docs/programs/deploying.md @@ -1,270 +1,333 @@ --- title: "Deploying Programs" description: - "Deploying onchain programs can be done using the Solana CLI using the + Deploying onchain programs can be done using the Solana CLI using the Upgradable BPF loader to upload the compiled byte-code to the Solana - blockchain." + blockchain. +sidebarSortOrder: 2 --- -Solana onchain programs (otherwise known as "smart contracts") are stored in -"executable" accounts on Solana. These accounts are identical to any other -account but with the exception of: +Solana programs are stored in "executable" accounts on the network. These +accounts contain the program's compiled bytecode that define the instructions +users invoke to interact with the program. -- having the "executable" flag enabled, and -- the owner being assigned to a BPF loader +## CLI Commands -Besides those exceptions, they are governed by the same runtime rules as -non-executable accounts, hold SOL tokens for rent fees, and store a data buffer -which is managed by the BPF loader program. The latest BPF loader is called the -"Upgradeable BPF Loader". +The section is intented as a reference for the basic CLI commands for building +and deploying Solana programs. For a step-by-step guide on creating your first +program, start with [Developing Programs in Rust](/docs/programs/rust). -## Overview of the Upgradeable BPF Loader +### Build Program -### State accounts +To build your program, use the `cargo build-sbf` command. 
-1. [Program account](https://github.com/solana-labs/solana/blob/master/sdk/program/src/bpf_loader_upgradeable.rs#L34):
-   This is the main account of an on-chain program and its address is commonly
-   referred to as a "program id." Program id's are what transaction instructions
-   reference in order to invoke a program. Program accounts are immutable once
-   deployed, so you can think of them as a proxy account to the byte-code and
-   state stored in other accounts.
-2. [Program data account](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/bpf_loader_upgradeable.rs#L39):
-   This account is what stores the executable byte-code of an on-chain program.
-   When a program is upgraded, this account's data is updated with new
-   byte-code. In addition to byte-code, program data accounts are also
-   responsible for storing the slot when it was last modified and the address of
-   the sole account authorized to modify the account (this address can be
-   cleared to make a program immutable).
-3. [Buffer accounts](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/bpf_loader_upgradeable.rs#L27):
-   These accounts temporarily store byte-code while a program is being actively
-   deployed through a series of transactions. They also each store the address
-   of the sole account which is authorized to do writes.
+This command will:
-
-### Instructions
+1. Compile your program
+2. Create a `target/deploy` directory
+3. Generate a `<program-name>.so` file, where `<program-name>` matches your
+   program's name in `Cargo.toml`
-
-The state accounts listed above can only be modified with one of the following
-instructions supported by the Upgradeable BPF Loader program:
+The output `.so` file contains your program's compiled bytecode that will be
+stored in a Solana account when you deploy your program.
-
-1. [Initialize buffer](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L21):
-   Creates a buffer account and stores an authority address which is allowed to
-   modify the buffer.
-2. [Write](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L28):
-   Writes byte-code at a specified byte offset inside a buffer account. Writes
-   are processed in small chunks due to a limitation of Solana transactions
-   having a maximum serialized size of 1232 bytes.
-3. [Deploy](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L77):
-   Creates both a program account and a program data account. It fills the
-   program data account by copying the byte-code stored in a buffer account. If
-   the byte-code is valid, the program account will be set as executable,
-   allowing it to be invoked. If the byte-code is invalid, the instruction will
-   fail and all changes are reverted.
-4. [Upgrade](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L102):
-   Fills an existing program data account by copying executable byte-code from a
-   buffer account. Similar to the deploy instruction, it will only succeed if
-   the byte-code is valid.
-5. [Set authority](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L114):
-   Updates the authority of a program data or buffer account if the account's
-   current authority has signed the transaction being processed. If the
-   authority is deleted without replacement, it can never be set to a new
-   address and the account can never be closed.
-6. [Close](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L127):
-   Clears the data of a program data account or buffer account and reclaims the
-   SOL used for the rent exemption deposit.
+### Deploy Program
+
+To deploy your program, use the `solana program deploy` command followed by the
+path to the `.so` file created by the `cargo build-sbf` command.
+
+```shell
+solana program deploy ./target/deploy/your_program.so
+```
-
-## How `solana program deploy` works
+During times of congestion, there are a few additional flags you can use to help
+with program deployment.
+
+- `--with-compute-unit-price`: Set compute unit price for transaction, in
+  increments of 0.000001 lamports (micro-lamports) per compute unit.
+- `--max-sign-attempts`: Maximum number of attempts to sign or resign
+  transactions after blockhash expiration. If any transactions sent during the
+  program deploy are still unconfirmed after the initially chosen recent
+  blockhash expires, those transactions will be resigned with a new recent
+  blockhash and resent. Use this setting to adjust the maximum number of
+  transaction signing iterations. Each blockhash is valid for about 60 seconds,
+  which means using the default value of 5 will lead to sending transactions for
+  at least 5 minutes or until all transactions are confirmed, whichever comes
+  first. [default: 5]
+- `--use-rpc`: Send write transactions to the configured RPC instead of
+  validator TPUs. This flag requires a stake-weighted RPC connection.
+
+You can use the flags individually or combine them together. For example:
-
-Deploying a program on Solana requires hundreds, if not thousands of
-transactions, due to the max size limit of 1232 bytes for Solana transactions.
-The Solana CLI takes care of this rapid firing of transactions with the
-`solana program deploy` subcommand. The process can be broken down into the
-following 3 phases:
+
+```shell
+solana program deploy ./target/deploy/your_program.so --with-compute-unit-price 10000 --max-sign-attempts 1000 --use-rpc
+```
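+
+One way to pick a value for `--with-compute-unit-price` is to sample recent
+priority fees from your RPC node. A minimal sketch using the standard
+`getRecentPrioritizationFees` JSON-RPC method against the public endpoint
+(substitute your own RPC URL); it returns recent per-slot prioritization fees
+in micro-lamports:
+
+```shell
+curl https://api.mainnet-beta.solana.com -X POST -H "Content-Type: application/json" \
+  -d '{"jsonrpc":"2.0","id":1,"method":"getRecentPrioritizationFees"}'
+```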
-
-1. [Buffer initialization](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L2113):
-   First, the CLI sends a transaction which
-   [creates a buffer account](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L1903)
-   large enough for the byte-code being deployed. It also invokes the
-   [initialize buffer instruction](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L320)
-   to set the buffer authority to restrict writes to the deployer's chosen
-   address.
-2. [Buffer writes](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L2129):
-   Once the buffer account is initialized, the CLI
-   [breaks up the program byte-code](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L1940)
-   into ~1KB chunks and
-   [sends transactions at a rate of 100 transactions per second](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/client/src/tpu_client.rs#L133)
-   to write each chunk with
-   [the write buffer instruction](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L334).
-   These transactions are sent directly to the current leader's transaction
-   processing (TPU) port and are processed in parallel with each other. Once all
-   transactions have been sent, the CLI
-   [polls the RPC API with batches of transaction signatures](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/client/src/tpu_client.rs#L216)
-   to ensure that every write was successful and confirmed.
-3. [Finalization](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L1807):
-   Once writes are completed, the CLI
-   [sends a final transaction](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L2150)
-   to either
-   [deploy a new program](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L362)
-   or
-   [upgrade an existing program](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L513).
-   In either case, the byte-code written to the buffer account will be copied
-   into a program data account and verified.
+- Use the
+  [Priority Fee API by Helius](https://docs.helius.dev/guides/priority-fee-api)
+  to get an estimate of the priority fee to set with the
+  `--with-compute-unit-price` flag.
-
+- Get a
+  [stake-weighted](https://solana.com/developers/guides/advanced/stake-weighted-qos)
+  RPC connection from [Helius](https://www.helius.dev/) or
+  [Triton](https://triton.one/) to use with the `--use-rpc` flag. The
+  `--use-rpc` flag should only be used with a stake-weighted RPC connection.
-
-During times of congestion it is helpful to add priority fees and increase the
-max sign attempts. Using a rpc url which has
-[stake weighted quality of service](https://solana.com/developers/guides/advanced/stake-weighted-qos)
-enabled can also help to make program deploys more reliable. Using Solana
-version ^1.18.15 is recommended.
+To update your default RPC URL with a custom RPC endpoint, use the
+`solana config set` command.
-
-Example command deploying a program with the Solana CLI:
+
+```shell
+solana config set --url <RPC_URL>
+```
+
+You can view the list of programs you've deployed using the
+`solana program show --programs` command.

```shell
-program deploy target/deploy/your_program.so --with-compute-unit-price 10000 --max-sign-attempts 1000 --use-rpc
+solana program show --programs
```
-
+Example output:
-## Reclaim rent from program accounts
+
+```
+Program Id | Slot | Authority | Balance
+2w3sK6CW7Hy1Ljnz2uqPrQsg4KjNZxD4bDerXDkSX3Q1 | 133132 | 4kh6HxYZiAebF8HWLsUWod2EaQQ6iWHpHYCz8UcmFbM1 | 0.57821592 SOL
+```
-
-The storage of data on the Solana blockchain requires the payment of
-[rent](/docs/intro/rent.md), including for the byte-code for onchain programs.
-Therefore as you deploy more or larger programs, the amount of rent paid to
-remain rent-exempt will also become larger.
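+
+To inspect a single program, pass its program ID to the same command
+(placeholder shown). For upgradeable programs the output also includes the
+ProgramData address and the current upgrade authority:
+
+```shell
+solana program show <PROGRAM_ID>
+```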
+### Update Program
-
-Using the current rent cost model configuration, a rent-exempt account requires
-a deposit of ~0.7 SOL per 100KB stored. These costs can have an outsized impact
-on developers who deploy their own programs since
-[program accounts](/docs/core/accounts.md#custom-programs) are among the largest
-we typically see on Solana.
+A program's update authority can modify an existing Solana program by deploying
+a new `.so` file to the same program ID.
-
-#### Example of how much data is used for programs
+To update an existing Solana program:
-
-As a data point of the number of accounts and potential data stored on-chain,
-below is the distribution of the largest accounts (at least 100KB) at slot
-`103,089,804` on `mainnet-beta` by assigned on-chain program:
+- Make changes to your program source code
+- Run `cargo build-sbf` to generate an updated `.so` file
+- Run `solana program deploy ./target/deploy/your_program.so` to deploy the
+  updated `.so` file
-
-1. **Serum Dex v3**: 1798 accounts
-2. **Metaplex Candy Machine**: 1089 accounts
-3. **Serum Dex v2**: 864 accounts
-4. **Upgradeable BPF Program Loader**: 824 accounts
-5. **BPF Program Loader v2**: 191 accounts
-6. **BPF Program Loader v1**: 150 accounts
+The update authority can be changed using the
+`solana program set-upgrade-authority` command.
-
-### Reclaiming buffer accounts
+
+```shell
+solana program set-upgrade-authority <PROGRAM_ID> --new-upgrade-authority <NEW_UPGRADE_AUTHORITY>
+```
-
-Buffer accounts are used by the Upgradeable BPF loader to temporarily store
-byte-code that is in the process of being deployed on-chain. This temporary
-buffer is required when upgrading programs because the currently deployed
-program's byte-code cannot be affected by an in-progress upgrade.
+### Immutable Program
-
-Unfortunately, deploys fail occasionally and instead of reusing the buffer
-account, developers might retry their deployment with a new buffer and not
-realize that they stored a good chunk of SOL in a forgotten buffer account from
-an earlier deploy.
+A program can be made immutable by removing its update authority. This is an
+irreversible action.
-
-> As of slot `103,089,804` on `mainnet-beta` there are 276 abandoned buffer
-> accounts that could be reclaimed!
+```shell
+solana program set-upgrade-authority <PROGRAM_ID> --final
+```
-
-Developers can check if they own any abandoned buffer accounts by using the
-Solana CLI:
+You can specify that a program should be immutable on deployment by setting the
+`--final` flag when deploying the program.

```shell
-solana program show --buffers --keypair ~/.config/solana/MY_KEYPAIR.json
+solana program deploy ./target/deploy/your_program.so --final
```
-
-Buffer Address | Authority | Balance
-9vXW2c3qo6DrLHa1Pkya4Mw2BWZSRYs9aoyoP3g85wCA | 2nr1bHFT86W9tGnyvmYW4vcHKsQB3sVQfnddasz4kExM | 3.41076888 SOL
+### Close Program
+
+You can close your Solana program to reclaim the SOL allocated to the account.
+Closing a program is irreversible, so it should be done with caution. To close a
+program, use the `solana program close <PROGRAM_ID>` command. For example:
+
+```shell filename="Terminal"
+solana program close 4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz --bypass-warning
+```
+
+Example output:
+
+```
+Closed Program Id 4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz, 0.1350588 SOL
+reclaimed
+```
+
+Note that once a program is closed, its program ID cannot be reused. Attempting
+to deploy a program with a previously closed program ID will result in an error.
+
+```
+Error: Program 4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz has been closed, use
+a new Program Id
+```
-
-And they can close those buffers to reclaim the SOL balance with the following
+If you need to redeploy a program after closing it, you must generate a new
+program ID. To generate a new keypair for the program, run the following
command:
-
-```shell
-solana program close --buffers --keypair ~/.config/solana/MY_KEYPAIR.json
+```shell filename="Terminal"
+solana-keygen new -o ./target/deploy/your_program-keypair.json --force
```
-
-#### Fetch the owners of buffer accounts via RPC API
+Alternatively, you can delete the existing keypair file and run
+`cargo build-sbf` again, which will generate a new keypair file.
+
+### Program Buffer Accounts
+
+Deploying a program requires multiple transactions due to the 1232 byte limit
+for transactions on Solana. An intermediate step of the deploy process involves
+writing the program's byte-code to a temporary "buffer account".
+
+This buffer account is automatically closed after successful program deployment.
+However, if the deployment fails, the buffer account remains and you can either:
+
+- Continue the deployment using the existing buffer account
+- Close the buffer account to reclaim the allocated SOL (rent)
-
-The owners of all abandoned program deploy buffer accounts can be fetched via
-the RPC API:
+You can check if you have any open buffer accounts by using the
+`solana program show --buffers` command.

```shell
-curl http://api.mainnet-beta.solana.com -H "Content-Type: application/json" \
---data-binary @- << EOF | jq --raw-output '.result | .[] | .account.data[0]'
-{
-  "jsonrpc":"2.0", "id":1, "method":"getProgramAccounts",
-  "params":[
-    "BPFLoaderUpgradeab1e11111111111111111111111",
-    {
-      "dataSlice": {"offset": 5, "length": 32},
-      "filters": [{"memcmp": {"offset": 0, "bytes": "2UzHM"}}],
-      "encoding": "base64"
-    }
-  ]
-}
-EOF
+solana program show --buffers
```
-
-After re-encoding the base64 encoded keys into base58 and grouping by key, we
-see some accounts have over 10 buffer accounts they could close, yikes!
+Example output:
-
-```shell
-'BE3G2F5jKygsSNbPFKHHTxvKpuFXSumASeGweLcei6G3' => 10 buffer accounts
-'EsQ179Q8ESroBnnmTDmWEV4rZLkRc3yck32PqMxypE5z' => 10 buffer accounts
-'6KXtB89kAgzW7ApFzqhBg5tgnVinzP4NSXVqMAWnXcHs' => 12 buffer accounts
-'FinVobfi4tbdMdfN9jhzUuDVqGXfcFnRGX57xHcTWLfW' => 15 buffer accounts
-'TESAinbTL2eBLkWqyGA82y1RS6kArHvuYWfkL9dKkbs' => 42 buffer accounts
```
-
-### Reclaiming program data accounts
+```
+Buffer Address | Authority | Balance
+5TRm1DxYcXLbSEbbxWcQbEUCce7L4tVgaC6e2V4G82pM | 4kh6HxYZiAebF8HWLsUWod2EaQQ6iWHpHYCz8UcmFbM1 | 0.57821592 SOL
```
-
+You can continue the deployment using
+`solana program deploy --buffer <BUFFER_ADDRESS>`.
-
-You may now realize that program data accounts (the accounts that store the
-executable byte-code for an on-chain program) can also be closed.
+For example:
+
+```shell
+solana program deploy --buffer 5TRm1DxYcXLbSEbbxWcQbEUCce7L4tVgaC6e2V4G82pM
+```
-
-> **Note:** This does _not_ mean that _program accounts_ can be closed (those
-> are immutable and can never be reclaimed, but it's fine they're pretty small).
-> It's also important to keep in mind that once program data accounts are
-> deleted, they can never be recreated for an existing program. Therefore, the
-> corresponding program (and its program id) for any closed program data account
-> is effectively disabled forever and may not be re-deployed
+Expected output on successful deployment:
-
-While it would be uncommon for developers to need to close program data accounts
-since they can be rewritten during upgrades, one potential scenario is that
-since program data accounts can't be _resized_. You may wish to deploy your
-program at a new address to accommodate larger executables.
+
+```
+Program Id: 2w3sK6CW7Hy1Ljnz2uqPrQsg4KjNZxD4bDerXDkSX3Q1
-
-The ability to reclaim program data account rent deposits also makes testing and
-experimentation on the `mainnet-beta` cluster a lot less costly since you could
-reclaim everything except the transaction fees and a small amount of rent for
-the program account. Lastly, this could help developers recover most of their
-funds if they mistakenly deploy a program at an unintended address or on the
-wrong cluster.
+
+Signature: 3fsttJFskUmvbdL5F9y8g43rgNea5tYZeVXbimfx2Up5viJnYehWe3yx45rQJc8Kjkr6nY8D4DP4V2eiSPqvWRNL
+```
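+
+If you would rather abandon a failed deployment, you can instead close a
+single buffer account by its address to reclaim its rent deposit (placeholder
+shown); the `--buffers` form below closes all of them at once:
+
+```shell
+solana program close <BUFFER_ADDRESS>
+```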
-
-To view the programs which are owned by your wallet address, you can run:
+To close buffer accounts, use the `solana program close --buffers` command.
-
```shell
-solana -V # must be 1.7.11 or higher!
-solana program show --programs --keypair ~/.config/solana/MY_KEYPAIR.json
-
-Program Id | Slot | Authority | Balance
-CN5x9WEusU6pNH66G22SnspVx4cogWLqMfmb85Z3GW7N | 53796672 | 2nr1bHFT86W9tGnyvmYW4vcHKsQB3sVQfnddasz4kExM | 0.54397272 SOL
+solana program close --buffers
```
-
-To close those program data accounts and reclaim their SOL balance, you can run:
+### ELF Dump
+
+The SBF shared object internals can be dumped to a text file to gain more
+insight into a program's composition and what it may be doing at runtime. The
+dump will contain both the ELF information as well as a list of all the symbols
+and the instructions that implement them. Some of the BPF loader's error log
+messages will reference specific instruction numbers where the error occurred.
+These references can be looked up in the ELF dump to identify the offending
+instruction and its context.

```shell
-solana program close --programs --keypair ~/.config/solana/MY_KEYPAIR.json
+cargo build-sbf --dump
```
-
-You might be concerned about this feature allowing malicious actors to close a
-program in a way that negatively impacts end users. While this is a valid
-concern in general, closing program data accounts doesn't make this any more
-exploitable than was already possible.
+The file will be output to `./target/deploy/your_program-dump.txt`.
+
+## Program Deployment Process
+
+Deploying a program on Solana requires multiple transactions due to the max
+size limit of 1232 bytes for Solana transactions. The Solana CLI sends these
+transactions with the `solana program deploy` subcommand. The process can be
+broken down into the following 3 phases:
+
+1. [Buffer initialization](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L2113):
+   First, the CLI sends a transaction which
+   [creates a buffer account](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L1903)
+   large enough for the byte-code being deployed. It also invokes the
+   [initialize buffer instruction](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L320)
+   to set the buffer authority to restrict writes to the deployer's chosen
+   address.
+2. [Buffer writes](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L2129):
+   Once the buffer account is initialized, the CLI
+   [breaks up the program byte-code](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L1940)
+   into ~1KB chunks and
+   [sends transactions at a rate of 100 transactions per second](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/client/src/tpu_client.rs#L133)
+   to write each chunk with
+   [the write buffer instruction](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L334).
+   These transactions are sent directly to the current leader's transaction
+   processing (TPU) port and are processed in parallel with each other. Once all
+   transactions have been sent, the CLI
+   [polls the RPC API with batches of transaction signatures](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/client/src/tpu_client.rs#L216)
+   to ensure that every write was successful and confirmed.
+3. [Finalization](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L1807):
+   Once writes are completed, the CLI
+   [sends a final transaction](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L2150)
+   to either
+   [deploy a new program](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L362)
+   or
+   [upgrade an existing program](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L513).
+   In either case, the byte-code written to the buffer account will be copied
+   into a program data account and verified.
+
+## Upgradeable BPF Loader Program
+
+The BPF loader program is the program that "owns" all executable accounts on
+Solana. When you deploy a program, the owner of the program account is set to
+the BPF loader program.
+
+### State accounts
+
+The Upgradeable BPF loader program supports three different types of state
+accounts:
+
+1. [Program account](https://github.com/solana-labs/solana/blob/master/sdk/program/src/bpf_loader_upgradeable.rs#L34):
+   This is the main account of an on-chain program and its address is commonly
+   referred to as a "program id." Program id's are what transaction instructions
+   reference in order to invoke a program. Program accounts are immutable once
+   deployed, so you can think of them as a proxy account to the byte-code and
+   state stored in other accounts.
+2. [Program data account](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/bpf_loader_upgradeable.rs#L39):
+   This account is what stores the executable byte-code of an on-chain program.
+   When a program is upgraded, this account's data is updated with new
+   byte-code. In addition to byte-code, program data accounts are also
+   responsible for storing the slot when it was last modified and the address of
+   the sole account authorized to modify the account (this address can be
+   cleared to make a program immutable).
+3. [Buffer accounts](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/bpf_loader_upgradeable.rs#L27):
+   These accounts temporarily store byte-code while a program is being actively
+   deployed through a series of transactions.
They also each store the address + of the sole account which is authorized to do writes. + +### Instructions + +The state accounts listed above can only be modified with one of the following +instructions supported by the Upgradeable BPF Loader program: -Even without the ability to close a program data account, any upgradeable -program could be upgraded to a no-op implementation and then have its upgrade -authority cleared to make it immutable forever. This new feature for closing -program data accounts merely adds the ability to reclaim the rent deposit, -disabling a program was already technically possible. +1. [Initialize buffer](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L21): + Creates a buffer account and stores an authority address which is allowed to + modify the buffer. +2. [Write](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L28): + Writes byte-code at a specified byte offset inside a buffer account. Writes + are processed in small chunks due to a limitation of Solana transactions + having a maximum serialized size of 1232 bytes. +3. [Deploy](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L77): + Creates both a program account and a program data account. It fills the + program data account by copying the byte-code stored in a buffer account. If + the byte-code is valid, the program account will be set as executable, + allowing it to be invoked. If the byte-code is invalid, the instruction will + fail and all changes are reverted. +4. [Upgrade](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L102): + Fills an existing program data account by copying executable byte-code from a + buffer account. Similar to the deploy instruction, it will only succeed if + the byte-code is valid. +5. [Set authority](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L114): + Updates the authority of a program data or buffer account if the account's + current authority has signed the transaction being processed. If the + authority is deleted without replacement, it can never be set to a new + address and the account can never be closed. +6. [Close](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L127): + Clears the data of a program data account or buffer account and reclaims the + SOL used for the rent exemption deposit. 
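+
+To see this ownership in practice, you can fetch any deployed program account
+with the `solana account` command (program ID shown as a placeholder). The
+`Owner` field should show the upgradeable loader,
+`BPFLoaderUpgradeab1e11111111111111111111111`, and the account should be
+marked executable:
+
+```shell
+solana account <PROGRAM_ID>
+```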
diff --git a/docs/programs/examples.md b/docs/programs/examples.md index bd70d765d..d16eaf27a 100644 --- a/docs/programs/examples.md +++ b/docs/programs/examples.md @@ -1,5 +1,4 @@ --- -date: 2024-04-26T00:00:00Z title: "Program Examples" description: "A list of Solana program examples in different languages and frameworks, @@ -24,10 +23,11 @@ keywords: - blockchain tutorial - web3 developer - anchor +sidebarSortOrder: 3 --- The -"[Solana Program Examples](https://github.com/solana-developers/program-examples)" +[Solana Program Examples](https://github.com/solana-developers/program-examples) repository on GitHub offers several subfolders, each containing code examples for different Solana programming paradigms and languages, designed to help developers learn and experiment with Solana blockchain development. @@ -35,10 +35,10 @@ developers learn and experiment with Solana blockchain development. You can find the examples in the `solana-developers/program-examples` together with README files that explain you how to run the different examples. Most examples are self-contained and are available in native Rust (ie, with no -framework), [Anchor](https://www.anchor-lang.com/docs/installation), -[Seahorse](https://seahorse-lang.org/) and it also contains a list of examples -that we would love to -[see as contributions](https://github.com/solana-developers/program-examples?tab=readme-ov-file#examples-wed-love-to-see). +framework) and [Anchor](https://www.anchor-lang.com/docs/installation). It also +contains a list of examples that we would love to +[see as contributions](https://github.com/solana-developers/program-examples?tab=readme-ov-file#examples-wed-love-to-see). + Within the repo you will find the following subfolder, each with assorted example programs within them: @@ -56,22 +56,22 @@ Contains a series of examples that demonstrate the foundational steps for building Solana programs using native Rust libraries. These examples are designed to help developers understand the core concepts of Solana programming. -| Example Name | Description | Language | -| ----------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------ | ----------------------------------- | -| [Account Data](https://github.com/solana-developers/program-examples/tree/main/basics/account-data) | Saving an address with name, house number, street and city in an account. | Native, Anchor | -| [Checking Accounts](https://github.com/solana-developers/program-examples/tree/main/basics/checking-accounts) | Security lessons that shows how to do account checks | Native, Anchor | -| [Close Account](https://github.com/solana-developers/program-examples/tree/main/basics/close-account) | Show you how to close accounts to get its rent back. | Native, Anchor | -| [Counter](https://github.com/solana-developers/program-examples/tree/main/basics/counter) | A simple counter program in all the different architectures. | Native, Anchor, Seahorse, mpl-stack | -| [Create Account](https://github.com/solana-developers/program-examples/tree/main/basics/create-account) | How to create a system account within a program. | Native, Anchor | -| [Cross Program Invocation](https://github.com/solana-developers/program-examples/tree/main/basics/cross-program-invocation) | Using a hand and lever analogy this shows you how to call another program from within a program. 
| Native, Anchor | -| [hello solana](https://github.com/solana-developers/program-examples/tree/main/basics/hello-solana) | Hello world example which just prints hello world in the transaction logs. | Native, Anchor | -| [Pda Rent payer](https://github.com/solana-developers/program-examples/tree/main/basics/pda-rent-payer) | Shows you how you can use the lamports from a PDA to pay for a new account. | Native, Anchor | -| [Processing Instructions](https://github.com/solana-developers/program-examples/tree/main/basics/processing-instructions) | Shows you how to handle instruction data string and u32. | Native, Anchor | -| [Program Derived Addresses](https://github.com/solana-developers/program-examples/tree/main/basics/program-derived-addresses) | Shows how to use seeds to refer to a PDA and save data in it. | Native, Anchor | -| [Realloc](https://github.com/solana-developers/program-examples/tree/main/basics/realloc) | Shows you how to increase and decrease the size of an existing account. | Native, Anchor | -| [Rent](https://github.com/solana-developers/program-examples/tree/main/basics/rent) | Here you will learn how to calculate rent requirements within a program. | Native, Anchor | -| [Repository Layout](https://github.com/solana-developers/program-examples/tree/main/basics/repository-layout) | Recommendations on how to structure your program layout. | Native, Anchor | -| [Transfer SOL](https://github.com/solana-developers/program-examples/tree/main/basics/transfer-sol) | Different methods of transferring SOL for system accounts and PDAs. | Native, Anchor, Seahorse | +| Example Name | Description | Language | +| ----------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------ | ------------------------- | +| [Account Data](https://github.com/solana-developers/program-examples/tree/main/basics/account-data) | Saving an address with name, house number, street and city in an account. | Native, Anchor | +| [Checking Accounts](https://github.com/solana-developers/program-examples/tree/main/basics/checking-accounts) | Security lessons that shows how to do account checks | Native, Anchor | +| [Close Account](https://github.com/solana-developers/program-examples/tree/main/basics/close-account) | Show you how to close accounts to get its rent back. | Native, Anchor | +| [Counter](https://github.com/solana-developers/program-examples/tree/main/basics/counter) | A simple counter program in all the different architectures. | Native, Anchor, mpl-stack | +| [Create Account](https://github.com/solana-developers/program-examples/tree/main/basics/create-account) | How to create a system account within a program. | Native, Anchor | +| [Cross Program Invocation](https://github.com/solana-developers/program-examples/tree/main/basics/cross-program-invocation) | Using a hand and lever analogy this shows you how to call another program from within a program. | Native, Anchor | +| [hello solana](https://github.com/solana-developers/program-examples/tree/main/basics/hello-solana) | Hello world example which just prints hello world in the transaction logs. | Native, Anchor | +| [Pda Rent payer](https://github.com/solana-developers/program-examples/tree/main/basics/pda-rent-payer) | Shows you how you can use the lamports from a PDA to pay for a new account. 
| Native, Anchor | +| [Processing Instructions](https://github.com/solana-developers/program-examples/tree/main/basics/processing-instructions) | Shows you how to handle instruction data string and u32. | Native, Anchor | +| [Program Derived Addresses](https://github.com/solana-developers/program-examples/tree/main/basics/program-derived-addresses) | Shows how to use seeds to refer to a PDA and save data in it. | Native, Anchor | +| [Realloc](https://github.com/solana-developers/program-examples/tree/main/basics/realloc) | Shows you how to increase and decrease the size of an existing account. | Native, Anchor | +| [Rent](https://github.com/solana-developers/program-examples/tree/main/basics/rent) | Here you will learn how to calculate rent requirements within a program. | Native, Anchor | +| [Repository Layout](https://github.com/solana-developers/program-examples/tree/main/basics/repository-layout) | Recommendations on how to structure your program layout. | Native, Anchor | +| [Transfer SOL](https://github.com/solana-developers/program-examples/tree/main/basics/transfer-sol) | Different methods of transferring SOL for system accounts and PDAs. | Native, Anchor, Seahorse | ## Compression @@ -89,9 +89,9 @@ focused on compressed NFTs (cNFTs). Oracles allow to use off chain data in programs. -| Example Name | Description | Language | -| ------------------------------------------------------------------------------------ | --------------------------------------------------------------- | ---------------- | -| [Pyth](https://github.com/solana-developers/program-examples/tree/main/oracles/pyth) | Pyth makes price data of tokens available in on chain programs. | Anchor, Seahorse | +| Example Name | Description | Language | +| ------------------------------------------------------------------------------------ | --------------------------------------------------------------- | -------- | +| [Pyth](https://github.com/solana-developers/program-examples/tree/main/oracles/pyth) | Pyth makes price data of tokens available in on chain programs. | Anchor | ## Tokens @@ -99,15 +99,15 @@ Most tokens on Solana use the Solana Program Library (SPL) token standard. Here you can find many examples on how to mint, transfer, burn tokens and even how to interact with them in programs. -| Example Name | Description | Language | -| --------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ------------------------ | -| [Create Token](https://github.com/solana-developers/program-examples/tree/main/tokens/create-token) | How to create a token and add metaplex metadata to it. | Anchor, Native | -| [NFT Minter](https://github.com/solana-developers/program-examples/tree/main/tokens/nft-minter) | Minting only one amount of a token and then removing the mint authority. | Anchor, Native | -| [PDA Mint Authority](https://github.com/solana-developers/program-examples/tree/main/tokens/pda-mint-authority) | Shows you how to change the mint authority of a mint, to mint tokens from within a program. | Anchor, Native | -| [SPL Token Minter](https://github.com/solana-developers/program-examples/tree/main/tokens/spl-token-minter) | Explains how to use Associated Token Accounts to be able to keep track of token accounts. 
| Anchor, Native | -| [Token Swap](https://github.com/solana-developers/program-examples/tree/main/tokens/token-swap) | Extensive example that shows you how to build a AMM (automated market maker) pool for SPL tokens. | Anchor | -| [Transfer Tokens](https://github.com/solana-developers/program-examples/tree/main/tokens/transfer-tokens) | Shows how to transfer SPL token using CPIs into the token program. | Anchor, Native, Seahorse | -| [Token-2022](https://github.com/solana-developers/program-examples/tree/main/tokens/token-2022) | See Token 2022 (Token extensions). | Anchor, Native | +| Example Name | Description | Language | +| --------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | -------------- | +| [Create Token](https://github.com/solana-developers/program-examples/tree/main/tokens/create-token) | How to create a token and add metaplex metadata to it. | Anchor, Native | +| [NFT Minter](https://github.com/solana-developers/program-examples/tree/main/tokens/nft-minter) | Minting only one amount of a token and then removing the mint authority. | Anchor, Native | +| [PDA Mint Authority](https://github.com/solana-developers/program-examples/tree/main/tokens/pda-mint-authority) | Shows you how to change the mint authority of a mint, to mint tokens from within a program. | Anchor, Native | +| [SPL Token Minter](https://github.com/solana-developers/program-examples/tree/main/tokens/spl-token-minter) | Explains how to use Associated Token Accounts to be able to keep track of token accounts. | Anchor, Native | +| [Token Swap](https://github.com/solana-developers/program-examples/tree/main/tokens/token-swap) | Extensive example that shows you how to build a AMM (automated market maker) pool for SPL tokens. | Anchor | +| [Transfer Tokens](https://github.com/solana-developers/program-examples/tree/main/tokens/transfer-tokens) | Shows how to transfer SPL token using CPIs into the token program. | Anchor, Native | +| [Token-2022](https://github.com/solana-developers/program-examples/tree/main/tokens/token-2022) | See Token 2022 (Token extensions). | Anchor, Native | ## Token 2022 (Token Extensions) diff --git a/docs/programs/faq.md b/docs/programs/faq.md index a5dd87b8c..c478fa37d 100644 --- a/docs/programs/faq.md +++ b/docs/programs/faq.md @@ -1,23 +1,10 @@ --- title: "FAQ" +sidebarSortOrder: 7 --- -When writing or interacting with Solana programs, there are common questions or -challenges that often come up. Below are resources to help answer these -questions. - -If not addressed here, ask on -[StackExchange](https://solana.stackexchange.com/questions/ask?tags=solana-program) -with the `solana-program` tag. - -## Limitations - -Developing programs on the Solana blockchain have some inherent limitation -associated with them. Below is a list of common limitation that you may run -into. - -See [limitations of developing programs](/docs/programs/limitations.md) for more -details +Post your questions on +[StackExchange](https://solana.stackexchange.com/questions/ask). ## Berkeley Packet Filter (BPF) @@ -77,20 +64,11 @@ Some instructions require the account to be a signer; this error is returned if an account is expected to be signed but is not. 
An implementation of a program might also cause this error when performing a
-cross-program invocation that requires a signed program address, but the passed
-signer seeds passed to [`invoke_signed`](/docs/core/cpi.md) don't match the
+[cross-program invocation](/docs/core/cpi.md) that requires a signed program
+address, but the signer seeds passed to `invoke_signed` don't match the
signer seeds used to create the program address
[`create_program_address`](/docs/core/pda.md#createprogramaddress).

-## `rand` Rust dependency causes compilation failure
-
-See
-[Rust Project Dependencies](/docs/programs/lang-rust.md#project-dependencies)
-
-## Rust restrictions
-
-See [Rust restrictions](/docs/programs/lang-rust.md#restrictions)
-
## Stack

SBF uses stack frames instead of a variable stack pointer. Each stack frame is
@@ -106,7 +84,7 @@ Error: Function _ZN16curve25519_dalek7edwards21EdwardsBasepointTable6create17h17
```

The message identifies which symbol is exceeding its stack frame, but the name
-might be mangled if it is a Rust or C++ symbol.
+might be mangled.

> To demangle a Rust symbol use [rustfilt](https://github.com/luser/rustfilt).

@@ -117,8 +95,6 @@ rustfilt _ZN16curve25519_dalek7edwards21EdwardsBasepointTable6create17h178b3d241
curve25519_dalek::edwards::EdwardsBasepointTable::create
```

-To demangle a C++ symbol use `c++filt` from binutils.
-
The reason a warning is reported rather than an error is because some dependent
crates may include functionality that violates the stack frame restrictions even
if the program doesn't use that functionality. If the program violates the stack
@@ -128,16 +104,16 @@

SBF stack frames occupy a virtual address range starting at `0x200000000`.

## Heap size

-Programs have access to a runtime heap either directly in C or via the Rust
-`alloc` APIs. To facilitate fast allocations, a simple 32KB bump heap is
-utilized. The heap does not support `free` or `realloc` so use it wisely.
+Programs have access to a runtime heap via the Rust `alloc` APIs. To facilitate
+fast allocations, a simple 32KB bump heap is utilized. The heap does not support
+`free` or `realloc`.

Internally, programs have access to the 32KB memory region starting at virtual
address 0x300000000 and may implement a custom heap based on the program's
specific needs.

-- [Rust program heap usage](/docs/programs/lang-rust.md#heap)
-- [C program heap usage](/docs/programs/lang-c.md#heap)
+Rust programs implement the heap directly by defining a custom
+[`global_allocator`](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/entrypoint.rs#L72)

## Loaders

@@ -157,11 +133,7 @@

For all practical purposes program should always be written to target the
latest BPF loader and the latest loader is the default for the command-line
interface and the javascript APIs.

-For language specific information about implementing a program for a particular
-loader see:
-
- [Rust program entrypoints](/docs/programs/lang-rust.md#program-entrypoint)
-- [C program entrypoints](/docs/programs/lang-c.md#program-entrypoint)

### Deployment

@@ -193,10 +165,7 @@

results in various parameters falling on aligned offsets within the aligned
byte array. This allows deserialization implementations to directly reference
the byte array and provide aligned pointers to the program.
-For language specific information about serialization see: - - [Rust program parameter deserialization](/docs/programs/lang-rust.md#parameter-deserialization) -- [C program parameter deserialization](/docs/programs/lang-c.md#parameter-deserialization) The latest loader serializes the program input parameters as follows (all encoding is little endian): diff --git a/docs/programs/index.md b/docs/programs/index.md index 6a09d28ff..4189752bd 100644 --- a/docs/programs/index.md +++ b/docs/programs/index.md @@ -1,5 +1,5 @@ --- title: Developing Programs +sidebarSortOrder: 2 metaOnly: true -sidebarSortOrder: 4 --- diff --git a/docs/programs/lang-c.md b/docs/programs/lang-c.md deleted file mode 100644 index b5bad22c9..000000000 --- a/docs/programs/lang-c.md +++ /dev/null @@ -1,192 +0,0 @@ ---- -title: "Developing with C" ---- - -Solana supports writing onchain programs using the C and C++ programming -languages. - -## Project Layout - -C projects are laid out as follows: - -```text -/src/ -/makefile -``` - -The `makefile` should contain the following: - -```shell -OUT_DIR := -include ~/.local/share/solana/install/active_release/bin/sdk/sbf/c/sbf.mk -``` - -The sbf-sdk may not be in the exact place specified above but if you setup your -environment per [How to Build](#how-to-build) then it should be. - -## How to Build - -First setup the environment: - -- Install the latest Rust stable from https://rustup.rs -- Install the latest - [Solana command-line tools](https://docs.solanalabs.com/cli/install) - -Then build using make: - -```shell -make -C -``` - -## How to Test - -Solana uses the [Criterion](https://github.com/Snaipe/Criterion) test framework -and tests are executed each time the program is built -[How to Build](#how-to-build). - -To add tests, create a new file next to your source file named -`test_.c` and populate it with criterion test cases. See the -[Criterion docs](https://criterion.readthedocs.io/en/master) for information on -how to write a test case. - -## Program Entrypoint - -Programs export a known entrypoint symbol which the Solana runtime looks up and -calls when invoking a program. Solana supports multiple versions of the SBF -loader and the entrypoints may vary between them. Programs must be written for -and deployed to the same loader. For more details see the -[FAQ section on Loaders](/docs/programs/faq.md#loaders). - -Currently there are two supported loaders -[SBF Loader](https://github.com/solana-labs/solana/blob/7ddf10e602d2ed87a9e3737aa8c32f1db9f909d8/sdk/program/src/bpf_loader.rs#L17) -and -[SBF loader deprecated](https://github.com/solana-labs/solana/blob/7ddf10e602d2ed87a9e3737aa8c32f1db9f909d8/sdk/program/src/bpf_loader_deprecated.rs#L14). - -They both have the same raw entrypoint definition, the following is the raw -symbol that the runtime looks up and calls: - -```c -extern uint64_t entrypoint(const uint8_t *input) -``` - -This entrypoint takes a generic byte array which contains the serialized program -parameters (program id, accounts, instruction data, etc...). To deserialize the -parameters each loader contains its own [helper function](#serialization). 
- -### Serialization - -Each loader provides a helper function that deserializes the program's input -parameters into C types: - -- [SBF Loader deserialization](https://github.com/solana-labs/solana/blob/d2ee9db2143859fa5dc26b15ee6da9c25cc0429c/sdk/bpf/c/inc/solana_sdk.h#L304) -- [SBF Loader deprecated deserialization](https://github.com/solana-labs/solana/blob/8415c22b593f164020adc7afe782e8041d756ddf/sdk/bpf/c/inc/deserialize_deprecated.h#L25) - -Some programs may want to perform deserialization themselves, and they can by -providing their own implementation of the [raw entrypoint](#program-entrypoint). -Take note that the provided deserialization functions retain references back to -the serialized byte array for variables that the program is allowed to modify -(lamports, account data). The reason for this is that upon return the loader -will read those modifications so they may be committed. If a program implements -their own deserialization function they need to ensure that any modifications -the program wishes to commit must be written back into the input byte array. - -Details on how the loader serializes the program inputs can be found in the -[Input Parameter Serialization](https://solana.com/docs/programs/faq#input-parameter-serialization) -docs. - -## Data Types - -The loader's deserialization helper function populates the -[SolParameters](https://github.com/solana-labs/solana/blob/8415c22b593f164020adc7afe782e8041d756ddf/sdk/sbf/c/inc/solana_sdk.h#L276) -structure: - -```c -/** - * Structure that the program's entrypoint input data is deserialized into. - */ -typedef struct { - SolAccountInfo* ka; /** Pointer to an array of SolAccountInfo, must already - point to an array of SolAccountInfos */ - uint64_t ka_num; /** Number of SolAccountInfo entries in `ka` */ - const uint8_t *data; /** pointer to the instruction data */ - uint64_t data_len; /** Length in bytes of the instruction data */ - const SolPubkey *program_id; /** program_id of the currently executing program */ -} SolParameters; -``` - -'ka' is an ordered array of the accounts referenced by the instruction and -represented as a -[SolAccountInfo](https://github.com/solana-labs/solana/blob/8415c22b593f164020adc7afe782e8041d756ddf/sdk/sbf/c/inc/solana_sdk.h#L173) -structures. An account's place in the array signifies its meaning, for example, -when transferring lamports an instruction may define the first account as the -source and the second as the destination. - -The members of the `SolAccountInfo` structure are read-only except for -`lamports` and `data`. Both may be modified by the program in accordance with -the "runtime enforcement policy". When an instruction reference the same account -multiple times there may be duplicate `SolAccountInfo` entries in the array but -they both point back to the original input byte array. A program should handle -these cases delicately to avoid overlapping read/writes to the same buffer. If a -program implements their own deserialization function care should be taken to -handle duplicate accounts appropriately. - -`data` is the general purpose byte array from the -[instruction's instruction data](/docs/core/transactions.md#instruction) being -processed. - -`program_id` is the public key of the currently executing program. 
- -## Heap - -C programs can allocate memory via the system call -[`calloc`](https://github.com/solana-labs/solana/blob/c3d2d2134c93001566e1e56f691582f379b5ae55/sdk/sbf/c/inc/solana_sdk.h#L245) -or implement their own heap on top of the 32KB heap region starting at virtual -address x300000000. The heap region is also used by `calloc` so if a program -implements their own heap it should not also call `calloc`. - -## Logging - -The runtime provides two system calls that take data and log it to the program -logs. - -- [`sol_log(const char*)`](https://github.com/solana-labs/solana/blob/d2ee9db2143859fa5dc26b15ee6da9c25cc0429c/sdk/sbf/c/inc/solana_sdk.h#L128) -- [`sol_log_64(uint64_t, uint64_t, uint64_t, uint64_t, uint64_t)`](https://github.com/solana-labs/solana/blob/d2ee9db2143859fa5dc26b15ee6da9c25cc0429c/sdk/sbf/c/inc/solana_sdk.h#L134) - -The [debugging](/docs/programs/debugging.md#logging) section has more -information about working with program logs. - -## Compute Budget - -Use the system call `sol_remaining_compute_units()` to return a `u64` indicating -the number of compute units remaining for this transaction. - -Use the system call -[`sol_log_compute_units()`](https://github.com/solana-labs/solana/blob/d3a3a7548c857f26ec2cb10e270da72d373020ec/sdk/sbf/c/inc/solana_sdk.h#L140) -to log a message containing the remaining number of compute units the program -may consume before execution is halted - -See the [Compute Budget](/docs/core/fees.md#compute-budget) documentation for -more information. - -## ELF Dump - -The SBF shared object internals can be dumped to a text file to gain more -insight into a program's composition and what it may be doing at runtime. The -dump will contain both the ELF information as well as a list of all the symbols -and the instructions that implement them. Some of the SBF loader's error log -messages will reference specific instruction numbers where the error occurred. -These references can be looked up in the ELF dump to identify the offending -instruction and its context. - -To create a dump file: - -```shell -cd -make dump_ -``` - -## Examples - -The -[Solana Program Library github](https://github.com/solana-labs/solana-program-library/tree/master/examples/c) -repo contains a collection of C examples diff --git a/docs/programs/lang-rust.md b/docs/programs/lang-rust.md deleted file mode 100644 index 22edde649..000000000 --- a/docs/programs/lang-rust.md +++ /dev/null @@ -1,400 +0,0 @@ ---- -title: "Developing with Rust" ---- - -Solana supports writing onchain programs using the -[Rust](https://www.rust-lang.org/) programming language. - - - -To quickly get started with Solana development and build your first Rust -program, take a look at these detailed quick start guides: - -- [Build and deploy your first Solana program using only your browser](/content/guides/getstarted/hello-world-in-your-browser.md). - No installation needed. -- [Setup your local environment](/docs/intro/installation) and use the local - test validator. - - - -## Project Layout - -Solana Rust programs follow the typical -[Rust project layout](https://doc.rust-lang.org/cargo/guide/project-layout.html): - -```text -/inc/ -/src/ -/Cargo.toml -``` - -Solana Rust programs may depend directly on each other in order to gain access -to instruction helpers when making -[cross-program invocations](/docs/core/cpi.md). When doing so it's important to -not pull in the dependent program's entrypoint symbols because they may conflict -with the program's own. 
To avoid this, programs should define an `no-entrypoint` -feature in `Cargo.toml` and use to exclude the entrypoint. - -- [Define the feature](https://github.com/solana-labs/solana-program-library/blob/fca9836a2c8e18fc7e3595287484e9acd60a8f64/token/program/Cargo.toml#L12) -- [Exclude the entrypoint](https://github.com/solana-labs/solana-program-library/blob/fca9836a2c8e18fc7e3595287484e9acd60a8f64/token/program/src/lib.rs#L12) - -Then when other programs include this program as a dependency, they should do so -using the `no-entrypoint` feature. - -- [Include without entrypoint](https://github.com/solana-labs/solana-program-library/blob/fca9836a2c8e18fc7e3595287484e9acd60a8f64/token-swap/program/Cargo.toml#L22) - -## Project Dependencies - -At a minimum, Solana Rust programs must pull in the -[`solana-program`](https://crates.io/crates/solana-program) crate. - -Solana SBF programs have some [restrictions](#restrictions) that may prevent the -inclusion of some crates as dependencies or require special handling. - -For example: - -- Crates that require the architecture be a subset of the ones supported by the - official toolchain. There is no workaround for this unless that crate is - forked and SBF added to that those architecture checks. -- Crates may depend on `rand` which is not supported in Solana's deterministic - program environment. To include a `rand` dependent crate refer to - [Depending on Rand](#depending-on-rand). -- Crates may overflow the stack even if the stack overflowing code isn't - included in the program itself. For more information refer to - [Stack](/docs/programs/faq.md#stack). - -## How to Build - -First setup the environment: - -- Install the latest Rust stable from https://rustup.rs/ -- Install the latest - [Solana command-line tools](https://docs.solanalabs.com/cli/install) - -The normal cargo build is available for building programs against your host -machine which can be used for unit testing: - -```shell -cargo build -``` - -To build a specific program, such as SPL Token, for the Solana SBF target which -can be deployed to the cluster: - -```shell -cd -cargo build-bpf -``` - -## How to Test - -Solana programs can be unit tested via the traditional `cargo test` mechanism by -exercising program functions directly. - -To help facilitate testing in an environment that more closely matches a live -cluster, developers can use the -[`program-test`](https://crates.io/crates/solana-program-test) crate. The -`program-test` crate starts up a local instance of the runtime and allows tests -to send multiple transactions while keeping state for the duration of the test. - -For more information the -[test in sysvar example](https://github.com/solana-labs/solana-program-library/blob/master/examples/rust/sysvar/tests/functional.rs) -shows how an instruction containing sysvar account is sent and processed by the -program. - -## Program Entrypoint - -Programs export a known entrypoint symbol which the Solana runtime looks up and -calls when invoking a program. Solana supports multiple versions of the BPF -loader and the entrypoints may vary between them. Programs must be written for -and deployed to the same loader. For more details see the -[FAQ section on Loaders](/docs/programs/faq.md#loaders). 
-
-Currently there are two supported loaders:
-[BPF Loader](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/bpf_loader.rs#L17)
-and
-[BPF loader deprecated](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/bpf_loader_deprecated.rs#L14).
-
-They both have the same raw entrypoint definition; the following is the raw
-symbol that the runtime looks up and calls:
-
-```rust
-#[no_mangle]
-pub unsafe extern "C" fn entrypoint(input: *mut u8) -> u64;
-```
-
-This entrypoint takes a generic byte array which contains the serialized program
-parameters (program id, accounts, instruction data, etc...). To deserialize the
-parameters, each loader contains its own wrapper macro that exports the raw
-entrypoint, deserializes the parameters, calls a user defined instruction
-processing function, and returns the results.
-
-You can find the entrypoint macros here:
-
-- [BPF Loader's entrypoint macro](https://github.com/solana-labs/solana/blob/9b1199cdb1b391b00d510ed7fc4866bdf6ee4eb3/sdk/program/src/entrypoint.rs#L42)
-- [BPF Loader deprecated's entrypoint macro](https://github.com/solana-labs/solana/blob/9b1199cdb1b391b00d510ed7fc4866bdf6ee4eb3/sdk/program/src/entrypoint_deprecated.rs#L38)
-
-The program defined instruction processing function that the entrypoint macros
-call must be of this form:
-
-```rust
-pub type ProcessInstruction =
-    fn(program_id: &Pubkey, accounts: &[AccountInfo], instruction_data: &[u8]) -> ProgramResult;
-```
-
-### Parameter Deserialization
-
-Each loader provides a helper function that deserializes the program's input
-parameters into Rust types. The entrypoint macros automatically call the
-deserialization helper:
-
-- [BPF Loader deserialization](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/entrypoint.rs#L146)
-- [BPF Loader deprecated deserialization](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/entrypoint_deprecated.rs#L57)
-
-Some programs may want to perform deserialization themselves, and they can do so
-by providing their own implementation of the
-[raw entrypoint](#program-entrypoint). Take note that the provided
-deserialization functions retain references back to the serialized byte array
-for variables that the program is allowed to modify (lamports, account data).
-The reason for this is that upon return the loader will read those modifications
-so they may be committed. If a program implements its own deserialization
-function, it needs to ensure that any modifications the program wishes to commit
-are written back into the input byte array.
-
-Details on how the loader serializes the program inputs can be found in the
-[Input Parameter Serialization](/docs/programs/faq.md#input-parameter-serialization)
-docs.
-
-### Data Types
-
-The loader's entrypoint macros call the program defined instruction processor
-function with the following parameters:
-
-```rust
-program_id: &Pubkey,
-accounts: &[AccountInfo],
-instruction_data: &[u8]
-```
-
-The program id is the public key of the currently executing program.
-
-The accounts are an ordered slice of the accounts referenced by the instruction,
-represented as
-[AccountInfo](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/account_info.rs#L12)
-structures.
An account's place in the array signifies its meaning; for example,
-when transferring lamports an instruction may define the first account as the
-source and the second as the destination.
-
-The members of the `AccountInfo` structure are read-only except for `lamports`
-and `data`. Both may be modified by the program in accordance with the "runtime
-enforcement policy". Both of these members are protected by the Rust `RefCell`
-construct, so they must be borrowed to read or write to them. The reason for
-this is they both point back to the original input byte array, but there may be
-multiple entries in the accounts slice that point to the same account. Using
-`RefCell` ensures that the program does not accidentally perform overlapping
-read/writes to the same underlying data via multiple `AccountInfo` structures.
-If a program implements its own deserialization function, care should be taken
-to handle duplicate accounts appropriately.
-
-The instruction data is the general purpose byte array from the
-[instruction's instruction data](/docs/core/transactions.md#instruction) being
-processed.
-
-## Heap
-
-Rust programs implement the heap directly by defining a custom
-[`global_allocator`](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/entrypoint.rs#L72).
-
-Programs may implement their own `global_allocator` based on their specific
-needs. Refer to the [custom heap example](#examples) for more information.
-
-## Restrictions
-
-On-chain Rust programs support most of Rust's libstd, libcore, and liballoc, as
-well as many 3rd party crates.
-
-There are some limitations since these programs run in a resource-constrained,
-single-threaded environment, as well as being deterministic:
-
-- No access to
-  - `rand`
-  - `std::fs`
-  - `std::net`
-  - `std::future`
-  - `std::process`
-  - `std::sync`
-  - `std::task`
-  - `std::thread`
-  - `std::time`
-- Limited access to:
-  - `std::hash`
-  - `std::os`
-- Bincode is extremely computationally expensive in both cycles and call depth
-  and should be avoided
-- String formatting should be avoided since it is also computationally
-  expensive.
-- No support for `println!` or `print!`; the Solana [logging helpers](#logging)
-  should be used instead.
-- The runtime enforces a limit on the number of instructions a program can
-  execute during the processing of one instruction. See
-  [computation budget](/docs/core/fees.md#compute-budget) for more information.
-
-## Depending on Rand
-
-Programs are constrained to run deterministically, so random numbers are not
-available. Sometimes a program may depend on a crate that itself depends on
-`rand` even if the program does not use any of the random number functionality.
-If a program depends on `rand`, the compilation will fail because there is no
-`getrandom` support for Solana.
The error will typically look like this:
-
-```shell
-error: target is not supported, for more information see: https://docs.rs/getrandom/#unsupported-targets
-   --> /Users/jack/.cargo/registry/src/github.com-1ecc6299db9ec823/getrandom-0.1.14/src/lib.rs:257:9
-    |
-257 | / compile_error!("\
-258 | |     target is not supported, for more information see: \
-259 | |     https://docs.rs/getrandom/#unsupported-targets\
-260 | | ");
-    | |___________^
-```
-
-To work around this dependency issue, add the following dependency to the
-program's `Cargo.toml`:
-
-```toml
-getrandom = { version = "0.1.14", features = ["dummy"] }
-```
-
-or if the dependency is on getrandom v0.2 add:
-
-```toml
-getrandom = { version = "0.2.2", features = ["custom"] }
-```
-
-## Logging
-
-Rust's `println!` macro is computationally expensive and not supported. Instead
-the helper macro
-[`msg!`](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/log.rs#L33)
-is provided.
-
-`msg!` has two forms:
-
-```rust
-msg!("A string");
-```
-
-or
-
-```rust
-msg!(0_u64, 1_u64, 2_u64, 3_u64, 4_u64);
-```
-
-Both forms output the results to the program logs. If a program so wishes, it
-can emulate `println!` by using `format!`:
-
-```rust
-msg!("Some variable: {:?}", variable);
-```
-
-The [debugging](/docs/programs/debugging.md#logging) section has more
-information about working with program logs, and the [Rust examples](#examples)
-contain a logging example.
-
-## Panicking
-
-Rust's `panic!`, `assert!`, and internal panic results are printed to the
-[program logs](/docs/programs/debugging.md#logging) by default.
-
-```shell
-INFO solana_runtime::message_processor] Finalized account CGLhHSuWsp1gT4B7MY2KACqp9RUwQRhcUFfVSuxpSajZ
-INFO solana_runtime::message_processor] Call SBF program CGLhHSuWsp1gT4B7MY2KACqp9RUwQRhcUFfVSuxpSajZ
-INFO solana_runtime::message_processor] Program log: Panicked at: 'assertion failed: `(left == right)`
-      left: `1`,
-     right: `2`', rust/panic/src/lib.rs:22:5
-INFO solana_runtime::message_processor] SBF program consumed 5453 of 200000 units
-INFO solana_runtime::message_processor] SBF program CGLhHSuWsp1gT4B7MY2KACqp9RUwQRhcUFfVSuxpSajZ failed: BPF program panicked
-```
-
-### Custom Panic Handler
-
-Programs can override the default panic handler by providing their own
-implementation.
-
-First, define the `custom-panic` feature in the program's `Cargo.toml`:
-
-```toml
-[features]
-default = ["custom-panic"]
-custom-panic = []
-```
-
-Then provide a custom implementation of the panic handler:
-
-```rust
-#[cfg(all(feature = "custom-panic", target_os = "solana"))]
-#[no_mangle]
-fn custom_panic(info: &core::panic::PanicInfo<'_>) {
-    solana_program::msg!("program custom panic enabled");
-    solana_program::msg!("{}", info);
-}
-```
-
-In the above snippet, the default implementation is shown, but developers may
-replace that with something that better suits their needs.
-
-One of the side effects of supporting full panic messages by default is that
-programs incur the cost of pulling in more of Rust's `libstd` implementation
-into the program's shared object. Typical programs will already be pulling in a
-fair amount of `libstd` and may not notice much of an increase in the shared
-object size. But programs that explicitly attempt to be very small by avoiding
-`libstd` may take a significant impact (~25kb). To eliminate that impact,
-programs can provide their own custom panic handler with an empty
-implementation.
-
-```rust
-#[cfg(all(feature = "custom-panic", target_os = "solana"))]
-#[no_mangle]
-fn custom_panic(info: &core::panic::PanicInfo<'_>) {
-    // Do nothing to save space
-}
-```
-
-## Compute Budget
-
-Use the system call `sol_remaining_compute_units()` to get a `u64` indicating
-the number of compute units remaining for this transaction.
-
-Use the system call
-[`sol_log_compute_units()`](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/log.rs#L141)
-to log a message containing the remaining number of compute units the program
-may consume before execution is halted.
-
-See the [Compute Budget](/docs/core/fees.md#compute-budget) documentation for
-more information.
-
-## ELF Dump
-
-The SBF shared object internals can be dumped to a text file to gain more
-insight into a program's composition and what it may be doing at runtime. The
-dump will contain both the ELF information as well as a list of all the symbols
-and the instructions that implement them. Some of the BPF loader's error log
-messages will reference specific instruction numbers where the error occurred.
-These references can be looked up in the ELF dump to identify the offending
-instruction and its context.
-
-To create a dump file:
-
-```shell
-cd 
-cargo build-bpf --dump
-```
-
-## Examples
-
-The
-[Solana Program Library GitHub](https://github.com/solana-labs/solana-program-library/tree/master/examples/rust)
-repo contains a collection of Rust examples.
-
-The
-[Solana Developers Program Examples GitHub](https://github.com/solana-developers/program-examples)
-repo also contains a collection of beginner to intermediate Rust program
-examples.
diff --git a/docs/programs/limitations.md b/docs/programs/limitations.md
index 1a4469669..1ec59ccc3 100644
--- a/docs/programs/limitations.md
+++ b/docs/programs/limitations.md
@@ -1,5 +1,6 @@
 ---
 title: "Limitations"
+sidebarSortOrder: 6
 ---
 
 Developing programs on the Solana blockchain has some inherent limitations
@@ -12,9 +13,35 @@ Since Rust based onchain programs must be deterministic while running in a
 resource-constrained, single-threaded environment, they have some limitations
 on various libraries.
 
-See
-[Developing with Rust - Restrictions](/docs/programs/lang-rust.md#restrictions)
-for a detailed breakdown of these restrictions and limitations.
+On-chain Rust programs support most of Rust's libstd, libcore, and liballoc, as
+well as many 3rd party crates.
+
+There are some limitations since these programs run in a resource-constrained,
+single-threaded environment, as well as being deterministic:
+
+- No access to
+  - `rand`
+  - `std::fs`
+  - `std::net`
+  - `std::future`
+  - `std::process`
+  - `std::sync`
+  - `std::task`
+  - `std::thread`
+  - `std::time`
+- Limited access to:
+  - `std::hash`
+  - `std::os`
+- Bincode is extremely computationally expensive in both cycles and call depth
+  and should be avoided
+- String formatting should be avoided since it is also computationally
+  expensive.
+- No support for `println!` or `print!`; use the
+  [`msg!`](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/log.rs#L33)
+  macro instead.
+- The runtime enforces a limit on the number of instructions a program can
+  execute during the processing of one instruction. See
+  [computation budget](/docs/core/fees.md#compute-budget) for more information.
@@ -82,6 +109,4 @@ added to support writable data.
 ## Signed division
 
-The SBF instruction set does not support
-[signed division](https://www.kernel.org/doc/html/latest/bpf/bpf_design_QA.html#q-why-there-is-no-bpf-sdiv-for-signed-divide-operation).
-Adding a signed division instruction is a consideration.
+The SBF instruction set does not support signed division.
diff --git a/docs/programs/overview.md b/docs/programs/overview.md
deleted file mode 100644
index 524e6bddc..000000000
--- a/docs/programs/overview.md
+++ /dev/null
@@ -1,107 +0,0 @@
----
-title: Overview of Developing On-chain Programs
-sidebarLabel: Overview
-sidebarSortOrder: 0
-altRoutes:
-  - /docs/programs
----
-
-Developers can write and deploy their own programs to the Solana blockchain.
-This process can be broadly summarized into a few key steps.
-
-
-
-To quickly get started with Solana development and build your first Rust
-program, take a look at these detailed quick start guides:
-
-- [Build and deploy your first Solana program using only your browser](/content/guides/getstarted/hello-world-in-your-browser.md).
-  No installation needed.
-- [Setup your local environment](/docs/intro/installation) and use the local
-  test validator.
-
-
-
-## On-chain program development lifecycle
-
-1. Setup your development environment
-2. Write your program
-3. Compile the program
-4. Generate the program's public address
-5. Deploy the program
-
-### 1. Setup your development environment
-
-The most robust way of getting started with Solana development is
-[installing the Solana CLI](https://docs.solanalabs.com/cli/install) tools on
-your local computer. This will allow you to have the most powerful development
-environment.
-
-Some developers may also opt for using
-[Solana Playground](https://beta.solpg.io/), a browser based IDE. It lets you
-write, build, and deploy onchain programs, all from your browser, with no
-installation needed.
-
-### 2. Write your program
-
-Writing Solana programs is most commonly done using the Rust language. These
-Rust programs are effectively the same as creating a traditional
-[Rust library](https://doc.rust-lang.org/rust-by-example/crates/lib.html).
-
-> You can read more about other [supported languages](#support-languages) below.
-
-### 3. Compile the program
-
-Once the program is written, it must be compiled down to
-[Berkeley Packet Filter](/docs/programs/faq.md#berkeley-packet-filter-bpf)
-byte-code that will then be deployed to the blockchain.
-
-### 4. Generate the program's public address
-
-Using the [Solana CLI](https://docs.solanalabs.com/cli/install), the developer
-will generate a new unique [Keypair](/docs/terminology.md#keypair) for the new
-program. The public address (aka
-[Pubkey](/docs/terminology.md#public-key-pubkey)) from this Keypair will be used
-on-chain as the program's public address (aka
-[`programId`](/docs/terminology.md#program-id)).
-
-### 5. Deploy the program
-
-Then, again using the CLI, the compiled program can be deployed to the selected
-blockchain cluster by creating many transactions containing the program's
-byte-code. Due to the transaction memory size limitations, each transaction
-effectively sends small chunks of the program to the blockchain in a rapid-fire
-manner.
-
-Once the entire program has been sent to the blockchain, a final transaction is
-sent to write all of the buffered byte-code to the program's data account. This
-either marks the new program as `executable` or completes the process of
-upgrading an existing program (if one already existed).
-
-## Support languages
-
-Solana programs are typically written in the
-[Rust language](/docs/programs/lang-rust.md), but
-[C/C++](/docs/programs/lang-c.md) are also supported.
-
-There are also various community driven efforts to enable writing on-chain
-programs using other languages, including:
-
-- Python via [Seahorse](https://seahorse.dev/) (which acts as a wrapper around
-  the Rust-based Anchor framework)
-
-## Example programs
-
-You can also explore the [Program Examples](/docs/programs/examples.md) for
-examples of onchain programs.
-
-## Limitations
-
-As you dive deeper into program development, it is important to understand some
-of the important limitations associated with onchain programs.
-
-Read more details on the [Limitations](/docs/programs/limitations.md) page.
-
-## Frequently asked questions
-
-Discover many of the [frequently asked questions](/docs/programs/faq.md) other
-developers have about writing/understanding Solana programs.
diff --git a/docs/programs/rust/index.md b/docs/programs/rust/index.md
new file mode 100644
index 000000000..7fe456337
--- /dev/null
+++ b/docs/programs/rust/index.md
@@ -0,0 +1,474 @@
+---
+title: Developing Programs in Rust
+description:
+  Learn how to develop Solana programs using Rust, including step-by-step
+  instructions for creating, building, testing, and deploying smart contracts on
+  the Solana blockchain.
+sidebarLabel: Rust Programs
+sidebarSortOrder: 1
+altRoutes:
+  - /docs/programs/lang-rust
+---
+
+Solana programs are primarily developed using the Rust programming language.
+This page focuses on writing Solana programs in Rust without using the Anchor
+framework, an approach often referred to as writing "native Rust" programs.
+
+Native Rust development provides developers with direct control over their
+Solana programs. However, this approach requires more manual setup and
+boilerplate code compared to using the Anchor framework. This method is
+recommended for developers who:
+
+- Seek granular control over program logic and optimizations
+- Want to learn the underlying concepts before moving to higher-level frameworks
+
+For beginners, we recommend starting with the Anchor framework. See the
+[Anchor](/docs/programs/anchor) section for more information.
+
+## Prerequisites
+
+For detailed installation instructions, visit the
+[installation](/docs/intro/installation) page.
+
+Before you begin, ensure you have the following installed:
+
+- Rust: The programming language for building Solana programs.
+- Solana CLI: Command-line tool for Solana development.
+
+## Getting Started
+
+The example below covers the basic steps to create your first Solana program
+written in Rust. We'll create a minimal program that prints "Hello, world!" to
+the program log.
+
+
+
+### Create a new Program
+
+First, create a new Rust project using the standard `cargo init` command with
+the `--lib` flag.
+
+```shell filename="Terminal"
+cargo init hello_world --lib
+```
+
+Navigate to the project directory. You should see the default `src/lib.rs` and
+`Cargo.toml` files.
+
+```shell filename="Terminal"
+cd hello_world
+```
+
+Next, add the `solana-program` dependency. This is the minimum dependency
+required to build a Solana program.
+
+```shell filename="Terminal"
+cargo add solana-program@1.18.26
+```
+
+Next, add the following snippet to `Cargo.toml`. If you don't include this
+config, the `target/deploy` directory will not be generated when you build the
+program.
+ +```toml filename="Cargo.toml" +[lib] +crate-type = ["cdylib", "lib"] +``` + +Your `Cargo.toml` file should look like the following: + +```toml filename="Cargo.toml" +[package] +name = "hello_world" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] + +[dependencies] +solana-program = "1.18.26" +``` + +Next, replace the contents of `src/lib.rs` with the following code. This is a +minimal Solana program that prints "Hello, world!" to the program log when the +program is invoked. + +The `msg!` macro is used in Solana programs to print a message to the program +log. + +```rs filename="lib.rs" +use solana_program::{ + account_info::AccountInfo, entrypoint, entrypoint::ProgramResult, msg, pubkey::Pubkey, +}; + +entrypoint!(process_instruction); + +pub fn process_instruction( + _program_id: &Pubkey, + _accounts: &[AccountInfo], + _instruction_data: &[u8], +) -> ProgramResult { + msg!("Hello, world!"); + Ok(()) +} +``` + +### Build the Program + +Next, build the program using the `cargo build-sbf` command. + +```shell filename="Terminal" +cargo build-sbf +``` + +This command generates a `target/deploy` directory containing two important +files: + +1. A `.so` file (e.g., `hello_world.so`): This is the compiled Solana program + that will be deployed to the network as a "smart contract". +2. A keypair file (e.g., `hello_world-keypair.json`): The public key of this + keypair is used as the program ID when deploying the program. + +To view the program ID, run the following command in your terminal. This command +prints the public key of the keypair at the specified file path: + +```shell filename="Terminal" +solana address -k ./target/deploy/hello_world-keypair.json +``` + +Example output: + +``` +4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz +``` + +### Test the Program + +Next, test the program using the `solana-program-test` crate. Add the following +dependencies to `Cargo.toml`. + +```shell filename="Terminal" +cargo add solana-program-test@1.18.26 --dev +cargo add solana-sdk@1.18.26 --dev +cargo add tokio --dev +``` + +Add the following test to `src/lib.rs`, below the program code. This is a test +module that invokes the hello world program. + +```rs filename="lib.rs" +#[cfg(test)] +mod test { + use super::*; + use solana_program_test::*; + use solana_sdk::{signature::Signer, transaction::Transaction}; + + #[tokio::test] + async fn test_hello_world() { + let program_id = Pubkey::new_unique(); + let (mut banks_client, payer, recent_blockhash) = + ProgramTest::new("hello_world", program_id, processor!(process_instruction)) + .start() + .await; + + // Create the instruction to invoke the program + let instruction = + solana_program::instruction::Instruction::new_with_borsh(program_id, &(), vec![]); + + // Add the instruction to a new transaction + let mut transaction = Transaction::new_with_payer(&[instruction], Some(&payer.pubkey())); + transaction.sign(&[&payer], recent_blockhash); + + // Process the transaction + let transaction_result = banks_client.process_transaction(transaction).await; + assert!(transaction_result.is_ok()); + } +} +``` + +Run the test using the `cargo test-sbf` command. The program log will display +"Hello, world!". 
+
+```shell filename="Terminal"
+cargo test-sbf
+```
+
+Example output:
+
+```shell filename="Terminal" {4} /Program log: Hello, world!/
+running 1 test
+[2024-10-18T21:24:54.889570000Z INFO solana_program_test] "hello_world" SBF program from /hello_world/target/deploy/hello_world.so, modified 35 seconds, 828 ms, 268 µs and 398 ns ago
+[2024-10-18T21:24:54.974294000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM invoke [1]
+[2024-10-18T21:24:54.974814000Z DEBUG solana_runtime::message_processor::stable_log] Program log: Hello, world!
+[2024-10-18T21:24:54.976848000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM consumed 140 of 200000 compute units
+[2024-10-18T21:24:54.976868000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM success
+test test::test_hello_world ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.13s
+```
+
+### Deploy the Program
+
+Next, deploy the program. When developing locally, we can use the
+`solana-test-validator`.
+
+First, configure the Solana CLI to use the local Solana cluster.
+
+```shell filename="Terminal"
+solana config set -ul
+```
+
+Example output:
+
+```
+Config File: /.config/solana/cli/config.yml
+RPC URL: http://localhost:8899
+WebSocket URL: ws://localhost:8900/ (computed)
+Keypair Path: /.config/solana/id.json
+Commitment: confirmed
+```
+
+Open a new terminal and run the `solana-test-validator` command to start the
+local validator.
+
+```shell filename="Terminal"
+solana-test-validator
+```
+
+While the test validator is running, run the `solana program deploy` command in
+a separate terminal to deploy the program to the local validator.
+
+```shell filename="Terminal"
+solana program deploy ./target/deploy/hello_world.so
+```
+
+Example output:
+
+```
+Program Id: 4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz
+Signature:
+5osMiNMiDZGM7L1e2tPHxU8wdB8gwG8fDnXLg5G7SbhwFz4dHshYgAijk4wSQL5cXiu8z1MMou5kLadAQuHp7ybH
+```
+
+You can inspect the program ID and transaction signature on
+[Solana Explorer](https://explorer.solana.com/?cluster=custom&customUrl=http%3A%2F%2Flocalhost%3A8899).
+Note that the cluster on Solana Explorer must also be localhost. The "Custom RPC
+URL" option on Solana Explorer defaults to `http://localhost:8899`.
+
+### Invoke the Program
+
+Next, we'll demonstrate how to invoke the program using a Rust client.
+
+First, create an `examples` directory and a `client.rs` file.
+
+```shell filename="Terminal"
+mkdir -p examples
+touch examples/client.rs
+```
+
+Add the following to `Cargo.toml`.
+
+```toml filename="Cargo.toml"
+[[example]]
+name = "client"
+path = "examples/client.rs"
+```
+
+Add the `solana-client` dependency.
+
+```shell filename="Terminal"
+cargo add solana-client@1.18.26 --dev
+```
+
+Add the following code to `examples/client.rs`. This is a Rust client script
+that funds a new keypair to pay for transaction fees and then invokes the hello
+world program.
+
+```rs filename="examples/client.rs"
+use solana_client::rpc_client::RpcClient;
+use solana_sdk::{
+    commitment_config::CommitmentConfig,
+    instruction::Instruction,
+    pubkey::Pubkey,
+    signature::{Keypair, Signer},
+    transaction::Transaction,
+};
+use std::str::FromStr;
+
+#[tokio::main]
+async fn main() {
+    // Program ID (replace with your actual program ID)
+    let program_id = Pubkey::from_str("4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz").unwrap();
+
+    // Connect to the local test validator
+    let rpc_url = String::from("http://127.0.0.1:8899");
+    let client = RpcClient::new_with_commitment(rpc_url, CommitmentConfig::confirmed());
+
+    // Generate a new keypair for the payer
+    let payer = Keypair::new();
+
+    // Request airdrop
+    let airdrop_amount = 1_000_000_000; // 1 SOL
+    let signature = client
+        .request_airdrop(&payer.pubkey(), airdrop_amount)
+        .expect("Failed to request airdrop");
+
+    // Wait for airdrop confirmation
+    loop {
+        let confirmed = client.confirm_transaction(&signature).unwrap();
+        if confirmed {
+            break;
+        }
+    }
+
+    // Create the instruction
+    let instruction = Instruction::new_with_borsh(
+        program_id,
+        &(),    // Empty instruction data
+        vec![], // No accounts needed
+    );
+
+    // Add the instruction to a new transaction
+    let mut transaction = Transaction::new_with_payer(&[instruction], Some(&payer.pubkey()));
+    transaction.sign(&[&payer], client.get_latest_blockhash().unwrap());
+
+    // Send and confirm the transaction
+    match client.send_and_confirm_transaction(&transaction) {
+        Ok(signature) => println!("Transaction Signature: {}", signature),
+        Err(err) => eprintln!("Error sending transaction: {}", err),
+    }
+}
+```
+
+Before running the script, replace the program ID in the code snippet above with
+the one for your program.
+
+You can get your program ID by running the following command.
+
+```shell filename="Terminal"
+solana address -k ./target/deploy/hello_world-keypair.json
+```
+
+```diff
+#[tokio::main]
+async fn main() {
+-    let program_id = Pubkey::from_str("4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz").unwrap();
++    let program_id = Pubkey::from_str("YOUR_PROGRAM_ID").unwrap();
+}
+```
+
+Run the client script with the following command.
+
+```shell filename="Terminal"
+cargo run --example client
+```
+
+Example output:
+
+```
+Transaction Signature: 54TWxKi3Jsi3UTeZbhLGUFX6JQH7TspRJjRRFZ8NFnwG5BXM9udxiX77bAACjKAS9fGnVeEazrXL4SfKrW7xZFYV
+```
+
+You can inspect the transaction signature on
+[Solana Explorer](https://explorer.solana.com/?cluster=custom&customUrl=http%3A%2F%2Flocalhost%3A8899)
+(local cluster) to see "Hello, world!" in the program log.
+
+### Update the Program
+
+Solana programs can be updated by redeploying to the same program ID. Update the
+program in `src/lib.rs` to print "Hello, Solana!" instead of "Hello, world!".
+
+```diff filename="lib.rs"
+pub fn process_instruction(
+    _program_id: &Pubkey,
+    _accounts: &[AccountInfo],
+    _instruction_data: &[u8],
+) -> ProgramResult {
+-    msg!("Hello, world!");
++    msg!("Hello, Solana!");
+    Ok(())
+}
+```
+
+Test the updated program by running the `cargo test-sbf` command.
+
+```shell filename="Terminal"
+cargo test-sbf
+```
+
+You should see "Hello, Solana!" in the program log.
+
+```shell filename="Terminal" {4}
+running 1 test
+[2024-10-23T19:28:28.842639000Z INFO solana_program_test] "hello_world" SBF program from /code/misc/delete/hello_world/target/deploy/hello_world.so, modified 4 minutes, 31 seconds, 435 ms, 566 µs and 766 ns ago
+[2024-10-23T19:28:28.934854000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM invoke [1]
+[2024-10-23T19:28:28.936735000Z DEBUG solana_runtime::message_processor::stable_log] Program log: Hello, Solana!
+[2024-10-23T19:28:28.938774000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM consumed 140 of 200000 compute units
+[2024-10-23T19:28:28.938793000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM success
+test test::test_hello_world ... ok
+
+test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.14s
+```
+
+Run the `cargo build-sbf` command to generate an updated `.so` file.
+
+```shell filename="Terminal"
+cargo build-sbf
+```
+
+Redeploy the program using the `solana program deploy` command.
+
+```shell filename="Terminal"
+solana program deploy ./target/deploy/hello_world.so
+```
+
+Run the client code again and inspect the transaction signature on Solana
+Explorer to see "Hello, Solana!" in the program log.
+
+```shell filename="Terminal"
+cargo run --example client
+```
+
+### Close the Program
+
+You can close your Solana program to reclaim the SOL allocated to the account.
+Closing a program is irreversible, so it should be done with caution.
+
+To close a program, use the `solana program close <PROGRAM_ID>` command. For
+example:
+
+```shell filename="Terminal"
+solana program close 4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz --bypass-warning
+```
+
+Example output:
+
+```
+Closed Program Id 4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz, 0.1350588 SOL
+reclaimed
+```
+
+Note that once a program is closed, its program ID cannot be reused. Attempting
+to deploy a program with a previously closed program ID will result in an error.
+
+```
+Error: Program 4Ujf5fXfLx2PAwRqcECCLtgDxHKPznoJpa43jUBxFfMz has been closed, use
+a new Program Id
+```
+
+If you need to redeploy a program with the same source code after closing a
+program, you must generate a new program ID. To generate a new keypair for the
+program, run the following command:
+
+```shell filename="Terminal"
+solana-keygen new -o ./target/deploy/hello_world-keypair.json --force
+```
+
+Alternatively, you can delete the existing keypair file (e.g.
+`./target/deploy/hello_world-keypair.json`) and run `cargo build-sbf` again,
+which will generate a new keypair file.
+
+
diff --git a/docs/programs/rust/program-structure.md b/docs/programs/rust/program-structure.md
new file mode 100644
index 000000000..b07022736
--- /dev/null
+++ b/docs/programs/rust/program-structure.md
@@ -0,0 +1,1407 @@
+---
+title: Rust Program Structure
+sidebarLabel: Program Structure
+description:
+  Learn how to structure Solana programs in Rust, including entrypoints, state
+  management, instruction handling, and testing.
+sidebarSortOrder: 1
+---
+
+Solana programs written in Rust have minimal structural requirements, allowing
+for flexibility in how code is organized. The only requirement is that a program
+must have an `entrypoint`, which defines where the execution of a program
+begins.
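+
+As a minimal sketch of that requirement, the smallest valid program is little
+more than an entrypoint and a handler that immediately succeeds. This assumes
+only the `solana-program` crate and is essentially the hello world program from
+the previous page with the log message removed:
+
+```rs
+use solana_program::{
+    account_info::AccountInfo, entrypoint, entrypoint::ProgramResult, pubkey::Pubkey,
+};
+
+// Register `process_instruction` as the program's entrypoint
+entrypoint!(process_instruction);
+
+// A no-op handler: every invocation simply returns success
+pub fn process_instruction(
+    _program_id: &Pubkey,
+    _accounts: &[AccountInfo],
+    _instruction_data: &[u8],
+) -> ProgramResult {
+    Ok(())
+}
+```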
+ +## Program Structure + +While there are no strict rules for file structure, Solana programs typically +follow a common pattern: + +- `entrypoint.rs`: Defines the entrypoint that routes incoming instructions. +- `state.rs`: Define program-specific state (account data). +- `instructions.rs`: Defines the instructions that the program can execute. +- `processor.rs`: Defines the instruction handlers (functions) that implement + the business logic for each instruction. +- `error.rs`: Defines custom errors that the program can return. + +You can find examples in the +[Solana Program Library](https://github.com/solana-labs/solana-program-library/tree/master/token/program/src). + +## Example Program + +To demonstrate how to build a native Rust program with multiple instructions, +we'll walk through a simple counter program that implements two instructions: + +1. `InitializeCounter`: Creates and initializes a new account with an initial + value. +2. `IncrementCounter`: Increments the value stored in an existing account. + +For simplicity, the program will be implemented in a single `lib.rs` file, +though in practice you may want to split larger programs into multiple files. + + + + +```rs filename="lib.rs" +use borsh::{BorshDeserialize, BorshSerialize}; +use solana_program::{ + account_info::{next_account_info, AccountInfo}, + entrypoint, + entrypoint::ProgramResult, + msg, + program::invoke, + program_error::ProgramError, + pubkey::Pubkey, + system_instruction, + sysvar::{rent::Rent, Sysvar}, +}; + +// Program entrypoint +entrypoint!(process_instruction); + +// Function to route instructions to the correct handler +pub fn process_instruction( + program_id: &Pubkey, + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> ProgramResult { + // Unpack instruction data + let instruction = CounterInstruction::unpack(instruction_data)?; + + // Match instruction type + match instruction { + CounterInstruction::InitializeCounter { initial_value } => { + process_initialize_counter(program_id, accounts, initial_value)? 
+        }
+        CounterInstruction::IncrementCounter => process_increment_counter(program_id, accounts)?,
+    };
+    Ok(())
+}
+
+// Instructions that our program can execute
+#[derive(BorshSerialize, BorshDeserialize, Debug)]
+pub enum CounterInstruction {
+    InitializeCounter { initial_value: u64 }, // variant 0
+    IncrementCounter,                         // variant 1
+}
+
+impl CounterInstruction {
+    pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> {
+        // Get the instruction variant from the first byte
+        let (&variant, rest) = input
+            .split_first()
+            .ok_or(ProgramError::InvalidInstructionData)?;
+
+        // Match instruction type and parse the remaining bytes based on the variant
+        match variant {
+            0 => {
+                // For InitializeCounter, parse a u64 from the remaining bytes
+                let initial_value = u64::from_le_bytes(
+                    rest.try_into()
+                        .map_err(|_| ProgramError::InvalidInstructionData)?,
+                );
+                Ok(Self::InitializeCounter { initial_value })
+            }
+            1 => Ok(Self::IncrementCounter), // No additional data needed
+            _ => Err(ProgramError::InvalidInstructionData),
+        }
+    }
+}
+
+// Initialize a new counter account
+fn process_initialize_counter(
+    program_id: &Pubkey,
+    accounts: &[AccountInfo],
+    initial_value: u64,
+) -> ProgramResult {
+    let accounts_iter = &mut accounts.iter();
+
+    let counter_account = next_account_info(accounts_iter)?;
+    let payer_account = next_account_info(accounts_iter)?;
+    let system_program = next_account_info(accounts_iter)?;
+
+    // Size of our counter account
+    let account_space = 8; // Size in bytes to store a u64
+
+    // Calculate minimum balance for rent exemption
+    let rent = Rent::get()?;
+    let required_lamports = rent.minimum_balance(account_space);
+
+    // Create the counter account
+    invoke(
+        &system_instruction::create_account(
+            payer_account.key,    // Account paying for the new account
+            counter_account.key,  // Account to be created
+            required_lamports,    // Amount of lamports to transfer to the new account
+            account_space as u64, // Size in bytes to allocate for the data field
+            program_id,           // Set program owner to our program
+        ),
+        &[
+            payer_account.clone(),
+            counter_account.clone(),
+            system_program.clone(),
+        ],
+    )?;
+
+    // Create a new CounterAccount struct with the initial value
+    let counter_data = CounterAccount {
+        count: initial_value,
+    };
+
+    // Get a mutable reference to the counter account's data
+    let mut account_data = &mut counter_account.data.borrow_mut()[..];
+
+    // Serialize the CounterAccount struct into the account's data
+    counter_data.serialize(&mut account_data)?;
+
+    msg!("Counter initialized with value: {}", initial_value);
+
+    Ok(())
+}
+
+// Update an existing counter's value
+fn process_increment_counter(program_id: &Pubkey, accounts: &[AccountInfo]) -> ProgramResult {
+    let accounts_iter = &mut accounts.iter();
+    let counter_account = next_account_info(accounts_iter)?;
+
+    // Verify account ownership
+    if counter_account.owner != program_id {
+        return Err(ProgramError::IncorrectProgramId);
+    }
+
+    // Mutable borrow the account data
+    let mut data = counter_account.data.borrow_mut();
+
+    // Deserialize the account data into our CounterAccount struct
+    let mut counter_data: CounterAccount = CounterAccount::try_from_slice(&data)?;
+
+    // Increment the counter value
+    counter_data.count = counter_data
+        .count
+        .checked_add(1)
+        .ok_or(ProgramError::InvalidAccountData)?;
+
+    // Serialize the updated counter data back into the account
+    counter_data.serialize(&mut &mut data[..])?;
+
+    msg!("Counter incremented to: {}", counter_data.count);
+    Ok(())
+}
+
+// Struct representing our counter
account's data +#[derive(BorshSerialize, BorshDeserialize, Debug)] +pub struct CounterAccount { + count: u64, +} + +#[cfg(test)] +mod test { + use super::*; + use solana_program_test::*; + use solana_sdk::{ + instruction::{AccountMeta, Instruction}, + signature::{Keypair, Signer}, + system_program, + transaction::Transaction, + }; + + #[tokio::test] + async fn test_counter_program() { + let program_id = Pubkey::new_unique(); + let (mut banks_client, payer, recent_blockhash) = ProgramTest::new( + "counter_program", + program_id, + processor!(process_instruction), + ) + .start() + .await; + + // Create a new keypair to use as the address for our counter account + let counter_keypair = Keypair::new(); + let initial_value: u64 = 42; + + // Step 1: Initialize the counter + println!("Testing counter initialization..."); + + // Create initialization instruction + let mut init_instruction_data = vec![0]; // 0 = initialize instruction + init_instruction_data.extend_from_slice(&initial_value.to_le_bytes()); + + let initialize_instruction = Instruction::new_with_bytes( + program_id, + &init_instruction_data, + vec![ + AccountMeta::new(counter_keypair.pubkey(), true), + AccountMeta::new(payer.pubkey(), true), + AccountMeta::new_readonly(system_program::id(), false), + ], + ); + + // Send transaction with initialize instruction + let mut transaction = + Transaction::new_with_payer(&[initialize_instruction], Some(&payer.pubkey())); + transaction.sign(&[&payer, &counter_keypair], recent_blockhash); + banks_client.process_transaction(transaction).await.unwrap(); + + // Check account data + let account = banks_client + .get_account(counter_keypair.pubkey()) + .await + .expect("Failed to get counter account"); + + if let Some(account_data) = account { + let counter: CounterAccount = CounterAccount::try_from_slice(&account_data.data) + .expect("Failed to deserialize counter data"); + assert_eq!(counter.count, 42); + println!( + "✅ Counter initialized successfully with value: {}", + counter.count + ); + } + + // Step 2: Increment the counter + println!("Testing counter increment..."); + + // Create increment instruction + let increment_instruction = Instruction::new_with_bytes( + program_id, + &[1], // 1 = increment instruction + vec![AccountMeta::new(counter_keypair.pubkey(), true)], + ); + + // Send transaction with increment instruction + let mut transaction = + Transaction::new_with_payer(&[increment_instruction], Some(&payer.pubkey())); + transaction.sign(&[&payer, &counter_keypair], recent_blockhash); + banks_client.process_transaction(transaction).await.unwrap(); + + // Check account data + let account = banks_client + .get_account(counter_keypair.pubkey()) + .await + .expect("Failed to get counter account"); + + if let Some(account_data) = account { + let counter: CounterAccount = CounterAccount::try_from_slice(&account_data.data) + .expect("Failed to deserialize counter data"); + assert_eq!(counter.count, 43); + println!("✅ Counter incremented successfully to: {}", counter.count); + } + } +} +``` + +```toml filename="Cargo.toml" +[package] +name = "counter_program" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] + +[dependencies] +borsh = "1.5.1" +solana-program = "1.18.26" + +[dev-dependencies] +solana-program-test = "1.18.26" +solana-sdk = "1.18.26" +tokio = "1.41.0" +``` + + + + + + +### Create a new Program + +First, create a new Rust project using the standard `cargo init` command with +the `--lib` flag. 
+
+```shell filename="Terminal"
+cargo init counter_program --lib
+```
+
+Navigate to the project directory. You should see the default `src/lib.rs` and
+`Cargo.toml` files.
+
+```shell filename="Terminal"
+cd counter_program
+```
+
+Next, add the `solana-program` dependency. This is the minimum dependency
+required to build a Solana program.
+
+```shell filename="Terminal"
+cargo add solana-program@1.18.26
+```
+
+Next, add the following snippet to `Cargo.toml`. If you don't include this
+config, the `target/deploy` directory will not be generated when you build the
+program.
+
+```toml filename="Cargo.toml"
+[lib]
+crate-type = ["cdylib", "lib"]
+```
+
+Your `Cargo.toml` file should look like the following:
+
+```toml filename="Cargo.toml"
+[package]
+name = "counter_program"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+crate-type = ["cdylib", "lib"]
+
+[dependencies]
+solana-program = "1.18.26"
+```
+
+### Program Entrypoint
+
+A Solana program entrypoint is the function that gets called when a program is
+invoked. The entrypoint has the following raw definition, and developers are
+free to create their own implementation of the entrypoint function:
+
+```rs
+#[no_mangle]
+pub unsafe extern "C" fn entrypoint(input: *mut u8) -> u64;
+```
+
+For simplicity, use the
+[`entrypoint!`](https://github.com/solana-labs/solana/blob/v2.0/sdk/program/src/entrypoint.rs#L124-L140)
+macro from the `solana_program` crate to define the entrypoint in your program.
+
+Replace the default code in `lib.rs` with the following code. This snippet:
+
+1. Imports the required dependencies from `solana_program`
+2. Defines the program entrypoint using the `entrypoint!` macro
+3. Implements the `process_instruction` function that will route instructions to
+   the appropriate handler functions
+
+```rs filename="lib.rs" {13} /process_instruction/
+use solana_program::{
+    account_info::{next_account_info, AccountInfo},
+    entrypoint,
+    entrypoint::ProgramResult,
+    msg,
+    program::invoke,
+    program_error::ProgramError,
+    pubkey::Pubkey,
+    system_instruction,
+    sysvar::{rent::Rent, Sysvar},
+};
+
+entrypoint!(process_instruction);
+
+pub fn process_instruction(
+    program_id: &Pubkey,
+    accounts: &[AccountInfo],
+    instruction_data: &[u8],
+) -> ProgramResult {
+    // Your program logic
+    Ok(())
+}
+```
+
+The `entrypoint!` macro requires a function with the following
+[type signature](https://github.com/solana-labs/solana/blob/v2.0/sdk/program/src/entrypoint.rs#L28-L29)
+as an argument:
+
+```rs
+pub type ProcessInstruction =
+    fn(program_id: &Pubkey, accounts: &[AccountInfo], instruction_data: &[u8]) -> ProgramResult;
+```
+
+When a Solana program is invoked, the entrypoint
+[deserializes](https://github.com/solana-labs/solana/blob/v2.0/sdk/program/src/entrypoint.rs#L277)
+the
+[input data](https://github.com/solana-labs/solana/blob/v2.0/sdk/program/src/entrypoint.rs#L129-L131)
+(provided as bytes) into three values and passes them to the
+[`process_instruction`](https://github.com/solana-labs/solana/blob/v2.0/sdk/program/src/entrypoint.rs#L132)
+function:
+
+- `program_id`: The public key of the program being invoked (current program)
+- `accounts`: The `AccountInfo` for accounts required by the instruction being
+  invoked
+- `instruction_data`: Additional data passed to the program which specifies the
+  instruction to execute and its required arguments
+
+These three parameters directly correspond to the data that clients must provide
+when building an instruction to invoke a program.
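+
+To make that correspondence concrete, here is a minimal client-side sketch. It
+assumes the `solana-sdk` crate, and the keys are placeholders generated with
+`Pubkey::new_unique()` purely for illustration. The three fields of the
+`Instruction` are exactly what arrive in `process_instruction` as `program_id`,
+`accounts`, and `instruction_data`:
+
+```rs
+use solana_sdk::{
+    instruction::{AccountMeta, Instruction},
+    pubkey::Pubkey,
+};
+
+fn main() {
+    // Placeholder keys, for illustration only
+    let program_id = Pubkey::new_unique();
+    let some_account = Pubkey::new_unique();
+
+    let instruction = Instruction::new_with_bytes(
+        program_id, // received as `program_id` by the program
+        &[0],       // received as `instruction_data` (here, a single variant byte)
+        vec![
+            // each AccountMeta is received, in order, as an entry in `accounts`
+            AccountMeta::new(some_account, false),
+        ],
+    );
+
+    println!("{:?}", instruction);
+}
+```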
+ +### Define Program State + +When building a Solana program, you'll typically start by defining your +program's state - the data that will be stored in accounts created and owned by +your program. + +Program state is defined using Rust structs that represent the data layout of +your program's accounts. You can define multiple structs to represent different +types of accounts for your program. + +When working with accounts, you need a way to convert your program's data types +to and from the raw bytes stored in an account's data field: + +- Serialization: Converting your data types into bytes to store in an account's + data field +- Deserialization: Converting the bytes stored in an account back into your data + types + +While you can use any serialization format for Solana program development, +[Borsh](https://borsh.io/) is commonly used. To use Borsh in your Solana +program: + +1. Add the `borsh` crate as a dependency to your `Cargo.toml`: + +```shell filename="Terminal" +cargo add borsh +``` + +2. Import the Borsh traits and use the derive macro to implement the traits for + your structs: + +```rust +use borsh::{BorshSerialize, BorshDeserialize}; + +// Define struct representing our counter account's data +#[derive(BorshSerialize, BorshDeserialize, Debug)] +pub struct CounterAccount { + count: u64, +} +``` + +Add the `CounterAccount` struct to `lib.rs` to define the program state. This +struct will be used in both the initialization and increment instructions. + +```rs filename="lib.rs" {12} {25-29} +use solana_program::{ + account_info::{next_account_info, AccountInfo}, + entrypoint, + entrypoint::ProgramResult, + msg, + program::invoke, + program_error::ProgramError, + pubkey::Pubkey, + system_instruction, + sysvar::{rent::Rent, Sysvar}, +}; +use borsh::{BorshSerialize, BorshDeserialize}; + +entrypoint!(process_instruction); + +pub fn process_instruction( + program_id: &Pubkey, + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> ProgramResult { + // Your program logic + Ok(()) +} + +#[derive(BorshSerialize, BorshDeserialize, Debug)] +pub struct CounterAccount { + count: u64, +} +``` + +### Define Instructions + +Instructions refer to the different operations that your Solana program can +perform. Think of them as public APIs for your program - they define what +actions users can take when interacting with your program. + +Instructions are typically defined using a Rust enum where: + +- Each enum variant represents a different instruction +- The variant's payload represents the instruction's parameters + +Note that Rust enum variants are implicitly numbered starting from 0. + +Below is an example of an enum defining two instructions: + +```rust +#[derive(BorshSerialize, BorshDeserialize, Debug)] +pub enum CounterInstruction { + InitializeCounter { initial_value: u64 }, // variant 0 + IncrementCounter, // variant 1 +} +``` + +When a client invokes your program, they must provide instruction data (as a +buffer of bytes) where: + +- The first byte identifies which instruction variant to execute (0, 1, etc.) +- The remaining bytes contain the serialized instruction parameters (if + required) + +To convert the instruction data (bytes) into a variant of the enum, it is common +to implement a helper method. This method: + +1. Splits the first byte to get the instruction variant +2. Matches on the variant and parses any additional parameters from the + remaining bytes +3. 
Returns the corresponding enum variant
+
+For example, the `unpack` method for the `CounterInstruction` enum:
+
+```rust
+impl CounterInstruction {
+    pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> {
+        // Get the instruction variant from the first byte
+        let (&variant, rest) = input
+            .split_first()
+            .ok_or(ProgramError::InvalidInstructionData)?;
+
+        // Match instruction type and parse the remaining bytes based on the variant
+        match variant {
+            0 => {
+                // For InitializeCounter, parse a u64 from the remaining bytes
+                let initial_value = u64::from_le_bytes(
+                    rest.try_into()
+                        .map_err(|_| ProgramError::InvalidInstructionData)?
+                );
+                Ok(Self::InitializeCounter { initial_value })
+            }
+            1 => Ok(Self::IncrementCounter), // No additional data needed
+            _ => Err(ProgramError::InvalidInstructionData),
+        }
+    }
+}
+```
+
+Add the following code to `lib.rs` to define the instructions for the counter
+program.
+
+```rs filename="lib.rs" {18-46}
+use borsh::{BorshDeserialize, BorshSerialize};
+use solana_program::{
+    account_info::AccountInfo, entrypoint, entrypoint::ProgramResult, msg,
+    program_error::ProgramError, pubkey::Pubkey,
+};
+
+entrypoint!(process_instruction);
+
+pub fn process_instruction(
+    program_id: &Pubkey,
+    accounts: &[AccountInfo],
+    instruction_data: &[u8],
+) -> ProgramResult {
+    // Your program logic
+    Ok(())
+}
+
+#[derive(BorshSerialize, BorshDeserialize, Debug)]
+pub enum CounterInstruction {
+    InitializeCounter { initial_value: u64 }, // variant 0
+    IncrementCounter,                         // variant 1
+}
+
+impl CounterInstruction {
+    pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> {
+        // Get the instruction variant from the first byte
+        let (&variant, rest) = input
+            .split_first()
+            .ok_or(ProgramError::InvalidInstructionData)?;
+
+        // Match instruction type and parse the remaining bytes based on the variant
+        match variant {
+            0 => {
+                // For InitializeCounter, parse a u64 from the remaining bytes
+                let initial_value = u64::from_le_bytes(
+                    rest.try_into()
+                        .map_err(|_| ProgramError::InvalidInstructionData)?,
+                );
+                Ok(Self::InitializeCounter { initial_value })
+            }
+            1 => Ok(Self::IncrementCounter), // No additional data needed
+            _ => Err(ProgramError::InvalidInstructionData),
+        }
+    }
+}
+```
+
+### Instruction Handlers
+
+Instruction handlers refer to the functions that contain the business logic for
+each instruction. It's common to name handler functions as
+`process_<instruction_name>`, but you're free to choose any naming convention.
+
+Add the following code to `lib.rs`. This code uses the `CounterInstruction` enum
+and `unpack` method defined in the previous step to route incoming instructions
+to the appropriate handler functions:
+
+```rs filename="lib.rs" {8-17} {20-32} /process_initialize_counter/1 /process_increment_counter/1
+entrypoint!(process_instruction);
+
+pub fn process_instruction(
+    program_id: &Pubkey,
+    accounts: &[AccountInfo],
+    instruction_data: &[u8],
+) -> ProgramResult {
+    // Unpack instruction data
+    let instruction = CounterInstruction::unpack(instruction_data)?;
+
+    // Match instruction type
+    match instruction {
+        CounterInstruction::InitializeCounter { initial_value } => {
+            process_initialize_counter(program_id, accounts, initial_value)?
+        }
+        CounterInstruction::IncrementCounter => process_increment_counter(program_id, accounts)?,
+    };
+    Ok(())
+}
+
+fn process_initialize_counter(
+    program_id: &Pubkey,
+    accounts: &[AccountInfo],
+    initial_value: u64,
+) -> ProgramResult {
+    // Implementation details...
+    Ok(())
+}
+
+fn process_increment_counter(program_id: &Pubkey, accounts: &[AccountInfo]) -> ProgramResult {
+    // Implementation details...
+    Ok(())
+}
+```
+
+Next, add the implementation of the `process_initialize_counter` function. This
+instruction handler:
+
+1. Creates and allocates space for a new account to store the counter data
+2. Initializes the account data with the `initial_value` passed to the
+   instruction
+
+
+
+
+The `process_initialize_counter` function requires three accounts:
+
+1. The counter account that will be created and initialized
+2. The payer account that will fund the new account creation
+3. The System Program that we invoke to create the new account
+
+To define the accounts required by the instruction, we create an iterator over
+the `accounts` slice and use the `next_account_info` function to get each
+account. The accounts you read here are exactly the accounts the instruction
+requires.
+
+The order of accounts is important - when building the instruction on the client
+side, accounts must be provided in the same order as they are defined in the
+program for the instruction to execute successfully.
+
+While the variable names for the accounts have no effect on the program's
+functionality, using descriptive names is recommended.
+
+```rs filename="lib.rs" {6-10}
+fn process_initialize_counter(
+    program_id: &Pubkey,
+    accounts: &[AccountInfo],
+    initial_value: u64,
+) -> ProgramResult {
+    let accounts_iter = &mut accounts.iter();
+
+    let counter_account = next_account_info(accounts_iter)?;
+    let payer_account = next_account_info(accounts_iter)?;
+    let system_program = next_account_info(accounts_iter)?;
+
+    Ok(())
+}
+```
+
+Before creating an account, we need to:
+
+1. Specify the space (in bytes) to allocate to the account's data field. Since
+   we're storing a u64 value (`count`), we need 8 bytes.
+
+2. Calculate the minimum "rent" balance required. On Solana, accounts must
+   maintain a minimum balance of lamports (rent) based on the amount of data
+   stored in the account.
+
+```rs filename="lib.rs" {12-17}
+fn process_initialize_counter(
+    program_id: &Pubkey,
+    accounts: &[AccountInfo],
+    initial_value: u64,
+) -> ProgramResult {
+    let accounts_iter = &mut accounts.iter();
+
+    let counter_account = next_account_info(accounts_iter)?;
+    let payer_account = next_account_info(accounts_iter)?;
+    let system_program = next_account_info(accounts_iter)?;
+
+    // Size of our counter account
+    let account_space = 8; // Size in bytes to store a u64
+
+    // Calculate minimum balance for rent exemption
+    let rent = Rent::get()?;
+    let required_lamports = rent.minimum_balance(account_space);
+
+    Ok(())
+}
+```
+
+Once the space is defined and rent is calculated, create the account by invoking
+the System Program's `create_account` instruction.
+
+On Solana, new accounts can only be created by the System Program. When creating
+an account, we specify the number of bytes to allocate and the program owner of
+the new account. The System Program:
+
+1. Creates the new account
+2. Allocates the specified space for the account's data field
+3. Transfers ownership to the specified program
+
+This ownership transfer is important because only the program owner of an
+account can modify an account's data. In this case, we set our program as the
+owner, which will allow us to modify the account's data to store the counter
+value.
+
+To invoke the System Program from our program's instruction, we make a Cross
+Program Invocation (CPI) via the `invoke` function.
A CPI allows one program to +call instructions on other programs - in this case, the System Program's +`create_account` instruction. + +```rs filename="lib.rs" {19-33} +fn process_initialize_counter( + program_id: &Pubkey, + accounts: &[AccountInfo], + initial_value: u64, +) -> ProgramResult { + let accounts_iter = &mut accounts.iter(); + + let counter_account = next_account_info(accounts_iter)?; + let payer_account = next_account_info(accounts_iter)?; + let system_program = next_account_info(accounts_iter)?; + + // Size of our counter account + let account_space = 8; // Size in bytes to store a u64 + + // Calculate minimum balance for rent exemption + let rent = Rent::get()?; + let required_lamports = rent.minimum_balance(account_space); + + // Create the counter account + invoke( + &system_instruction::create_account( + payer_account.key, // Account paying for the new account + counter_account.key, // Account to be created + required_lamports, // Amount of lamports to transfer to the new account + account_space as u64, // Size in bytes to allocate for the data field + program_id, // Set program owner to our program + ), + &[ + payer_account.clone(), + counter_account.clone(), + system_program.clone(), + ], + )?; + + Ok(()) +} +``` + +Once the account is created, we initialize the account data by: + +1. Creating a new `CounterAccount` struct with the `initial_value` provided to + the instruction. +2. Getting a mutable reference to the new account's data field. +3. Serializing the `CounterAccount` struct into the account's data field, + effectively storing the `initial_value` on the account. + +```rs filename="lib.rs" {35-44} /inital_value/ +fn process_initialize_counter( + program_id: &Pubkey, + accounts: &[AccountInfo], + initial_value: u64, +) -> ProgramResult { + let accounts_iter = &mut accounts.iter(); + + let counter_account = next_account_info(accounts_iter)?; + let payer_account = next_account_info(accounts_iter)?; + let system_program = next_account_info(accounts_iter)?; + + // Size of our counter account + let account_space = 8; // Size in bytes to store a u64 + + // Calculate minimum balance for rent exemption + let rent = Rent::get()?; + let required_lamports = rent.minimum_balance(account_space); + + // Create the counter account + invoke( + &system_instruction::create_account( + payer_account.key, // Account paying for the new account + counter_account.key, // Account to be created + required_lamports, // Amount of lamports to transfer to the new account + account_space as u64, // Size in bytes to allocate for the data field + program_id, // Set program owner to our program + ), + &[ + payer_account.clone(), + counter_account.clone(), + system_program.clone(), + ], + )?; + + // Create a new CounterAccount struct with the initial value + let counter_data = CounterAccount { + count: initial_value, + }; + + // Get a mutable reference to the counter account's data + let mut account_data = &mut counter_account.data.borrow_mut()[..]; + + // Serialize the CounterAccount struct into the account's data + counter_data.serialize(&mut account_data)?; + + msg!("Counter initialized with value: {}", initial_value); + + Ok(()) +} +``` + + + + +```rs filename="lib.rs" +// Initialize a new counter account +fn process_initialize_counter( + program_id: &Pubkey, + accounts: &[AccountInfo], + initial_value: u64, +) -> ProgramResult { + let accounts_iter = &mut accounts.iter(); + + let counter_account = next_account_info(accounts_iter)?; + let payer_account = next_account_info(accounts_iter)?; + 
let system_program = next_account_info(accounts_iter)?; + + // Size of our counter account + let account_space = 8; // Size in bytes to store a u64 + + // Calculate minimum balance for rent exemption + let rent = Rent::get()?; + let required_lamports = rent.minimum_balance(account_space); + + // Create the counter account + invoke( + &system_instruction::create_account( + payer_account.key, // Account paying for the new account + counter_account.key, // Account to be created + required_lamports, // Amount of lamports to transfer to the new account + account_space as u64, // Size in bytes to allocate for the data field + program_id, // Set program owner to our program + ), + &[ + payer_account.clone(), + counter_account.clone(), + system_program.clone(), + ], + )?; + + // Create a new CounterAccount struct with the initial value + let counter_data = CounterAccount { + count: initial_value, + }; + + // Get a mutable reference to the counter account's data + let mut account_data = &mut counter_account.data.borrow_mut()[..]; + + // Serialize the CounterAccount struct into the account's data + counter_data.serialize(&mut account_data)?; + + msg!("Counter initialized with value: {}", initial_value); + + Ok(()) +} +``` + +Next, add the implementation of the `process_increment_counter` function. This +instruction increments the value of an existing counter account. + + + + +Just like the `process_initialize_counter` function, we start by creating an +iterator over the accounts. In this case, we are only expecting one account, +which is the account to be updated. + +Note that in practice, a developer must implement various security checks to +validate the accounts passed to the program. Since all accounts are provided by +the caller of the instruction, there is no guarantee that the accounts provided +are the ones the program expects. Missing account validation checks are a common +source of program vulnerabilities. + +The example below includes a check to ensure the account we're referring to as +the `counter_account` is owned by the executing program. 
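+
+Before looking at that check, here is a minimal, hypothetical sketch of the
+kinds of validation an instruction handler might perform. The
+`validate_accounts` helper, its parameters, and its error choices are
+illustrative assumptions, not part of this tutorial's program:
+
+```rs
+use solana_program::{
+    account_info::AccountInfo, entrypoint::ProgramResult, program_error::ProgramError,
+    pubkey::Pubkey,
+};
+
+// Hypothetical helper: examples of common account validation checks
+fn validate_accounts(
+    program_id: &Pubkey,
+    counter_account: &AccountInfo,
+    payer_account: &AccountInfo,
+) -> ProgramResult {
+    // Only trust the account's data if this program owns the account
+    if counter_account.owner != program_id {
+        return Err(ProgramError::IncorrectProgramId);
+    }
+    // The payer must have signed the transaction
+    if !payer_account.is_signer {
+        return Err(ProgramError::MissingRequiredSignature);
+    }
+    // An account whose data will be modified must be marked writable
+    if !counter_account.is_writable {
+        return Err(ProgramError::InvalidAccountData);
+    }
+    Ok(())
+}
+```
+
+The tutorial's handler applies the first of these checks, the owner check, as
+shown next.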
+
+```rs filename="lib.rs" {6-9}
+// Update an existing counter's value
+fn process_increment_counter(program_id: &Pubkey, accounts: &[AccountInfo]) -> ProgramResult {
+    let accounts_iter = &mut accounts.iter();
+    let counter_account = next_account_info(accounts_iter)?;
+
+    // Verify account ownership
+    if counter_account.owner != program_id {
+        return Err(ProgramError::IncorrectProgramId);
+    }
+
+    Ok(())
+}
+```
+
+To update the account data, we:
+
+- Mutably borrow the existing account's data field
+- Deserialize the raw bytes into our `CounterAccount` struct
+- Update the `count` value
+- Serialize the modified struct back into the account's data field
+
+```rs filename="lib.rs" {11-24}
+// Update an existing counter's value
+fn process_increment_counter(program_id: &Pubkey, accounts: &[AccountInfo]) -> ProgramResult {
+    let accounts_iter = &mut accounts.iter();
+    let counter_account = next_account_info(accounts_iter)?;
+
+    // Verify account ownership
+    if counter_account.owner != program_id {
+        return Err(ProgramError::IncorrectProgramId);
+    }
+
+    // Mutably borrow the account data
+    let mut data = counter_account.data.borrow_mut();
+
+    // Deserialize the account data into our CounterAccount struct
+    let mut counter_data: CounterAccount = CounterAccount::try_from_slice(&data)?;
+
+    // Increment the counter value
+    counter_data.count = counter_data
+        .count
+        .checked_add(1)
+        .ok_or(ProgramError::InvalidAccountData)?;
+
+    // Serialize the updated counter data back into the account
+    counter_data.serialize(&mut &mut data[..])?;
+
+    msg!("Counter incremented to: {}", counter_data.count);
+    Ok(())
+}
+```
+
+
+
+
+```rs filename="lib.rs"
+// Update an existing counter's value
+fn process_increment_counter(program_id: &Pubkey, accounts: &[AccountInfo]) -> ProgramResult {
+    let accounts_iter = &mut accounts.iter();
+    let counter_account = next_account_info(accounts_iter)?;
+
+    // Verify account ownership
+    if counter_account.owner != program_id {
+        return Err(ProgramError::IncorrectProgramId);
+    }
+
+    // Mutably borrow the account data
+    let mut data = counter_account.data.borrow_mut();
+
+    // Deserialize the account data into our CounterAccount struct
+    let mut counter_data: CounterAccount = CounterAccount::try_from_slice(&data)?;
+
+    // Increment the counter value
+    counter_data.count = counter_data
+        .count
+        .checked_add(1)
+        .ok_or(ProgramError::InvalidAccountData)?;
+
+    // Serialize the updated counter data back into the account
+    counter_data.serialize(&mut &mut data[..])?;
+
+    msg!("Counter incremented to: {}", counter_data.count);
+    Ok(())
+}
+```
+
+### Instruction Testing
+
+To test the program instructions, add the following dependencies to
+`Cargo.toml`.
+
+```shell filename="Terminal"
+cargo add solana-program-test@1.18.26 --dev
+cargo add solana-sdk@1.18.26 --dev
+cargo add tokio --dev
+```
+
+Then add the following test module to `lib.rs` and run `cargo test-sbf` to
+execute the tests. Optionally, use the `--nocapture` flag to see the print
+statements in the output.
+
+```shell filename="Terminal"
+cargo test-sbf -- --nocapture
+```
+
+
+
+
+First, set up the test module and import required dependencies:
+
+```rs filename="lib.rs"
+#[cfg(test)]
+mod test {
+    use super::*;
+    use solana_program_test::*;
+    use solana_sdk::{
+        instruction::{AccountMeta, Instruction},
+        signature::{Keypair, Signer},
+        system_program,
+        transaction::Transaction,
+    };
+
+    #[tokio::test]
+    async fn test_counter_program() {
+        // Test code will go here
+    }
+}
+```
+
+Next, set up the test using `ProgramTest`. Then create a new keypair to use as
+the address for the counter account we'll initialize, and define an initial
+value to set for the counter.
+
+```rs filename="lib.rs"
+#[cfg(test)]
+mod test {
+    use super::*;
+    use solana_program_test::*;
+    use solana_sdk::{
+        instruction::{AccountMeta, Instruction},
+        signature::{Keypair, Signer},
+        system_program,
+        transaction::Transaction,
+    };
+
+    #[tokio::test]
+    async fn test_counter_program() {
+        let program_id = Pubkey::new_unique();
+        let (mut banks_client, payer, recent_blockhash) = ProgramTest::new(
+            "counter_program",
+            program_id,
+            processor!(process_instruction),
+        )
+        .start()
+        .await;
+
+        // Create a new keypair to use as the address for our counter account
+        let counter_keypair = Keypair::new();
+        let initial_value: u64 = 42;
+    }
+}
+```
+
+When building an instruction, each account must be provided as an
+[`AccountMeta`](https://github.com/solana-labs/solana/blob/v2.0/sdk/program/src/instruction.rs#L539-L545),
+which specifies:
+
+- The account's public key (`Pubkey`)
+- `is_writable`: Whether the account data will be modified
+- `is_signer`: Whether the account must sign the transaction
+
+```rs
+AccountMeta::new(account1_pubkey, true),           // writable, signer
+AccountMeta::new(account2_pubkey, false),          // writable, not signer
+AccountMeta::new_readonly(account3_pubkey, false), // not writable, not signer
+AccountMeta::new_readonly(account4_pubkey, true),  // not writable, signer
+```
+
+To test the initialize instruction:
+
+- Create instruction data with variant 0 (`InitializeCounter`) and initial value
+- Build the instruction with the program ID, instruction data, and required
+  accounts
+- Send a transaction with the initialize instruction
+- Check the account was created with the correct initial value
+
+```rs filename="lib.rs" {16-53}
+    #[tokio::test]
+    async fn test_counter_program() {
+        let program_id = Pubkey::new_unique();
+        let (mut banks_client, payer, recent_blockhash) = ProgramTest::new(
+            "counter_program",
+            program_id,
+            processor!(process_instruction),
+        )
+        .start()
+        .await;
+
+        // Create a new keypair to use as the address for our counter account
+        let counter_keypair = Keypair::new();
+        let initial_value: u64 = 42;
+
+        // Step 1: Initialize the counter
+        println!("Testing counter initialization...");
+
+        // Create initialization instruction
+        let mut init_instruction_data = vec![0]; // 0 = initialize instruction
+        init_instruction_data.extend_from_slice(&initial_value.to_le_bytes());
+
+        let initialize_instruction = Instruction::new_with_bytes(
+            program_id,
+            &init_instruction_data,
+            vec![
+                AccountMeta::new(counter_keypair.pubkey(), true),
+                AccountMeta::new(payer.pubkey(), true),
+                AccountMeta::new_readonly(system_program::id(), false),
+            ],
+        );
+
+        // Send transaction with initialize instruction
+        let mut transaction =
+            Transaction::new_with_payer(&[initialize_instruction], Some(&payer.pubkey()));
+        transaction.sign(&[&payer, &counter_keypair], recent_blockhash);
+        
banks_client.process_transaction(transaction).await.unwrap(); + + // Check account data + let account = banks_client + .get_account(counter_keypair.pubkey()) + .await + .expect("Failed to get counter account"); + + if let Some(account_data) = account { + let counter: CounterAccount = CounterAccount::try_from_slice(&account_data.data) + .expect("Failed to deserialize counter data"); + assert_eq!(counter.count, 42); + println!( + "✅ Counter initialized successfully with value: {}", + counter.count + ); + } + } +``` + +To test the increment instruction: + +- Build the instruction with the program ID, instruction data, and required + accounts +- Send a transaction with the increment instruction +- Check the account was incremented to the correct value + +Note that the instruction data for the increment instruction is `[1]`, which +corresponds to variant 1 (`IncrementCounter`). Since there are no additional +parameters to the increment instruction, the data is simply the instruction +variant. + +```rs filename="lib.rs" {55-82} + #[tokio::test] + async fn test_counter_program() { + let program_id = Pubkey::new_unique(); + let (mut banks_client, payer, recent_blockhash) = ProgramTest::new( + "counter_program", + program_id, + processor!(process_instruction), + ) + .start() + .await; + + // Create a new keypair to use as the address for our counter account + let counter_keypair = Keypair::new(); + let initial_value: u64 = 42; + + // Step 1: Initialize the counter + println!("Testing counter initialization..."); + + // Create initialization instruction + let mut init_instruction_data = vec![0]; // 0 = initialize instruction + init_instruction_data.extend_from_slice(&initial_value.to_le_bytes()); + + let initialize_instruction = Instruction::new_with_bytes( + program_id, + &init_instruction_data, + vec![ + AccountMeta::new(counter_keypair.pubkey(), true), + AccountMeta::new(payer.pubkey(), true), + AccountMeta::new_readonly(system_program::id(), false), + ], + ); + + // Send transaction with initialize instruction + let mut transaction = + Transaction::new_with_payer(&[initialize_instruction], Some(&payer.pubkey())); + transaction.sign(&[&payer, &counter_keypair], recent_blockhash); + banks_client.process_transaction(transaction).await.unwrap(); + + // Check account data + let account = banks_client + .get_account(counter_keypair.pubkey()) + .await + .expect("Failed to get counter account"); + + if let Some(account_data) = account { + let counter: CounterAccount = CounterAccount::try_from_slice(&account_data.data) + .expect("Failed to deserialize counter data"); + assert_eq!(counter.count, 42); + println!( + "✅ Counter initialized successfully with value: {}", + counter.count + ); + } + + // Step 2: Increment the counter + println!("Testing counter increment..."); + + // Create increment instruction + let increment_instruction = Instruction::new_with_bytes( + program_id, + &[1], // 1 = increment instruction + vec![AccountMeta::new(counter_keypair.pubkey(), true)], + ); + + // Send transaction with increment instruction + let mut transaction = + Transaction::new_with_payer(&[increment_instruction], Some(&payer.pubkey())); + transaction.sign(&[&payer, &counter_keypair], recent_blockhash); + banks_client.process_transaction(transaction).await.unwrap(); + + // Check account data + let account = banks_client + .get_account(counter_keypair.pubkey()) + .await + .expect("Failed to get counter account"); + + if let Some(account_data) = account { + let counter: CounterAccount = 
CounterAccount::try_from_slice(&account_data.data) + .expect("Failed to deserialize counter data"); + assert_eq!(counter.count, 43); + println!("✅ Counter incremented successfully to: {}", counter.count); + } + } +``` + + + + +```rs filename="lib.rs" +#[cfg(test)] +mod test { + use super::*; + use solana_program_test::*; + use solana_sdk::{ + instruction::{AccountMeta, Instruction}, + signature::{Keypair, Signer}, + system_program, + transaction::Transaction, + }; + + #[tokio::test] + async fn test_counter_program() { + let program_id = Pubkey::new_unique(); + let (mut banks_client, payer, recent_blockhash) = ProgramTest::new( + "counter_program", + program_id, + processor!(process_instruction), + ) + .start() + .await; + + // Create a new keypair to use as the address for our counter account + let counter_keypair = Keypair::new(); + let initial_value: u64 = 42; + + // Step 1: Initialize the counter + println!("Testing counter initialization..."); + + // Create initialization instruction + let mut init_instruction_data = vec![0]; // 0 = initialize instruction + init_instruction_data.extend_from_slice(&initial_value.to_le_bytes()); + + let initialize_instruction = Instruction::new_with_bytes( + program_id, + &init_instruction_data, + vec![ + AccountMeta::new(counter_keypair.pubkey(), true), + AccountMeta::new(payer.pubkey(), true), + AccountMeta::new_readonly(system_program::id(), false), + ], + ); + + // Send transaction with initialize instruction + let mut transaction = + Transaction::new_with_payer(&[initialize_instruction], Some(&payer.pubkey())); + transaction.sign(&[&payer, &counter_keypair], recent_blockhash); + banks_client.process_transaction(transaction).await.unwrap(); + + // Check account data + let account = banks_client + .get_account(counter_keypair.pubkey()) + .await + .expect("Failed to get counter account"); + + if let Some(account_data) = account { + let counter: CounterAccount = CounterAccount::try_from_slice(&account_data.data) + .expect("Failed to deserialize counter data"); + assert_eq!(counter.count, 42); + println!( + "✅ Counter initialized successfully with value: {}", + counter.count + ); + } + + // Step 2: Increment the counter + println!("Testing counter increment..."); + + // Create increment instruction + let increment_instruction = Instruction::new_with_bytes( + program_id, + &[1], // 1 = increment instruction + vec![AccountMeta::new(counter_keypair.pubkey(), true)], + ); + + // Send transaction with increment instruction + let mut transaction = + Transaction::new_with_payer(&[increment_instruction], Some(&payer.pubkey())); + transaction.sign(&[&payer, &counter_keypair], recent_blockhash); + banks_client.process_transaction(transaction).await.unwrap(); + + // Check account data + let account = banks_client + .get_account(counter_keypair.pubkey()) + .await + .expect("Failed to get counter account"); + + if let Some(account_data) = account { + let counter: CounterAccount = CounterAccount::try_from_slice(&account_data.data) + .expect("Failed to deserialize counter data"); + assert_eq!(counter.count, 43); + println!("✅ Counter incremented successfully to: {}", counter.count); + } + } +} +``` + +Example output: + +```shell filename="Terminal" {6} {10} +running 1 test +[2024-10-29T20:51:13.783708000Z INFO solana_program_test] "counter_program" SBF program from /counter_program/target/deploy/counter_program.so, modified 2 seconds, 169 ms, 153 µs and 461 ns ago +[2024-10-29T20:51:13.855204000Z DEBUG solana_runtime::message_processor::stable_log] Program 
1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM invoke [1] +[2024-10-29T20:51:13.856052000Z DEBUG solana_runtime::message_processor::stable_log] Program 11111111111111111111111111111111 invoke [2] +[2024-10-29T20:51:13.856135000Z DEBUG solana_runtime::message_processor::stable_log] Program 11111111111111111111111111111111 success +[2024-10-29T20:51:13.856242000Z DEBUG solana_runtime::message_processor::stable_log] Program log: Counter initialized with value: 42 +[2024-10-29T20:51:13.856285000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM consumed 3791 of 200000 compute units +[2024-10-29T20:51:13.856307000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM success +[2024-10-29T20:51:13.860038000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM invoke [1] +[2024-10-29T20:51:13.860333000Z DEBUG solana_runtime::message_processor::stable_log] Program log: Counter incremented to: 43 +[2024-10-29T20:51:13.860355000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM consumed 756 of 200000 compute units +[2024-10-29T20:51:13.860375000Z DEBUG solana_runtime::message_processor::stable_log] Program 1111111QLbz7JHiBTspS962RLKV8GndWFwiEaqKM success +test test::test_counter_program ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.08s +``` + + diff --git a/docs/programs/testing.md b/docs/programs/testing.md index 029c24cfa..2af73ebe5 100644 --- a/docs/programs/testing.md +++ b/docs/programs/testing.md @@ -1,6 +1,7 @@ --- title: "Testing with NodeJS" description: "Testing native solana programs written with rust using NodeJS" +sidebarSortOrder: 5 --- When developing programs on Solana, ensuring their correctness and reliability @@ -18,7 +19,7 @@ There are two ways to test programs on Solana: 2. The various [BanksClient-based](https://docs.rs/solana-banks-client/latest/solana_banks_client/) test frameworks for SBF (Solana Bytecode Format) programs: Bankrun is a - framework that simulates a Solana bank’s operations, enabling developers to + framework that simulates a Solana bank's operations, enabling developers to deploy, interact with, and assess the behavior of programs under test conditions that mimic the mainnet. It helps set up the test environment and offers tools for detailed transaction insights, enhancing debugging and @@ -38,13 +39,13 @@ There are two ways to test programs on Solana: In this guide, we are using Solana Bankrun. `Bankrun` is a superfast, powerful, and lightweight framework for testing Solana programs in Node.js. -- The biggest advantage of using Solana Bankrun is that you don’t have to set +- The biggest advantage of using Solana Bankrun is that you don't have to set up - an environment to test programs like you’d have to do while using the + an environment to test programs like you'd have to do while using the `solana-test-validator`. Instead, you can do that with a piece of code, inside the tests. -- It also dynamically sets time and account data, which isn’t possible with +- It also dynamically sets time and account data, which isn't possible with `solana-test-validator` ## Installation @@ -66,7 +67,7 @@ directories: - `./tests/fixtures` (just create this directory if it doesn't exist already). - Your current working directory. - A directory you define in the `BPF_OUT_DIR` or `SBF_OUT_DIR` environment - variables. 
`export BPF_OUT_DIR=’/path/to/binary’` + variables. `export BPF_OUT_DIR='/path/to/binary'` - Build your program specifying the correct directory so that library can pick the file up from directory just from the name. `cargo build-sbf --manifest-path=./program/Cargo.toml --sbf-out-dir=./tests/fixtures` @@ -151,7 +152,7 @@ let transaction = await client.processTransaction(tx); ## Example -Here’s an example to write test for +Here's an example to write test for a [hello world program](https://github.com/solana-developers/program-examples/tree/main/basics/hello-solana/native) : ```typescript @@ -232,16 +233,3 @@ This is how the output looks like after running the tests for ℹ todo 0 ℹ duration_ms 63.52616 ``` - -## Next Steps - -- Checkout more testing examples from the - [Program Examples](/docs/programs/examples.md) -- You can also - use [anchor-bankrun](https://kevinheavey.github.io/solana-bankrun/tutorial/#anchor-integration) to - write tests in NodeJS for Anchor programs -- [Writing and testing your Solana programs using Rust](https://solana.com/docs/programs/lang-rust#how-to-test) - is possible with - [solana_program_test](https://docs.rs/solana-program-test/1.18.14/solana_program_test/) -- You can also write test with python for Solana programs written in Rust with - [solders.bankrun](https://kevinheavey.github.io/solders/api_reference/bankrun.html) diff --git a/docs/rpc/deprecated/getConfirmedBlock.mdx b/docs/rpc/deprecated/getConfirmedBlock.mdx index 42179d731..d9236f130 100644 --- a/docs/rpc/deprecated/getConfirmedBlock.mdx +++ b/docs/rpc/deprecated/getConfirmedBlock.mdx @@ -152,7 +152,7 @@ The result field will be an object with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getConfirmedBlock", diff --git a/docs/rpc/deprecated/getConfirmedBlocks.mdx b/docs/rpc/deprecated/getConfirmedBlocks.mdx index 4070da744..2e4cc769b 100644 --- a/docs/rpc/deprecated/getConfirmedBlocks.mdx +++ b/docs/rpc/deprecated/getConfirmedBlocks.mdx @@ -50,7 +50,7 @@ block, inclusive. Max range allowed is 500,000 slots. ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc": "2.0","id":1,"method":"getConfirmedBlocks","params":[5, 10]} ' ``` diff --git a/docs/rpc/deprecated/getConfirmedBlocksWithLimit.mdx b/docs/rpc/deprecated/getConfirmedBlocksWithLimit.mdx index 003014196..21efd752f 100644 --- a/docs/rpc/deprecated/getConfirmedBlocksWithLimit.mdx +++ b/docs/rpc/deprecated/getConfirmedBlocksWithLimit.mdx @@ -53,7 +53,7 @@ starting at `start_slot` for up to `limit` blocks, inclusive. 
### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getConfirmedBlocksWithLimit", diff --git a/docs/rpc/deprecated/getConfirmedSignaturesForAddress2.mdx b/docs/rpc/deprecated/getConfirmedSignaturesForAddress2.mdx index 1960df880..5db4d57a0 100644 --- a/docs/rpc/deprecated/getConfirmedSignaturesForAddress2.mdx +++ b/docs/rpc/deprecated/getConfirmedSignaturesForAddress2.mdx @@ -83,7 +83,7 @@ fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/deprecated/getConfirmedTransaction.mdx b/docs/rpc/deprecated/getConfirmedTransaction.mdx index a60762801..3a96d97c3 100644 --- a/docs/rpc/deprecated/getConfirmedTransaction.mdx +++ b/docs/rpc/deprecated/getConfirmedTransaction.mdx @@ -108,7 +108,7 @@ Encoding format for Account data ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/deprecated/getFeeCalculatorForBlockhash.mdx b/docs/rpc/deprecated/getFeeCalculatorForBlockhash.mdx index e071dadd8..75d6093e0 100644 --- a/docs/rpc/deprecated/getFeeCalculatorForBlockhash.mdx +++ b/docs/rpc/deprecated/getFeeCalculatorForBlockhash.mdx @@ -59,7 +59,7 @@ The result will be an RpcResponse JSON object with `value` equal to: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/deprecated/getFeeRateGovernor.mdx b/docs/rpc/deprecated/getFeeRateGovernor.mdx index 3a1a748b9..292be480e 100644 --- a/docs/rpc/deprecated/getFeeRateGovernor.mdx +++ b/docs/rpc/deprecated/getFeeRateGovernor.mdx @@ -42,7 +42,7 @@ with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getFeeRateGovernor"} ' ``` diff --git a/docs/rpc/deprecated/getFees.mdx b/docs/rpc/deprecated/getFees.mdx index 7ff41b570..30f0a2435 100644 --- a/docs/rpc/deprecated/getFees.mdx +++ b/docs/rpc/deprecated/getFees.mdx @@ -60,7 +60,7 @@ with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc":"2.0", "id": 1, "method":"getFees"} ' ``` diff --git a/docs/rpc/deprecated/getRecentBlockhash.mdx b/docs/rpc/deprecated/getRecentBlockhash.mdx index f5d06254f..5668aa90a 100644 --- a/docs/rpc/deprecated/getRecentBlockhash.mdx +++ b/docs/rpc/deprecated/getRecentBlockhash.mdx @@ -56,7 +56,7 @@ FeeCalculator JSON object. 
### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getRecentBlockhash"} ' ``` diff --git a/docs/rpc/deprecated/getSnapshotSlot.mdx b/docs/rpc/deprecated/getSnapshotSlot.mdx index e2b35fa5b..6b4d83fc7 100644 --- a/docs/rpc/deprecated/getSnapshotSlot.mdx +++ b/docs/rpc/deprecated/getSnapshotSlot.mdx @@ -33,7 +33,7 @@ Returns the highest slot that the node has a snapshot for ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getSnapshotSlot"} ' ``` diff --git a/docs/rpc/deprecated/getStakeActivation.mdx b/docs/rpc/deprecated/getStakeActivation.mdx index 2c7654cb4..c0a808703 100644 --- a/docs/rpc/deprecated/getStakeActivation.mdx +++ b/docs/rpc/deprecated/getStakeActivation.mdx @@ -64,7 +64,7 @@ The result will be a JSON object with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/getAccountInfo.mdx b/docs/rpc/http/getAccountInfo.mdx index 5f3caa40c..a96d9ebb2 100644 --- a/docs/rpc/http/getAccountInfo.mdx +++ b/docs/rpc/http/getAccountInfo.mdx @@ -90,7 +90,7 @@ The result will be an RpcResponse JSON object with `value` equal to: \(and is strictly read-only\) - `rentEpoch: ` - the epoch at which this account will next owe rent, as u64 - - `size: ` - the data size of the account + - `space: ` - the data size of the account @@ -99,7 +99,7 @@ The result will be an RpcResponse JSON object with `value` equal to: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, @@ -120,19 +120,14 @@ curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" - { "jsonrpc": "2.0", "result": { - "context": { - "slot": 1 - }, + "context": { "apiVersion": "2.0.15", "slot": 341197053 }, "value": { - "data": [ - "11116bv5nS2h3y12kD1yUKeMZvGcKLSjQgX6BeV7u1FrjeJcKfsHRTPuR3oZ1EioKtYGiYxpxMG5vpbZLsbcBYBEmZZcMKaSoGx9JZeAuWf", - "base58" - ], + "data": ["", "base58"], "executable": false, - "lamports": 1000000000, + "lamports": 88849814690250, "owner": "11111111111111111111111111111111", - "rentEpoch": 2, - "space": 80 + "rentEpoch": 18446744073709551615, + "space": 0 } }, "id": 1 diff --git a/docs/rpc/http/getBalance.mdx b/docs/rpc/http/getBalance.mdx index 7105ed15d..514f653cf 100644 --- a/docs/rpc/http/getBalance.mdx +++ b/docs/rpc/http/getBalance.mdx @@ -47,7 +47,7 @@ balance ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getBalance", diff --git a/docs/rpc/http/getBlock.mdx b/docs/rpc/http/getBlock.mdx index 559bd6273..d75329dc4 100644 --- a/docs/rpc/http/getBlock.mdx +++ b/docs/rpc/http/getBlock.mdx @@ -205,7 +205,7 @@ The result field will be an object with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H 
"Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0","id":1, "method":"getBlock", diff --git a/docs/rpc/http/getBlockCommitment.mdx b/docs/rpc/http/getBlockCommitment.mdx index 54ecfe5bd..3d4446da0 100644 --- a/docs/rpc/http/getBlockCommitment.mdx +++ b/docs/rpc/http/getBlockCommitment.mdx @@ -36,7 +36,7 @@ The result field will be a JSON object containing: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getBlockCommitment", diff --git a/docs/rpc/http/getBlockHeight.mdx b/docs/rpc/http/getBlockHeight.mdx index 3d48fcdfc..8be00d7b0 100644 --- a/docs/rpc/http/getBlockHeight.mdx +++ b/docs/rpc/http/getBlockHeight.mdx @@ -42,7 +42,7 @@ Configuration object containing the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc":"2.0","id":1, "method":"getBlockHeight" diff --git a/docs/rpc/http/getBlockProduction.mdx b/docs/rpc/http/getBlockProduction.mdx index 6e79849e8..90d03b34f 100644 --- a/docs/rpc/http/getBlockProduction.mdx +++ b/docs/rpc/http/getBlockProduction.mdx @@ -62,7 +62,7 @@ The result will be an RpcResponse JSON object with `value` equal to: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getBlockProduction"} ' ``` diff --git a/docs/rpc/http/getBlockTime.mdx b/docs/rpc/http/getBlockTime.mdx index f8a92ac1f..c0435fab4 100644 --- a/docs/rpc/http/getBlockTime.mdx +++ b/docs/rpc/http/getBlockTime.mdx @@ -37,7 +37,7 @@ Returns the estimated production time of a block. ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc":"2.0", "id":1, "method": "getBlockTime", diff --git a/docs/rpc/http/getBlocks.mdx b/docs/rpc/http/getBlocks.mdx index 59cb0c1b6..3967e2a85 100644 --- a/docs/rpc/http/getBlocks.mdx +++ b/docs/rpc/http/getBlocks.mdx @@ -54,7 +54,7 @@ block, inclusive. Max range allowed is 500,000 slots. ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getBlocks", diff --git a/docs/rpc/http/getBlocksWithLimit.mdx b/docs/rpc/http/getBlocksWithLimit.mdx index dd432d759..f86b888bc 100644 --- a/docs/rpc/http/getBlocksWithLimit.mdx +++ b/docs/rpc/http/getBlocksWithLimit.mdx @@ -53,7 +53,7 @@ starting at `start_slot` for up to `limit` blocks, inclusive. 
### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id":1, diff --git a/docs/rpc/http/getClusterNodes.mdx b/docs/rpc/http/getClusterNodes.mdx index 647753ccb..18e685a75 100644 --- a/docs/rpc/http/getClusterNodes.mdx +++ b/docs/rpc/http/getClusterNodes.mdx @@ -39,7 +39,7 @@ fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getClusterNodes" diff --git a/docs/rpc/http/getEpochInfo.mdx b/docs/rpc/http/getEpochInfo.mdx index 97fbdc91f..ce4f696e6 100644 --- a/docs/rpc/http/getEpochInfo.mdx +++ b/docs/rpc/http/getEpochInfo.mdx @@ -50,7 +50,7 @@ The result field will be an object with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getEpochInfo"} ' ``` diff --git a/docs/rpc/http/getEpochSchedule.mdx b/docs/rpc/http/getEpochSchedule.mdx index 37f3b34ca..46f61341f 100644 --- a/docs/rpc/http/getEpochSchedule.mdx +++ b/docs/rpc/http/getEpochSchedule.mdx @@ -36,7 +36,7 @@ The result field will be an object with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc":"2.0","id":1, "method":"getEpochSchedule" diff --git a/docs/rpc/http/getFeeForMessage.mdx b/docs/rpc/http/getFeeForMessage.mdx index 8c42926d4..3c2ee74be 100644 --- a/docs/rpc/http/getFeeForMessage.mdx +++ b/docs/rpc/http/getFeeForMessage.mdx @@ -51,7 +51,7 @@ Configuration object containing the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "id":1, "jsonrpc":"2.0", diff --git a/docs/rpc/http/getFirstAvailableBlock.mdx b/docs/rpc/http/getFirstAvailableBlock.mdx index 7df457c80..c21af52eb 100644 --- a/docs/rpc/http/getFirstAvailableBlock.mdx +++ b/docs/rpc/http/getFirstAvailableBlock.mdx @@ -27,7 +27,7 @@ ledger ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc":"2.0","id":1, "method":"getFirstAvailableBlock" diff --git a/docs/rpc/http/getGenesisHash.mdx b/docs/rpc/http/getGenesisHash.mdx index 565c65e24..e7f537644 100644 --- a/docs/rpc/http/getGenesisHash.mdx +++ b/docs/rpc/http/getGenesisHash.mdx @@ -26,7 +26,7 @@ Returns the genesis hash ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getGenesisHash"} ' ``` diff --git a/docs/rpc/http/getHealth.mdx b/docs/rpc/http/getHealth.mdx index c036737b5..48952db2c 100644 --- a/docs/rpc/http/getHealth.mdx +++ b/docs/rpc/http/getHealth.mdx @@ -31,7 +31,7 @@ of the error response are **UNSTABLE** and may change in the future ### Code sample ```shell -curl 
https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getHealth"} ' ``` diff --git a/docs/rpc/http/getHighestSnapshotSlot.mdx b/docs/rpc/http/getHighestSnapshotSlot.mdx index 6822dfbbd..e851c05a0 100644 --- a/docs/rpc/http/getHighestSnapshotSlot.mdx +++ b/docs/rpc/http/getHighestSnapshotSlot.mdx @@ -41,7 +41,7 @@ fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1,"method":"getHighestSnapshotSlot"} ' ``` diff --git a/docs/rpc/http/getIdentity.mdx b/docs/rpc/http/getIdentity.mdx index ae835488a..a027615ad 100644 --- a/docs/rpc/http/getIdentity.mdx +++ b/docs/rpc/http/getIdentity.mdx @@ -29,7 +29,7 @@ The result field will be a JSON object with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getIdentity"} ' ``` diff --git a/docs/rpc/http/getInflationGovernor.mdx b/docs/rpc/http/getInflationGovernor.mdx index 8d4624307..e93e7bd5e 100644 --- a/docs/rpc/http/getInflationGovernor.mdx +++ b/docs/rpc/http/getInflationGovernor.mdx @@ -46,7 +46,7 @@ The result field will be a JSON object with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getInflationGovernor"} ' ``` diff --git a/docs/rpc/http/getInflationRate.mdx b/docs/rpc/http/getInflationRate.mdx index 4901da3ca..78120b0c0 100644 --- a/docs/rpc/http/getInflationRate.mdx +++ b/docs/rpc/http/getInflationRate.mdx @@ -32,7 +32,7 @@ The result field will be a JSON object with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getInflationRate"} ' ``` diff --git a/docs/rpc/http/getInflationReward.mdx b/docs/rpc/http/getInflationReward.mdx index 677b69b9f..ebde5bc1b 100644 --- a/docs/rpc/http/getInflationReward.mdx +++ b/docs/rpc/http/getInflationReward.mdx @@ -58,7 +58,7 @@ The result field will be a JSON array with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/getLargestAccounts.mdx b/docs/rpc/http/getLargestAccounts.mdx index c513f1393..6ca86f8a3 100644 --- a/docs/rpc/http/getLargestAccounts.mdx +++ b/docs/rpc/http/getLargestAccounts.mdx @@ -49,7 +49,7 @@ The result will be an RpcResponse JSON object with `value` equal to an array of ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getLargestAccounts"} ' ``` diff --git a/docs/rpc/http/getLatestBlockhash.mdx b/docs/rpc/http/getLatestBlockhash.mdx index 
a91818b8c..b43940402 100644 --- a/docs/rpc/http/getLatestBlockhash.mdx +++ b/docs/rpc/http/getLatestBlockhash.mdx @@ -54,7 +54,7 @@ object including: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "id":1, "jsonrpc":"2.0", diff --git a/docs/rpc/http/getLeaderSchedule.mdx b/docs/rpc/http/getLeaderSchedule.mdx index 26dee13c9..4ba0a3bc3 100644 --- a/docs/rpc/http/getLeaderSchedule.mdx +++ b/docs/rpc/http/getLeaderSchedule.mdx @@ -57,7 +57,7 @@ Returns a result with one of the two following values: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/getMaxRetransmitSlot.mdx b/docs/rpc/http/getMaxRetransmitSlot.mdx index 717a767a7..b1f2a1847 100644 --- a/docs/rpc/http/getMaxRetransmitSlot.mdx +++ b/docs/rpc/http/getMaxRetransmitSlot.mdx @@ -27,7 +27,7 @@ Get the max slot seen from retransmit stage. ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getMaxRetransmitSlot"} ' ``` diff --git a/docs/rpc/http/getMaxShredInsertSlot.mdx b/docs/rpc/http/getMaxShredInsertSlot.mdx index 64981ff07..059c0fd1d 100644 --- a/docs/rpc/http/getMaxShredInsertSlot.mdx +++ b/docs/rpc/http/getMaxShredInsertSlot.mdx @@ -27,7 +27,7 @@ Get the max slot seen from after shred insert. ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getMaxShredInsertSlot"} ' ``` diff --git a/docs/rpc/http/getMinimumBalanceForRentExemption.mdx b/docs/rpc/http/getMinimumBalanceForRentExemption.mdx index 87c1bf861..11ef41b38 100644 --- a/docs/rpc/http/getMinimumBalanceForRentExemption.mdx +++ b/docs/rpc/http/getMinimumBalanceForRentExemption.mdx @@ -42,7 +42,7 @@ Configuration object containing the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getMinimumBalanceForRentExemption", diff --git a/docs/rpc/http/getMultipleAccounts.mdx b/docs/rpc/http/getMultipleAccounts.mdx index 6bbe97cd5..ce9312bdc 100644 --- a/docs/rpc/http/getMultipleAccounts.mdx +++ b/docs/rpc/http/getMultipleAccounts.mdx @@ -102,7 +102,7 @@ The result will be a JSON object with `value` equal to an array of: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, @@ -126,24 +126,22 @@ curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" - { "jsonrpc": "2.0", "result": { - "context": { - "slot": 1 - }, + "context": { "apiVersion": "2.0.15", "slot": 341197247 }, "value": [ { - "data": ["", "base64"], + "data": ["", "base58"], "executable": false, - "lamports": 1000000000, + "lamports": 88849814690250, "owner": "11111111111111111111111111111111", - "rentEpoch": 2, - 
"space": 16 + "rentEpoch": 18446744073709551615, + "space": 0 }, { - "data": ["", "base64"], + "data": ["", "base58"], "executable": false, - "lamports": 5000000000, - "owner": "11111111111111111111111111111111", - "rentEpoch": 2, + "lamports": 998763433, + "owner": "2WRuhE4GJFoE23DYzp2ij6ZnuQ8p9mJeU6gDgfsjR4or", + "rentEpoch": 18446744073709551615, "space": 0 } ] diff --git a/docs/rpc/http/getProgramAccounts.mdx b/docs/rpc/http/getProgramAccounts.mdx index 79b37ad3e..dd59c122d 100644 --- a/docs/rpc/http/getProgramAccounts.mdx +++ b/docs/rpc/http/getProgramAccounts.mdx @@ -119,7 +119,7 @@ The resultant response array will contain: \(and is strictly read-only\) - `rentEpoch: ` - the epoch at which this account will next owe rent, as u64 - - `size: ` - the data size of the account + - `space: ` - the data size of the account @@ -128,7 +128,7 @@ The resultant response array will contain: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/getRecentPerformanceSamples.mdx b/docs/rpc/http/getRecentPerformanceSamples.mdx index 7fcabb9a4..2c431533c 100644 --- a/docs/rpc/http/getRecentPerformanceSamples.mdx +++ b/docs/rpc/http/getRecentPerformanceSamples.mdx @@ -31,14 +31,14 @@ An array of `RpcPerfSample` with the following fields: period - `numSlots: ` - Number of slots completed during the sample period - `samplePeriodSecs: ` - Number of seconds in a sample window -- `numNonVoteTransaction: ` - Number of non-vote transactions processed +- `numNonVoteTransactions: ` - Number of non-vote transactions processed during the sample period. - `numNonVoteTransaction` is present starting with v1.15. To get a number of + `numNonVoteTransactions` is present starting with v1.15. To get a number of voting transactions compute:
- `numTransactions - numNonVoteTransaction` + `numTransactions - numNonVoteTransactions`
@@ -48,11 +48,12 @@ An array of `RpcPerfSample` with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc":"2.0", "id":1, "method": "getRecentPerformanceSamples", - "params": [4]} + "params": [4] + } ' ``` @@ -65,28 +66,28 @@ curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" - { "numSlots": 126, "numTransactions": 126, - "numNonVoteTransaction": 1, + "numNonVoteTransactions": 1, "samplePeriodSecs": 60, "slot": 348125 }, { "numSlots": 126, "numTransactions": 126, - "numNonVoteTransaction": 1, + "numNonVoteTransactions": 1, "samplePeriodSecs": 60, "slot": 347999 }, { "numSlots": 125, "numTransactions": 125, - "numNonVoteTransaction": 0, + "numNonVoteTransactions": 0, "samplePeriodSecs": 60, "slot": 347873 }, { "numSlots": 125, "numTransactions": 125, - "numNonVoteTransaction": 0, + "numNonVoteTransactions": 0, "samplePeriodSecs": 60, "slot": 347748 } diff --git a/docs/rpc/http/getRecentPrioritizationFees.mdx b/docs/rpc/http/getRecentPrioritizationFees.mdx index 8860cf925..25359e528 100644 --- a/docs/rpc/http/getRecentPrioritizationFees.mdx +++ b/docs/rpc/http/getRecentPrioritizationFees.mdx @@ -47,7 +47,7 @@ An array of `RpcPrioritizationFee` with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc":"2.0", "id":1, "method": "getRecentPrioritizationFees", diff --git a/docs/rpc/http/getSignatureStatuses.mdx b/docs/rpc/http/getSignatureStatuses.mdx index 6c4258ba7..bf101e360 100644 --- a/docs/rpc/http/getSignatureStatuses.mdx +++ b/docs/rpc/http/getSignatureStatuses.mdx @@ -68,7 +68,7 @@ An array of `RpcResponse` consisting of either: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/getSignaturesForAddress.mdx b/docs/rpc/http/getSignaturesForAddress.mdx index 33373bf27..2587fa931 100644 --- a/docs/rpc/http/getSignaturesForAddress.mdx +++ b/docs/rpc/http/getSignaturesForAddress.mdx @@ -78,7 +78,7 @@ containing transaction signature information with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/getSlot.mdx b/docs/rpc/http/getSlot.mdx index 266f30702..504c6c323 100644 --- a/docs/rpc/http/getSlot.mdx +++ b/docs/rpc/http/getSlot.mdx @@ -43,7 +43,7 @@ Configuration object containing the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getSlot"} ' ``` diff --git a/docs/rpc/http/getSlotLeader.mdx b/docs/rpc/http/getSlotLeader.mdx index b0fa2e787..355c9c024 100644 --- a/docs/rpc/http/getSlotLeader.mdx +++ b/docs/rpc/http/getSlotLeader.mdx @@ -42,7 +42,7 @@ Configuration object containing the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: 
application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getSlotLeader"} ' ``` diff --git a/docs/rpc/http/getSlotLeaders.mdx b/docs/rpc/http/getSlotLeaders.mdx index cf1975d69..2b095babd 100644 --- a/docs/rpc/http/getSlotLeaders.mdx +++ b/docs/rpc/http/getSlotLeaders.mdx @@ -37,7 +37,7 @@ If the current slot is `#99` - query the next `10` leaders with the following request: ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc":"2.0", "id": 1, "method": "getSlotLeaders", diff --git a/docs/rpc/http/getStakeMinimumDelegation.mdx b/docs/rpc/http/getStakeMinimumDelegation.mdx index 9383f0ec7..b98f1a359 100644 --- a/docs/rpc/http/getStakeMinimumDelegation.mdx +++ b/docs/rpc/http/getStakeMinimumDelegation.mdx @@ -40,7 +40,7 @@ The result will be an RpcResponse JSON object with `value` equal to: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc":"2.0", "id":1, "method": "getStakeMinimumDelegation" diff --git a/docs/rpc/http/getSupply.mdx b/docs/rpc/http/getSupply.mdx index 814993916..ebd60fa00 100644 --- a/docs/rpc/http/getSupply.mdx +++ b/docs/rpc/http/getSupply.mdx @@ -50,7 +50,7 @@ object containing: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0", "id":1, "method":"getSupply"} ' ``` diff --git a/docs/rpc/http/getTokenAccountBalance.mdx b/docs/rpc/http/getTokenAccountBalance.mdx index 74dd8558d..dfef4c521 100644 --- a/docs/rpc/http/getTokenAccountBalance.mdx +++ b/docs/rpc/http/getTokenAccountBalance.mdx @@ -55,7 +55,7 @@ from [getBlock](/docs/rpc/http/getblock) follows a similar structure. 
### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getTokenAccountBalance", diff --git a/docs/rpc/http/getTokenAccountsByDelegate.mdx b/docs/rpc/http/getTokenAccountsByDelegate.mdx index 480f90564..619f743ac 100644 --- a/docs/rpc/http/getTokenAccountsByDelegate.mdx +++ b/docs/rpc/http/getTokenAccountsByDelegate.mdx @@ -101,7 +101,7 @@ JSON objects, which will contain: (and is strictly read-only\) - `rentEpoch: ` - the epoch at which this account will next owe rent, as u64 - - `size: ` - the data size of the account + - `space: ` - the data size of the account When the data is requested with the `jsonParsed` encoding a format similar to that of the [Token Balances Structure](/docs/rpc/json-structures#token-balances) @@ -115,7 +115,7 @@ can be expected inside the structure, both for the `tokenAmount` and the ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/getTokenAccountsByOwner.mdx b/docs/rpc/http/getTokenAccountsByOwner.mdx index 84d5d1fd0..a6d1677b2 100644 --- a/docs/rpc/http/getTokenAccountsByOwner.mdx +++ b/docs/rpc/http/getTokenAccountsByOwner.mdx @@ -18,9 +18,9 @@ Returns all SPL Token accounts by token owner. Pubkey of account delegate to query, as base-58 encoded string - + -A JSON object with one of the following fields: +A JSON object with either one of the following fields: - `mint: ` - Pubkey of the specific token Mint to limit accounts to, as base-58 encoded string; or @@ -101,7 +101,7 @@ JSON objects, which will contain: \(and is strictly read-only\) - `rentEpoch: ` - the epoch at which this account will next owe rent, as u64 - - `size: ` - the data size of the account + - `space: ` - the data size of the account When the data is requested with the `jsonParsed` encoding a format similar to that of the [Token Balances Structure](/docs/rpc/json-structures#token-balances) @@ -115,15 +115,15 @@ can be expected inside the structure, both for the `tokenAmount` and the ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getTokenAccountsByOwner", "params": [ - "4Qkev8aNZcqFNSRhQzwyLMFSsi94jHqE8WNVTJzTP99F", + "A1TMhSGzQxMr1TboBKtgixKz1sS6REASMxPo1qsyTSJd", { - "mint": "3wyAj7Rt1TWVPZVteFJPLa26JmLvdb1CAKEFZm3NY75E" + "programId": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" }, { "encoding": "jsonParsed" @@ -139,46 +139,65 @@ curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" - { "jsonrpc": "2.0", "result": { - "context": { - "slot": 1114 - }, + "context": { "apiVersion": "2.0.15", "slot": 341197933 }, "value": [ { "account": { "data": { - "program": "spl-token", "parsed": { - "accountType": "account", "info": { - "tokenAmount": { - "amount": "1", - "decimals": 1, - "uiAmount": 0.1, - "uiAmountString": "0.1" - }, - "delegate": "4Nd1mBQtrMJVYVfKf2PJy9NZUZdTAsp7D4xWLs4gDB4T", - "delegatedAmount": { - "amount": "1", - "decimals": 1, - "uiAmount": 0.1, - "uiAmountString": "0.1" - }, + "isNative": false, + "mint": "2cHr7QS3xfuSV8wdxo3ztuF4xbiarF6Nrgx3qpx3HzXR", + "owner": 
"A1TMhSGzQxMr1TboBKtgixKz1sS6REASMxPo1qsyTSJd", "state": "initialized", + "tokenAmount": { + "amount": "420000000000000", + "decimals": 6, + "uiAmount": 420000000.0, + "uiAmountString": "420000000" + } + }, + "type": "account" + }, + "program": "spl-token", + "space": 165 + }, + "executable": false, + "lamports": 2039280, + "owner": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA", + "rentEpoch": 18446744073709551615, + "space": 165 + }, + "pubkey": "BGocb4GEpbTFm8UFV2VsDSaBXHELPfAXrvd4vtt8QWrA" + }, + { + "account": { + "data": { + "parsed": { + "info": { "isNative": false, - "mint": "3wyAj7Rt1TWVPZVteFJPLa26JmLvdb1CAKEFZm3NY75E", - "owner": "4Qkev8aNZcqFNSRhQzwyLMFSsi94jHqE8WNVTJzTP99F" + "mint": "4KVSsAtsG8JByKfB2jYWgGwvVR9WcBSUfsqpTSL9c3Jr", + "owner": "A1TMhSGzQxMr1TboBKtgixKz1sS6REASMxPo1qsyTSJd", + "state": "initialized", + "tokenAmount": { + "amount": "10000000000000", + "decimals": 9, + "uiAmount": 10000.0, + "uiAmountString": "10000" + } }, "type": "account" }, + "program": "spl-token", "space": 165 }, "executable": false, - "lamports": 1726080, + "lamports": 2039280, "owner": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA", - "rentEpoch": 4, + "rentEpoch": 18446744073709551615, "space": 165 }, - "pubkey": "C2gJg6tKpQs41PRS1nC8aw3ZKNZK3HQQZGVrDFDup5nx" + "pubkey": "9PwCPoWJ75LSgZeGMubXBdufYMVd66HrcF78QzW6ZHkV" } ] }, diff --git a/docs/rpc/http/getTokenLargestAccounts.mdx b/docs/rpc/http/getTokenLargestAccounts.mdx index 570c8dd68..3702a0ad7 100644 --- a/docs/rpc/http/getTokenLargestAccounts.mdx +++ b/docs/rpc/http/getTokenLargestAccounts.mdx @@ -52,7 +52,7 @@ JSON objects containing: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getTokenLargestAccounts", diff --git a/docs/rpc/http/getTokenSupply.mdx b/docs/rpc/http/getTokenSupply.mdx index f49ae5ecc..193f79d48 100644 --- a/docs/rpc/http/getTokenSupply.mdx +++ b/docs/rpc/http/getTokenSupply.mdx @@ -51,7 +51,7 @@ object containing: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "getTokenSupply", diff --git a/docs/rpc/http/getTransaction.mdx b/docs/rpc/http/getTransaction.mdx index 164b7ea1a..1fa973600 100644 --- a/docs/rpc/http/getTransaction.mdx +++ b/docs/rpc/http/getTransaction.mdx @@ -143,7 +143,7 @@ Encoding for the returned Transaction ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/getTransactionCount.mdx b/docs/rpc/http/getTransactionCount.mdx index c466131a8..d3b07b6c7 100644 --- a/docs/rpc/http/getTransactionCount.mdx +++ b/docs/rpc/http/getTransactionCount.mdx @@ -42,7 +42,7 @@ Configuration object containing the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getTransactionCount"} ' ``` diff --git a/docs/rpc/http/getVersion.mdx b/docs/rpc/http/getVersion.mdx index e71f85c99..5aeb25ea1 100644 --- a/docs/rpc/http/getVersion.mdx +++ 
b/docs/rpc/http/getVersion.mdx @@ -31,7 +31,7 @@ The result field will be a JSON object with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"getVersion"} ' ``` diff --git a/docs/rpc/http/getVoteAccounts.mdx b/docs/rpc/http/getVoteAccounts.mdx index 322b0ce68..2ba1257d8 100644 --- a/docs/rpc/http/getVoteAccounts.mdx +++ b/docs/rpc/http/getVoteAccounts.mdx @@ -69,7 +69,7 @@ each containing an array of JSON objects with the following sub fields: Restrict results to a single validator vote account: ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/index.mdx b/docs/rpc/http/index.mdx index db962ca73..0392e5605 100644 --- a/docs/rpc/http/index.mdx +++ b/docs/rpc/http/index.mdx @@ -29,14 +29,16 @@ To make a JSON-RPC request, send an HTTP POST request with a fields: - `jsonrpc: ` - set to `"2.0"` -- `id: ` - a unique client-generated identifying integer +- `id: ` - a unique identifier for the request, + generated by the client. Typically a string or number; `null` is + technically allowed but not advised - `method: ` - a string containing the method to be invoked - `params: ` - a JSON array of ordered parameter values Example using curl: ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, @@ -64,7 +66,7 @@ The commitment parameter should be included as the last element in the `params` array: ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/isBlockhashValid.mdx b/docs/rpc/http/isBlockhashValid.mdx index 76eb165c7..c9e951f67 100644 --- a/docs/rpc/http/isBlockhashValid.mdx +++ b/docs/rpc/http/isBlockhashValid.mdx @@ -52,7 +52,7 @@ Configuration object containing the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "id":45, "jsonrpc":"2.0", diff --git a/docs/rpc/http/minimumLedgerSlot.mdx b/docs/rpc/http/minimumLedgerSlot.mdx index 73d5d40c4..d4596b0cb 100644 --- a/docs/rpc/http/minimumLedgerSlot.mdx +++ b/docs/rpc/http/minimumLedgerSlot.mdx @@ -32,7 +32,7 @@ Returns the lowest slot that the node has information about in its ledger.
### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' {"jsonrpc":"2.0","id":1, "method":"minimumLedgerSlot"} ' ``` diff --git a/docs/rpc/http/requestAirdrop.mdx b/docs/rpc/http/requestAirdrop.mdx index b6181d644..b2ff929ad 100644 --- a/docs/rpc/http/requestAirdrop.mdx +++ b/docs/rpc/http/requestAirdrop.mdx @@ -46,7 +46,7 @@ Configuration object containing the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, "method": "requestAirdrop", diff --git a/docs/rpc/http/sendTransaction.mdx b/docs/rpc/http/sendTransaction.mdx index 08e63e327..d77f578d0 100644 --- a/docs/rpc/http/sendTransaction.mdx +++ b/docs/rpc/http/sendTransaction.mdx @@ -19,8 +19,8 @@ cluster. While the rpc service will reasonably retry to submit it, the transaction could be rejected if transaction's `recent_blockhash` expires before it lands. -Use [`getSignatureStatuses`](#getsignaturestatuses) to ensure a transaction is -processed and confirmed. +Use [`getSignatureStatuses`](/docs/rpc/http/getSignatureStatuses) to ensure a +transaction is processed and confirmed. Before submitting, the following preflight checks are performed: @@ -99,7 +99,7 @@ encoded string ([transaction id](/docs/terminology.md#transaction-id)) ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, diff --git a/docs/rpc/http/simulateTransaction.mdx b/docs/rpc/http/simulateTransaction.mdx index c211772f0..8256f7d72 100644 --- a/docs/rpc/http/simulateTransaction.mdx +++ b/docs/rpc/http/simulateTransaction.mdx @@ -149,7 +149,7 @@ with the following fields: ### Code sample ```shell -curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d ' +curl https://api.devnet.solana.com -s -X POST -H "Content-Type: application/json" -d ' { "jsonrpc": "2.0", "id": 1, @@ -157,7 +157,7 @@ curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" - "params": [ "AQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAEDArczbMia1tLmq7zz4DinMNN0pJ1JtLdqIJPUw3YrGCzYAMHBsgN27lcgB6H2WQvFgyZuJYHa46puOQo9yQ8CVQbd9uHXZaGT2cvhRs7reawctIXtX1s3kTqM9YV+/wCp20C7Wj2aiuk5TReAXo+VTVg8QTHjs0UjNMMKCvpzZ+ABAgEBARU=", { - "encoding":"base64", + "encoding":"base64" } ] } diff --git a/docs/terminology.md b/docs/terminology.md index 24ea1a8db..eb82266b9 100644 --- a/docs/terminology.md +++ b/docs/terminology.md @@ -35,10 +35,23 @@ The key may be one of: The address of the program that owns the account. Only the owning program is capable of modifying the account. +See also [authority](#authority). + ## app A front-end application that interacts with a Solana cluster. +## authority + +The address of a user that has some kind of permission over an account. + +For example: + +- The ability to mint new tokens is given to the account that is the 'mint + authority' for the token mint. +- The ability to upgrade a program is given to the account that is the 'upgrade + authority' of a program. + ## bank state The result of interpreting all programs on the ledger at a given @@ -69,7 +82,7 @@ a block chain. 
## BPF loader The Solana program that owns and loads -[BPF](/docs/programs/faq.md#berkeley-packet-filter-bpf) +[BPF](/docs/core/programs#berkeley-packet-filter-bpf) [onchain programs](#onchain-program), allowing the program to interface with the runtime. @@ -131,7 +144,7 @@ consensus. ## drone -An off-chain service that acts as a custodian for a user's private key. It +An offchain service that acts as a custodian for a user's private key. It typically serves to validate and sign transactions. ## entry @@ -434,7 +447,7 @@ non-overlapping, comprising roughly equal real-world time as per See [onchain program](#onchain-program). -## sol +## SOL The [native token](#native-token) of a Solana [cluster](#cluster). @@ -490,6 +503,11 @@ same features as the [Token Program](#token-program), but comes with extensions such as confidential transfers, custom transfer logic, extended metadata, and much more. +## token mint + +An account that can produce (or 'mint') tokens. Different tokens are +distinguished by their unique token mint addresses. + ## Token Program The [Token Program](https://spl.solana.com/token) has the program ID diff --git a/package.json b/package.json index f09b9b1f3..7b05ad619 100644 --- a/package.json +++ b/package.json @@ -10,10 +10,11 @@ "scripts": { "runner": "npx ts-node -r tsconfig-paths/register", "contentlayer:build": "npx contentlayer2 build --clearCache", - "dev": "yarn contentlayer:build && next dev -p 3001", - "build": "yarn prettier:i18n && yarn contentlayer:build && next build", + "code-import": "npx esrun coder.ts", + "dev": "pnpm code-import && pnpm contentlayer:build && concurrently -p \"[{name}]\" -n \"code import,next dev\" -c \"bgBlue.bold,bgGreen.bold\" \"pnpm code-import --watch\" \"next dev -p 3001\"", + "build": "pnpm code-import && pnpm prettier:i18n && pnpm contentlayer:build && next build", "start": "next start -p 3001", - "test": "yarn contentlayer:build", + "test": "pnpm code-import && pnpm prettier && pnpm contentlayer:build", "lint": "next lint", "prettier:i18n": "prettier -cw \"i18n/**/*.{js,jsx,ts,tsx,md,css,md,mdx}\" --ignore-path \"[]\"", "prettier": "prettier -c \"./**/*.{js,jsx,ts,tsx,md,css,md,mdx}\"", @@ -21,7 +22,8 @@ "crowdin:download": "crowdin download && npm run prettier:i18n", "crowdin:upload": "crowdin upload sources", "browser-sync": "browser-sync start --proxy \"localhost:3000\" --files \"**/*.md\"", - "dev:sync": "yarn dev & (sleep 3 && yarn browser-sync)" + "prepare": "husky", + "dev:sync": "pnpm dev & (sleep 5 && pnpm browser-sync)" }, "dependencies": { "@crowdin/cli": "^3.18.0", @@ -40,7 +42,25 @@ "typescript": "5.3.3" }, "devDependencies": { + "@types/mdast": "^4.0.4", + "chokidar": "^3.6.0", + "concurrently": "^8.2.2", "contentlayer2": "^0.4.6", - "prettier": "^3.2.4" + "husky": "^9.1.4", + "ignore": "^5.3.1", + "lint-staged": "^15.2.7", + "mdast": "^3.0.0", + "prettier": "^3.2.4", + "remark": "^15.0.1", + "remark-frontmatter": "^5.0.0", + "remark-parse": "^11.0.0", + "remark-stringify": "^11.0.0", + "strip-indent": "^4.0.0", + "unified": "^11.0.5", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.2" + }, + "lint-staged": { + "*.{js,jsx,json,ts,tsx,md,css,mdx,yml,yaml}": "pnpm prettier:fix" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml new file mode 100644 index 000000000..627a7b48e --- /dev/null +++ b/pnpm-lock.yaml @@ -0,0 +1,8226 @@ +lockfileVersion: "9.0" + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + .: + dependencies: + "@crowdin/cli": + specifier: ^3.18.0 + version: 3.19.4 + 
"@types/node": + specifier: 20.11.17 + version: 20.11.17 + "@types/react": + specifier: 18.2.15 + version: 18.2.15 + "@types/react-dom": + specifier: 18.2.7 + version: 18.2.7 + autoprefixer: + specifier: 10.4.14 + version: 10.4.14(postcss@8.4.26) + eslint: + specifier: 8.45.0 + version: 8.45.0 + eslint-config-next: + specifier: 14.1.0 + version: 14.1.0(eslint@8.45.0)(typescript@5.3.3) + next: + specifier: 14.1.0 + version: 14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + next-contentlayer2: + specifier: ^0.4.6 + version: 0.4.6(contentlayer2@0.4.6(esbuild@0.20.2))(esbuild@0.20.2)(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + postcss: + specifier: 8.4.26 + version: 8.4.26 + react: + specifier: 18.2.0 + version: 18.2.0 + react-dom: + specifier: 18.2.0 + version: 18.2.0(react@18.2.0) + tailwindcss: + specifier: 3.4.1 + version: 3.4.1 + typescript: + specifier: 5.3.3 + version: 5.3.3 + devDependencies: + "@types/mdast": + specifier: ^4.0.4 + version: 4.0.4 + chokidar: + specifier: ^3.6.0 + version: 3.6.0 + concurrently: + specifier: ^8.2.2 + version: 8.2.2 + contentlayer2: + specifier: ^0.4.6 + version: 0.4.6(esbuild@0.20.2) + husky: + specifier: ^9.1.4 + version: 9.1.6 + ignore: + specifier: ^5.3.1 + version: 5.3.2 + lint-staged: + specifier: ^15.2.7 + version: 15.2.10 + mdast: + specifier: ^3.0.0 + version: 3.0.0 + prettier: + specifier: ^3.2.4 + version: 3.3.3 + remark: + specifier: ^15.0.1 + version: 15.0.1 + remark-frontmatter: + specifier: ^5.0.0 + version: 5.0.0 + remark-parse: + specifier: ^11.0.0 + version: 11.0.0 + remark-stringify: + specifier: ^11.0.0 + version: 11.0.0 + strip-indent: + specifier: ^4.0.0 + version: 4.0.0 + unified: + specifier: ^11.0.5 + version: 11.0.5 + unist-util-visit: + specifier: ^5.0.0 + version: 5.0.0 + vfile: + specifier: ^6.0.2 + version: 6.0.3 + +packages: + "@alloc/quick-lru@5.2.0": + resolution: + { + integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==, + } + engines: { node: ">=10" } + + "@babel/runtime@7.25.6": + resolution: + { + integrity: sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==, + } + engines: { node: ">=6.9.0" } + + "@contentlayer2/cli@0.4.3": + resolution: + { + integrity: sha512-ZJ+Iiu2rVI50x60XoqnrsO/Q8eqFX5AlP1L0U/3ygaAas3tnOqTzQZ1UsxYQMpJzcLok24ddlhKfQKbCMUJPiQ==, + } + + "@contentlayer2/client@0.4.3": + resolution: + { + integrity: sha512-Znz6zW0ShVRq3OCQ6SwsfPQ1Wn28dQpiL33r53OjWuiirXhaSdam4MXRjYSFzCq5JK0kY6Hx0CF0fk3oVbsiVQ==, + } + + "@contentlayer2/core@0.4.3": + resolution: + { + integrity: sha512-jNagCFHLB8u/qfjoAI+JUuz3jf2hxYFYusgfh+oSq9+jHala4bbLEJOhJmmaY1Dmv2AV6BI7W8Jjy6WIu5g5Yg==, + } + peerDependencies: + esbuild: 0.17.x || 0.18.x || 0.19.x || 0.20.x + markdown-wasm: 1.x + peerDependenciesMeta: + esbuild: + optional: true + markdown-wasm: + optional: true + + "@contentlayer2/source-files@0.4.3": + resolution: + { + integrity: sha512-NaLkK9iJcjS7xdqbtEJElEXhdEXv47yj4Ju2TCiZU7/8veQQ67QwtTDesyRMSw4mBt5+hxiAcqvf0hKaHh6lxw==, + } + + "@contentlayer2/source-remote-files@0.4.3": + resolution: + { + integrity: sha512-uow1teWiAJA0WNbqqjEz4FfLPQF6BpdhRfF8JHgX7r8jzqChB5zS8bXX9xJDGhn41oEqR/FDDI1Pn7564H8lsA==, + } + + "@contentlayer2/utils@0.4.3": + resolution: + { + integrity: sha512-E360wNhvGemU1X+uhIwkzA5v+b8U5Q+Gps50uIDAVW6TWV9boUgAGPspSXXcFAIYG+H4z2eV3jTZjFCwyXGMxg==, + } + peerDependencies: + 
"@effect-ts/otel-node": "*" + peerDependenciesMeta: + "@effect-ts/otel-node": + optional: true + + "@crowdin/cli@3.19.4": + resolution: + { + integrity: sha512-j0SiRGKOH/Pa/TdBeIxBBRrByHPqmVqWVo/LSjnri1lLPGywjcu9kB+pib7P4wmI00jgcVu+80yGdun5zRcDNQ==, + } + hasBin: true + + "@effect-ts/core@0.60.5": + resolution: + { + integrity: sha512-qi1WrtJA90XLMnj2hnUszW9Sx4dXP03ZJtCc5DiUBIOhF4Vw7plfb65/bdBySPoC9s7zy995TdUX1XBSxUkl5w==, + } + + "@effect-ts/otel-sdk-trace-node@0.15.1": + resolution: + { + integrity: sha512-a2sF0ylmn8xOJs8fNeT/spJ1gUcsksAJCALxo9WOfuTCMtTwMVtVhCKEPEeQoL7wFqU+JgPkVdP91+FJ/Rkeow==, + } + peerDependencies: + "@effect-ts/core": ^0.60.2 + "@opentelemetry/api": ^1.4.0 + "@opentelemetry/core": ^1.13.0 + "@opentelemetry/sdk-trace-base": ^1.13.0 + "@opentelemetry/sdk-trace-node": ^1.13.0 + + "@effect-ts/otel@0.15.1": + resolution: + { + integrity: sha512-AmZJHl7t0+Peh7Yb2+hqn6r9+rd9/UfeA4AMV9h0YGTdOyouyFfD3wzWlxnAUzAQ4Lrod4kC7Noruret4EpqpA==, + } + peerDependencies: + "@effect-ts/core": ^0.60.2 + "@opentelemetry/api": ^1.4.0 + "@opentelemetry/core": ^1.13.0 + "@opentelemetry/sdk-trace-base": ^1.13.0 + + "@effect-ts/system@0.57.5": + resolution: + { + integrity: sha512-/crHGujo0xnuHIYNc1VgP0HGJGFSoSqq88JFXe6FmFyXPpWt8Xu39LyLg7rchsxfXFeEdA9CrIZvLV5eswXV5g==, + } + + "@esbuild-plugins/node-resolve@0.2.2": + resolution: + { + integrity: sha512-+t5FdX3ATQlb53UFDBRb4nqjYBz492bIrnVWvpQHpzZlu9BQL5HasMZhqc409ygUwOWCXZhrWr6NyZ6T6Y+cxw==, + } + peerDependencies: + esbuild: "*" + + "@esbuild/aix-ppc64@0.20.2": + resolution: + { + integrity: sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==, + } + engines: { node: ">=12" } + cpu: [ppc64] + os: [aix] + + "@esbuild/android-arm64@0.20.2": + resolution: + { + integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==, + } + engines: { node: ">=12" } + cpu: [arm64] + os: [android] + + "@esbuild/android-arm@0.20.2": + resolution: + { + integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==, + } + engines: { node: ">=12" } + cpu: [arm] + os: [android] + + "@esbuild/android-x64@0.20.2": + resolution: + { + integrity: sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==, + } + engines: { node: ">=12" } + cpu: [x64] + os: [android] + + "@esbuild/darwin-arm64@0.20.2": + resolution: + { + integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==, + } + engines: { node: ">=12" } + cpu: [arm64] + os: [darwin] + + "@esbuild/darwin-x64@0.20.2": + resolution: + { + integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==, + } + engines: { node: ">=12" } + cpu: [x64] + os: [darwin] + + "@esbuild/freebsd-arm64@0.20.2": + resolution: + { + integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==, + } + engines: { node: ">=12" } + cpu: [arm64] + os: [freebsd] + + "@esbuild/freebsd-x64@0.20.2": + resolution: + { + integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==, + } + engines: { node: ">=12" } + cpu: [x64] + os: [freebsd] + + "@esbuild/linux-arm64@0.20.2": + resolution: + { + integrity: sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==, + } + engines: { node: ">=12" } + cpu: [arm64] + os: [linux] + + "@esbuild/linux-arm@0.20.2": + 
resolution: + { + integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==, + } + engines: { node: ">=12" } + cpu: [arm] + os: [linux] + + "@esbuild/linux-ia32@0.20.2": + resolution: + { + integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==, + } + engines: { node: ">=12" } + cpu: [ia32] + os: [linux] + + "@esbuild/linux-loong64@0.20.2": + resolution: + { + integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==, + } + engines: { node: ">=12" } + cpu: [loong64] + os: [linux] + + "@esbuild/linux-mips64el@0.20.2": + resolution: + { + integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==, + } + engines: { node: ">=12" } + cpu: [mips64el] + os: [linux] + + "@esbuild/linux-ppc64@0.20.2": + resolution: + { + integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==, + } + engines: { node: ">=12" } + cpu: [ppc64] + os: [linux] + + "@esbuild/linux-riscv64@0.20.2": + resolution: + { + integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==, + } + engines: { node: ">=12" } + cpu: [riscv64] + os: [linux] + + "@esbuild/linux-s390x@0.20.2": + resolution: + { + integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==, + } + engines: { node: ">=12" } + cpu: [s390x] + os: [linux] + + "@esbuild/linux-x64@0.20.2": + resolution: + { + integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==, + } + engines: { node: ">=12" } + cpu: [x64] + os: [linux] + + "@esbuild/netbsd-x64@0.20.2": + resolution: + { + integrity: sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==, + } + engines: { node: ">=12" } + cpu: [x64] + os: [netbsd] + + "@esbuild/openbsd-x64@0.20.2": + resolution: + { + integrity: sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==, + } + engines: { node: ">=12" } + cpu: [x64] + os: [openbsd] + + "@esbuild/sunos-x64@0.20.2": + resolution: + { + integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==, + } + engines: { node: ">=12" } + cpu: [x64] + os: [sunos] + + "@esbuild/win32-arm64@0.20.2": + resolution: + { + integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==, + } + engines: { node: ">=12" } + cpu: [arm64] + os: [win32] + + "@esbuild/win32-ia32@0.20.2": + resolution: + { + integrity: sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==, + } + engines: { node: ">=12" } + cpu: [ia32] + os: [win32] + + "@esbuild/win32-x64@0.20.2": + resolution: + { + integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==, + } + engines: { node: ">=12" } + cpu: [x64] + os: [win32] + + "@eslint-community/eslint-utils@4.4.0": + resolution: + { + integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + + "@eslint-community/regexpp@4.11.1": + resolution: + { + integrity: sha512-m4DVN9ZqskZoLU5GlWZadwDnYo3vAEydiUayB9widCl9ffWx2IvPnp6n3on5rJmziJSw9Bv+Z3ChDVdMwXCY8Q==, + } + 
engines: { node: ^12.0.0 || ^14.0.0 || >=16.0.0 } + + "@eslint/eslintrc@2.1.4": + resolution: + { + integrity: sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + + "@eslint/js@8.44.0": + resolution: + { + integrity: sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + + "@fal-works/esbuild-plugin-global-externals@2.1.2": + resolution: + { + integrity: sha512-cEee/Z+I12mZcFJshKcCqC8tuX5hG3s+d+9nZ3LabqKF1vKdF41B92pJVCBggjAGORAeOzyyDDKrZwIkLffeOQ==, + } + + "@grpc/grpc-js@1.11.3": + resolution: + { + integrity: sha512-i9UraDzFHMR+Iz/MhFLljT+fCpgxZ3O6CxwGJ8YuNYHJItIHUzKJpW2LvoFZNnGPwqc9iWy9RAucxV0JoR9aUQ==, + } + engines: { node: ">=12.10.0" } + + "@grpc/proto-loader@0.7.13": + resolution: + { + integrity: sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==, + } + engines: { node: ">=6" } + hasBin: true + + "@humanwhocodes/config-array@0.11.14": + resolution: + { + integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==, + } + engines: { node: ">=10.10.0" } + deprecated: Use @eslint/config-array instead + + "@humanwhocodes/module-importer@1.0.1": + resolution: + { + integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==, + } + engines: { node: ">=12.22" } + + "@humanwhocodes/object-schema@2.0.3": + resolution: + { + integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==, + } + deprecated: Use @eslint/object-schema instead + + "@isaacs/cliui@8.0.2": + resolution: + { + integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==, + } + engines: { node: ">=12" } + + "@jridgewell/gen-mapping@0.3.5": + resolution: + { + integrity: sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==, + } + engines: { node: ">=6.0.0" } + + "@jridgewell/resolve-uri@3.1.2": + resolution: + { + integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==, + } + engines: { node: ">=6.0.0" } + + "@jridgewell/set-array@1.2.1": + resolution: + { + integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==, + } + engines: { node: ">=6.0.0" } + + "@jridgewell/sourcemap-codec@1.5.0": + resolution: + { + integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==, + } + + "@jridgewell/trace-mapping@0.3.25": + resolution: + { + integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==, + } + + "@js-sdsl/ordered-map@4.4.2": + resolution: + { + integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==, + } + + "@js-temporal/polyfill@0.4.4": + resolution: + { + integrity: sha512-2X6bvghJ/JAoZO52lbgyAPFj8uCflhTo2g7nkFzEQdXd/D8rEeD4HtmTEpmtGCva260fcd66YNXBOYdnmHqSOg==, + } + engines: { node: ">=12" } + + "@jsonjoy.com/base64@1.1.2": + resolution: + { + integrity: sha512-q6XAnWQDIMA3+FTiOYajoYqySkO+JSat0ytXGSuRdq9uXE7o92gzuQwQM14xaCRlBLGq3v5miDGC4vkVTn54xA==, + } + engines: { node: ">=10.0" } + peerDependencies: + tslib: "2" + + "@jsonjoy.com/json-pack@1.1.0": + resolution: + { + integrity: 
sha512-zlQONA+msXPPwHWZMKFVS78ewFczIll5lXiVPwFPCZUsrOKdxc2AvxU1HoNBmMRhqDZUR9HkC3UOm+6pME6Xsg==, + } + engines: { node: ">=10.0" } + peerDependencies: + tslib: "2" + + "@jsonjoy.com/util@1.3.0": + resolution: + { + integrity: sha512-Cebt4Vk7k1xHy87kHY7KSPLT77A7Ev7IfOblyLZhtYEhrdQ6fX4EoLq3xOQ3O/DRMEh2ok5nyC180E+ABS8Wmw==, + } + engines: { node: ">=10.0" } + peerDependencies: + tslib: "2" + + "@mdx-js/esbuild@3.0.1": + resolution: + { + integrity: sha512-+KZbCKcRjFtRD6qzD+c70Vq/VPVt5LHFsOshNcsdcONkaLTCSjmM7/uj71i3BcP+170f+P4DwVEMtqR/k0t5aw==, + } + peerDependencies: + esbuild: ">=0.14.0" + + "@mdx-js/mdx@3.0.1": + resolution: + { + integrity: sha512-eIQ4QTrOWyL3LWEe/bu6Taqzq2HQvHcyTMaOrI95P2/LmJE7AsfPfgJGuFLPVqBUE1BC1rik3VIhU+s9u72arA==, + } + + "@next/env@14.1.0": + resolution: + { + integrity: sha512-Py8zIo+02ht82brwwhTg36iogzFqGLPXlRGKQw5s+qP/kMNc4MAyDeEwBKDijk6zTIbegEgu8Qy7C1LboslQAw==, + } + + "@next/eslint-plugin-next@14.1.0": + resolution: + { + integrity: sha512-x4FavbNEeXx/baD/zC/SdrvkjSby8nBn8KcCREqk6UuwvwoAPZmaV8TFCAuo/cpovBRTIY67mHhe86MQQm/68Q==, + } + + "@next/swc-darwin-arm64@14.1.0": + resolution: + { + integrity: sha512-nUDn7TOGcIeyQni6lZHfzNoo9S0euXnu0jhsbMOmMJUBfgsnESdjN97kM7cBqQxZa8L/bM9om/S5/1dzCrW6wQ==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [darwin] + + "@next/swc-darwin-x64@14.1.0": + resolution: + { + integrity: sha512-1jgudN5haWxiAl3O1ljUS2GfupPmcftu2RYJqZiMJmmbBT5M1XDffjUtRUzP4W3cBHsrvkfOFdQ71hAreNQP6g==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [darwin] + + "@next/swc-linux-arm64-gnu@14.1.0": + resolution: + { + integrity: sha512-RHo7Tcj+jllXUbK7xk2NyIDod3YcCPDZxj1WLIYxd709BQ7WuRYl3OWUNG+WUfqeQBds6kvZYlc42NJJTNi4tQ==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [linux] + + "@next/swc-linux-arm64-musl@14.1.0": + resolution: + { + integrity: sha512-v6kP8sHYxjO8RwHmWMJSq7VZP2nYCkRVQ0qolh2l6xroe9QjbgV8siTbduED4u0hlk0+tjS6/Tuy4n5XCp+l6g==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [linux] + + "@next/swc-linux-x64-gnu@14.1.0": + resolution: + { + integrity: sha512-zJ2pnoFYB1F4vmEVlb/eSe+VH679zT1VdXlZKX+pE66grOgjmKJHKacf82g/sWE4MQ4Rk2FMBCRnX+l6/TVYzQ==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@next/swc-linux-x64-musl@14.1.0": + resolution: + { + integrity: sha512-rbaIYFt2X9YZBSbH/CwGAjbBG2/MrACCVu2X0+kSykHzHnYH5FjHxwXLkcoJ10cX0aWCEynpu+rP76x0914atg==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + + "@next/swc-win32-arm64-msvc@14.1.0": + resolution: + { + integrity: sha512-o1N5TsYc8f/HpGt39OUQpQ9AKIGApd3QLueu7hXk//2xq5Z9OxmV6sQfNp8C7qYmiOlHYODOGqNNa0e9jvchGQ==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [win32] + + "@next/swc-win32-ia32-msvc@14.1.0": + resolution: + { + integrity: sha512-XXIuB1DBRCFwNO6EEzCTMHT5pauwaSj4SWs7CYnME57eaReAKBXCnkUE80p/pAZcewm7hs+vGvNqDPacEXHVkw==, + } + engines: { node: ">= 10" } + cpu: [ia32] + os: [win32] + + "@next/swc-win32-x64-msvc@14.1.0": + resolution: + { + integrity: sha512-9WEbVRRAqJ3YFVqEZIxUqkiO8l1nool1LmNxygr5HWF8AcSYsEpneUDhmjUVJEzO2A04+oPtZdombzzPPkTtgg==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [win32] + + "@nodelib/fs.scandir@2.1.5": + resolution: + { + integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==, + } + engines: { node: ">= 8" } + + "@nodelib/fs.stat@2.0.5": + resolution: + { + integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==, + } + engines: { node: ">= 8" } + + "@nodelib/fs.walk@1.2.8": + 
resolution: + { + integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==, + } + engines: { node: ">= 8" } + + "@nolyfill/is-core-module@1.0.39": + resolution: + { + integrity: sha512-nn5ozdjYQpUCZlWGuxcJY/KpxkWQs4DcbMCmKojjyrYDEAGy4Ce19NN4v5MduafTwJlbKc99UA8YhSVqq9yPZA==, + } + engines: { node: ">=12.4.0" } + + "@opentelemetry/api-logs@0.51.1": + resolution: + { + integrity: sha512-E3skn949Pk1z2XtXu/lxf6QAZpawuTM/IUEXcAzpiUkTd73Hmvw26FiN3cJuTmkpM5hZzHwkomVdtrh/n/zzwA==, + } + engines: { node: ">=14" } + + "@opentelemetry/api@1.9.0": + resolution: + { + integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==, + } + engines: { node: ">=8.0.0" } + + "@opentelemetry/context-async-hooks@1.26.0": + resolution: + { + integrity: sha512-HedpXXYzzbaoutw6DFLWLDket2FwLkLpil4hGCZ1xYEIMTcivdfwEOISgdbLEWyG3HW52gTq2V9mOVJrONgiwg==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + + "@opentelemetry/core@1.24.1": + resolution: + { + integrity: sha512-wMSGfsdmibI88K9wB498zXY04yThPexo8jvwNNlm542HZB7XrrMRBbAyKJqG8qDRJwIBdBrPMi4V9ZPW/sqrcg==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.9.0" + + "@opentelemetry/core@1.26.0": + resolution: + { + integrity: sha512-1iKxXXE8415Cdv0yjG3G6hQnB5eVEsJce3QaawX8SjDn0mAS0ZM8fAbZZJD4ajvhC15cePvosSCut404KrIIvQ==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + + "@opentelemetry/exporter-trace-otlp-grpc@0.51.1": + resolution: + { + integrity: sha512-P9+Hkszih95ITvldGZ+kXvj9HpD1QfS+PwooyHK72GYA+Bgm+yUSAsDkUkDms8+s9HW6poxURv3LcjaMuBBpVQ==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ^1.0.0 + + "@opentelemetry/otlp-exporter-base@0.51.1": + resolution: + { + integrity: sha512-UYlnOYyDdzo1Gw559EHCzru0RwhvuXCwoH8jGo9J4gO1TE58GjnEmIjomMsKBCym3qWNJfIQXw+9SZCV0DdQNg==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ^1.0.0 + + "@opentelemetry/otlp-grpc-exporter-base@0.51.1": + resolution: + { + integrity: sha512-ZAS+4pq8o7dsugGTwV9s6JMKSxi+guIHdn0acOv0bqj26e9pWDFx5Ky+bI0aY46uR9Y0JyXqY+KAEYM/SO3DFA==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ^1.0.0 + + "@opentelemetry/otlp-transformer@0.51.1": + resolution: + { + integrity: sha512-OppYOXwV9LQqqtYUCywqoOqX/JT9LQ5/FMuPZ//eTkvuHdUC4ZMwz2c6uSoT2R90GWvvGnF1iEqTGyTT3xAt2Q==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.3.0 <1.9.0" + + "@opentelemetry/propagator-b3@1.26.0": + resolution: + { + integrity: sha512-vvVkQLQ/lGGyEy9GT8uFnI047pajSOVnZI2poJqVGD3nJ+B9sFGdlHNnQKophE3lHfnIH0pw2ubrCTjZCgIj+Q==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + + "@opentelemetry/propagator-jaeger@1.26.0": + resolution: + { + integrity: sha512-DelFGkCdaxA1C/QA0Xilszfr0t4YbGd3DjxiCDPh34lfnFr+VkkrjV9S8ZTJvAzfdKERXhfOxIKBoGPJwoSz7Q==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + + "@opentelemetry/resources@1.24.1": + resolution: + { + integrity: sha512-cyv0MwAaPF7O86x5hk3NNgenMObeejZFLJJDVuSeSMIsknlsj3oOZzRv3qSzlwYomXsICfBeFFlxwHQte5mGXQ==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.9.0" + + "@opentelemetry/resources@1.26.0": + resolution: + { + integrity: sha512-CPNYchBE7MBecCSVy0HKpUISEeJOniWqcHaAHpmasZ3j9o6V3AyBzhRc90jdmemq0HOxDr6ylhUbDhBqqPpeNw==, + 
} + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + + "@opentelemetry/sdk-logs@0.51.1": + resolution: + { + integrity: sha512-ULQQtl82b673PpZc5/0EtH4V+BrwVOgKJZEB7tYZnGTG3I98tQVk89S9/JSixomDr++F4ih+LSJTCqIKBz+MQQ==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.4.0 <1.9.0" + "@opentelemetry/api-logs": ">=0.39.1" + + "@opentelemetry/sdk-metrics@1.24.1": + resolution: + { + integrity: sha512-FrAqCbbGao9iKI+Mgh+OsC9+U2YMoXnlDHe06yH7dvavCKzE3S892dGtX54+WhSFVxHR/TMRVJiK/CV93GR0TQ==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.3.0 <1.9.0" + + "@opentelemetry/sdk-trace-base@1.24.1": + resolution: + { + integrity: sha512-zz+N423IcySgjihl2NfjBf0qw1RWe11XIAWVrTNOSSI6dtSPJiVom2zipFB2AEEtJWpv0Iz6DY6+TjnyTV5pWg==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.9.0" + + "@opentelemetry/sdk-trace-base@1.26.0": + resolution: + { + integrity: sha512-olWQldtvbK4v22ymrKLbIcBi9L2SpMO84sCPY54IVsJhP9fRsxJT194C/AVaAuJzLE30EdhhM1VmvVYR7az+cw==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + + "@opentelemetry/sdk-trace-node@1.26.0": + resolution: + { + integrity: sha512-Fj5IVKrj0yeUwlewCRwzOVcr5avTuNnMHWf7GPc1t6WaT78J6CJyF3saZ/0RkZfdeNO8IcBl/bNcWMVZBMRW8Q==, + } + engines: { node: ">=14" } + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + + "@opentelemetry/semantic-conventions@1.24.1": + resolution: + { + integrity: sha512-VkliWlS4/+GHLLW7J/rVBA00uXus1SWvwFvcUDxDwmFxYfg/2VI6ekwdXS28cjI8Qz2ky2BzG8OUHo+WeYIWqw==, + } + engines: { node: ">=14" } + + "@opentelemetry/semantic-conventions@1.27.0": + resolution: + { + integrity: sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg==, + } + engines: { node: ">=14" } + + "@pkgjs/parseargs@0.11.0": + resolution: + { + integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==, + } + engines: { node: ">=14" } + + "@protobufjs/aspromise@1.1.2": + resolution: + { + integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==, + } + + "@protobufjs/base64@1.1.2": + resolution: + { + integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==, + } + + "@protobufjs/codegen@2.0.4": + resolution: + { + integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==, + } + + "@protobufjs/eventemitter@1.1.0": + resolution: + { + integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==, + } + + "@protobufjs/fetch@1.1.0": + resolution: + { + integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==, + } + + "@protobufjs/float@1.0.2": + resolution: + { + integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==, + } + + "@protobufjs/inquire@1.1.0": + resolution: + { + integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==, + } + + "@protobufjs/path@1.1.2": + resolution: + { + integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==, + } + + "@protobufjs/pool@1.1.0": + resolution: + { + integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==, + } + + 
"@protobufjs/utf8@1.1.0": + resolution: + { + integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==, + } + + "@rtsao/scc@1.1.0": + resolution: + { + integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==, + } + + "@rushstack/eslint-patch@1.10.4": + resolution: + { + integrity: sha512-WJgX9nzTqknM393q1QJDJmoW28kUfEnybeTfVNcNAPnIx210RXm2DiXiHzfNPJNIUUb1tJnz/l4QGtJ30PgWmA==, + } + + "@swc/helpers@0.5.2": + resolution: + { + integrity: sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==, + } + + "@types/acorn@4.0.6": + resolution: + { + integrity: sha512-veQTnWP+1D/xbxVrPC3zHnCZRjSrKfhbMUlEA43iMZLu7EsnTtkJklIuwrCPbOi8YkvDQAiW05VQQFvvz9oieQ==, + } + + "@types/debug@4.1.12": + resolution: + { + integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==, + } + + "@types/estree-jsx@1.0.5": + resolution: + { + integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==, + } + + "@types/estree@1.0.6": + resolution: + { + integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==, + } + + "@types/hast@3.0.4": + resolution: + { + integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==, + } + + "@types/json5@0.0.29": + resolution: + { + integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==, + } + + "@types/mdast@4.0.4": + resolution: + { + integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==, + } + + "@types/mdx@2.0.13": + resolution: + { + integrity: sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==, + } + + "@types/ms@0.7.34": + resolution: + { + integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==, + } + + "@types/node@20.11.17": + resolution: + { + integrity: sha512-QmgQZGWu1Yw9TDyAP9ZzpFJKynYNeOvwMJmaxABfieQoVoiVOS6MN1WSpqpRcbeA5+RW82kraAVxCCJg+780Qw==, + } + + "@types/prop-types@15.7.13": + resolution: + { + integrity: sha512-hCZTSvwbzWGvhqxp/RqVqwU999pBf2vp7hzIjiYOsl8wqOmUxkQ6ddw1cV3l8811+kdUFus/q4d1Y3E3SyEifA==, + } + + "@types/react-dom@18.2.7": + resolution: + { + integrity: sha512-GRaAEriuT4zp9N4p1i8BDBYmEyfo+xQ3yHjJU4eiK5NDa1RmUZG+unZABUTK4/Ox/M+GaHwb6Ow8rUITrtjszA==, + } + + "@types/react@18.2.15": + resolution: + { + integrity: sha512-oEjE7TQt1fFTFSbf8kkNuc798ahTUzn3Le67/PWjE8MAfYAD/qB7O8hSTcromLFqHCt9bcdOg5GXMokzTjJ5SA==, + } + + "@types/resolve@1.20.6": + resolution: + { + integrity: sha512-A4STmOXPhMUtHH+S6ymgE2GiBSMqf4oTvcQZMcHzokuTLVYzXTB8ttjcgxOVaAp2lGwEdzZ0J+cRbbeevQj1UQ==, + } + + "@types/scheduler@0.23.0": + resolution: + { + integrity: sha512-YIoDCTH3Af6XM5VuwGG/QL/CJqga1Zm3NkU3HZ4ZHK2fRMPYP1VczsTUqtsf43PH/iJNVlPHAo2oWX7BSdB2Hw==, + } + + "@types/unist@2.0.11": + resolution: + { + integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==, + } + + "@types/unist@3.0.3": + resolution: + { + integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==, + } + + "@typescript-eslint/parser@6.21.0": + resolution: + { + integrity: sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ==, + } + engines: { node: 
^16.0.0 || >=18.0.0 } + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + typescript: "*" + peerDependenciesMeta: + typescript: + optional: true + + "@typescript-eslint/scope-manager@6.21.0": + resolution: + { + integrity: sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg==, + } + engines: { node: ^16.0.0 || >=18.0.0 } + + "@typescript-eslint/types@6.21.0": + resolution: + { + integrity: sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg==, + } + engines: { node: ^16.0.0 || >=18.0.0 } + + "@typescript-eslint/typescript-estree@6.21.0": + resolution: + { + integrity: sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ==, + } + engines: { node: ^16.0.0 || >=18.0.0 } + peerDependencies: + typescript: "*" + peerDependenciesMeta: + typescript: + optional: true + + "@typescript-eslint/visitor-keys@6.21.0": + resolution: + { + integrity: sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A==, + } + engines: { node: ^16.0.0 || >=18.0.0 } + + "@ungap/structured-clone@1.2.0": + resolution: + { + integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==, + } + + acorn-jsx@5.3.2: + resolution: + { + integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==, + } + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn@8.12.1: + resolution: + { + integrity: sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==, + } + engines: { node: ">=0.4.0" } + hasBin: true + + ajv@6.12.6: + resolution: + { + integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==, + } + + ansi-escapes@7.0.0: + resolution: + { + integrity: sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==, + } + engines: { node: ">=18" } + + ansi-regex@5.0.1: + resolution: + { + integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==, + } + engines: { node: ">=8" } + + ansi-regex@6.1.0: + resolution: + { + integrity: sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==, + } + engines: { node: ">=12" } + + ansi-styles@4.3.0: + resolution: + { + integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==, + } + engines: { node: ">=8" } + + ansi-styles@6.2.1: + resolution: + { + integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==, + } + engines: { node: ">=12" } + + any-promise@1.3.0: + resolution: + { + integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==, + } + + anymatch@3.1.3: + resolution: + { + integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==, + } + engines: { node: ">= 8" } + + arg@5.0.2: + resolution: + { + integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==, + } + + argparse@1.0.10: + resolution: + { + integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==, + } + + argparse@2.0.1: + resolution: + { + integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==, + } + + aria-query@5.1.3: + 
resolution: + { + integrity: sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ==, + } + + array-buffer-byte-length@1.0.1: + resolution: + { + integrity: sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==, + } + engines: { node: ">= 0.4" } + + array-includes@3.1.8: + resolution: + { + integrity: sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==, + } + engines: { node: ">= 0.4" } + + array-timsort@1.0.3: + resolution: + { + integrity: sha512-/+3GRL7dDAGEfM6TseQk/U+mi18TU2Ms9I3UlLdUMhz2hbvGNTKdj9xniwXfUqgYhHxRx0+8UnKkvlNwVU+cWQ==, + } + + array-union@2.1.0: + resolution: + { + integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==, + } + engines: { node: ">=8" } + + array.prototype.findlast@1.2.5: + resolution: + { + integrity: sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==, + } + engines: { node: ">= 0.4" } + + array.prototype.findlastindex@1.2.5: + resolution: + { + integrity: sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==, + } + engines: { node: ">= 0.4" } + + array.prototype.flat@1.3.2: + resolution: + { + integrity: sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==, + } + engines: { node: ">= 0.4" } + + array.prototype.flatmap@1.3.2: + resolution: + { + integrity: sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==, + } + engines: { node: ">= 0.4" } + + array.prototype.tosorted@1.1.4: + resolution: + { + integrity: sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==, + } + engines: { node: ">= 0.4" } + + arraybuffer.prototype.slice@1.0.3: + resolution: + { + integrity: sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==, + } + engines: { node: ">= 0.4" } + + ast-types-flow@0.0.8: + resolution: + { + integrity: sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ==, + } + + astring@1.9.0: + resolution: + { + integrity: sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==, + } + hasBin: true + + autoprefixer@10.4.14: + resolution: + { + integrity: sha512-FQzyfOsTlwVzjHxKEqRIAdJx9niO6VCBCoEwax/VLSoQF29ggECcPuBqUMZ+u8jCZOPSy8b8/8KnuFbp0SaFZQ==, + } + engines: { node: ^10 || ^12 || >=14 } + hasBin: true + peerDependencies: + postcss: ^8.1.0 + + available-typed-arrays@1.0.7: + resolution: + { + integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==, + } + engines: { node: ">= 0.4" } + + axe-core@4.10.0: + resolution: + { + integrity: sha512-Mr2ZakwQ7XUAjp7pAwQWRhhK8mQQ6JAaNWSjmjxil0R8BPioMtQsTLOolGYkji1rcL++3dCqZA3zWqpT+9Ew6g==, + } + engines: { node: ">=4" } + + axobject-query@4.1.0: + resolution: + { + integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==, + } + engines: { node: ">= 0.4" } + + bail@2.0.2: + resolution: + { + integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==, + } + + balanced-match@1.0.2: + resolution: + { + integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==, + } + + binary-extensions@2.3.0: + resolution: + { + integrity: 
sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==, + } + engines: { node: ">=8" } + + brace-expansion@1.1.11: + resolution: + { + integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==, + } + + brace-expansion@2.0.1: + resolution: + { + integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==, + } + + braces@3.0.3: + resolution: + { + integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==, + } + engines: { node: ">=8" } + + browserslist@4.24.0: + resolution: + { + integrity: sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==, + } + engines: { node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7 } + hasBin: true + + buffer-crc32@0.2.13: + resolution: + { + integrity: sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==, + } + + buffer-from@1.1.2: + resolution: + { + integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==, + } + + busboy@1.6.0: + resolution: + { + integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==, + } + engines: { node: ">=10.16.0" } + + call-bind@1.0.7: + resolution: + { + integrity: sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==, + } + engines: { node: ">= 0.4" } + + callsites@3.1.0: + resolution: + { + integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==, + } + engines: { node: ">=6" } + + camel-case@4.1.2: + resolution: + { + integrity: sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==, + } + + camelcase-css@2.0.1: + resolution: + { + integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==, + } + engines: { node: ">= 6" } + + caniuse-lite@1.0.30001664: + resolution: + { + integrity: sha512-AmE7k4dXiNKQipgn7a2xg558IRqPN3jMQY/rOsbxDhrd0tyChwbITBfiwtnqz8bi2M5mIWbxAYBvk7W7QBUS2g==, + } + + ccount@2.0.1: + resolution: + { + integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==, + } + + chalk@4.1.2: + resolution: + { + integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==, + } + engines: { node: ">=10" } + + chalk@5.3.0: + resolution: + { + integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==, + } + engines: { node: ^12.17.0 || ^14.13 || >=16.0.0 } + + character-entities-html4@2.1.0: + resolution: + { + integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==, + } + + character-entities-legacy@3.0.0: + resolution: + { + integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==, + } + + character-entities@2.0.2: + resolution: + { + integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==, + } + + character-reference-invalid@2.0.1: + resolution: + { + integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==, + } + + chokidar@3.6.0: + resolution: + { + integrity: 
sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==, + } + engines: { node: ">= 8.10.0" } + + chownr@2.0.0: + resolution: + { + integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==, + } + engines: { node: ">=10" } + + cli-cursor@5.0.0: + resolution: + { + integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==, + } + engines: { node: ">=18" } + + cli-truncate@4.0.0: + resolution: + { + integrity: sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA==, + } + engines: { node: ">=18" } + + client-only@0.0.1: + resolution: + { + integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==, + } + + clipanion@3.2.1: + resolution: + { + integrity: sha512-dYFdjLb7y1ajfxQopN05mylEpK9ZX0sO1/RfMXdfmwjlIsPkbh4p7A682x++zFPLDCo1x3p82dtljHf5cW2LKA==, + } + peerDependencies: + typanion: "*" + + cliui@8.0.1: + resolution: + { + integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==, + } + engines: { node: ">=12" } + + collapse-white-space@2.1.0: + resolution: + { + integrity: sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==, + } + + color-convert@2.0.1: + resolution: + { + integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==, + } + engines: { node: ">=7.0.0" } + + color-name@1.1.4: + resolution: + { + integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==, + } + + colorette@2.0.20: + resolution: + { + integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==, + } + + comma-separated-tokens@2.0.3: + resolution: + { + integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==, + } + + command-exists-promise@2.0.2: + resolution: + { + integrity: sha512-T6PB6vdFrwnHXg/I0kivM3DqaCGZLjjYSOe0a5WgFKcz1sOnmOeIjnhQPXVXX3QjVbLyTJ85lJkX6lUpukTzaA==, + } + engines: { node: ">=6" } + + commander@12.1.0: + resolution: + { + integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==, + } + engines: { node: ">=18" } + + commander@4.1.1: + resolution: + { + integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==, + } + engines: { node: ">= 6" } + + comment-json@4.2.5: + resolution: + { + integrity: sha512-bKw/r35jR3HGt5PEPm1ljsQQGyCrR8sFGNiN5L+ykDHdpO8Smxkrkla9Yi6NkQyUrb8V54PGhfMs6NrIwtxtdw==, + } + engines: { node: ">= 6" } + + concat-map@0.0.1: + resolution: { integrity: sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= } + + concurrently@8.2.2: + resolution: + { + integrity: sha512-1dP4gpXFhei8IOtlXRE/T/4H88ElHgTiUzh71YUmtjTEHMSRS2Z/fgOxHSxxusGHogsRfxNq1vyAwxSC+EVyDg==, + } + engines: { node: ^14.13.0 || >=16.0.0 } + hasBin: true + + contentlayer2@0.4.6: + resolution: + { + integrity: sha512-EhdabpVsn8u3EkoovGrLB/sIxWUlVJGNiYal9rZn0XJRjIyncGrhz9EJ9gn+z3cRHYUdHCuCMLW/ev6isgKXYw==, + } + engines: { node: ">=14.18" } + hasBin: true + + core-util-is@1.0.3: + resolution: + { + integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==, + } + + cross-spawn@7.0.3: + resolution: + { + integrity: 
sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==, + } + engines: { node: ">= 8" } + + cssesc@3.0.0: + resolution: + { + integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==, + } + engines: { node: ">=4" } + hasBin: true + + csstype@3.1.3: + resolution: + { + integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==, + } + + damerau-levenshtein@1.0.8: + resolution: + { + integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==, + } + + data-view-buffer@1.0.1: + resolution: + { + integrity: sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==, + } + engines: { node: ">= 0.4" } + + data-view-byte-length@1.0.1: + resolution: + { + integrity: sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==, + } + engines: { node: ">= 0.4" } + + data-view-byte-offset@1.0.0: + resolution: + { + integrity: sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==, + } + engines: { node: ">= 0.4" } + + date-fns@2.30.0: + resolution: + { + integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==, + } + engines: { node: ">=0.11" } + + debug@3.2.7: + resolution: + { + integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==, + } + peerDependencies: + supports-color: "*" + peerDependenciesMeta: + supports-color: + optional: true + + debug@4.3.7: + resolution: + { + integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==, + } + engines: { node: ">=6.0" } + peerDependencies: + supports-color: "*" + peerDependenciesMeta: + supports-color: + optional: true + + decode-named-character-reference@1.0.2: + resolution: + { + integrity: sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==, + } + + deep-equal@2.2.3: + resolution: + { + integrity: sha512-ZIwpnevOurS8bpT4192sqAowWM76JDKSHYzMLty3BZGSswgq6pBaH3DhCSW5xVAZICZyKdOBPjwww5wfgT/6PA==, + } + engines: { node: ">= 0.4" } + + deep-is@0.1.4: + resolution: + { + integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==, + } + + define-data-property@1.1.4: + resolution: + { + integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==, + } + engines: { node: ">= 0.4" } + + define-properties@1.2.1: + resolution: + { + integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==, + } + engines: { node: ">= 0.4" } + + dequal@2.0.3: + resolution: + { + integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==, + } + engines: { node: ">=6" } + + devlop@1.1.0: + resolution: + { + integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==, + } + + didyoumean@1.2.2: + resolution: + { + integrity: sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==, + } + + dir-glob@3.0.1: + resolution: + { + integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==, + } + engines: { node: ">=8" } + + dlv@1.1.3: + resolution: + { + integrity: 
sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==, + } + + doctrine@2.1.0: + resolution: + { + integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==, + } + engines: { node: ">=0.10.0" } + + doctrine@3.0.0: + resolution: + { + integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==, + } + engines: { node: ">=6.0.0" } + + eastasianwidth@0.2.0: + resolution: + { + integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==, + } + + electron-to-chromium@1.5.29: + resolution: + { + integrity: sha512-PF8n2AlIhCKXQ+gTpiJi0VhcHDb69kYX4MtCiivctc2QD3XuNZ/XIOlbGzt7WAjjEev0TtaH6Cu3arZExm5DOw==, + } + + emoji-regex@10.4.0: + resolution: + { + integrity: sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==, + } + + emoji-regex@8.0.0: + resolution: + { + integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==, + } + + emoji-regex@9.2.2: + resolution: + { + integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==, + } + + enhanced-resolve@5.17.1: + resolution: + { + integrity: sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==, + } + engines: { node: ">=10.13.0" } + + environment@1.1.0: + resolution: + { + integrity: sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==, + } + engines: { node: ">=18" } + + es-abstract@1.23.3: + resolution: + { + integrity: sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==, + } + engines: { node: ">= 0.4" } + + es-define-property@1.0.0: + resolution: + { + integrity: sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==, + } + engines: { node: ">= 0.4" } + + es-errors@1.3.0: + resolution: + { + integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==, + } + engines: { node: ">= 0.4" } + + es-get-iterator@1.1.3: + resolution: + { + integrity: sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==, + } + + es-iterator-helpers@1.0.19: + resolution: + { + integrity: sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw==, + } + engines: { node: ">= 0.4" } + + es-object-atoms@1.0.0: + resolution: + { + integrity: sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==, + } + engines: { node: ">= 0.4" } + + es-set-tostringtag@2.0.3: + resolution: + { + integrity: sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==, + } + engines: { node: ">= 0.4" } + + es-shim-unscopables@1.0.2: + resolution: + { + integrity: sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==, + } + + es-to-primitive@1.2.1: + resolution: + { + integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==, + } + engines: { node: ">= 0.4" } + + esbuild@0.20.2: + resolution: + { + integrity: sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==, + } + engines: { node: ">=12" } + hasBin: true + + escalade@3.2.0: + resolution: + { + integrity: 
sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==, + } + engines: { node: ">=6" } + + escape-string-regexp@4.0.0: + resolution: + { + integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==, + } + engines: { node: ">=10" } + + escape-string-regexp@5.0.0: + resolution: + { + integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==, + } + engines: { node: ">=12" } + + eslint-config-next@14.1.0: + resolution: + { + integrity: sha512-SBX2ed7DoRFXC6CQSLc/SbLY9Ut6HxNB2wPTcoIWjUMd7aF7O/SIE7111L8FdZ9TXsNV4pulUDnfthpyPtbFUg==, + } + peerDependencies: + eslint: ^7.23.0 || ^8.0.0 + typescript: ">=3.3.1" + peerDependenciesMeta: + typescript: + optional: true + + eslint-import-resolver-node@0.3.9: + resolution: + { + integrity: sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==, + } + + eslint-import-resolver-typescript@3.6.3: + resolution: + { + integrity: sha512-ud9aw4szY9cCT1EWWdGv1L1XR6hh2PaRWif0j2QjQ0pgTY/69iw+W0Z4qZv5wHahOl8isEr+k/JnyAqNQkLkIA==, + } + engines: { node: ^14.18.0 || >=16.0.0 } + peerDependencies: + eslint: "*" + eslint-plugin-import: "*" + eslint-plugin-import-x: "*" + peerDependenciesMeta: + eslint-plugin-import: + optional: true + eslint-plugin-import-x: + optional: true + + eslint-module-utils@2.12.0: + resolution: + { + integrity: sha512-wALZ0HFoytlyh/1+4wuZ9FJCD/leWHQzzrxJ8+rebyReSLk7LApMyd3WJaLVoN+D5+WIdJyDK1c6JnE65V4Zyg==, + } + engines: { node: ">=4" } + peerDependencies: + "@typescript-eslint/parser": "*" + eslint: "*" + eslint-import-resolver-node: "*" + eslint-import-resolver-typescript: "*" + eslint-import-resolver-webpack: "*" + peerDependenciesMeta: + "@typescript-eslint/parser": + optional: true + eslint: + optional: true + eslint-import-resolver-node: + optional: true + eslint-import-resolver-typescript: + optional: true + eslint-import-resolver-webpack: + optional: true + + eslint-plugin-import@2.30.0: + resolution: + { + integrity: sha512-/mHNE9jINJfiD2EKkg1BKyPyUk4zdnT54YgbOgfjSakWT5oyX/qQLVNTkehyfpcMxZXMy1zyonZ2v7hZTX43Yw==, + } + engines: { node: ">=4" } + peerDependencies: + "@typescript-eslint/parser": "*" + eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 + peerDependenciesMeta: + "@typescript-eslint/parser": + optional: true + + eslint-plugin-jsx-a11y@6.10.0: + resolution: + { + integrity: sha512-ySOHvXX8eSN6zz8Bywacm7CvGNhUtdjvqfQDVe6020TUK34Cywkw7m0KsCCk1Qtm9G1FayfTN1/7mMYnYO2Bhg==, + } + engines: { node: ">=4.0" } + peerDependencies: + eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9 + + eslint-plugin-react-hooks@4.6.2: + resolution: + { + integrity: sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ==, + } + engines: { node: ">=10" } + peerDependencies: + eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 + + eslint-plugin-react@7.36.1: + resolution: + { + integrity: sha512-/qwbqNXZoq+VP30s1d4Nc1C5GTxjJQjk4Jzs4Wq2qzxFM7dSmuG2UkIjg2USMLh3A/aVcUNrK7v0J5U1XEGGwA==, + } + engines: { node: ">=4" } + peerDependencies: + eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7 + + eslint-scope@7.2.2: + resolution: + { + integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + + eslint-visitor-keys@3.4.3: + resolution: + { + integrity: 
sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + + eslint@8.45.0: + resolution: + { + integrity: sha512-pd8KSxiQpdYRfYa9Wufvdoct3ZPQQuVuU5O6scNgMuOMYuxvH0IGaYK0wUFjo4UYYQQCUndlXiMbnxopwvvTiw==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + hasBin: true + + espree@9.6.1: + resolution: + { + integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + + esprima@4.0.1: + resolution: + { + integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==, + } + engines: { node: ">=4" } + hasBin: true + + esquery@1.6.0: + resolution: + { + integrity: sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==, + } + engines: { node: ">=0.10" } + + esrecurse@4.3.0: + resolution: + { + integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==, + } + engines: { node: ">=4.0" } + + estraverse@5.3.0: + resolution: + { + integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==, + } + engines: { node: ">=4.0" } + + estree-util-attach-comments@3.0.0: + resolution: + { + integrity: sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw==, + } + + estree-util-build-jsx@3.0.1: + resolution: + { + integrity: sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ==, + } + + estree-util-is-identifier-name@3.0.0: + resolution: + { + integrity: sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==, + } + + estree-util-to-js@2.0.0: + resolution: + { + integrity: sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg==, + } + + estree-util-value-to-estree@3.1.2: + resolution: + { + integrity: sha512-S0gW2+XZkmsx00tU2uJ4L9hUT7IFabbml9pHh2WQqFmAbxit++YGZne0sKJbNwkj9Wvg9E4uqWl4nCIFQMmfag==, + } + + estree-util-visit@2.0.0: + resolution: + { + integrity: sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==, + } + + estree-walker@3.0.3: + resolution: + { + integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==, + } + + esutils@2.0.3: + resolution: + { + integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==, + } + engines: { node: ">=0.10.0" } + + eventemitter3@5.0.1: + resolution: + { + integrity: sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==, + } + + execa@8.0.1: + resolution: + { + integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==, + } + engines: { node: ">=16.17" } + + extend-shallow@2.0.1: + resolution: + { + integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==, + } + engines: { node: ">=0.10.0" } + + extend@3.0.2: + resolution: + { + integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==, + } + + fast-deep-equal@3.1.3: + resolution: + { + integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==, + } + + fast-glob@3.3.2: + resolution: + { + integrity: 
sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==, + } + engines: { node: ">=8.6.0" } + + fast-json-stable-stringify@2.1.0: + resolution: + { + integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==, + } + + fast-levenshtein@2.0.6: + resolution: + { + integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==, + } + + fastq@1.17.1: + resolution: + { + integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==, + } + + fault@2.0.1: + resolution: + { + integrity: sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==, + } + + file-entry-cache@6.0.1: + resolution: + { + integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==, + } + engines: { node: ^10.12.0 || >=12.0.0 } + + fill-range@7.1.1: + resolution: + { + integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==, + } + engines: { node: ">=8" } + + find-up@5.0.0: + resolution: + { + integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==, + } + engines: { node: ">=10" } + + flat-cache@3.2.0: + resolution: + { + integrity: sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==, + } + engines: { node: ^10.12.0 || >=12.0.0 } + + flatted@3.3.1: + resolution: + { + integrity: sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==, + } + + for-each@0.3.3: + resolution: + { + integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==, + } + + foreground-child@3.3.0: + resolution: + { + integrity: sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==, + } + engines: { node: ">=14" } + + format@0.2.2: + resolution: + { + integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==, + } + engines: { node: ">=0.4.x" } + + fraction.js@4.3.7: + resolution: + { + integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==, + } + + fs-minipass@2.1.0: + resolution: + { + integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==, + } + engines: { node: ">= 8" } + + fs.realpath@1.0.0: + resolution: + { + integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==, + } + + fsevents@2.3.3: + resolution: + { + integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==, + } + engines: { node: ^8.16.0 || ^10.6.0 || >=11.0.0 } + os: [darwin] + + function-bind@1.1.2: + resolution: + { + integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==, + } + + function.prototype.name@1.1.6: + resolution: + { + integrity: sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==, + } + engines: { node: ">= 0.4" } + + functions-have-names@1.2.3: + resolution: + { + integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==, + } + + get-caller-file@2.0.5: + resolution: + { + integrity: 
sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==, + } + engines: { node: 6.* || 8.* || >= 10.* } + + get-east-asian-width@1.2.0: + resolution: + { + integrity: sha512-2nk+7SIVb14QrgXFHcm84tD4bKQz0RxPuMT8Ag5KPOq7J5fEmAg0UbXdTOSHqNuHSU28k55qnceesxXRZGzKWA==, + } + engines: { node: ">=18" } + + get-intrinsic@1.2.4: + resolution: + { + integrity: sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==, + } + engines: { node: ">= 0.4" } + + get-stream@8.0.1: + resolution: + { + integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==, + } + engines: { node: ">=16" } + + get-symbol-description@1.0.2: + resolution: + { + integrity: sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==, + } + engines: { node: ">= 0.4" } + + get-tsconfig@4.8.1: + resolution: + { + integrity: sha512-k9PN+cFBmaLWtVz29SkUoqU5O0slLuHJXt/2P+tMVFT+phsSGXGkp9t3rQIqdz0e+06EHNGs3oM6ZX1s2zHxRg==, + } + + glob-parent@5.1.2: + resolution: + { + integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==, + } + engines: { node: ">= 6" } + + glob-parent@6.0.2: + resolution: + { + integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==, + } + engines: { node: ">=10.13.0" } + + glob@10.3.10: + resolution: + { + integrity: sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==, + } + engines: { node: ">=16 || 14 >=14.17" } + hasBin: true + + glob@10.4.5: + resolution: + { + integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==, + } + hasBin: true + + glob@7.2.3: + resolution: + { + integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==, + } + deprecated: Glob versions prior to v9 are no longer supported + + globals@13.24.0: + resolution: + { + integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==, + } + engines: { node: ">=8" } + + globalthis@1.0.4: + resolution: + { + integrity: sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==, + } + engines: { node: ">= 0.4" } + + globby@11.1.0: + resolution: + { + integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==, + } + engines: { node: ">=10" } + + gopd@1.0.1: + resolution: + { + integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==, + } + + graceful-fs@4.2.11: + resolution: + { + integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==, + } + + graphemer@1.4.0: + resolution: + { + integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==, + } + + gray-matter@4.0.3: + resolution: + { + integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==, + } + engines: { node: ">=6.0" } + + has-bigints@1.0.2: + resolution: + { + integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==, + } + + has-flag@4.0.0: + resolution: + { + integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==, + } + engines: { node: ">=8" } + + has-own-prop@2.0.0: + 
resolution: + { + integrity: sha512-Pq0h+hvsVm6dDEa8x82GnLSYHOzNDt7f0ddFa3FqcQlgzEiptPqL+XrOJNavjOzSYiYWIrgeVYYgGlLmnxwilQ==, + } + engines: { node: ">=8" } + + has-property-descriptors@1.0.2: + resolution: + { + integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==, + } + + has-proto@1.0.3: + resolution: + { + integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==, + } + engines: { node: ">= 0.4" } + + has-symbols@1.0.3: + resolution: + { + integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==, + } + engines: { node: ">= 0.4" } + + has-tostringtag@1.0.2: + resolution: + { + integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==, + } + engines: { node: ">= 0.4" } + + hash-wasm@4.11.0: + resolution: + { + integrity: sha512-HVusNXlVqHe0fzIzdQOGolnFN6mX/fqcrSAOcTBXdvzrXVHwTz11vXeKRmkR5gTuwVpvHZEIyKoePDvuAR+XwQ==, + } + + hasown@2.0.2: + resolution: + { + integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==, + } + engines: { node: ">= 0.4" } + + hast-util-to-estree@3.1.0: + resolution: + { + integrity: sha512-lfX5g6hqVh9kjS/B9E2gSkvHH4SZNiQFiqWS0x9fENzEl+8W12RqdRxX6d/Cwxi30tPQs3bIO+aolQJNp1bIyw==, + } + + hast-util-to-html@9.0.3: + resolution: + { + integrity: sha512-M17uBDzMJ9RPCqLMO92gNNUDuBSq10a25SDBI08iCCxmorf4Yy6sYHK57n9WAbRAAaU+DuR4W6GN9K4DFZesYg==, + } + + hast-util-to-jsx-runtime@2.3.0: + resolution: + { + integrity: sha512-H/y0+IWPdsLLS738P8tDnrQ8Z+dj12zQQ6WC11TIM21C8WFVoIxcqWXf2H3hiTVZjF1AWqoimGwrTWecWrnmRQ==, + } + + hast-util-whitespace@3.0.0: + resolution: + { + integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==, + } + + html-void-elements@3.0.0: + resolution: + { + integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==, + } + + human-signals@5.0.0: + resolution: + { + integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==, + } + engines: { node: ">=16.17.0" } + + husky@9.1.6: + resolution: + { + integrity: sha512-sqbjZKK7kf44hfdE94EoX8MZNk0n7HeW37O4YrVGCF4wzgQjp+akPAkfUK5LZ6KuR/6sqeAVuXHji+RzQgOn5A==, + } + engines: { node: ">=18" } + hasBin: true + + hyperdyperid@1.2.0: + resolution: + { + integrity: sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A==, + } + engines: { node: ">=10.18" } + + ignore@5.3.2: + resolution: + { + integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==, + } + engines: { node: ">= 4" } + + imagescript@1.3.0: + resolution: + { + integrity: sha512-lCYzQrWzdnA68K03oMj/BUlBJrVBnslzDOgGFymAp49NmdGEJxGeN7sHh5mCva0nQkq+kkKSuru2zLf1m04+3A==, + } + engines: { node: ">=14.0.0" } + + import-fresh@3.3.0: + resolution: + { + integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==, + } + engines: { node: ">=6" } + + imurmurhash@0.1.4: + resolution: + { + integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==, + } + engines: { node: ">=0.8.19" } + + inflection@3.0.0: + resolution: + { + integrity: sha512-1zEJU1l19SgJlmwqsEyFTbScw/tkMHFenUo//Y0i+XEP83gDFdMvPizAD/WGcE+l1ku12PcTVHQhO6g5E0UCMw==, + } + engines: { node: ">=18.0.0" } + + inflight@1.0.6: + 
resolution: + { + integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==, + } + deprecated: + This module is not supported, and leaks memory. Do not use it. Check out + lru-cache if you want a good and tested way to coalesce async requests by + a key value, which is much more comprehensive and powerful. + + inherits@2.0.4: + resolution: + { + integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==, + } + + inline-style-parser@0.1.1: + resolution: + { + integrity: sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==, + } + + inline-style-parser@0.2.4: + resolution: + { + integrity: sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q==, + } + + internal-slot@1.0.7: + resolution: + { + integrity: sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==, + } + engines: { node: ">= 0.4" } + + interpret@1.4.0: + resolution: + { + integrity: sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==, + } + engines: { node: ">= 0.10" } + + is-alphabetical@2.0.1: + resolution: + { + integrity: sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==, + } + + is-alphanumerical@2.0.1: + resolution: + { + integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==, + } + + is-arguments@1.1.1: + resolution: + { + integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==, + } + engines: { node: ">= 0.4" } + + is-array-buffer@3.0.4: + resolution: + { + integrity: sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==, + } + engines: { node: ">= 0.4" } + + is-async-function@2.0.0: + resolution: + { + integrity: sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA==, + } + engines: { node: ">= 0.4" } + + is-bigint@1.0.4: + resolution: + { + integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==, + } + + is-binary-path@2.1.0: + resolution: + { + integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==, + } + engines: { node: ">=8" } + + is-boolean-object@1.1.2: + resolution: + { + integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==, + } + engines: { node: ">= 0.4" } + + is-bun-module@1.2.1: + resolution: + { + integrity: sha512-AmidtEM6D6NmUiLOvvU7+IePxjEjOzra2h0pSrsfSAcXwl/83zLLXDByafUJy9k/rKK0pvXMLdwKwGHlX2Ke6Q==, + } + + is-callable@1.2.7: + resolution: + { + integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==, + } + engines: { node: ">= 0.4" } + + is-core-module@2.15.1: + resolution: + { + integrity: sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==, + } + engines: { node: ">= 0.4" } + + is-data-view@1.0.1: + resolution: + { + integrity: sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==, + } + engines: { node: ">= 0.4" } + + is-date-object@1.0.5: + resolution: + { + integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==, + } + engines: { node: ">= 0.4" } + + is-decimal@2.0.1: + 
resolution: + { + integrity: sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==, + } + + is-extendable@0.1.1: + resolution: + { + integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==, + } + engines: { node: ">=0.10.0" } + + is-extglob@2.1.1: + resolution: + { + integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==, + } + engines: { node: ">=0.10.0" } + + is-finalizationregistry@1.0.2: + resolution: + { + integrity: sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw==, + } + + is-fullwidth-code-point@3.0.0: + resolution: + { + integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==, + } + engines: { node: ">=8" } + + is-fullwidth-code-point@4.0.0: + resolution: + { + integrity: sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==, + } + engines: { node: ">=12" } + + is-fullwidth-code-point@5.0.0: + resolution: + { + integrity: sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA==, + } + engines: { node: ">=18" } + + is-generator-function@1.0.10: + resolution: + { + integrity: sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==, + } + engines: { node: ">= 0.4" } + + is-glob@4.0.3: + resolution: + { + integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==, + } + engines: { node: ">=0.10.0" } + + is-hexadecimal@2.0.1: + resolution: + { + integrity: sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==, + } + + is-map@2.0.3: + resolution: + { + integrity: sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==, + } + engines: { node: ">= 0.4" } + + is-negative-zero@2.0.3: + resolution: + { + integrity: sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==, + } + engines: { node: ">= 0.4" } + + is-number-object@1.0.7: + resolution: + { + integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==, + } + engines: { node: ">= 0.4" } + + is-number@7.0.0: + resolution: + { + integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==, + } + engines: { node: ">=0.12.0" } + + is-path-inside@3.0.3: + resolution: + { + integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==, + } + engines: { node: ">=8" } + + is-plain-obj@4.1.0: + resolution: + { + integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==, + } + engines: { node: ">=12" } + + is-reference@3.0.2: + resolution: + { + integrity: sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg==, + } + + is-regex@1.1.4: + resolution: + { + integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==, + } + engines: { node: ">= 0.4" } + + is-set@2.0.3: + resolution: + { + integrity: sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==, + } + engines: { node: ">= 0.4" } + + is-shared-array-buffer@1.0.3: + resolution: + { + integrity: 
sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==, + } + engines: { node: ">= 0.4" } + + is-stream@3.0.0: + resolution: + { + integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==, + } + engines: { node: ^12.20.0 || ^14.13.1 || >=16.0.0 } + + is-string@1.0.7: + resolution: + { + integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==, + } + engines: { node: ">= 0.4" } + + is-symbol@1.0.4: + resolution: + { + integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==, + } + engines: { node: ">= 0.4" } + + is-typed-array@1.1.13: + resolution: + { + integrity: sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==, + } + engines: { node: ">= 0.4" } + + is-weakmap@2.0.2: + resolution: + { + integrity: sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==, + } + engines: { node: ">= 0.4" } + + is-weakref@1.0.2: + resolution: + { + integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==, + } + + is-weakset@2.0.3: + resolution: + { + integrity: sha512-LvIm3/KWzS9oRFHugab7d+M/GcBXuXX5xZkzPmN+NxihdQlZUQ4dWuSV1xR/sq6upL1TJEDrfBgRepHFdBtSNQ==, + } + engines: { node: ">= 0.4" } + + isarray@2.0.5: + resolution: + { + integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==, + } + + isexe@2.0.0: + resolution: + { + integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==, + } + + iterator.prototype@1.1.2: + resolution: + { + integrity: sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w==, + } + + jackspeak@2.3.6: + resolution: + { + integrity: sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==, + } + engines: { node: ">=14" } + + jackspeak@3.4.3: + resolution: + { + integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==, + } + + jiti@1.21.6: + resolution: + { + integrity: sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w==, + } + hasBin: true + + js-tokens@4.0.0: + resolution: + { + integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, + } + + js-yaml@3.14.1: + resolution: + { + integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==, + } + hasBin: true + + js-yaml@4.1.0: + resolution: + { + integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==, + } + hasBin: true + + jsbi@4.3.0: + resolution: + { + integrity: sha512-SnZNcinB4RIcnEyZqFPdGPVgrg2AcnykiBy0sHVJQKHYeaLUvi3Exj+iaPpLnFVkDPZIV4U0yvgC9/R4uEAZ9g==, + } + + json-buffer@3.0.1: + resolution: + { + integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==, + } + + json-schema-traverse@0.4.1: + resolution: + { + integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==, + } + + json-stable-stringify-without-jsonify@1.0.1: + resolution: + { + integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==, + } + + json5@1.0.2: + resolution: + { + 
integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==, + } + hasBin: true + + jsx-ast-utils@3.3.5: + resolution: + { + integrity: sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==, + } + engines: { node: ">=4.0" } + + keyv@4.5.4: + resolution: + { + integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==, + } + + kind-of@6.0.3: + resolution: + { + integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==, + } + engines: { node: ">=0.10.0" } + + language-subtag-registry@0.3.23: + resolution: + { + integrity: sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ==, + } + + language-tags@1.0.9: + resolution: + { + integrity: sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==, + } + engines: { node: ">=0.10" } + + levn@0.4.1: + resolution: + { + integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==, + } + engines: { node: ">= 0.8.0" } + + lilconfig@2.1.0: + resolution: + { + integrity: sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==, + } + engines: { node: ">=10" } + + lilconfig@3.1.2: + resolution: + { + integrity: sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==, + } + engines: { node: ">=14" } + + lines-and-columns@1.2.4: + resolution: + { + integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==, + } + + lint-staged@15.2.10: + resolution: + { + integrity: sha512-5dY5t743e1byO19P9I4b3x8HJwalIznL5E1FWYnU6OWw33KxNBSLAc6Cy7F2PsFEO8FKnLwjwm5hx7aMF0jzZg==, + } + engines: { node: ">=18.12.0" } + hasBin: true + + listr2@8.2.4: + resolution: + { + integrity: sha512-opevsywziHd3zHCVQGAj8zu+Z3yHNkkoYhWIGnq54RrCVwLz0MozotJEDnKsIBLvkfLGN6BLOyAeRrYI0pKA4g==, + } + engines: { node: ">=18.0.0" } + + locate-path@6.0.0: + resolution: + { + integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==, + } + engines: { node: ">=10" } + + lodash.camelcase@4.3.0: + resolution: + { + integrity: sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==, + } + + lodash.merge@4.6.2: + resolution: + { + integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==, + } + + lodash@4.17.21: + resolution: + { + integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==, + } + + log-update@6.1.0: + resolution: + { + integrity: sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==, + } + engines: { node: ">=18" } + + long@5.2.3: + resolution: + { + integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==, + } + + longest-streak@3.1.0: + resolution: + { + integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==, + } + + loose-envify@1.4.0: + resolution: + { + integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==, + } + hasBin: true + + lower-case@2.0.2: + resolution: + { + integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==, + } 
+ + lru-cache@10.4.3: + resolution: + { + integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==, + } + + markdown-extensions@2.0.0: + resolution: + { + integrity: sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==, + } + engines: { node: ">=16" } + + mdast-util-from-markdown@2.0.1: + resolution: + { + integrity: sha512-aJEUyzZ6TzlsX2s5B4Of7lN7EQtAxvtradMMglCQDyaTFgse6CmtmdJ15ElnVRlCg1vpNyVtbem0PWzlNieZsA==, + } + + mdast-util-frontmatter@2.0.1: + resolution: + { + integrity: sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==, + } + + mdast-util-mdx-expression@2.0.1: + resolution: + { + integrity: sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==, + } + + mdast-util-mdx-jsx@3.1.3: + resolution: + { + integrity: sha512-bfOjvNt+1AcbPLTFMFWY149nJz0OjmewJs3LQQ5pIyVGxP4CdOqNVJL6kTaM5c68p8q82Xv3nCyFfUnuEcH3UQ==, + } + + mdast-util-mdx@3.0.0: + resolution: + { + integrity: sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==, + } + + mdast-util-mdxjs-esm@2.0.1: + resolution: + { + integrity: sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==, + } + + mdast-util-phrasing@4.1.0: + resolution: + { + integrity: sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==, + } + + mdast-util-to-hast@13.2.0: + resolution: + { + integrity: sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==, + } + + mdast-util-to-markdown@2.1.0: + resolution: + { + integrity: sha512-SR2VnIEdVNCJbP6y7kVTJgPLifdr8WEU440fQec7qHoHOUz/oJ2jmNRqdDQ3rbiStOXb2mCDGTuwsK5OPUgYlQ==, + } + + mdast-util-to-string@4.0.0: + resolution: + { + integrity: sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==, + } + + mdast@3.0.0: + resolution: + { + integrity: sha512-xySmf8g4fPKMeC07jXGz971EkLbWAJ83s4US2Tj9lEdnZ142UP5grN73H1Xd3HzrdbU5o9GYYP/y8F9ZSwLE9g==, + } + deprecated: "`mdast` was renamed to `remark`" + + mdx-bundler@10.0.3: + resolution: + { + integrity: sha512-vRtVZ5t+nUP0QtoRVgjDFO10YDjRgKe/19ie0IR8FqE8SugNn5RP4sCWBPzKoEwoGbqfQOrgHy+PHCVyfaCDQQ==, + } + engines: { node: ">=18", npm: ">=6" } + peerDependencies: + esbuild: 0.* + + memfs@4.12.0: + resolution: + { + integrity: sha512-74wDsex5tQDSClVkeK1vtxqYCAgCoXxx+K4NSHzgU/muYVYByFqa+0RnrPO9NM6naWm1+G9JmZ0p6QHhXmeYfA==, + } + engines: { node: ">= 4.0.0" } + + merge-stream@2.0.0: + resolution: + { + integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==, + } + + merge2@1.4.1: + resolution: + { + integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==, + } + engines: { node: ">= 8" } + + micromark-core-commonmark@2.0.1: + resolution: + { + integrity: sha512-CUQyKr1e///ZODyD1U3xit6zXwy1a8q2a1S1HKtIlmgvurrEpaw/Y9y6KSIbF8P59cn/NjzHyO+Q2fAyYLQrAA==, + } + + micromark-extension-frontmatter@2.0.0: + resolution: + { + integrity: sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg==, + } + + micromark-extension-mdx-expression@3.0.0: + resolution: + { + integrity: sha512-sI0nwhUDz97xyzqJAbHQhp5TfaxEvZZZ2JDqUo+7NvyIYG6BZ5CPPqj2ogUoPJlmXHBnyZUzISg9+oUmU6tUjQ==, + } + + micromark-extension-mdx-jsx@3.0.1: + resolution: + { + integrity: 
sha512-vNuFb9czP8QCtAQcEJn0UJQJZA8Dk6DXKBqx+bg/w0WGuSxDxNr7hErW89tHUY31dUW4NqEOWwmEUNhjTFmHkg==, + } + + micromark-extension-mdx-md@2.0.0: + resolution: + { + integrity: sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ==, + } + + micromark-extension-mdxjs-esm@3.0.0: + resolution: + { + integrity: sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A==, + } + + micromark-extension-mdxjs@3.0.0: + resolution: + { + integrity: sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ==, + } + + micromark-factory-destination@2.0.0: + resolution: + { + integrity: sha512-j9DGrQLm/Uhl2tCzcbLhy5kXsgkHUrjJHg4fFAeoMRwJmJerT9aw4FEhIbZStWN8A3qMwOp1uzHr4UL8AInxtA==, + } + + micromark-factory-label@2.0.0: + resolution: + { + integrity: sha512-RR3i96ohZGde//4WSe/dJsxOX6vxIg9TimLAS3i4EhBAFx8Sm5SmqVfR8E87DPSR31nEAjZfbt91OMZWcNgdZw==, + } + + micromark-factory-mdx-expression@2.0.2: + resolution: + { + integrity: sha512-5E5I2pFzJyg2CtemqAbcyCktpHXuJbABnsb32wX2U8IQKhhVFBqkcZR5LRm1WVoFqa4kTueZK4abep7wdo9nrw==, + } + + micromark-factory-space@2.0.0: + resolution: + { + integrity: sha512-TKr+LIDX2pkBJXFLzpyPyljzYK3MtmllMUMODTQJIUfDGncESaqB90db9IAUcz4AZAJFdd8U9zOp9ty1458rxg==, + } + + micromark-factory-title@2.0.0: + resolution: + { + integrity: sha512-jY8CSxmpWLOxS+t8W+FG3Xigc0RDQA9bKMY/EwILvsesiRniiVMejYTE4wumNc2f4UbAa4WsHqe3J1QS1sli+A==, + } + + micromark-factory-whitespace@2.0.0: + resolution: + { + integrity: sha512-28kbwaBjc5yAI1XadbdPYHX/eDnqaUFVikLwrO7FDnKG7lpgxnvk/XGRhX/PN0mOZ+dBSZ+LgunHS+6tYQAzhA==, + } + + micromark-util-character@2.1.0: + resolution: + { + integrity: sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==, + } + + micromark-util-chunked@2.0.0: + resolution: + { + integrity: sha512-anK8SWmNphkXdaKgz5hJvGa7l00qmcaUQoMYsBwDlSKFKjc6gjGXPDw3FNL3Nbwq5L8gE+RCbGqTw49FK5Qyvg==, + } + + micromark-util-classify-character@2.0.0: + resolution: + { + integrity: sha512-S0ze2R9GH+fu41FA7pbSqNWObo/kzwf8rN/+IGlW/4tC6oACOs8B++bh+i9bVyNnwCcuksbFwsBme5OCKXCwIw==, + } + + micromark-util-combine-extensions@2.0.0: + resolution: + { + integrity: sha512-vZZio48k7ON0fVS3CUgFatWHoKbbLTK/rT7pzpJ4Bjp5JjkZeasRfrS9wsBdDJK2cJLHMckXZdzPSSr1B8a4oQ==, + } + + micromark-util-decode-numeric-character-reference@2.0.1: + resolution: + { + integrity: sha512-bmkNc7z8Wn6kgjZmVHOX3SowGmVdhYS7yBpMnuMnPzDq/6xwVA604DuOXMZTO1lvq01g+Adfa0pE2UKGlxL1XQ==, + } + + micromark-util-decode-string@2.0.0: + resolution: + { + integrity: sha512-r4Sc6leeUTn3P6gk20aFMj2ntPwn6qpDZqWvYmAG6NgvFTIlj4WtrAudLi65qYoaGdXYViXYw2pkmn7QnIFasA==, + } + + micromark-util-encode@2.0.0: + resolution: + { + integrity: sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA==, + } + + micromark-util-events-to-acorn@2.0.2: + resolution: + { + integrity: sha512-Fk+xmBrOv9QZnEDguL9OI9/NQQp6Hz4FuQ4YmCb/5V7+9eAh1s6AYSvL20kHkD67YIg7EpE54TiSlcsf3vyZgA==, + } + + micromark-util-html-tag-name@2.0.0: + resolution: + { + integrity: sha512-xNn4Pqkj2puRhKdKTm8t1YHC/BAjx6CEwRFXntTaRf/x16aqka6ouVoutm+QdkISTlT7e2zU7U4ZdlDLJd2Mcw==, + } + + micromark-util-normalize-identifier@2.0.0: + resolution: + { + integrity: sha512-2xhYT0sfo85FMrUPtHcPo2rrp1lwbDEEzpx7jiH2xXJLqBuy4H0GgXk5ToU8IEwoROtXuL8ND0ttVa4rNqYK3w==, + } + + micromark-util-resolve-all@2.0.0: + resolution: + { + integrity: sha512-6KU6qO7DZ7GJkaCgwBNtplXCvGkJToU86ybBAUdavvgsCiG8lSSvYxr9MhwmQ+udpzywHsl4RpGJsYWG1pDOcA==, + } + + 
micromark-util-sanitize-uri@2.0.0: + resolution: + { + integrity: sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw==, + } + + micromark-util-subtokenize@2.0.1: + resolution: + { + integrity: sha512-jZNtiFl/1aY73yS3UGQkutD0UbhTt68qnRpw2Pifmz5wV9h8gOVsN70v+Lq/f1rKaU/W8pxRe8y8Q9FX1AOe1Q==, + } + + micromark-util-symbol@2.0.0: + resolution: + { + integrity: sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==, + } + + micromark-util-types@2.0.0: + resolution: + { + integrity: sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w==, + } + + micromark@4.0.0: + resolution: + { + integrity: sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ==, + } + + micromatch@4.0.8: + resolution: + { + integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==, + } + engines: { node: ">=8.6" } + + mimic-fn@4.0.0: + resolution: + { + integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==, + } + engines: { node: ">=12" } + + mimic-function@5.0.1: + resolution: + { + integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==, + } + engines: { node: ">=18" } + + min-indent@1.0.1: + resolution: + { + integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==, + } + engines: { node: ">=4" } + + minimatch@3.1.2: + resolution: + { + integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==, + } + + minimatch@9.0.3: + resolution: + { + integrity: sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==, + } + engines: { node: ">=16 || 14 >=14.17" } + + minimatch@9.0.5: + resolution: + { + integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==, + } + engines: { node: ">=16 || 14 >=14.17" } + + minimist@1.2.8: + resolution: + { + integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==, + } + + minipass@3.3.6: + resolution: + { + integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==, + } + engines: { node: ">=8" } + + minipass@5.0.0: + resolution: + { + integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==, + } + engines: { node: ">=8" } + + minipass@7.1.2: + resolution: + { + integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==, + } + engines: { node: ">=16 || 14 >=14.17" } + + minizlib@2.1.2: + resolution: + { + integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==, + } + engines: { node: ">= 8" } + + mkdirp@1.0.4: + resolution: + { + integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==, + } + engines: { node: ">=10" } + hasBin: true + + ms@2.1.3: + resolution: + { + integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==, + } + + mz@2.7.0: + resolution: + { + integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==, + } + + nanoid@3.3.7: + resolution: + { + integrity: 
sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==, + } + engines: { node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1 } + hasBin: true + + natural-compare@1.4.0: + resolution: + { + integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==, + } + + next-contentlayer2@0.4.6: + resolution: + { + integrity: sha512-r1g/zCl3dXFfMlMI87n2+V90Jz7inMrfyRzmMSQdDfCBy3uexnsoM43wPbklza2teyd/pdfUT0I0gw/IE+YA4Q==, + } + peerDependencies: + contentlayer2: 0.4.6 + next: ^12 || ^13 || ^14 + react: "*" + react-dom: "*" + + next@14.1.0: + resolution: + { + integrity: sha512-wlzrsbfeSU48YQBjZhDzOwhWhGsy+uQycR8bHAOt1LY1bn3zZEcDyHQOEoN3aWzQ8LHCAJ1nqrWCc9XF2+O45Q==, + } + engines: { node: ">=18.17.0" } + hasBin: true + peerDependencies: + "@opentelemetry/api": ^1.1.0 + react: ^18.2.0 + react-dom: ^18.2.0 + sass: ^1.3.0 + peerDependenciesMeta: + "@opentelemetry/api": + optional: true + sass: + optional: true + + no-case@3.0.4: + resolution: + { + integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==, + } + + node-fetch@2.7.0: + resolution: + { + integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==, + } + engines: { node: 4.x || >=6.0.0 } + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + + node-releases@2.0.18: + resolution: + { + integrity: sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==, + } + + normalize-path@3.0.0: + resolution: + { + integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==, + } + engines: { node: ">=0.10.0" } + + normalize-range@0.1.2: + resolution: + { + integrity: sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==, + } + engines: { node: ">=0.10.0" } + + npm-run-path@5.3.0: + resolution: + { + integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==, + } + engines: { node: ^12.20.0 || ^14.13.1 || >=16.0.0 } + + object-assign@4.1.1: + resolution: + { + integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==, + } + engines: { node: ">=0.10.0" } + + object-hash@3.0.0: + resolution: + { + integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==, + } + engines: { node: ">= 6" } + + object-inspect@1.13.2: + resolution: + { + integrity: sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==, + } + engines: { node: ">= 0.4" } + + object-is@1.1.6: + resolution: + { + integrity: sha512-F8cZ+KfGlSGi09lJT7/Nd6KJZ9ygtvYC0/UYYLI9nmQKLMnydpB9yvbv9K1uSkEu7FU9vYPmVwLg328tX+ot3Q==, + } + engines: { node: ">= 0.4" } + + object-keys@1.1.1: + resolution: + { + integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==, + } + engines: { node: ">= 0.4" } + + object.assign@4.1.5: + resolution: + { + integrity: sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==, + } + engines: { node: ">= 0.4" } + + object.entries@1.1.8: + resolution: + { + integrity: sha512-cmopxi8VwRIAw/fkijJohSfpef5PdN0pMQJN6VC/ZKvn0LIknWD8KtgY6KlQdEc4tIjcQ3HxSMmnvtzIscdaYQ==, + } + engines: { node: ">= 0.4" } + + object.fromentries@2.0.8: + resolution: + { + integrity: 
sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==, + } + engines: { node: ">= 0.4" } + + object.groupby@1.0.3: + resolution: + { + integrity: sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==, + } + engines: { node: ">= 0.4" } + + object.values@1.2.0: + resolution: + { + integrity: sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==, + } + engines: { node: ">= 0.4" } + + once@1.4.0: + resolution: + { + integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==, + } + + onetime@6.0.0: + resolution: + { + integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==, + } + engines: { node: ">=12" } + + onetime@7.0.0: + resolution: + { + integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==, + } + engines: { node: ">=18" } + + oo-ascii-tree@1.103.1: + resolution: + { + integrity: sha512-X0nmbb8xUUi637JXzCxY/K4AtO/I0fB5b7iiGaHJHu8IXBWV8TnQ4xqa0Igb/NoAg3OP2uXNhSeiTsErETOA/g==, + } + engines: { node: ">= 14.17.0" } + + optionator@0.9.4: + resolution: + { + integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==, + } + engines: { node: ">= 0.8.0" } + + p-limit@3.1.0: + resolution: + { + integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==, + } + engines: { node: ">=10" } + + p-locate@5.0.0: + resolution: + { + integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==, + } + engines: { node: ">=10" } + + package-json-from-dist@1.0.1: + resolution: + { + integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==, + } + + parent-module@1.0.1: + resolution: + { + integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==, + } + engines: { node: ">=6" } + + parse-entities@4.0.1: + resolution: + { + integrity: sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==, + } + + pascal-case@3.1.2: + resolution: + { + integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==, + } + + path-exists@4.0.0: + resolution: + { + integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==, + } + engines: { node: ">=8" } + + path-is-absolute@1.0.1: + resolution: + { + integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==, + } + engines: { node: ">=0.10.0" } + + path-key@3.1.1: + resolution: + { + integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==, + } + engines: { node: ">=8" } + + path-key@4.0.0: + resolution: + { + integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==, + } + engines: { node: ">=12" } + + path-parse@1.0.7: + resolution: + { + integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==, + } + + path-scurry@1.11.1: + resolution: + { + integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==, + } + engines: { node: ">=16 || 14 >=14.18" } + + path-type@4.0.0: + resolution: + { + integrity: 
sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==, + } + engines: { node: ">=8" } + + pend@1.2.0: + resolution: + { + integrity: sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==, + } + + periscopic@3.1.0: + resolution: + { + integrity: sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==, + } + + picocolors@1.1.0: + resolution: + { + integrity: sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==, + } + + picomatch@2.3.1: + resolution: + { + integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==, + } + engines: { node: ">=8.6" } + + pidtree@0.6.0: + resolution: + { + integrity: sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==, + } + engines: { node: ">=0.10" } + hasBin: true + + pify@2.3.0: + resolution: + { + integrity: sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==, + } + engines: { node: ">=0.10.0" } + + pirates@4.0.6: + resolution: + { + integrity: sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==, + } + engines: { node: ">= 6" } + + possible-typed-array-names@1.0.0: + resolution: + { + integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==, + } + engines: { node: ">= 0.4" } + + postcss-import@15.1.0: + resolution: + { + integrity: sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==, + } + engines: { node: ">=14.0.0" } + peerDependencies: + postcss: ^8.0.0 + + postcss-js@4.0.1: + resolution: + { + integrity: sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==, + } + engines: { node: ^12 || ^14 || >= 16 } + peerDependencies: + postcss: ^8.4.21 + + postcss-load-config@4.0.2: + resolution: + { + integrity: sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==, + } + engines: { node: ">= 14" } + peerDependencies: + postcss: ">=8.0.9" + ts-node: ">=9.0.0" + peerDependenciesMeta: + postcss: + optional: true + ts-node: + optional: true + + postcss-nested@6.2.0: + resolution: + { + integrity: sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==, + } + engines: { node: ">=12.0" } + peerDependencies: + postcss: ^8.2.14 + + postcss-selector-parser@6.1.2: + resolution: + { + integrity: sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==, + } + engines: { node: ">=4" } + + postcss-value-parser@4.2.0: + resolution: + { + integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==, + } + + postcss@8.4.26: + resolution: + { + integrity: sha512-jrXHFF8iTloAenySjM/ob3gSj7pCu0Ji49hnjqzsgSRa50hkWCKD0HQ+gMNJkW38jBI68MpAAg7ZWwHwX8NMMw==, + } + engines: { node: ^10 || ^12 || >=14 } + + postcss@8.4.31: + resolution: + { + integrity: sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==, + } + engines: { node: ^10 || ^12 || >=14 } + + prelude-ls@1.2.1: + resolution: + { + integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==, + } + engines: { node: ">= 0.8.0" } + + prettier@3.3.3: + resolution: + { + integrity: 
sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==, + } + engines: { node: ">=14" } + hasBin: true + + prop-types@15.8.1: + resolution: + { + integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==, + } + + property-information@6.5.0: + resolution: + { + integrity: sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==, + } + + protobufjs@7.4.0: + resolution: + { + integrity: sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==, + } + engines: { node: ">=12.0.0" } + + punycode@2.3.1: + resolution: + { + integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==, + } + engines: { node: ">=6" } + + queue-microtask@1.2.3: + resolution: + { + integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==, + } + + react-dom@18.2.0: + resolution: + { + integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==, + } + peerDependencies: + react: ^18.2.0 + + react-is@16.13.1: + resolution: + { + integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==, + } + + react@18.2.0: + resolution: + { + integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==, + } + engines: { node: ">=0.10.0" } + + read-cache@1.0.0: + resolution: + { + integrity: sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==, + } + + readdirp@3.6.0: + resolution: + { + integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==, + } + engines: { node: ">=8.10.0" } + + rechoir@0.6.2: + resolution: + { + integrity: sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==, + } + engines: { node: ">= 0.10" } + + reflect.getprototypeof@1.0.6: + resolution: + { + integrity: sha512-fmfw4XgoDke3kdI6h4xcUz1dG8uaiv5q9gcEwLS4Pnth2kxT+GZ7YehS1JTMGBQmtV7Y4GFGbs2re2NqhdozUg==, + } + engines: { node: ">= 0.4" } + + regenerator-runtime@0.14.1: + resolution: + { + integrity: sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==, + } + + regexp.prototype.flags@1.5.2: + resolution: + { + integrity: sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==, + } + engines: { node: ">= 0.4" } + + rehype-stringify@10.0.0: + resolution: + { + integrity: sha512-1TX1i048LooI9QoecrXy7nGFFbFSufxVRAfc6Y9YMRAi56l+oB0zP51mLSV312uRuvVLPV1opSlJmslozR1XHQ==, + } + + remark-frontmatter@5.0.0: + resolution: + { + integrity: sha512-XTFYvNASMe5iPN0719nPrdItC9aU0ssC4v14mH1BCi1u0n1gAocqcujWUrByftZTbLhRtiKRyjYTSIOcr69UVQ==, + } + + remark-mdx-frontmatter@4.0.0: + resolution: + { + integrity: sha512-PZzAiDGOEfv1Ua7exQ8S5kKxkD8CDaSb4nM+1Mprs6u8dyvQifakh+kCj6NovfGXW+bTvrhjaR3srzjS2qJHKg==, + } + + remark-mdx@3.0.1: + resolution: + { + integrity: sha512-3Pz3yPQ5Rht2pM5R+0J2MrGoBSrzf+tJG94N+t/ilfdh8YLyyKYtidAYwTveB20BoHAcwIopOUqhcmh2F7hGYA==, + } + + remark-parse@11.0.0: + resolution: + { + integrity: sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==, + } + + remark-rehype@11.1.1: + resolution: + { + integrity: sha512-g/osARvjkBXb6Wo0XvAeXQohVta8i84ACbenPpoSsxTOQH/Ae0/RGP4WZgnMH5pMLpsj4FG7OHmcIcXxpza8eQ==, + } + + 
remark-stringify@11.0.0: + resolution: + { + integrity: sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==, + } + + remark@15.0.1: + resolution: + { + integrity: sha512-Eht5w30ruCXgFmxVUSlNWQ9iiimq07URKeFS3hNc8cUWy1llX4KDWfyEDZRycMc+znsN9Ux5/tJ/BFdgdOwA3A==, + } + + repeat-string@1.6.1: + resolution: + { + integrity: sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==, + } + engines: { node: ">=0.10" } + + require-directory@2.1.1: + resolution: + { + integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==, + } + engines: { node: ">=0.10.0" } + + resolve-from@4.0.0: + resolution: + { + integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==, + } + engines: { node: ">=4" } + + resolve-pkg-maps@1.0.0: + resolution: + { + integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==, + } + + resolve@1.22.8: + resolution: + { + integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==, + } + hasBin: true + + resolve@2.0.0-next.5: + resolution: + { + integrity: sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==, + } + hasBin: true + + restore-cursor@5.1.0: + resolution: + { + integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==, + } + engines: { node: ">=18" } + + reusify@1.0.4: + resolution: + { + integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==, + } + engines: { iojs: ">=1.0.0", node: ">=0.10.0" } + + rfdc@1.4.1: + resolution: + { + integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==, + } + + rimraf@3.0.2: + resolution: + { + integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==, + } + deprecated: Rimraf versions prior to v4 are no longer supported + hasBin: true + + run-parallel@1.2.0: + resolution: + { + integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==, + } + + rxjs@7.8.1: + resolution: + { + integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==, + } + + safe-array-concat@1.1.2: + resolution: + { + integrity: sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==, + } + engines: { node: ">=0.4" } + + safe-regex-test@1.0.3: + resolution: + { + integrity: sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==, + } + engines: { node: ">= 0.4" } + + scheduler@0.23.2: + resolution: + { + integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==, + } + + section-matter@1.0.0: + resolution: + { + integrity: sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==, + } + engines: { node: ">=4" } + + semver@6.3.1: + resolution: + { + integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==, + } + hasBin: true + + semver@7.6.3: + resolution: + { + integrity: sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==, + } + engines: { node: ">=10" } + hasBin: true + + set-function-length@1.2.2: + 
resolution: + { + integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==, + } + engines: { node: ">= 0.4" } + + set-function-name@2.0.2: + resolution: + { + integrity: sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==, + } + engines: { node: ">= 0.4" } + + shebang-command@2.0.0: + resolution: + { + integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==, + } + engines: { node: ">=8" } + + shebang-regex@3.0.0: + resolution: + { + integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==, + } + engines: { node: ">=8" } + + shell-quote@1.8.1: + resolution: + { + integrity: sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==, + } + + shelljs@0.8.5: + resolution: + { + integrity: sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==, + } + engines: { node: ">=4" } + hasBin: true + + side-channel@1.0.6: + resolution: + { + integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==, + } + engines: { node: ">= 0.4" } + + signal-exit@4.1.0: + resolution: + { + integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==, + } + engines: { node: ">=14" } + + slash@3.0.0: + resolution: + { + integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==, + } + engines: { node: ">=8" } + + slice-ansi@5.0.0: + resolution: + { + integrity: sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==, + } + engines: { node: ">=12" } + + slice-ansi@7.1.0: + resolution: + { + integrity: sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg==, + } + engines: { node: ">=18" } + + source-map-js@1.2.1: + resolution: + { + integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==, + } + engines: { node: ">=0.10.0" } + + source-map-support@0.5.21: + resolution: + { + integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==, + } + + source-map@0.6.1: + resolution: + { + integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==, + } + engines: { node: ">=0.10.0" } + + source-map@0.7.4: + resolution: + { + integrity: sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==, + } + engines: { node: ">= 8" } + + space-separated-tokens@2.0.2: + resolution: + { + integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==, + } + + spawn-command@0.0.2: + resolution: + { + integrity: sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==, + } + + sprintf-js@1.0.3: + resolution: + { + integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==, + } + + stop-iteration-iterator@1.0.0: + resolution: + { + integrity: sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==, + } + engines: { node: ">= 0.4" } + + streamsearch@1.1.0: + resolution: + { + integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==, + } + engines: { node: 
">=10.0.0" } + + string-argv@0.3.2: + resolution: + { + integrity: sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==, + } + engines: { node: ">=0.6.19" } + + string-width@4.2.3: + resolution: + { + integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==, + } + engines: { node: ">=8" } + + string-width@5.1.2: + resolution: + { + integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==, + } + engines: { node: ">=12" } + + string-width@7.2.0: + resolution: + { + integrity: sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==, + } + engines: { node: ">=18" } + + string.prototype.includes@2.0.0: + resolution: + { + integrity: sha512-E34CkBgyeqNDcrbU76cDjL5JLcVrtSdYq0MEh/B10r17pRP4ciHLwTgnuLV8Ay6cgEMLkcBkFCKyFZ43YldYzg==, + } + + string.prototype.matchall@4.0.11: + resolution: + { + integrity: sha512-NUdh0aDavY2og7IbBPenWqR9exH+E26Sv8e0/eTe1tltDGZL+GtBkDAnnyBtmekfK6/Dq3MkcGtzXFEd1LQrtg==, + } + engines: { node: ">= 0.4" } + + string.prototype.repeat@1.0.0: + resolution: + { + integrity: sha512-0u/TldDbKD8bFCQ/4f5+mNRrXwZ8hg2w7ZR8wa16e8z9XpePWl3eGEcUD0OXpEH/VJH/2G3gjUtR3ZOiBe2S/w==, + } + + string.prototype.trim@1.2.9: + resolution: + { + integrity: sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==, + } + engines: { node: ">= 0.4" } + + string.prototype.trimend@1.0.8: + resolution: + { + integrity: sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==, + } + + string.prototype.trimstart@1.0.8: + resolution: + { + integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==, + } + engines: { node: ">= 0.4" } + + stringify-entities@4.0.4: + resolution: + { + integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==, + } + + strip-ansi@6.0.1: + resolution: + { + integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==, + } + engines: { node: ">=8" } + + strip-ansi@7.1.0: + resolution: + { + integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==, + } + engines: { node: ">=12" } + + strip-bom-string@1.0.0: + resolution: + { + integrity: sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==, + } + engines: { node: ">=0.10.0" } + + strip-bom@3.0.0: + resolution: + { + integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==, + } + engines: { node: ">=4" } + + strip-final-newline@3.0.0: + resolution: + { + integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==, + } + engines: { node: ">=12" } + + strip-indent@4.0.0: + resolution: + { + integrity: sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==, + } + engines: { node: ">=12" } + + strip-json-comments@3.1.1: + resolution: + { + integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==, + } + engines: { node: ">=8" } + + style-to-object@0.4.4: + resolution: + { + integrity: sha512-HYNoHZa2GorYNyqiCaBgsxvcJIn7OHq6inEga+E6Ke3m5JkoqpQbnFssk4jwe+K7AhGa2fcha4wSOf1Kn01dMg==, + } + + style-to-object@1.0.8: + resolution: + { + integrity: 
sha512-xT47I/Eo0rwJmaXC4oilDGDWLohVhR6o/xAQcPQN8q6QBuZVL8qMYL85kLmST5cPjAorwvqIA4qXTRQoYHaL6g==, + } + + styled-jsx@5.1.1: + resolution: + { + integrity: sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==, + } + engines: { node: ">= 12.0.0" } + peerDependencies: + "@babel/core": "*" + babel-plugin-macros: "*" + react: ">= 16.8.0 || 17.x.x || ^18.0.0-0" + peerDependenciesMeta: + "@babel/core": + optional: true + babel-plugin-macros: + optional: true + + sucrase@3.35.0: + resolution: + { + integrity: sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==, + } + engines: { node: ">=16 || 14 >=14.17" } + hasBin: true + + supports-color@7.2.0: + resolution: + { + integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==, + } + engines: { node: ">=8" } + + supports-color@8.1.1: + resolution: + { + integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==, + } + engines: { node: ">=10" } + + supports-preserve-symlinks-flag@1.0.0: + resolution: + { + integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==, + } + engines: { node: ">= 0.4" } + + tailwindcss@3.4.1: + resolution: + { + integrity: sha512-qAYmXRfk3ENzuPBakNK0SRrUDipP8NQnEY6772uDhflcQz5EhRdD7JNZxyrFHVQNCwULPBn6FNPp9brpO7ctcA==, + } + engines: { node: ">=14.0.0" } + hasBin: true + + tapable@2.2.1: + resolution: + { + integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==, + } + engines: { node: ">=6" } + + tar@6.2.1: + resolution: + { + integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==, + } + engines: { node: ">=10" } + + text-table@0.2.0: + resolution: + { + integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==, + } + + thenify-all@1.6.0: + resolution: + { + integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==, + } + engines: { node: ">=0.8" } + + thenify@3.3.1: + resolution: + { + integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==, + } + + thingies@1.21.0: + resolution: + { + integrity: sha512-hsqsJsFMsV+aD4s3CWKk85ep/3I9XzYV/IXaSouJMYIoDlgyi11cBhsqYe9/geRfB0YIikBQg6raRaM+nIMP9g==, + } + engines: { node: ">=10.18" } + peerDependencies: + tslib: ^2 + + to-regex-range@5.0.1: + resolution: + { + integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==, + } + engines: { node: ">=8.0" } + + toml@3.0.0: + resolution: + { + integrity: sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==, + } + + tr46@0.0.3: + resolution: + { + integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==, + } + + tree-dump@1.0.2: + resolution: + { + integrity: sha512-dpev9ABuLWdEubk+cIaI9cHwRNNDjkBBLXTwI4UCUFdQ5xXKqNXoK4FEciw/vxf+NQ7Cb7sGUyeUtORvHIdRXQ==, + } + engines: { node: ">=10.0" } + peerDependencies: + tslib: "2" + + tree-kill@1.2.2: + resolution: + { + integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==, + } + hasBin: true + + trim-lines@3.0.1: + resolution: + { + integrity: 
sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==, + } + + trough@2.2.0: + resolution: + { + integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==, + } + + ts-api-utils@1.3.0: + resolution: + { + integrity: sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ==, + } + engines: { node: ">=16" } + peerDependencies: + typescript: ">=4.2.0" + + ts-interface-checker@0.1.13: + resolution: + { + integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==, + } + + ts-pattern@5.4.0: + resolution: + { + integrity: sha512-hgfOMfjlrARCnYtGD/xEAkFHDXuSyuqjzFSltyQCbN689uNvoQL20TVN2XFcLMjfNuwSsQGU+xtH6MrjIwhwUg==, + } + + tsconfig-paths@3.15.0: + resolution: + { + integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==, + } + + tslib@2.7.0: + resolution: + { + integrity: sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==, + } + + typanion@3.14.0: + resolution: + { + integrity: sha512-ZW/lVMRabETuYCd9O9ZvMhAh8GslSqaUjxmK/JLPCh6l73CvLBiuXswj/+7LdnWOgYsQ130FqLzFz5aGT4I3Ug==, + } + + type-check@0.4.0: + resolution: + { + integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==, + } + engines: { node: ">= 0.8.0" } + + type-fest@0.20.2: + resolution: + { + integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==, + } + engines: { node: ">=10" } + + type-fest@4.26.1: + resolution: + { + integrity: sha512-yOGpmOAL7CkKe/91I5O3gPICmJNLJ1G4zFYVAsRHg7M64biSnPtRj0WNQt++bRkjYOqjWXrhnUw1utzmVErAdg==, + } + engines: { node: ">=16" } + + typed-array-buffer@1.0.2: + resolution: + { + integrity: sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==, + } + engines: { node: ">= 0.4" } + + typed-array-byte-length@1.0.1: + resolution: + { + integrity: sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==, + } + engines: { node: ">= 0.4" } + + typed-array-byte-offset@1.0.2: + resolution: + { + integrity: sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==, + } + engines: { node: ">= 0.4" } + + typed-array-length@1.0.6: + resolution: + { + integrity: sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==, + } + engines: { node: ">= 0.4" } + + typescript@5.3.3: + resolution: + { + integrity: sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==, + } + engines: { node: ">=14.17" } + hasBin: true + + unbox-primitive@1.0.2: + resolution: + { + integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==, + } + + undici-types@5.26.5: + resolution: + { + integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==, + } + + unified@11.0.5: + resolution: + { + integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==, + } + + unist-util-is@6.0.0: + resolution: + { + integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==, + } + + unist-util-position-from-estree@2.0.0: + resolution: + { + integrity: 
sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ==, + } + + unist-util-position@5.0.0: + resolution: + { + integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==, + } + + unist-util-stringify-position@4.0.0: + resolution: + { + integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==, + } + + unist-util-visit-parents@6.0.1: + resolution: + { + integrity: sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==, + } + + unist-util-visit@5.0.0: + resolution: + { + integrity: sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==, + } + + update-browserslist-db@1.1.0: + resolution: + { + integrity: sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==, + } + hasBin: true + peerDependencies: + browserslist: ">= 4.21.0" + + uri-js@4.4.1: + resolution: + { + integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==, + } + + util-deprecate@1.0.2: + resolution: + { + integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==, + } + + uuid@9.0.1: + resolution: + { + integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==, + } + hasBin: true + + vfile-message@4.0.2: + resolution: + { + integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==, + } + + vfile@6.0.3: + resolution: + { + integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==, + } + + webidl-conversions@3.0.1: + resolution: + { + integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==, + } + + whatwg-url@5.0.0: + resolution: + { + integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==, + } + + which-boxed-primitive@1.0.2: + resolution: + { + integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==, + } + + which-builtin-type@1.1.4: + resolution: + { + integrity: sha512-bppkmBSsHFmIMSl8BO9TbsyzsvGjVoppt8xUiGzwiu/bhDCGxnpOKCxgqj6GuyHE0mINMDecBFPlOm2hzY084w==, + } + engines: { node: ">= 0.4" } + + which-collection@1.0.2: + resolution: + { + integrity: sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==, + } + engines: { node: ">= 0.4" } + + which-typed-array@1.1.15: + resolution: + { + integrity: sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==, + } + engines: { node: ">= 0.4" } + + which@2.0.2: + resolution: + { + integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==, + } + engines: { node: ">= 8" } + hasBin: true + + word-wrap@1.2.5: + resolution: + { + integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==, + } + engines: { node: ">=0.10.0" } + + wrap-ansi@7.0.0: + resolution: + { + integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==, + } + engines: { node: ">=10" } + + wrap-ansi@8.1.0: + resolution: + { + integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==, + } + engines: { 
node: ">=12" } + + wrap-ansi@9.0.0: + resolution: + { + integrity: sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q==, + } + engines: { node: ">=18" } + + wrappy@1.0.2: + resolution: + { + integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==, + } + + y18n@5.0.8: + resolution: + { + integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==, + } + engines: { node: ">=10" } + + yallist@4.0.0: + resolution: + { + integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==, + } + + yaml@2.5.1: + resolution: + { + integrity: sha512-bLQOjaX/ADgQ20isPJRvF0iRUHIxVhYvr53Of7wGcWlO2jvtUlH5m87DsmulFVxRpNLOnI4tB6p/oh8D7kpn9Q==, + } + engines: { node: ">= 14" } + hasBin: true + + yargs-parser@21.1.1: + resolution: + { + integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==, + } + engines: { node: ">=12" } + + yargs@17.7.2: + resolution: + { + integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==, + } + engines: { node: ">=12" } + + yauzl@3.1.3: + resolution: + { + integrity: sha512-JCCdmlJJWv7L0q/KylOekyRaUrdEoUxWkWVcgorosTROCFWiS9p2NNPE9Yb91ak7b1N5SxAZEliWpspbZccivw==, + } + engines: { node: ">=12" } + + yocto-queue@0.1.0: + resolution: + { + integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==, + } + engines: { node: ">=10" } + + zod@3.23.8: + resolution: + { + integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==, + } + + zwitch@2.0.4: + resolution: + { + integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==, + } + +snapshots: + "@alloc/quick-lru@5.2.0": {} + + "@babel/runtime@7.25.6": + dependencies: + regenerator-runtime: 0.14.1 + + "@contentlayer2/cli@0.4.3(esbuild@0.20.2)": + dependencies: + "@contentlayer2/core": 0.4.3(esbuild@0.20.2) + "@contentlayer2/utils": 0.4.3 + clipanion: 3.2.1(typanion@3.14.0) + typanion: 3.14.0 + transitivePeerDependencies: + - "@effect-ts/otel-node" + - esbuild + - markdown-wasm + - supports-color + + "@contentlayer2/client@0.4.3(esbuild@0.20.2)": + dependencies: + "@contentlayer2/core": 0.4.3(esbuild@0.20.2) + transitivePeerDependencies: + - "@effect-ts/otel-node" + - esbuild + - markdown-wasm + - supports-color + + "@contentlayer2/core@0.4.3(esbuild@0.20.2)": + dependencies: + "@contentlayer2/utils": 0.4.3 + camel-case: 4.1.2 + comment-json: 4.2.5 + gray-matter: 4.0.3 + mdx-bundler: 10.0.3(esbuild@0.20.2) + rehype-stringify: 10.0.0 + remark-frontmatter: 5.0.0 + remark-parse: 11.0.0 + remark-rehype: 11.1.1 + source-map-support: 0.5.21 + type-fest: 4.26.1 + unified: 11.0.5 + optionalDependencies: + esbuild: 0.20.2 + transitivePeerDependencies: + - "@effect-ts/otel-node" + - supports-color + + "@contentlayer2/source-files@0.4.3(esbuild@0.20.2)": + dependencies: + "@contentlayer2/core": 0.4.3(esbuild@0.20.2) + "@contentlayer2/utils": 0.4.3 + chokidar: 3.6.0 + fast-glob: 3.3.2 + gray-matter: 4.0.3 + imagescript: 1.3.0 + micromatch: 4.0.8 + ts-pattern: 5.4.0 + unified: 11.0.5 + yaml: 2.5.1 + zod: 3.23.8 + transitivePeerDependencies: + - "@effect-ts/otel-node" + - esbuild + - markdown-wasm + - supports-color + + "@contentlayer2/source-remote-files@0.4.3(esbuild@0.20.2)": + dependencies: + "@contentlayer2/core": 
0.4.3(esbuild@0.20.2) + "@contentlayer2/source-files": 0.4.3(esbuild@0.20.2) + "@contentlayer2/utils": 0.4.3 + transitivePeerDependencies: + - "@effect-ts/otel-node" + - esbuild + - markdown-wasm + - supports-color + + "@contentlayer2/utils@0.4.3": + dependencies: + "@effect-ts/core": 0.60.5 + "@effect-ts/otel": 0.15.1(@effect-ts/core@0.60.5)(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.26.0(@opentelemetry/api@1.9.0)) + "@effect-ts/otel-sdk-trace-node": 0.15.1(@effect-ts/core@0.60.5)(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.26.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-node@1.26.0(@opentelemetry/api@1.9.0)) + "@js-temporal/polyfill": 0.4.4 + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/exporter-trace-otlp-grpc": 0.51.1(@opentelemetry/api@1.9.0) + "@opentelemetry/resources": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-trace-base": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-trace-node": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/semantic-conventions": 1.27.0 + chokidar: 3.6.0 + hash-wasm: 4.11.0 + inflection: 3.0.0 + memfs: 4.12.0 + oo-ascii-tree: 1.103.1 + ts-pattern: 5.4.0 + type-fest: 4.26.1 + + "@crowdin/cli@3.19.4": + dependencies: + command-exists-promise: 2.0.2 + node-fetch: 2.7.0 + shelljs: 0.8.5 + tar: 6.2.1 + yauzl: 3.1.3 + transitivePeerDependencies: + - encoding + + "@effect-ts/core@0.60.5": + dependencies: + "@effect-ts/system": 0.57.5 + + "@effect-ts/otel-sdk-trace-node@0.15.1(@effect-ts/core@0.60.5)(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.26.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-node@1.26.0(@opentelemetry/api@1.9.0))": + dependencies: + "@effect-ts/core": 0.60.5 + "@effect-ts/otel": 0.15.1(@effect-ts/core@0.60.5)(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.26.0(@opentelemetry/api@1.9.0)) + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-trace-base": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-trace-node": 1.26.0(@opentelemetry/api@1.9.0) + + "@effect-ts/otel@0.15.1(@effect-ts/core@0.60.5)(@opentelemetry/api@1.9.0)(@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@1.26.0(@opentelemetry/api@1.9.0))": + dependencies: + "@effect-ts/core": 0.60.5 + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-trace-base": 1.26.0(@opentelemetry/api@1.9.0) + + "@effect-ts/system@0.57.5": {} + + "@esbuild-plugins/node-resolve@0.2.2(esbuild@0.20.2)": + dependencies: + "@types/resolve": 1.20.6 + debug: 4.3.7 + esbuild: 0.20.2 + escape-string-regexp: 4.0.0 + resolve: 1.22.8 + transitivePeerDependencies: + - supports-color + + "@esbuild/aix-ppc64@0.20.2": + optional: true + + "@esbuild/android-arm64@0.20.2": + optional: true + + "@esbuild/android-arm@0.20.2": + optional: true + + "@esbuild/android-x64@0.20.2": + optional: true + + "@esbuild/darwin-arm64@0.20.2": + optional: true + + "@esbuild/darwin-x64@0.20.2": + optional: true + + "@esbuild/freebsd-arm64@0.20.2": + optional: true + + "@esbuild/freebsd-x64@0.20.2": + optional: true + + "@esbuild/linux-arm64@0.20.2": + optional: true + + "@esbuild/linux-arm@0.20.2": + optional: true + + 
"@esbuild/linux-ia32@0.20.2": + optional: true + + "@esbuild/linux-loong64@0.20.2": + optional: true + + "@esbuild/linux-mips64el@0.20.2": + optional: true + + "@esbuild/linux-ppc64@0.20.2": + optional: true + + "@esbuild/linux-riscv64@0.20.2": + optional: true + + "@esbuild/linux-s390x@0.20.2": + optional: true + + "@esbuild/linux-x64@0.20.2": + optional: true + + "@esbuild/netbsd-x64@0.20.2": + optional: true + + "@esbuild/openbsd-x64@0.20.2": + optional: true + + "@esbuild/sunos-x64@0.20.2": + optional: true + + "@esbuild/win32-arm64@0.20.2": + optional: true + + "@esbuild/win32-ia32@0.20.2": + optional: true + + "@esbuild/win32-x64@0.20.2": + optional: true + + "@eslint-community/eslint-utils@4.4.0(eslint@8.45.0)": + dependencies: + eslint: 8.45.0 + eslint-visitor-keys: 3.4.3 + + "@eslint-community/regexpp@4.11.1": {} + + "@eslint/eslintrc@2.1.4": + dependencies: + ajv: 6.12.6 + debug: 4.3.7 + espree: 9.6.1 + globals: 13.24.0 + ignore: 5.3.2 + import-fresh: 3.3.0 + js-yaml: 4.1.0 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + "@eslint/js@8.44.0": {} + + "@fal-works/esbuild-plugin-global-externals@2.1.2": {} + + "@grpc/grpc-js@1.11.3": + dependencies: + "@grpc/proto-loader": 0.7.13 + "@js-sdsl/ordered-map": 4.4.2 + + "@grpc/proto-loader@0.7.13": + dependencies: + lodash.camelcase: 4.3.0 + long: 5.2.3 + protobufjs: 7.4.0 + yargs: 17.7.2 + + "@humanwhocodes/config-array@0.11.14": + dependencies: + "@humanwhocodes/object-schema": 2.0.3 + debug: 4.3.7 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + + "@humanwhocodes/module-importer@1.0.1": {} + + "@humanwhocodes/object-schema@2.0.3": {} + + "@isaacs/cliui@8.0.2": + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + "@jridgewell/gen-mapping@0.3.5": + dependencies: + "@jridgewell/set-array": 1.2.1 + "@jridgewell/sourcemap-codec": 1.5.0 + "@jridgewell/trace-mapping": 0.3.25 + + "@jridgewell/resolve-uri@3.1.2": {} + + "@jridgewell/set-array@1.2.1": {} + + "@jridgewell/sourcemap-codec@1.5.0": {} + + "@jridgewell/trace-mapping@0.3.25": + dependencies: + "@jridgewell/resolve-uri": 3.1.2 + "@jridgewell/sourcemap-codec": 1.5.0 + + "@js-sdsl/ordered-map@4.4.2": {} + + "@js-temporal/polyfill@0.4.4": + dependencies: + jsbi: 4.3.0 + tslib: 2.7.0 + + "@jsonjoy.com/base64@1.1.2(tslib@2.7.0)": + dependencies: + tslib: 2.7.0 + + "@jsonjoy.com/json-pack@1.1.0(tslib@2.7.0)": + dependencies: + "@jsonjoy.com/base64": 1.1.2(tslib@2.7.0) + "@jsonjoy.com/util": 1.3.0(tslib@2.7.0) + hyperdyperid: 1.2.0 + thingies: 1.21.0(tslib@2.7.0) + tslib: 2.7.0 + + "@jsonjoy.com/util@1.3.0(tslib@2.7.0)": + dependencies: + tslib: 2.7.0 + + "@mdx-js/esbuild@3.0.1(esbuild@0.20.2)": + dependencies: + "@mdx-js/mdx": 3.0.1 + "@types/unist": 3.0.3 + esbuild: 0.20.2 + vfile: 6.0.3 + vfile-message: 4.0.2 + transitivePeerDependencies: + - supports-color + + "@mdx-js/mdx@3.0.1": + dependencies: + "@types/estree": 1.0.6 + "@types/estree-jsx": 1.0.5 + "@types/hast": 3.0.4 + "@types/mdx": 2.0.13 + collapse-white-space: 2.1.0 + devlop: 1.1.0 + estree-util-build-jsx: 3.0.1 + estree-util-is-identifier-name: 3.0.0 + estree-util-to-js: 2.0.0 + estree-walker: 3.0.3 + hast-util-to-estree: 3.1.0 + hast-util-to-jsx-runtime: 2.3.0 + markdown-extensions: 2.0.0 + periscopic: 3.1.0 + remark-mdx: 3.0.1 + remark-parse: 11.0.0 + remark-rehype: 11.1.1 + source-map: 0.7.4 + unified: 11.0.5 + 
unist-util-position-from-estree: 2.0.0 + unist-util-stringify-position: 4.0.0 + unist-util-visit: 5.0.0 + vfile: 6.0.3 + transitivePeerDependencies: + - supports-color + + "@next/env@14.1.0": {} + + "@next/eslint-plugin-next@14.1.0": + dependencies: + glob: 10.3.10 + + "@next/swc-darwin-arm64@14.1.0": + optional: true + + "@next/swc-darwin-x64@14.1.0": + optional: true + + "@next/swc-linux-arm64-gnu@14.1.0": + optional: true + + "@next/swc-linux-arm64-musl@14.1.0": + optional: true + + "@next/swc-linux-x64-gnu@14.1.0": + optional: true + + "@next/swc-linux-x64-musl@14.1.0": + optional: true + + "@next/swc-win32-arm64-msvc@14.1.0": + optional: true + + "@next/swc-win32-ia32-msvc@14.1.0": + optional: true + + "@next/swc-win32-x64-msvc@14.1.0": + optional: true + + "@nodelib/fs.scandir@2.1.5": + dependencies: + "@nodelib/fs.stat": 2.0.5 + run-parallel: 1.2.0 + + "@nodelib/fs.stat@2.0.5": {} + + "@nodelib/fs.walk@1.2.8": + dependencies: + "@nodelib/fs.scandir": 2.1.5 + fastq: 1.17.1 + + "@nolyfill/is-core-module@1.0.39": {} + + "@opentelemetry/api-logs@0.51.1": + dependencies: + "@opentelemetry/api": 1.9.0 + + "@opentelemetry/api@1.9.0": {} + + "@opentelemetry/context-async-hooks@1.26.0(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + + "@opentelemetry/core@1.24.1(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/semantic-conventions": 1.24.1 + + "@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/semantic-conventions": 1.27.0 + + "@opentelemetry/exporter-trace-otlp-grpc@0.51.1(@opentelemetry/api@1.9.0)": + dependencies: + "@grpc/grpc-js": 1.11.3 + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/otlp-grpc-exporter-base": 0.51.1(@opentelemetry/api@1.9.0) + "@opentelemetry/otlp-transformer": 0.51.1(@opentelemetry/api@1.9.0) + "@opentelemetry/resources": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-trace-base": 1.24.1(@opentelemetry/api@1.9.0) + + "@opentelemetry/otlp-exporter-base@0.51.1(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.24.1(@opentelemetry/api@1.9.0) + + "@opentelemetry/otlp-grpc-exporter-base@0.51.1(@opentelemetry/api@1.9.0)": + dependencies: + "@grpc/grpc-js": 1.11.3 + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/otlp-exporter-base": 0.51.1(@opentelemetry/api@1.9.0) + protobufjs: 7.4.0 + + "@opentelemetry/otlp-transformer@0.51.1(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/api-logs": 0.51.1 + "@opentelemetry/core": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/resources": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-logs": 0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-metrics": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-trace-base": 1.24.1(@opentelemetry/api@1.9.0) + + "@opentelemetry/propagator-b3@1.26.0(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.26.0(@opentelemetry/api@1.9.0) + + "@opentelemetry/propagator-jaeger@1.26.0(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.26.0(@opentelemetry/api@1.9.0) + + "@opentelemetry/resources@1.24.1(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 
1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/semantic-conventions": 1.24.1 + + "@opentelemetry/resources@1.26.0(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/semantic-conventions": 1.27.0 + + "@opentelemetry/sdk-logs@0.51.1(@opentelemetry/api-logs@0.51.1)(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/api-logs": 0.51.1 + "@opentelemetry/core": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/resources": 1.24.1(@opentelemetry/api@1.9.0) + + "@opentelemetry/sdk-metrics@1.24.1(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/resources": 1.24.1(@opentelemetry/api@1.9.0) + lodash.merge: 4.6.2 + + "@opentelemetry/sdk-trace-base@1.24.1(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/resources": 1.24.1(@opentelemetry/api@1.9.0) + "@opentelemetry/semantic-conventions": 1.24.1 + + "@opentelemetry/sdk-trace-base@1.26.0(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/core": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/resources": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/semantic-conventions": 1.27.0 + + "@opentelemetry/sdk-trace-node@1.26.0(@opentelemetry/api@1.9.0)": + dependencies: + "@opentelemetry/api": 1.9.0 + "@opentelemetry/context-async-hooks": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/core": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/propagator-b3": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/propagator-jaeger": 1.26.0(@opentelemetry/api@1.9.0) + "@opentelemetry/sdk-trace-base": 1.26.0(@opentelemetry/api@1.9.0) + semver: 7.6.3 + + "@opentelemetry/semantic-conventions@1.24.1": {} + + "@opentelemetry/semantic-conventions@1.27.0": {} + + "@pkgjs/parseargs@0.11.0": + optional: true + + "@protobufjs/aspromise@1.1.2": {} + + "@protobufjs/base64@1.1.2": {} + + "@protobufjs/codegen@2.0.4": {} + + "@protobufjs/eventemitter@1.1.0": {} + + "@protobufjs/fetch@1.1.0": + dependencies: + "@protobufjs/aspromise": 1.1.2 + "@protobufjs/inquire": 1.1.0 + + "@protobufjs/float@1.0.2": {} + + "@protobufjs/inquire@1.1.0": {} + + "@protobufjs/path@1.1.2": {} + + "@protobufjs/pool@1.1.0": {} + + "@protobufjs/utf8@1.1.0": {} + + "@rtsao/scc@1.1.0": {} + + "@rushstack/eslint-patch@1.10.4": {} + + "@swc/helpers@0.5.2": + dependencies: + tslib: 2.7.0 + + "@types/acorn@4.0.6": + dependencies: + "@types/estree": 1.0.6 + + "@types/debug@4.1.12": + dependencies: + "@types/ms": 0.7.34 + + "@types/estree-jsx@1.0.5": + dependencies: + "@types/estree": 1.0.6 + + "@types/estree@1.0.6": {} + + "@types/hast@3.0.4": + dependencies: + "@types/unist": 3.0.3 + + "@types/json5@0.0.29": {} + + "@types/mdast@4.0.4": + dependencies: + "@types/unist": 3.0.3 + + "@types/mdx@2.0.13": {} + + "@types/ms@0.7.34": {} + + "@types/node@20.11.17": + dependencies: + undici-types: 5.26.5 + + "@types/prop-types@15.7.13": {} + + "@types/react-dom@18.2.7": + dependencies: + "@types/react": 18.2.15 + + "@types/react@18.2.15": + dependencies: + "@types/prop-types": 15.7.13 + "@types/scheduler": 0.23.0 + csstype: 3.1.3 + + "@types/resolve@1.20.6": {} + + "@types/scheduler@0.23.0": {} + + "@types/unist@2.0.11": {} + + "@types/unist@3.0.3": {} + + "@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3)": + 
dependencies: + "@typescript-eslint/scope-manager": 6.21.0 + "@typescript-eslint/types": 6.21.0 + "@typescript-eslint/typescript-estree": 6.21.0(typescript@5.3.3) + "@typescript-eslint/visitor-keys": 6.21.0 + debug: 4.3.7 + eslint: 8.45.0 + optionalDependencies: + typescript: 5.3.3 + transitivePeerDependencies: + - supports-color + + "@typescript-eslint/scope-manager@6.21.0": + dependencies: + "@typescript-eslint/types": 6.21.0 + "@typescript-eslint/visitor-keys": 6.21.0 + + "@typescript-eslint/types@6.21.0": {} + + "@typescript-eslint/typescript-estree@6.21.0(typescript@5.3.3)": + dependencies: + "@typescript-eslint/types": 6.21.0 + "@typescript-eslint/visitor-keys": 6.21.0 + debug: 4.3.7 + globby: 11.1.0 + is-glob: 4.0.3 + minimatch: 9.0.3 + semver: 7.6.3 + ts-api-utils: 1.3.0(typescript@5.3.3) + optionalDependencies: + typescript: 5.3.3 + transitivePeerDependencies: + - supports-color + + "@typescript-eslint/visitor-keys@6.21.0": + dependencies: + "@typescript-eslint/types": 6.21.0 + eslint-visitor-keys: 3.4.3 + + "@ungap/structured-clone@1.2.0": {} + + acorn-jsx@5.3.2(acorn@8.12.1): + dependencies: + acorn: 8.12.1 + + acorn@8.12.1: {} + + ajv@6.12.6: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ansi-escapes@7.0.0: + dependencies: + environment: 1.1.0 + + ansi-regex@5.0.1: {} + + ansi-regex@6.1.0: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@6.2.1: {} + + any-promise@1.3.0: {} + + anymatch@3.1.3: + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + + arg@5.0.2: {} + + argparse@1.0.10: + dependencies: + sprintf-js: 1.0.3 + + argparse@2.0.1: {} + + aria-query@5.1.3: + dependencies: + deep-equal: 2.2.3 + + array-buffer-byte-length@1.0.1: + dependencies: + call-bind: 1.0.7 + is-array-buffer: 3.0.4 + + array-includes@3.1.8: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-object-atoms: 1.0.0 + get-intrinsic: 1.2.4 + is-string: 1.0.7 + + array-timsort@1.0.3: {} + + array-union@2.1.0: {} + + array.prototype.findlast@1.2.5: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-errors: 1.3.0 + es-object-atoms: 1.0.0 + es-shim-unscopables: 1.0.2 + + array.prototype.findlastindex@1.2.5: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-errors: 1.3.0 + es-object-atoms: 1.0.0 + es-shim-unscopables: 1.0.2 + + array.prototype.flat@1.3.2: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-shim-unscopables: 1.0.2 + + array.prototype.flatmap@1.3.2: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-shim-unscopables: 1.0.2 + + array.prototype.tosorted@1.1.4: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-errors: 1.3.0 + es-shim-unscopables: 1.0.2 + + arraybuffer.prototype.slice@1.0.3: + dependencies: + array-buffer-byte-length: 1.0.1 + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-errors: 1.3.0 + get-intrinsic: 1.2.4 + is-array-buffer: 3.0.4 + is-shared-array-buffer: 1.0.3 + + ast-types-flow@0.0.8: {} + + astring@1.9.0: {} + + autoprefixer@10.4.14(postcss@8.4.26): + dependencies: + browserslist: 4.24.0 + caniuse-lite: 1.0.30001664 + fraction.js: 4.3.7 + normalize-range: 0.1.2 + picocolors: 1.1.0 + postcss: 8.4.26 + postcss-value-parser: 4.2.0 + + available-typed-arrays@1.0.7: + dependencies: + possible-typed-array-names: 1.0.0 + 
+ axe-core@4.10.0: {} + + axobject-query@4.1.0: {} + + bail@2.0.2: {} + + balanced-match@1.0.2: {} + + binary-extensions@2.3.0: {} + + brace-expansion@1.1.11: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@2.0.1: + dependencies: + balanced-match: 1.0.2 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + browserslist@4.24.0: + dependencies: + caniuse-lite: 1.0.30001664 + electron-to-chromium: 1.5.29 + node-releases: 2.0.18 + update-browserslist-db: 1.1.0(browserslist@4.24.0) + + buffer-crc32@0.2.13: {} + + buffer-from@1.1.2: {} + + busboy@1.6.0: + dependencies: + streamsearch: 1.1.0 + + call-bind@1.0.7: + dependencies: + es-define-property: 1.0.0 + es-errors: 1.3.0 + function-bind: 1.1.2 + get-intrinsic: 1.2.4 + set-function-length: 1.2.2 + + callsites@3.1.0: {} + + camel-case@4.1.2: + dependencies: + pascal-case: 3.1.2 + tslib: 2.7.0 + + camelcase-css@2.0.1: {} + + caniuse-lite@1.0.30001664: {} + + ccount@2.0.1: {} + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + chalk@5.3.0: {} + + character-entities-html4@2.1.0: {} + + character-entities-legacy@3.0.0: {} + + character-entities@2.0.2: {} + + character-reference-invalid@2.0.1: {} + + chokidar@3.6.0: + dependencies: + anymatch: 3.1.3 + braces: 3.0.3 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + + chownr@2.0.0: {} + + cli-cursor@5.0.0: + dependencies: + restore-cursor: 5.1.0 + + cli-truncate@4.0.0: + dependencies: + slice-ansi: 5.0.0 + string-width: 7.2.0 + + client-only@0.0.1: {} + + clipanion@3.2.1(typanion@3.14.0): + dependencies: + typanion: 3.14.0 + + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + + collapse-white-space@2.1.0: {} + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + colorette@2.0.20: {} + + comma-separated-tokens@2.0.3: {} + + command-exists-promise@2.0.2: {} + + commander@12.1.0: {} + + commander@4.1.1: {} + + comment-json@4.2.5: + dependencies: + array-timsort: 1.0.3 + core-util-is: 1.0.3 + esprima: 4.0.1 + has-own-prop: 2.0.0 + repeat-string: 1.6.1 + + concat-map@0.0.1: {} + + concurrently@8.2.2: + dependencies: + chalk: 4.1.2 + date-fns: 2.30.0 + lodash: 4.17.21 + rxjs: 7.8.1 + shell-quote: 1.8.1 + spawn-command: 0.0.2 + supports-color: 8.1.1 + tree-kill: 1.2.2 + yargs: 17.7.2 + + contentlayer2@0.4.6(esbuild@0.20.2): + dependencies: + "@contentlayer2/cli": 0.4.3(esbuild@0.20.2) + "@contentlayer2/client": 0.4.3(esbuild@0.20.2) + "@contentlayer2/core": 0.4.3(esbuild@0.20.2) + "@contentlayer2/source-files": 0.4.3(esbuild@0.20.2) + "@contentlayer2/source-remote-files": 0.4.3(esbuild@0.20.2) + "@contentlayer2/utils": 0.4.3 + transitivePeerDependencies: + - "@effect-ts/otel-node" + - esbuild + - markdown-wasm + - supports-color + + core-util-is@1.0.3: {} + + cross-spawn@7.0.3: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + cssesc@3.0.0: {} + + csstype@3.1.3: {} + + damerau-levenshtein@1.0.8: {} + + data-view-buffer@1.0.1: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-data-view: 1.0.1 + + data-view-byte-length@1.0.1: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-data-view: 1.0.1 + + data-view-byte-offset@1.0.0: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-data-view: 1.0.1 + + date-fns@2.30.0: + dependencies: + "@babel/runtime": 7.25.6 + + debug@3.2.7: + dependencies: + ms: 2.1.3 + + debug@4.3.7: + 
dependencies: + ms: 2.1.3 + + decode-named-character-reference@1.0.2: + dependencies: + character-entities: 2.0.2 + + deep-equal@2.2.3: + dependencies: + array-buffer-byte-length: 1.0.1 + call-bind: 1.0.7 + es-get-iterator: 1.1.3 + get-intrinsic: 1.2.4 + is-arguments: 1.1.1 + is-array-buffer: 3.0.4 + is-date-object: 1.0.5 + is-regex: 1.1.4 + is-shared-array-buffer: 1.0.3 + isarray: 2.0.5 + object-is: 1.1.6 + object-keys: 1.1.1 + object.assign: 4.1.5 + regexp.prototype.flags: 1.5.2 + side-channel: 1.0.6 + which-boxed-primitive: 1.0.2 + which-collection: 1.0.2 + which-typed-array: 1.1.15 + + deep-is@0.1.4: {} + + define-data-property@1.1.4: + dependencies: + es-define-property: 1.0.0 + es-errors: 1.3.0 + gopd: 1.0.1 + + define-properties@1.2.1: + dependencies: + define-data-property: 1.1.4 + has-property-descriptors: 1.0.2 + object-keys: 1.1.1 + + dequal@2.0.3: {} + + devlop@1.1.0: + dependencies: + dequal: 2.0.3 + + didyoumean@1.2.2: {} + + dir-glob@3.0.1: + dependencies: + path-type: 4.0.0 + + dlv@1.1.3: {} + + doctrine@2.1.0: + dependencies: + esutils: 2.0.3 + + doctrine@3.0.0: + dependencies: + esutils: 2.0.3 + + eastasianwidth@0.2.0: {} + + electron-to-chromium@1.5.29: {} + + emoji-regex@10.4.0: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + enhanced-resolve@5.17.1: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.2.1 + + environment@1.1.0: {} + + es-abstract@1.23.3: + dependencies: + array-buffer-byte-length: 1.0.1 + arraybuffer.prototype.slice: 1.0.3 + available-typed-arrays: 1.0.7 + call-bind: 1.0.7 + data-view-buffer: 1.0.1 + data-view-byte-length: 1.0.1 + data-view-byte-offset: 1.0.0 + es-define-property: 1.0.0 + es-errors: 1.3.0 + es-object-atoms: 1.0.0 + es-set-tostringtag: 2.0.3 + es-to-primitive: 1.2.1 + function.prototype.name: 1.1.6 + get-intrinsic: 1.2.4 + get-symbol-description: 1.0.2 + globalthis: 1.0.4 + gopd: 1.0.1 + has-property-descriptors: 1.0.2 + has-proto: 1.0.3 + has-symbols: 1.0.3 + hasown: 2.0.2 + internal-slot: 1.0.7 + is-array-buffer: 3.0.4 + is-callable: 1.2.7 + is-data-view: 1.0.1 + is-negative-zero: 2.0.3 + is-regex: 1.1.4 + is-shared-array-buffer: 1.0.3 + is-string: 1.0.7 + is-typed-array: 1.1.13 + is-weakref: 1.0.2 + object-inspect: 1.13.2 + object-keys: 1.1.1 + object.assign: 4.1.5 + regexp.prototype.flags: 1.5.2 + safe-array-concat: 1.1.2 + safe-regex-test: 1.0.3 + string.prototype.trim: 1.2.9 + string.prototype.trimend: 1.0.8 + string.prototype.trimstart: 1.0.8 + typed-array-buffer: 1.0.2 + typed-array-byte-length: 1.0.1 + typed-array-byte-offset: 1.0.2 + typed-array-length: 1.0.6 + unbox-primitive: 1.0.2 + which-typed-array: 1.1.15 + + es-define-property@1.0.0: + dependencies: + get-intrinsic: 1.2.4 + + es-errors@1.3.0: {} + + es-get-iterator@1.1.3: + dependencies: + call-bind: 1.0.7 + get-intrinsic: 1.2.4 + has-symbols: 1.0.3 + is-arguments: 1.1.1 + is-map: 2.0.3 + is-set: 2.0.3 + is-string: 1.0.7 + isarray: 2.0.5 + stop-iteration-iterator: 1.0.0 + + es-iterator-helpers@1.0.19: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-errors: 1.3.0 + es-set-tostringtag: 2.0.3 + function-bind: 1.1.2 + get-intrinsic: 1.2.4 + globalthis: 1.0.4 + has-property-descriptors: 1.0.2 + has-proto: 1.0.3 + has-symbols: 1.0.3 + internal-slot: 1.0.7 + iterator.prototype: 1.1.2 + safe-array-concat: 1.1.2 + + es-object-atoms@1.0.0: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.0.3: + dependencies: + get-intrinsic: 1.2.4 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + + es-shim-unscopables@1.0.2: + dependencies: + 
hasown: 2.0.2 + + es-to-primitive@1.2.1: + dependencies: + is-callable: 1.2.7 + is-date-object: 1.0.5 + is-symbol: 1.0.4 + + esbuild@0.20.2: + optionalDependencies: + "@esbuild/aix-ppc64": 0.20.2 + "@esbuild/android-arm": 0.20.2 + "@esbuild/android-arm64": 0.20.2 + "@esbuild/android-x64": 0.20.2 + "@esbuild/darwin-arm64": 0.20.2 + "@esbuild/darwin-x64": 0.20.2 + "@esbuild/freebsd-arm64": 0.20.2 + "@esbuild/freebsd-x64": 0.20.2 + "@esbuild/linux-arm": 0.20.2 + "@esbuild/linux-arm64": 0.20.2 + "@esbuild/linux-ia32": 0.20.2 + "@esbuild/linux-loong64": 0.20.2 + "@esbuild/linux-mips64el": 0.20.2 + "@esbuild/linux-ppc64": 0.20.2 + "@esbuild/linux-riscv64": 0.20.2 + "@esbuild/linux-s390x": 0.20.2 + "@esbuild/linux-x64": 0.20.2 + "@esbuild/netbsd-x64": 0.20.2 + "@esbuild/openbsd-x64": 0.20.2 + "@esbuild/sunos-x64": 0.20.2 + "@esbuild/win32-arm64": 0.20.2 + "@esbuild/win32-ia32": 0.20.2 + "@esbuild/win32-x64": 0.20.2 + + escalade@3.2.0: {} + + escape-string-regexp@4.0.0: {} + + escape-string-regexp@5.0.0: {} + + eslint-config-next@14.1.0(eslint@8.45.0)(typescript@5.3.3): + dependencies: + "@next/eslint-plugin-next": 14.1.0 + "@rushstack/eslint-patch": 1.10.4 + "@typescript-eslint/parser": 6.21.0(eslint@8.45.0)(typescript@5.3.3) + eslint: 8.45.0 + eslint-import-resolver-node: 0.3.9 + eslint-import-resolver-typescript: 3.6.3(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(eslint@8.45.0))(eslint@8.45.0) + eslint-plugin-import: 2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(eslint@8.45.0))(eslint@8.45.0))(eslint@8.45.0) + eslint-plugin-jsx-a11y: 6.10.0(eslint@8.45.0) + eslint-plugin-react: 7.36.1(eslint@8.45.0) + eslint-plugin-react-hooks: 4.6.2(eslint@8.45.0) + optionalDependencies: + typescript: 5.3.3 + transitivePeerDependencies: + - eslint-import-resolver-webpack + - eslint-plugin-import-x + - supports-color + + eslint-import-resolver-node@0.3.9: + dependencies: + debug: 3.2.7 + is-core-module: 2.15.1 + resolve: 1.22.8 + transitivePeerDependencies: + - supports-color + + eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(eslint@8.45.0))(eslint@8.45.0): + dependencies: + "@nolyfill/is-core-module": 1.0.39 + debug: 4.3.7 + enhanced-resolve: 5.17.1 + eslint: 8.45.0 + eslint-module-utils: 2.12.0(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(eslint@8.45.0))(eslint@8.45.0))(eslint@8.45.0) + fast-glob: 3.3.2 + get-tsconfig: 4.8.1 + is-bun-module: 1.2.1 + is-glob: 4.0.3 + optionalDependencies: + eslint-plugin-import: 2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(eslint@8.45.0))(eslint@8.45.0))(eslint@8.45.0) + transitivePeerDependencies: + - "@typescript-eslint/parser" + - eslint-import-resolver-node + - eslint-import-resolver-webpack + - supports-color + + 
eslint-module-utils@2.12.0(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(eslint@8.45.0))(eslint@8.45.0))(eslint@8.45.0): + dependencies: + debug: 3.2.7 + optionalDependencies: + "@typescript-eslint/parser": 6.21.0(eslint@8.45.0)(typescript@5.3.3) + eslint: 8.45.0 + eslint-import-resolver-node: 0.3.9 + eslint-import-resolver-typescript: 3.6.3(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(eslint@8.45.0))(eslint@8.45.0) + transitivePeerDependencies: + - supports-color + + eslint-plugin-import@2.30.0(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(eslint@8.45.0))(eslint@8.45.0))(eslint@8.45.0): + dependencies: + "@rtsao/scc": 1.1.0 + array-includes: 3.1.8 + array.prototype.findlastindex: 1.2.5 + array.prototype.flat: 1.3.2 + array.prototype.flatmap: 1.3.2 + debug: 3.2.7 + doctrine: 2.1.0 + eslint: 8.45.0 + eslint-import-resolver-node: 0.3.9 + eslint-module-utils: 2.12.0(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.3(@typescript-eslint/parser@6.21.0(eslint@8.45.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.30.0(eslint@8.45.0))(eslint@8.45.0))(eslint@8.45.0) + hasown: 2.0.2 + is-core-module: 2.15.1 + is-glob: 4.0.3 + minimatch: 3.1.2 + object.fromentries: 2.0.8 + object.groupby: 1.0.3 + object.values: 1.2.0 + semver: 6.3.1 + tsconfig-paths: 3.15.0 + optionalDependencies: + "@typescript-eslint/parser": 6.21.0(eslint@8.45.0)(typescript@5.3.3) + transitivePeerDependencies: + - eslint-import-resolver-typescript + - eslint-import-resolver-webpack + - supports-color + + eslint-plugin-jsx-a11y@6.10.0(eslint@8.45.0): + dependencies: + aria-query: 5.1.3 + array-includes: 3.1.8 + array.prototype.flatmap: 1.3.2 + ast-types-flow: 0.0.8 + axe-core: 4.10.0 + axobject-query: 4.1.0 + damerau-levenshtein: 1.0.8 + emoji-regex: 9.2.2 + es-iterator-helpers: 1.0.19 + eslint: 8.45.0 + hasown: 2.0.2 + jsx-ast-utils: 3.3.5 + language-tags: 1.0.9 + minimatch: 3.1.2 + object.fromentries: 2.0.8 + safe-regex-test: 1.0.3 + string.prototype.includes: 2.0.0 + + eslint-plugin-react-hooks@4.6.2(eslint@8.45.0): + dependencies: + eslint: 8.45.0 + + eslint-plugin-react@7.36.1(eslint@8.45.0): + dependencies: + array-includes: 3.1.8 + array.prototype.findlast: 1.2.5 + array.prototype.flatmap: 1.3.2 + array.prototype.tosorted: 1.1.4 + doctrine: 2.1.0 + es-iterator-helpers: 1.0.19 + eslint: 8.45.0 + estraverse: 5.3.0 + hasown: 2.0.2 + jsx-ast-utils: 3.3.5 + minimatch: 3.1.2 + object.entries: 1.1.8 + object.fromentries: 2.0.8 + object.values: 1.2.0 + prop-types: 15.8.1 + resolve: 2.0.0-next.5 + semver: 6.3.1 + string.prototype.matchall: 4.0.11 + string.prototype.repeat: 1.0.0 + + eslint-scope@7.2.2: + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + + eslint-visitor-keys@3.4.3: {} + + eslint@8.45.0: + dependencies: + "@eslint-community/eslint-utils": 4.4.0(eslint@8.45.0) + "@eslint-community/regexpp": 4.11.1 + "@eslint/eslintrc": 2.1.4 + "@eslint/js": 8.44.0 + "@humanwhocodes/config-array": 0.11.14 + 
"@humanwhocodes/module-importer": 1.0.1 + "@nodelib/fs.walk": 1.2.8 + ajv: 6.12.6 + chalk: 4.1.2 + cross-spawn: 7.0.3 + debug: 4.3.7 + doctrine: 3.0.0 + escape-string-regexp: 4.0.0 + eslint-scope: 7.2.2 + eslint-visitor-keys: 3.4.3 + espree: 9.6.1 + esquery: 1.6.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 6.0.1 + find-up: 5.0.0 + glob-parent: 6.0.2 + globals: 13.24.0 + graphemer: 1.4.0 + ignore: 5.3.2 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + is-path-inside: 3.0.3 + js-yaml: 4.1.0 + json-stable-stringify-without-jsonify: 1.0.1 + levn: 0.4.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.4 + strip-ansi: 6.0.1 + text-table: 0.2.0 + transitivePeerDependencies: + - supports-color + + espree@9.6.1: + dependencies: + acorn: 8.12.1 + acorn-jsx: 5.3.2(acorn@8.12.1) + eslint-visitor-keys: 3.4.3 + + esprima@4.0.1: {} + + esquery@1.6.0: + dependencies: + estraverse: 5.3.0 + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@5.3.0: {} + + estree-util-attach-comments@3.0.0: + dependencies: + "@types/estree": 1.0.6 + + estree-util-build-jsx@3.0.1: + dependencies: + "@types/estree-jsx": 1.0.5 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + estree-walker: 3.0.3 + + estree-util-is-identifier-name@3.0.0: {} + + estree-util-to-js@2.0.0: + dependencies: + "@types/estree-jsx": 1.0.5 + astring: 1.9.0 + source-map: 0.7.4 + + estree-util-value-to-estree@3.1.2: + dependencies: + "@types/estree": 1.0.6 + + estree-util-visit@2.0.0: + dependencies: + "@types/estree-jsx": 1.0.5 + "@types/unist": 3.0.3 + + estree-walker@3.0.3: + dependencies: + "@types/estree": 1.0.6 + + esutils@2.0.3: {} + + eventemitter3@5.0.1: {} + + execa@8.0.1: + dependencies: + cross-spawn: 7.0.3 + get-stream: 8.0.1 + human-signals: 5.0.0 + is-stream: 3.0.0 + merge-stream: 2.0.0 + npm-run-path: 5.3.0 + onetime: 6.0.0 + signal-exit: 4.1.0 + strip-final-newline: 3.0.0 + + extend-shallow@2.0.1: + dependencies: + is-extendable: 0.1.1 + + extend@3.0.2: {} + + fast-deep-equal@3.1.3: {} + + fast-glob@3.3.2: + dependencies: + "@nodelib/fs.stat": 2.0.5 + "@nodelib/fs.walk": 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fast-json-stable-stringify@2.1.0: {} + + fast-levenshtein@2.0.6: {} + + fastq@1.17.1: + dependencies: + reusify: 1.0.4 + + fault@2.0.1: + dependencies: + format: 0.2.2 + + file-entry-cache@6.0.1: + dependencies: + flat-cache: 3.2.0 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + + flat-cache@3.2.0: + dependencies: + flatted: 3.3.1 + keyv: 4.5.4 + rimraf: 3.0.2 + + flatted@3.3.1: {} + + for-each@0.3.3: + dependencies: + is-callable: 1.2.7 + + foreground-child@3.3.0: + dependencies: + cross-spawn: 7.0.3 + signal-exit: 4.1.0 + + format@0.2.2: {} + + fraction.js@4.3.7: {} + + fs-minipass@2.1.0: + dependencies: + minipass: 3.3.6 + + fs.realpath@1.0.0: {} + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + function.prototype.name@1.1.6: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + functions-have-names: 1.2.3 + + functions-have-names@1.2.3: {} + + get-caller-file@2.0.5: {} + + get-east-asian-width@1.2.0: {} + + get-intrinsic@1.2.4: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + has-proto: 1.0.3 + has-symbols: 1.0.3 + hasown: 2.0.2 + + get-stream@8.0.1: {} + + get-symbol-description@1.0.2: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + get-intrinsic: 1.2.4 + + 
get-tsconfig@4.8.1: + dependencies: + resolve-pkg-maps: 1.0.0 + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + glob@10.3.10: + dependencies: + foreground-child: 3.3.0 + jackspeak: 2.3.6 + minimatch: 9.0.5 + minipass: 7.1.2 + path-scurry: 1.11.1 + + glob@10.4.5: + dependencies: + foreground-child: 3.3.0 + jackspeak: 3.4.3 + minimatch: 9.0.5 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 1.11.1 + + glob@7.2.3: + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + + globals@13.24.0: + dependencies: + type-fest: 0.20.2 + + globalthis@1.0.4: + dependencies: + define-properties: 1.2.1 + gopd: 1.0.1 + + globby@11.1.0: + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.3.2 + ignore: 5.3.2 + merge2: 1.4.1 + slash: 3.0.0 + + gopd@1.0.1: + dependencies: + get-intrinsic: 1.2.4 + + graceful-fs@4.2.11: {} + + graphemer@1.4.0: {} + + gray-matter@4.0.3: + dependencies: + js-yaml: 3.14.1 + kind-of: 6.0.3 + section-matter: 1.0.0 + strip-bom-string: 1.0.0 + + has-bigints@1.0.2: {} + + has-flag@4.0.0: {} + + has-own-prop@2.0.0: {} + + has-property-descriptors@1.0.2: + dependencies: + es-define-property: 1.0.0 + + has-proto@1.0.3: {} + + has-symbols@1.0.3: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.0.3 + + hash-wasm@4.11.0: {} + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + hast-util-to-estree@3.1.0: + dependencies: + "@types/estree": 1.0.6 + "@types/estree-jsx": 1.0.5 + "@types/hast": 3.0.4 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + estree-util-attach-comments: 3.0.0 + estree-util-is-identifier-name: 3.0.0 + hast-util-whitespace: 3.0.0 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.1.3 + mdast-util-mdxjs-esm: 2.0.1 + property-information: 6.5.0 + space-separated-tokens: 2.0.2 + style-to-object: 0.4.4 + unist-util-position: 5.0.0 + zwitch: 2.0.4 + transitivePeerDependencies: + - supports-color + + hast-util-to-html@9.0.3: + dependencies: + "@types/hast": 3.0.4 + "@types/unist": 3.0.3 + ccount: 2.0.1 + comma-separated-tokens: 2.0.3 + hast-util-whitespace: 3.0.0 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.0 + property-information: 6.5.0 + space-separated-tokens: 2.0.2 + stringify-entities: 4.0.4 + zwitch: 2.0.4 + + hast-util-to-jsx-runtime@2.3.0: + dependencies: + "@types/estree": 1.0.6 + "@types/hast": 3.0.4 + "@types/unist": 3.0.3 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + hast-util-whitespace: 3.0.0 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.1.3 + mdast-util-mdxjs-esm: 2.0.1 + property-information: 6.5.0 + space-separated-tokens: 2.0.2 + style-to-object: 1.0.8 + unist-util-position: 5.0.0 + vfile-message: 4.0.2 + transitivePeerDependencies: + - supports-color + + hast-util-whitespace@3.0.0: + dependencies: + "@types/hast": 3.0.4 + + html-void-elements@3.0.0: {} + + human-signals@5.0.0: {} + + husky@9.1.6: {} + + hyperdyperid@1.2.0: {} + + ignore@5.3.2: {} + + imagescript@1.3.0: {} + + import-fresh@3.3.0: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + imurmurhash@0.1.4: {} + + inflection@3.0.0: {} + + inflight@1.0.6: + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + inherits@2.0.4: {} + + inline-style-parser@0.1.1: {} + + inline-style-parser@0.2.4: {} + + internal-slot@1.0.7: + dependencies: + es-errors: 1.3.0 + hasown: 2.0.2 + side-channel: 1.0.6 + + interpret@1.4.0: {} + + 
is-alphabetical@2.0.1: {} + + is-alphanumerical@2.0.1: + dependencies: + is-alphabetical: 2.0.1 + is-decimal: 2.0.1 + + is-arguments@1.1.1: + dependencies: + call-bind: 1.0.7 + has-tostringtag: 1.0.2 + + is-array-buffer@3.0.4: + dependencies: + call-bind: 1.0.7 + get-intrinsic: 1.2.4 + + is-async-function@2.0.0: + dependencies: + has-tostringtag: 1.0.2 + + is-bigint@1.0.4: + dependencies: + has-bigints: 1.0.2 + + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + + is-boolean-object@1.1.2: + dependencies: + call-bind: 1.0.7 + has-tostringtag: 1.0.2 + + is-bun-module@1.2.1: + dependencies: + semver: 7.6.3 + + is-callable@1.2.7: {} + + is-core-module@2.15.1: + dependencies: + hasown: 2.0.2 + + is-data-view@1.0.1: + dependencies: + is-typed-array: 1.1.13 + + is-date-object@1.0.5: + dependencies: + has-tostringtag: 1.0.2 + + is-decimal@2.0.1: {} + + is-extendable@0.1.1: {} + + is-extglob@2.1.1: {} + + is-finalizationregistry@1.0.2: + dependencies: + call-bind: 1.0.7 + + is-fullwidth-code-point@3.0.0: {} + + is-fullwidth-code-point@4.0.0: {} + + is-fullwidth-code-point@5.0.0: + dependencies: + get-east-asian-width: 1.2.0 + + is-generator-function@1.0.10: + dependencies: + has-tostringtag: 1.0.2 + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-hexadecimal@2.0.1: {} + + is-map@2.0.3: {} + + is-negative-zero@2.0.3: {} + + is-number-object@1.0.7: + dependencies: + has-tostringtag: 1.0.2 + + is-number@7.0.0: {} + + is-path-inside@3.0.3: {} + + is-plain-obj@4.1.0: {} + + is-reference@3.0.2: + dependencies: + "@types/estree": 1.0.6 + + is-regex@1.1.4: + dependencies: + call-bind: 1.0.7 + has-tostringtag: 1.0.2 + + is-set@2.0.3: {} + + is-shared-array-buffer@1.0.3: + dependencies: + call-bind: 1.0.7 + + is-stream@3.0.0: {} + + is-string@1.0.7: + dependencies: + has-tostringtag: 1.0.2 + + is-symbol@1.0.4: + dependencies: + has-symbols: 1.0.3 + + is-typed-array@1.1.13: + dependencies: + which-typed-array: 1.1.15 + + is-weakmap@2.0.2: {} + + is-weakref@1.0.2: + dependencies: + call-bind: 1.0.7 + + is-weakset@2.0.3: + dependencies: + call-bind: 1.0.7 + get-intrinsic: 1.2.4 + + isarray@2.0.5: {} + + isexe@2.0.0: {} + + iterator.prototype@1.1.2: + dependencies: + define-properties: 1.2.1 + get-intrinsic: 1.2.4 + has-symbols: 1.0.3 + reflect.getprototypeof: 1.0.6 + set-function-name: 2.0.2 + + jackspeak@2.3.6: + dependencies: + "@isaacs/cliui": 8.0.2 + optionalDependencies: + "@pkgjs/parseargs": 0.11.0 + + jackspeak@3.4.3: + dependencies: + "@isaacs/cliui": 8.0.2 + optionalDependencies: + "@pkgjs/parseargs": 0.11.0 + + jiti@1.21.6: {} + + js-tokens@4.0.0: {} + + js-yaml@3.14.1: + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + jsbi@4.3.0: {} + + json-buffer@3.0.1: {} + + json-schema-traverse@0.4.1: {} + + json-stable-stringify-without-jsonify@1.0.1: {} + + json5@1.0.2: + dependencies: + minimist: 1.2.8 + + jsx-ast-utils@3.3.5: + dependencies: + array-includes: 3.1.8 + array.prototype.flat: 1.3.2 + object.assign: 4.1.5 + object.values: 1.2.0 + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + + kind-of@6.0.3: {} + + language-subtag-registry@0.3.23: {} + + language-tags@1.0.9: + dependencies: + language-subtag-registry: 0.3.23 + + levn@0.4.1: + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + lilconfig@2.1.0: {} + + lilconfig@3.1.2: {} + + lines-and-columns@1.2.4: {} + + lint-staged@15.2.10: + dependencies: + chalk: 5.3.0 + commander: 12.1.0 + debug: 4.3.7 + execa: 8.0.1 + lilconfig: 3.1.2 + listr2: 8.2.4 + 
micromatch: 4.0.8 + pidtree: 0.6.0 + string-argv: 0.3.2 + yaml: 2.5.1 + transitivePeerDependencies: + - supports-color + + listr2@8.2.4: + dependencies: + cli-truncate: 4.0.0 + colorette: 2.0.20 + eventemitter3: 5.0.1 + log-update: 6.1.0 + rfdc: 1.4.1 + wrap-ansi: 9.0.0 + + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + lodash.camelcase@4.3.0: {} + + lodash.merge@4.6.2: {} + + lodash@4.17.21: {} + + log-update@6.1.0: + dependencies: + ansi-escapes: 7.0.0 + cli-cursor: 5.0.0 + slice-ansi: 7.1.0 + strip-ansi: 7.1.0 + wrap-ansi: 9.0.0 + + long@5.2.3: {} + + longest-streak@3.1.0: {} + + loose-envify@1.4.0: + dependencies: + js-tokens: 4.0.0 + + lower-case@2.0.2: + dependencies: + tslib: 2.7.0 + + lru-cache@10.4.3: {} + + markdown-extensions@2.0.0: {} + + mdast-util-from-markdown@2.0.1: + dependencies: + "@types/mdast": 4.0.4 + "@types/unist": 3.0.3 + decode-named-character-reference: 1.0.2 + devlop: 1.1.0 + mdast-util-to-string: 4.0.0 + micromark: 4.0.0 + micromark-util-decode-numeric-character-reference: 2.0.1 + micromark-util-decode-string: 2.0.0 + micromark-util-normalize-identifier: 2.0.0 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + unist-util-stringify-position: 4.0.0 + transitivePeerDependencies: + - supports-color + + mdast-util-frontmatter@2.0.1: + dependencies: + "@types/mdast": 4.0.4 + devlop: 1.1.0 + escape-string-regexp: 5.0.0 + mdast-util-from-markdown: 2.0.1 + mdast-util-to-markdown: 2.1.0 + micromark-extension-frontmatter: 2.0.0 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-expression@2.0.1: + dependencies: + "@types/estree-jsx": 1.0.5 + "@types/hast": 3.0.4 + "@types/mdast": 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.1 + mdast-util-to-markdown: 2.1.0 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-jsx@3.1.3: + dependencies: + "@types/estree-jsx": 1.0.5 + "@types/hast": 3.0.4 + "@types/mdast": 4.0.4 + "@types/unist": 3.0.3 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.1 + mdast-util-to-markdown: 2.1.0 + parse-entities: 4.0.1 + stringify-entities: 4.0.4 + unist-util-stringify-position: 4.0.0 + vfile-message: 4.0.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx@3.0.0: + dependencies: + mdast-util-from-markdown: 2.0.1 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.1.3 + mdast-util-mdxjs-esm: 2.0.1 + mdast-util-to-markdown: 2.1.0 + transitivePeerDependencies: + - supports-color + + mdast-util-mdxjs-esm@2.0.1: + dependencies: + "@types/estree-jsx": 1.0.5 + "@types/hast": 3.0.4 + "@types/mdast": 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.1 + mdast-util-to-markdown: 2.1.0 + transitivePeerDependencies: + - supports-color + + mdast-util-phrasing@4.1.0: + dependencies: + "@types/mdast": 4.0.4 + unist-util-is: 6.0.0 + + mdast-util-to-hast@13.2.0: + dependencies: + "@types/hast": 3.0.4 + "@types/mdast": 4.0.4 + "@ungap/structured-clone": 1.2.0 + devlop: 1.1.0 + micromark-util-sanitize-uri: 2.0.0 + trim-lines: 3.0.1 + unist-util-position: 5.0.0 + unist-util-visit: 5.0.0 + vfile: 6.0.3 + + mdast-util-to-markdown@2.1.0: + dependencies: + "@types/mdast": 4.0.4 + "@types/unist": 3.0.3 + longest-streak: 3.1.0 + mdast-util-phrasing: 4.1.0 + mdast-util-to-string: 4.0.0 + micromark-util-decode-string: 2.0.0 + unist-util-visit: 5.0.0 + zwitch: 2.0.4 + + mdast-util-to-string@4.0.0: + dependencies: + "@types/mdast": 4.0.4 + + mdast@3.0.0: {} + + mdx-bundler@10.0.3(esbuild@0.20.2): + dependencies: + "@babel/runtime": 7.25.6 + "@esbuild-plugins/node-resolve": 
0.2.2(esbuild@0.20.2) + "@fal-works/esbuild-plugin-global-externals": 2.1.2 + "@mdx-js/esbuild": 3.0.1(esbuild@0.20.2) + esbuild: 0.20.2 + gray-matter: 4.0.3 + remark-frontmatter: 5.0.0 + remark-mdx-frontmatter: 4.0.0 + uuid: 9.0.1 + vfile: 6.0.3 + transitivePeerDependencies: + - supports-color + + memfs@4.12.0: + dependencies: + "@jsonjoy.com/json-pack": 1.1.0(tslib@2.7.0) + "@jsonjoy.com/util": 1.3.0(tslib@2.7.0) + tree-dump: 1.0.2(tslib@2.7.0) + tslib: 2.7.0 + + merge-stream@2.0.0: {} + + merge2@1.4.1: {} + + micromark-core-commonmark@2.0.1: + dependencies: + decode-named-character-reference: 1.0.2 + devlop: 1.1.0 + micromark-factory-destination: 2.0.0 + micromark-factory-label: 2.0.0 + micromark-factory-space: 2.0.0 + micromark-factory-title: 2.0.0 + micromark-factory-whitespace: 2.0.0 + micromark-util-character: 2.1.0 + micromark-util-chunked: 2.0.0 + micromark-util-classify-character: 2.0.0 + micromark-util-html-tag-name: 2.0.0 + micromark-util-normalize-identifier: 2.0.0 + micromark-util-resolve-all: 2.0.0 + micromark-util-subtokenize: 2.0.1 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-extension-frontmatter@2.0.0: + dependencies: + fault: 2.0.1 + micromark-util-character: 2.1.0 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-extension-mdx-expression@3.0.0: + dependencies: + "@types/estree": 1.0.6 + devlop: 1.1.0 + micromark-factory-mdx-expression: 2.0.2 + micromark-factory-space: 2.0.0 + micromark-util-character: 2.1.0 + micromark-util-events-to-acorn: 2.0.2 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-extension-mdx-jsx@3.0.1: + dependencies: + "@types/acorn": 4.0.6 + "@types/estree": 1.0.6 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + micromark-factory-mdx-expression: 2.0.2 + micromark-factory-space: 2.0.0 + micromark-util-character: 2.1.0 + micromark-util-events-to-acorn: 2.0.2 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + vfile-message: 4.0.2 + + micromark-extension-mdx-md@2.0.0: + dependencies: + micromark-util-types: 2.0.0 + + micromark-extension-mdxjs-esm@3.0.0: + dependencies: + "@types/estree": 1.0.6 + devlop: 1.1.0 + micromark-core-commonmark: 2.0.1 + micromark-util-character: 2.1.0 + micromark-util-events-to-acorn: 2.0.2 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + unist-util-position-from-estree: 2.0.0 + vfile-message: 4.0.2 + + micromark-extension-mdxjs@3.0.0: + dependencies: + acorn: 8.12.1 + acorn-jsx: 5.3.2(acorn@8.12.1) + micromark-extension-mdx-expression: 3.0.0 + micromark-extension-mdx-jsx: 3.0.1 + micromark-extension-mdx-md: 2.0.0 + micromark-extension-mdxjs-esm: 3.0.0 + micromark-util-combine-extensions: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-factory-destination@2.0.0: + dependencies: + micromark-util-character: 2.1.0 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-factory-label@2.0.0: + dependencies: + devlop: 1.1.0 + micromark-util-character: 2.1.0 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-factory-mdx-expression@2.0.2: + dependencies: + "@types/estree": 1.0.6 + devlop: 1.1.0 + micromark-factory-space: 2.0.0 + micromark-util-character: 2.1.0 + micromark-util-events-to-acorn: 2.0.2 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + unist-util-position-from-estree: 2.0.0 + vfile-message: 4.0.2 + + micromark-factory-space@2.0.0: + dependencies: + micromark-util-character: 2.1.0 + micromark-util-types: 2.0.0 + + micromark-factory-title@2.0.0: + 
dependencies: + micromark-factory-space: 2.0.0 + micromark-util-character: 2.1.0 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-factory-whitespace@2.0.0: + dependencies: + micromark-factory-space: 2.0.0 + micromark-util-character: 2.1.0 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-util-character@2.1.0: + dependencies: + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-util-chunked@2.0.0: + dependencies: + micromark-util-symbol: 2.0.0 + + micromark-util-classify-character@2.0.0: + dependencies: + micromark-util-character: 2.1.0 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-util-combine-extensions@2.0.0: + dependencies: + micromark-util-chunked: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-util-decode-numeric-character-reference@2.0.1: + dependencies: + micromark-util-symbol: 2.0.0 + + micromark-util-decode-string@2.0.0: + dependencies: + decode-named-character-reference: 1.0.2 + micromark-util-character: 2.1.0 + micromark-util-decode-numeric-character-reference: 2.0.1 + micromark-util-symbol: 2.0.0 + + micromark-util-encode@2.0.0: {} + + micromark-util-events-to-acorn@2.0.2: + dependencies: + "@types/acorn": 4.0.6 + "@types/estree": 1.0.6 + "@types/unist": 3.0.3 + devlop: 1.1.0 + estree-util-visit: 2.0.0 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + vfile-message: 4.0.2 + + micromark-util-html-tag-name@2.0.0: {} + + micromark-util-normalize-identifier@2.0.0: + dependencies: + micromark-util-symbol: 2.0.0 + + micromark-util-resolve-all@2.0.0: + dependencies: + micromark-util-types: 2.0.0 + + micromark-util-sanitize-uri@2.0.0: + dependencies: + micromark-util-character: 2.1.0 + micromark-util-encode: 2.0.0 + micromark-util-symbol: 2.0.0 + + micromark-util-subtokenize@2.0.1: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.0 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + + micromark-util-symbol@2.0.0: {} + + micromark-util-types@2.0.0: {} + + micromark@4.0.0: + dependencies: + "@types/debug": 4.1.12 + debug: 4.3.7 + decode-named-character-reference: 1.0.2 + devlop: 1.1.0 + micromark-core-commonmark: 2.0.1 + micromark-factory-space: 2.0.0 + micromark-util-character: 2.1.0 + micromark-util-chunked: 2.0.0 + micromark-util-combine-extensions: 2.0.0 + micromark-util-decode-numeric-character-reference: 2.0.1 + micromark-util-encode: 2.0.0 + micromark-util-normalize-identifier: 2.0.0 + micromark-util-resolve-all: 2.0.0 + micromark-util-sanitize-uri: 2.0.0 + micromark-util-subtokenize: 2.0.1 + micromark-util-symbol: 2.0.0 + micromark-util-types: 2.0.0 + transitivePeerDependencies: + - supports-color + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + mimic-fn@4.0.0: {} + + mimic-function@5.0.1: {} + + min-indent@1.0.1: {} + + minimatch@3.1.2: + dependencies: + brace-expansion: 1.1.11 + + minimatch@9.0.3: + dependencies: + brace-expansion: 2.0.1 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.1 + + minimist@1.2.8: {} + + minipass@3.3.6: + dependencies: + yallist: 4.0.0 + + minipass@5.0.0: {} + + minipass@7.1.2: {} + + minizlib@2.1.2: + dependencies: + minipass: 3.3.6 + yallist: 4.0.0 + + mkdirp@1.0.4: {} + + ms@2.1.3: {} + + mz@2.7.0: + dependencies: + any-promise: 1.3.0 + object-assign: 4.1.1 + thenify-all: 1.6.0 + + nanoid@3.3.7: {} + + natural-compare@1.4.0: {} + + 
next-contentlayer2@0.4.6(contentlayer2@0.4.6(esbuild@0.20.2))(esbuild@0.20.2)(next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react-dom@18.2.0(react@18.2.0))(react@18.2.0): + dependencies: + "@contentlayer2/core": 0.4.3(esbuild@0.20.2) + "@contentlayer2/utils": 0.4.3 + contentlayer2: 0.4.6(esbuild@0.20.2) + next: 14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + transitivePeerDependencies: + - "@effect-ts/otel-node" + - esbuild + - markdown-wasm + - supports-color + + next@14.1.0(@opentelemetry/api@1.9.0)(react-dom@18.2.0(react@18.2.0))(react@18.2.0): + dependencies: + "@next/env": 14.1.0 + "@swc/helpers": 0.5.2 + busboy: 1.6.0 + caniuse-lite: 1.0.30001664 + graceful-fs: 4.2.11 + postcss: 8.4.31 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + styled-jsx: 5.1.1(react@18.2.0) + optionalDependencies: + "@next/swc-darwin-arm64": 14.1.0 + "@next/swc-darwin-x64": 14.1.0 + "@next/swc-linux-arm64-gnu": 14.1.0 + "@next/swc-linux-arm64-musl": 14.1.0 + "@next/swc-linux-x64-gnu": 14.1.0 + "@next/swc-linux-x64-musl": 14.1.0 + "@next/swc-win32-arm64-msvc": 14.1.0 + "@next/swc-win32-ia32-msvc": 14.1.0 + "@next/swc-win32-x64-msvc": 14.1.0 + "@opentelemetry/api": 1.9.0 + transitivePeerDependencies: + - "@babel/core" + - babel-plugin-macros + + no-case@3.0.4: + dependencies: + lower-case: 2.0.2 + tslib: 2.7.0 + + node-fetch@2.7.0: + dependencies: + whatwg-url: 5.0.0 + + node-releases@2.0.18: {} + + normalize-path@3.0.0: {} + + normalize-range@0.1.2: {} + + npm-run-path@5.3.0: + dependencies: + path-key: 4.0.0 + + object-assign@4.1.1: {} + + object-hash@3.0.0: {} + + object-inspect@1.13.2: {} + + object-is@1.1.6: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + + object-keys@1.1.1: {} + + object.assign@4.1.5: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + has-symbols: 1.0.3 + object-keys: 1.1.1 + + object.entries@1.1.8: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-object-atoms: 1.0.0 + + object.fromentries@2.0.8: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-object-atoms: 1.0.0 + + object.groupby@1.0.3: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + + object.values@1.2.0: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-object-atoms: 1.0.0 + + once@1.4.0: + dependencies: + wrappy: 1.0.2 + + onetime@6.0.0: + dependencies: + mimic-fn: 4.0.0 + + onetime@7.0.0: + dependencies: + mimic-function: 5.0.1 + + oo-ascii-tree@1.103.1: {} + + optionator@0.9.4: + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.5 + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + package-json-from-dist@1.0.1: {} + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 + + parse-entities@4.0.1: + dependencies: + "@types/unist": 2.0.11 + character-entities: 2.0.2 + character-entities-legacy: 3.0.0 + character-reference-invalid: 2.0.1 + decode-named-character-reference: 1.0.2 + is-alphanumerical: 2.0.1 + is-decimal: 2.0.1 + is-hexadecimal: 2.0.1 + + pascal-case@3.1.2: + dependencies: + no-case: 3.0.4 + tslib: 2.7.0 + + path-exists@4.0.0: {} + + path-is-absolute@1.0.1: {} + + path-key@3.1.1: {} + + path-key@4.0.0: {} + + path-parse@1.0.7: {} + + path-scurry@1.11.1: + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.2 + + 
path-type@4.0.0: {} + + pend@1.2.0: {} + + periscopic@3.1.0: + dependencies: + "@types/estree": 1.0.6 + estree-walker: 3.0.3 + is-reference: 3.0.2 + + picocolors@1.1.0: {} + + picomatch@2.3.1: {} + + pidtree@0.6.0: {} + + pify@2.3.0: {} + + pirates@4.0.6: {} + + possible-typed-array-names@1.0.0: {} + + postcss-import@15.1.0(postcss@8.4.26): + dependencies: + postcss: 8.4.26 + postcss-value-parser: 4.2.0 + read-cache: 1.0.0 + resolve: 1.22.8 + + postcss-js@4.0.1(postcss@8.4.26): + dependencies: + camelcase-css: 2.0.1 + postcss: 8.4.26 + + postcss-load-config@4.0.2(postcss@8.4.26): + dependencies: + lilconfig: 3.1.2 + yaml: 2.5.1 + optionalDependencies: + postcss: 8.4.26 + + postcss-nested@6.2.0(postcss@8.4.26): + dependencies: + postcss: 8.4.26 + postcss-selector-parser: 6.1.2 + + postcss-selector-parser@6.1.2: + dependencies: + cssesc: 3.0.0 + util-deprecate: 1.0.2 + + postcss-value-parser@4.2.0: {} + + postcss@8.4.26: + dependencies: + nanoid: 3.3.7 + picocolors: 1.1.0 + source-map-js: 1.2.1 + + postcss@8.4.31: + dependencies: + nanoid: 3.3.7 + picocolors: 1.1.0 + source-map-js: 1.2.1 + + prelude-ls@1.2.1: {} + + prettier@3.3.3: {} + + prop-types@15.8.1: + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react-is: 16.13.1 + + property-information@6.5.0: {} + + protobufjs@7.4.0: + dependencies: + "@protobufjs/aspromise": 1.1.2 + "@protobufjs/base64": 1.1.2 + "@protobufjs/codegen": 2.0.4 + "@protobufjs/eventemitter": 1.1.0 + "@protobufjs/fetch": 1.1.0 + "@protobufjs/float": 1.0.2 + "@protobufjs/inquire": 1.1.0 + "@protobufjs/path": 1.1.2 + "@protobufjs/pool": 1.1.0 + "@protobufjs/utf8": 1.1.0 + "@types/node": 20.11.17 + long: 5.2.3 + + punycode@2.3.1: {} + + queue-microtask@1.2.3: {} + + react-dom@18.2.0(react@18.2.0): + dependencies: + loose-envify: 1.4.0 + react: 18.2.0 + scheduler: 0.23.2 + + react-is@16.13.1: {} + + react@18.2.0: + dependencies: + loose-envify: 1.4.0 + + read-cache@1.0.0: + dependencies: + pify: 2.3.0 + + readdirp@3.6.0: + dependencies: + picomatch: 2.3.1 + + rechoir@0.6.2: + dependencies: + resolve: 1.22.8 + + reflect.getprototypeof@1.0.6: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-errors: 1.3.0 + get-intrinsic: 1.2.4 + globalthis: 1.0.4 + which-builtin-type: 1.1.4 + + regenerator-runtime@0.14.1: {} + + regexp.prototype.flags@1.5.2: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-errors: 1.3.0 + set-function-name: 2.0.2 + + rehype-stringify@10.0.0: + dependencies: + "@types/hast": 3.0.4 + hast-util-to-html: 9.0.3 + unified: 11.0.5 + + remark-frontmatter@5.0.0: + dependencies: + "@types/mdast": 4.0.4 + mdast-util-frontmatter: 2.0.1 + micromark-extension-frontmatter: 2.0.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-mdx-frontmatter@4.0.0: + dependencies: + "@types/mdast": 4.0.4 + estree-util-is-identifier-name: 3.0.0 + estree-util-value-to-estree: 3.1.2 + toml: 3.0.0 + unified: 11.0.5 + yaml: 2.5.1 + + remark-mdx@3.0.1: + dependencies: + mdast-util-mdx: 3.0.0 + micromark-extension-mdxjs: 3.0.0 + transitivePeerDependencies: + - supports-color + + remark-parse@11.0.0: + dependencies: + "@types/mdast": 4.0.4 + mdast-util-from-markdown: 2.0.1 + micromark-util-types: 2.0.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-rehype@11.1.1: + dependencies: + "@types/hast": 3.0.4 + "@types/mdast": 4.0.4 + mdast-util-to-hast: 13.2.0 + unified: 11.0.5 + vfile: 6.0.3 + + remark-stringify@11.0.0: + dependencies: + "@types/mdast": 4.0.4 + 
mdast-util-to-markdown: 2.1.0 + unified: 11.0.5 + + remark@15.0.1: + dependencies: + "@types/mdast": 4.0.4 + remark-parse: 11.0.0 + remark-stringify: 11.0.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + repeat-string@1.6.1: {} + + require-directory@2.1.1: {} + + resolve-from@4.0.0: {} + + resolve-pkg-maps@1.0.0: {} + + resolve@1.22.8: + dependencies: + is-core-module: 2.15.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + resolve@2.0.0-next.5: + dependencies: + is-core-module: 2.15.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + restore-cursor@5.1.0: + dependencies: + onetime: 7.0.0 + signal-exit: 4.1.0 + + reusify@1.0.4: {} + + rfdc@1.4.1: {} + + rimraf@3.0.2: + dependencies: + glob: 7.2.3 + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + rxjs@7.8.1: + dependencies: + tslib: 2.7.0 + + safe-array-concat@1.1.2: + dependencies: + call-bind: 1.0.7 + get-intrinsic: 1.2.4 + has-symbols: 1.0.3 + isarray: 2.0.5 + + safe-regex-test@1.0.3: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-regex: 1.1.4 + + scheduler@0.23.2: + dependencies: + loose-envify: 1.4.0 + + section-matter@1.0.0: + dependencies: + extend-shallow: 2.0.1 + kind-of: 6.0.3 + + semver@6.3.1: {} + + semver@7.6.3: {} + + set-function-length@1.2.2: + dependencies: + define-data-property: 1.1.4 + es-errors: 1.3.0 + function-bind: 1.1.2 + get-intrinsic: 1.2.4 + gopd: 1.0.1 + has-property-descriptors: 1.0.2 + + set-function-name@2.0.2: + dependencies: + define-data-property: 1.1.4 + es-errors: 1.3.0 + functions-have-names: 1.2.3 + has-property-descriptors: 1.0.2 + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + shell-quote@1.8.1: {} + + shelljs@0.8.5: + dependencies: + glob: 7.2.3 + interpret: 1.4.0 + rechoir: 0.6.2 + + side-channel@1.0.6: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + get-intrinsic: 1.2.4 + object-inspect: 1.13.2 + + signal-exit@4.1.0: {} + + slash@3.0.0: {} + + slice-ansi@5.0.0: + dependencies: + ansi-styles: 6.2.1 + is-fullwidth-code-point: 4.0.0 + + slice-ansi@7.1.0: + dependencies: + ansi-styles: 6.2.1 + is-fullwidth-code-point: 5.0.0 + + source-map-js@1.2.1: {} + + source-map-support@0.5.21: + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + + source-map@0.6.1: {} + + source-map@0.7.4: {} + + space-separated-tokens@2.0.2: {} + + spawn-command@0.0.2: {} + + sprintf-js@1.0.3: {} + + stop-iteration-iterator@1.0.0: + dependencies: + internal-slot: 1.0.7 + + streamsearch@1.1.0: {} + + string-argv@0.3.2: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.1.0 + + string-width@7.2.0: + dependencies: + emoji-regex: 10.4.0 + get-east-asian-width: 1.2.0 + strip-ansi: 7.1.0 + + string.prototype.includes@2.0.0: + dependencies: + define-properties: 1.2.1 + es-abstract: 1.23.3 + + string.prototype.matchall@4.0.11: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + es-errors: 1.3.0 + es-object-atoms: 1.0.0 + get-intrinsic: 1.2.4 + gopd: 1.0.1 + has-symbols: 1.0.3 + internal-slot: 1.0.7 + regexp.prototype.flags: 1.5.2 + set-function-name: 2.0.2 + side-channel: 1.0.6 + + string.prototype.repeat@1.0.0: + dependencies: + define-properties: 1.2.1 + es-abstract: 1.23.3 + + string.prototype.trim@1.2.9: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-abstract: 1.23.3 + 
es-object-atoms: 1.0.0 + + string.prototype.trimend@1.0.8: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-object-atoms: 1.0.0 + + string.prototype.trimstart@1.0.8: + dependencies: + call-bind: 1.0.7 + define-properties: 1.2.1 + es-object-atoms: 1.0.0 + + stringify-entities@4.0.4: + dependencies: + character-entities-html4: 2.1.0 + character-entities-legacy: 3.0.0 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.1.0: + dependencies: + ansi-regex: 6.1.0 + + strip-bom-string@1.0.0: {} + + strip-bom@3.0.0: {} + + strip-final-newline@3.0.0: {} + + strip-indent@4.0.0: + dependencies: + min-indent: 1.0.1 + + strip-json-comments@3.1.1: {} + + style-to-object@0.4.4: + dependencies: + inline-style-parser: 0.1.1 + + style-to-object@1.0.8: + dependencies: + inline-style-parser: 0.2.4 + + styled-jsx@5.1.1(react@18.2.0): + dependencies: + client-only: 0.0.1 + react: 18.2.0 + + sucrase@3.35.0: + dependencies: + "@jridgewell/gen-mapping": 0.3.5 + commander: 4.1.1 + glob: 10.4.5 + lines-and-columns: 1.2.4 + mz: 2.7.0 + pirates: 4.0.6 + ts-interface-checker: 0.1.13 + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + supports-color@8.1.1: + dependencies: + has-flag: 4.0.0 + + supports-preserve-symlinks-flag@1.0.0: {} + + tailwindcss@3.4.1: + dependencies: + "@alloc/quick-lru": 5.2.0 + arg: 5.0.2 + chokidar: 3.6.0 + didyoumean: 1.2.2 + dlv: 1.1.3 + fast-glob: 3.3.2 + glob-parent: 6.0.2 + is-glob: 4.0.3 + jiti: 1.21.6 + lilconfig: 2.1.0 + micromatch: 4.0.8 + normalize-path: 3.0.0 + object-hash: 3.0.0 + picocolors: 1.1.0 + postcss: 8.4.26 + postcss-import: 15.1.0(postcss@8.4.26) + postcss-js: 4.0.1(postcss@8.4.26) + postcss-load-config: 4.0.2(postcss@8.4.26) + postcss-nested: 6.2.0(postcss@8.4.26) + postcss-selector-parser: 6.1.2 + resolve: 1.22.8 + sucrase: 3.35.0 + transitivePeerDependencies: + - ts-node + + tapable@2.2.1: {} + + tar@6.2.1: + dependencies: + chownr: 2.0.0 + fs-minipass: 2.1.0 + minipass: 5.0.0 + minizlib: 2.1.2 + mkdirp: 1.0.4 + yallist: 4.0.0 + + text-table@0.2.0: {} + + thenify-all@1.6.0: + dependencies: + thenify: 3.3.1 + + thenify@3.3.1: + dependencies: + any-promise: 1.3.0 + + thingies@1.21.0(tslib@2.7.0): + dependencies: + tslib: 2.7.0 + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + toml@3.0.0: {} + + tr46@0.0.3: {} + + tree-dump@1.0.2(tslib@2.7.0): + dependencies: + tslib: 2.7.0 + + tree-kill@1.2.2: {} + + trim-lines@3.0.1: {} + + trough@2.2.0: {} + + ts-api-utils@1.3.0(typescript@5.3.3): + dependencies: + typescript: 5.3.3 + + ts-interface-checker@0.1.13: {} + + ts-pattern@5.4.0: {} + + tsconfig-paths@3.15.0: + dependencies: + "@types/json5": 0.0.29 + json5: 1.0.2 + minimist: 1.2.8 + strip-bom: 3.0.0 + + tslib@2.7.0: {} + + typanion@3.14.0: {} + + type-check@0.4.0: + dependencies: + prelude-ls: 1.2.1 + + type-fest@0.20.2: {} + + type-fest@4.26.1: {} + + typed-array-buffer@1.0.2: + dependencies: + call-bind: 1.0.7 + es-errors: 1.3.0 + is-typed-array: 1.1.13 + + typed-array-byte-length@1.0.1: + dependencies: + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-proto: 1.0.3 + is-typed-array: 1.1.13 + + typed-array-byte-offset@1.0.2: + dependencies: + available-typed-arrays: 1.0.7 + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-proto: 1.0.3 + is-typed-array: 1.1.13 + + typed-array-length@1.0.6: + dependencies: + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-proto: 1.0.3 + is-typed-array: 1.1.13 + possible-typed-array-names: 1.0.0 + + typescript@5.3.3: {} + + unbox-primitive@1.0.2: + 
dependencies: + call-bind: 1.0.7 + has-bigints: 1.0.2 + has-symbols: 1.0.3 + which-boxed-primitive: 1.0.2 + + undici-types@5.26.5: {} + + unified@11.0.5: + dependencies: + "@types/unist": 3.0.3 + bail: 2.0.2 + devlop: 1.1.0 + extend: 3.0.2 + is-plain-obj: 4.1.0 + trough: 2.2.0 + vfile: 6.0.3 + + unist-util-is@6.0.0: + dependencies: + "@types/unist": 3.0.3 + + unist-util-position-from-estree@2.0.0: + dependencies: + "@types/unist": 3.0.3 + + unist-util-position@5.0.0: + dependencies: + "@types/unist": 3.0.3 + + unist-util-stringify-position@4.0.0: + dependencies: + "@types/unist": 3.0.3 + + unist-util-visit-parents@6.0.1: + dependencies: + "@types/unist": 3.0.3 + unist-util-is: 6.0.0 + + unist-util-visit@5.0.0: + dependencies: + "@types/unist": 3.0.3 + unist-util-is: 6.0.0 + unist-util-visit-parents: 6.0.1 + + update-browserslist-db@1.1.0(browserslist@4.24.0): + dependencies: + browserslist: 4.24.0 + escalade: 3.2.0 + picocolors: 1.1.0 + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + util-deprecate@1.0.2: {} + + uuid@9.0.1: {} + + vfile-message@4.0.2: + dependencies: + "@types/unist": 3.0.3 + unist-util-stringify-position: 4.0.0 + + vfile@6.0.3: + dependencies: + "@types/unist": 3.0.3 + vfile-message: 4.0.2 + + webidl-conversions@3.0.1: {} + + whatwg-url@5.0.0: + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + + which-boxed-primitive@1.0.2: + dependencies: + is-bigint: 1.0.4 + is-boolean-object: 1.1.2 + is-number-object: 1.0.7 + is-string: 1.0.7 + is-symbol: 1.0.4 + + which-builtin-type@1.1.4: + dependencies: + function.prototype.name: 1.1.6 + has-tostringtag: 1.0.2 + is-async-function: 2.0.0 + is-date-object: 1.0.5 + is-finalizationregistry: 1.0.2 + is-generator-function: 1.0.10 + is-regex: 1.1.4 + is-weakref: 1.0.2 + isarray: 2.0.5 + which-boxed-primitive: 1.0.2 + which-collection: 1.0.2 + which-typed-array: 1.1.15 + + which-collection@1.0.2: + dependencies: + is-map: 2.0.3 + is-set: 2.0.3 + is-weakmap: 2.0.2 + is-weakset: 2.0.3 + + which-typed-array@1.1.15: + dependencies: + available-typed-arrays: 1.0.7 + call-bind: 1.0.7 + for-each: 0.3.3 + gopd: 1.0.1 + has-tostringtag: 1.0.2 + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + word-wrap@1.2.5: {} + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + + wrap-ansi@9.0.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 7.2.0 + strip-ansi: 7.1.0 + + wrappy@1.0.2: {} + + y18n@5.0.8: {} + + yallist@4.0.0: {} + + yaml@2.5.1: {} + + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + + yauzl@3.1.3: + dependencies: + buffer-crc32: 0.2.13 + pend: 1.2.0 + + yocto-queue@0.1.0: {} + + zod@3.23.8: {} + + zwitch@2.0.4: {} diff --git a/public/assets/courses/movie-review-dapp.png b/public/assets/courses/movie-review-dapp.png new file mode 100644 index 000000000..a770aa715 Binary files /dev/null and b/public/assets/courses/movie-review-dapp.png differ diff --git a/public/assets/courses/movie-review-frontend-dapp.png b/public/assets/courses/movie-review-frontend-dapp.png new file mode 100644 index 000000000..a770aa715 Binary files /dev/null and b/public/assets/courses/movie-review-frontend-dapp.png differ diff --git a/public/assets/courses/student-intros-frontend.png b/public/assets/courses/student-intros-frontend.png new file mode 100644 
index 000000000..e4ec073de Binary files /dev/null and b/public/assets/courses/student-intros-frontend.png differ diff --git a/public/assets/courses/unboxed/anchor-frontend-demo.gif b/public/assets/courses/unboxed/anchor-frontend-demo.gif index ab21143f5..6d07aca3e 100644 Binary files a/public/assets/courses/unboxed/anchor-frontend-demo.gif and b/public/assets/courses/unboxed/anchor-frontend-demo.gif differ diff --git a/public/assets/courses/unboxed/anchor-frontend-increment.png b/public/assets/courses/unboxed/anchor-frontend-increment.png index 429d9495e..4950519a3 100644 Binary files a/public/assets/courses/unboxed/anchor-frontend-increment.png and b/public/assets/courses/unboxed/anchor-frontend-increment.png differ diff --git a/public/assets/courses/unboxed/anchor-frontend-initialize.png b/public/assets/courses/unboxed/anchor-frontend-initialize.png index 4219a46b6..a6ce83918 100644 Binary files a/public/assets/courses/unboxed/anchor-frontend-initialize.png and b/public/assets/courses/unboxed/anchor-frontend-initialize.png differ diff --git a/public/assets/courses/unboxed/basic-solana-mobile-connect.png b/public/assets/courses/unboxed/basic-solana-mobile-connect.png index e54494c06..6f9c61911 100644 Binary files a/public/assets/courses/unboxed/basic-solana-mobile-connect.png and b/public/assets/courses/unboxed/basic-solana-mobile-connect.png differ diff --git a/public/assets/courses/unboxed/basic-solana-mobile-flow.png b/public/assets/courses/unboxed/basic-solana-mobile-flow.png index a030480d6..950cd2c5c 100644 Binary files a/public/assets/courses/unboxed/basic-solana-mobile-flow.png and b/public/assets/courses/unboxed/basic-solana-mobile-flow.png differ diff --git a/public/assets/courses/unboxed/basic-solana-mobile-transact.png b/public/assets/courses/unboxed/basic-solana-mobile-transact.png index ae03da06d..646058a84 100644 Binary files a/public/assets/courses/unboxed/basic-solana-mobile-transact.png and b/public/assets/courses/unboxed/basic-solana-mobile-transact.png differ diff --git a/public/assets/courses/unboxed/burn-tokens.png b/public/assets/courses/unboxed/burn-tokens.png new file mode 100644 index 000000000..1547f718a Binary files /dev/null and b/public/assets/courses/unboxed/burn-tokens.png differ diff --git a/public/assets/courses/unboxed/delegate-token.png b/public/assets/courses/unboxed/delegate-token.png new file mode 100644 index 000000000..b5e954f69 Binary files /dev/null and b/public/assets/courses/unboxed/delegate-token.png differ diff --git a/public/assets/courses/unboxed/image.png b/public/assets/courses/unboxed/image.png new file mode 100644 index 000000000..07f1aa5b6 Binary files /dev/null and b/public/assets/courses/unboxed/image.png differ diff --git a/public/assets/courses/unboxed/revoke-approve-tokens.png b/public/assets/courses/unboxed/revoke-approve-tokens.png new file mode 100644 index 000000000..0cf4f8cf1 Binary files /dev/null and b/public/assets/courses/unboxed/revoke-approve-tokens.png differ diff --git a/public/assets/courses/unboxed/solana-explorer-create-tree.png b/public/assets/courses/unboxed/solana-explorer-create-tree.png new file mode 100644 index 000000000..cfe10dff5 Binary files /dev/null and b/public/assets/courses/unboxed/solana-explorer-create-tree.png differ diff --git a/public/assets/courses/unboxed/solana-explorer-metaplex-collection.png b/public/assets/courses/unboxed/solana-explorer-metaplex-collection.png new file mode 100644 index 000000000..8d7585278 Binary files /dev/null and 
b/public/assets/courses/unboxed/solana-explorer-metaplex-collection.png differ diff --git a/public/assets/courses/unboxed/solana-explorer-metaplex-nft.png b/public/assets/courses/unboxed/solana-explorer-metaplex-nft.png new file mode 100644 index 000000000..adce8c95d Binary files /dev/null and b/public/assets/courses/unboxed/solana-explorer-metaplex-nft.png differ diff --git a/public/assets/courses/unboxed/solana-explorer-showing-cnft-transfer-logs.png b/public/assets/courses/unboxed/solana-explorer-showing-cnft-transfer-logs.png new file mode 100644 index 000000000..2cf699e0b Binary files /dev/null and b/public/assets/courses/unboxed/solana-explorer-showing-cnft-transfer-logs.png differ diff --git a/public/assets/courses/unboxed/solana-explorer-verified-nft.png b/public/assets/courses/unboxed/solana-explorer-verified-nft.png new file mode 100644 index 000000000..0a74f6de8 Binary files /dev/null and b/public/assets/courses/unboxed/solana-explorer-verified-nft.png differ diff --git a/public/assets/courses/unboxed/solana-explorer-with-updated-NFT.png b/public/assets/courses/unboxed/solana-explorer-with-updated-NFT.png new file mode 100644 index 000000000..f1c698117 Binary files /dev/null and b/public/assets/courses/unboxed/solana-explorer-with-updated-NFT.png differ diff --git a/public/assets/docs/compression/canopy-depth-1.png b/public/assets/docs/compression/canopy-depth-1.png new file mode 100644 index 000000000..63bd3e31e Binary files /dev/null and b/public/assets/docs/compression/canopy-depth-1.png differ diff --git a/public/assets/guides/make-a-token/token-with-metadata.png b/public/assets/guides/make-a-token/token-with-metadata.png index 8f59987c0..e9368e889 100644 Binary files a/public/assets/guides/make-a-token/token-with-metadata.png and b/public/assets/guides/make-a-token/token-with-metadata.png differ diff --git a/redirects.json b/redirects.json index a480657df..12e411839 100644 --- a/redirects.json +++ b/redirects.json @@ -1,5 +1,17 @@ { "common-docs": [ + { + "source": "/install", + "destination": "/docs/intro/installation" + }, + { + "source": "/cli/install", + "destination": "/docs/intro/installation" + }, + { + "source": "/cli/install-solana-cli-tools", + "destination": "/docs/intro/installation" + }, { "source": "/apps", "destination": "/developers" }, { "source": "/developing/programming-model/overview", @@ -138,7 +150,7 @@ "source": "/getstarted/hello-world" }, { - "destination": "docs/intro/installation", + "destination": "/docs/intro/installation", "source": "/getstarted/local" }, { @@ -207,10 +219,6 @@ "source": "/implemented-proposals", "destination": "/implemented-proposals" }, - { - "source": "/cli/install-solana-cli-tools", - "destination": "/cli/install" - }, { "source": "/cli/conventions", "destination": "/cli/intro" }, { "source": "/cli/choose-a-cluster", diff --git a/src/components/HomeComponent.tsx b/src/components/HomeComponent.tsx index a2178cd80..61e57d7c4 100644 --- a/src/components/HomeComponent.tsx +++ b/src/components/HomeComponent.tsx @@ -116,7 +116,7 @@ export const HomeViewComponent = () => { , which contains all the information specific to that validator client's implementation and inner workings. 
Including{" "} - + installing the Solana CLI tool-suite , this{" "}
diff --git a/src/utils/code-import.ts b/src/utils/code-import.ts
new file mode 100644
index 000000000..b31e0277a
--- /dev/null
+++ b/src/utils/code-import.ts
@@ -0,0 +1,169 @@
+// remark-code-import
+// code-import.ts
+// https://github.com/kevin940726/remark-code-import
+import { readFile, stat } from "node:fs/promises";
+import path from "node:path";
+import { EOL } from "node:os";
+import { visit } from "unist-util-visit";
+import stripIndent from "strip-indent";
+import type { Root, Code, Parent } from "mdast";
+import type { VFile } from "vfile";
+
+interface CodeImportOptions {
+  async?: boolean;
+  preserveTrailingNewline?: boolean;
+  removeRedundantIndentations?: boolean;
+  rootDir?: string;
+  allowImportingFromOutside?: boolean;
+}
+
+interface LineRange {
+  from: number;
+  to: number;
+}
+
+function parseLineRanges(rangeString: string): LineRange[] {
+  const rangeRegex = /#L(\d+)(?:-L?(\d+))?/g;
+  const ranges: LineRange[] = [];
+  let match;
+
+  while ((match = rangeRegex.exec(rangeString)) !== null) {
+    const [, from, to] = match;
+    const fromLine = parseInt(from, 10);
+    const toLine = to ? parseInt(to, 10) : fromLine;
+
+    if (fromLine === 0 || toLine === 0) {
+      throw new Error(
+        `Invalid line number: Line numbers must be positive integers`,
+      );
+    }
+
+    if (fromLine > toLine) {
+      throw new Error(
+        `Invalid range: L${fromLine}-L${toLine}. 'from' should be less than or equal to 'to'`,
+      );
+    }
+
+    ranges.push({ from: fromLine, to: toLine });
+  }
+
+  // Sort ranges and check for overlaps
+  ranges.sort((a, b) => a.from - b.from);
+  for (let i = 1; i < ranges.length; i++) {
+    if (ranges[i].from <= ranges[i - 1].to) {
+      throw new Error(`Overlapping or out-of-order ranges are not allowed`);
+    }
+  }
+
+  return ranges;
+}
+
+function extractLines(
+  content: string,
+  ranges: LineRange[],
+  preserveTrailingNewline = false,
+): string {
+  const lines = content.split(EOL);
+  let result: string[] = [];
+
+  for (const range of ranges) {
+    if (range.to > lines.length) {
+      throw new Error(
+        `Line range exceeds file length of ${lines.length} lines`,
+      );
+    }
+    result = result.concat(lines.slice(range.from - 1, range.to));
+  }
+
+  let finalResult = result.join("\n");
+  if (
+    preserveTrailingNewline &&
+    content.endsWith("\n") &&
+    !finalResult.endsWith("\n")
+  ) {
+    finalResult += "\n";
+  }
+
+  return finalResult;
+}
+
+function importCode(options: CodeImportOptions = {}) {
+  const rootDir = options.rootDir || process.cwd();
+
+  if (!path.isAbsolute(rootDir)) {
+    throw new Error(`"rootDir" has to be an absolute path`);
+  }
+
+  return async function transform(tree: Root, file: VFile) {
+    const codes: [Code, number | null, Parent][] = [];
+
+    visit(tree, "code", (node, index, parent) => {
+      codes.push([node as Code, index as null | number, parent as Parent]);
+    });
+
+    for (const [node] of codes) {
+      const fileMeta = (node.meta || "")
+        .split(/(?<!\\) /g)
+        .find(meta => meta.startsWith("file="));
+
+      if (!fileMeta) {
+        continue;
+      }
+
+      const res = /^file=(["'])?(\/.+?)\1?(#.+)?$/.exec(fileMeta);
+
+      if (!res) {
+        throw new Error(
+          `Unable to parse file path ${fileMeta}. File path must start with a forward slash (/)`,
+        );
+      }
+
+      const [, , filePath, rangeString = ""] = res;
+
+      // Resolve the path relative to rootDir
+      const normalizedFilePath = path.join(rootDir, filePath.slice(1));
+      const fileAbsPath = path.resolve(normalizedFilePath);
+
+      try {
+        // Check if the path is a directory
+        const stats = await stat(fileAbsPath);
+        if (stats.isDirectory()) {
+          throw new Error(`Path is a directory, not a file`);
+        }
+
+        if (!options.allowImportingFromOutside) {
+          const relativePathFromRootDir = path.relative(rootDir, fileAbsPath);
+          if (
+            relativePathFromRootDir.startsWith(`..${path.sep}`) ||
+            path.isAbsolute(relativePathFromRootDir)
+          ) {
+            throw new Error(
+              `Attempted to import code from "${fileAbsPath}", which is outside from the rootDir "${rootDir}"`,
+            );
+          }
+        }
+
+        const ranges = rangeString
+          ? parseLineRanges(rangeString)
+          : [{ from: 1, to: Infinity }];
+
+        const fileContent = await readFile(fileAbsPath, "utf8");
+        node.value = extractLines(
+          fileContent,
+          ranges,
+          options.preserveTrailingNewline,
+        );
+        if (options.removeRedundantIndentations) {
+          node.value = stripIndent(node.value);
+        }
+      } catch (error) {
+        throw new Error(
+          `Error processing ${fileAbsPath}: ${(error as Error).message}`,
+        );
+      }
+    }
+  };
+}
+
+export { importCode };
+export default importCode;
diff --git a/tsconfig.json b/tsconfig.json
index 26231d6e6..5f9e81ca6 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,6 +1,6 @@
 {
   "compilerOptions": {
-    "target": "es5",
+    "target": "ESNext",
     "lib": ["dom", "dom.iterable", "esnext"],
     "skipLibCheck": true,
     "allowJs": true,
@@ -33,5 +33,5 @@
     "**/*.tsx",
     ".next/types/**/*.ts"
   ],
-  "exclude": ["node_modules"]
+  "exclude": ["node_modules", "code"]
 }
diff --git a/yarn.lock b/yarn.lock
deleted file mode 100644
index e719e6d8d..000000000
--- a/yarn.lock
+++ /dev/null
@@ -1,4822 +0,0 @@
-# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
-# yarn lockfile v1 - - -"@aashutoshrathi/word-wrap@^1.2.3": - version "1.2.6" - resolved "https://registry.yarnpkg.com/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf" - integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA== - -"@alloc/quick-lru@^5.2.0": - version "5.2.0" - resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30" - integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw== - -"@babel/runtime@^7.23.2": - version "7.23.9" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.9.tgz#47791a15e4603bb5f905bc0753801cf21d6345f7" - integrity sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw== - dependencies: - regenerator-runtime "^0.14.0" - -"@contentlayer2/cli@0.4.3": - version "0.4.3" - resolved "https://registry.yarnpkg.com/@contentlayer2/cli/-/cli-0.4.3.tgz#5a92ba7d4161cff2c180e2029f0b8f79275dde5f" - integrity sha512-ZJ+Iiu2rVI50x60XoqnrsO/Q8eqFX5AlP1L0U/3ygaAas3tnOqTzQZ1UsxYQMpJzcLok24ddlhKfQKbCMUJPiQ== - dependencies: - "@contentlayer2/core" "0.4.3" - "@contentlayer2/utils" "0.4.3" - clipanion "^3.2.1" - typanion "^3.12.1" - -"@contentlayer2/client@0.4.3": - version "0.4.3" - resolved "https://registry.yarnpkg.com/@contentlayer2/client/-/client-0.4.3.tgz#efdb1df4466bb450f7de4aa1df9da7842f0ebe2b" - integrity sha512-Znz6zW0ShVRq3OCQ6SwsfPQ1Wn28dQpiL33r53OjWuiirXhaSdam4MXRjYSFzCq5JK0kY6Hx0CF0fk3oVbsiVQ== - dependencies: - "@contentlayer2/core" "0.4.3" - -"@contentlayer2/core@0.4.3": - version "0.4.3" - resolved "https://registry.yarnpkg.com/@contentlayer2/core/-/core-0.4.3.tgz#bee82b0df47d92f83af839aa29594f218cbdb939" - integrity sha512-jNagCFHLB8u/qfjoAI+JUuz3jf2hxYFYusgfh+oSq9+jHala4bbLEJOhJmmaY1Dmv2AV6BI7W8Jjy6WIu5g5Yg== - dependencies: - "@contentlayer2/utils" "0.4.3" - camel-case "^4.1.2" - comment-json "^4.2.3" - esbuild "0.17.x || 0.18.x || 0.19.x || 0.20.x" - gray-matter "^4.0.3" - mdx-bundler "^10.0.2" - rehype-stringify "^10.0.0" - remark-frontmatter "^5.0.0" - remark-parse "^11.0.0" - remark-rehype "^11.1.0" - source-map-support "^0.5.21" - type-fest "^4.10.0" - unified "^11.0.4" - -"@contentlayer2/source-files@0.4.3": - version "0.4.3" - resolved "https://registry.yarnpkg.com/@contentlayer2/source-files/-/source-files-0.4.3.tgz#391f9740ff36bd88f0b42f8fc510df53ea9f9a8a" - integrity sha512-NaLkK9iJcjS7xdqbtEJElEXhdEXv47yj4Ju2TCiZU7/8veQQ67QwtTDesyRMSw4mBt5+hxiAcqvf0hKaHh6lxw== - dependencies: - "@contentlayer2/core" "0.4.3" - "@contentlayer2/utils" "0.4.3" - chokidar "^3.5.3" - fast-glob "^3.2.12" - gray-matter "^4.0.3" - imagescript "^1.2.16" - micromatch "^4.0.5" - ts-pattern "^5.0.6" - unified "^11.0.4" - yaml "^2.3.1" - zod "^3.22.4" - -"@contentlayer2/source-remote-files@0.4.3": - version "0.4.3" - resolved "https://registry.yarnpkg.com/@contentlayer2/source-remote-files/-/source-remote-files-0.4.3.tgz#eedf44366c5a34833bf48399462e5f624b2aabd1" - integrity sha512-uow1teWiAJA0WNbqqjEz4FfLPQF6BpdhRfF8JHgX7r8jzqChB5zS8bXX9xJDGhn41oEqR/FDDI1Pn7564H8lsA== - dependencies: - "@contentlayer2/core" "0.4.3" - "@contentlayer2/source-files" "0.4.3" - "@contentlayer2/utils" "0.4.3" - -"@contentlayer2/utils@0.4.3": - version "0.4.3" - resolved "https://registry.yarnpkg.com/@contentlayer2/utils/-/utils-0.4.3.tgz#84089f68c3573a18c8639cc4f8a8d70f60a95141" - integrity 
sha512-E360wNhvGemU1X+uhIwkzA5v+b8U5Q+Gps50uIDAVW6TWV9boUgAGPspSXXcFAIYG+H4z2eV3jTZjFCwyXGMxg== - dependencies: - "@effect-ts/core" "^0.60.5" - "@effect-ts/otel" "^0.15.1" - "@effect-ts/otel-sdk-trace-node" "^0.15.1" - "@js-temporal/polyfill" "^0.4.4" - "@opentelemetry/api" "^1.8.0" - "@opentelemetry/core" "^1.24.0" - "@opentelemetry/exporter-trace-otlp-grpc" "^0.51.0" - "@opentelemetry/resources" "^1.21.0" - "@opentelemetry/sdk-trace-base" "^1.21.0" - "@opentelemetry/sdk-trace-node" "^1.21.0" - "@opentelemetry/semantic-conventions" "^1.21.0" - chokidar "^3.5.3" - hash-wasm "^4.11.0" - inflection "^3.0.0" - memfs "^4.8.2" - oo-ascii-tree "^1.94.0" - ts-pattern "^5.0.6" - type-fest "^4.10.0" - -"@crowdin/cli@^3.18.0": - version "3.18.0" - resolved "https://registry.yarnpkg.com/@crowdin/cli/-/cli-3.18.0.tgz#17ff3fe7a43f7a31c3cde24ecbafacf4a5c3aad1" - integrity sha512-W4lvXhnAfGWxFW4L6n6tNIaXxuHO8vzRBQmbOz6OqLA7EgGLvYih0L9zeQpt9KNda66lC/iKqnA52ezTqPAEgg== - dependencies: - command-exists-promise "^2.0.2" - node-fetch "2.6.7" - shelljs "^0.8.4" - tar "^4.4.8" - yauzl "^2.10.0" - -"@effect-ts/core@^0.60.5": - version "0.60.5" - resolved "https://registry.yarnpkg.com/@effect-ts/core/-/core-0.60.5.tgz#df79049e1be4a576ab6b45abbe92c831bda62361" - integrity sha512-qi1WrtJA90XLMnj2hnUszW9Sx4dXP03ZJtCc5DiUBIOhF4Vw7plfb65/bdBySPoC9s7zy995TdUX1XBSxUkl5w== - dependencies: - "@effect-ts/system" "^0.57.5" - -"@effect-ts/otel-sdk-trace-node@^0.15.1": - version "0.15.1" - resolved "https://registry.yarnpkg.com/@effect-ts/otel-sdk-trace-node/-/otel-sdk-trace-node-0.15.1.tgz#d425cc51f82da208de702b4dec4434c7bb78b512" - integrity sha512-a2sF0ylmn8xOJs8fNeT/spJ1gUcsksAJCALxo9WOfuTCMtTwMVtVhCKEPEeQoL7wFqU+JgPkVdP91+FJ/Rkeow== - dependencies: - "@effect-ts/otel" "^0.15.1" - -"@effect-ts/otel@^0.15.1": - version "0.15.1" - resolved "https://registry.yarnpkg.com/@effect-ts/otel/-/otel-0.15.1.tgz#c5466ed96229c9cda40978f7726086a1b390359e" - integrity sha512-AmZJHl7t0+Peh7Yb2+hqn6r9+rd9/UfeA4AMV9h0YGTdOyouyFfD3wzWlxnAUzAQ4Lrod4kC7Noruret4EpqpA== - -"@effect-ts/system@^0.57.5": - version "0.57.5" - resolved "https://registry.yarnpkg.com/@effect-ts/system/-/system-0.57.5.tgz#921e9b39dcea2d1728e0f49a0af233472efdc6cb" - integrity sha512-/crHGujo0xnuHIYNc1VgP0HGJGFSoSqq88JFXe6FmFyXPpWt8Xu39LyLg7rchsxfXFeEdA9CrIZvLV5eswXV5g== - -"@esbuild-plugins/node-resolve@^0.2.2": - version "0.2.2" - resolved "https://registry.yarnpkg.com/@esbuild-plugins/node-resolve/-/node-resolve-0.2.2.tgz#4f1b8d265a1b6e8b2438a03770239277687f0c17" - integrity sha512-+t5FdX3ATQlb53UFDBRb4nqjYBz492bIrnVWvpQHpzZlu9BQL5HasMZhqc409ygUwOWCXZhrWr6NyZ6T6Y+cxw== - dependencies: - "@types/resolve" "^1.17.1" - debug "^4.3.1" - escape-string-regexp "^4.0.0" - resolve "^1.19.0" - -"@esbuild/aix-ppc64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz#a70f4ac11c6a1dfc18b8bbb13284155d933b9537" - integrity sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g== - -"@esbuild/android-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz#db1c9202a5bc92ea04c7b6840f1bbe09ebf9e6b9" - integrity sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg== - -"@esbuild/android-arm@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.20.2.tgz#3b488c49aee9d491c2c8f98a909b785870d6e995" - integrity 
[remainder of yarn.lock deletion truncated: several hundred auto-generated lockfile stanzas (version/resolved/integrity entries for @esbuild, @eslint, @grpc, @jridgewell, @mdx-js, @next, @opentelemetry, @protobufjs, @types, @typescript-eslint, and other transitive dependencies) removed along with the rest of yarn.lock as part of the yarn → pnpm migration]
version "2.29.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz#d45b37b5ef5901d639c15270d74d46d161150643" - integrity sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw== - dependencies: - array-includes "^3.1.7" - array.prototype.findlastindex "^1.2.3" - array.prototype.flat "^1.3.2" - array.prototype.flatmap "^1.3.2" - debug "^3.2.7" - doctrine "^2.1.0" - eslint-import-resolver-node "^0.3.9" - eslint-module-utils "^2.8.0" - hasown "^2.0.0" - is-core-module "^2.13.1" - is-glob "^4.0.3" - minimatch "^3.1.2" - object.fromentries "^2.0.7" - object.groupby "^1.0.1" - object.values "^1.1.7" - semver "^6.3.1" - tsconfig-paths "^3.15.0" - -eslint-plugin-jsx-a11y@^6.7.1: - version "6.8.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.8.0.tgz#2fa9c701d44fcd722b7c771ec322432857fcbad2" - integrity sha512-Hdh937BS3KdwwbBaKd5+PLCOmYY6U4f2h9Z2ktwtNKvIdIEu137rjYbcb9ApSbVJfWxANNuiKTD/9tOKjK9qOA== - dependencies: - "@babel/runtime" "^7.23.2" - aria-query "^5.3.0" - array-includes "^3.1.7" - array.prototype.flatmap "^1.3.2" - ast-types-flow "^0.0.8" - axe-core "=4.7.0" - axobject-query "^3.2.1" - damerau-levenshtein "^1.0.8" - emoji-regex "^9.2.2" - es-iterator-helpers "^1.0.15" - hasown "^2.0.0" - jsx-ast-utils "^3.3.5" - language-tags "^1.0.9" - minimatch "^3.1.2" - object.entries "^1.1.7" - object.fromentries "^2.0.7" - -"eslint-plugin-react-hooks@^4.5.0 || 5.0.0-canary-7118f5dd7-20230705": - version "4.6.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" - integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== - -eslint-plugin-react@^7.33.2: - version "7.33.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.33.2.tgz#69ee09443ffc583927eafe86ffebb470ee737608" - integrity sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw== - dependencies: - array-includes "^3.1.6" - array.prototype.flatmap "^1.3.1" - array.prototype.tosorted "^1.1.1" - doctrine "^2.1.0" - es-iterator-helpers "^1.0.12" - estraverse "^5.3.0" - jsx-ast-utils "^2.4.1 || ^3.0.0" - minimatch "^3.1.2" - object.entries "^1.1.6" - object.fromentries "^2.0.6" - object.hasown "^1.1.2" - object.values "^1.1.6" - prop-types "^15.8.1" - resolve "^2.0.0-next.4" - semver "^6.3.1" - string.prototype.matchall "^4.0.8" - -eslint-scope@^7.2.0: - version "7.2.2" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" - integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== - dependencies: - esrecurse "^4.3.0" - estraverse "^5.2.0" - -eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1: - version "3.4.3" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" - integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== - -eslint@8.45.0: - version "8.45.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.45.0.tgz#bab660f90d18e1364352c0a6b7c6db8edb458b78" - integrity sha512-pd8KSxiQpdYRfYa9Wufvdoct3ZPQQuVuU5O6scNgMuOMYuxvH0IGaYK0wUFjo4UYYQQCUndlXiMbnxopwvvTiw== - dependencies: - "@eslint-community/eslint-utils" "^4.2.0" - "@eslint-community/regexpp" 
"^4.4.0" - "@eslint/eslintrc" "^2.1.0" - "@eslint/js" "8.44.0" - "@humanwhocodes/config-array" "^0.11.10" - "@humanwhocodes/module-importer" "^1.0.1" - "@nodelib/fs.walk" "^1.2.8" - ajv "^6.10.0" - chalk "^4.0.0" - cross-spawn "^7.0.2" - debug "^4.3.2" - doctrine "^3.0.0" - escape-string-regexp "^4.0.0" - eslint-scope "^7.2.0" - eslint-visitor-keys "^3.4.1" - espree "^9.6.0" - esquery "^1.4.2" - esutils "^2.0.2" - fast-deep-equal "^3.1.3" - file-entry-cache "^6.0.1" - find-up "^5.0.0" - glob-parent "^6.0.2" - globals "^13.19.0" - graphemer "^1.4.0" - ignore "^5.2.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - is-path-inside "^3.0.3" - js-yaml "^4.1.0" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.4.1" - lodash.merge "^4.6.2" - minimatch "^3.1.2" - natural-compare "^1.4.0" - optionator "^0.9.3" - strip-ansi "^6.0.1" - text-table "^0.2.0" - -espree@^9.6.0: - version "9.6.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" - integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== - dependencies: - acorn "^8.9.0" - acorn-jsx "^5.3.2" - eslint-visitor-keys "^3.4.1" - -esprima@^4.0.0, esprima@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.4.2: - version "1.5.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" - integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -estree-util-attach-comments@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/estree-util-attach-comments/-/estree-util-attach-comments-3.0.0.tgz#344bde6a64c8a31d15231e5ee9e297566a691c2d" - integrity sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw== - dependencies: - "@types/estree" "^1.0.0" - -estree-util-build-jsx@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/estree-util-build-jsx/-/estree-util-build-jsx-3.0.1.tgz#b6d0bced1dcc4f06f25cf0ceda2b2dcaf98168f1" - integrity sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ== - dependencies: - "@types/estree-jsx" "^1.0.0" - devlop "^1.0.0" - estree-util-is-identifier-name "^3.0.0" - estree-walker "^3.0.0" - -estree-util-is-identifier-name@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz#0b5ef4c4ff13508b34dcd01ecfa945f61fce5dbd" - integrity sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg== - -estree-util-to-js@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/estree-util-to-js/-/estree-util-to-js-2.0.0.tgz#10a6fb924814e6abb62becf0d2bc4dea51d04f17" - integrity sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg== - dependencies: - "@types/estree-jsx" "^1.0.0" - astring "^1.8.0" - source-map "^0.7.0" - -estree-util-value-to-estree@^3.0.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/estree-util-value-to-estree/-/estree-util-value-to-estree-3.1.2.tgz#d2f0e5d350a6c181673eb7299743325b86a9bf5c" - integrity sha512-S0gW2+XZkmsx00tU2uJ4L9hUT7IFabbml9pHh2WQqFmAbxit++YGZne0sKJbNwkj9Wvg9E4uqWl4nCIFQMmfag== - dependencies: - "@types/estree" "^1.0.0" - -estree-util-visit@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/estree-util-visit/-/estree-util-visit-2.0.0.tgz#13a9a9f40ff50ed0c022f831ddf4b58d05446feb" - integrity sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww== - dependencies: - "@types/estree-jsx" "^1.0.0" - "@types/unist" "^3.0.0" - -estree-walker@^3.0.0: - version "3.0.3" - resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-3.0.3.tgz#67c3e549ec402a487b4fc193d1953a524752340d" - integrity sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g== - dependencies: - "@types/estree" "^1.0.0" - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -extend-shallow@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" - integrity sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug== - dependencies: - is-extendable "^0.1.0" - -extend@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" - integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== - -fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-glob@^3.2.12, fast-glob@^3.2.9, fast-glob@^3.3.0, fast-glob@^3.3.1: - version "3.3.2" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.2.tgz#a904501e57cfdd2ffcded45e99a54fef55e46129" - integrity sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fast-json-stable-stringify@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== - -fastq@^1.6.0: - 
version "1.17.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.17.0.tgz#ca5e1a90b5e68f97fc8b61330d5819b82f5fab03" - integrity sha512-zGygtijUMT7jnk3h26kUms3BkSDp4IfIKjmnqI2tvx6nuBfiF1UqOxbnLfzdv+apBy+53oaImsKtMw/xYbW+1w== - dependencies: - reusify "^1.0.4" - -fault@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/fault/-/fault-2.0.1.tgz#d47ca9f37ca26e4bd38374a7c500b5a384755b6c" - integrity sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ== - dependencies: - format "^0.2.0" - -fd-slicer@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" - integrity sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g== - dependencies: - pend "~1.2.0" - -file-entry-cache@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" - integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== - dependencies: - flat-cache "^3.0.4" - -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -find-up@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" - integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== - dependencies: - locate-path "^6.0.0" - path-exists "^4.0.0" - -flat-cache@^3.0.4: - version "3.2.0" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.2.0.tgz#2c0c2d5040c99b1632771a9d105725c0115363ee" - integrity sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw== - dependencies: - flatted "^3.2.9" - keyv "^4.5.3" - rimraf "^3.0.2" - -flatted@^3.2.9: - version "3.2.9" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.9.tgz#7eb4c67ca1ba34232ca9d2d93e9886e611ad7daf" - integrity sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ== - -for-each@^0.3.3: - version "0.3.3" - resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" - integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== - dependencies: - is-callable "^1.1.3" - -foreground-child@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.1.1.tgz#1d173e776d75d2772fed08efe4a0de1ea1b12d0d" - integrity sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg== - dependencies: - cross-spawn "^7.0.0" - signal-exit "^4.0.1" - -format@^0.2.0: - version "0.2.2" - resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b" - integrity sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww== - -fraction.js@^4.2.0: - version "4.3.7" - resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7" - integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew== - 
-fs-minipass@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" - integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== - dependencies: - minipass "^2.6.0" - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" - integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== - -function-bind@^1.1.1, function-bind@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" - integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== - -function.prototype.name@^1.1.5, function.prototype.name@^1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.6.tgz#cdf315b7d90ee77a4c6ee216c3c3362da07533fd" - integrity sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - functions-have-names "^1.2.3" - -functions-have-names@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" - integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== - -get-caller-file@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.0, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.2.tgz#281b7622971123e1ef4b3c90fd7539306da93f3b" - integrity sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA== - dependencies: - function-bind "^1.1.2" - has-proto "^1.0.1" - has-symbols "^1.0.3" - hasown "^2.0.0" - -get-symbol-description@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" - integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.1" - -get-tsconfig@^4.5.0: - version "4.7.2" - resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.7.2.tgz#0dcd6fb330391d46332f4c6c1bf89a6514c2ddce" - integrity sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A== - dependencies: - resolve-pkg-maps "^1.0.0" - -glob-parent@^5.1.2, glob-parent@~5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity 
sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob-parent@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" - integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== - dependencies: - is-glob "^4.0.3" - -glob@10.3.10, glob@^10.3.10: - version "10.3.10" - resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.10.tgz#0351ebb809fd187fe421ab96af83d3a70715df4b" - integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g== - dependencies: - foreground-child "^3.1.0" - jackspeak "^2.3.5" - minimatch "^9.0.1" - minipass "^5.0.0 || ^6.0.2 || ^7.0.0" - path-scurry "^1.10.1" - -glob@^7.0.0, glob@^7.1.3: - version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -globals@^13.19.0: - version "13.24.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.24.0.tgz#8432a19d78ce0c1e833949c36adb345400bb1171" - integrity sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ== - dependencies: - type-fest "^0.20.2" - -globalthis@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" - integrity sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== - dependencies: - define-properties "^1.1.3" - -globby@^11.1.0: - version "11.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" - integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.2.9" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^3.0.0" - -gopd@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" - integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== - dependencies: - get-intrinsic "^1.1.3" - -graceful-fs@^4.2.11, graceful-fs@^4.2.4: - version "4.2.11" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" - integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== - -graphemer@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" - integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== - -gray-matter@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/gray-matter/-/gray-matter-4.0.3.tgz#e893c064825de73ea1f5f7d88c7a9f7274288798" - integrity sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q== - dependencies: - js-yaml "^3.13.1" - kind-of "^6.0.2" - section-matter "^1.0.0" - strip-bom-string "^1.0.0" - -has-bigints@^1.0.1, has-bigints@^1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" - integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has-own-prop@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/has-own-prop/-/has-own-prop-2.0.0.tgz#f0f95d58f65804f5d218db32563bb85b8e0417af" - integrity sha512-Pq0h+hvsVm6dDEa8x82GnLSYHOzNDt7f0ddFa3FqcQlgzEiptPqL+XrOJNavjOzSYiYWIrgeVYYgGlLmnxwilQ== - -has-property-descriptors@^1.0.0, has-property-descriptors@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz#52ba30b6c5ec87fd89fa574bc1c39125c6f65340" - integrity sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg== - dependencies: - get-intrinsic "^1.2.2" - -has-proto@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" - integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== - -has-symbols@^1.0.2, has-symbols@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" - integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== - -has-tostringtag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" - integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== - dependencies: - has-symbols "^1.0.2" - -hash-wasm@^4.11.0: - version "4.11.0" - resolved "https://registry.yarnpkg.com/hash-wasm/-/hash-wasm-4.11.0.tgz#7d1479b114c82e48498fdb1d2462a687d00386d5" - integrity sha512-HVusNXlVqHe0fzIzdQOGolnFN6mX/fqcrSAOcTBXdvzrXVHwTz11vXeKRmkR5gTuwVpvHZEIyKoePDvuAR+XwQ== - -hasown@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c" - integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA== - dependencies: - function-bind "^1.1.2" - -hast-util-from-parse5@^8.0.0: - version "8.0.1" - resolved "https://registry.yarnpkg.com/hast-util-from-parse5/-/hast-util-from-parse5-8.0.1.tgz#654a5676a41211e14ee80d1b1758c399a0327651" - integrity sha512-Er/Iixbc7IEa7r/XLtuG52zoqn/b3Xng/w6aZQ0xGVxzhw5xUFxcRqdPzP6yFi/4HBYRaifaI5fQ1RH8n0ZeOQ== - dependencies: - "@types/hast" "^3.0.0" - "@types/unist" "^3.0.0" - devlop "^1.0.0" - hastscript "^8.0.0" - property-information "^6.0.0" - vfile "^6.0.0" - vfile-location "^5.0.0" - web-namespaces "^2.0.0" - -hast-util-parse-selector@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz#352879fa86e25616036037dd8931fb5f34cb4a27" - integrity sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A== - dependencies: - "@types/hast" "^3.0.0" - -hast-util-raw@^9.0.0: - version "9.0.4" - resolved 
"https://registry.yarnpkg.com/hast-util-raw/-/hast-util-raw-9.0.4.tgz#2da03e37c46eb1a6f1391f02f9b84ae65818f7ed" - integrity sha512-LHE65TD2YiNsHD3YuXcKPHXPLuYh/gjp12mOfU8jxSrm1f/yJpsb0F/KKljS6U9LJoP0Ux+tCe8iJ2AsPzTdgA== - dependencies: - "@types/hast" "^3.0.0" - "@types/unist" "^3.0.0" - "@ungap/structured-clone" "^1.0.0" - hast-util-from-parse5 "^8.0.0" - hast-util-to-parse5 "^8.0.0" - html-void-elements "^3.0.0" - mdast-util-to-hast "^13.0.0" - parse5 "^7.0.0" - unist-util-position "^5.0.0" - unist-util-visit "^5.0.0" - vfile "^6.0.0" - web-namespaces "^2.0.0" - zwitch "^2.0.0" - -hast-util-to-estree@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/hast-util-to-estree/-/hast-util-to-estree-3.1.0.tgz#f2afe5e869ddf0cf690c75f9fc699f3180b51b19" - integrity sha512-lfX5g6hqVh9kjS/B9E2gSkvHH4SZNiQFiqWS0x9fENzEl+8W12RqdRxX6d/Cwxi30tPQs3bIO+aolQJNp1bIyw== - dependencies: - "@types/estree" "^1.0.0" - "@types/estree-jsx" "^1.0.0" - "@types/hast" "^3.0.0" - comma-separated-tokens "^2.0.0" - devlop "^1.0.0" - estree-util-attach-comments "^3.0.0" - estree-util-is-identifier-name "^3.0.0" - hast-util-whitespace "^3.0.0" - mdast-util-mdx-expression "^2.0.0" - mdast-util-mdx-jsx "^3.0.0" - mdast-util-mdxjs-esm "^2.0.0" - property-information "^6.0.0" - space-separated-tokens "^2.0.0" - style-to-object "^0.4.0" - unist-util-position "^5.0.0" - zwitch "^2.0.0" - -hast-util-to-html@^9.0.0: - version "9.0.1" - resolved "https://registry.yarnpkg.com/hast-util-to-html/-/hast-util-to-html-9.0.1.tgz#d108aba473c0ced8377267b1a725b25e818ff3c8" - integrity sha512-hZOofyZANbyWo+9RP75xIDV/gq+OUKx+T46IlwERnKmfpwp81XBFbT9mi26ws+SJchA4RVUQwIBJpqEOBhMzEQ== - dependencies: - "@types/hast" "^3.0.0" - "@types/unist" "^3.0.0" - ccount "^2.0.0" - comma-separated-tokens "^2.0.0" - hast-util-raw "^9.0.0" - hast-util-whitespace "^3.0.0" - html-void-elements "^3.0.0" - mdast-util-to-hast "^13.0.0" - property-information "^6.0.0" - space-separated-tokens "^2.0.0" - stringify-entities "^4.0.0" - zwitch "^2.0.4" - -hast-util-to-jsx-runtime@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.0.tgz#3ed27caf8dc175080117706bf7269404a0aa4f7c" - integrity sha512-H/y0+IWPdsLLS738P8tDnrQ8Z+dj12zQQ6WC11TIM21C8WFVoIxcqWXf2H3hiTVZjF1AWqoimGwrTWecWrnmRQ== - dependencies: - "@types/estree" "^1.0.0" - "@types/hast" "^3.0.0" - "@types/unist" "^3.0.0" - comma-separated-tokens "^2.0.0" - devlop "^1.0.0" - estree-util-is-identifier-name "^3.0.0" - hast-util-whitespace "^3.0.0" - mdast-util-mdx-expression "^2.0.0" - mdast-util-mdx-jsx "^3.0.0" - mdast-util-mdxjs-esm "^2.0.0" - property-information "^6.0.0" - space-separated-tokens "^2.0.0" - style-to-object "^1.0.0" - unist-util-position "^5.0.0" - vfile-message "^4.0.0" - -hast-util-to-parse5@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz#477cd42d278d4f036bc2ea58586130f6f39ee6ed" - integrity sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw== - dependencies: - "@types/hast" "^3.0.0" - comma-separated-tokens "^2.0.0" - devlop "^1.0.0" - property-information "^6.0.0" - space-separated-tokens "^2.0.0" - web-namespaces "^2.0.0" - zwitch "^2.0.0" - -hast-util-whitespace@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz#7778ed9d3c92dd9e8c5c8f648a49c21fc51cb621" - integrity 
sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw== - dependencies: - "@types/hast" "^3.0.0" - -hastscript@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-8.0.0.tgz#4ef795ec8dee867101b9f23cc830d4baf4fd781a" - integrity sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw== - dependencies: - "@types/hast" "^3.0.0" - comma-separated-tokens "^2.0.0" - hast-util-parse-selector "^4.0.0" - property-information "^6.0.0" - space-separated-tokens "^2.0.0" - -html-void-elements@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-3.0.0.tgz#fc9dbd84af9e747249034d4d62602def6517f1d7" - integrity sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg== - -hyperdyperid@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/hyperdyperid/-/hyperdyperid-1.2.0.tgz#59668d323ada92228d2a869d3e474d5a33b69e6b" - integrity sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A== - -ignore@^5.2.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.0.tgz#67418ae40d34d6999c95ff56016759c718c82f78" - integrity sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg== - -imagescript@^1.2.16: - version "1.2.18" - resolved "https://registry.yarnpkg.com/imagescript/-/imagescript-1.2.18.tgz#058744cb22a49065c78415304a537d608c02d9ab" - integrity sha512-8AwTawraXovLo2PgKvFt96SZqJDwl0CnHDyrtoPUQHMmoA7u9M8EnqFZwCofSM+Uo623Z580iKW74bs2fzjoYQ== - -import-fresh@^3.2.1: - version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== - -inflection@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/inflection/-/inflection-3.0.0.tgz#6a956fa90d72a27d22e6b32ec1064877593ee23b" - integrity sha512-1zEJU1l19SgJlmwqsEyFTbScw/tkMHFenUo//Y0i+XEP83gDFdMvPizAD/WGcE+l1ku12PcTVHQhO6g5E0UCMw== - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -inline-style-parser@0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.1.1.tgz#ec8a3b429274e9c0a1f1c4ffa9453a7fef72cea1" - integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q== - -inline-style-parser@0.2.3: - version "0.2.3" - resolved 
"https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.2.3.tgz#e35c5fb45f3a83ed7849fe487336eb7efa25971c" - integrity sha512-qlD8YNDqyTKTyuITrDOffsl6Tdhv+UC4hcdAVuQsK4IMQ99nSgd1MIA/Q+jQYoh9r3hVUXhYh7urSRmXPkW04g== - -internal-slot@^1.0.5: - version "1.0.6" - resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.6.tgz#37e756098c4911c5e912b8edbf71ed3aa116f930" - integrity sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg== - dependencies: - get-intrinsic "^1.2.2" - hasown "^2.0.0" - side-channel "^1.0.4" - -interpret@^1.0.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" - integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== - -is-alphabetical@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-2.0.1.tgz#01072053ea7c1036df3c7d19a6daaec7f19e789b" - integrity sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ== - -is-alphanumerical@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz#7c03fbe96e3e931113e57f964b0a368cc2dfd875" - integrity sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw== - dependencies: - is-alphabetical "^2.0.0" - is-decimal "^2.0.0" - -is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" - integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.2.0" - is-typed-array "^1.1.10" - -is-async-function@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-async-function/-/is-async-function-2.0.0.tgz#8e4418efd3e5d3a6ebb0164c05ef5afb69aa9646" - integrity sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA== - dependencies: - has-tostringtag "^1.0.0" - -is-bigint@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" - integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== - dependencies: - has-bigints "^1.0.1" - -is-binary-path@~2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" - integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== - dependencies: - binary-extensions "^2.0.0" - -is-boolean-object@^1.1.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" - integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" - integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== - -is-core-module@^2.11.0, is-core-module@^2.13.0, 
is-core-module@^2.13.1: - version "2.13.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" - integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== - dependencies: - hasown "^2.0.0" - -is-date-object@^1.0.1, is-date-object@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" - integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== - dependencies: - has-tostringtag "^1.0.0" - -is-decimal@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-decimal/-/is-decimal-2.0.1.tgz#9469d2dc190d0214fd87d78b78caecc0cc14eef7" - integrity sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A== - -is-extendable@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - integrity sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw== - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== - -is-finalizationregistry@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz#c8749b65f17c133313e661b1289b95ad3dbd62e6" - integrity sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw== - dependencies: - call-bind "^1.0.2" - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-generator-function@^1.0.10: - version "1.0.10" - resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72" - integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A== - dependencies: - has-tostringtag "^1.0.0" - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: - version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-hexadecimal@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz#86b5bf668fca307498d319dfc03289d781a90027" - integrity sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg== - -is-map@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-map/-/is-map-2.0.2.tgz#00922db8c9bf73e81b7a335827bc2a43f2b91127" - integrity sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg== - -is-negative-zero@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" - integrity 
sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== - -is-number-object@^1.0.4: - version "1.0.7" - resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" - integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== - dependencies: - has-tostringtag "^1.0.0" - -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-path-inside@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" - integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== - -is-plain-obj@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-4.1.0.tgz#d65025edec3657ce032fd7db63c97883eaed71f0" - integrity sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg== - -is-reference@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-3.0.2.tgz#154747a01f45cd962404ee89d43837af2cba247c" - integrity sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg== - dependencies: - "@types/estree" "*" - -is-regex@^1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" - integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== - dependencies: - call-bind "^1.0.2" - has-tostringtag "^1.0.0" - -is-set@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.2.tgz#90755fa4c2562dc1c5d4024760d6119b94ca18ec" - integrity sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g== - -is-shared-array-buffer@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" - integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== - dependencies: - call-bind "^1.0.2" - -is-string@^1.0.5, is-string@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" - integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== - dependencies: - has-tostringtag "^1.0.0" - -is-symbol@^1.0.2, is-symbol@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" - integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== - dependencies: - has-symbols "^1.0.2" - -is-typed-array@^1.1.10, is-typed-array@^1.1.12, is-typed-array@^1.1.9: - version "1.1.12" - resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" - integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== - dependencies: - which-typed-array "^1.1.11" - -is-weakmap@^2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.1.tgz#5008b59bdc43b698201d18f62b37b2ca243e8cf2" - integrity sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA== - -is-weakref@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" - integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== - dependencies: - call-bind "^1.0.2" - -is-weakset@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.2.tgz#4569d67a747a1ce5a994dfd4ef6dcea76e7c0a1d" - integrity sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.1" - -isarray@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" - integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== - -iterator.prototype@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/iterator.prototype/-/iterator.prototype-1.1.2.tgz#5e29c8924f01916cb9335f1ff80619dcff22b0c0" - integrity sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w== - dependencies: - define-properties "^1.2.1" - get-intrinsic "^1.2.1" - has-symbols "^1.0.3" - reflect.getprototypeof "^1.0.4" - set-function-name "^2.0.1" - -jackspeak@^2.3.5: - version "2.3.6" - resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8" - integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ== - dependencies: - "@isaacs/cliui" "^8.0.2" - optionalDependencies: - "@pkgjs/parseargs" "^0.11.0" - -jiti@^1.19.1: - version "1.21.0" - resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.0.tgz#7c97f8fe045724e136a397f7340475244156105d" - integrity sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q== - -"js-tokens@^3.0.0 || ^4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.13.1: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -js-yaml@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - -jsbi@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/jsbi/-/jsbi-4.3.0.tgz#b54ee074fb6fcbc00619559305c8f7e912b04741" - integrity sha512-SnZNcinB4RIcnEyZqFPdGPVgrg2AcnykiBy0sHVJQKHYeaLUvi3Exj+iaPpLnFVkDPZIV4U0yvgC9/R4uEAZ9g== - -json-buffer@3.0.1: - 
version "3.0.1" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" - integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== - -json5@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" - integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== - dependencies: - minimist "^1.2.0" - -"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.5: - version "3.3.5" - resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz#4766bd05a8e2a11af222becd19e15575e52a853a" - integrity sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ== - dependencies: - array-includes "^3.1.6" - array.prototype.flat "^1.3.1" - object.assign "^4.1.4" - object.values "^1.1.6" - -keyv@^4.5.3: - version "4.5.4" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" - integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== - dependencies: - json-buffer "3.0.1" - -kind-of@^6.0.0, kind-of@^6.0.2: - version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -language-subtag-registry@^0.3.20: - version "0.3.22" - resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d" - integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w== - -language-tags@^1.0.9: - version "1.0.9" - resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.9.tgz#1ffdcd0ec0fafb4b1be7f8b11f306ad0f9c08777" - integrity sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA== - dependencies: - language-subtag-registry "^0.3.20" - -levn@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" - integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== - dependencies: - prelude-ls "^1.2.1" - type-check "~0.4.0" - -lilconfig@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.1.0.tgz#78e23ac89ebb7e1bfbf25b18043de756548e7f52" - integrity sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ== - -lilconfig@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.0.0.tgz#f8067feb033b5b74dab4602a5f5029420be749bc" - integrity 
sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g== - -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - -locate-path@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" - integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== - dependencies: - p-locate "^5.0.0" - -lodash.camelcase@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" - integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== - -lodash.merge@^4.6.2: - version "4.6.2" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" - integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== - -long@^5.0.0: - version "5.2.3" - resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1" - integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q== - -longest-streak@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/longest-streak/-/longest-streak-3.1.0.tgz#62fa67cd958742a1574af9f39866364102d90cd4" - integrity sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g== - -loose-envify@^1.1.0, loose-envify@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" - integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== - dependencies: - js-tokens "^3.0.0 || ^4.0.0" - -lower-case@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" - integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== - dependencies: - tslib "^2.0.3" - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -"lru-cache@^9.1.1 || ^10.0.0": - version "10.2.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.0.tgz#0bd445ca57363465900f4d1f9bd8db343a4d95c3" - integrity sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q== - -markdown-extensions@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/markdown-extensions/-/markdown-extensions-2.0.0.tgz#34bebc83e9938cae16e0e017e4a9814a8330d3c4" - integrity sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q== - -mdast-util-from-markdown@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.1.tgz#32a6e8f512b416e1f51eb817fc64bd867ebcd9cc" - integrity 
sha512-aJEUyzZ6TzlsX2s5B4Of7lN7EQtAxvtradMMglCQDyaTFgse6CmtmdJ15ElnVRlCg1vpNyVtbem0PWzlNieZsA== - dependencies: - "@types/mdast" "^4.0.0" - "@types/unist" "^3.0.0" - decode-named-character-reference "^1.0.0" - devlop "^1.0.0" - mdast-util-to-string "^4.0.0" - micromark "^4.0.0" - micromark-util-decode-numeric-character-reference "^2.0.0" - micromark-util-decode-string "^2.0.0" - micromark-util-normalize-identifier "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - unist-util-stringify-position "^4.0.0" - -mdast-util-frontmatter@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz#f5f929eb1eb36c8a7737475c7eb438261f964ee8" - integrity sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA== - dependencies: - "@types/mdast" "^4.0.0" - devlop "^1.0.0" - escape-string-regexp "^5.0.0" - mdast-util-from-markdown "^2.0.0" - mdast-util-to-markdown "^2.0.0" - micromark-extension-frontmatter "^2.0.0" - -mdast-util-mdx-expression@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.0.tgz#4968b73724d320a379110d853e943a501bfd9d87" - integrity sha512-fGCu8eWdKUKNu5mohVGkhBXCXGnOTLuFqOvGMvdikr+J1w7lDJgxThOKpwRWzzbyXAU2hhSwsmssOY4yTokluw== - dependencies: - "@types/estree-jsx" "^1.0.0" - "@types/hast" "^3.0.0" - "@types/mdast" "^4.0.0" - devlop "^1.0.0" - mdast-util-from-markdown "^2.0.0" - mdast-util-to-markdown "^2.0.0" - -mdast-util-mdx-jsx@^3.0.0: - version "3.1.2" - resolved "https://registry.yarnpkg.com/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.2.tgz#daae777c72f9c4a106592e3025aa50fb26068e1b" - integrity sha512-eKMQDeywY2wlHc97k5eD8VC+9ASMjN8ItEZQNGwJ6E0XWKiW/Z0V5/H8pvoXUf+y+Mj0VIgeRRbujBmFn4FTyA== - dependencies: - "@types/estree-jsx" "^1.0.0" - "@types/hast" "^3.0.0" - "@types/mdast" "^4.0.0" - "@types/unist" "^3.0.0" - ccount "^2.0.0" - devlop "^1.1.0" - mdast-util-from-markdown "^2.0.0" - mdast-util-to-markdown "^2.0.0" - parse-entities "^4.0.0" - stringify-entities "^4.0.0" - unist-util-remove-position "^5.0.0" - unist-util-stringify-position "^4.0.0" - vfile-message "^4.0.0" - -mdast-util-mdx@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz#792f9cf0361b46bee1fdf1ef36beac424a099c41" - integrity sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w== - dependencies: - mdast-util-from-markdown "^2.0.0" - mdast-util-mdx-expression "^2.0.0" - mdast-util-mdx-jsx "^3.0.0" - mdast-util-mdxjs-esm "^2.0.0" - mdast-util-to-markdown "^2.0.0" - -mdast-util-mdxjs-esm@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz#019cfbe757ad62dd557db35a695e7314bcc9fa97" - integrity sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg== - dependencies: - "@types/estree-jsx" "^1.0.0" - "@types/hast" "^3.0.0" - "@types/mdast" "^4.0.0" - devlop "^1.0.0" - mdast-util-from-markdown "^2.0.0" - mdast-util-to-markdown "^2.0.0" - -mdast-util-phrasing@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz#7cc0a8dec30eaf04b7b1a9661a92adb3382aa6e3" - integrity sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w== - dependencies: - "@types/mdast" "^4.0.0" - unist-util-is "^6.0.0" - -mdast-util-to-hast@^13.0.0: - 
version "13.2.0" - resolved "https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz#5ca58e5b921cc0a3ded1bc02eed79a4fe4fe41f4" - integrity sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA== - dependencies: - "@types/hast" "^3.0.0" - "@types/mdast" "^4.0.0" - "@ungap/structured-clone" "^1.0.0" - devlop "^1.0.0" - micromark-util-sanitize-uri "^2.0.0" - trim-lines "^3.0.0" - unist-util-position "^5.0.0" - unist-util-visit "^5.0.0" - vfile "^6.0.0" - -mdast-util-to-markdown@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.0.tgz#9813f1d6e0cdaac7c244ec8c6dabfdb2102ea2b4" - integrity sha512-SR2VnIEdVNCJbP6y7kVTJgPLifdr8WEU440fQec7qHoHOUz/oJ2jmNRqdDQ3rbiStOXb2mCDGTuwsK5OPUgYlQ== - dependencies: - "@types/mdast" "^4.0.0" - "@types/unist" "^3.0.0" - longest-streak "^3.0.0" - mdast-util-phrasing "^4.0.0" - mdast-util-to-string "^4.0.0" - micromark-util-decode-string "^2.0.0" - unist-util-visit "^5.0.0" - zwitch "^2.0.0" - -mdast-util-to-string@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz#7a5121475556a04e7eddeb67b264aae79d312814" - integrity sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg== - dependencies: - "@types/mdast" "^4.0.0" - -mdx-bundler@^10.0.2: - version "10.0.2" - resolved "https://registry.yarnpkg.com/mdx-bundler/-/mdx-bundler-10.0.2.tgz#bae47e7c477a220e68ee4f316cdfde94192949c5" - integrity sha512-0wF0zoCv+Ms4G+eSlk/jaKYoJHc0oXBaOma3kYlFJiKq9H8h41Dd66ioDBGF4noy80Pf7KTBQlyHfEpTqVml7A== - dependencies: - "@babel/runtime" "^7.23.2" - "@esbuild-plugins/node-resolve" "^0.2.2" - "@fal-works/esbuild-plugin-global-externals" "^2.1.2" - "@mdx-js/esbuild" "^3.0.0" - gray-matter "^4.0.3" - remark-frontmatter "^5.0.0" - remark-mdx-frontmatter "^4.0.0" - uuid "^9.0.1" - vfile "^6.0.1" - -memfs@^4.8.2: - version "4.9.3" - resolved "https://registry.yarnpkg.com/memfs/-/memfs-4.9.3.tgz#41a3218065fe3911d9eba836250c8f4e43f816bc" - integrity sha512-bsYSSnirtYTWi1+OPMFb0M048evMKyUYe0EbtuGQgq6BVQM1g1W8/KIUJCCvjgI/El0j6Q4WsmMiBwLUBSw8LA== - dependencies: - "@jsonjoy.com/json-pack" "^1.0.3" - "@jsonjoy.com/util" "^1.1.2" - tree-dump "^1.0.1" - tslib "^2.0.0" - -merge2@^1.3.0, merge2@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" - integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - -micromark-core-commonmark@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/micromark-core-commonmark/-/micromark-core-commonmark-2.0.1.tgz#9a45510557d068605c6e9a80f282b2bb8581e43d" - integrity sha512-CUQyKr1e///ZODyD1U3xit6zXwy1a8q2a1S1HKtIlmgvurrEpaw/Y9y6KSIbF8P59cn/NjzHyO+Q2fAyYLQrAA== - dependencies: - decode-named-character-reference "^1.0.0" - devlop "^1.0.0" - micromark-factory-destination "^2.0.0" - micromark-factory-label "^2.0.0" - micromark-factory-space "^2.0.0" - micromark-factory-title "^2.0.0" - micromark-factory-whitespace "^2.0.0" - micromark-util-character "^2.0.0" - micromark-util-chunked "^2.0.0" - micromark-util-classify-character "^2.0.0" - micromark-util-html-tag-name "^2.0.0" - micromark-util-normalize-identifier "^2.0.0" - micromark-util-resolve-all "^2.0.0" - micromark-util-subtokenize "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - 
-micromark-extension-frontmatter@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-extension-frontmatter/-/micromark-extension-frontmatter-2.0.0.tgz#651c52ffa5d7a8eeed687c513cd869885882d67a" - integrity sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg== - dependencies: - fault "^2.0.0" - micromark-util-character "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-extension-mdx-expression@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-3.0.0.tgz#1407b9ce69916cf5e03a196ad9586889df25302a" - integrity sha512-sI0nwhUDz97xyzqJAbHQhp5TfaxEvZZZ2JDqUo+7NvyIYG6BZ5CPPqj2ogUoPJlmXHBnyZUzISg9+oUmU6tUjQ== - dependencies: - "@types/estree" "^1.0.0" - devlop "^1.0.0" - micromark-factory-mdx-expression "^2.0.0" - micromark-factory-space "^2.0.0" - micromark-util-character "^2.0.0" - micromark-util-events-to-acorn "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-extension-mdx-jsx@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-3.0.0.tgz#4aba0797c25efb2366a3fd2d367c6b1c1159f4f5" - integrity sha512-uvhhss8OGuzR4/N17L1JwvmJIpPhAd8oByMawEKx6NVdBCbesjH4t+vjEp3ZXft9DwvlKSD07fCeI44/N0Vf2w== - dependencies: - "@types/acorn" "^4.0.0" - "@types/estree" "^1.0.0" - devlop "^1.0.0" - estree-util-is-identifier-name "^3.0.0" - micromark-factory-mdx-expression "^2.0.0" - micromark-factory-space "^2.0.0" - micromark-util-character "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - vfile-message "^4.0.0" - -micromark-extension-mdx-md@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-extension-mdx-md/-/micromark-extension-mdx-md-2.0.0.tgz#1d252881ea35d74698423ab44917e1f5b197b92d" - integrity sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ== - dependencies: - micromark-util-types "^2.0.0" - -micromark-extension-mdxjs-esm@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-3.0.0.tgz#de21b2b045fd2059bd00d36746081de38390d54a" - integrity sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A== - dependencies: - "@types/estree" "^1.0.0" - devlop "^1.0.0" - micromark-core-commonmark "^2.0.0" - micromark-util-character "^2.0.0" - micromark-util-events-to-acorn "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - unist-util-position-from-estree "^2.0.0" - vfile-message "^4.0.0" - -micromark-extension-mdxjs@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/micromark-extension-mdxjs/-/micromark-extension-mdxjs-3.0.0.tgz#b5a2e0ed449288f3f6f6c544358159557549de18" - integrity sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ== - dependencies: - acorn "^8.0.0" - acorn-jsx "^5.0.0" - micromark-extension-mdx-expression "^3.0.0" - micromark-extension-mdx-jsx "^3.0.0" - micromark-extension-mdx-md "^2.0.0" - micromark-extension-mdxjs-esm "^3.0.0" - micromark-util-combine-extensions "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-factory-destination@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-factory-destination/-/micromark-factory-destination-2.0.0.tgz#857c94debd2c873cba34e0445ab26b74f6a6ec07" - integrity 
sha512-j9DGrQLm/Uhl2tCzcbLhy5kXsgkHUrjJHg4fFAeoMRwJmJerT9aw4FEhIbZStWN8A3qMwOp1uzHr4UL8AInxtA== - dependencies: - micromark-util-character "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-factory-label@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-factory-label/-/micromark-factory-label-2.0.0.tgz#17c5c2e66ce39ad6f4fc4cbf40d972f9096f726a" - integrity sha512-RR3i96ohZGde//4WSe/dJsxOX6vxIg9TimLAS3i4EhBAFx8Sm5SmqVfR8E87DPSR31nEAjZfbt91OMZWcNgdZw== - dependencies: - devlop "^1.0.0" - micromark-util-character "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-factory-mdx-expression@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-2.0.1.tgz#f2a9724ce174f1751173beb2c1f88062d3373b1b" - integrity sha512-F0ccWIUHRLRrYp5TC9ZYXmZo+p2AM13ggbsW4T0b5CRKP8KHVRB8t4pwtBgTxtjRmwrK0Irwm7vs2JOZabHZfg== - dependencies: - "@types/estree" "^1.0.0" - devlop "^1.0.0" - micromark-util-character "^2.0.0" - micromark-util-events-to-acorn "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - unist-util-position-from-estree "^2.0.0" - vfile-message "^4.0.0" - -micromark-factory-space@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-factory-space/-/micromark-factory-space-2.0.0.tgz#5e7afd5929c23b96566d0e1ae018ae4fcf81d030" - integrity sha512-TKr+LIDX2pkBJXFLzpyPyljzYK3MtmllMUMODTQJIUfDGncESaqB90db9IAUcz4AZAJFdd8U9zOp9ty1458rxg== - dependencies: - micromark-util-character "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-factory-title@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-factory-title/-/micromark-factory-title-2.0.0.tgz#726140fc77892af524705d689e1cf06c8a83ea95" - integrity sha512-jY8CSxmpWLOxS+t8W+FG3Xigc0RDQA9bKMY/EwILvsesiRniiVMejYTE4wumNc2f4UbAa4WsHqe3J1QS1sli+A== - dependencies: - micromark-factory-space "^2.0.0" - micromark-util-character "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-factory-whitespace@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.0.tgz#9e92eb0f5468083381f923d9653632b3cfb5f763" - integrity sha512-28kbwaBjc5yAI1XadbdPYHX/eDnqaUFVikLwrO7FDnKG7lpgxnvk/XGRhX/PN0mOZ+dBSZ+LgunHS+6tYQAzhA== - dependencies: - micromark-factory-space "^2.0.0" - micromark-util-character "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-util-character@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-2.1.0.tgz#31320ace16b4644316f6bf057531689c71e2aee1" - integrity sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ== - dependencies: - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-util-chunked@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-chunked/-/micromark-util-chunked-2.0.0.tgz#e51f4db85fb203a79dbfef23fd41b2f03dc2ef89" - integrity sha512-anK8SWmNphkXdaKgz5hJvGa7l00qmcaUQoMYsBwDlSKFKjc6gjGXPDw3FNL3Nbwq5L8gE+RCbGqTw49FK5Qyvg== - dependencies: - micromark-util-symbol "^2.0.0" - -micromark-util-classify-character@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-classify-character/-/micromark-util-classify-character-2.0.0.tgz#8c7537c20d0750b12df31f86e976d1d951165f34" - integrity 
sha512-S0ze2R9GH+fu41FA7pbSqNWObo/kzwf8rN/+IGlW/4tC6oACOs8B++bh+i9bVyNnwCcuksbFwsBme5OCKXCwIw== - dependencies: - micromark-util-character "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-util-combine-extensions@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.0.tgz#75d6ab65c58b7403616db8d6b31315013bfb7ee5" - integrity sha512-vZZio48k7ON0fVS3CUgFatWHoKbbLTK/rT7pzpJ4Bjp5JjkZeasRfrS9wsBdDJK2cJLHMckXZdzPSSr1B8a4oQ== - dependencies: - micromark-util-chunked "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-util-decode-numeric-character-reference@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.1.tgz#2698bbb38f2a9ba6310e359f99fcb2b35a0d2bd5" - integrity sha512-bmkNc7z8Wn6kgjZmVHOX3SowGmVdhYS7yBpMnuMnPzDq/6xwVA604DuOXMZTO1lvq01g+Adfa0pE2UKGlxL1XQ== - dependencies: - micromark-util-symbol "^2.0.0" - -micromark-util-decode-string@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-decode-string/-/micromark-util-decode-string-2.0.0.tgz#7dfa3a63c45aecaa17824e656bcdb01f9737154a" - integrity sha512-r4Sc6leeUTn3P6gk20aFMj2ntPwn6qpDZqWvYmAG6NgvFTIlj4WtrAudLi65qYoaGdXYViXYw2pkmn7QnIFasA== - dependencies: - decode-named-character-reference "^1.0.0" - micromark-util-character "^2.0.0" - micromark-util-decode-numeric-character-reference "^2.0.0" - micromark-util-symbol "^2.0.0" - -micromark-util-encode@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz#0921ac7953dc3f1fd281e3d1932decfdb9382ab1" - integrity sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA== - -micromark-util-events-to-acorn@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-2.0.2.tgz#4275834f5453c088bd29cd72dfbf80e3327cec07" - integrity sha512-Fk+xmBrOv9QZnEDguL9OI9/NQQp6Hz4FuQ4YmCb/5V7+9eAh1s6AYSvL20kHkD67YIg7EpE54TiSlcsf3vyZgA== - dependencies: - "@types/acorn" "^4.0.0" - "@types/estree" "^1.0.0" - "@types/unist" "^3.0.0" - devlop "^1.0.0" - estree-util-visit "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - vfile-message "^4.0.0" - -micromark-util-html-tag-name@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.0.tgz#ae34b01cbe063363847670284c6255bb12138ec4" - integrity sha512-xNn4Pqkj2puRhKdKTm8t1YHC/BAjx6CEwRFXntTaRf/x16aqka6ouVoutm+QdkISTlT7e2zU7U4ZdlDLJd2Mcw== - -micromark-util-normalize-identifier@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.0.tgz#91f9a4e65fe66cc80c53b35b0254ad67aa431d8b" - integrity sha512-2xhYT0sfo85FMrUPtHcPo2rrp1lwbDEEzpx7jiH2xXJLqBuy4H0GgXk5ToU8IEwoROtXuL8ND0ttVa4rNqYK3w== - dependencies: - micromark-util-symbol "^2.0.0" - -micromark-util-resolve-all@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.0.tgz#189656e7e1a53d0c86a38a652b284a252389f364" - integrity sha512-6KU6qO7DZ7GJkaCgwBNtplXCvGkJToU86ybBAUdavvgsCiG8lSSvYxr9MhwmQ+udpzywHsl4RpGJsYWG1pDOcA== - dependencies: - micromark-util-types "^2.0.0" - -micromark-util-sanitize-uri@^2.0.0: - version "2.0.0" - 
resolved "https://registry.yarnpkg.com/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz#ec8fbf0258e9e6d8f13d9e4770f9be64342673de" - integrity sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw== - dependencies: - micromark-util-character "^2.0.0" - micromark-util-encode "^2.0.0" - micromark-util-symbol "^2.0.0" - -micromark-util-subtokenize@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/micromark-util-subtokenize/-/micromark-util-subtokenize-2.0.1.tgz#76129c49ac65da6e479c09d0ec4b5f29ec6eace5" - integrity sha512-jZNtiFl/1aY73yS3UGQkutD0UbhTt68qnRpw2Pifmz5wV9h8gOVsN70v+Lq/f1rKaU/W8pxRe8y8Q9FX1AOe1Q== - dependencies: - devlop "^1.0.0" - micromark-util-chunked "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromark-util-symbol@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz#12225c8f95edf8b17254e47080ce0862d5db8044" - integrity sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw== - -micromark-util-types@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-2.0.0.tgz#63b4b7ffeb35d3ecf50d1ca20e68fc7caa36d95e" - integrity sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w== - -micromark@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/micromark/-/micromark-4.0.0.tgz#84746a249ebd904d9658cfabc1e8e5f32cbc6249" - integrity sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ== - dependencies: - "@types/debug" "^4.0.0" - debug "^4.0.0" - decode-named-character-reference "^1.0.0" - devlop "^1.0.0" - micromark-core-commonmark "^2.0.0" - micromark-factory-space "^2.0.0" - micromark-util-character "^2.0.0" - micromark-util-chunked "^2.0.0" - micromark-util-combine-extensions "^2.0.0" - micromark-util-decode-numeric-character-reference "^2.0.0" - micromark-util-encode "^2.0.0" - micromark-util-normalize-identifier "^2.0.0" - micromark-util-resolve-all "^2.0.0" - micromark-util-sanitize-uri "^2.0.0" - micromark-util-subtokenize "^2.0.0" - micromark-util-symbol "^2.0.0" - micromark-util-types "^2.0.0" - -micromatch@^4.0.4, micromatch@^4.0.5: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== - dependencies: - braces "^3.0.2" - picomatch "^2.3.1" - -minimatch@9.0.3, minimatch@^9.0.1: - version "9.0.3" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" - integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== - dependencies: - brace-expansion "^2.0.1" - -minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -minimist@^1.2.0, minimist@^1.2.6: - version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" - integrity 
sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== - -minipass@^2.6.0, minipass@^2.9.0: - version "2.9.0" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" - integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== - dependencies: - safe-buffer "^5.1.2" - yallist "^3.0.0" - -"minipass@^5.0.0 || ^6.0.2 || ^7.0.0": - version "7.0.4" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c" - integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ== - -minizlib@^1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" - integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== - dependencies: - minipass "^2.9.0" - -mkdirp@^0.5.5: - version "0.5.6" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" - integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== - dependencies: - minimist "^1.2.6" - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -ms@^2.1.1: - version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" - integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== - -mz@^2.7.0: - version "2.7.0" - resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" - integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== - dependencies: - any-promise "^1.0.0" - object-assign "^4.0.1" - thenify-all "^1.0.0" - -nanoid@^3.3.6, nanoid@^3.3.7: - version "3.3.7" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8" - integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== - -next-contentlayer2@^0.4.6: - version "0.4.6" - resolved "https://registry.yarnpkg.com/next-contentlayer2/-/next-contentlayer2-0.4.6.tgz#4474ad1a435c69c1ffff9145421da66609328749" - integrity sha512-r1g/zCl3dXFfMlMI87n2+V90Jz7inMrfyRzmMSQdDfCBy3uexnsoM43wPbklza2teyd/pdfUT0I0gw/IE+YA4Q== - dependencies: - "@contentlayer2/core" "0.4.3" - "@contentlayer2/utils" "0.4.3" - -next@14.1.0: - version "14.1.0" - resolved "https://registry.yarnpkg.com/next/-/next-14.1.0.tgz#b31c0261ff9caa6b4a17c5af019ed77387174b69" - integrity sha512-wlzrsbfeSU48YQBjZhDzOwhWhGsy+uQycR8bHAOt1LY1bn3zZEcDyHQOEoN3aWzQ8LHCAJ1nqrWCc9XF2+O45Q== - dependencies: - "@next/env" "14.1.0" - "@swc/helpers" "0.5.2" - busboy "1.6.0" - caniuse-lite "^1.0.30001579" - graceful-fs "^4.2.11" - postcss "8.4.31" - styled-jsx "5.1.1" - optionalDependencies: - "@next/swc-darwin-arm64" "14.1.0" - "@next/swc-darwin-x64" "14.1.0" - 
"@next/swc-linux-arm64-gnu" "14.1.0" - "@next/swc-linux-arm64-musl" "14.1.0" - "@next/swc-linux-x64-gnu" "14.1.0" - "@next/swc-linux-x64-musl" "14.1.0" - "@next/swc-win32-arm64-msvc" "14.1.0" - "@next/swc-win32-ia32-msvc" "14.1.0" - "@next/swc-win32-x64-msvc" "14.1.0" - -no-case@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" - integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== - dependencies: - lower-case "^2.0.2" - tslib "^2.0.3" - -node-fetch@2.6.7: - version "2.6.7" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" - integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== - dependencies: - whatwg-url "^5.0.0" - -node-releases@^2.0.14: - version "2.0.14" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" - integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== - -normalize-path@^3.0.0, normalize-path@~3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -normalize-range@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" - integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== - -object-assign@^4.0.1, object-assign@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== - -object-hash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" - integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== - -object-inspect@^1.13.1, object-inspect@^1.9.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2" - integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ== - -object-keys@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" - integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== - -object.assign@^4.1.4: - version "4.1.5" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.5.tgz#3a833f9ab7fdb80fc9e8d2300c803d216d8fdbb0" - integrity sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ== - dependencies: - call-bind "^1.0.5" - define-properties "^1.2.1" - has-symbols "^1.0.3" - object-keys "^1.1.1" - -object.entries@^1.1.6, object.entries@^1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.7.tgz#2b47760e2a2e3a752f39dd874655c61a7f03c131" - integrity 
sha512-jCBs/0plmPsOnrKAfFQXRG2NFjlhZgjjcBLSmTnEhU8U6vVTsVe8ANeQJCHTl3gSsI4J+0emOoCgoKlmQPMgmA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - -object.fromentries@^2.0.6, object.fromentries@^2.0.7: - version "2.0.7" - resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.7.tgz#71e95f441e9a0ea6baf682ecaaf37fa2a8d7e616" - integrity sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - -object.groupby@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/object.groupby/-/object.groupby-1.0.1.tgz#d41d9f3c8d6c778d9cbac86b4ee9f5af103152ee" - integrity sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - get-intrinsic "^1.2.1" - -object.hasown@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.3.tgz#6a5f2897bb4d3668b8e79364f98ccf971bda55ae" - integrity sha512-fFI4VcYpRHvSLXxP7yiZOMAd331cPfd2p7PFDVbgUsYOfCT3tICVqXWngbjr4m49OvsBwUBQ6O2uQoJvy3RexA== - dependencies: - define-properties "^1.2.0" - es-abstract "^1.22.1" - -object.values@^1.1.6, object.values@^1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.7.tgz#617ed13272e7e1071b43973aa1655d9291b8442a" - integrity sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - -once@^1.3.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -oo-ascii-tree@^1.94.0: - version "1.101.0" - resolved "https://registry.yarnpkg.com/oo-ascii-tree/-/oo-ascii-tree-1.101.0.tgz#bd14acf6a71430c02443f865975ec0c4b4ff03aa" - integrity sha512-hNE9Nfvo4HLa9/dAiaiXUm64KHUvgBa7jPftsb8gZdTv1G1wSMMnd9j7SMcRzaMbDEqi+0cfgeBSIcsKy+k0vA== - -optionator@^0.9.3: - version "0.9.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64" - integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg== - dependencies: - "@aashutoshrathi/word-wrap" "^1.2.3" - deep-is "^0.1.3" - fast-levenshtein "^2.0.6" - levn "^0.4.1" - prelude-ls "^1.2.1" - type-check "^0.4.0" - -p-limit@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" - integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== - dependencies: - yocto-queue "^0.1.0" - -p-locate@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" - integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== - dependencies: - p-limit "^3.0.2" - -parent-module@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity 
sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parse-entities@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-4.0.1.tgz#4e2a01111fb1c986549b944af39eeda258fc9e4e" - integrity sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w== - dependencies: - "@types/unist" "^2.0.0" - character-entities "^2.0.0" - character-entities-legacy "^3.0.0" - character-reference-invalid "^2.0.0" - decode-named-character-reference "^1.0.0" - is-alphanumerical "^2.0.0" - is-decimal "^2.0.0" - is-hexadecimal "^2.0.0" - -parse5@^7.0.0: - version "7.1.2" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.1.2.tgz#0736bebbfd77793823240a23b7fc5e010b7f8e32" - integrity sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw== - dependencies: - entities "^4.4.0" - -pascal-case@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" - integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== - dependencies: - no-case "^3.0.4" - tslib "^2.0.3" - -path-exists@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - -path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-parse@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -path-scurry@^1.10.1: - version "1.10.1" - resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.1.tgz#9ba6bf5aa8500fe9fd67df4f0d9483b2b0bfc698" - integrity sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ== - dependencies: - lru-cache "^9.1.1 || ^10.0.0" - minipass "^5.0.0 || ^6.0.2 || ^7.0.0" - -path-type@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" - integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== - -pend@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" - integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg== - -periscopic@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/periscopic/-/periscopic-3.1.0.tgz#7e9037bf51c5855bd33b48928828db4afa79d97a" - integrity sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw== - dependencies: - "@types/estree" "^1.0.0" - estree-walker 
"^3.0.0" - is-reference "^3.0.0" - -picocolors@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pify@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== - -pirates@^4.0.1: - version "4.0.6" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" - integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== - -postcss-import@^15.1.0: - version "15.1.0" - resolved "https://registry.yarnpkg.com/postcss-import/-/postcss-import-15.1.0.tgz#41c64ed8cc0e23735a9698b3249ffdbf704adc70" - integrity sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew== - dependencies: - postcss-value-parser "^4.0.0" - read-cache "^1.0.0" - resolve "^1.1.7" - -postcss-js@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.1.tgz#61598186f3703bab052f1c4f7d805f3991bee9d2" - integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw== - dependencies: - camelcase-css "^2.0.1" - -postcss-load-config@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-4.0.2.tgz#7159dcf626118d33e299f485d6afe4aff7c4a3e3" - integrity sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ== - dependencies: - lilconfig "^3.0.0" - yaml "^2.3.4" - -postcss-nested@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.0.1.tgz#f83dc9846ca16d2f4fa864f16e9d9f7d0961662c" - integrity sha512-mEp4xPMi5bSWiMbsgoPfcP74lsWLHkQbZc3sY+jWYd65CUwXrUaTp0fmNpa01ZcETKlIgUdFN/MpS2xZtqL9dQ== - dependencies: - postcss-selector-parser "^6.0.11" - -postcss-selector-parser@^6.0.11: - version "6.0.15" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.15.tgz#11cc2b21eebc0b99ea374ffb9887174855a01535" - integrity sha512-rEYkQOMUCEMhsKbK66tbEU9QVIxbhN18YiniAwA7XQYTVBqrBy+P2p5JcdqsHgKM2zWylp8d7J6eszocfds5Sw== - dependencies: - cssesc "^3.0.0" - util-deprecate "^1.0.2" - -postcss-value-parser@^4.0.0, postcss-value-parser@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" - integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== - -postcss@8.4.26: - version "8.4.26" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.26.tgz#1bc62ab19f8e1e5463d98cf74af39702a00a9e94" - integrity sha512-jrXHFF8iTloAenySjM/ob3gSj7pCu0Ji49hnjqzsgSRa50hkWCKD0HQ+gMNJkW38jBI68MpAAg7ZWwHwX8NMMw== - dependencies: - nanoid "^3.3.6" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -postcss@8.4.31: - version "8.4.31" - resolved 
"https://registry.yarnpkg.com/postcss/-/postcss-8.4.31.tgz#92b451050a9f914da6755af352bdc0192508656d" - integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ== - dependencies: - nanoid "^3.3.6" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -postcss@^8.4.23: - version "8.4.33" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.33.tgz#1378e859c9f69bf6f638b990a0212f43e2aaa742" - integrity sha512-Kkpbhhdjw2qQs2O2DGX+8m5OVqEcbB9HRBvuYM9pgrjEFUg30A9LmXNlTAUj4S9kgtGyrMbTzVjH7E+s5Re2yg== - dependencies: - nanoid "^3.3.7" - picocolors "^1.0.0" - source-map-js "^1.0.2" - -prelude-ls@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" - integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== - -prettier@^3.2.4: - version "3.2.4" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.2.4.tgz#4723cadeac2ce7c9227de758e5ff9b14e075f283" - integrity sha512-FWu1oLHKCrtpO1ypU6J0SbK2d9Ckwysq6bHj/uaCP26DxrPpppCLQRGVuqAxSTvhF00AcvDRyYrLNW7ocBhFFQ== - -prop-types@^15.8.1: - version "15.8.1" - resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" - integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== - dependencies: - loose-envify "^1.4.0" - object-assign "^4.1.1" - react-is "^16.13.1" - -property-information@^6.0.0: - version "6.4.1" - resolved "https://registry.yarnpkg.com/property-information/-/property-information-6.4.1.tgz#de8b79a7415fd2107dfbe65758bb2cc9dfcf60ac" - integrity sha512-OHYtXfu5aI2sS2LWFSN5rgJjrQ4pCy8i1jubJLe2QvMF8JJ++HXTUIVWFLfXJoaOfvYYjk2SN8J2wFUWIGXT4w== - -protobufjs@^7.2.3: - version "7.3.2" - resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.3.2.tgz#60f3b7624968868f6f739430cfbc8c9370e26df4" - integrity sha512-RXyHaACeqXeqAKGLDl68rQKbmObRsTIn4TYVUUug1KfS47YWCo5MacGITEryugIgZqORCvJWEk4l449POg5Txg== - dependencies: - "@protobufjs/aspromise" "^1.1.2" - "@protobufjs/base64" "^1.1.2" - "@protobufjs/codegen" "^2.0.4" - "@protobufjs/eventemitter" "^1.1.0" - "@protobufjs/fetch" "^1.1.0" - "@protobufjs/float" "^1.0.2" - "@protobufjs/inquire" "^1.1.0" - "@protobufjs/path" "^1.1.2" - "@protobufjs/pool" "^1.1.0" - "@protobufjs/utf8" "^1.1.0" - "@types/node" ">=13.7.0" - long "^5.0.0" - -protobufjs@^7.2.4: - version "7.2.6" - resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.6.tgz#4a0ccd79eb292717aacf07530a07e0ed20278215" - integrity sha512-dgJaEDDL6x8ASUZ1YqWciTRrdOuYNzoOf27oHNfdyvKqHr5i0FV7FSLU+aIeFjyFgVxrpTOtQUi0BLLBymZaBw== - dependencies: - "@protobufjs/aspromise" "^1.1.2" - "@protobufjs/base64" "^1.1.2" - "@protobufjs/codegen" "^2.0.4" - "@protobufjs/eventemitter" "^1.1.0" - "@protobufjs/fetch" "^1.1.0" - "@protobufjs/float" "^1.0.2" - "@protobufjs/inquire" "^1.1.0" - "@protobufjs/path" "^1.1.2" - "@protobufjs/pool" "^1.1.0" - "@protobufjs/utf8" "^1.1.0" - "@types/node" ">=13.7.0" - long "^5.0.0" - -punycode@^2.1.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" - integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== - -queue-microtask@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity 
sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - -react-dom@18.2.0: - version "18.2.0" - resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d" - integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g== - dependencies: - loose-envify "^1.1.0" - scheduler "^0.23.0" - -react-is@^16.13.1: - version "16.13.1" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" - integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== - -react@18.2.0: - version "18.2.0" - resolved "https://registry.yarnpkg.com/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5" - integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ== - dependencies: - loose-envify "^1.1.0" - -read-cache@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" - integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== - dependencies: - pify "^2.3.0" - -readdirp@~3.6.0: - version "3.6.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" - integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== - dependencies: - picomatch "^2.2.1" - -rechoir@^0.6.2: - version "0.6.2" - resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" - integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== - dependencies: - resolve "^1.1.6" - -reflect.getprototypeof@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/reflect.getprototypeof/-/reflect.getprototypeof-1.0.4.tgz#aaccbf41aca3821b87bb71d9dcbc7ad0ba50a3f3" - integrity sha512-ECkTw8TmJwW60lOTR+ZkODISW6RQ8+2CL3COqtiJKLd6MmB45hN51HprHFziKLGkAuTGQhBb91V8cy+KHlaCjw== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - get-intrinsic "^1.2.1" - globalthis "^1.0.3" - which-builtin-type "^1.1.3" - -regenerator-runtime@^0.14.0: - version "0.14.1" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f" - integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw== - -regexp.prototype.flags@^1.5.0, regexp.prototype.flags@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz#90ce989138db209f81492edd734183ce99f9677e" - integrity sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - set-function-name "^2.0.0" - -rehype-stringify@^10.0.0: - version "10.0.0" - resolved "https://registry.yarnpkg.com/rehype-stringify/-/rehype-stringify-10.0.0.tgz#2031cf6fdd0355393706f0474ec794c75e5492f2" - integrity sha512-1TX1i048LooI9QoecrXy7nGFFbFSufxVRAfc6Y9YMRAi56l+oB0zP51mLSV312uRuvVLPV1opSlJmslozR1XHQ== - dependencies: - "@types/hast" "^3.0.0" - hast-util-to-html "^9.0.0" - unified "^11.0.0" - -remark-frontmatter@^5.0.0: - version "5.0.0" - resolved 
"https://registry.yarnpkg.com/remark-frontmatter/-/remark-frontmatter-5.0.0.tgz#b68d61552a421ec412c76f4f66c344627dc187a2" - integrity sha512-XTFYvNASMe5iPN0719nPrdItC9aU0ssC4v14mH1BCi1u0n1gAocqcujWUrByftZTbLhRtiKRyjYTSIOcr69UVQ== - dependencies: - "@types/mdast" "^4.0.0" - mdast-util-frontmatter "^2.0.0" - micromark-extension-frontmatter "^2.0.0" - unified "^11.0.0" - -remark-mdx-frontmatter@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/remark-mdx-frontmatter/-/remark-mdx-frontmatter-4.0.0.tgz#1d0287103ac73c5d493d2fad19dd805e69db55ca" - integrity sha512-PZzAiDGOEfv1Ua7exQ8S5kKxkD8CDaSb4nM+1Mprs6u8dyvQifakh+kCj6NovfGXW+bTvrhjaR3srzjS2qJHKg== - dependencies: - "@types/mdast" "^4.0.0" - estree-util-is-identifier-name "^3.0.0" - estree-util-value-to-estree "^3.0.0" - toml "^3.0.0" - unified "^11.0.0" - yaml "^2.0.0" - -remark-mdx@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/remark-mdx/-/remark-mdx-3.0.1.tgz#8f73dd635c1874e44426e243f72c0977cf60e212" - integrity sha512-3Pz3yPQ5Rht2pM5R+0J2MrGoBSrzf+tJG94N+t/ilfdh8YLyyKYtidAYwTveB20BoHAcwIopOUqhcmh2F7hGYA== - dependencies: - mdast-util-mdx "^3.0.0" - micromark-extension-mdxjs "^3.0.0" - -remark-parse@^11.0.0: - version "11.0.0" - resolved "https://registry.yarnpkg.com/remark-parse/-/remark-parse-11.0.0.tgz#aa60743fcb37ebf6b069204eb4da304e40db45a1" - integrity sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA== - dependencies: - "@types/mdast" "^4.0.0" - mdast-util-from-markdown "^2.0.0" - micromark-util-types "^2.0.0" - unified "^11.0.0" - -remark-rehype@^11.0.0, remark-rehype@^11.1.0: - version "11.1.0" - resolved "https://registry.yarnpkg.com/remark-rehype/-/remark-rehype-11.1.0.tgz#d5f264f42bcbd4d300f030975609d01a1697ccdc" - integrity sha512-z3tJrAs2kIs1AqIIy6pzHmAHlF1hWQ+OdY4/hv+Wxe35EhyLKcajL33iUEn3ScxtFox9nUvRufR/Zre8Q08H/g== - dependencies: - "@types/hast" "^3.0.0" - "@types/mdast" "^4.0.0" - mdast-util-to-hast "^13.0.0" - unified "^11.0.0" - vfile "^6.0.0" - -repeat-string@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== - -resolve-from@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - -resolve-pkg-maps@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz#616b3dc2c57056b5588c31cdf4b3d64db133720f" - integrity sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw== - -resolve@^1.1.6, resolve@^1.1.7, resolve@^1.19.0, resolve@^1.22.2, resolve@^1.22.4: - version "1.22.8" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" - integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== - dependencies: - is-core-module "^2.13.0" - path-parse "^1.0.7" - 
supports-preserve-symlinks-flag "^1.0.0" - -resolve@^2.0.0-next.4: - version "2.0.0-next.5" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.5.tgz#6b0ec3107e671e52b68cd068ef327173b90dc03c" - integrity sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA== - dependencies: - is-core-module "^2.13.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -reusify@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== - -rimraf@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - -run-parallel@^1.1.9: - version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" - -safe-array-concat@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.1.0.tgz#8d0cae9cb806d6d1c06e08ab13d847293ebe0692" - integrity sha512-ZdQ0Jeb9Ofti4hbt5lX3T2JcAamT9hfzYU1MNB+z/jaEbB6wfFfPIR/zEORmZqobkCCJhSjodobH6WHNmJ97dg== - dependencies: - call-bind "^1.0.5" - get-intrinsic "^1.2.2" - has-symbols "^1.0.3" - isarray "^2.0.5" - -safe-buffer@^5.1.2, safe-buffer@^5.2.1: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -safe-regex-test@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.2.tgz#3ba32bdb3ea35f940ee87e5087c60ee786c3f6c5" - integrity sha512-83S9w6eFq12BBIJYvjMux6/dkirb8+4zJRA9cxNBVb7Wq5fJBW+Xze48WqR8pxua7bDuAaaAxtVVd4Idjp1dBQ== - dependencies: - call-bind "^1.0.5" - get-intrinsic "^1.2.2" - is-regex "^1.1.4" - -scheduler@^0.23.0: - version "0.23.0" - resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" - integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw== - dependencies: - loose-envify "^1.1.0" - -section-matter@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/section-matter/-/section-matter-1.0.0.tgz#e9041953506780ec01d59f292a19c7b850b84167" - integrity sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA== - dependencies: - extend-shallow "^2.0.1" - kind-of "^6.0.0" - -semver@^6.3.1: - version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -semver@^7.5.2, semver@^7.5.4: - version "7.5.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" - integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== - dependencies: - lru-cache "^6.0.0" - -set-function-length@^1.1.1: - 
version "1.2.0" - resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.0.tgz#2f81dc6c16c7059bda5ab7c82c11f03a515ed8e1" - integrity sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w== - dependencies: - define-data-property "^1.1.1" - function-bind "^1.1.2" - get-intrinsic "^1.2.2" - gopd "^1.0.1" - has-property-descriptors "^1.0.1" - -set-function-name@^2.0.0, set-function-name@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/set-function-name/-/set-function-name-2.0.1.tgz#12ce38b7954310b9f61faa12701620a0c882793a" - integrity sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA== - dependencies: - define-data-property "^1.0.1" - functions-have-names "^1.2.3" - has-property-descriptors "^1.0.0" - -shebang-command@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - -shelljs@^0.8.4: - version "0.8.5" - resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c" - integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow== - dependencies: - glob "^7.0.0" - interpret "^1.0.0" - rechoir "^0.6.2" - -side-channel@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== - dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" - -signal-exit@^4.0.1: - version "4.1.0" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" - integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== - -slash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== - -source-map-js@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" - integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== - -source-map-support@^0.5.21: - version "0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@^0.6.0: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -source-map@^0.7.0: - 
version "0.7.4" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" - integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== - -space-separated-tokens@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz#1ecd9d2350a3844572c3f4a312bceb018348859f" - integrity sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q== - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - -streamsearch@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" - integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== - -"string-width-cjs@npm:string-width@^4.2.0": - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string-width@^5.0.1, string-width@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" - integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== - dependencies: - eastasianwidth "^0.2.0" - emoji-regex "^9.2.2" - strip-ansi "^7.0.1" - -string.prototype.matchall@^4.0.8: - version "4.0.10" - resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.10.tgz#a1553eb532221d4180c51581d6072cd65d1ee100" - integrity sha512-rGXbGmOEosIQi6Qva94HUjgPs9vKW+dkG7Y8Q5O2OYkWL6wFaTRZO8zM4mhP94uX55wgyrXzfS2aGtGzUL7EJQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - get-intrinsic "^1.2.1" - has-symbols "^1.0.3" - internal-slot "^1.0.5" - regexp.prototype.flags "^1.5.0" - set-function-name "^2.0.0" - side-channel "^1.0.4" - -string.prototype.trim@^1.2.8: - version "1.2.8" - resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz#f9ac6f8af4bd55ddfa8895e6aea92a96395393bd" - integrity sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - -string.prototype.trimend@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz#1bb3afc5008661d73e2dc015cd4853732d6c471e" - integrity sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA== - dependencies: - call-bind "^1.0.2" - 
define-properties "^1.2.0" - es-abstract "^1.22.1" - -string.prototype.trimstart@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz#d4cdb44b83a4737ffbac2d406e405d43d0184298" - integrity sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg== - dependencies: - call-bind "^1.0.2" - define-properties "^1.2.0" - es-abstract "^1.22.1" - -stringify-entities@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/stringify-entities/-/stringify-entities-4.0.3.tgz#cfabd7039d22ad30f3cc435b0ca2c1574fc88ef8" - integrity sha512-BP9nNHMhhfcMbiuQKCqMjhDP5yBCAxsPu4pHFFzJ6Alo9dZgY4VLDPutXqIjpRiMoKdp7Av85Gr73Q5uH9k7+g== - dependencies: - character-entities-html4 "^2.0.0" - character-entities-legacy "^3.0.0" - -"strip-ansi-cjs@npm:strip-ansi@^6.0.1": - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-ansi@^7.0.1: - version "7.1.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" - integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== - dependencies: - ansi-regex "^6.0.1" - -strip-bom-string@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92" - integrity sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g== - -strip-bom@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" - integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== - -strip-json-comments@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -style-to-object@^0.4.0: - version "0.4.4" - resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-0.4.4.tgz#266e3dfd56391a7eefb7770423612d043c3f33ec" - integrity sha512-HYNoHZa2GorYNyqiCaBgsxvcJIn7OHq6inEga+E6Ke3m5JkoqpQbnFssk4jwe+K7AhGa2fcha4wSOf1Kn01dMg== - dependencies: - inline-style-parser "0.1.1" - -style-to-object@^1.0.0: - version "1.0.6" - resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-1.0.6.tgz#0c28aed8be1813d166c60d962719b2907c26547b" - integrity sha512-khxq+Qm3xEyZfKd/y9L3oIWQimxuc4STrQKtQn8aSDRHb8mFgpukgX1hdzfrMEW6JCjyJ8p89x+IUMVnCBI1PA== - dependencies: - inline-style-parser "0.2.3" - -styled-jsx@5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.1.1.tgz#839a1c3aaacc4e735fed0781b8619ea5d0009d1f" - integrity sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw== - dependencies: - 
client-only "0.0.1" - -sucrase@^3.32.0: - version "3.35.0" - resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.35.0.tgz#57f17a3d7e19b36d8995f06679d121be914ae263" - integrity sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA== - dependencies: - "@jridgewell/gen-mapping" "^0.3.2" - commander "^4.0.0" - glob "^10.3.10" - lines-and-columns "^1.1.6" - mz "^2.7.0" - pirates "^4.0.1" - ts-interface-checker "^0.1.9" - -supports-color@^7.1.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== - -tailwindcss@3.4.1: - version "3.4.1" - resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.4.1.tgz#f512ca5d1dd4c9503c7d3d28a968f1ad8f5c839d" - integrity sha512-qAYmXRfk3ENzuPBakNK0SRrUDipP8NQnEY6772uDhflcQz5EhRdD7JNZxyrFHVQNCwULPBn6FNPp9brpO7ctcA== - dependencies: - "@alloc/quick-lru" "^5.2.0" - arg "^5.0.2" - chokidar "^3.5.3" - didyoumean "^1.2.2" - dlv "^1.1.3" - fast-glob "^3.3.0" - glob-parent "^6.0.2" - is-glob "^4.0.3" - jiti "^1.19.1" - lilconfig "^2.1.0" - micromatch "^4.0.5" - normalize-path "^3.0.0" - object-hash "^3.0.0" - picocolors "^1.0.0" - postcss "^8.4.23" - postcss-import "^15.1.0" - postcss-js "^4.0.1" - postcss-load-config "^4.0.1" - postcss-nested "^6.0.1" - postcss-selector-parser "^6.0.11" - resolve "^1.22.2" - sucrase "^3.32.0" - -tapable@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" - integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== - -tar@^4.4.8: - version "4.4.19" - resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" - integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA== - dependencies: - chownr "^1.1.4" - fs-minipass "^1.2.7" - minipass "^2.9.0" - minizlib "^1.3.3" - mkdirp "^0.5.5" - safe-buffer "^5.2.1" - yallist "^3.1.1" - -text-table@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== - -thenify-all@^1.0.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" - integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== - dependencies: - thenify ">= 3.1.0 < 4" - -"thenify@>= 3.1.0 < 4": - version "3.3.1" - resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" - integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== - dependencies: - any-promise "^1.0.0" - -thingies@^1.20.0: - version "1.21.0" - resolved 
"https://registry.yarnpkg.com/thingies/-/thingies-1.21.0.tgz#e80fbe58fd6fdaaab8fad9b67bd0a5c943c445c1" - integrity sha512-hsqsJsFMsV+aD4s3CWKk85ep/3I9XzYV/IXaSouJMYIoDlgyi11cBhsqYe9/geRfB0YIikBQg6raRaM+nIMP9g== - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -toml@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/toml/-/toml-3.0.0.tgz#342160f1af1904ec9d204d03a5d61222d762c5ee" - integrity sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w== - -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== - -tree-dump@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/tree-dump/-/tree-dump-1.0.2.tgz#c460d5921caeb197bde71d0e9a7b479848c5b8ac" - integrity sha512-dpev9ABuLWdEubk+cIaI9cHwRNNDjkBBLXTwI4UCUFdQ5xXKqNXoK4FEciw/vxf+NQ7Cb7sGUyeUtORvHIdRXQ== - -trim-lines@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/trim-lines/-/trim-lines-3.0.1.tgz#d802e332a07df861c48802c04321017b1bd87338" - integrity sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg== - -trough@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/trough/-/trough-2.1.0.tgz#0f7b511a4fde65a46f18477ab38849b22c554876" - integrity sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g== - -ts-api-utils@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.0.3.tgz#f12c1c781d04427313dbac808f453f050e54a331" - integrity sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg== - -ts-interface-checker@^0.1.9: - version "0.1.13" - resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" - integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== - -ts-pattern@^5.0.6: - version "5.2.0" - resolved "https://registry.yarnpkg.com/ts-pattern/-/ts-pattern-5.2.0.tgz#2cad8b58fcd87c52d1785f84eba572641e1bb5f3" - integrity sha512-aGaSpOlDcns7ZoeG/OMftWyQG1KqPVhgplhJxNCvyIXqWrumM5uIoOSarw/hmmi/T1PnuQ/uD8NaFHvLpHicDg== - -tsconfig-paths@^3.15.0: - version "3.15.0" - resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4" - integrity sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg== - dependencies: - "@types/json5" "^0.0.29" - json5 "^1.0.2" - minimist "^1.2.6" - strip-bom "^3.0.0" - -tslib@^2.0.0: - version "2.6.3" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.3.tgz#0438f810ad7a9edcde7a241c3d80db693c8cbfe0" - integrity sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ== - -tslib@^2.0.3, tslib@^2.4.0, tslib@^2.4.1: - version "2.6.2" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" - integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== - 
-typanion@^3.12.1, typanion@^3.8.0: - version "3.14.0" - resolved "https://registry.yarnpkg.com/typanion/-/typanion-3.14.0.tgz#a766a91810ce8258033975733e836c43a2929b94" - integrity sha512-ZW/lVMRabETuYCd9O9ZvMhAh8GslSqaUjxmK/JLPCh6l73CvLBiuXswj/+7LdnWOgYsQ130FqLzFz5aGT4I3Ug== - -type-check@^0.4.0, type-check@~0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" - integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== - dependencies: - prelude-ls "^1.2.1" - -type-fest@^0.20.2: - version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" - integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== - -type-fest@^4.10.0: - version "4.20.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-4.20.1.tgz#d97bb1e923bf524e5b4b43421d586760fb2ee8be" - integrity sha512-R6wDsVsoS9xYOpy8vgeBlqpdOyzJ12HNfQhC/aAKWM3YoCV9TtunJzh/QpkMgeDhkoynDcw5f1y+qF9yc/HHyg== - -typed-array-buffer@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz#18de3e7ed7974b0a729d3feecb94338d1472cd60" - integrity sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.2.1" - is-typed-array "^1.1.10" - -typed-array-byte-length@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz#d787a24a995711611fb2b87a4052799517b230d0" - integrity sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA== - dependencies: - call-bind "^1.0.2" - for-each "^0.3.3" - has-proto "^1.0.1" - is-typed-array "^1.1.10" - -typed-array-byte-offset@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz#cbbe89b51fdef9cd6aaf07ad4707340abbc4ea0b" - integrity sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg== - dependencies: - available-typed-arrays "^1.0.5" - call-bind "^1.0.2" - for-each "^0.3.3" - has-proto "^1.0.1" - is-typed-array "^1.1.10" - -typed-array-length@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" - integrity sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng== - dependencies: - call-bind "^1.0.2" - for-each "^0.3.3" - is-typed-array "^1.1.9" - -typescript@5.3.3: - version "5.3.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.3.3.tgz#b3ce6ba258e72e6305ba66f5c9b452aaee3ffe37" - integrity sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw== - -unbox-primitive@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" - integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== - dependencies: - call-bind "^1.0.2" - has-bigints "^1.0.2" - has-symbols "^1.0.3" - which-boxed-primitive "^1.0.2" - -undici-types@~5.26.4: - version "5.26.5" - resolved 
"https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" - integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== - -unified@^11.0.0, unified@^11.0.4: - version "11.0.5" - resolved "https://registry.yarnpkg.com/unified/-/unified-11.0.5.tgz#f66677610a5c0a9ee90cab2b8d4d66037026d9e1" - integrity sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA== - dependencies: - "@types/unist" "^3.0.0" - bail "^2.0.0" - devlop "^1.0.0" - extend "^3.0.0" - is-plain-obj "^4.0.0" - trough "^2.0.0" - vfile "^6.0.0" - -unist-util-is@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-6.0.0.tgz#b775956486aff107a9ded971d996c173374be424" - integrity sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw== - dependencies: - "@types/unist" "^3.0.0" - -unist-util-position-from-estree@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/unist-util-position-from-estree/-/unist-util-position-from-estree-2.0.0.tgz#d94da4df596529d1faa3de506202f0c9a23f2200" - integrity sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ== - dependencies: - "@types/unist" "^3.0.0" - -unist-util-position@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/unist-util-position/-/unist-util-position-5.0.0.tgz#678f20ab5ca1207a97d7ea8a388373c9cf896be4" - integrity sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA== - dependencies: - "@types/unist" "^3.0.0" - -unist-util-remove-position@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/unist-util-remove-position/-/unist-util-remove-position-5.0.0.tgz#fea68a25658409c9460408bc6b4991b965b52163" - integrity sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q== - dependencies: - "@types/unist" "^3.0.0" - unist-util-visit "^5.0.0" - -unist-util-stringify-position@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz#449c6e21a880e0855bf5aabadeb3a740314abac2" - integrity sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ== - dependencies: - "@types/unist" "^3.0.0" - -unist-util-visit-parents@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz#4d5f85755c3b8f0dc69e21eca5d6d82d22162815" - integrity sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw== - dependencies: - "@types/unist" "^3.0.0" - unist-util-is "^6.0.0" - -unist-util-visit@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-5.0.0.tgz#a7de1f31f72ffd3519ea71814cccf5fd6a9217d6" - integrity sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg== - dependencies: - "@types/unist" "^3.0.0" - unist-util-is "^6.0.0" - unist-util-visit-parents "^6.0.0" - -update-browserslist-db@^1.0.13: - version "1.0.13" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" - integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== - dependencies: - escalade "^3.1.1" - picocolors "^1.0.0" - -uri-js@^4.2.2: 
- version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - -util-deprecate@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== - -uuid@^9.0.1: - version "9.0.1" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" - integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== - -vfile-location@^5.0.0: - version "5.0.2" - resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-5.0.2.tgz#220d9ca1ab6f8b2504a4db398f7ebc149f9cb464" - integrity sha512-NXPYyxyBSH7zB5U6+3uDdd6Nybz6o6/od9rk8bp9H8GR3L+cm/fC0uUTbqBmUTnMCUDslAGBOIKNfvvb+gGlDg== - dependencies: - "@types/unist" "^3.0.0" - vfile "^6.0.0" - -vfile-message@^4.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-4.0.2.tgz#c883c9f677c72c166362fd635f21fc165a7d1181" - integrity sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw== - dependencies: - "@types/unist" "^3.0.0" - unist-util-stringify-position "^4.0.0" - -vfile@^6.0.0, vfile@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/vfile/-/vfile-6.0.1.tgz#1e8327f41eac91947d4fe9d237a2dd9209762536" - integrity sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw== - dependencies: - "@types/unist" "^3.0.0" - unist-util-stringify-position "^4.0.0" - vfile-message "^4.0.0" - -web-namespaces@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-2.0.1.tgz#1010ff7c650eccb2592cebeeaf9a1b253fd40692" - integrity sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ== - -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== - -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - -which-boxed-primitive@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" - integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== - dependencies: - is-bigint "^1.0.1" - is-boolean-object "^1.1.0" - is-number-object "^1.0.4" - is-string "^1.0.5" - is-symbol "^1.0.3" - -which-builtin-type@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/which-builtin-type/-/which-builtin-type-1.1.3.tgz#b1b8443707cc58b6e9bf98d32110ff0c2cbd029b" - integrity sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw== - dependencies: - function.prototype.name "^1.1.5" - has-tostringtag "^1.0.0" 
- is-async-function "^2.0.0" - is-date-object "^1.0.5" - is-finalizationregistry "^1.0.2" - is-generator-function "^1.0.10" - is-regex "^1.1.4" - is-weakref "^1.0.2" - isarray "^2.0.5" - which-boxed-primitive "^1.0.2" - which-collection "^1.0.1" - which-typed-array "^1.1.9" - -which-collection@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.1.tgz#70eab71ebbbd2aefaf32f917082fc62cdcb70906" - integrity sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A== - dependencies: - is-map "^2.0.1" - is-set "^2.0.1" - is-weakmap "^2.0.1" - is-weakset "^2.0.1" - -which-typed-array@^1.1.11, which-typed-array@^1.1.13, which-typed-array@^1.1.9: - version "1.1.13" - resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.13.tgz#870cd5be06ddb616f504e7b039c4c24898184d36" - integrity sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow== - dependencies: - available-typed-arrays "^1.0.5" - call-bind "^1.0.4" - for-each "^0.3.3" - gopd "^1.0.1" - has-tostringtag "^1.0.0" - -which@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrap-ansi@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" - integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== - dependencies: - ansi-styles "^6.1.0" - string-width "^5.0.1" - strip-ansi "^7.0.1" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== - -y18n@^5.0.5: - version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== - -yallist@^3.0.0, yallist@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" - integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yaml@^2.0.0: - version "2.4.5" - resolved 
"https://registry.yarnpkg.com/yaml/-/yaml-2.4.5.tgz#60630b206dd6d84df97003d33fc1ddf6296cca5e" - integrity sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg== - -yaml@^2.3.1, yaml@^2.3.4: - version "2.3.4" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.4.tgz#53fc1d514be80aabf386dc6001eb29bf3b7523b2" - integrity sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA== - -yargs-parser@^21.1.1: - version "21.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" - integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== - -yargs@^17.7.2: - version "17.7.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" - integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== - dependencies: - cliui "^8.0.1" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.3" - y18n "^5.0.5" - yargs-parser "^21.1.1" - -yauzl@^2.10.0: - version "2.10.0" - resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" - integrity sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g== - dependencies: - buffer-crc32 "~0.2.3" - fd-slicer "~1.1.0" - -yocto-queue@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" - integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== - -zod@^3.22.4: - version "3.23.8" - resolved "https://registry.yarnpkg.com/zod/-/zod-3.23.8.tgz#e37b957b5d52079769fb8097099b592f0ef4067d" - integrity sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g== - -zwitch@^2.0.0, zwitch@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/zwitch/-/zwitch-2.0.4.tgz#c827d4b0acb76fc3e685a4c6ec2902d51070e9d7" - integrity sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==